lang
stringclasses 2
values | license
stringclasses 13
values | stderr
stringlengths 0
343
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 6
87.7k
| new_contents
stringlengths 0
6.23M
| new_file
stringlengths 3
311
| old_contents
stringlengths 0
6.23M
| message
stringlengths 6
9.1k
| old_file
stringlengths 3
311
| subject
stringlengths 0
4k
| git_diff
stringlengths 0
6.31M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | eca7a95958956946d6d9ce39b6f56016796f23e6 | 0 | didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit,didi/DoraemonKit | package com.didichuxing.doraemonkit.kit.core;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.CompoundButton;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.Switch;
import android.widget.TextView;
import androidx.constraintlayout.widget.ConstraintLayout;
import com.didichuxing.doraemonkit.R;
import com.didichuxing.doraemonkit.widget.tableview.utils.DensityUtils;
/**
* @Author: changzuozhen
* @Date: 2020-12-22
* <p>
* 悬浮窗,支持折叠
* @see com.didichuxing.doraemonkit.kit.core.SimpleDokitView
* 启动工具函数
* @see com.didichuxing.doraemonkit.kit.core.SimpleDokitStarter.startFloating
*/
public abstract class SimpleDokitView extends AbsDokitView {
private static final String TAG = "SimpleBaseFloatPage";
int mWidth;
int mHeight;
int mDp50InPx;
private WindowManager mWindowManager;
private FrameLayout mFloatContainer;
private Switch mShowSwitch;
@Override
public void onEnterForeground() {
super.onEnterForeground();
getRootView().setVisibility(View.VISIBLE);
}
@Override
public void onEnterBackground() {
super.onEnterBackground();
getRootView().setVisibility(View.GONE);
}
public void showContainer(boolean isChecked) {
mFloatContainer.setVisibility(isChecked ? View.VISIBLE : View.GONE);
invalidate();
}
@Override
public void onCreate(Context context) {
mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
DisplayMetrics outMetrics = new DisplayMetrics();
mWindowManager.getDefaultDisplay().getMetrics(outMetrics);
mDp50InPx = DensityUtils.dp2px(context, 50);
mWidth = outMetrics.widthPixels - mDp50InPx;
mHeight = outMetrics.heightPixels - mDp50InPx;
}
@Override
public View onCreateView(Context context, FrameLayout rootView) {
ConstraintLayout root = (ConstraintLayout) LayoutInflater.from(context).inflate(R.layout.dk_layout_simple_dokit_float_view, rootView, false);
mFloatContainer = root.findViewById(R.id.floatContainer);
mShowSwitch = root.findViewById(R.id.showHideSwitch);
TextView title = root.findViewById(R.id.floatPageTitle);
ImageButton close = root.findViewById(R.id.floatClose);
close.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
DokitViewManager.getInstance().detach(SimpleDokitView.this);
}
});
title.setText(getTag());
mShowSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
showContainer(isChecked);
}
});
LayoutInflater.from(context).inflate(getLayoutId(), mFloatContainer);
return root;
}
@Override
public void onViewCreated(FrameLayout rootView) {
initView();
}
protected abstract int getLayoutId();
@Override
public void initDokitViewLayoutParams(DokitViewLayoutParams params) {
params.width = DokitViewLayoutParams.WRAP_CONTENT;
params.height = DokitViewLayoutParams.WRAP_CONTENT;
params.gravity = Gravity.TOP | Gravity.LEFT;
params.x = 200;
params.y = 200;
}
@Override
public boolean onBackPressed() {
mShowSwitch.setChecked(false);
return super.onBackPressed();
}
@Override
public boolean shouldDealBackKey() {
return true;
}
protected void initView() {
}
@Override
public void invalidate() {
if (getDoKitView() == null) {
return;
}
if (isNormalMode()) {
FrameLayout.LayoutParams layoutParams = getNormalLayoutParams();
if (layoutParams == null) {
return;
}
layoutParams.width = WindowManager.LayoutParams.WRAP_CONTENT;
layoutParams.height = WindowManager.LayoutParams.WRAP_CONTENT;
getDoKitView().setLayoutParams(layoutParams);
} else {
WindowManager.LayoutParams layoutParams = getSystemLayoutParams();
if (layoutParams == null) {
return;
}
layoutParams.width = WindowManager.LayoutParams.WRAP_CONTENT;
layoutParams.height = WindowManager.LayoutParams.WRAP_CONTENT;
mWindowManager.updateViewLayout(getDoKitView(), layoutParams);
}
}
} | Android/java/doraemonkit/src/main/java/com/didichuxing/doraemonkit/kit/core/SimpleDokitView.java | package com.didichuxing.doraemonkit.kit.core;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.CompoundButton;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.Switch;
import android.widget.TextView;
import androidx.constraintlayout.widget.ConstraintLayout;
import com.didichuxing.doraemonkit.R;
import com.didichuxing.doraemonkit.widget.tableview.utils.DensityUtils;
/**
* @Author: changzuozhen
* @Date: 2020-12-22
* <p>
* 悬浮窗,支持折叠
* @see com.didichuxing.doraemonkit.kit.core.SimpleDokitView
* 启动工具函数
* @see com.didichuxing.doraemonkit.kit.core.SimpleDokitStarter.startFloating
*/
public abstract class SimpleDokitView extends AbsDokitView {
private static final String TAG = "SimpleBaseFloatPage";
int mWidth;
int mHeight;
int mDp50InPx;
private WindowManager mWindowManager;
private FrameLayout mFloatContainer;
private Switch mShowSwitch;
@Override
public void onEnterForeground() {
super.onEnterForeground();
getRootView().setVisibility(View.VISIBLE);
}
@Override
public void onEnterBackground() {
super.onEnterBackground();
getRootView().setVisibility(View.GONE);
}
public void showContainer(boolean isChecked) {
mFloatContainer.setVisibility(isChecked ? View.VISIBLE : View.GONE);
}
@Override
public void onCreate(Context context) {
mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
DisplayMetrics outMetrics = new DisplayMetrics();
mWindowManager.getDefaultDisplay().getMetrics(outMetrics);
mDp50InPx = DensityUtils.dp2px(context, 50);
mWidth = outMetrics.widthPixels - mDp50InPx;
mHeight = outMetrics.heightPixels - mDp50InPx;
}
@Override
public View onCreateView(Context context, FrameLayout rootView) {
ConstraintLayout root = (ConstraintLayout) LayoutInflater.from(context).inflate(R.layout.dk_layout_simple_dokit_float_view, rootView, false);
mFloatContainer = root.findViewById(R.id.floatContainer);
mShowSwitch = root.findViewById(R.id.showHideSwitch);
TextView title = root.findViewById(R.id.floatPageTitle);
ImageButton close = root.findViewById(R.id.floatClose);
close.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
DokitViewManager.getInstance().detach(SimpleDokitView.this);
}
});
title.setText(getTag());
mShowSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
showContainer(isChecked);
}
});
LayoutInflater.from(context).inflate(getLayoutId(), mFloatContainer);
return root;
}
@Override
public void onViewCreated(FrameLayout rootView) {
initView();
}
protected abstract int getLayoutId();
@Override
public void initDokitViewLayoutParams(DokitViewLayoutParams params) {
params.width = DokitViewLayoutParams.WRAP_CONTENT;
params.height = DokitViewLayoutParams.WRAP_CONTENT;
params.gravity = Gravity.TOP | Gravity.LEFT;
params.x = 200;
params.y = 200;
}
@Override
public boolean onBackPressed() {
mShowSwitch.setChecked(false);
return super.onBackPressed();
}
@Override
public boolean shouldDealBackKey() {
return true;
}
protected void initView() {
}
} | modify:
[Android]-悬浮窗适配
| Android/java/doraemonkit/src/main/java/com/didichuxing/doraemonkit/kit/core/SimpleDokitView.java | modify: [Android]-悬浮窗适配 | <ide><path>ndroid/java/doraemonkit/src/main/java/com/didichuxing/doraemonkit/kit/core/SimpleDokitView.java
<ide>
<ide> public void showContainer(boolean isChecked) {
<ide> mFloatContainer.setVisibility(isChecked ? View.VISIBLE : View.GONE);
<add> invalidate();
<ide> }
<ide>
<ide> @Override
<ide>
<ide> protected void initView() {
<ide> }
<add>
<add> @Override
<add> public void invalidate() {
<add> if (getDoKitView() == null) {
<add> return;
<add> }
<add> if (isNormalMode()) {
<add> FrameLayout.LayoutParams layoutParams = getNormalLayoutParams();
<add> if (layoutParams == null) {
<add> return;
<add> }
<add> layoutParams.width = WindowManager.LayoutParams.WRAP_CONTENT;
<add> layoutParams.height = WindowManager.LayoutParams.WRAP_CONTENT;
<add> getDoKitView().setLayoutParams(layoutParams);
<add> } else {
<add> WindowManager.LayoutParams layoutParams = getSystemLayoutParams();
<add> if (layoutParams == null) {
<add> return;
<add> }
<add> layoutParams.width = WindowManager.LayoutParams.WRAP_CONTENT;
<add> layoutParams.height = WindowManager.LayoutParams.WRAP_CONTENT;
<add> mWindowManager.updateViewLayout(getDoKitView(), layoutParams);
<add> }
<add> }
<ide> } |
|
Java | apache-2.0 | 226b749b0fdfafb72d4e7c79326727fb60348ea0 | 0 | TDDFT/aerosolve,TDDFT/aerosolve,airbnb/aerosolve,airbnb/aerosolve | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import lombok.extern.slf4j.Slf4j;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Similar to MoveFloatToStringAndFloat, however, just move defined float value into String Feature
* not using bucket. This is used when there are certain number of incorrect data,
* i.e. x = 0 doesn't mean it is worse than x = 0.00001, it just somewhere in the pipeline
* make null = 0, so before we fixed the pipeline, convert it to string feature.
*/
@Slf4j
public class FloatToStringTransform implements Transform {
private String fieldName;
private Collection<String> keys;
private Set<Double> values;
private String stringOutputName;
@Override
public void configure(Config config, String key) {
fieldName = config.getString(key + ".field1");
if (config.hasPath(key + ".keys")) {
keys = config.getStringList(key + ".keys");
}
values = new HashSet<>(config.getDoubleList(key + ".values"));
stringOutputName = config.getString(key + ".string_output");
}
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Map<String, Double>> floatFeatures = featureVector.floatFeatures;
if (floatFeatures == null || floatFeatures.isEmpty()) {
return;
}
Map<String, Double> input = floatFeatures.get(fieldName);
if (input == null || input.isEmpty()) {
return;
}
Util.optionallyCreateStringFeatures(featureVector);
Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
Set<String> stringOutput = Util.getOrCreateStringFeature(stringOutputName, stringFeatures);
Collection<String> localKeys = (keys == null)? input.keySet() : keys;
log.debug("k {} {}", localKeys, input);
for (String key : localKeys) {
moveFloatToString(
input, key, values, stringOutput);
}
}
private void moveFloatToString(
Map<String, Double> input,
String key, Set<Double> values,
Set<String> stringOutput) {
if (input.containsKey(key)) {
Double inputFloatValue = input.get(key);
if (values.contains(inputFloatValue)) {
String movedFloat = key + "=" + inputFloatValue;
stringOutput.add(movedFloat);
input.remove(key);
}
}
}
}
| core/src/main/java/com/airbnb/aerosolve/core/transforms/FloatToStringTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import lombok.extern.slf4j.Slf4j;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Similar to MoveFloatToStringAndFloat, however, just move defined float value into String Feature
* not using bucket. This is used when there are certain number of incorrect data,
* i.e. x = 0 doesn't mean it is worse than x = 0.00001, it just somewhere in the pipeline
* make null = 0, so before we fixed the pipeline, convert it to string feature.
*/
@Slf4j
public class FloatToStringTransform implements Transform {
private String fieldName;
private Collection<String> keys;
private Set<Double> values;
private String stringOutputName;
@Override
public void configure(Config config, String key) {
fieldName = config.getString(key + ".field1");
if (config.hasPath(key + ".keys")) {
keys = config.getStringList(key + ".keys");
}
values = new HashSet<>(config.getDoubleList(key + ".values"));
stringOutputName = config.getString(key + ".string_output");
}
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Map<String, Double>> floatFeatures = featureVector.floatFeatures;
if (floatFeatures == null || floatFeatures.isEmpty()) {
return;
}
Map<String, Double> input = floatFeatures.get(fieldName);
if (input == null || input.isEmpty()) {
return;
}
Util.optionallyCreateStringFeatures(featureVector);
Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
Set<String> stringOutput = Util.getOrCreateStringFeature(stringOutputName, stringFeatures);
Collection<String> localKeys = (keys == null)? input.keySet() : keys;
log.debug("k {} {}", localKeys, input);
for (String key : localKeys) {
moveFloatToStringAndFloat(
input, key, values, stringOutput);
}
}
private void moveFloatToStringAndFloat(
Map<String, Double> input,
String key, Set<Double> values,
Set<String> stringOutput) {
if (input.containsKey(key)) {
Double inputFloatValue = input.get(key);
if (values.contains(inputFloatValue)) {
String movedFloat = key + "=" + inputFloatValue;
stringOutput.add(movedFloat);
input.remove(key);
}
}
}
}
| better naming | core/src/main/java/com/airbnb/aerosolve/core/transforms/FloatToStringTransform.java | better naming | <ide><path>ore/src/main/java/com/airbnb/aerosolve/core/transforms/FloatToStringTransform.java
<ide> Collection<String> localKeys = (keys == null)? input.keySet() : keys;
<ide> log.debug("k {} {}", localKeys, input);
<ide> for (String key : localKeys) {
<del> moveFloatToStringAndFloat(
<add> moveFloatToString(
<ide> input, key, values, stringOutput);
<ide> }
<ide> }
<ide>
<del> private void moveFloatToStringAndFloat(
<add> private void moveFloatToString(
<ide> Map<String, Double> input,
<ide> String key, Set<Double> values,
<ide> Set<String> stringOutput) { |
|
Java | mit | c887bce7c1c85e79f4d4369d1227e0e1177f0a10 | 0 | selig/qea | package benchmark.rovers;
import java.io.PrintStream;
import java.util.Arrays;
abstract class DoEval<S> {
private static PrintStream old_out = System.out;
private static PrintStream null_out = null;
static{
try{new java.io.PrintStream("redirected.txt");}
catch(Exception e){}
}
public void set_runs(int r){runs=r;}
private static int runs = 5;
private static int warmup = 5;
public abstract DoWork<S> makeWork();
public void run_eval(S spec, String name, int[] args, int w){
warmup = w;
run_eval(spec,name,args);
warmup = 5;
}
public void run_eval(S spec,String name,int[] args){
System.setOut(null_out);
for(int i=0;i<warmup;i++){
DoWork<S> work = makeWork();
work.run_with_spec(spec,name,args);
}
old_out.println("==\t"+spec+":"+Arrays.toString(args)+"\t==");
for(int i=0;i<runs;i++){
System.setOut(null_out);
System.gc();
long start = System.currentTimeMillis();
DoWork<S> work = makeWork();
work.run_with_spec(spec,name,args);
long end = System.currentTimeMillis();
System.setOut(old_out);
System.out.println((end-start));
}
}
public void pick_eval(S spec, String name){
if(name.equals("IncreasingCommand")){eval_for_IncreasingCommand(spec,name);}
if(name.equals("ResourceLifecycle")){ eval_for_ResourceLifecycle(spec,name);}
if(name.equals("ExactlyOneSuccess")){ eval_for_ExactlyOneSuccess(spec,name);}
if(name.equals("AcknowledgeCommands")){ eval_for_AcknowledgeCommands(spec,name);}
if(name.equals("NestedCommand")){ eval_for_NestedCommand(spec,name);}
if(name.equals("GrantCancel")){ eval_for_GrantCancel(spec,name);}
if(name.equals("ReleaseResource")){ eval_for_ReleaseResource(spec,name);}
if(name.equals("RespectConflicts")){ eval_for_RespectConflicts(spec,name);}
if(name.equals("ExistsSatellite")){ eval_for_ExistsSatellite(spec,name);}
if(name.equals("ExistsLeader")){ eval_for_ExistsLeader(spec,name);}
if(name.equals("MessageHashCorrectInvInt")){ eval_for_MessageHashCorrectInvInt(spec,name);}
}
public void eval_for_IncreasingCommand(S spec, String name){
run_eval(spec,name,new int[]{10000}); // put this one first to make sure we're nice and warm
run_eval(spec,name,new int[]{10});
run_eval(spec,name,new int[]{100});
run_eval(spec,name,new int[]{1000});
run_eval(spec,name,new int[]{10000},0);
run_eval(spec,name,new int[]{100000},0);
run_eval(spec,name,new int[]{1000000},0);
}
public void eval_for_ResourceLifecycle(S spec, String name){
run_eval(spec,name,new int[]{10,10000},10);
run_eval(spec,name,new int[]{100,10000});
run_eval(spec,name,new int[]{1000,10000},0);
run_eval(spec,name,new int[]{5000,10000},0);
run_eval(spec,name,new int[]{100,1000000},0);
run_eval(spec,name,new int[]{1000,1000000},0);
run_eval(spec,name,new int[]{5000,1000000},0);
}
public void eval_for_ExactlyOneSuccess(S spec, String name){
run_eval(spec,name,new int[]{10},10);
run_eval(spec,name,new int[]{100});
run_eval(spec,name,new int[]{1000},0);
run_eval(spec,name,new int[]{10000},0);
}
public void eval_for_AcknowledgeCommands(S spec, String name){
run_eval(spec,name,new int[]{10});
run_eval(spec,name,new int[]{100});
run_eval(spec,name,new int[]{1000},0);
run_eval(spec,name,new int[]{10000},0);
run_eval(spec,name,new int[]{100000},0);
run_eval(spec,name,new int[]{1000000},0);
}
public void eval_for_NestedCommand(S spec, String name){
run_eval(spec,name,new int[]{2,2,10});
run_eval(spec,name,new int[]{3,3,10},0);
run_eval(spec,name,new int[]{2,2,100},0);
}
public void eval_for_GrantCancel(S spec, String name){
run_eval(spec,name,new int[]{10,10,1000},100);
run_eval(spec,name,new int[]{10,10,10000});
run_eval(spec,name,new int[]{100,100,1000},0);
run_eval(spec,name,new int[]{100,100,10000},0);
run_eval(spec,name,new int[]{1000,1000,1000});
run_eval(spec,name,new int[]{1000,1000,10000},0);
run_eval(spec,name,new int[]{1000,1000,1000000},0);
}
public void eval_for_ReleaseResource(S spec, String name){
run_eval(spec,name,new int[]{5,5,100});
run_eval(spec,name,new int[]{5,5,1000});
run_eval(spec,name,new int[]{10,10,100},0);
run_eval(spec,name,new int[]{10,10,1000},0);
run_eval(spec,name,new int[]{10,10,10000},0);
}
public void eval_for_RespectConflicts(S spec, String name){
run_eval(spec,name,new int[]{3,100,100});
run_eval(spec,name,new int[]{4,100,100},0);
run_eval(spec,name,new int[]{5,100,100},0);
run_eval(spec,name,new int[]{6,100,100},0);
run_eval(spec,name,new int[]{7,100,100},0);
run_eval(spec,name,new int[]{3,1000,1000},0);
run_eval(spec,name,new int[]{4,1000,1000},0);
run_eval(spec,name,new int[]{5,1000,1000},0);
}
public void eval_for_ExistsSatellite(S spec, String name){
run_eval(spec,name,new int[]{10,10});
run_eval(spec,name,new int[]{10,100});
run_eval(spec,name,new int[]{10,1000},0);
run_eval(spec,name,new int[]{100,100},0);
run_eval(spec,name,new int[]{100,1000},0);
run_eval(spec,name,new int[]{1000,1000},0);
}
public void eval_for_ExistsLeader(S spec, String name){
run_eval(spec,name,new int[]{5});
run_eval(spec,name,new int[]{10},0);
run_eval(spec,name,new int[]{15},0);
run_eval(spec,name,new int[]{20},0);
}
public void eval_for_MessageHashCorrectInvInt(S spec, String name){
run_eval(spec,name,new int[]{100,100},50);
run_eval(spec,name,new int[]{100,1000});
run_eval(spec,name,new int[]{100,10000},0);
run_eval(spec,name,new int[]{1000,1000},0);
run_eval(spec,name,new int[]{1000,10000},0);
run_eval(spec,name,new int[]{1000,100000},0);
run_eval(spec,name,new int[]{1000,1000000},0);
}
}
| code/qea/src/benchmark/rovers/DoEval.java | package benchmark.rovers;
import java.io.PrintStream;
import java.util.Arrays;
abstract class DoEval<S> {
private static PrintStream old_out = System.out;
private static PrintStream null_out = null;
static{
try{new java.io.PrintStream("redirected.txt");}
catch(Exception e){}
}
public void set_runs(int r){runs=r;}
private static int runs = 5;
private static int warmup = 5;
public abstract DoWork<S> makeWork();
public void run_eval(S spec, String name, int[] args, int w){
warmup = w;
run_eval(spec,name,args);
warmup = 5;
}
public void run_eval(S spec,String name,int[] args){
System.setOut(null_out);
for(int i=0;i<warmup;i++){
DoWork<S> work = makeWork();
work.run_with_spec(spec,name,args);
}
old_out.println("==\t"+spec+":"+Arrays.toString(args)+"\t==");
for(int i=0;i<runs;i++){
System.setOut(null_out);
System.gc();
long start = System.currentTimeMillis();
DoWork<S> work = makeWork();
work.run_with_spec(spec,name,args);
long end = System.currentTimeMillis();
System.setOut(old_out);
System.out.println((end-start));
}
}
public void pick_eval(S spec, String name){
if(name.equals("IncreasingCommand")){eval_for_IncreasingCommand(spec,name);}
if(name.equals("ResourceLifecycle")){ eval_for_ResourceLifecycle(spec,name);}
if(name.equals("ExactlyOneSuccess")){ eval_for_ExactlyOneSuccess(spec,name);}
if(name.equals("AcknowledgeCommands")){ eval_for_AcknowledgeCommands(spec,name);}
if(name.equals("NestedCommand")){ eval_for_NestedCommand(spec,name);}
if(name.equals("GrantCancel")){ eval_for_GrantCancel(spec,name);}
if(name.equals("ReleaseResource")){ eval_for_ReleaseResource(spec,name);}
if(name.equals("RespectConflicts")){ eval_for_RespectConflicts(spec,name);}
if(name.equals("ExistsSatellite")){ eval_for_ExistsSatellite(spec,name);}
if(name.equals("ExistsLeader")){ eval_for_ExistsLeader(spec,name);}
if(name.equals("MessageHashCorrectInvInt")){ eval_for_MessageHashCorrectInvInt(spec,name);}
}
public void eval_for_IncreasingCommand(S spec, String name){
run_eval(spec,name,new int[]{10000}); // put this one first to make sure we're nice and warm
run_eval(spec,name,new int[]{10});
run_eval(spec,name,new int[]{100});
run_eval(spec,name,new int[]{1000});
run_eval(spec,name,new int[]{10000},0);
run_eval(spec,name,new int[]{100000},0);
run_eval(spec,name,new int[]{1000000},0);
}
public void eval_for_ResourceLifecycle(S spec, String name){
run_eval(spec,name,new int[]{10,10000},10);
run_eval(spec,name,new int[]{100,10000});
run_eval(spec,name,new int[]{1000,10000},0);
run_eval(spec,name,new int[]{5000,10000},0);
run_eval(spec,name,new int[]{100,1000000},0);
run_eval(spec,name,new int[]{1000,1000000},0);
run_eval(spec,name,new int[]{5000,1000000},0);
}
public void eval_for_ExactlyOneSuccess(S spec, String name){
run_eval(spec,name,new int[]{10},10);
run_eval(spec,name,new int[]{100});
run_eval(spec,name,new int[]{1000},0);
run_eval(spec,name,new int[]{10000},0);
}
public void eval_for_AcknowledgeCommands(S spec, String name){
run_eval(spec,name,new int[]{10});
run_eval(spec,name,new int[]{100});
run_eval(spec,name,new int[]{1000},0);
run_eval(spec,name,new int[]{10000},0);
run_eval(spec,name,new int[]{100000},0);
run_eval(spec,name,new int[]{1000000},0);
}
public void eval_for_NestedCommand(S spec, String name){
run_eval(spec,name,new int[]{2,2,10});
run_eval(spec,name,new int[]{3,3,10},0);
run_eval(spec,name,new int[]{2,2,100},0);
}
public void eval_for_GrantCancel(S spec, String name){
run_eval(spec,name,new int[]{10,10,1000},100);
run_eval(spec,name,new int[]{10,10,10000});
run_eval(spec,name,new int[]{100,100,1000},0);
run_eval(spec,name,new int[]{100,100,10000},0);
run_eval(spec,name,new int[]{1000,1000,1000});
run_eval(spec,name,new int[]{1000,1000,10000},0);
}
public void eval_for_ReleaseResource(S spec, String name){
run_eval(spec,name,new int[]{5,5,100});
run_eval(spec,name,new int[]{5,5,1000});
run_eval(spec,name,new int[]{10,10,100},0);
run_eval(spec,name,new int[]{10,10,1000},0);
run_eval(spec,name,new int[]{10,10,10000},0);
}
public void eval_for_RespectConflicts(S spec, String name){
run_eval(spec,name,new int[]{3,100,100});
run_eval(spec,name,new int[]{4,100,100},0);
run_eval(spec,name,new int[]{5,100,100},0);
run_eval(spec,name,new int[]{6,100,100},0);
run_eval(spec,name,new int[]{7,100,100},0);
run_eval(spec,name,new int[]{3,1000,1000},0);
run_eval(spec,name,new int[]{4,1000,1000},0);
run_eval(spec,name,new int[]{5,1000,1000},0);
}
public void eval_for_ExistsSatellite(S spec, String name){
run_eval(spec,name,new int[]{10,10});
run_eval(spec,name,new int[]{10,100});
run_eval(spec,name,new int[]{10,1000},0);
run_eval(spec,name,new int[]{100,100},0);
run_eval(spec,name,new int[]{100,1000},0);
run_eval(spec,name,new int[]{1000,1000},0);
}
public void eval_for_ExistsLeader(S spec, String name){
run_eval(spec,name,new int[]{5});
run_eval(spec,name,new int[]{10},0);
run_eval(spec,name,new int[]{15},0);
run_eval(spec,name,new int[]{20},0);
}
public void eval_for_MessageHashCorrectInvInt(S spec, String name){
run_eval(spec,name,new int[]{100,100},50);
run_eval(spec,name,new int[]{100,1000});
run_eval(spec,name,new int[]{100,10000},0);
run_eval(spec,name,new int[]{1000,1000},0);
run_eval(spec,name,new int[]{1000,10000},0);
run_eval(spec,name,new int[]{1000,100000},0);
run_eval(spec,name,new int[]{1000,1000000},0);
}
}
| Editing DoEval
| code/qea/src/benchmark/rovers/DoEval.java | Editing DoEval | <ide><path>ode/qea/src/benchmark/rovers/DoEval.java
<ide> run_eval(spec,name,new int[]{100,100,10000},0);
<ide> run_eval(spec,name,new int[]{1000,1000,1000});
<ide> run_eval(spec,name,new int[]{1000,1000,10000},0);
<add> run_eval(spec,name,new int[]{1000,1000,1000000},0);
<ide>
<ide> }
<ide> public void eval_for_ReleaseResource(S spec, String name){ |
|
JavaScript | mit | 6d6e1a8556eb2cf0122706ffdfa7d85a325978ca | 0 | nrabinowitz/timemap,nrabinowitz/timemap | /*----------------------------------------------------------------------------
* TimeMap
*
* @author Nick Rabinowitz (www.nickrabinowitz.com)
* The TimeMap object is intended to sync a SIMILE Timeline with a Google Map.
* Dependencies: Google Maps API v2, SIMILE Timeline v1.2
* Thanks to Jrn Clausen (http://www.oe-files.de) for initial concept and code.
*
* Copyright 2008 Nick Rabinowitz.
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* See <http://www.gnu.org/licenses/>.
*---------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
* TimeMap Class - holds references to timeline, map, and datasets
*---------------------------------------------------------------------------*/
/**
* Creates a new TimeMap with map placemarks synched to timeline events
* This will create the visible map, but not the timeline, which must be initialized separately.
*
* @constructor
* @param {element} tElement The timeline element.
* @param {element} mElement The map element.
* @param {Object} options A container for optional arguments:
* {Boolean} syncBands Whether to synchronize all bands in timeline
* {GLatLng} mapCenter Point for map center
* {Number} mapZoom Intial map zoom level
* {GMapType} mapType The maptype for the map
* {Boolean} showMapTypeCtrl Whether to display the map type control
* {Boolean} showMapCtrl Whether to show map navigation control
* {Boolean} hidePastFuture Whether to hide map placemarks for events not visible on timeline
* {Boolean} centerMapOnItems Whether to center and zoom the map based on loaded item positions
*/
function TimeMap(tElement, mElement, options) {
// save elements
this.mElement = mElement;
this.tElement = tElement;
// initialize array of datasets
this.datasets = {};
// initialize map bounds
this.mapBounds = new GLatLngBounds();
// default settings, can be overridden by options
// other options can be set directly on the map or timeline
options = options || {}; // make sure the options object isn't null
this.settings = {
syncBands: options['syncBands'] || true,
mapCenter: options['mapCenter'] || new GLatLng(0,0),
mapZoom: options['mapZoom'] || 4,
mapType: options['mapType'] || G_PHYSICAL_MAP,
showMapTypeCtrl: options['showMapTypeCtrl'] || true,
showMapCtrl: options['showMapCtrl'] || true,
hidePastFuture: options['hidePastFuture'] || true,
centerMapOnItems: options['centerMapOnItems'] || true
};
// initialize map
var s = this.settings;
if (GBrowserIsCompatible()) {
this.map = new GMap2(this.mElement);
if (s.showMapCtrl)
this.map.addControl(new GLargeMapControl());
if (s.showMapTypeCtrl)
this.map.addControl(new GMapTypeControl());
this.map.addMapType(G_PHYSICAL_MAP);
this.map.removeMapType(G_HYBRID_MAP);
this.map.enableDoubleClickZoom();
this.map.enableScrollWheelZoom();
this.map.enableContinuousZoom();
// initialize map center and zoom
this.map.setCenter(s.mapCenter, s.mapZoom);
// must be called after setCenter, for reasons unclear
this.map.setMapType(s.mapType);
}
// hijack popup window callback to show map info window
var oMap = this.map;
Timeline.DurationEventPainter.prototype._showBubble = function(x, y, evt) {
GEvent.trigger(evt.placemark, 'click');
}
}
/**
* Create an empty dataset object and add it to the timemap
*
* @param {String} id The id of the dataset
* @param {Object} options A container for optional arguments:
*/
TimeMap.prototype.createDataset = function(id, options) {
if(!("title" in options)) options.title = id;
var dataset = new TimeMapDataset(this, options);
this.datasets[id] = dataset;
return dataset;
}
/**
* Initialize the timeline - this must happen separately to allow full control of
* timeline properties.
*
* @param {BandInfo Array} bands Array of band information objects for timeline
*/
TimeMap.prototype.initTimeline = function(bands) {
    // synchronize & highlight timeline bands
    for (var x=1; x < bands.length; x++) {
        if (this.settings.syncBands)
            bands[x].syncWith = (x-1);
        bands[x].highlight = true;
    }
    // initialize timeline
    this.timeline = Timeline.create(this.tElement, bands);
    // set event listener to hide off-timeline items on the map
    if (this.settings.hidePastFuture) {
        var topband = this.timeline.getBand(0);
        var datasets = this.datasets;
        topband.addOnScrollListener(function() {
            var maxVisibleDate = topband.getMaxVisibleDate().getTime();
            var minVisibleDate = topband.getMinVisibleDate().getTime();
            // "var id" keeps the loop variable out of the global namespace
            for (var id in datasets) {
                var items = datasets[id].getItems();
                for (var x=0; x < items.length; x++) {
                    // event-less placemarks stay visible at all times
                    if (items[x].event != null) {
                        var itemStart = items[x].event.getStart().getTime();
                        var itemEnd = items[x].event.getEnd().getTime();
                        // hide items in the future
                        if (itemStart > maxVisibleDate) {
                            items[x].placemark.hide();
                            items[x].closeInfoWindow();
                        }
                        // hide items in the past (instant events: start alone decides)
                        else if (itemEnd < minVisibleDate ||
                            (items[x].event.isInstant() && itemStart < minVisibleDate)) {
                            items[x].placemark.hide();
                            items[x].closeInfoWindow();
                        }
                        else items[x].placemark.show();
                    }
                }
            }
        });
    }
    // set event listener to hide off-map items on the timeline
    // NOTE(review): settings.hideOffMap has no default in the TimeMap
    // constructor, so this branch only runs if a caller sets it directly
    if (this.settings.hideOffMap) {
        var datasets = this.datasets;
        var oMap = this.map;
        GEvent.addListener(oMap, "moveend", function() {
            var bounds = oMap.getBounds();
            for (var id in datasets) {
                var items = datasets[id].getItems();
                for (var x=0; x < items.length; x++) {
                    var placemarkPoint = items[x].placemark.getLatLng();
                    // hide events outside map bounds
                    if (!bounds.containsLatLng(placemarkPoint) && items[x].event != null)
                        items[x].event.hide();
                    else items[x].event.show();
                }
            }
        });
    }
    // add callback for window resize here instead of the html file, to conform with XHTML 1.0
    // "var" scopes the debounce timer id to this closure instead of leaking a global
    var resizeTimerID = null;
    var oTimeline = this.timeline;
    window.onresize = function() {
        if (resizeTimerID == null) {
            resizeTimerID = window.setTimeout(function() {
                resizeTimerID = null;
                oTimeline.layout();
            }, 500);
        }
    };
};
/**
* Create a legend with the current datasets, using an existing DOM element.
* This relies on the color property being set for each dataset
* XXX: still depends on jQuery...
*
* @param {String} legendId The id of the legend element.
*/
TimeMap.prototype.createLegend = function(legendId) {
    legendId = "#"+legendId;
    // one color swatch + title row per dataset, colored from the dataset theme
    for (id in this.datasets) {
        var dataset = this.datasets[id];
        var colorBox = '<div style="float:left;margin-right:5px;border:1px solid #000;width:12px;height:12px;background:' + dataset._theme.color + '"> </div>';
        var divHtml = '<div class="legenditem">' + colorBox+dataset.title + '</div>';
        // NOTE(review): relies on jQuery ($) being loaded - verify against page setup
        $(legendId).append(divHtml);
    }
}
/*----------------------------------------------------------------------------
* TimeMapDataset Class - holds references to items and visual themes
*---------------------------------------------------------------------------*/
/**
* Create a new TimeMap dataset to hold a set of items
*
* @constructor
* @param {TimeMap} timemap Reference to the timemap object
* @param {Object} options Object holding optional arguments:
* {String} title Title of the dataset (for the legend)
* {TimeMapDatasetTheme} theme Theme settings
*
*/
function TimeMapDataset(timemap, options) {
    // hold reference to timemap
    this.timemap = timemap;
    // initialize timeline event source
    this.eventSource = new Timeline.DefaultEventSource();
    // initialize array of items
    this._items = [];
    // allow construction without an options object (previously threw on undefined)
    options = options || {};
    // legend title and visual theme, with defaults
    this.title = ("title" in options) ? options.title : "";
    this._theme = ("theme" in options) ? options.theme : new TimeMapDatasetTheme({});
}
/*
* Get the items for this dataset
*/
/*
 * Accessor for this dataset's item list.
 * Returns the live backing array, not a copy.
 */
TimeMapDataset.prototype.getItems = function() {
    var items = this._items;
    return items;
};
/*
* Add items to map and timeline.
* Each item has both a timeline event and a map placemark.
*
* @param {Object} data Data to be loaded. See loadItem() below for the format.
* @param {Function} transform If data is not in the above format, transformation function to make it so
*/
TimeMapDataset.prototype.loadItems = function(data, transform) {
    // load each datum; loadItem() builds the event/placemark pair and
    // grows this.timemap.mapBounds as a side effect
    for (var x=0; x < data.length; x++) {
        this.loadItem(data[x], transform);
    }
    var tm = this.timemap;
    // XXX - probably change to this.timemap.onLoadItems()
    if (tm.settings.centerMapOnItems) {
        // determine the zoom level from the bounds
        tm.map.setZoom(tm.map.getBoundsZoomLevel(tm.mapBounds));
        // determine the center from the bounds
        tm.map.setCenter(tm.mapBounds.getCenter());
    }
};
/*
* Add one item to map and timeline.
* Each item has both a timeline event and a map placemark.
*
* @param {Object} data Data to be loaded, in the following format:
* {String} title Title of the item (visible on timeline)
* {Iso8601DateTime} start Start time of the event on the timeline
* {Iso8601DateTime} end End time of the event on the timeline (duration events only)
* {Object} point Data for a single-point placemark:
* {Float} lat Latitude of map marker
* {Float} lon Longitude of map marker
* {Array of points} polyline Data for a polyline placemark, in format above
* {Array of points} polygon Data for a polygon placemark, in format above
* {Object} options Optional arguments to be passed to the TimeMapItem:
* {String} description Description to be shown in the info window
* {String} infoHtml Full HTML for the info window, defaults to title + description
* {String} infoUrl URL from which to retrieve full HTML for the info window
* {String} maxInfoHtml Full HTML for the maximized info window
* {String} maxInfoUrl URL from which to retrieve full HTML for the maximized info window
* {String} maxOnly Whether to auto-maximize on open
* @param {Function} transform If data is not in the above format, transformation function to make it so
*/
TimeMapDataset.prototype.loadItem = function(data, transform) {
    // apply transformation, if any
    if (transform != undefined)
        data = transform(data);
    // transform functions can return a null value to skip a datum in the set
    if (data == null) return;
    var tm = this.timemap;
    // create timeline event; missing or empty dates yield an event-less placemark
    var start = (data.start == undefined||data.start == "") ? null :
        Timeline.DateTime.parseIso8601DateTime(data.start);
    var end = (data.end == undefined||data.end == "") ? null :
        Timeline.DateTime.parseIso8601DateTime(data.end);
    var instant = (data.end == undefined);
    var eventIcon = instant ? this._theme.eventIcon : null;
    var title = data.title;
    // allow event-less placemarks - these will be always present
    if (start != null)
        var event = new Timeline.DefaultEventSource.Event(start, end, null, null,
            instant, title, null, null, null,
            eventIcon, this._theme.eventColor, null);
    else var event = null;
    // create map placemark
    var placemark = null;
    var type = "";
    var point = null;
    // point placemark
    if ("point" in data) {
        point = new GLatLng(
            parseFloat(data.point["lat"]),
            parseFloat(data.point["lon"])
        );
        // add point to visible map bounds
        if (tm.settings.centerMapOnItems) {
            tm.mapBounds.extend(point);
        }
        // "var" added: markerIcon previously leaked as an implicit global
        var markerIcon = ("icon" in data) ? data["icon"] : this._theme.icon;
        placemark = new GMarker(point, { icon: markerIcon });
        type = "marker";
    } else if ("polyline" in data || "polygon" in data) {
        var points = [];
        if ("polyline" in data)
            var line = data.polyline;
        else var line = data.polygon;
        for (var x=0; x<line.length; x++) {
            point = new GLatLng(
                parseFloat(line[x]["lat"]),
                parseFloat(line[x]["lon"])
            );
            points.push(point);
            // add point to visible map bounds
            if (tm.settings.centerMapOnItems) {
                tm.mapBounds.extend(point);
            }
        }
        if ("polyline" in data) {
            placemark = new GPolyline(points,
                this._theme.lineColor,
                this._theme.lineWeight,
                this._theme.lineOpacity);
            type = "polyline";
        } else {
            placemark = new GPolygon(points,
                this._theme.polygonLineColor,
                this._theme.polygonLineWeight,
                this._theme.polygonLineOpacity,
                this._theme.fillColor,
                this._theme.fillOpacity);
            type = "polygon";
        }
    }
    // XXX: It would be nice to handle missing placemarks better
    if (placemark == null) return;
    // define the center point of this item (used as the info window anchor)
    if (type == "marker") {
        // just the marker point
        point = placemark.getLatLng();
    } else if (type == "polyline") {
        // the middle vertex of the line
        point = placemark.getVertex(Math.floor(placemark.getVertexCount()/2));
    } else if (type == "polygon") {
        // the middle of the polygon bounds
        point = placemark.getBounds().getCenter();
    }
    var options = ("options" in data) ? data.options : {};
    options["title"] = title;
    options["type"] = type;
    options["infoPoint"] = point;
    // create cross-references between item, event and placemark
    var item = new TimeMapItem(placemark, event, tm.map, options);
    if (event != null) {
        event.placemark = placemark;
        event.item = item;
    }
    placemark.event = event;
    placemark.item = item;
    this._items.push(item);
    // add listener to make placemark open when event is clicked
    // XXX: Will need to think about how to make this work w/Ajax
    GEvent.addListener(placemark, "click", function() {
        item.openInfoWindow();
    });
    // add placemark and event to map and timeline
    tm.map.addOverlay(placemark);
    if (event != null)
        this.eventSource.add(event);
};
/*
* Static function to parse KML with time data and load it.
*
* @param {XML text} kml KML to be parsed
*/
TimeMapDataset.parseKML = function(kml) {
    var items = [];
    // "var" added: kmlnode previously leaked as an implicit global
    var kmlnode = GXml.parse(kml);
    var placemarks = kmlnode.getElementsByTagName("Placemark");
    for (var i=0; i<placemarks.length; i++) {
        var pm = placemarks[i];
        var nList, data = {};
        // reset per placemark; previously a single flag outside the loop made
        // every placemark after the first timed one skip TimeSpan parsing
        var timeCheck = false;
        // get title
        nList = pm.getElementsByTagName("name");
        if (nList.length > 0) {
            data["title"] = nList[0].firstChild.nodeValue;
        }
        // get description
        nList = pm.getElementsByTagName("description");
        data["options"] = {};
        if (nList.length > 0) {
            data["options"]["description"] = nList[0].firstChild.nodeValue;
        }
        // look for instant timestamp
        nList = pm.getElementsByTagName("TimeStamp");
        if (nList.length > 0) {
            data["start"] = nList[0].getElementsByTagName("when")[0].firstChild.nodeValue;
            timeCheck = true;
        }
        // otherwise look for span
        if (!timeCheck) {
            nList = pm.getElementsByTagName("TimeSpan");
            if (nList.length > 0) {
                data["start"] = nList[0].getElementsByTagName("begin")[0].firstChild.nodeValue;
                data["end"] = nList[0].getElementsByTagName("end")[0].firstChild.nodeValue;
                timeCheck = true;
            }
        }
        // no time on the placemark itself - inherit from enclosing Folder/Document.
        // data is a plain object, so merge fields (the old data.push() threw)
        if (!timeCheck) {
            var inherited = TimeMapDataset.parseParentNode(pm);
            if ("start" in inherited) data["start"] = inherited["start"];
            if ("end" in inherited) data["end"] = inherited["end"];
        }
        // look for marker
        nList = pm.getElementsByTagName("Point");
        if (nList.length > 0) {
            // get lat/lon (KML coordinates are lon,lat order)
            var coords = nList[0].getElementsByTagName("coordinates")[0].firstChild.nodeValue;
            var latlon = coords.split(",");
            data["point"] = {
                "lat": trim(latlon[1]),
                "lon": trim(latlon[0])
            };
        }
        // look for polyline / polygon
        else {
            nList = pm.getElementsByTagName("LineString");
            if (nList.length > 0) {
                data["polyline"] = [];
                var coords = nList[0].getElementsByTagName("coordinates")[0].firstChild.nodeValue;
                var coordArr = trim(coords).split(/[\r\n\f]+/);
                for (var x=0; x<coordArr.length; x++) {
                    var latlon = coordArr[x].split(",");
                    data["polyline"].push({
                        "lat": trim(latlon[1]),
                        "lon": trim(latlon[0])
                    });
                }
            } else {
                nList = pm.getElementsByTagName("Polygon");
                if (nList.length > 0) {
                    // store under "polygon" so loadItem() draws a GPolygon;
                    // previously stored under "polyline", losing the fill
                    data["polygon"] = [];
                    var coords = nList[0].getElementsByTagName("coordinates")[0].firstChild.nodeValue;
                    var coordArr = trim(coords).split(/[\r\n\f]+/);
                    for (var x=0; x<coordArr.length; x++) {
                        var latlon = coordArr[x].split(",");
                        data["polygon"].push({
                            "lat": trim(latlon[1]),
                            "lon": trim(latlon[0])
                        });
                    }
                }
            }
        }
        items.push(data);
    }
    // release DOM references
    kmlnode = null;
    nList = null;
    return items;
}
/*
 * Walk up from a Placemark looking for a TimeStamp or TimeSpan on an
 * enclosing Folder/Document; returns {start, end} fields when found.
 */
TimeMapDataset.parseParentNode = function(pm) {
    var check = false;
    var data = {};
    var pn = pm.parentNode;
    // DOM exposes element names as "nodeName" (the old "nodename" was always undefined)
    if (pn.nodeName == "Folder" || pn.nodeName == "Document") {
        // childNodes is a NodeList; iterate by index (for-in yields keys, not nodes)
        for (var i = 0; i < pn.childNodes.length; i++) {
            var ele = pn.childNodes[i];
            if (ele.nodeName == "TimeStamp") {
                data["start"] = ele.getElementsByTagName("when")[0].firstChild.nodeValue;
                check = true;
            }
            // was "= TimeStamp": an assignment, and the wrong tag - spans live in TimeSpan
            else if (ele.nodeName == "TimeSpan") {
                var beginNodes = ele.getElementsByTagName("begin");
                var endNodes = ele.getElementsByTagName("end");
                if (beginNodes.length > 0) data["start"] = beginNodes[0].firstChild.nodeValue;
                if (endNodes.length > 0) data["end"] = endNodes[0].firstChild.nodeValue;
                check = true;
            }
        }
    }
    // a non-container parent ends the search
    else check = true;
    if (!check) {
        // nothing found at this level; recurse upward
        data = TimeMapDataset.parseParentNode(pn);
    }
    return data;
}
/*----------------------------------------------------------------------------
* Predefined visual themes for datasets, based on Google markers
*---------------------------------------------------------------------------*/
/**
* Create a new theme for a TimeMap dataset, defining colors and images
*
* @constructor
* @param {Object} options A container for optional arguments:
* {GIcon} icon Icon for marker placemarks
* {String} color Default color in hex for events, polylines, polygons
* {String} lineColor Color for polylines, defaults to options.color
* {String} polygonLineColor Color for polygon outlines, defaults to lineColor
* {Number} lineOpacity Opacity for polylines
* {Number} polgonLineOpacity Opacity for polygon outlines, defaults to options.lineOpacity
* {Number} lineWeight Line weight in pixels for polylines
* {Number} polygonLineWeight Line weight for polygon outlines, defaults to options.lineWeight
* {String} fillColor Color for polygon fill, defaults to options.color
* {String} fillOpacity Opacity for polygon fill
* {String} eventColor Background color for duration events
* {URL} eventIcon Icon URL for instant events
*/
function TimeMapDatasetTheme(options) {
    // work out various defaults - the default theme is Google's reddish color.
    // "in" checks (rather than ||) let callers pass falsy values such as 0 opacity
    options = options || {};
    this.icon = ('icon' in options) ? options['icon'] : G_DEFAULT_ICON;
    this.color = options['color'] || "#FE766A";
    this.lineColor = options['lineColor'] || this.color;
    this.polygonLineColor = options['polygonLineColor'] || this.lineColor;
    this.lineOpacity = ('lineOpacity' in options) ? options['lineOpacity'] : 1;
    // loadItem() reads "polygonLineOpacity"; the old code only set a misspelled
    // "polgonLineOpacity", so polygons always got an undefined line opacity
    this.polygonLineOpacity = ('polygonLineOpacity' in options) ?
        options['polygonLineOpacity'] : this.lineOpacity;
    // keep the historical misspelled property as an alias for backward compatibility
    this.polgonLineOpacity = this.polygonLineOpacity;
    this.lineWeight = ('lineWeight' in options) ? options['lineWeight'] : 2;
    this.polygonLineWeight = ('polygonLineWeight' in options) ?
        options['polygonLineWeight'] : this.lineWeight;
    this.fillColor = options['fillColor'] || this.color;
    this.fillOpacity = ('fillOpacity' in options) ? options['fillOpacity'] : 0.25;
    this.eventColor = options['eventColor'] || this.color;
    this.eventIcon = options['eventIcon'] || "timemap/images/red-circle.png"; // XXX: probably need to work out the URLs better here
}
// Default reddish theme, matching Google's standard marker color.
TimeMapDataset.redTheme = function() {
    var theme = new TimeMapDatasetTheme({});
    return theme;
};
// Theme preset: blue dot marker, #5A7ACF events/lines.
TimeMapDataset.blueTheme = function() {
    // marker icon
    var markerIcon = new GIcon(G_DEFAULT_ICON);
    markerIcon.image = "http://www.google.com/intl/en_us/mapfiles/ms/icons/blue-dot.png";
    markerIcon.iconSize = new GSize(32, 32);
    markerIcon.shadow = "http://www.google.com/intl/en_us/mapfiles/ms/icons/msmarker.shadow.png"
    markerIcon.shadowSize = new GSize(59, 32);
    return new TimeMapDatasetTheme({
        icon: markerIcon,
        color: "#5A7ACF",
        eventIcon: "timemap/images/blue-circle.png"
    });
}
// Theme preset: green dot marker, #19CF54 events/lines.
TimeMapDataset.greenTheme = function() {
    // marker icon
    var markerIcon = new GIcon(G_DEFAULT_ICON);
    markerIcon.image = "http://www.google.com/intl/en_us/mapfiles/ms/icons/green-dot.png";
    markerIcon.iconSize = new GSize(32, 32);
    markerIcon.shadow = "http://www.google.com/intl/en_us/mapfiles/ms/icons/msmarker.shadow.png"
    markerIcon.shadowSize = new GSize(59, 32);
    return new TimeMapDatasetTheme({
        icon: markerIcon,
        color: "#19CF54",
        eventIcon: "timemap/images/green-circle.png"
    });
}
// Theme preset: light-blue dot marker, #5ACFCF events/lines.
TimeMapDataset.ltblueTheme = function() {
    // marker icon
    var markerIcon = new GIcon(G_DEFAULT_ICON);
    markerIcon.image = "http://www.google.com/intl/en_us/mapfiles/ms/icons/ltblue-dot.png";
    markerIcon.iconSize = new GSize(32, 32);
    markerIcon.shadow = "http://www.google.com/intl/en_us/mapfiles/ms/icons/msmarker.shadow.png"
    markerIcon.shadowSize = new GSize(59, 32);
    return new TimeMapDatasetTheme({
        icon: markerIcon,
        color: "#5ACFCF",
        eventIcon: "timemap/images/ltblue-circle.png"
    });
}
// Theme preset: purple dot marker, #8E67FD events/lines.
TimeMapDataset.purpleTheme = function() {
    // marker icon
    var markerIcon = new GIcon(G_DEFAULT_ICON);
    markerIcon.image = "http://www.google.com/intl/en_us/mapfiles/ms/icons/purple-dot.png";
    markerIcon.iconSize = new GSize(32, 32);
    markerIcon.shadow = "http://www.google.com/intl/en_us/mapfiles/ms/icons/msmarker.shadow.png"
    markerIcon.shadowSize = new GSize(59, 32);
    return new TimeMapDatasetTheme({
        icon: markerIcon,
        color: "#8E67FD",
        eventIcon: "timemap/images/purple-circle.png"
    });
}
/*----------------------------------------------------------------------------
* TimeMapItem Class - holds references to map placemark and timeline event
*---------------------------------------------------------------------------*/
/**
* Create a new TimeMap item with a map placemark and a timeline event
*
* @constructor
* @param {placemark} placemark The map placemark (one of GMarker, GPolyline, or GPolygon)
* @param {Event} event The timeline event
* @param {GMap2} map Reference to the map object
* @param {Object} options A container for optional arguments:
* {String} title Title of the item
* {String} description Plain-text description of the item
* {String} type Type of map placemark used (marker. polyline, polygon)
* {GLatLng} infoPoint Point indicating the center of this item
* {String} infoHtml Full HTML for the info window
* {String} infoUrl URL from which to retrieve full HTML for the info window
* {String} maxInfoHtml Full HTML for the maximized info window
* {String} maxInfoUrl URL from which to retrieve full HTML for the maximized info window
* {String} maxOnly Whether to auto-maximize on open
*/
function TimeMapItem(placemark, event, map, options) {
    // core cross-references
    this.placemark = placemark;
    this.event = event;
    this.map = map;
    // unpack optional arguments, falling back to empty defaults
    var opts = options || {};
    this._type        = opts['type'] || '';
    this._title       = opts['title'] || '';
    this._description = opts['description'] || '';
    this._infoPoint   = opts['infoPoint'] || null;
    this._infoHtml    = opts['infoHtml'] || '';
    this._infoUrl     = opts['infoUrl'] || '';
    this._maxInfoHtml = opts['maxInfoHtml'] || '';
    this._maxInfoUrl  = opts['maxInfoUrl'] || '';
    this._maxOnly     = opts['maxOnly'] || false;
    // simple accessors
    this.getType = function() { return this._type; };
    this.getTitle = function() { return this._title; };
    this.getInfoPoint = function() { return this._infoPoint; };
    // build default info window content when none was supplied
    if (this._infoHtml == "" && this._infoUrl == "" && !this._maxOnly) {
        var html = '<div class="infotitle">' + this._title + '</div>';
        if (this._description != "") {
            html += '<div class="infodescription">' + this._description + '</div>';
        }
        this._infoHtml = html;
    }
}
/*
* Open the info window at an appropriate point
*
* @param {GMap2} map Reference to the map object
*/
TimeMapItem.prototype.openInfoWindow = function() {
    // support for max content loaded via ajax
    var infoWindowOptions;
    var hasMax = false;
    if (this._maxInfoHtml != ""||this._maxInfoUrl != "") {
        hasMax = true;
        var maxContentDiv = document.createElement('div');
        if (this._maxInfoHtml != "")
            // load straight from data
            maxContentDiv.innerHTML = this._maxInfoHtml;
        else maxContentDiv.innerHTML = 'Loading...';
        infoWindowOptions = {maxContent: maxContentDiv};
        if (this._maxInfoHtml == "") {
            // load via ajax instead: fetch happens lazily on first maximize
            var iw = this.map.getInfoWindow();
            var ajaxUrl = this._maxInfoUrl;
            GEvent.addListener(iw, "maximizeclick", function() {
                GDownloadUrl(ajaxUrl, function(data) {
                    maxContentDiv.innerHTML = data;
                });
            });
        }
    } else infoWindowOptions = {};
    // standard version - text already loaded
    if (this._infoHtml != ""||(hasMax && this._maxOnly)) {
        // markers anchor the window themselves; other placemark types open at _infoPoint
        if (this.getType() == "marker") {
            this.placemark.openInfoWindowHtml(this._infoHtml, infoWindowOptions);
        } else {
            this.map.openInfoWindowHtml(this.getInfoPoint(), this._infoHtml, infoWindowOptions);
        }
        if (hasMax && this._maxOnly) {
            // auto-maximize, and close entirely on restore instead of shrinking
            this.map.getInfoWindow().maximize();
            var iw = this.map.getInfoWindow();
            var oMap = this.map;
            GEvent.addListener(iw, "restoreclick", function() {
                oMap.closeInfoWindow();
            });
        }
    }
    // load window html via ajax; the result is cached in _infoHtml and
    // openInfoWindow() is re-entered so the branch above displays it
    else if (this._infoUrl != "") {
        var item = this;
        GDownloadUrl(this._infoUrl, function(result) {
            item._infoHtml = result;
            item.openInfoWindow();
        });
    }
}
/*
* Close the info window if it appears to be associated with this item
*
* @param {GMap2} map Reference to the map object
*/
TimeMapItem.prototype.closeInfoWindow = function() {
    // markers own their info window; other placemark types share the map's window
    if (this.getType() == "marker") {
        this.placemark.closeInfoWindow();
    } else {
        var infoWindow = this.map.getInfoWindow();
        // close info window if its point is the same as this item's point
        // (avoids closing a window another item currently owns)
        if (infoWindow.getPoint() == this.getInfoPoint()
            && !infoWindow.isHidden())
            this.map.closeInfoWindow();
    }
}
// convenience trim function
// Strips leading and trailing whitespace (regex equivalent of String.trim).
// NOTE(review): removed stray "| timemap.js | /*----" text after the closing
// brace - it was file-join corruption and a syntax error, not code.
function trim(str) {
    return str.replace(/^\s\s*/, '').replace(/\s\s*$/, '');
}
* TimeMap
*
* @author Nick Rabinowitz (www.nickrabinowitz.com)
* The TimeMap object is intended to sync a SIMILE Timeline with a Google Map.
* Dependencies: Google Maps API v2, SIMILE Timeline v1.2
* Thanks to Jrn Clausen (http://www.oe-files.de) for initial concept and code.
*
* Copyright 2008 Nick Rabinowitz.
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* See <http://www.gnu.org/licenses/>.
*---------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
* TimeMap Class - holds references to timeline, map, and datasets
*---------------------------------------------------------------------------*/
/**
* Creates a new TimeMap with map placemarks synched to timeline events
* This will create the visible map, but not the timeline, which must be initialized separately.
*
* @constructor
* @param {element} tElement The timeline element.
* @param {element} mElement The map element.
* @param {Object} options A container for optional arguments:
* {Boolean} syncBands Whether to synchronize all bands in timeline
* {GLatLng} mapCenter Point for map center
* {Number} mapZoom Intial map zoom level
* {GMapType} mapType The maptype for the map
* {Boolean} showMapTypeCtrl Whether to display the map type control
* {Boolean} showMapCtrl Whether to show map navigation control
* {Boolean} hidePastFuture Whether to hide map placemarks for events not visible on timeline
* {Boolean} centerMapOnItems Whether to center and zoom the map based on loaded item positions
*/
function TimeMap(tElement, mElement, options) {
    // save elements
    this.mElement = mElement;
    this.tElement = tElement;
    // initialize array of datasets
    this.datasets = {};
    // initialize map bounds
    this.mapBounds = new GLatLngBounds();
    // default settings, can be overridden by options
    // other options can be set directly on the map or timeline
    options = options || {}; // make sure the options object isn't null
    // "in" checks (rather than ||) let callers actually pass false or 0:
    // the old "options['syncBands'] || true" silently ignored a false value,
    // and "options['mapZoom'] || 4" made zoom level 0 impossible
    this.settings = {
        syncBands:        ('syncBands' in options)        ? options['syncBands']        : true,
        mapCenter:        options['mapCenter'] || new GLatLng(0,0),
        mapZoom:          ('mapZoom' in options)          ? options['mapZoom']          : 4,
        mapType:          options['mapType'] || G_PHYSICAL_MAP,
        showMapTypeCtrl:  ('showMapTypeCtrl' in options)  ? options['showMapTypeCtrl']  : true,
        showMapCtrl:      ('showMapCtrl' in options)      ? options['showMapCtrl']      : true,
        hidePastFuture:   ('hidePastFuture' in options)   ? options['hidePastFuture']   : true,
        centerMapOnItems: ('centerMapOnItems' in options) ? options['centerMapOnItems'] : true
    };
    // initialize map
    var s = this.settings;
    if (GBrowserIsCompatible()) {
        this.map = new GMap2(this.mElement);
        if (s.showMapCtrl)
            this.map.addControl(new GLargeMapControl());
        if (s.showMapTypeCtrl)
            this.map.addControl(new GMapTypeControl());
        this.map.addMapType(G_PHYSICAL_MAP);
        this.map.removeMapType(G_HYBRID_MAP);
        this.map.enableDoubleClickZoom();
        this.map.enableScrollWheelZoom();
        this.map.enableContinuousZoom();
        // initialize map center and zoom
        this.map.setCenter(s.mapCenter, s.mapZoom);
        // must be called after setCenter, for reasons unclear
        this.map.setMapType(s.mapType);
    }
    // NOTE(review): this.map stays undefined when GBrowserIsCompatible() is
    // false; later methods dereference it unconditionally - verify intent
    // hijack popup window callback to show map info window
    Timeline.DurationEventPainter.prototype._showBubble = function(x, y, evt) {
        GEvent.trigger(evt.placemark, 'click');
    }
}
/**
* Create an empty dataset object and add it to the timemap
*
* @param {String} id The id of the dataset
* @param {Object} options A container for optional arguments:
*/
TimeMap.prototype.createDataset = function(id, options) {
if(!("title" in options)) options.title = id;
var dataset = new TimeMapDataset(this, options);
this.datasets[id] = dataset;
return dataset;
}
/**
* Initialize the timeline - this must happen separately to allow full control of
* timeline properties.
*
* @param {BandInfo Array} bands Array of band information objects for timeline
*/
TimeMap.prototype.initTimeline = function(bands) {
// synchronize & highlight timeline bands
for (var x=1; x < bands.length; x++) {
if (this.settings.syncBands)
bands[x].syncWith = (x-1);
bands[x].highlight = true;
}
// initialize timeline
this.timeline = Timeline.create(this.tElement, bands);
// set event listener to hide off-timeline items on the map
if (this.settings.hidePastFuture) {
var topband = this.timeline.getBand(0);
var datasets = this.datasets;
var oMap = this.map;
topband.addOnScrollListener(function() {
var maxVisibleDate = topband.getMaxVisibleDate().getTime();
var minVisibleDate = topband.getMinVisibleDate().getTime();
for (id in datasets) {
var items = datasets[id].getItems();
for (var x=0; x < items.length; x++) {
if (items[x].event != null) {
var itemStart = items[x].event.getStart().getTime();
var itemEnd = items[x].event.getEnd().getTime();
// hide items in the future
if (itemStart > maxVisibleDate) {
items[x].placemark.hide();
items[x].closeInfoWindow();
}
// hide items in the past
else if (itemEnd < minVisibleDate ||
(items[x].event.isInstant() && itemStart < minVisibleDate)) {
items[x].placemark.hide();
items[x].closeInfoWindow();
}
else items[x].placemark.show();
}
}
}
});
}
// set event listener to hide off-map items on the timeline
if (this.settings.hideOffMap) {
var datasets = this.datasets;
var oMap = this.map;
GEvent.addListener(oMap, "moveend", function() {
var bounds = oMap.getBounds();
for (id in datasets) {
var items = datasets[id].getItems();
for (var x=0; x < items.length; x++) {
var placemarkPoint = items[x].placemark.getLatLng();
// hide events outside map bounds
if (!bounds.containsLatLng(placemarkPoint) && items[x].event != null)
items[x].event.hide();
else items[x].event.show();
}
}
});
}
// add callback for window resize here instead of the html file, to conform with XHTML 1.0
resizeTimerID = null;
var oTImeline = this.timeline;
window.onresize = function() {
if (resizeTimerID == null) {
resizeTimerID = window.setTimeout(function() {
resizeTimerID = null;
oTImeline.layout();
}, 500);
}
};
};
/**
* Create a legend with the current datasets, using an existing DOM element.
* This relies on the color property being set for each dataset
* XXX: still depends on jQuery...
*
* @param {String} legendId The id of the legend element.
*/
TimeMap.prototype.createLegend = function(legendId) {
legendId = "#"+legendId;
for (id in this.datasets) {
var dataset = this.datasets[id];
var colorBox = '<div style="float:left;margin-right:5px;border:1px solid #000;width:12px;height:12px;background:' + dataset._theme.color + '"> </div>';
var divHtml = '<div class="legenditem">' + colorBox+dataset.title + '</div>';
$(legendId).append(divHtml);
}
}
/*----------------------------------------------------------------------------
* TimeMapDataset Class - holds references to items and visual themes
*---------------------------------------------------------------------------*/
/**
* Create a new TimeMap dataset to hold a set of items
*
* @constructor
* @param {TimeMap} timemap Reference to the timemap object
* @param {Object} options Object holding optional arguments:
* {String} title Title of the dataset (for the legend)
* {TimeMapDatasetTheme} theme Theme settings
*
*/
function TimeMapDataset(timemap, options) {
// hold reference to timemap
this.timemap = timemap;
// initialize timeline event source
this.eventSource = new Timeline.DefaultEventSource();
// initialize array of items
this._items = [];
// initialize vars
this.title = ("title" in options) ? options.title : "";
this._theme = ("theme" in options) ? options.theme : new TimeMapDatasetTheme({});
}
/*
* Get the items for this dataset
*/
TimeMapDataset.prototype.getItems = function() {
return this._items;
}
/*
* Add items to map and timeline.
* Each item has both a timeline event and a map placemark.
*
* @param {Object} data Data to be loaded. See loadItem() below for the format.
* @param {Function} transform If data is not in the above format, transformation function to make it so
*/
TimeMapDataset.prototype.loadItems = function(data, transform) {
for (var x=0; x < data.length; x++) {
this.loadItem(data[x], transform);
}
var tm = this.timemap;
// XXX - probably change to this.timemap.onLoadItems()
if (tm.settings.centerMapOnItems) {
// determine the zoom level from the bounds
tm.map.setZoom(tm.map.getBoundsZoomLevel(tm.mapBounds));
// determine the center from the bounds
tm.map.setCenter(tm.mapBounds.getCenter());
}
};
/*
* Add one item to map and timeline.
* Each item has both a timeline event and a map placemark.
*
* @param {Object} data Data to be loaded, in the following format:
* {String} title Title of the item (visible on timeline)
* {Iso8601DateTime} start Start time of the event on the timeline
* {Iso8601DateTime} end End time of the event on the timeline (duration events only)
* {Object} point Data for a single-point placemark:
* {Float} lat Latitude of map marker
* {Float} lon Longitude of map marker
* {Array of points} polyline Data for a polyline placemark, in format above
* {Array of points} polygon Data for a polygon placemark, in format above
* {Object} options Optional arguments to be passed to the TimeMapItem:
* {String} description Description to be shown in the info window
* {String} infoHtml Full HTML for the info window, defaults to title + description
* {String} infoUrl URL from which to retrieve full HTML for the info window
* {String} maxInfoHtml Full HTML for the maximized info window
* {String} maxInfoUrl URL from which to retrieve full HTML for the maximized info window
* {String} maxOnly Whether to auto-maximize on open
* @param {Function} transform If data is not in the above format, transformation function to make it so
*/
TimeMapDataset.prototype.loadItem = function(data, transform) {
// apply transformation, if any
if (transform != undefined)
data = transform(data);
// transform functions can return a null value to skip a datum in the set
if (data == null) return;
var tm = this.timemap;
// create timeline event
var start = (data.start == undefined||data.start == "") ? null :
Timeline.DateTime.parseIso8601DateTime(data.start);
var end = (data.end == undefined||data.end == "") ? null :
Timeline.DateTime.parseIso8601DateTime(data.end);
var instant = (data.end == undefined);
var eventIcon = instant ? this._theme.eventIcon : null;
var title = data.title;
// allow event-less placemarks - these will be always present
if (start != null)
var event = new Timeline.DefaultEventSource.Event(start, end, null, null,
instant, title, null, null, null,
eventIcon, this._theme.eventColor, null);
else var event = null;
// create map placemark
var placemark = null;
var type = "";
var point = null;
// point placemark
if ("point" in data) {
point = new GLatLng(
parseFloat(data.point["lat"]),
parseFloat(data.point["lon"])
);
// add point to visible map bounds
if (tm.settings.centerMapOnItems) {
tm.mapBounds.extend(point);
}
markerIcon = ("icon" in data) ? data["icon"] : this._theme.icon;
placemark = new GMarker(point, { icon: markerIcon });
type = "marker";
} else if ("polyline" in data || "polygon" in data) {
var points = [];
if ("polyline" in data)
var line = data.polyline;
else var line = data.polygon;
for (var x=0; x<line.length; x++) {
point = new GLatLng(
parseFloat(line[x]["lat"]),
parseFloat(line[x]["lon"])
);
points.push(point);
// add point to visible map bounds
if (tm.settings.centerMapOnItems) {
tm.mapBounds.extend(point);
}
}
if ("polyline" in data) {
placemark = new GPolyline(points,
this._theme.lineColor,
this._theme.lineWeight,
this._theme.lineOpacity);
type = "polyline";
} else {
placemark = new GPolygon(points,
this._theme.polygonLineColor,
this._theme.polygonLineWeight,
this._theme.polygonLineOpacity,
this._theme.fillColor,
this._theme.fillOpacity);
type = "polygon";
}
}
// XXX: It would be nice to handle missing placemarks better
if (placemark == null) return;
// define the center point of this item
if (type == "marker") {
// just the marker point
point = placemark.getLatLng();
} else if (type == "polyline") {
// the middle vertex of the line
point = placemark.getVertex(Math.floor(placemark.getVertexCount()/2));
} else if (type == "polygon") {
// the middle of the polygon bounds
point = placemark.getBounds().getCenter();
}
var options = ("options" in data) ? data.options : {};
options["title"] = title;
options["type"] = type;
options["infoPoint"] = point;
// create cross-references
var item = new TimeMapItem(placemark, event, tm.map, options);
if (event != null) {
event.placemark = placemark;
event.item = item;
}
placemark.event = event;
placemark.item = item;
this._items.push(item);
// add listener to make placemark open when event is clicked
// XXX: Will need to think about how to make this work w/Ajax
var oMap = tm.map;
GEvent.addListener(placemark, "click", function() {
item.openInfoWindow();
});
// add placemark and event to map and timeline
tm.map.addOverlay(placemark);
if (event != null)
this.eventSource.add(event);
};
/*
 * Static function to parse KML with time data and load it.
 *
 * @param {XML text} kml    KML to be parsed
 * @return {Array}          Array of item data objects suitable for loading
 */
TimeMapDataset.parseKML = function(kml) {
    var items = [];
    // "var" added - the original leaked kmlnode into the global scope
    var kmlnode = GXml.parse(kml);
    var placemarks = kmlnode.getElementsByTagName("Placemark");
    var nList = null;
    for (var i=0; i<placemarks.length; i++) {
        var pm = placemarks[i];
        var data = {};
        // reset for every placemark; with a single flag shared across
        // iterations (as before), any placemark after the first timed one
        // would skip parsing its own TimeSpan element
        var timeCheck = false;
        // get title
        nList = pm.getElementsByTagName("name");
        if (nList.length > 0) {
            data["title"] = nList[0].firstChild.nodeValue;
        }
        // get description
        nList = pm.getElementsByTagName("description");
        data["options"] = {};
        if (nList.length > 0) {
            data["options"]["description"] = nList[0].firstChild.nodeValue;
        }
        // look for instant timestamp
        nList = pm.getElementsByTagName("TimeStamp");
        if (nList.length > 0) {
            data["start"] = nList[0].getElementsByTagName("when")[0].firstChild.nodeValue;
            timeCheck = true;
        }
        // otherwise look for span
        if (!timeCheck) {
            nList = pm.getElementsByTagName("TimeSpan");
            if (nList.length > 0) {
                data["start"] = nList[0].getElementsByTagName("begin")[0].firstChild.nodeValue;
                data["end"] = nList[0].getElementsByTagName("end")[0].firstChild.nodeValue;
                timeCheck = true;
            }
        }
        // no time on the placemark itself - look on ancestor Folder/Document
        // nodes and merge the result in. (data is a plain object, so the
        // original data.push() call would have thrown at runtime.)
        if (!timeCheck) {
            var parentData = TimeMapDataset.parseParentNode(pm);
            for (var key in parentData) {
                if (parentData.hasOwnProperty(key)) {
                    data[key] = parentData[key];
                }
            }
        }
        // look for marker
        nList = pm.getElementsByTagName("Point");
        if (nList.length > 0) {
            // get lat/lon - KML coordinates are "lon,lat[,alt]"
            var coords = nList[0].getElementsByTagName("coordinates")[0].firstChild.nodeValue;
            var latlon = coords.split(",");
            data["point"] = {
                "lat": trim(latlon[1]),
                "lon": trim(latlon[0])
            };
        }
        // look for polyline / polygon
        else {
            nList = pm.getElementsByTagName("LineString");
            if (nList.length > 0) {
                data["polyline"] = [];
                var coords = nList[0].getElementsByTagName("coordinates")[0].firstChild.nodeValue;
                var coordArr = trim(coords).split(/[\r\n\f]+/);
                for (var x=0; x<coordArr.length; x++) {
                    var latlon = coordArr[x].split(",");
                    data["polyline"].push({
                        "lat": trim(latlon[1]),
                        "lon": trim(latlon[0])
                    });
                }
            } else {
                nList = pm.getElementsByTagName("Polygon");
                if (nList.length > 0) {
                    // store under "polygon" so the loader renders a GPolygon;
                    // the original used "polyline" here, so KML polygons lost
                    // their fill and were drawn as plain lines
                    data["polygon"] = [];
                    var coords = nList[0].getElementsByTagName("coordinates")[0].firstChild.nodeValue;
                    var coordArr = trim(coords).split(/[\r\n\f]+/);
                    for (var x=0; x<coordArr.length; x++) {
                        var latlon = coordArr[x].split(",");
                        data["polygon"].push({
                            "lat": trim(latlon[1]),
                            "lon": trim(latlon[0])
                        });
                    }
                }
            }
        }
        items.push(data);
    }
    // release DOM references
    kmlnode = null;
    nList = null;
    return items;
}
/*
 * Static function to look for time data on ancestor Folder/Document nodes,
 * climbing the tree until time data or a non-container parent is found.
 *
 * @param {XML node} pm   Node whose ancestors should be searched
 * @return {Object}       Object with "start" (and possibly "end") keys,
 *                        or an empty object if no time data was found
 */
TimeMapDataset.parseParentNode = function(pm){
    // "var" added - the original leaked check/pn into the global scope
    var check = false;
    var data = {};
    var pn = pm.parentNode;
    // the DOM property is "nodeName"; the original's lowercase "nodename"
    // is always undefined, so this branch could never run
    if (pn.nodeName == "Folder" || pn.nodeName == "Document"){
        // iterate by index: for..in over a NodeList yields keys, not nodes
        for (var c = 0; c < pn.childNodes.length; c++) {
            var ele = pn.childNodes[c];
            if (ele.nodeName == "TimeStamp") {
                data["start"] = ele.getElementsByTagName("when")[0].firstChild.nodeValue;
                check = true;
            }
            // original compared against "TimeStamp" again - and with "=",
            // an assignment - and read an undefined endNodes variable
            else if (ele.nodeName == "TimeSpan"){
                var beginNodes = ele.getElementsByTagName("begin");
                if (beginNodes.length > 0) data["start"] = beginNodes[0].firstChild.nodeValue;
                var endNodes = ele.getElementsByTagName("end");
                if (endNodes.length > 0) data["end"] = endNodes[0].firstChild.nodeValue;
                check = true;
            }
        }
    }
    // a non-container parent ends the search
    else check = true;
    if (!check) {
        // keep climbing the tree
        data = TimeMapDataset.parseParentNode(pn);
    }
    return data;
}
/*----------------------------------------------------------------------------
* Predefined visual themes for datasets, based on Google markers
*---------------------------------------------------------------------------*/
/**
 * Create a new theme for a TimeMap dataset, defining colors and images
 *
 * @constructor
 * @param {Object} options A container for optional arguments:
 *   {GIcon} icon                  Icon for marker placemarks
 *   {String} color                Default color in hex for events, polylines, polygons
 *   {String} lineColor            Color for polylines, defaults to options.color
 *   {String} polygonLineColor     Color for polygon outlines, defaults to lineColor
 *   {Number} lineOpacity          Opacity for polylines
 *   {Number} polygonLineOpacity   Opacity for polygon outlines, defaults to options.lineOpacity
 *                                 (legacy misspelling "polgonLineOpacity" still accepted)
 *   {Number} lineWeight           Line weight in pixels for polylines
 *   {Number} polygonLineWeight    Line weight for polygon outlines, defaults to options.lineWeight
 *   {String} fillColor            Color for polygon fill, defaults to options.color
 *   {String} fillOpacity          Opacity for polygon fill
 *   {String} eventColor           Background color for duration events
 *   {URL} eventIcon               Icon URL for instant events
 */
function TimeMapDatasetTheme(options) {
    // work out various defaults - the default theme is Google's reddish color
    options = options || {};
    this.icon = options['icon'] || G_DEFAULT_ICON;
    this.color = options['color'] || "#FE766A";
    this.lineColor = options['lineColor'] || this.color;
    this.polygonLineColor = options['polygonLineColor'] || this.lineColor;
    this.lineOpacity = options['lineOpacity'] || 1;
    // set the correctly-spelled property (the polygon-drawing code reads
    // theme.polygonLineOpacity, which the misspelled original never set);
    // keep the old spelling as both an accepted option and an alias
    // property for backward compatibility
    this.polygonLineOpacity = options['polygonLineOpacity'] ||
        options['polgonLineOpacity'] || this.lineOpacity;
    this.polgonLineOpacity = this.polygonLineOpacity;
    this.lineWeight = options['lineWeight'] || 2;
    this.polygonLineWeight = options['polygonLineWeight'] || this.lineWeight;
    this.fillColor = options['fillColor'] || this.color;
    this.fillOpacity = options['fillOpacity'] || 0.25;
    this.eventColor = options['eventColor'] || this.color;
    // XXX: probably need to work out the URLs better here
    this.eventIcon = options['eventIcon'] || "timemap/images/red-circle.png";
}
/*
 * Build a theme based on one of the standard Google colored dot markers.
 * Shared helper for the predefined color themes below; previously this
 * icon-setup code was duplicated four times.
 *
 * @param {String} dot        Color name used in the Google marker URL (e.g. "blue")
 * @param {String} color      Hex color for events, polylines, polygons
 * @param {String} eventIcon  Icon URL for instant events
 * @return {TimeMapDatasetTheme} the new theme
 */
TimeMapDataset.makeDotTheme = function(dot, color, eventIcon) {
    var markerIcon = new GIcon(G_DEFAULT_ICON);
    markerIcon.image = "http://www.google.com/intl/en_us/mapfiles/ms/icons/" + dot + "-dot.png";
    markerIcon.iconSize = new GSize(32, 32);
    markerIcon.shadow = "http://www.google.com/intl/en_us/mapfiles/ms/icons/msmarker.shadow.png"
    markerIcon.shadowSize = new GSize(59, 32);
    return new TimeMapDatasetTheme({
        icon: markerIcon,
        color: color,
        eventIcon: eventIcon
    });
}

// Default theme: Google's standard reddish marker and colors
TimeMapDataset.redTheme = function() {
    return new TimeMapDatasetTheme({});
}

TimeMapDataset.blueTheme = function() {
    return TimeMapDataset.makeDotTheme("blue", "#5A7ACF", "timemap/images/blue-circle.png");
}

TimeMapDataset.greenTheme = function() {
    return TimeMapDataset.makeDotTheme("green", "#19CF54", "timemap/images/green-circle.png");
}

TimeMapDataset.ltblueTheme = function() {
    return TimeMapDataset.makeDotTheme("ltblue", "#5ACFCF", "timemap/images/ltblue-circle.png");
}

TimeMapDataset.purpleTheme = function() {
    return TimeMapDataset.makeDotTheme("purple", "#8E67FD", "timemap/images/purple-circle.png");
}
/*----------------------------------------------------------------------------
* TimeMapItem Class - holds references to map placemark and timeline event
*---------------------------------------------------------------------------*/
/**
 * Create a new TimeMap item with a map placemark and a timeline event
 *
 * @constructor
 * @param {placemark} placemark   The map placemark (one of GMarker, GPolyline, or GPolygon)
 * @param {Event} event           The timeline event
 * @param {GMap2} map             Reference to the map object
 * @param {Object} options A container for optional arguments:
 *   {String} title        Title of the item
 *   {String} description  Plain-text description of the item
 *   {String} type         Type of map placemark used (marker, polyline, polygon)
 *   {GLatLng} infoPoint   Point indicating the center of this item
 *   {String} infoHtml     Full HTML for the info window
 *   {String} infoUrl      URL from which to retrieve full HTML for the info window
 *   {String} maxInfoHtml  Full HTML for the maximized info window
 *   {String} maxInfoUrl   URL from which to retrieve full HTML for the maximized info window
 *   {String} maxOnly      Whether to auto-maximize on open
 */
function TimeMapItem(placemark, event, map, options) {
    // initialize vars
    this.placemark = placemark;
    this.event = event;
    this.map = map;
    // get vars from options
    options = options || {};
    this._type = options['type'] || '';
    this._title = options['title'] || '';
    this._description = options['description'] || '';
    this._infoPoint = options['infoPoint'] || null;
    this._infoHtml = options['infoHtml'] || '';
    this._infoUrl = options['infoUrl'] || '';
    this._maxInfoHtml = options['maxInfoHtml'] || '';
    this._maxInfoUrl = options['maxInfoUrl'] || '';
    // fixed copy-paste typo: this previously read options['maxInfoUrl'],
    // so the maxOnly option was silently ignored
    this._maxOnly = options['maxOnly'] || false;
    // get functions
    this.getType = function() { return this._type; };
    this.getTitle = function() { return this._title; };
    this.getInfoPoint = function() { return this._infoPoint; };
    // create content for info window if none is provided
    if (this._infoHtml == "" && this._infoUrl == "" && !this._maxOnly) {
        this._infoHtml = '<div class="infotitle">' + this._title + '</div>';
        if (this._description != "")
            this._infoHtml += '<div class="infodescription">' + this._description + '</div>';
    }
}
/*
 * Open this item's info window at an appropriate point on the map.
 *
 * Content resolution: if maximized content exists (_maxInfoHtml or
 * _maxInfoUrl) it is attached as the window's maxContent; the standard
 * content comes from _infoHtml, or - if only _infoUrl is set - is fetched
 * via ajax and this method is called again with the result.
 */
TimeMapItem.prototype.openInfoWindow = function() {
    // support for max content loaded via ajax
    var infoWindowOptions;
    var hasMax = false;
    if (this._maxInfoHtml != ""||this._maxInfoUrl != "") {
        hasMax = true;
        var maxContentDiv = document.createElement('div');
        if (this._maxInfoHtml != "")
            // load straight from data
            maxContentDiv.innerHTML = this._maxInfoHtml;
        else maxContentDiv.innerHTML = 'Loading...';
        infoWindowOptions = {maxContent: maxContentDiv};
        if (this._maxInfoHtml == "") {
            // load via ajax instead - the placeholder div is filled in
            // when the user first maximizes the window
            var iw = this.map.getInfoWindow();
            var ajaxUrl = this._maxInfoUrl;
            GEvent.addListener(iw, "maximizeclick", function() {
                GDownloadUrl(ajaxUrl, function(data) {
                    maxContentDiv.innerHTML = data;
                });
            });
        }
    } else infoWindowOptions = {};
    // standard version - text already loaded
    if (this._infoHtml != ""||(hasMax && this._maxOnly)) {
        if (this.getType() == "marker") {
            // markers anchor the info window themselves
            this.placemark.openInfoWindowHtml(this._infoHtml, infoWindowOptions);
        } else {
            // polylines/polygons open the window at the precomputed center point
            this.map.openInfoWindowHtml(this.getInfoPoint(), this._infoHtml, infoWindowOptions);
        }
        if (hasMax && this._maxOnly) {
            // auto-maximize; close the window entirely if the user restores it
            this.map.getInfoWindow().maximize();
            var iw = this.map.getInfoWindow();
            var oMap = this.map;
            GEvent.addListener(iw, "restoreclick", function() {
                oMap.closeInfoWindow();
            });
        }
    }
    // load window html via ajax, then retry with the fetched content
    else if (this._infoUrl != "") {
        var item = this;
        GDownloadUrl(this._infoUrl, function(result) {
            item._infoHtml = result;
            item.openInfoWindow();
        });
    }
}
/*
 * Close the info window if it appears to be associated with this item.
 */
TimeMapItem.prototype.closeInfoWindow = function() {
    // markers own their info window directly
    if (this.getType() == "marker") {
        this.placemark.closeInfoWindow();
        return;
    }
    // for polylines/polygons, only close the shared map window when the
    // open window's anchor point matches this item's point
    var infoWindow = this.map.getInfoWindow();
    var belongsToItem = infoWindow.getPoint() == this.getInfoPoint();
    if (belongsToItem && !infoWindow.isHidden()) {
        this.map.closeInfoWindow();
    }
}
// convenience trim function: strip leading and trailing whitespace
function trim(str) {
    return str.replace(/^\s+/, '').replace(/\s+$/, '');
}
<ide> this._infoUrl = options['infoUrl'] || '';
<ide> this._maxInfoHtml = options['maxInfoHtml'] || '';
<ide> this._maxInfoUrl = options['maxInfoUrl'] || '';
<del> this._maxOnly = options['maxInfoUrl'] || false;
<add> this._maxOnly = options['maxOnly'] || false;
<ide>
<ide> // get functions
<ide> this.getType = function() { return this._type; }; |
|
JavaScript | mit | 0e73511fbf4f2ca2152704f0dcb52c710bfdd604 | 0 | conveyal/datatools-manager,conveyal/datatools-manager,conveyal/datatools-manager,conveyal/datatools-manager | // @flow
import Icon from '@conveyal/woonerf/components/icon'
import moment from 'moment'
import React, {Component} from 'react'
import {Grid, Row, Col, ButtonToolbar, Button, FormControl, ControlLabel, FormGroup} from 'react-bootstrap'
import DateTimeField from 'react-bootstrap-datetimepicker'
import {browserHistory} from 'react-router'
import Toggle from 'react-toggle'
import AffectedServices from './AffectedServices'
import * as alertActions from '../actions/alerts'
import * as activeAlertActions from '../actions/activeAlert'
import Loading from '../../common/components/Loading'
import ManagerPage from '../../common/components/ManagerPage'
import PageNotFound from '../../common/components/PageNotFound'
import {isModuleEnabled} from '../../common/util/config'
import {checkEntitiesForFeeds} from '../../common/util/permissions'
import toSentenceCase from '../../common/util/to-sentence-case'
import GtfsMapSearch from '../../gtfs/components/gtfsmapsearch'
import GlobalGtfsFilter from '../../gtfs/containers/GlobalGtfsFilter'
import {CAUSES, EFFECTS, isNew} from '../util'
import type {Props as ContainerProps} from '../containers/ActiveAlertEditor'
import type {Alert, Feed, GtfsRoute, GtfsStop, Project} from '../../types'
import type {ManagerUserState} from '../../types/reducers'
type Props = ContainerProps & {
activeFeeds: Array<Feed>,
addActiveEntity: typeof activeAlertActions.addActiveEntity,
alert: Alert,
createAlert: typeof alertActions.createAlert,
deleteActiveEntity: typeof activeAlertActions.deleteActiveEntity,
deleteAlert: typeof alertActions.deleteAlert,
editableFeeds: Array<Feed>,
onAlertEditorMount: typeof alertActions.onAlertEditorMount,
permissionFilter: string,
project: Project,
publishableFeeds: Array<Feed>,
saveAlert: typeof alertActions.saveAlert,
setActiveProperty: typeof activeAlertActions.setActiveProperty,
setActivePublished: typeof activeAlertActions.setActivePublished,
updateActiveEntity: typeof activeAlertActions.updateActiveEntity,
user: ManagerUserState
}
const ALERT_TITLE_CHAR_LIMIT = 100
const ALERT_DESCRIPTION_CHAR_LIMIT = 1200
const CHAR_WARNING_LIMIT = 10
const _stringToOption = str => (
<option key={str} value={str}>
{toSentenceCase(str.replace('_', ' '))}
</option>
)
const sortFeeds511 = (a, b) => {
// return 511 Staff as first in list to avoid 511 Emergency being first in list
if (/511 Staff/.test(a.name)) return -1
if (/511 Staff/.test(b.name)) return 1
if (a.name < b.name) return -1
if (a.name > b.name) return 1
return 0
}
export default class AlertEditor extends Component<Props> {
componentWillMount () {
const {alert, location, onAlertEditorMount, permissionFilter, user} = this.props
onAlertEditorMount(alert, location, permissionFilter, user)
}
validateAndSave = () => {
const {alert, saveAlert} = this.props
const {affectedEntities, description, end, start, title} = alert
const momentEnd = moment(end)
const momentStart = moment(start)
// alert title must not be blank nor just whitespace
if (!title.trim()) {
return window.alert('You must specify an alert title')
}
if (!end || !start || !momentEnd.isValid() || !momentStart.isValid()) {
return window.alert('Alert must have a valid start and end date')
}
if (end < start) {
return window.alert(`Alert end date ${momentEnd.format()} cannot be before start date (${momentStart.format()})`)
}
if (momentEnd.isBefore(moment())) {
return window.alert('Alert end date cannot be before the current date (alerts must not be in the past)')
}
if (affectedEntities.length === 0) {
return window.alert('You must specify at least one affected entity')
}
saveAlert(alert)
}
_deleteAlert = () => this.props.deleteAlert(this.props.alert)
_onChange = (evt: SyntheticInputEvent<HTMLInputElement>) =>
this.props.setActiveProperty({[evt.target.name]: evt.target.value})
_onChangeEnd = (time: string) => this.props.setActiveProperty({end: +time})
_onChangeStart = (time: string) => this.props.setActiveProperty({start: +time})
_onClickBack = () => browserHistory.push('/alerts')
_onClickDelete = () => {
const {alert} = this.props
this.refs.page.showConfirmModal({
title: 'Delete Alert #' + alert.id + '?',
body: <p>Are you sure you want to delete <strong>Alert {alert.id}</strong>?</p>,
onConfirm: this._deleteAlert
})
}
_onClickPublish = () => this.props.setActivePublished(!this.props.alert.published)
_onRouteClick = (feed: Feed, route: GtfsRoute) => {
this.props.addActiveEntity('ROUTE', route, feed)
}
_onStopClick = ({entities, feed}: {entities: Array<GtfsStop>, feed: Feed}) => {
entities.forEach(stop => this.props.addActiveEntity('STOP', stop, feed))
}
/* eslint-disable complexity */
render () {
const {
activeFeeds,
alert,
editableFeeds,
publishableFeeds
} = this.props
if (!isModuleEnabled('alerts')) return <PageNotFound message='The alerts module is not enabled.' />
if (!alert) return <ManagerPage><Loading /></ManagerPage>
const titleCharactersRemaining = alert.title
? ALERT_TITLE_CHAR_LIMIT - alert.title.length
: ALERT_TITLE_CHAR_LIMIT
const descriptionCharactersRemaining = alert.description
? ALERT_DESCRIPTION_CHAR_LIMIT - alert.description.length
: ALERT_DESCRIPTION_CHAR_LIMIT
const titleCharacterCount = alert.title
? alert.title.length
: 0
const descriptionCharactersCount = alert.description
? alert.description.length
: 0
const canPublish =
alert.affectedEntities.length &&
checkEntitiesForFeeds(alert.affectedEntities, publishableFeeds)
const canEdit = checkEntitiesForFeeds(alert.affectedEntities, editableFeeds)
const editingIsDisabled = alert.published && !canPublish ? true : !canEdit
const sortedFeeds = editableFeeds.sort(sortFeeds511)
// If user has edit rights and alert is unpublished, user can delete alert,
// else check if they have publish rights.
const deleteIsDisabled = !editingIsDisabled && !alert.published
? false
: !canPublish
const deleteButtonMessage = alert.published && deleteIsDisabled
? 'Cannot delete because alert is published'
: !canEdit ? 'Cannot alter alerts for other agencies' : 'Delete alert'
const editButtonMessage = alert.published && deleteIsDisabled
? 'Cannot edit because alert is published'
: !canEdit ? 'Cannot alter alerts for other agencies' : 'Edit alert'
return (
<ManagerPage
ref='page'
title={isNew(alert) ? `Alert ${alert.id}` : 'New Alert'}>
<Grid fluid>
<Row>
<Col xs={4} sm={7} md={8}>
<Button
onClick={this._onClickBack}>
<Icon type='chevron-left' /> Back
</Button>
</Col>
<Col xs={8} sm={5} md={4}>
<ButtonToolbar className='pull-right' style={{marginLeft: '5px'}}>
<Button
title={editButtonMessage}
bsStyle='primary'
disabled={editingIsDisabled}
onClick={this.validateAndSave}
><Icon type='save' /> Save</Button>
<Button
title={deleteButtonMessage}
bsStyle='danger'
disabled={deleteIsDisabled}
onClick={this._onClickDelete}
><Icon type='trash' /> Delete</Button>
</ButtonToolbar>
<FormGroup
className='pull-right'
style={{position: 'relative', top: '5px'}}>
<Toggle
id='alert-published'
disabled={!canPublish}
checked={alert.published}
onChange={this._onClickPublish} />
<label
htmlFor='alert-published'
style={{position: 'relative', top: '-5px', marginLeft: '5px'}}>
Published?
</label>
</FormGroup>
</Col>
</Row>
<Row>
<Col xs={12} sm={6}>
<Row>
<Col xs={12} style={{marginTop: '10px'}}>
<FormGroup controlId='formControlsTitle'>
<ControlLabel>
Alert Title
{' '}
<span
className={
titleCharactersRemaining > CHAR_WARNING_LIMIT
? 'text-muted'
: 'text-danger'
}
style={{fontWeight: 400}}>
{titleCharacterCount}
</span>
<h5 style={{margin: '0px'}}>
<small>
{titleCharacterCount > ALERT_TITLE_CHAR_LIMIT
? (
<span className='text-danger'>
{`WARNING: Alert title longer than ${ALERT_TITLE_CHAR_LIMIT} characters may get truncated in some dissemination channels. `}
</span>
) : ''}
Note: alert title serves as text for eTID alerts. Use
descriptive language so it can serve as a standalone
alert.
</small>
</h5>
</ControlLabel>
<FormControl
bsSize='large'
placeholder='E.g., Sig. Delays due to Golden Gate Bridge Closure'
defaultValue={alert.title || ''}
name='title'
onChange={this._onChange} />
</FormGroup>
</Col>
<Col xs={6}>
<div style={{marginBottom: '5px'}}><strong>Start</strong></div>
{alert.start
? <DateTimeField
disabled
dateTime={alert.start}
onChange={this._onChangeStart} />
: <DateTimeField
defaultText='Please select a date'
onChange={this._onChangeStart} />
}
</Col>
<Col xs={6}>
<div style={{marginBottom: '5px'}}><strong>End</strong></div>
{alert.end
? <DateTimeField
dateTime={alert.end}
onChange={this._onChangeEnd} />
: <DateTimeField
defaultText='Please select a date'
onChange={this._onChangeEnd} />
}
</Col>
</Row>
<Row>
<Col xs={6}>
<FormGroup controlId='formControlsCause'>
<ControlLabel>Cause</ControlLabel>
<FormControl
componentClass='select'
onChange={this._onChange}
name='cause'
value={alert.cause}>
{CAUSES.map(_stringToOption)}
</FormControl>
</FormGroup>
</Col>
<Col xs={6}>
<FormGroup controlId='formControlsEffect'>
<ControlLabel>Effect</ControlLabel>
<FormControl
componentClass='select'
onChange={this._onChange}
name='effect'
value={alert.effect}>
{EFFECTS.map(_stringToOption)}
</FormControl>
</FormGroup>
</Col>
</Row>
<Row>
<Col xs={12} sm={6}>
<FormGroup controlId='formControlsDescription'>
<ControlLabel>
Description
{' '}
<span
className={
descriptionCharactersRemaining > CHAR_WARNING_LIMIT
? 'text-muted'
: 'text-danger'
}
style={{fontWeight: 400}}>
{descriptionCharactersCount}
</span>
{descriptionCharactersCount > ALERT_DESCRIPTION_CHAR_LIMIT
? (
<h5 style={{margin: '0px'}}>
<small className='text-danger'>
{`WARNING: Alert description longer than ${ALERT_DESCRIPTION_CHAR_LIMIT} characters may get truncated in some dissemination channels. `}
</small>
</h5>
) : ''}
</ControlLabel>
<FormControl
componentClass='textarea'
placeholder='Detailed description of alert...'
defaultValue={alert.description}
name='description'
onChange={this._onChange} />
</FormGroup>
</Col>
<Col xs={12} sm={6}>
<FormGroup controlId='formControlsURL'>
<ControlLabel>URL</ControlLabel>
<FormControl
type='text'
placeholder='http://511.org/alerts/transit/123'
defaultValue={alert.url}
name='url'
onChange={this._onChange} />
</FormGroup>
</Col>
</Row>
<Row>
<Col xs={12}>
<AffectedServices
sortedFeeds={sortedFeeds}
{...this.props} />
</Col>
</Row>
</Col>
<Col xs={12} sm={6}>
<Row>
<Col xs={12}>
<GlobalGtfsFilter />
</Col>
</Row>
<GtfsMapSearch
feeds={activeFeeds}
onRouteClick={this._onRouteClick}
onStopClick={this._onStopClick}
popupActionPrefix='Add' />
</Col>
</Row>
</Grid>
</ManagerPage>
)
}
}
| lib/alerts/components/AlertEditor.js | // @flow
import * as activeAlertActions from '../actions/activeAlert'
import * as alertActions from '../actions/alerts'
import type {Alert, Feed, GtfsRoute, GtfsStop, Project} from '../../types'
import {Button, ButtonToolbar, Col, ControlLabel, FormControl, FormGroup, Grid, Row} from 'react-bootstrap'
import {CAUSES, EFFECTS, isNew} from '../util'
import React, {Component} from 'react'
import AffectedServices from './AffectedServices'
import type {Props as ContainerProps} from '../containers/ActiveAlertEditor'
import DateTimeField from 'react-bootstrap-datetimepicker'
import GlobalGtfsFilter from '../../gtfs/containers/GlobalGtfsFilter'
import GtfsMapSearch from '../../gtfs/components/gtfsmapsearch'
import Icon from '@conveyal/woonerf/components/icon'
import Loading from '../../common/components/Loading'
import ManagerPage from '../../common/components/ManagerPage'
import type {ManagerUserState} from '../../types/reducers'
import PageNotFound from '../../common/components/PageNotFound'
import Toggle from 'react-toggle'
import {browserHistory} from 'react-router'
import {checkEntitiesForFeeds} from '../../common/util/permissions'
import {isModuleEnabled} from '../../common/util/config'
import moment from 'moment'
import toSentenceCase from '../../common/util/to-sentence-case'
type Props = ContainerProps & {
activeFeeds: Array<Feed>,
addActiveEntity: typeof activeAlertActions.addActiveEntity,
alert: Alert,
createAlert: typeof alertActions.createAlert,
deleteActiveEntity: typeof activeAlertActions.deleteActiveEntity,
deleteAlert: typeof alertActions.deleteAlert,
editableFeeds: Array<Feed>,
onAlertEditorMount: typeof alertActions.onAlertEditorMount,
permissionFilter: string,
project: Project,
publishableFeeds: Array<Feed>,
saveAlert: typeof alertActions.saveAlert,
setActiveProperty: typeof activeAlertActions.setActiveProperty,
setActivePublished: typeof activeAlertActions.setActivePublished,
updateActiveEntity: typeof activeAlertActions.updateActiveEntity,
user: ManagerUserState
}
const ALERT_TITLE_CHAR_LIMIT = 100
const ALERT_DESCRIPTION_CHAR_LIMIT = 1200
const CHAR_WARNING_LIMIT = 10
const _stringToOption = str => (
<option key={str} value={str}>
{toSentenceCase(str.replace('_', ' '))}
</option>
)
const sortFeeds511 = (a, b) => {
// return 511 Staff as first in list to avoid 511 Emergency being first in list
if (/511 Staff/.test(a.name)) return -1
if (/511 Staff/.test(b.name)) return 1
if (a.name < b.name) return -1
if (a.name > b.name) return 1
return 0
}
export default class AlertEditor extends Component<Props> {
componentWillMount () {
const {alert, location, onAlertEditorMount, permissionFilter, user} = this.props
onAlertEditorMount(alert, location, permissionFilter, user)
}
validateAndSave = () => {
const {alert, saveAlert} = this.props
const {affectedEntities, description, end, start, title} = alert
const momentEnd = moment(end)
const momentStart = moment(start)
// alert title must not be blank nor just whitespace
if (!title.trim()) {
return window.alert('You must specify an alert title')
}
if (!end || !start || !momentEnd.isValid() || !momentStart.isValid()) {
return window.alert('Alert must have a valid start and end date')
}
if (end < start) {
return window.alert(`Alert end date ${momentEnd.format()} cannot be before start date (${momentStart.format()})`)
}
if (momentEnd.isBefore(moment())) {
return window.alert('Alert end date cannot be before the current date (alerts must not be in the past)')
}
if (affectedEntities.length === 0) {
return window.alert('You must specify at least one affected entity')
}
saveAlert(alert)
}
_deleteAlert = () => this.props.deleteAlert(this.props.alert)
_onChange = (evt: SyntheticInputEvent<HTMLInputElement>) =>
this.props.setActiveProperty({[evt.target.name]: evt.target.value})
_onChangeEnd = (time: string) => this.props.setActiveProperty({end: +time})
_onChangeStart = (time: string) => this.props.setActiveProperty({start: +time})
_onClickBack = () => browserHistory.push('/alerts')
_onClickDelete = () => {
const {alert} = this.props
this.refs.page.showConfirmModal({
title: 'Delete Alert #' + alert.id + '?',
body: <p>Are you sure you want to delete <strong>Alert {alert.id}</strong>?</p>,
onConfirm: this._deleteAlert
})
}
_onClickPublish = () => this.props.setActivePublished(!this.props.alert.published)
_onRouteClick = (feed: Feed, route: GtfsRoute) => {
this.props.addActiveEntity('ROUTE', route, feed)
}
_onStopClick = ({entities, feed}: {entities: Array<GtfsStop>, feed: Feed}) => {
entities.forEach(stop => this.props.addActiveEntity('STOP', stop, feed))
}
/* eslint-disable complexity */
render () {
const {
activeFeeds,
alert,
editableFeeds,
publishableFeeds
} = this.props
if (!isModuleEnabled('alerts')) return <PageNotFound message='The alerts module is not enabled.' />
if (!alert) return <ManagerPage><Loading /></ManagerPage>
const titleCharacterCount = alert.title ? alert.title.length : 0
const descriptionCharactersCount = alert.description ? alert.description.length : 0
const canPublish = alert.affectedEntities.length &&
checkEntitiesForFeeds(alert.affectedEntities, publishableFeeds)
const canEdit = checkEntitiesForFeeds(alert.affectedEntities, editableFeeds)
const editingIsDisabled = alert.published && !canPublish ? true : !canEdit
const sortedFeeds = editableFeeds.sort(sortFeeds511)
// If user has edit rights and alert is unpublished, user can delete alert,
// else check if they have publish rights.
const deleteIsDisabled = !editingIsDisabled && !alert.published
? false
: !canPublish
const deleteButtonMessage = alert.published && deleteIsDisabled
? 'Cannot delete because alert is published'
: !canEdit ? 'Cannot alter alerts for other agencies' : 'Delete alert'
const editButtonMessage = alert.published && deleteIsDisabled
? 'Cannot edit because alert is published'
: !canEdit ? 'Cannot alter alerts for other agencies' : 'Edit alert'
return (
<ManagerPage
ref='page'
title={isNew(alert) ? `Alert ${alert.id}` : 'New Alert'}>
<Grid fluid>
<Row>
<Col xs={4} sm={7} md={8}>
<Button
onClick={this._onClickBack}>
<Icon type='chevron-left' /> Back
</Button>
</Col>
<Col xs={8} sm={5} md={4}>
<ButtonToolbar className='pull-right' style={{marginLeft: '5px'}}>
<Button
title={editButtonMessage}
bsStyle='primary'
disabled={editingIsDisabled}
onClick={this.validateAndSave}
><Icon type='save' /> Save</Button>
<Button
title={deleteButtonMessage}
bsStyle='danger'
disabled={deleteIsDisabled}
onClick={this._onClickDelete}
><Icon type='trash' /> Delete</Button>
</ButtonToolbar>
<FormGroup
className='pull-right'
style={{position: 'relative', top: '5px'}}>
<Toggle
id='alert-published'
disabled={!canPublish}
checked={alert.published}
onChange={this._onClickPublish} />
<label
htmlFor='alert-published'
style={{position: 'relative', top: '-5px', marginLeft: '5px'}}>
Published?
</label>
</FormGroup>
</Col>
</Row>
<Row>
<Col xs={12} sm={6}>
<Row>
<Col xs={12} style={{marginTop: '10px'}}>
<FormGroup controlId='formControlsTitle'>
<ControlLabel>
Alert Title
{' '}
<span
className={
titleCharacterCount > CHAR_WARNING_LIMIT && titleCharacterCount <= ALERT_TITLE_CHAR_LIMIT
? 'text-muted'
: 'text-danger'
}
style={{fontWeight: 400}}>
{titleCharacterCount}
</span>
<h5 style={{margin: '0px'}}>
<small>
<span className="text-danger">{titleCharacterCount > ALERT_TITLE_CHAR_LIMIT ? 'WARNING: Alert title longer than 100 characters may get truncated in some dissemination channels. ' : ''}</span>
Note: alert title serves as text for eTID alerts. Use
descriptive language so it can serve as a standalone
alert.
</small>
</h5>
</ControlLabel>
<FormControl
bsSize='large'
placeholder='E.g., Sig. Delays due to Golden Gate Bridge Closure'
defaultValue={alert.title || ''}
name='title'
onChange={this._onChange} />
</FormGroup>
</Col>
<Col xs={6}>
<div style={{marginBottom: '5px'}}><strong>Start</strong></div>
{alert.start
? <DateTimeField
disabled
dateTime={alert.start}
onChange={this._onChangeStart} />
: <DateTimeField
defaultText='Please select a date'
onChange={this._onChangeStart} />
}
</Col>
<Col xs={6}>
<div style={{marginBottom: '5px'}}><strong>End</strong></div>
{alert.end
? <DateTimeField
dateTime={alert.end}
onChange={this._onChangeEnd} />
: <DateTimeField
defaultText='Please select a date'
onChange={this._onChangeEnd} />
}
</Col>
</Row>
<Row>
<Col xs={6}>
<FormGroup controlId='formControlsCause'>
<ControlLabel>Cause</ControlLabel>
<FormControl
componentClass='select'
onChange={this._onChange}
name='cause'
value={alert.cause}>
{CAUSES.map(_stringToOption)}
</FormControl>
</FormGroup>
</Col>
<Col xs={6}>
<FormGroup controlId='formControlsEffect'>
<ControlLabel>Effect</ControlLabel>
<FormControl
componentClass='select'
onChange={this._onChange}
name='effect'
value={alert.effect}>
{EFFECTS.map(_stringToOption)}
</FormControl>
</FormGroup>
</Col>
</Row>
<Row>
<Col xs={12} sm={6}>
<FormGroup controlId='formControlsDescription'>
<ControlLabel>
Description
{' '}
<span
className={
descriptionCharactersCount > CHAR_WARNING_LIMIT && descriptionCharactersCount <= ALERT_DESCRIPTION_CHAR_LIMIT
? 'text-muted'
: 'text-danger'
}
style={{fontWeight: 400}}>
{descriptionCharactersCount}
<p><span className="text-danger">{descriptionCharactersCount > ALERT_DESCRIPTION_CHAR_LIMIT ? 'WARNING: Alert description longer than 1200 characters may get truncated in some dissemination channels. ' : ''}</span></p>
</span>
</ControlLabel>
<FormControl
componentClass='textarea'
placeholder='Detailed description of alert...'
defaultValue={alert.description}
name='description'
onChange={this._onChange} />
</FormGroup>
</Col>
<Col xs={12} sm={6}>
<FormGroup controlId='formControlsURL'>
<ControlLabel>URL</ControlLabel>
<FormControl
type='text'
placeholder='http://511.org/alerts/transit/123'
defaultValue={alert.url}
name='url'
onChange={this._onChange} />
</FormGroup>
</Col>
</Row>
<Row>
<Col xs={12}>
<AffectedServices
sortedFeeds={sortedFeeds}
{...this.props} />
</Col>
</Row>
</Col>
<Col xs={12} sm={6}>
<Row>
<Col xs={12}>
<GlobalGtfsFilter />
</Col>
</Row>
<GtfsMapSearch
feeds={activeFeeds}
onRouteClick={this._onRouteClick}
onStopClick={this._onStopClick}
popupActionPrefix='Add' />
</Col>
</Row>
</Grid>
</ManagerPage>
)
}
}
| refactor(AlertEditor.js): Refactored, removed formatted IDE code
#647
| lib/alerts/components/AlertEditor.js | refactor(AlertEditor.js): Refactored, removed formatted IDE code | <ide><path>ib/alerts/components/AlertEditor.js
<ide> // @flow
<ide>
<add>import Icon from '@conveyal/woonerf/components/icon'
<add>import moment from 'moment'
<add>import React, {Component} from 'react'
<add>import {Grid, Row, Col, ButtonToolbar, Button, FormControl, ControlLabel, FormGroup} from 'react-bootstrap'
<add>import DateTimeField from 'react-bootstrap-datetimepicker'
<add>import {browserHistory} from 'react-router'
<add>import Toggle from 'react-toggle'
<add>
<add>import AffectedServices from './AffectedServices'
<add>import * as alertActions from '../actions/alerts'
<ide> import * as activeAlertActions from '../actions/activeAlert'
<del>import * as alertActions from '../actions/alerts'
<del>
<del>import type {Alert, Feed, GtfsRoute, GtfsStop, Project} from '../../types'
<del>import {Button, ButtonToolbar, Col, ControlLabel, FormControl, FormGroup, Grid, Row} from 'react-bootstrap'
<del>import {CAUSES, EFFECTS, isNew} from '../util'
<del>import React, {Component} from 'react'
<del>
<del>import AffectedServices from './AffectedServices'
<del>import type {Props as ContainerProps} from '../containers/ActiveAlertEditor'
<del>import DateTimeField from 'react-bootstrap-datetimepicker'
<del>import GlobalGtfsFilter from '../../gtfs/containers/GlobalGtfsFilter'
<del>import GtfsMapSearch from '../../gtfs/components/gtfsmapsearch'
<del>import Icon from '@conveyal/woonerf/components/icon'
<ide> import Loading from '../../common/components/Loading'
<ide> import ManagerPage from '../../common/components/ManagerPage'
<add>import PageNotFound from '../../common/components/PageNotFound'
<add>import {isModuleEnabled} from '../../common/util/config'
<add>import {checkEntitiesForFeeds} from '../../common/util/permissions'
<add>import toSentenceCase from '../../common/util/to-sentence-case'
<add>import GtfsMapSearch from '../../gtfs/components/gtfsmapsearch'
<add>import GlobalGtfsFilter from '../../gtfs/containers/GlobalGtfsFilter'
<add>import {CAUSES, EFFECTS, isNew} from '../util'
<add>
<add>import type {Props as ContainerProps} from '../containers/ActiveAlertEditor'
<add>import type {Alert, Feed, GtfsRoute, GtfsStop, Project} from '../../types'
<ide> import type {ManagerUserState} from '../../types/reducers'
<del>import PageNotFound from '../../common/components/PageNotFound'
<del>import Toggle from 'react-toggle'
<del>import {browserHistory} from 'react-router'
<del>import {checkEntitiesForFeeds} from '../../common/util/permissions'
<del>import {isModuleEnabled} from '../../common/util/config'
<del>import moment from 'moment'
<del>import toSentenceCase from '../../common/util/to-sentence-case'
<ide>
<ide> type Props = ContainerProps & {
<ide> activeFeeds: Array<Feed>,
<ide> _deleteAlert = () => this.props.deleteAlert(this.props.alert)
<ide>
<ide> _onChange = (evt: SyntheticInputEvent<HTMLInputElement>) =>
<del> this.props.setActiveProperty({[evt.target.name]: evt.target.value})
<add> this.props.setActiveProperty({[evt.target.name]: evt.target.value})
<ide>
<ide> _onChangeEnd = (time: string) => this.props.setActiveProperty({end: +time})
<ide>
<ide> } = this.props
<ide> if (!isModuleEnabled('alerts')) return <PageNotFound message='The alerts module is not enabled.' />
<ide> if (!alert) return <ManagerPage><Loading /></ManagerPage>
<del> const titleCharacterCount = alert.title ? alert.title.length : 0
<del> const descriptionCharactersCount = alert.description ? alert.description.length : 0
<del> const canPublish = alert.affectedEntities.length &&
<add> const titleCharactersRemaining = alert.title
<add> ? ALERT_TITLE_CHAR_LIMIT - alert.title.length
<add> : ALERT_TITLE_CHAR_LIMIT
<add> const descriptionCharactersRemaining = alert.description
<add> ? ALERT_DESCRIPTION_CHAR_LIMIT - alert.description.length
<add> : ALERT_DESCRIPTION_CHAR_LIMIT
<add> const titleCharacterCount = alert.title
<add> ? alert.title.length
<add> : 0
<add> const descriptionCharactersCount = alert.description
<add> ? alert.description.length
<add> : 0
<add> const canPublish =
<add> alert.affectedEntities.length &&
<ide> checkEntitiesForFeeds(alert.affectedEntities, publishableFeeds)
<del> const canEdit = checkEntitiesForFeeds(alert.affectedEntities, editableFeeds)
<del> const editingIsDisabled = alert.published && !canPublish ? true : !canEdit
<del> const sortedFeeds = editableFeeds.sort(sortFeeds511)
<del> // If user has edit rights and alert is unpublished, user can delete alert,
<del> // else check if they have publish rights.
<del> const deleteIsDisabled = !editingIsDisabled && !alert.published
<del> ? false
<del> : !canPublish
<del> const deleteButtonMessage = alert.published && deleteIsDisabled
<del> ? 'Cannot delete because alert is published'
<del> : !canEdit ? 'Cannot alter alerts for other agencies' : 'Delete alert'
<del> const editButtonMessage = alert.published && deleteIsDisabled
<del> ? 'Cannot edit because alert is published'
<del> : !canEdit ? 'Cannot alter alerts for other agencies' : 'Edit alert'
<del> return (
<del> <ManagerPage
<del> ref='page'
<del> title={isNew(alert) ? `Alert ${alert.id}` : 'New Alert'}>
<del> <Grid fluid>
<del> <Row>
<del> <Col xs={4} sm={7} md={8}>
<del> <Button
<del> onClick={this._onClickBack}>
<del> <Icon type='chevron-left' /> Back
<del> </Button>
<del> </Col>
<del> <Col xs={8} sm={5} md={4}>
<del> <ButtonToolbar className='pull-right' style={{marginLeft: '5px'}}>
<add> const canEdit = checkEntitiesForFeeds(alert.affectedEntities, editableFeeds)
<add> const editingIsDisabled = alert.published && !canPublish ? true : !canEdit
<add> const sortedFeeds = editableFeeds.sort(sortFeeds511)
<add> // If user has edit rights and alert is unpublished, user can delete alert,
<add> // else check if they have publish rights.
<add> const deleteIsDisabled = !editingIsDisabled && !alert.published
<add> ? false
<add> : !canPublish
<add> const deleteButtonMessage = alert.published && deleteIsDisabled
<add> ? 'Cannot delete because alert is published'
<add> : !canEdit ? 'Cannot alter alerts for other agencies' : 'Delete alert'
<add> const editButtonMessage = alert.published && deleteIsDisabled
<add> ? 'Cannot edit because alert is published'
<add> : !canEdit ? 'Cannot alter alerts for other agencies' : 'Edit alert'
<add> return (
<add> <ManagerPage
<add> ref='page'
<add> title={isNew(alert) ? `Alert ${alert.id}` : 'New Alert'}>
<add> <Grid fluid>
<add> <Row>
<add> <Col xs={4} sm={7} md={8}>
<ide> <Button
<del> title={editButtonMessage}
<del> bsStyle='primary'
<del> disabled={editingIsDisabled}
<del> onClick={this.validateAndSave}
<del> ><Icon type='save' /> Save</Button>
<del> <Button
<del> title={deleteButtonMessage}
<del> bsStyle='danger'
<del> disabled={deleteIsDisabled}
<del> onClick={this._onClickDelete}
<del> ><Icon type='trash' /> Delete</Button>
<del> </ButtonToolbar>
<del> <FormGroup
<del> className='pull-right'
<del> style={{position: 'relative', top: '5px'}}>
<del> <Toggle
<del> id='alert-published'
<del> disabled={!canPublish}
<del> checked={alert.published}
<del> onChange={this._onClickPublish} />
<del> <label
<del> htmlFor='alert-published'
<del> style={{position: 'relative', top: '-5px', marginLeft: '5px'}}>
<del> Published?
<del> </label>
<del> </FormGroup>
<del> </Col>
<del> </Row>
<del>
<del> <Row>
<del> <Col xs={12} sm={6}>
<del> <Row>
<del> <Col xs={12} style={{marginTop: '10px'}}>
<del> <FormGroup controlId='formControlsTitle'>
<del> <ControlLabel>
<del> Alert Title
<del> {' '}
<add> onClick={this._onClickBack}>
<add> <Icon type='chevron-left' /> Back
<add> </Button>
<add> </Col>
<add> <Col xs={8} sm={5} md={4}>
<add> <ButtonToolbar className='pull-right' style={{marginLeft: '5px'}}>
<add> <Button
<add> title={editButtonMessage}
<add> bsStyle='primary'
<add> disabled={editingIsDisabled}
<add> onClick={this.validateAndSave}
<add> ><Icon type='save' /> Save</Button>
<add> <Button
<add> title={deleteButtonMessage}
<add> bsStyle='danger'
<add> disabled={deleteIsDisabled}
<add> onClick={this._onClickDelete}
<add> ><Icon type='trash' /> Delete</Button>
<add> </ButtonToolbar>
<add> <FormGroup
<add> className='pull-right'
<add> style={{position: 'relative', top: '5px'}}>
<add> <Toggle
<add> id='alert-published'
<add> disabled={!canPublish}
<add> checked={alert.published}
<add> onChange={this._onClickPublish} />
<add> <label
<add> htmlFor='alert-published'
<add> style={{position: 'relative', top: '-5px', marginLeft: '5px'}}>
<add> Published?
<add> </label>
<add> </FormGroup>
<add> </Col>
<add> </Row>
<add>
<add> <Row>
<add> <Col xs={12} sm={6}>
<add> <Row>
<add> <Col xs={12} style={{marginTop: '10px'}}>
<add> <FormGroup controlId='formControlsTitle'>
<add> <ControlLabel>
<add> Alert Title
<add> {' '}
<ide> <span
<ide> className={
<del> titleCharacterCount > CHAR_WARNING_LIMIT && titleCharacterCount <= ALERT_TITLE_CHAR_LIMIT
<add> titleCharactersRemaining > CHAR_WARNING_LIMIT
<ide> ? 'text-muted'
<ide> : 'text-danger'
<ide> }
<ide> </span>
<ide> <h5 style={{margin: '0px'}}>
<ide> <small>
<del> <span className="text-danger">{titleCharacterCount > ALERT_TITLE_CHAR_LIMIT ? 'WARNING: Alert title longer than 100 characters may get truncated in some dissemination channels. ' : ''}</span>
<add> {titleCharacterCount > ALERT_TITLE_CHAR_LIMIT
<add> ? (
<add> <span className='text-danger'>
<add> {`WARNING: Alert title longer than ${ALERT_TITLE_CHAR_LIMIT} characters may get truncated in some dissemination channels. `}
<add> </span>
<add> ) : ''}
<ide> Note: alert title serves as text for eTID alerts. Use
<ide> descriptive language so it can serve as a standalone
<ide> alert.
<ide> {' '}
<ide> <span
<ide> className={
<del> descriptionCharactersCount > CHAR_WARNING_LIMIT && descriptionCharactersCount <= ALERT_DESCRIPTION_CHAR_LIMIT
<add> descriptionCharactersRemaining > CHAR_WARNING_LIMIT
<ide> ? 'text-muted'
<ide> : 'text-danger'
<ide> }
<ide> style={{fontWeight: 400}}>
<ide> {descriptionCharactersCount}
<del> <p><span className="text-danger">{descriptionCharactersCount > ALERT_DESCRIPTION_CHAR_LIMIT ? 'WARNING: Alert description longer than 1200 characters may get truncated in some dissemination channels. ' : ''}</span></p>
<ide> </span>
<add> {descriptionCharactersCount > ALERT_DESCRIPTION_CHAR_LIMIT
<add> ? (
<add> <h5 style={{margin: '0px'}}>
<add> <small className='text-danger'>
<add> {`WARNING: Alert description longer than ${ALERT_DESCRIPTION_CHAR_LIMIT} characters may get truncated in some dissemination channels. `}
<add> </small>
<add> </h5>
<add> ) : ''}
<ide> </ControlLabel>
<ide> <FormControl
<ide> componentClass='textarea'
<ide> </Row>
<ide> <Row>
<ide> <Col xs={12}>
<del> <AffectedServices
<add> <AffectedServices
<ide> sortedFeeds={sortedFeeds}
<ide> {...this.props} />
<ide> </Col> |
|
JavaScript | mit | 7cef8dad848c5648c6b843e26ea683b9b1f4d43d | 0 | ljharb/javascript,LodoSoftware/javascript-style-guide,getjll/JavaScript-Style-Guide,xalexec/javascript,LodoSoftware/javascript-style-guide,cloudability/javascript-style-guide,bl00mber/javascript,airbnb/javascript | module.exports = {
plugins: [
'react',
],
parserOptions: {
ecmaFeatures: {
jsx: true,
},
},
ecmaFeatures: {
jsx: true,
},
// View link below for react rules documentation
// https://github.com/yannickcr/eslint-plugin-react#list-of-supported-rules
rules: {
// Specify whether double or single quotes should be used in JSX attributes
// http://eslint.org/docs/rules/jsx-quotes
'jsx-quotes': ['error', 'prefer-double'],
'class-methods-use-this': ['error', {
exceptMethods: [
'render',
'getInitialState',
'getDefaultProps',
'getChildContext',
'componentWillMount',
'componentDidMount',
'componentWillReceiveProps',
'shouldComponentUpdate',
'componentWillUpdate',
'componentDidUpdate',
'componentWillUnmount',
],
}],
// Prevent missing displayName in a React component definition
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/display-name.md
'react/display-name': ['off', { ignoreTranspilerName: false }],
// Forbid certain propTypes (any, array, object)
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-prop-types.md
'react/forbid-prop-types': ['error', { forbid: ['any', 'array', 'object'] }],
// Enforce boolean attributes notation in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-boolean-value.md
'react/jsx-boolean-value': ['error', 'never'],
// Validate closing bracket location in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-closing-bracket-location.md
'react/jsx-closing-bracket-location': ['error', 'line-aligned'],
// Enforce or disallow spaces inside of curly braces in JSX attributes
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-curly-spacing.md
'react/jsx-curly-spacing': ['error', 'never', { allowMultiline: true }],
// Enforce event handler naming conventions in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-handler-names.md
'react/jsx-handler-names': ['off', {
eventHandlerPrefix: 'handle',
eventHandlerPropPrefix: 'on',
}],
// Validate props indentation in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-indent-props.md
'react/jsx-indent-props': ['error', 2],
// Validate JSX has key prop when in array or iterator
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-key.md
'react/jsx-key': 'off',
// Limit maximum of props on a single line in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-max-props-per-line.md
'react/jsx-max-props-per-line': ['error', { maximum: 1, when: 'multiline' }],
// Prevent usage of .bind() in JSX props
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-bind.md
'react/jsx-no-bind': ['error', {
ignoreRefs: true,
allowArrowFunctions: true,
allowBind: false,
}],
// Prevent duplicate props in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-duplicate-props.md
'react/jsx-no-duplicate-props': ['error', { ignoreCase: true }],
// Prevent usage of unwrapped JSX strings
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-literals.md
'react/jsx-no-literals': 'off',
// Disallow undeclared variables in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-undef.md
'react/jsx-no-undef': 'error',
// Enforce PascalCase for user-defined JSX components
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-pascal-case.md
'react/jsx-pascal-case': ['error', {
allowAllCaps: true,
ignore: [],
}],
// Enforce propTypes declarations alphabetical sorting
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-prop-types.md
'react/sort-prop-types': ['off', {
ignoreCase: true,
callbacksLast: false,
requiredFirst: false,
}],
// Deprecated in favor of react/jsx-sort-props
'react/jsx-sort-prop-types': 'off',
// Enforce props alphabetical sorting
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-sort-props.md
'react/jsx-sort-props': ['off', {
ignoreCase: true,
callbacksLast: false,
shorthandFirst: false,
shorthandLast: false,
noSortAlphabetically: false,
}],
// Prevent React to be incorrectly marked as unused
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-react.md
'react/jsx-uses-react': ['error'],
// Prevent variables used in JSX to be incorrectly marked as unused
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-vars.md
'react/jsx-uses-vars': 'error',
// Prevent usage of dangerous JSX properties
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger.md
'react/no-danger': 'warn',
// Prevent usage of deprecated methods
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-deprecated.md
'react/no-deprecated': ['error'],
// Prevent usage of setState in componentDidMount
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-mount-set-state.md
'react/no-did-mount-set-state': ['error'],
// Prevent usage of setState in componentDidUpdate
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-update-set-state.md
'react/no-did-update-set-state': ['error'],
// Prevent direct mutation of this.state
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-direct-mutation-state.md
'react/no-direct-mutation-state': 'off',
// Prevent usage of isMounted
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-is-mounted.md
'react/no-is-mounted': 'error',
// Prevent multiple component definition per file
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-multi-comp.md
'react/no-multi-comp': ['error', { ignoreStateless: true }],
// Prevent usage of setState
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-set-state.md
'react/no-set-state': 'off',
// Prevent using string references
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-string-refs.md
'react/no-string-refs': 'error',
// Prevent usage of unknown DOM property
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unknown-property.md
'react/no-unknown-property': 'error',
// Require ES6 class declarations over React.createClass
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prefer-es6-class.md
'react/prefer-es6-class': ['error', 'always'],
// Require stateless functions when not using lifecycle methods, setState or ref
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prefer-stateless-function.md
'react/prefer-stateless-function': ['error', { ignorePureComponents: true }],
// Prevent missing props validation in a React component definition
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prop-types.md
'react/prop-types': ['error', { ignore: [], customValidators: [], skipUndeclared: false }],
// Prevent missing React when using JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/react-in-jsx-scope.md
'react/react-in-jsx-scope': 'error',
// Restrict file extensions that may be required
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-extension.md
// deprecated in favor of import/extensions
'react/require-extension': ['off', { extensions: ['.jsx', '.js'] }],
// Require render() methods to return something
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-render-return.md
'react/require-render-return': 'error',
// Prevent extra closing tags for components without children
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/self-closing-comp.md
'react/self-closing-comp': 'error',
// Enforce spaces before the closing bracket of self-closing JSX elements
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-space-before-closing.md
'react/jsx-space-before-closing': ['error', 'always'],
// Enforce component methods order
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-comp.md
'react/sort-comp': ['error', {
order: [
'static-methods',
'lifecycle',
'/^on.+$/',
'/^(get|set)(?!(InitialState$|DefaultProps$|ChildContext$)).+$/',
'everything-else',
'/^render.+$/',
'render'
],
}],
// Prevent missing parentheses around multilines JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-wrap-multilines.md
'react/jsx-wrap-multilines': ['error', {
declaration: true,
assignment: true,
return: true
}],
'react/wrap-multilines': 'off', // deprecated version
// Require that the first prop in a JSX element be on a new line when the element is multiline
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-first-prop-new-line.md
'react/jsx-first-prop-new-line': ['error', 'multiline'],
// Enforce spacing around jsx equals signs
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-equals-spacing.md
'react/jsx-equals-spacing': ['error', 'never'],
// Enforce JSX indentation
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-indent.md
'react/jsx-indent': ['error', 2],
// Disallow target="_blank" on links
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-target-blank.md
'react/jsx-no-target-blank': 'error',
// only .jsx files may have JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-filename-extension.md
'react/jsx-filename-extension': ['error', { extensions: ['.jsx'] }],
// prevent accidental JS comments from being injected into JSX as text
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-comment-textnodes.md
'react/jsx-no-comment-textnodes': 'error',
'react/no-comment-textnodes': 'off', // deprecated version
// disallow using React.render/ReactDOM.render's return value
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-render-return-value.md
'react/no-render-return-value': 'error',
// require a shouldComponentUpdate method, or PureRenderMixin
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-optimization.md
'react/require-optimization': ['off', { allowDecorators: [] }],
// warn against using findDOMNode()
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-find-dom-node.md
'react/no-find-dom-node': 'error',
// Forbid certain props on Components
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-component-props.md
'react/forbid-component-props': ['off', { forbid: [] }],
// Forbid certain elements
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-elements.md
'react/forbid-elements': ['off', { forbid: [], }],
// Prevent problem with children and props.dangerouslySetInnerHTML
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger-with-children.md
'react/no-danger-with-children': 'error',
// Prevent unused propType definitions
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unused-prop-types.md
'react/no-unused-prop-types': ['error', {
customValidators: [
],
skipShapeProps: true,
}],
// Require style prop value be an object or var
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/style-prop-object.md
'react/style-prop-object': 'error',
// Prevent invalid characters from appearing in markup
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unescaped-entities.md
'react/no-unescaped-entities': 'error',
// Prevent passing of children as props
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-children-prop.md
'react/no-children-prop': 'error',
// Validate whitespace in and around the JSX opening and closing brackets
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-tag-spacing.md
'react/jsx-tag-spacing': ['error', {
closingSlash: 'never',
beforeSelfClosing: 'always',
afterOpening: 'never'
}],
// Prevent usage of Array index in keys
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-array-index-key.md
'react/no-array-index-key': 'error',
// Enforce a defaultProps definition for every prop that is not a required prop
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-default-props.md
'react/require-default-props': 'error',
// Forbids using non-exported propTypes
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-foreign-prop-types.md
'react/forbid-foreign-prop-types': 'off',
// Prevent void DOM elements from receiving children
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/void-dom-elements-no-children.md
'react/void-dom-elements-no-children': 'error',
},
settings: {
'import/resolver': {
node: {
extensions: ['.js', '.jsx', '.json']
}
},
react: {
pragma: 'React',
version: '0.15'
},
}
};
| packages/eslint-config-airbnb/rules/react.js | module.exports = {
plugins: [
'react',
],
parserOptions: {
ecmaFeatures: {
jsx: true,
},
},
ecmaFeatures: {
jsx: true,
},
// View link below for react rules documentation
// https://github.com/yannickcr/eslint-plugin-react#list-of-supported-rules
rules: {
// Specify whether double or single quotes should be used in JSX attributes
// http://eslint.org/docs/rules/jsx-quotes
'jsx-quotes': ['error', 'prefer-double'],
'class-methods-use-this': ['error', {
exceptMethods: [
'render',
'getInitialState',
'getDefaultProps',
'getChildContext',
'componentWillMount',
'componentDidMount',
'componentWillReceiveProps',
'shouldComponentUpdate',
'componentWillUpdate',
'componentDidUpdate',
'componentWillUnmount',
],
}],
// Prevent missing displayName in a React component definition
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/display-name.md
'react/display-name': ['off', { ignoreTranspilerName: false }],
// Forbid certain propTypes (any, array, object)
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-prop-types.md
'react/forbid-prop-types': ['error', { forbid: ['any', 'array', 'object'] }],
// Enforce boolean attributes notation in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-boolean-value.md
'react/jsx-boolean-value': ['error', 'never'],
// Validate closing bracket location in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-closing-bracket-location.md
'react/jsx-closing-bracket-location': ['error', 'line-aligned'],
// Enforce or disallow spaces inside of curly braces in JSX attributes
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-curly-spacing.md
'react/jsx-curly-spacing': ['error', 'never', { allowMultiline: true }],
// Enforce event handler naming conventions in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-handler-names.md
'react/jsx-handler-names': ['off', {
eventHandlerPrefix: 'handle',
eventHandlerPropPrefix: 'on',
}],
// Validate props indentation in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-indent-props.md
'react/jsx-indent-props': ['error', 2],
// Validate JSX has key prop when in array or iterator
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-key.md
'react/jsx-key': 'off',
// Limit maximum of props on a single line in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-max-props-per-line.md
'react/jsx-max-props-per-line': ['error', { maximum: 1, when: 'multiline' }],
// Prevent usage of .bind() in JSX props
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-bind.md
'react/jsx-no-bind': ['error', {
ignoreRefs: true,
allowArrowFunctions: true,
allowBind: false,
}],
// Prevent duplicate props in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-duplicate-props.md
'react/jsx-no-duplicate-props': ['error', { ignoreCase: true }],
// Prevent usage of unwrapped JSX strings
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-literals.md
'react/jsx-no-literals': 'off',
// Disallow undeclared variables in JSX
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-undef.md
'react/jsx-no-undef': 'error',
// Enforce PascalCase for user-defined JSX components
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-pascal-case.md
'react/jsx-pascal-case': ['error', {
allowAllCaps: true,
ignore: [],
}],
// Enforce propTypes declarations alphabetical sorting
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-prop-types.md
'react/sort-prop-types': ['off', {
ignoreCase: true,
callbacksLast: false,
requiredFirst: false,
}],
// Deprecated in favor of react/jsx-sort-props
'react/jsx-sort-prop-types': 'off',
// Enforce props alphabetical sorting
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-sort-props.md
'react/jsx-sort-props': ['off', {
ignoreCase: true,
callbacksLast: false,
shorthandFirst: false,
shorthandLast: false,
noSortAlphabetically: false,
}],
// Prevent React to be incorrectly marked as unused
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-react.md
'react/jsx-uses-react': ['error'],
// Prevent variables used in JSX to be incorrectly marked as unused
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-vars.md
'react/jsx-uses-vars': 'error',
// Prevent usage of dangerous JSX properties
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger.md
'react/no-danger': 'warn',
// Prevent usage of deprecated methods
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-deprecated.md
'react/no-deprecated': ['error'],
// Prevent usage of setState in componentDidMount
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-mount-set-state.md
'react/no-did-mount-set-state': ['error'],
// Prevent usage of setState in componentDidUpdate
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-update-set-state.md
'react/no-did-update-set-state': ['error'],
// Prevent direct mutation of this.state
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-direct-mutation-state.md
'react/no-direct-mutation-state': 'off',
// Prevent usage of isMounted
// https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-is-mounted.md
'react/no-is-mounted': 'error',
// Prevent multiple component definition per file
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-multi-comp.md
    'react/no-multi-comp': ['error', { ignoreStateless: true }],
    // Prevent usage of setState
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-set-state.md
    'react/no-set-state': 'off',
    // Prevent using string references
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-string-refs.md
    'react/no-string-refs': 'error',
    // Prevent usage of unknown DOM property
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unknown-property.md
    'react/no-unknown-property': 'error',
    // Require ES6 class declarations over React.createClass
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prefer-es6-class.md
    'react/prefer-es6-class': ['error', 'always'],
    // Require stateless functions when not using lifecycle methods, setState or ref
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prefer-stateless-function.md
    'react/prefer-stateless-function': ['error', { ignorePureComponents: true }],
    // Prevent missing props validation in a React component definition
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/prop-types.md
    'react/prop-types': ['error', { ignore: [], customValidators: [], skipUndeclared: false }],
    // Prevent missing React when using JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/react-in-jsx-scope.md
    'react/react-in-jsx-scope': 'error',
    // Restrict file extensions that may be required
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-extension.md
    // deprecated in favor of import/extensions
    'react/require-extension': ['off', { extensions: ['.jsx', '.js'] }],
    // Require render() methods to return something
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-render-return.md
    'react/require-render-return': 'error',
    // Prevent extra closing tags for components without children
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/self-closing-comp.md
    'react/self-closing-comp': 'error',
    // Enforce spaces before the closing bracket of self-closing JSX elements
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-space-before-closing.md
    // NOTE(review): newer plugin versions deprecate this in favor of
    // jsx-tag-spacing's beforeSelfClosing option (configured below) — confirm
    // against the eslint-plugin-react version this config targets.
    'react/jsx-space-before-closing': ['error', 'always'],
    // Enforce component methods order
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-comp.md
    'react/sort-comp': ['error', {
      order: [
        'static-methods',
        'lifecycle',
        '/^on.+$/',
        '/^(get|set)(?!(InitialState$|DefaultProps$|ChildContext$)).+$/',
        'everything-else',
        '/^render.+$/',
        'render'
      ],
    }],
    // Prevent missing parentheses around multilines JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-wrap-multilines.md
    'react/jsx-wrap-multilines': ['error', {
      declaration: true,
      assignment: true,
      return: true
    }],
    'react/wrap-multilines': 'off', // deprecated version of react/jsx-wrap-multilines
    // Require that the first prop in a JSX element be on a new line when the element is multiline
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-first-prop-new-line.md
    'react/jsx-first-prop-new-line': ['error', 'multiline'],
    // Enforce spacing around jsx equals signs
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-equals-spacing.md
    'react/jsx-equals-spacing': ['error', 'never'],
    // Enforce JSX indentation
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-indent.md
    'react/jsx-indent': ['error', 2],
    // Disallow target="_blank" on links
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-target-blank.md
    'react/jsx-no-target-blank': 'error',
    // only .jsx files may have JSX
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-filename-extension.md
    'react/jsx-filename-extension': ['error', { extensions: ['.jsx'] }],
    // prevent accidental JS comments from being injected into JSX as text
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-comment-textnodes.md
    'react/jsx-no-comment-textnodes': 'error',
    'react/no-comment-textnodes': 'off', // deprecated version of react/jsx-no-comment-textnodes
    // disallow using React.render/ReactDOM.render's return value
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-render-return-value.md
    'react/no-render-return-value': 'error',
    // require a shouldComponentUpdate method, or PureRenderMixin
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-optimization.md
    'react/require-optimization': ['off', { allowDecorators: [] }],
    // warn against using findDOMNode()
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-find-dom-node.md
    'react/no-find-dom-node': 'error',
    // Forbid certain props on Components
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-component-props.md
    'react/forbid-component-props': ['off', { forbid: [] }],
    // Prevent problem with children and props.dangerouslySetInnerHTML
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger-with-children.md
    'react/no-danger-with-children': 'error',
    // Prevent unused propType definitions
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unused-prop-types.md
    'react/no-unused-prop-types': ['error', {
      customValidators: [
      ],
      skipShapeProps: true,
    }],
    // Require style prop value be an object or var
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/style-prop-object.md
    'react/style-prop-object': 'error',
    // Prevent invalid characters from appearing in markup
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unescaped-entities.md
    'react/no-unescaped-entities': 'error',
    // Prevent passing of children as props
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-children-prop.md
    'react/no-children-prop': 'error',
    // Validate whitespace in and around the JSX opening and closing brackets
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-tag-spacing.md
    'react/jsx-tag-spacing': ['error', {
      closingSlash: 'never',
      beforeSelfClosing: 'always',
      afterOpening: 'never'
    }],
    // Prevent usage of Array index in keys
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-array-index-key.md
    'react/no-array-index-key': 'error',
    // Enforce a defaultProps definition for every prop that is not a required prop
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-default-props.md
    'react/require-default-props': 'error',
    // Forbid certain elements
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-elements.md
    'react/forbid-elements': ['off', {
      forbid: [
      ],
    }],
    // Forbids using non-exported propTypes
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-foreign-prop-types.md
    'react/forbid-foreign-prop-types': 'off',
    // Prevent void DOM elements from receiving children
    // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/void-dom-elements-no-children.md
    'react/void-dom-elements-no-children': 'error',
},
  settings: {
    // Module resolution for eslint-plugin-import: resolve with Node-style
    // lookup, and treat .jsx/.json files as importable in addition to .js.
    'import/resolver': {
      node: {
        extensions: ['.js', '.jsx', '.json']
      }
    },
    // Shared settings consumed by eslint-plugin-react rules.
    react: {
      pragma: 'React', // identifier used as the JSX pragma
      version: '0.15' // React version the version-aware rules should assume
    },
  }
};
| [eslint config] [docs] add rule documentation to `forbid-elements` entry | packages/eslint-config-airbnb/rules/react.js | [eslint config] [docs] add rule documentation to `forbid-elements` entry | <ide><path>ackages/eslint-config-airbnb/rules/react.js
<ide> // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-component-props.md
<ide> 'react/forbid-component-props': ['off', { forbid: [] }],
<ide>
<add> // Forbid certain elements
<add> // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-elements.md
<add> 'react/forbid-elements': ['off', { forbid: [], }],
<add>
<ide> // Prevent problem with children and props.dangerouslySetInnerHTML
<ide> // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger-with-children.md
<ide> 'react/no-danger-with-children': 'error',
<ide> // Enforce a defaultProps definition for every prop that is not a required prop
<ide> // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-default-props.md
<ide> 'react/require-default-props': 'error',
<del>
<del> 'react/forbid-elements': ['off', {
<del> forbid: [
<del> ],
<del> }],
<ide>
<ide> // Forbids using non-exported propTypes
<ide> // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/forbid-foreign-prop-types.md |
|
Java | apache-2.0 | 6289aa34449eaa30bb5f0691fe2ab8d1be7e6c77 | 0 | AKSW/KBox,AKSW/KBox | package org.aksw.kbox.kibe;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import org.aksw.kbox.kibe.tdb.TDBTest;
import org.aksw.kbox.kns.KBResolver;
import org.aksw.kbox.kns.KN;
import org.aksw.kbox.kns.KNSSever;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.hp.hpl.jena.query.ResultSet;
public class KBoxTest {
@BeforeClass
public static void setUp() throws Exception {
File indexFile = File.createTempFile("knowledgebase","idx");
URL[] filesToIndex = new URL[1];
URL url = TDBTest.class.getResource("/org/aksw/kbox/kibe/dbpedia_3.9.xml");
filesToIndex[0] = url;
KBox.createIndex(indexFile, filesToIndex);
KBox.install(indexFile.toURI().toURL(), new URL("http://dbpedia39"));
indexFile.deleteOnExit();
KBox.install(indexFile.toURI().toURL(), new URL("http://dbpedia.org/3.9"));
indexFile.deleteOnExit();
indexFile = File.createTempFile("knowledgebase","idx");
url = TDBTest.class.getResource("/org/aksw/kbox/kibe/foaf.rdf");
filesToIndex[0] = url;
KBox.createIndex(indexFile, filesToIndex);
KBox.install(indexFile.toURI().toURL(), new URL("http://foaf"));
indexFile.deleteOnExit();
}
@Test
public void testVisitKBs() throws Exception {
URL serverURL = KBoxTest.class.getResource("/org/aksw/kbox/kibe/");
MockKNSVisitor visitor = new MockKNSVisitor();
KNSSever.visit(serverURL, visitor);
assertEquals(1, visitor.getKNSVisitedList().size());
}
@Test
public void testKNEquals() throws Exception {
KN kn = new KN("teste", "a", "b", null, null, null, null);
assertTrue("teste".equals(kn.getName()));
}
@Test
public void testKNEqualsWithFormat() throws Exception {
KN kn = new KN("teste", "a", "b", "c", null, null, null);
assertTrue(kn.equals("teste","b", "c"));
}
@Test
public void testKNEqualsWithFormatWithNull1() throws Exception {
KN kn = new KN("teste", "a", "b", "d", null, null, null);
Assert.assertFalse(kn.equals("teste","b", "e"));
}
@Test
public void testKNEqualsWithFormatWithNull2() throws Exception {
KN kn = new KN("teste", "a", "b", "c", null, null, null);
assertTrue(kn.equals("teste","b", null));
}
@Test
public void testKNEqualsWithFormatWithNull3() throws Exception {
KN kn = new KN("teste", "a", "b", "c", null, null, null);
Assert.assertFalse(kn.equals("teste", "b", "d"));
}
@Test
public void testResolveURLWithKBoxKNSService() throws Exception {
URL serverURL = KBoxTest.class.getResource("/org/aksw/kbox/kibe/");
KN resolvedKN = KBox.resolve(serverURL, new URL("http://test.org"));
assertEquals(resolvedKN.getTarget(), "http://target.org");
}
@Test
public void testResolveKNS() throws MalformedURLException, Exception {
URL serverURL = TDBTest.class.getResource("/org/aksw/kbox/kibe/");
KBResolver resolver = new KBResolver();
KN resolvedKN = KBox.resolve(serverURL, new URL("http://test.org"), resolver);
assertEquals(resolvedKN.getTarget(), "http://target.org");
}
@Test
public void testNewDir() throws Exception {
File f = KBox.newDir(new URL("http://dbpedia.org/en/full"));
assertTrue(f.getAbsolutePath().endsWith("en" + File.separator + "full"));
}
@Test
public void testInstallProcess() throws Exception {
ResultSet rs = KBox.query("Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test
public void testDescribeQuery() throws Exception {
ResultSet rs = KBox.query("Describe <http://dbpedia.org/ontology/Place>",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test
public void testAskQuery() throws Exception {
ResultSet rs = KBox.query("ASK { <http://dbpedia.org/ontology/Place> ?p ?o. " +
" FILTER(?o = 'test') . }",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(1, i);
}
@Test
public void testQueryInstalledKB2() throws Exception {
ResultSet rs = KBox.query("Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia.org/3.9"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test
public void testQueryInstalledKB() throws Exception {
ResultSet rs = KBox.query("Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
rs = KBox.query(
"Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39"));
i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test(expected=Exception.class)
public void testQueryNotInstalledKB() throws Exception {
@SuppressWarnings("unused")
ResultSet rs = KBox.query(
"Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39.o"));
Assert.fail("The query should have returned an Exception.");
}
@Test
public void listKNSServers() throws MalformedURLException, Exception {
MockKNSServerListVisitor mockKNSVisitor = new MockKNSServerListVisitor();
KBox.visit(mockKNSVisitor);
assertEquals(1, mockKNSVisitor.getVisits());
}
@Test
public void listKNSServers2() throws MalformedURLException, Exception {
URL serverURL = TDBTest.class.getResource("/org/aksw/kbox/kibe/");
KBox.installKNS(serverURL);
MockKNSServerListVisitor mockKNSVisitor = new MockKNSServerListVisitor();
KBox.visit(mockKNSVisitor);
assertEquals(2, mockKNSVisitor.getVisits());
KBox.removeKNS(serverURL);
mockKNSVisitor = new MockKNSServerListVisitor();
KBox.visit(mockKNSVisitor);
assertEquals(1, mockKNSVisitor.getVisits());
}
}
| kbox.kibe.test/src/test/java/org/aksw/kbox/kibe/KBoxTest.java | package org.aksw.kbox.kibe;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import org.aksw.kbox.kibe.tdb.TDBTest;
import org.aksw.kbox.kns.KN;
import org.aksw.kbox.kns.KNSSever;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.hp.hpl.jena.query.ResultSet;
public class KBoxTest {
@BeforeClass
public static void setUp() throws Exception {
File indexFile = File.createTempFile("knowledgebase","idx");
URL[] filesToIndex = new URL[1];
URL url = TDBTest.class.getResource("/org/aksw/kbox/kibe/dbpedia_3.9.xml");
filesToIndex[0] = url;
KBox.createIndex(indexFile, filesToIndex);
KBox.install(indexFile.toURI().toURL(), new URL("http://dbpedia39"));
indexFile.deleteOnExit();
KBox.install(indexFile.toURI().toURL(), new URL("http://dbpedia.org/3.9"));
indexFile.deleteOnExit();
indexFile = File.createTempFile("knowledgebase","idx");
url = TDBTest.class.getResource("/org/aksw/kbox/kibe/foaf.rdf");
filesToIndex[0] = url;
KBox.createIndex(indexFile, filesToIndex);
KBox.install(indexFile.toURI().toURL(), new URL("http://foaf"));
indexFile.deleteOnExit();
}
@Test
public void testVisitKBs() throws Exception {
URL serverURL = KBoxTest.class.getResource("/org/aksw/kbox/kibe/");
MockKNSVisitor visitor = new MockKNSVisitor();
KNSSever.visit(serverURL, visitor);
assertEquals(1, visitor.getKNSVisitedList().size());
}
@Test
public void testKNEquals() throws Exception {
KN kn = new KN("teste", "a", "b", null, null, null, null);
assertTrue("teste".equals(kn.getName()));
}
@Test
public void testKNEqualsWithFormat() throws Exception {
KN kn = new KN("teste", "a", "b", "c", null, null, null);
assertTrue(kn.equals("teste","b", "c"));
}
@Test
public void testKNEqualsWithFormatWithNull1() throws Exception {
KN kn = new KN("teste", "a", "b", "d", null, null, null);
Assert.assertFalse(kn.equals("teste","b", "e"));
}
@Test
public void testKNEqualsWithFormatWithNull2() throws Exception {
KN kn = new KN("teste", "a", "b", "c", null, null, null);
assertTrue(kn.equals("teste","b", null));
}
@Test
public void testKNEqualsWithFormatWithNull3() throws Exception {
KN kn = new KN("teste", "a", "b", "c", null, null, null);
Assert.assertFalse(kn.equals("teste", "b", "d"));
}
@Test
public void testResolveURLWithKBoxKNSService() throws Exception {
KN resolvedKN = KBox.resolve(new URL("http://dbpedia.org/3.9/en/full"));
assertEquals(resolvedKN.getTarget(), "http://vmdbpedia.informatik.uni-leipzig.de:3031/dbpedia/3.9/kbox.kb");
}
@Test
public void testNewDir() throws Exception {
File f = KBox.newDir(new URL("http://dbpedia.org/en/full"));
assertTrue(f.getAbsolutePath().endsWith("en" + File.separator + "full"));
}
@Test
public void testInstallProcess() throws Exception {
ResultSet rs = KBox.query("Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test
public void testDescribeQuery() throws Exception {
ResultSet rs = KBox.query("Describe <http://dbpedia.org/ontology/Place>",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test
public void testAskQuery() throws Exception {
ResultSet rs = KBox.query("ASK { <http://dbpedia.org/ontology/Place> ?p ?o. " +
" FILTER(?o = 'test') . }",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(1, i);
}
@Test
public void testQueryInstalledKB2() throws Exception {
ResultSet rs = KBox.query("Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia.org/3.9"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test
public void testQueryInstalledKB() throws Exception {
ResultSet rs = KBox.query("Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39"));
int i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
rs = KBox.query(
"Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39"));
i = 0;
while (rs != null && rs.hasNext()) {
rs.next();
i++;
}
assertEquals(19, i);
}
@Test(expected=Exception.class)
public void testQueryNotInstalledKB() throws Exception {
@SuppressWarnings("unused")
ResultSet rs = KBox.query(
"Select ?p where {<http://dbpedia.org/ontology/Place> ?p ?o}",
new URL("http://dbpedia39.o"));
Assert.fail("The query should have returned an Exception.");
}
@Test
public void testResolveKNS() throws MalformedURLException, Exception {
URL serverURL = TDBTest.class.getResource("/org/aksw/kbox/kibe/");
KN resolvedKN = KBox.resolve(serverURL, new URL("http://test.org"));
assertEquals(resolvedKN.getTarget(), "http://target.org");
}
@Test
public void listKNSServers() throws MalformedURLException, Exception {
MockKNSServerListVisitor mockKNSVisitor = new MockKNSServerListVisitor();
KBox.visit(mockKNSVisitor);
assertEquals(1, mockKNSVisitor.getVisits());
}
@Test
public void listKNSServers2() throws MalformedURLException, Exception {
URL serverURL = TDBTest.class.getResource("/org/aksw/kbox/kibe/");
KBox.installKNS(serverURL);
MockKNSServerListVisitor mockKNSVisitor = new MockKNSServerListVisitor();
KBox.visit(mockKNSVisitor);
assertEquals(2, mockKNSVisitor.getVisits());
KBox.removeKNS(serverURL);
mockKNSVisitor = new MockKNSServerListVisitor();
KBox.visit(mockKNSVisitor);
assertEquals(1, mockKNSVisitor.getVisits());
}
}
| - adding Resolver unit test
| kbox.kibe.test/src/test/java/org/aksw/kbox/kibe/KBoxTest.java | - adding Resolver unit test | <ide><path>box.kibe.test/src/test/java/org/aksw/kbox/kibe/KBoxTest.java
<ide> import java.net.URL;
<ide>
<ide> import org.aksw.kbox.kibe.tdb.TDBTest;
<add>import org.aksw.kbox.kns.KBResolver;
<ide> import org.aksw.kbox.kns.KN;
<ide> import org.aksw.kbox.kns.KNSSever;
<ide> import org.junit.Assert;
<ide>
<ide> @Test
<ide> public void testResolveURLWithKBoxKNSService() throws Exception {
<del> KN resolvedKN = KBox.resolve(new URL("http://dbpedia.org/3.9/en/full"));
<del> assertEquals(resolvedKN.getTarget(), "http://vmdbpedia.informatik.uni-leipzig.de:3031/dbpedia/3.9/kbox.kb");
<add> URL serverURL = KBoxTest.class.getResource("/org/aksw/kbox/kibe/");
<add> KN resolvedKN = KBox.resolve(serverURL, new URL("http://test.org"));
<add> assertEquals(resolvedKN.getTarget(), "http://target.org");
<add> }
<add>
<add> @Test
<add> public void testResolveKNS() throws MalformedURLException, Exception {
<add> URL serverURL = TDBTest.class.getResource("/org/aksw/kbox/kibe/");
<add> KBResolver resolver = new KBResolver();
<add> KN resolvedKN = KBox.resolve(serverURL, new URL("http://test.org"), resolver);
<add> assertEquals(resolvedKN.getTarget(), "http://target.org");
<ide> }
<ide>
<ide> @Test
<ide> new URL("http://dbpedia39.o"));
<ide> Assert.fail("The query should have returned an Exception.");
<ide> }
<del>
<del> @Test
<del> public void testResolveKNS() throws MalformedURLException, Exception {
<del> URL serverURL = TDBTest.class.getResource("/org/aksw/kbox/kibe/");
<del> KN resolvedKN = KBox.resolve(serverURL, new URL("http://test.org"));
<del> assertEquals(resolvedKN.getTarget(), "http://target.org");
<del> }
<add>
<ide>
<ide> @Test
<ide> public void listKNSServers() throws MalformedURLException, Exception { |
|
Java | apache-2.0 | 622c5893580135d682cf38a5e5e83aa49c346c77 | 0 | rizafu/CoachMark | package io.github.rizafu.coachmark;
import android.app.Activity;
import android.content.Context;
import android.content.res.Resources;
import android.databinding.DataBindingUtil;
import android.graphics.Color;
import android.graphics.Rect;
import android.support.annotation.IdRes;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorListenerAdapter;
import android.support.v7.widget.CardView;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.view.ViewTreeObserver;
import android.view.Window;
import android.view.animation.Animation;
import android.widget.FrameLayout;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import io.github.rizafu.coachmark.databinding.WidgetCoachTooltipBinding;
/**
* Created by RizaFu on 11/7/16.
*/
public class CoachMark {
private int animDuration;
private Activity activity;
private FrameLayout container;
private @TooltipAlignment int tooltipAlignment;
private @PointerTooltipAlignment int tooltipPointerAlignment;
private int overlayPadding;
private int backgroundColorResource;
private boolean dismissible;
private boolean isCircleMark;
private Builder builder;
private boolean isShow;
private WidgetCoachTooltipViewModel tooltipViewModel;
private WidgetCoachTooltipBinding tooltipBinding;
private CoachMarkOverlay coachMarkOverlay;
private View targetView;
private View.OnClickListener targetOnClick;
private Runnable onDismissListener;
private Animation tooltipShowAnimation;
private Animation tooltipDismissAnimation;
private final double CIRCLE_ADDITIONAL_RADIUS_RATIO = 1.5f;
public static final int ROOT_TOP = 1;
public static final int ROOT_BOTTOM = 2;
public static final int TARGET_TOP = 3;
public static final int TARGET_BOTTOM = 4;
@IntDef({ROOT_TOP,ROOT_BOTTOM,TARGET_TOP,TARGET_BOTTOM})
@Retention(RetentionPolicy.SOURCE)
public @interface TooltipAlignment {}
public static final int POINTER_RIGHT = 1;
public static final int POINTER_MIDDLE = 2;
public static final int POINTER_LEFT = 3;
public static final int POINTER_GONE = 4;
@IntDef({POINTER_RIGHT,POINTER_MIDDLE,POINTER_LEFT,POINTER_GONE})
@Retention(RetentionPolicy.SOURCE)
public @interface PointerTooltipAlignment{}
private CoachMark(final Builder builder) {
this.builder = builder;
this.activity = builder.activity;
this.container = new FrameLayout(activity);
this.tooltipViewModel = new WidgetCoachTooltipViewModel();
this.tooltipBinding = DataBindingUtil.inflate(activity.getLayoutInflater(),R.layout.widget_coach_tooltip,container,false);
this.tooltipBinding.setViewModel(this.tooltipViewModel);
this.backgroundColorResource = builder.isTransparentBackground ? android.R.color.transparent : R.color.background;
this.isCircleMark = builder.isCircleMark;
this.targetView = builder.target;
this.overlayPadding = builder.markerPadding;
this.dismissible = builder.dismissible;
this.tooltipAlignment = builder.tooltipAlignment;
tooltipViewModel.title.set(builder.title);
tooltipViewModel.description.set(builder.description);
tooltipViewModel.actionName.set(builder.actionName);
tooltipViewModel.backgroundColor.set(builder.tooltipBagroundColor);
tooltipViewModel.textColorResource.set(builder.textColor);
this.onDismissListener = builder.onDismissListener;
this.tooltipShowAnimation = builder.tooltipShowAnimation;
this.tooltipDismissAnimation = builder.tooltipDismissAnimation;
Window window = activity.getWindow();
if (window != null) {
ViewGroup decorView = (ViewGroup) window.getDecorView();
if (decorView != null) {
ViewGroup content = (ViewGroup) decorView.findViewById(android.R.id.content);
if (content != null) {
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
container.setBackgroundColor(Color.TRANSPARENT);
decorView.addView(container, layoutParams);
coachMarkOverlay = new CoachMarkOverlay(activity);
coachMarkOverlay.setBackgroundResource(backgroundColorResource);
container.addView(coachMarkOverlay,layoutParams);
container.addView(tooltipBinding.getRoot());
}
}
}
animDuration = container.getResources().getInteger(android.R.integer.config_longAnimTime);
container.setClickable(true);
container.setVisibility(View.GONE);
ViewCompat.setAlpha(container,0f);
addTarget();
setTooltipAlignment(tooltipAlignment, builder.pointerTooltipAlignment);
}
private void setAcitonClick(View.OnClickListener acitonClick){
tooltipViewModel.actionClick = acitonClick;
}
private void setTargetOnClick(View.OnClickListener targetOnClick) {
this.targetOnClick = targetOnClick;
}
private void setTooltipAlignment(@TooltipAlignment final int tooltipAlignment, @PointerTooltipAlignment final int pointerTooltipAlignment){
this.tooltipAlignment = tooltipAlignment;
this.tooltipPointerAlignment = pointerTooltipAlignment;
tooltipBinding.getRoot().getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
relocationTooltip(targetView, tooltipAlignment);
pointerTooltipAlignment(targetView, pointerTooltipAlignment);
tooltipBinding.getRoot().getViewTreeObserver().removeOnPreDrawListener(this);
return false;
}
});
}
private void addTarget(){
if (targetView!=null)
targetView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
if (isCircleMark){
addCircleRect(targetView);
} else {
addRoundRect(targetView);
}
targetView.getViewTreeObserver().removeOnPreDrawListener(this);
return false;
}
});
}
private void addRoundRect(View view){
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
int radius = 5;
if (view instanceof CardView){
CardView cardView = (CardView) view;
radius = (int) cardView.getRadius();
}
final int x = rect.left;
final int y = rect.top;
final int width = rect.width();
final int height = rect.height();
addTargetClick(rect,view);
coachMarkOverlay.setBackgroundResource(backgroundColorResource);
coachMarkOverlay.addRect(x,y,width,height, radius,overlayPadding,isCircleMark);
coachMarkOverlay.postInvalidate();
}
private void addCircleRect(View view){
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
int cx = rect.centerX();
int cy = rect.centerY();
int radius = (int) (Math.max(rect.width(), rect.height()) / 2f * CIRCLE_ADDITIONAL_RADIUS_RATIO);
addTargetClick(rect,view);
coachMarkOverlay.setBackgroundResource(backgroundColorResource);
coachMarkOverlay.addRect(cx,cy,0,0, radius, overlayPadding,isCircleMark);
coachMarkOverlay.postInvalidate();
}
private void relocationTooltip(View view, @TooltipAlignment int alignment){
View tooltipView = tooltipBinding.getRoot();
final int tooltipHeight = tooltipView.getHeight();
final int defaultPadding = 10;
final int padding = ViewUtils.dpToPx(overlayPadding + defaultPadding);
final int triangleHeight = ViewUtils.dpToPx(12);
if (view!=null) {
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
final int y = rect.top;
final int height = rect.height();
float result;
if (alignment == TARGET_BOTTOM) {
tooltipBinding.triangleTop.setVisibility(View.VISIBLE);
result = y + height + padding;
result = (float) (result + (isCircleMark? defaultPadding * CIRCLE_ADDITIONAL_RADIUS_RATIO : 0));
tooltipView.setY(result);
} else if (alignment == TARGET_TOP){
tooltipBinding.triangleBottom.setVisibility(View.VISIBLE);
result = y - tooltipHeight - padding - triangleHeight;
result = (float) (result - (isCircleMark? defaultPadding * CIRCLE_ADDITIONAL_RADIUS_RATIO : 0));
tooltipView.setY(result);
}
}
if (alignment == ROOT_TOP){
tooltipView.setY(0);
} else if (alignment == ROOT_BOTTOM){
tooltipView.setY(getScreenHeight() - tooltipHeight);
}
tooltipView.postInvalidate();
}
private void pointerTooltipAlignment(View view, @PointerTooltipAlignment int pointerTooltipAlignment){
if (view==null)return;
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
final int x = rect.left;
final int width = rect.width();
final int margin = ViewUtils.dpToPx(overlayPadding + 16);
final int triangleWidth = ViewUtils.dpToPx(24);
int result = 0;
if (pointerTooltipAlignment == POINTER_LEFT){
result = x + margin;
} else if (pointerTooltipAlignment == POINTER_MIDDLE){
result = x + (width/2);
} else if (pointerTooltipAlignment == POINTER_RIGHT){
result = x + (width-margin);
}
View triangle;
if (this.tooltipAlignment == TARGET_TOP){
triangle = tooltipBinding.triangleBottom;
triangle.setX(result - (triangleWidth/2));
} else if (this.tooltipAlignment == TARGET_BOTTOM){
triangle = tooltipBinding.triangleTop;
triangle.setX(result - (triangleWidth/2));
}
tooltipBinding.getRoot().postInvalidate();
}
private int getStatusBarHeight() {
int result = 0;
Context context = activity;
Resources resources = context.getResources();
int resourceId = resources.getIdentifier("status_bar_height", "dimen", "android");
if (resourceId > 0) {
result = resources.getDimensionPixelSize(resourceId);
}
return result;
}
private int getScreenHeight(){
int result;
Context context = activity;
Resources resources = context.getResources();
result = resources.getDisplayMetrics().heightPixels;
return result;
}
private int getScreenWidth(){
int result;
Context context = activity;
Resources resources = context.getResources();
result = resources.getDisplayMetrics().heightPixels;
return result;
}
private void addTargetClick(Rect rect, View view){
View clickableView = new View(view.getContext());
int width = rect.width();
int height = rect.height();
int x = rect.left - (width - rect.width()) / 2;
int y = rect.top - (height - rect.height()) / 2;
clickableView.setLayoutParams(new ViewGroup.MarginLayoutParams(width, height));
ViewCompat.setTranslationY(clickableView, y);
ViewCompat.setTranslationX(clickableView, x);
clickableView.setOnClickListener(targetOnClick);
clickableView.setBackgroundColor(Color.TRANSPARENT);
container.addView(clickableView);
container.invalidate();
}
private void animateTooltipShow(){
tooltipBinding.getRoot().setVisibility(tooltipViewModel.isEmptyValue() ? View.GONE : View.VISIBLE);
if (!tooltipViewModel.isEmptyValue() && tooltipShowAnimation!=null) {
tooltipBinding.getRoot().startAnimation(tooltipShowAnimation);
}
}
private void animateTooltipDismiss(){
if (!tooltipViewModel.isEmptyValue() && tooltipDismissAnimation!=null) {
tooltipDismissAnimation.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationStart(Animation animation) {
}
@Override
public void onAnimationEnd(Animation animation) {
if (tooltipBinding.getRoot().getVisibility() == View.VISIBLE) {
tooltipBinding.getRoot().setVisibility(View.GONE);
}
}
@Override
public void onAnimationRepeat(Animation animation) {
}
});
tooltipBinding.getRoot().startAnimation(tooltipDismissAnimation);
}
}
public CoachMark show(){
container.setVisibility(View.VISIBLE);
animateTooltipShow();
ViewCompat.animate(container)
.alpha(1f)
.setDuration(animDuration)
.start();
isShow = true;
container.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (dismissible) dismiss();
}
});
return this;
}
public void dismiss(){
dismiss(null);
}
public void dismiss(final Runnable afterDismiss) {
if (onDismissListener !=null) onDismissListener.run();
animateTooltipDismiss();
ViewCompat.animate(container)
.alpha(0f)
.setDuration(animDuration)
.setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
super.onAnimationEnd(view);
if (container.getAlpha()== 0f) {
container.setVisibility(View.GONE);
isShow = false;
if (afterDismiss!=null)afterDismiss.run();
}
}
}).start();
}
public void destroy(){
destroy(null);
}
public void destroy(final Runnable afterDestroy){
if (onDismissListener !=null) onDismissListener.run();
animateTooltipDismiss();
ViewCompat.animate(container)
.alpha(0f)
.setDuration(animDuration)
.setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
super.onAnimationEnd(view);
if (container.getAlpha()== 0f) {
ViewParent parent = view.getParent();
if (parent instanceof ViewGroup) {
((ViewGroup) parent).removeView(view);
}
isShow = false;
if (afterDestroy != null) afterDestroy.run();
}
}
}).start();
}
public boolean isShow(){
return isShow;
}
public static class Builder{
private Activity activity;
private View target;
private int markerPadding;
private OnClick onClickTarget;
private String title;
private String description;
private String actionName;
private OnClick onClickAction;
private boolean isTransparentBackground;
private boolean dismissible;
private boolean isCircleMark;
private int textColor;
private int tooltipBagroundColor;
private int tooltipAlignment;
private int pointerTooltipAlignment;
private Runnable onDismissListener;
private Animation tooltipShowAnimation;
private Animation tooltipDismissAnimation;
public interface OnClick{
void onClick(CoachMark coachMark);
}
public Builder(Activity activity) {
this.activity = activity;
this.tooltipAlignment = CoachMark.ROOT_BOTTOM; // default
this.pointerTooltipAlignment = CoachMark.POINTER_MIDDLE; // default
this.pointerTooltipAlignment = CoachMark.POINTER_MIDDLE;
}
@Nullable
private View findViewById(@IdRes int viewId) {
View view;
view = activity.findViewById(viewId);
return view;
}
public Builder setTarget(View target){
this.target = target;
return this;
}
public Builder setTarget(@IdRes int itemViewId){
this.target = findViewById(itemViewId);
return this;
}
public Builder setCircleMark() {
isCircleMark = true;
return this;
}
public Builder setMarkerPadding(int dp) {
this.markerPadding = dp;
return this;
}
public Builder setTransparentBackground(){
this.isTransparentBackground = true;
return this;
}
public Builder setOnClickTarget(OnClick onClickTarget){
this.onClickTarget = onClickTarget;
return this;
}
public Builder setDismissible(){
this.dismissible = true;
return this;
}
public Builder setTitle(String title){
this.title = title;
return this;
}
public Builder setDescription(String description){
this.description = description;
public Builder setTooltipPointer(@PointerTooltipAlignment int pointerTooltipAlignment) {
this.pointerTooltipAlignment = pointerTooltipAlignment;
return this;
}
public Builder setAction(String actionName, OnClick onClickAction){
this.actionName = actionName;
this.onClickAction = onClickAction;
return this;
}
public Builder setTooltipAlignment(@TooltipAlignment int tooltipAlignment) {
this.tooltipAlignment = tooltipAlignment;
return this;
}
public Builder setPointerTooltipAlignment(@PointerTooltipAlignment int pointerTooltipAlignment) {
this.pointerTooltipAlignment = pointerTooltipAlignment;
return this;
}
public Builder setTooltipBackgroundColor(int colorResource) {
this.tooltipBagroundColor = colorResource;
public Builder setOnDismissListener(Runnable onDismiss) {
this.onDismissListener = onDismiss;
return this;
}
public Builder setTooltipShowAnimation(Animation tooltipShowAnimation) {
this.tooltipShowAnimation = tooltipShowAnimation;
return this;
}
public Builder setTooltipDismissAnimation(Animation tooltipDismissAnimation) {
this.tooltipDismissAnimation = tooltipDismissAnimation;
return this;
}
public Builder setTextColor(int colorResource) {
this.textColor = colorResource;
return this;
}
public Builder setOnClickAction(OnClick onClickAction) {
this.onClickAction = onClickAction;
return this;
}
public CoachMark build(){
final CoachMark coachMark = new CoachMark(this);
coachMark.setTargetOnClick(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (onClickTarget!=null) {
onClickTarget.onClick(coachMark);
} else {
coachMark.destroy();
}
}
});
return coachMark;
}
public CoachMark show(){
return build().show();
}
}
}
| coachmark/src/main/java/io/github/rizafu/coachmark/CoachMark.java | package io.github.rizafu.coachmark;
import android.app.Activity;
import android.content.Context;
import android.content.res.Resources;
import android.databinding.DataBindingUtil;
import android.graphics.Color;
import android.graphics.Rect;
import android.support.annotation.IdRes;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorListenerAdapter;
import android.support.v7.widget.CardView;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.view.ViewTreeObserver;
import android.view.Window;
import android.view.animation.Animation;
import android.widget.FrameLayout;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import io.github.rizafu.coachmark.databinding.WidgetCoachTooltipBinding;
/**
* Created by RizaFu on 11/7/16.
*/
public class CoachMark {
private int animDuration;
private Activity activity;
private FrameLayout container;
private @TooltipAlignment int tooltipAlignment;
private int overlayPadding;
private int backgroundColorResource;
private boolean dismissible;
private boolean isCircleMark;
private Builder builder;
private boolean isShow;
private WidgetCoachTooltipViewModel tooltipViewModel;
private WidgetCoachTooltipBinding tooltipBinding;
private CoachMarkOverlay coachMarkOverlay;
private View targetView;
private View.OnClickListener targetOnClick;
private Runnable onDismissListener;
private Animation tooltipShowAnimation;
private Animation tooltipDismissAnimation;
private final double CIRCLE_ADDITIONAL_RADIUS_RATIO = 1.5f;
public static final int ROOT_TOP = 1;
public static final int ROOT_BOTTOM = 2;
public static final int TARGET_TOP = 3;
public static final int TARGET_BOTTOM = 4;
@IntDef({ROOT_TOP,ROOT_BOTTOM,TARGET_TOP,TARGET_BOTTOM})
@Retention(RetentionPolicy.SOURCE)
public @interface TooltipAlignment {}
public static final int POINTER_RIGHT = 1;
public static final int POINTER_MIDDLE = 2;
public static final int POINTER_LEFT = 3;
@IntDef({POINTER_RIGHT,POINTER_MIDDLE,POINTER_LEFT})
@Retention(RetentionPolicy.SOURCE)
public @interface PointerTooltipAlignment{}
private CoachMark(final Builder builder) {
this.builder = builder;
this.activity = builder.activity;
this.container = new FrameLayout(activity);
this.tooltipViewModel = new WidgetCoachTooltipViewModel();
this.tooltipBinding = DataBindingUtil.inflate(activity.getLayoutInflater(),R.layout.widget_coach_tooltip,container,false);
this.tooltipBinding.setViewModel(this.tooltipViewModel);
this.backgroundColorResource = builder.isTransparentBackground ? android.R.color.transparent : R.color.background;
this.isCircleMark = builder.isCircleMark;
this.targetView = builder.target;
this.overlayPadding = builder.markerPadding;
this.dismissible = builder.dismissible;
this.tooltipAlignment = builder.tooltipAlignment;
tooltipViewModel.title.set(builder.title);
tooltipViewModel.description.set(builder.description);
tooltipViewModel.actionName.set(builder.actionName);
tooltipViewModel.backgroundColor.set(builder.tooltipBagroundColor);
tooltipViewModel.textColorResource.set(builder.textColor);
this.onDismissListener = builder.onDismissListener;
this.tooltipShowAnimation = builder.tooltipShowAnimation;
this.tooltipDismissAnimation = builder.tooltipDismissAnimation;
Window window = activity.getWindow();
if (window != null) {
ViewGroup decorView = (ViewGroup) window.getDecorView();
if (decorView != null) {
ViewGroup content = (ViewGroup) decorView.findViewById(android.R.id.content);
if (content != null) {
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
container.setBackgroundColor(Color.TRANSPARENT);
decorView.addView(container, layoutParams);
coachMarkOverlay = new CoachMarkOverlay(activity);
coachMarkOverlay.setBackgroundResource(backgroundColorResource);
container.addView(coachMarkOverlay,layoutParams);
container.addView(tooltipBinding.getRoot());
}
}
}
animDuration = container.getResources().getInteger(android.R.integer.config_longAnimTime);
container.setClickable(true);
container.setVisibility(View.GONE);
ViewCompat.setAlpha(container,0f);
addTarget();
setTooltipAlignment(tooltipAlignment, builder.pointerTooltipAlignment);
}
private void setAcitonClick(View.OnClickListener acitonClick){
tooltipViewModel.actionClick = acitonClick;
}
private void setTargetOnClick(View.OnClickListener targetOnClick) {
this.targetOnClick = targetOnClick;
}
private void setTooltipAlignment(@TooltipAlignment final int tooltipAlignment, @PointerTooltipAlignment final int pointerTooltipAlignment){
this.tooltipAlignment = tooltipAlignment;
tooltipBinding.getRoot().getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
relocationTooltip(targetView, tooltipAlignment);
pointerTooltipAlignment(targetView, pointerTooltipAlignment);
tooltipBinding.getRoot().getViewTreeObserver().removeOnPreDrawListener(this);
return false;
}
});
}
private void addTarget(){
if (targetView!=null)
targetView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
if (isCircleMark){
addCircleRect(targetView);
} else {
addRoundRect(targetView);
}
targetView.getViewTreeObserver().removeOnPreDrawListener(this);
return false;
}
});
}
private void addRoundRect(View view){
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
int radius = 5;
if (view instanceof CardView){
CardView cardView = (CardView) view;
radius = (int) cardView.getRadius();
}
final int x = rect.left;
final int y = rect.top;
final int width = rect.width();
final int height = rect.height();
addTargetClick(rect,view);
coachMarkOverlay.setBackgroundResource(backgroundColorResource);
coachMarkOverlay.addRect(x,y,width,height, radius,overlayPadding,isCircleMark);
coachMarkOverlay.postInvalidate();
}
private void addCircleRect(View view){
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
int cx = rect.centerX();
int cy = rect.centerY();
int radius = (int) (Math.max(rect.width(), rect.height()) / 2f * CIRCLE_ADDITIONAL_RADIUS_RATIO);
addTargetClick(rect,view);
coachMarkOverlay.setBackgroundResource(backgroundColorResource);
coachMarkOverlay.addRect(cx,cy,0,0, radius, overlayPadding,isCircleMark);
coachMarkOverlay.postInvalidate();
}
private void relocationTooltip(View view, @TooltipAlignment int alignment){
View tooltipView = tooltipBinding.getRoot();
final int tooltipHeight = tooltipView.getHeight();
final int defaultPadding = 10;
final int padding = ViewUtils.dpToPx(overlayPadding + defaultPadding);
final int triangleHeight = ViewUtils.dpToPx(12);
if (view!=null) {
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
final int y = rect.top;
final int height = rect.height();
float result;
if (alignment == TARGET_BOTTOM) {
tooltipBinding.triangleTop.setVisibility(View.VISIBLE);
result = y + height + padding;
result = (float) (result + (isCircleMark? defaultPadding * CIRCLE_ADDITIONAL_RADIUS_RATIO : 0));
tooltipView.setY(result);
} else if (alignment == TARGET_TOP){
tooltipBinding.triangleBottom.setVisibility(View.VISIBLE);
result = y - tooltipHeight - padding - triangleHeight;
result = (float) (result - (isCircleMark? defaultPadding * CIRCLE_ADDITIONAL_RADIUS_RATIO : 0));
tooltipView.setY(result);
}
}
if (alignment == ROOT_TOP){
tooltipView.setY(0);
} else if (alignment == ROOT_BOTTOM){
tooltipView.setY(getScreenHeight() - tooltipHeight);
}
tooltipView.postInvalidate();
}
private void pointerTooltipAlignment(View view, @PointerTooltipAlignment int pointerTooltipAlignment){
if (view==null)return;
Rect rect = new Rect();
view.getGlobalVisibleRect(rect);
final int x = rect.left;
final int width = rect.width();
final int margin = ViewUtils.dpToPx(overlayPadding + 16);
final int triangleWidth = ViewUtils.dpToPx(24);
int result = 0;
if (pointerTooltipAlignment == POINTER_LEFT){
result = x + margin;
} else if (pointerTooltipAlignment == POINTER_MIDDLE){
result = x + (width/2);
} else if (pointerTooltipAlignment == POINTER_RIGHT){
result = x + (width-margin);
}
View triangle;
if (this.tooltipAlignment == TARGET_TOP){
triangle = tooltipBinding.triangleBottom;
triangle.setX(result - (triangleWidth/2));
} else if (this.tooltipAlignment == TARGET_BOTTOM){
triangle = tooltipBinding.triangleTop;
triangle.setX(result - (triangleWidth/2));
}
tooltipBinding.getRoot().postInvalidate();
}
private int getStatusBarHeight() {
int result = 0;
Context context = activity;
Resources resources = context.getResources();
int resourceId = resources.getIdentifier("status_bar_height", "dimen", "android");
if (resourceId > 0) {
result = resources.getDimensionPixelSize(resourceId);
}
return result;
}
private int getScreenHeight(){
int result;
Context context = activity;
Resources resources = context.getResources();
result = resources.getDisplayMetrics().heightPixels;
return result;
}
private int getScreenWidth(){
int result;
Context context = activity;
Resources resources = context.getResources();
result = resources.getDisplayMetrics().heightPixels;
return result;
}
private void addTargetClick(Rect rect, View view){
View clickableView = new View(view.getContext());
int width = rect.width();
int height = rect.height();
int x = rect.left - (width - rect.width()) / 2;
int y = rect.top - (height - rect.height()) / 2;
clickableView.setLayoutParams(new ViewGroup.MarginLayoutParams(width, height));
ViewCompat.setTranslationY(clickableView, y);
ViewCompat.setTranslationX(clickableView, x);
clickableView.setOnClickListener(targetOnClick);
clickableView.setBackgroundColor(Color.TRANSPARENT);
container.addView(clickableView);
container.invalidate();
}
private void animateTooltipShow(){
tooltipBinding.getRoot().setVisibility(tooltipViewModel.isEmptyValue() ? View.GONE : View.VISIBLE);
if (!tooltipViewModel.isEmptyValue() && tooltipShowAnimation!=null) {
tooltipBinding.getRoot().startAnimation(tooltipShowAnimation);
}
}
private void animateTooltipDismiss(){
if (!tooltipViewModel.isEmptyValue() && tooltipDismissAnimation!=null) {
tooltipDismissAnimation.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationStart(Animation animation) {
}
@Override
public void onAnimationEnd(Animation animation) {
if (tooltipBinding.getRoot().getVisibility() == View.VISIBLE) {
tooltipBinding.getRoot().setVisibility(View.GONE);
}
}
@Override
public void onAnimationRepeat(Animation animation) {
}
});
tooltipBinding.getRoot().startAnimation(tooltipDismissAnimation);
}
}
public CoachMark show(){
container.setVisibility(View.VISIBLE);
animateTooltipShow();
ViewCompat.animate(container)
.alpha(1f)
.setDuration(animDuration)
.start();
isShow = true;
container.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (dismissible) dismiss();
}
});
return this;
}
public void dismiss(){
dismiss(null);
}
public void dismiss(final Runnable afterDismiss) {
if (onDismissListener !=null) onDismissListener.run();
animateTooltipDismiss();
ViewCompat.animate(container)
.alpha(0f)
.setDuration(animDuration)
.setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
super.onAnimationEnd(view);
if (container.getAlpha()== 0f) {
container.setVisibility(View.GONE);
isShow = false;
if (afterDismiss!=null)afterDismiss.run();
}
}
}).start();
}
public void destroy(){
destroy(null);
}
public void destroy(final Runnable afterDestroy){
if (onDismissListener !=null) onDismissListener.run();
animateTooltipDismiss();
ViewCompat.animate(container)
.alpha(0f)
.setDuration(animDuration)
.setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
super.onAnimationEnd(view);
if (container.getAlpha()== 0f) {
ViewParent parent = view.getParent();
if (parent instanceof ViewGroup) {
((ViewGroup) parent).removeView(view);
}
isShow = false;
if (afterDestroy != null) afterDestroy.run();
}
}
}).start();
}
public boolean isShow(){
return isShow;
}
public static class Builder{
private Activity activity;
private View target;
private int markerPadding;
private OnClick onClickTarget;
private String title;
private String description;
private String actionName;
private OnClick onClickAction;
private boolean isTransparentBackground;
private boolean dismissible;
private boolean isCircleMark;
private int textColor;
private int tooltipBagroundColor;
private int tooltipAlignment;
private int pointerTooltipAlignment;
private Runnable onDismissListener;
private Animation tooltipShowAnimation;
private Animation tooltipDismissAnimation;
public interface OnClick{
void onClick(CoachMark coachMark);
}
public Builder(Activity activity) {
this.activity = activity;
this.tooltipAlignment = CoachMark.ROOT_BOTTOM; // default
this.pointerTooltipAlignment = CoachMark.POINTER_MIDDLE; // default
}
@Nullable
private View findViewById(@IdRes int viewId) {
View view;
view = activity.findViewById(viewId);
return view;
}
public Builder setTarget(View target){
this.target = target;
return this;
}
public Builder setTarget(@IdRes int itemViewId){
this.target = findViewById(itemViewId);
return this;
}
public Builder setCircleMark() {
isCircleMark = true;
return this;
}
public Builder setMarkerPadding(int dp) {
this.markerPadding = dp;
return this;
}
public Builder setTransparentBackground(){
this.isTransparentBackground = true;
return this;
}
public Builder setOnClickTarget(OnClick onClickTarget){
this.onClickTarget = onClickTarget;
return this;
}
public Builder setDismissible(){
this.dismissible = true;
return this;
}
public Builder setTitle(String title){
this.title = title;
return this;
}
public Builder setDescription(String description){
this.description = description;
return this;
}
public Builder setAction(String actionName, OnClick onClickAction){
this.actionName = actionName;
this.onClickAction = onClickAction;
return this;
}
public Builder setTooltipAlignment(@TooltipAlignment int tooltipAlignment) {
this.tooltipAlignment = tooltipAlignment;
return this;
}
public Builder setPointerTooltipAlignment(@PointerTooltipAlignment int pointerTooltipAlignment) {
this.pointerTooltipAlignment = pointerTooltipAlignment;
return this;
}
public Builder setTooltipBackgroundColor(int colorResource) {
this.tooltipBagroundColor = colorResource;
public Builder setOnDismissListener(Runnable onDismiss) {
this.onDismissListener = onDismiss;
return this;
}
public Builder setTooltipShowAnimation(Animation tooltipShowAnimation) {
this.tooltipShowAnimation = tooltipShowAnimation;
return this;
}
public Builder setTooltipDismissAnimation(Animation tooltipDismissAnimation) {
this.tooltipDismissAnimation = tooltipDismissAnimation;
return this;
}
public Builder setTextColor(int colorResource) {
this.textColor = colorResource;
return this;
}
public Builder setOnClickAction(OnClick onClickAction) {
this.onClickAction = onClickAction;
return this;
}
public CoachMark build(){
final CoachMark coachMark = new CoachMark(this);
coachMark.setTargetOnClick(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (onClickTarget!=null) {
onClickTarget.onClick(coachMark);
} else {
coachMark.destroy();
}
}
});
return coachMark;
}
public CoachMark show(){
return build().show();
}
}
}
| Update pointer alignment constant
| coachmark/src/main/java/io/github/rizafu/coachmark/CoachMark.java | Update pointer alignment constant | <ide><path>oachmark/src/main/java/io/github/rizafu/coachmark/CoachMark.java
<ide> private FrameLayout container;
<ide>
<ide> private @TooltipAlignment int tooltipAlignment;
<add> private @PointerTooltipAlignment int tooltipPointerAlignment;
<ide> private int overlayPadding;
<ide> private int backgroundColorResource;
<ide> private boolean dismissible;
<ide> public static final int POINTER_RIGHT = 1;
<ide> public static final int POINTER_MIDDLE = 2;
<ide> public static final int POINTER_LEFT = 3;
<del>
<del> @IntDef({POINTER_RIGHT,POINTER_MIDDLE,POINTER_LEFT})
<add> public static final int POINTER_GONE = 4;
<add>
<add> @IntDef({POINTER_RIGHT,POINTER_MIDDLE,POINTER_LEFT,POINTER_GONE})
<ide> @Retention(RetentionPolicy.SOURCE)
<ide> public @interface PointerTooltipAlignment{}
<ide>
<ide>
<ide> private void setTooltipAlignment(@TooltipAlignment final int tooltipAlignment, @PointerTooltipAlignment final int pointerTooltipAlignment){
<ide> this.tooltipAlignment = tooltipAlignment;
<add> this.tooltipPointerAlignment = pointerTooltipAlignment;
<ide> tooltipBinding.getRoot().getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
<ide> @Override
<ide> public boolean onPreDraw() {
<ide> this.activity = activity;
<ide> this.tooltipAlignment = CoachMark.ROOT_BOTTOM; // default
<ide> this.pointerTooltipAlignment = CoachMark.POINTER_MIDDLE; // default
<add> this.pointerTooltipAlignment = CoachMark.POINTER_MIDDLE;
<ide> }
<ide>
<ide> @Nullable
<ide>
<ide> public Builder setDescription(String description){
<ide> this.description = description;
<add> public Builder setTooltipPointer(@PointerTooltipAlignment int pointerTooltipAlignment) {
<add> this.pointerTooltipAlignment = pointerTooltipAlignment;
<ide> return this;
<ide> }
<ide> |
|
Java | lgpl-2.1 | c020e1289ca0b0d512b9dd5999746d485a009f3f | 0 | RemiKoutcherawy/exist,jensopetersen/exist,windauer/exist,jessealama/exist,jensopetersen/exist,eXist-db/exist,olvidalo/exist,jensopetersen/exist,patczar/exist,shabanovd/exist,lcahlander/exist,wshager/exist,wshager/exist,joewiz/exist,RemiKoutcherawy/exist,hungerburg/exist,joewiz/exist,zwobit/exist,zwobit/exist,lcahlander/exist,MjAbuz/exist,RemiKoutcherawy/exist,wolfgangmm/exist,joewiz/exist,wolfgangmm/exist,shabanovd/exist,joewiz/exist,shabanovd/exist,adamretter/exist,lcahlander/exist,joewiz/exist,kohsah/exist,hungerburg/exist,opax/exist,windauer/exist,jessealama/exist,ambs/exist,olvidalo/exist,wolfgangmm/exist,dizzzz/exist,eXist-db/exist,dizzzz/exist,ljo/exist,dizzzz/exist,jensopetersen/exist,ambs/exist,zwobit/exist,windauer/exist,eXist-db/exist,adamretter/exist,jensopetersen/exist,patczar/exist,kohsah/exist,eXist-db/exist,lcahlander/exist,kohsah/exist,ambs/exist,ambs/exist,zwobit/exist,jessealama/exist,wshager/exist,hungerburg/exist,adamretter/exist,MjAbuz/exist,olvidalo/exist,patczar/exist,wolfgangmm/exist,ambs/exist,dizzzz/exist,adamretter/exist,hungerburg/exist,MjAbuz/exist,MjAbuz/exist,olvidalo/exist,wshager/exist,kohsah/exist,patczar/exist,patczar/exist,opax/exist,windauer/exist,wshager/exist,adamretter/exist,opax/exist,lcahlander/exist,joewiz/exist,shabanovd/exist,opax/exist,MjAbuz/exist,jessealama/exist,ambs/exist,dizzzz/exist,adamretter/exist,eXist-db/exist,windauer/exist,wshager/exist,jessealama/exist,RemiKoutcherawy/exist,olvidalo/exist,ljo/exist,MjAbuz/exist,zwobit/exist,shabanovd/exist,ljo/exist,eXist-db/exist,RemiKoutcherawy/exist,lcahlander/exist,jensopetersen/exist,kohsah/exist,RemiKoutcherawy/exist,hungerburg/exist,ljo/exist,jessealama/exist,kohsah/exist,shabanovd/exist,ljo/exist,windauer/exist,opax/exist,ljo/exist,dizzzz/exist,zwobit/exist,patczar/exist,wolfgangmm/exist,wolfgangmm/exist |
/*
* eXist Native XML Database
* Copyright (C) 2001-06, Wolfgang M. Meier ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* $Id$
*/
package org.exist.xquery;
import org.apache.log4j.Logger;
import org.exist.dom.QName;
import org.exist.memtree.MemTreeBuilder;
import org.exist.memtree.NodeImpl;
import org.exist.util.XMLChar;
import org.exist.xquery.util.ExpressionDumper;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.QNameValue;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.StringValue;
import org.xml.sax.helpers.AttributesImpl;
import java.util.Iterator;
/**
* Constructor for element nodes. This class handles both, direct and dynamic
* element constructors.
*
* @author wolf
*/
public class ElementConstructor extends NodeConstructor {
private Expression qnameExpr;
private PathExpr content = null;
private AttributeConstructor attributes[] = null;
private QName namespaceDecls[] = null;
protected final static Logger LOG =
Logger.getLogger(ElementConstructor.class);
public ElementConstructor(XQueryContext context) {
super(context);
}
public ElementConstructor(XQueryContext context, String qname) {
super(context);
this.qnameExpr = new LiteralValue(context, new StringValue(qname));
}
public void setContent(PathExpr path) {
this.content = path;
this.content.setUseStaticContext(true);
}
public PathExpr getContent() {
return content;
}
public void setNameExpr(Expression expr) {
//Deferred atomization (we could have a QNameValue)
//this.qnameExpr = new Atomize(context, expr);
this.qnameExpr = expr;
}
public Expression getNameExpr() {
return qnameExpr;
}
public void addAttribute(AttributeConstructor attr) throws XPathException {
if(attr.isNamespaceDeclaration()) {
if(attr.getQName().equals("xmlns"))
addNamespaceDecl("", attr.getLiteralValue());
else
addNamespaceDecl(QName.extractLocalName(attr.getQName()), attr.getLiteralValue());
} else if(attributes == null) {
attributes = new AttributeConstructor[1];
attributes[0] = attr;
} else {
AttributeConstructor natts[] = new AttributeConstructor[attributes.length + 1];
System.arraycopy(attributes, 0, natts, 0, attributes.length);
natts[attributes.length] = attr;
attributes = natts;
}
}
public void addNamespaceDecl(String name, String uri) throws XPathException {
QName qn = new QName(name, uri, "xmlns");
if (name.equalsIgnoreCase("xml")) {
throw new XPathException(this, "XQST0070 : can not redefine '" + qn + "'");
}
if (name.equalsIgnoreCase("xmlns")) {
throw new XPathException(this, "XQST0070 : can not redefine '" + qn + "'");
}
if (name.length()!=0 && uri.trim().length()==0) {
throw new XPathException(this, "XQST0085 : cannot undeclare a prefix "+name+".");
}
addNamespaceDecl(qn);
}
private void addNamespaceDecl(QName qn) throws XPathException {
if(namespaceDecls == null) {
namespaceDecls = new QName[1];
namespaceDecls[0] = qn;
} else {
for(int i = 0; i < namespaceDecls.length; i++) {
if (qn.equals(namespaceDecls[i]))
throw new XPathException(this, "XQST0071 : duplicate definition for '" + qn + "'");
}
QName decls[] = new QName[namespaceDecls.length + 1];
System.arraycopy(namespaceDecls, 0, decls, 0, namespaceDecls.length);
decls[namespaceDecls.length] = qn;
namespaceDecls = decls;
}
//context.inScopeNamespaces.put(qn.getLocalName(), qn.getNamespaceURI());
}
/* (non-Javadoc)
* @see org.exist.xquery.Expression#analyze(org.exist.xquery.AnalyzeContextInfo)
*/
public void analyze(AnalyzeContextInfo contextInfo) throws XPathException {
super.analyze(contextInfo);
context.pushInScopeNamespaces();
// declare namespaces
if(namespaceDecls != null) {
for(int i = 0; i < namespaceDecls.length; i++) {
if ("".equals(namespaceDecls[i].getNamespaceURI())) {
// TODO: the specs are unclear here: should we throw XQST0085 or not?
context.inScopeNamespaces.remove(namespaceDecls[i].getLocalName());
// if (context.inScopeNamespaces.remove(namespaceDecls[i].getLocalName()) == null)
// throw new XPathException(getASTNode(), "XQST0085 : can not undefine '" + namespaceDecls[i] + "'");
} else
context.declareInScopeNamespace(namespaceDecls[i].getLocalName(), namespaceDecls[i].getNamespaceURI());
}
}
AnalyzeContextInfo newContextInfo = new AnalyzeContextInfo(contextInfo);
newContextInfo.setParent(this);
newContextInfo.addFlag(IN_NODE_CONSTRUCTOR);
qnameExpr.analyze(newContextInfo);
if(attributes != null) {
for(int i = 0; i < attributes.length; i++) {
attributes[i].analyze(newContextInfo);
}
}
if(content != null)
content.analyze(newContextInfo);
context.popInScopeNamespaces();
}
/* (non-Javadoc)
* @see org.exist.xquery.Expression#eval(org.exist.xquery.StaticContext, org.exist.dom.DocumentSet, org.exist.xquery.value.Sequence, org.exist.xquery.value.Item)
*/
public Sequence eval(
Sequence contextSequence,
Item contextItem)
throws XPathException {
context.expressionStart(this);
context.pushInScopeNamespaces();
if (newDocumentContext)
context.pushDocumentContext();
try {
MemTreeBuilder builder = context.getDocumentBuilder();
// declare namespaces
if(namespaceDecls != null) {
for(int i = 0; i < namespaceDecls.length; i++) {
//if ("".equals(namespaceDecls[i].getNamespaceURI())) {
// TODO: the specs are unclear here: should we throw XQST0085 or not?
// context.inScopeNamespaces.remove(namespaceDecls[i].getLocalName());
// if (context.inScopeNamespaces.remove(namespaceDecls[i].getLocalName()) == null)
// throw new XPathException(getAS TNode(), "XQST0085 : can not undefine '" + namespaceDecls[i] + "'");
//} else
context.declareInScopeNamespace(namespaceDecls[i].getLocalName(), namespaceDecls[i].getNamespaceURI());
}
}
// process attributes
AttributesImpl attrs = new AttributesImpl();
if(attributes != null) {
AttributeConstructor constructor;
Sequence attrValues;
QName attrQName;
// first, search for xmlns attributes and declare in-scope namespaces
for (int i = 0; i < attributes.length; i++) {
constructor = attributes[i];
if(constructor.isNamespaceDeclaration()) {
int p = constructor.getQName().indexOf(':');
if(p == Constants.STRING_NOT_FOUND)
context.declareInScopeNamespace("", constructor.getLiteralValue());
else {
String prefix = constructor.getQName().substring(p + 1);
context.declareInScopeNamespace(prefix, constructor.getLiteralValue());
}
}
}
// process the remaining attributes
for (int i = 0; i < attributes.length; i++) {
context.proceed(this, builder);
constructor = attributes[i];
attrValues = constructor.eval(contextSequence, contextItem);
attrQName = QName.parse(context, constructor.getQName(), "");
if (attrs.getIndex(attrQName.getNamespaceURI(), attrQName.getLocalName()) != -1)
throw new XPathException(this, "XQST0040 '" + attrQName.getLocalName() + "' is a duplicate attribute name");
attrs.addAttribute(attrQName.getNamespaceURI(), attrQName.getLocalName(),
attrQName.getStringValue(), "CDATA", attrValues.getStringValue());
}
}
context.proceed(this, builder);
// create the element
Sequence qnameSeq = qnameExpr.eval(contextSequence, contextItem);
if(!qnameSeq.hasOne())
throw new XPathException(this, "Type error: the node name should evaluate to a single item");
Item qnitem = qnameSeq.itemAt(0);
QName qn;
if (qnitem instanceof QNameValue) {
qn = ((QNameValue)qnitem).getQName();
} else {
//Do we have the same result than Atomize there ? -pb
qn = QName.parse(context, qnitem.getStringValue());
//Use the default namespace if specified
/*
if (qn.getPrefix() == null && context.inScopeNamespaces.get("xmlns") != null) {
qn.setNamespaceURI((String)context.inScopeNamespaces.get("xmlns"));
}
*/
if (qn.getPrefix() == null && context.getInScopeNamespace("") != null) {
qn.setNamespaceURI(context.getInScopeNamespace(""));
}
}
//Not in the specs but... makes sense
if(!XMLChar.isValidName(qn.getLocalName()))
throw new XPathException(this, "XPTY0004 '" + qnitem.getStringValue() + "' is not a valid element name");
// add namespace declaration nodes
int nodeNr = builder.startElement(qn, attrs);
if(namespaceDecls != null) {
for(int i = 0; i < namespaceDecls.length; i++) {
builder.namespaceNode(namespaceDecls[i]);
}
}
// do we need to add a namespace declaration for the current node?
if (qn.needsNamespaceDecl()) {
if (context.getInScopePrefix(qn.getNamespaceURI()) == null) {
String prefix = qn.getPrefix();
if (prefix == null || prefix.length() == 0)
prefix = "";
context.declareInScopeNamespace(prefix, qn.getNamespaceURI());
builder.namespaceNode(new QName(prefix, qn.getNamespaceURI(), "xmlns"));
}
} else if ((qn.getPrefix() == null || qn.getPrefix().length() == 0) &&
context.getInheritedNamespace("") != null) {
context.declareInScopeNamespace("", "");
builder.namespaceNode(new QName("", "", "xmlns"));
}
// process element contents
if(content != null) {
content.eval(contextSequence, contextItem);
}
builder.endElement();
NodeImpl node = builder.getDocument().getNode(nodeNr);
return node;
} finally {
context.popInScopeNamespaces();
if (newDocumentContext)
context.popDocumentContext();
context.expressionEnd(this);
}
}
/**
 * Writes a readable rendering of this constructor to the dumper, in the
 * form "element {name} { attributes / content }".
 */
public void dump(ExpressionDumper dumper) {
    dumper.display("element ");
    //TODO : remove curly braces if Qname
    dumper.display("{");
    qnameExpr.dump(dumper);
    dumper.display("} ");
    dumper.display("{");
    dumper.startIndent();
    if (attributes != null) {
        for (int idx = 0; idx < attributes.length; idx++) {
            if (idx > 0) {
                dumper.nl();
            }
            attributes[idx].dump(dumper);
        }
        dumper.endIndent();
        dumper.startIndent();
    }
    if (content != null) {
        Iterator it = content.steps.iterator();
        while (it.hasNext()) {
            ((Expression) it.next()).dump(dumper);
            if (it.hasNext()) {
                dumper.nl();
            }
        }
        dumper.endIndent().nl();
    }
    dumper.display("} ");
}
/**
 * Returns an XQuery-like textual rendering:
 * "element {name} { attributes content }".
 */
public String toString() {
    StringBuilder buf = new StringBuilder("element ");
    //TODO : remove curly braces if Qname
    buf.append("{");
    buf.append(qnameExpr.toString());
    buf.append("} ");
    buf.append("{");
    if (attributes != null) {
        for (int idx = 0; idx < attributes.length; idx++) {
            if (idx > 0) {
                buf.append(" ");
            }
            buf.append(attributes[idx].toString());
        }
    }
    if (content != null) {
        Iterator it = content.steps.iterator();
        while (it.hasNext()) {
            buf.append(((Expression) it.next()).toString());
            if (it.hasNext()) {
                buf.append(" ");
            }
        }
    }
    buf.append("} ");
    return buf.toString();
}
/**
 * Forwards the primary axis to the content path; the constructor itself
 * keeps no axis of its own.
 */
public void setPrimaryAxis(int axis) {
    if (content == null) {
        return;
    }
    content.setPrimaryAxis(axis);
}
/**
 * Returns the primary axis of the content path, or
 * {@code Constants.UNKNOWN_AXIS} when the element has no content.
 */
public int getPrimaryAxis() {
    // Bug fix: the previous code evaluated content.getPrimaryAxis() but
    // discarded the result, so this method always returned UNKNOWN_AXIS.
    if (content != null) {
        return content.getPrimaryAxis();
    }
    return Constants.UNKNOWN_AXIS;
}
/**
 * Resets cached evaluation state on this constructor and all of its
 * sub-expressions (name expression, content path, attributes).
 */
public void resetState(boolean postOptimization) {
    super.resetState(postOptimization);
    qnameExpr.resetState(postOptimization);
    if (content != null) {
        content.resetState(postOptimization);
    }
    if (attributes == null) {
        return;
    }
    for (int idx = 0; idx < attributes.length; idx++) {
        attributes[idx].resetState(postOptimization);
    }
}
/** Visitor dispatch: routes to the element-constructor callback. */
public void accept(ExpressionVisitor visitor) {
    visitor.visitElementConstructor(this);
}
}
| src/org/exist/xquery/ElementConstructor.java |
/*
* eXist Native XML Database
* Copyright (C) 2001-06, Wolfgang M. Meier ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* $Id$
*/
package org.exist.xquery;
import org.apache.log4j.Logger;
import org.exist.dom.QName;
import org.exist.memtree.MemTreeBuilder;
import org.exist.memtree.NodeImpl;
import org.exist.util.XMLChar;
import org.exist.xquery.util.ExpressionDumper;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.QNameValue;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.StringValue;
import org.xml.sax.helpers.AttributesImpl;
import java.util.Iterator;
/**
* Constructor for element nodes. This class handles both, direct and dynamic
* element constructors.
*
* @author wolf
*/
public class ElementConstructor extends NodeConstructor {
private Expression qnameExpr;
private PathExpr content = null;
private AttributeConstructor attributes[] = null;
private QName namespaceDecls[] = null;
protected final static Logger LOG =
Logger.getLogger(ElementConstructor.class);
/**
 * Creates a computed element constructor; the name and content are
 * supplied later via {@link #setNameExpr(Expression)} and
 * {@link #setContent(PathExpr)}.
 */
public ElementConstructor(XQueryContext context) {
    super(context);
}

/**
 * Creates a direct element constructor for a literal element name.
 *
 * @param qname the literal QName string of the element to construct
 */
public ElementConstructor(XQueryContext context, String qname) {
    super(context);
    this.qnameExpr = new LiteralValue(context, new StringValue(qname));
}

/**
 * Sets the expression producing the element's children; the path is
 * flagged to evaluate against the static context.
 */
public void setContent(PathExpr path) {
    this.content = path;
    this.content.setUseStaticContext(true);
}

/** @return the expression producing the element's children, or null */
public PathExpr getContent() {
    return content;
}

/**
 * Sets the expression that yields the element's name at evaluation time
 * (computed constructors).
 */
public void setNameExpr(Expression expr) {
    //Deferred atomization (we could have a QNameValue)
    //this.qnameExpr = new Atomize(context, expr);
    this.qnameExpr = expr;
}

/** @return the expression yielding the element's name */
public Expression getNameExpr() {
    return qnameExpr;
}
/**
 * Adds an attribute constructor. Namespace declarations
 * ("xmlns" / "xmlns:prefix") are routed to the namespace-declaration list
 * instead of being stored as ordinary attributes.
 */
public void addAttribute(AttributeConstructor attr) throws XPathException {
    if (attr.isNamespaceDeclaration()) {
        String qname = attr.getQName();
        if (qname.equals("xmlns")) {
            addNamespaceDecl("", attr.getLiteralValue());
        } else {
            addNamespaceDecl(QName.extractLocalName(qname), attr.getLiteralValue());
        }
        return;
    }
    // Grow the attribute array by one and append.
    int oldLength = (attributes == null) ? 0 : attributes.length;
    AttributeConstructor[] grown = new AttributeConstructor[oldLength + 1];
    if (oldLength > 0) {
        System.arraycopy(attributes, 0, grown, 0, oldLength);
    }
    grown[oldLength] = attr;
    attributes = grown;
}
/**
 * Registers a namespace declaration (xmlns / xmlns:prefix) on this element,
 * validating it against the XQuery static rules.
 *
 * @param name the declared prefix ("" for the default namespace)
 * @param uri  the namespace URI
 * @throws XPathException XQST0070 when "xml"/"xmlns" is redefined,
 *                        XQST0085 when a non-default prefix is undeclared
 */
public void addNamespaceDecl(String name, String uri) throws XPathException {
    QName qn = new QName(name, uri, "xmlns");

    if (name.equalsIgnoreCase("xml")) {
        throw new XPathException(this, "XQST0070 : can not redefine '" + qn + "'");
    }
    if (name.equalsIgnoreCase("xmlns")) {
        throw new XPathException(this, "XQST0070 : can not redefine '" + qn + "'");
    }
    if (name.length()!=0 && uri.trim().length()==0) {
        throw new XPathException(this, "XQST0085 : cannot undeclare a prefix "+name+".");
    }

    addNamespaceDecl(qn);
}

/**
 * Appends the declaration to the namespaceDecls array, rejecting
 * duplicates (XQST0071). The array is grown by one per call.
 */
private void addNamespaceDecl(QName qn) throws XPathException {
    if(namespaceDecls == null) {
        namespaceDecls = new QName[1];
        namespaceDecls[0] = qn;
    } else {
        for(int i = 0; i < namespaceDecls.length; i++) {
            if (qn.equals(namespaceDecls[i]))
                throw new XPathException(this, "XQST0071 : duplicate definition for '" + qn + "'");
        }
        QName decls[] = new QName[namespaceDecls.length + 1];
        System.arraycopy(namespaceDecls, 0, decls, 0, namespaceDecls.length);
        decls[namespaceDecls.length] = qn;
        namespaceDecls = decls;
    }
    //context.inScopeNamespaces.put(qn.getLocalName(), qn.getNamespaceURI());
}
/**
 * Static analysis: pushes a fresh in-scope namespace frame, applies this
 * element's namespace declarations, then analyzes the name expression,
 * attributes and content with the IN_NODE_CONSTRUCTOR flag set, and
 * finally pops the frame again.
 */
public void analyze(AnalyzeContextInfo contextInfo) throws XPathException {
    super.analyze(contextInfo);
    context.pushInScopeNamespaces();
    // declare namespaces
    if(namespaceDecls != null) {
        for(int i = 0; i < namespaceDecls.length; i++) {
            if ("".equals(namespaceDecls[i].getNamespaceURI())) {
                // TODO: the specs are unclear here: should we throw XQST0085 or not?
                context.inScopeNamespaces.remove(namespaceDecls[i].getLocalName());
            } else
                context.declareInScopeNamespace(namespaceDecls[i].getLocalName(), namespaceDecls[i].getNamespaceURI());
        }
    }
    AnalyzeContextInfo newContextInfo = new AnalyzeContextInfo(contextInfo);
    newContextInfo.setParent(this);
    newContextInfo.addFlag(IN_NODE_CONSTRUCTOR);
    qnameExpr.analyze(newContextInfo);
    if(attributes != null) {
        for(int i = 0; i < attributes.length; i++) {
            attributes[i].analyze(newContextInfo);
        }
    }
    if(content != null)
        content.analyze(newContextInfo);
    // restore the namespace frame pushed above
    context.popInScopeNamespaces();
}
/**
 * Evaluates the constructor and builds the element in the in-memory DOM.
 *
 * Evaluation order:
 *  1. push a namespace frame and declare this element's xmlns declarations;
 *  2. evaluate attribute constructors (xmlns attributes first, so prefixes
 *     used by other attributes are in scope), rejecting duplicate attribute
 *     names (XQST0040);
 *  3. resolve the element name, literal or computed (XPTY0004 on invalid
 *     names);
 *  4. start the element, emit namespace nodes, evaluate the content and
 *     close the element.
 *
 * @return the constructed element node
 * @throws XPathException on any of the static/dynamic errors noted above
 */
public Sequence eval(
    Sequence contextSequence,
    Item contextItem)
    throws XPathException {
    context.expressionStart(this);
    context.pushInScopeNamespaces();
    if (newDocumentContext)
        context.pushDocumentContext();
    try {
        MemTreeBuilder builder = context.getDocumentBuilder();
        // declare namespaces
        if(namespaceDecls != null) {
            for(int i = 0; i < namespaceDecls.length; i++) {
                context.declareInScopeNamespace(namespaceDecls[i].getLocalName(), namespaceDecls[i].getNamespaceURI());
            }
        }
        // process attributes
        AttributesImpl attrs = new AttributesImpl();
        if(attributes != null) {
            AttributeConstructor constructor;
            Sequence attrValues;
            QName attrQName;
            // first, search for xmlns attributes and declare in-scope namespaces
            for (int i = 0; i < attributes.length; i++) {
                constructor = attributes[i];
                if(constructor.isNamespaceDeclaration()) {
                    int p = constructor.getQName().indexOf(':');
                    if(p == Constants.STRING_NOT_FOUND)
                        context.declareInScopeNamespace("", constructor.getLiteralValue());
                    else {
                        String prefix = constructor.getQName().substring(p + 1);
                        context.declareInScopeNamespace(prefix, constructor.getLiteralValue());
                    }
                }
            }
            // process the remaining attributes
            for (int i = 0; i < attributes.length; i++) {
                context.proceed(this, builder);
                constructor = attributes[i];
                attrValues = constructor.eval(contextSequence, contextItem);
                attrQName = QName.parse(context, constructor.getQName(), "");
                if (attrs.getIndex(attrQName.getNamespaceURI(), attrQName.getLocalName()) != -1)
                    throw new XPathException(this, "XQST0040 '" + attrQName.getLocalName() + "' is a duplicate attribute name");
                attrs.addAttribute(attrQName.getNamespaceURI(), attrQName.getLocalName(),
                    attrQName.getStringValue(), "CDATA", attrValues.getStringValue());
            }
        }
        context.proceed(this, builder);
        // create the element
        Sequence qnameSeq = qnameExpr.eval(contextSequence, contextItem);
        if(!qnameSeq.hasOne())
            throw new XPathException(this, "Type error: the node name should evaluate to a single item");
        Item qnitem = qnameSeq.itemAt(0);
        QName qn;
        if (qnitem instanceof QNameValue) {
            qn = ((QNameValue)qnitem).getQName();
        } else {
            //Do we have the same result than Atomize there ? -pb
            qn = QName.parse(context, qnitem.getStringValue());
            //Use the default namespace if specified
            if (qn.getPrefix() == null && context.getInScopeNamespace("") != null) {
                qn.setNamespaceURI(context.getInScopeNamespace(""));
            }
        }
        //Not in the specs but... makes sense
        if(!XMLChar.isValidName(qn.getLocalName()))
            throw new XPathException(this, "XPTY0004 '" + qnitem.getStringValue() + "' is not a valid element name");
        // add namespace declaration nodes
        int nodeNr = builder.startElement(qn, attrs);
        if(namespaceDecls != null) {
            for(int i = 0; i < namespaceDecls.length; i++) {
                builder.namespaceNode(namespaceDecls[i]);
            }
        }
        // do we need to add a namespace declaration for the current node?
        if (qn.needsNamespaceDecl()) {
            if (context.getInScopePrefix(qn.getNamespaceURI()) == null) {
                String prefix = qn.getPrefix();
                if (prefix == null || prefix.length() == 0)
                    prefix = "";
                context.declareInScopeNamespace(prefix, qn.getNamespaceURI());
                builder.namespaceNode(new QName(prefix, qn.getNamespaceURI(), "xmlns"));
            }
        } else if ((qn.getPrefix() == null || qn.getPrefix().length() == 0) &&
                context.getInheritedNamespace("") != null) {
            // Bug fix: an element in no namespace constructed below an
            // inherited default namespace must undeclare it (xmlns=""),
            // e.g. <users xmlns=""/> inside <div xmlns="...">. Without this
            // the child would wrongly inherit the default namespace.
            context.declareInScopeNamespace("", "");
            builder.namespaceNode(new QName("", "", "xmlns"));
        }
        // process element contents
        if(content != null) {
            content.eval(contextSequence, contextItem);
        }
        builder.endElement();
        NodeImpl node = builder.getDocument().getNode(nodeNr);
        return node;
    } finally {
        context.popInScopeNamespaces();
        if (newDocumentContext)
            context.popDocumentContext();
        context.expressionEnd(this);
    }
}
/**
 * Writes a readable rendering of this constructor to the dumper, in the
 * form "element {name} { attributes / content }".
 */
public void dump(ExpressionDumper dumper) {
    dumper.display("element ");
    //TODO : remove curly braces if Qname
    dumper.display("{");
    qnameExpr.dump(dumper);
    dumper.display("} ");
    dumper.display("{");
    dumper.startIndent();
    if(attributes != null) {
        AttributeConstructor attr;
        for(int i = 0; i < attributes.length; i++) {
            if(i > 0)
                dumper.nl();
            attr = attributes[i];
            attr.dump(dumper);
        }
        dumper.endIndent();
        dumper.startIndent();
    }
    if(content != null) {
        for(Iterator i = content.steps.iterator(); i.hasNext(); ) {
            Expression expr = (Expression) i.next();
            expr.dump(dumper);
            if(i.hasNext())
                dumper.nl();
        }
        dumper.endIndent().nl();
    }
    dumper.display("} ");
}
/**
 * Returns an XQuery-like textual rendering:
 * "element {name} { attributes content }".
 */
public String toString() {
    StringBuilder result = new StringBuilder();
    result.append("element ");
    //TODO : remove curly braces if Qname
    result.append("{");
    result.append(qnameExpr.toString());
    result.append("} ");
    result.append("{");
    if(attributes != null) {
        AttributeConstructor attr;
        for(int i = 0; i < attributes.length; i++) {
            if(i > 0)
                result.append(" ");
            attr = attributes[i];
            result.append(attr.toString());
        }
    }
    if(content != null) {
        for(Iterator i = content.steps.iterator(); i.hasNext(); ) {
            Expression expr = (Expression) i.next();
            result.append(expr.toString());
            if(i.hasNext())
                result.append(" ");
        }
    }
    result.append("} ");
    return result.toString();
}
/**
 * Forwards the primary axis to the content path; the constructor itself
 * keeps no axis of its own.
 */
public void setPrimaryAxis(int axis) {
    if(content != null)
        content.setPrimaryAxis(axis);
}
/**
 * Returns the primary axis of the content path, or
 * {@code Constants.UNKNOWN_AXIS} when the element has no content.
 */
public int getPrimaryAxis() {
    // Bug fix: the previous code evaluated content.getPrimaryAxis() but
    // discarded the result, so this method always returned UNKNOWN_AXIS.
    if (content != null) {
        return content.getPrimaryAxis();
    }
    return Constants.UNKNOWN_AXIS;
}
/**
 * Resets cached evaluation state on this constructor and all of its
 * sub-expressions (name expression, content path, attributes).
 */
public void resetState(boolean postOptimization) {
    super.resetState(postOptimization);
    qnameExpr.resetState(postOptimization);
    if(content != null)
        content.resetState(postOptimization);
    if(attributes != null)
        for(int i = 0; i < attributes.length; i++) {
            Expression next = attributes[i];
            next.resetState(postOptimization);
        }
}
/** Visitor dispatch: routes to the element-constructor callback. */
public void accept(ExpressionVisitor visitor) {
    visitor.visitElementConstructor(this);
}
}
| [bugfix] default namespace issue: when constructing an element with an explicit, empty namespace declaration, any existing default namespace declaration in the inherited context needs to be cleared. For example:
<div xmlns="http://www.w3.org/1999/xhtml">
<users xmlns=""/>
</div>
Port of rev 10485
svn path=/trunk/eXist/; revision=10486
| src/org/exist/xquery/ElementConstructor.java | [bugfix] default namespace issue: when constructing an element with an explicit, empty namespace declaration, any existing default namespace declaration in the inherited context needs to be cleared. For example: | <ide><path>rc/org/exist/xquery/ElementConstructor.java
<ide> context.declareInScopeNamespace(prefix, qn.getNamespaceURI());
<ide> builder.namespaceNode(new QName(prefix, qn.getNamespaceURI(), "xmlns"));
<ide> }
<add> } else if ((qn.getPrefix() == null || qn.getPrefix().length() == 0) &&
<add> context.getInheritedNamespace("") != null) {
<add> context.declareInScopeNamespace("", "");
<add> builder.namespaceNode(new QName("", "", "xmlns"));
<ide> }
<ide> // process element contents
<ide> if(content != null) { |
|
JavaScript | mit | 9069b023b8494997cb61c56e649d7a34abb099d6 | 0 | imsun/gh-feed,imsun/gh-feed | const app = require('koa')()
const router = require('koa-router')()
const request = require('co-request')
const RSS = require('rss')
const { DOMParser, XMLSerializer } = require('xmldom')
const parser = new DOMParser({
errorHandler: {}
})
const serlializer = new XMLSerializer()
const HOST_URL = 'https://github.com'
router
.get('/', function *() {
this.body = 'test'
})
.get('/:owner/:repo', function *() {
const { owner, repo } = this.params
const src = `${HOST_URL}/${owner}/${repo}/issues`
const feed = new RSS({
title: `${owner}/${repo}`,
generator: 'gh-feed',
feed_url: this.url,
site_url: src,
image_url: `${HOST_URL}/${owner}.png`,
ttl: 60
})
const res = yield request(src)
const doc = parser.parseFromString(res.body, 'text/html')
const issues = Array.from(doc.getElementsByTagName('ul')[1].getElementsByTagName('li'))
.map(li => {
const issue = {}
Array.from(li.getElementsByTagName('a')).forEach(a => {
const className = a.getAttribute('class')
if (/h4/.test(className)) {
issue.title = a.textContent.trim()
issue.url = HOST_URL + a.getAttribute('href')
} else if (/label/.test(className)) {
issue.categories = issue.categories || []
issue.categories.push(a.textContent.trim())
} else if (className === 'tooltipped tooltipped-s muted-link') {
issue.author = a.textContent.trim()
}
})
issue.date = li.getElementsByTagName('relative-time')[0].getAttribute('datetime')
return issue
})
yield issues.map(issue => {
return function *() {
const res = yield request(issue.url)
const body = res.body.replace(/<!-- '"` --><!-- <\/textarea><\/xmp> --><\/option><\/form>/g, '')
const id = body.match(/id="(issue-.*?)"/)[1]
const doc = parser.parseFromString(body, 'text/html')
const contentElement = doc
.getElementById(id)
.childNodes[3]
.childNodes[1]
.childNodes
issue.description = serlializer.serializeToString(contentElement)
feed.item(issue)
}
})
this.set('Content-Type', 'application/rss+xml; charset=utf-8')
this.body = feed.xml()
})
app.use(router.routes())
app.listen(process.env.PORT || 3000) | index.js | const app = require('koa')()
const router = require('koa-router')()
const request = require('co-request')
const RSS = require('rss')
const { DOMParser, XMLSerializer } = require('xmldom')
const parser = new DOMParser({
errorHandler: {}
})
const serlializer = new XMLSerializer()
const HOST_URL = 'https://github.com'
router
.get('/', function *() {
this.body = 'test'
})
.get('/:owner/:repo', function *() {
const { owner, repo } = this.params
const src = `${HOST_URL}/${owner}/${repo}/issues`
const feed = new RSS({
title: `${owner}/${repo}`,
generator: 'gh-feed',
feed_url: this.url,
site_url: src,
ttl: 60
})
const res = yield request(src)
const doc = parser.parseFromString(res.body, 'text/html')
const issues = Array.from(doc.getElementsByTagName('ul')[1].getElementsByTagName('li'))
.map(li => {
const issue = {}
Array.from(li.getElementsByTagName('a')).forEach(a => {
const className = a.getAttribute('class')
if (/h4/.test(className)) {
issue.title = a.textContent.trim()
issue.url = HOST_URL + a.getAttribute('href')
} else if (/label/.test(className)) {
issue.categories = issue.categories || []
issue.categories.push(a.textContent.trim())
} else if (className === 'tooltipped tooltipped-s muted-link') {
issue.author = a.textContent.trim()
}
})
issue.date = li.getElementsByTagName('relative-time')[0].getAttribute('datetime')
return issue
})
yield issues.map(issue => {
return function *() {
const res = yield request(issue.url)
const body = res.body.replace(/<!-- '"` --><!-- <\/textarea><\/xmp> --><\/option><\/form>/g, '')
const id = body.match(/id="(issue-.*?)"/)[1]
const doc = parser.parseFromString(body, 'text/html')
const contentElement = doc
.getElementById(id)
.childNodes[3]
.childNodes[1]
.childNodes
issue.description = serlializer.serializeToString(contentElement)
feed.item(issue)
}
})
this.set('Content-Type', 'application/rss+xml; charset=utf-8')
this.body = feed.xml()
})
app.use(router.routes())
app.listen(process.env.PORT || 3000) | add feed avatar
| index.js | add feed avatar | <ide><path>ndex.js
<ide> generator: 'gh-feed',
<ide> feed_url: this.url,
<ide> site_url: src,
<add> image_url: `${HOST_URL}/${owner}.png`,
<ide> ttl: 60
<ide> })
<ide> |
|
Java | lgpl-2.1 | 8c7594be51bc6053a30a111e8ac3c15acd280bce | 0 | ChestShop-authors/ChestShop-3,ChestShop-authors/ChestShop-3 | package com.Acrobot.ChestShop.Listeners.Player;
import com.Acrobot.Breeze.Utils.NameUtil;
import com.Acrobot.ChestShop.ChestShop;
import com.Acrobot.ChestShop.UUIDs.NameManager;
import com.Acrobot.ChestShop.UUIDs.PlayerDTO;
import org.bukkit.Bukkit;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerJoinEvent;
import java.util.UUID;
/**
* @author Acrobot
*/
public class PlayerConnect implements Listener {
    /**
     * On player join, stores the player's name/UUID pair asynchronously so
     * other ChestShop components can resolve names later without blocking
     * the main server thread.
     */
    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public static void onPlayerConnect(final PlayerJoinEvent event) {
        final PlayerDTO playerDTO = new PlayerDTO(event.getPlayer());

        Bukkit.getScheduler().runTaskAsynchronously(ChestShop.getPlugin(), new Runnable() {
            @Override
            public void run() {
                String playerName = NameUtil.stripUsername(playerDTO.getName());
                // NOTE(review): result is unused — leftover from removed
                // kick logic? Confirm getUUID has no needed side effects
                // (e.g. cache warming) before deleting.
                UUID uuid = NameManager.getUUID(playerName);

                NameManager.storeUsername(playerDTO);
            }
        });
    }
}
| src/main/java/com/Acrobot/ChestShop/Listeners/Player/PlayerConnect.java | package com.Acrobot.ChestShop.Listeners.Player;
import com.Acrobot.Breeze.Utils.NameUtil;
import com.Acrobot.ChestShop.ChestShop;
import com.Acrobot.ChestShop.UUIDs.NameManager;
import com.Acrobot.ChestShop.UUIDs.PlayerDTO;
import org.bukkit.Bukkit;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerJoinEvent;
import java.util.UUID;
/**
* @author Acrobot
*/
public class PlayerConnect implements Listener {
    /**
     * On player join, resolves the stored UUID for the joining player's
     * username off the main thread. If the name was previously used by a
     * different UUID, the player is kicked (scheduled back onto the main
     * thread, as required by the Bukkit API); otherwise the name/UUID pair
     * is stored.
     */
    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public static void onPlayerConnect(final PlayerJoinEvent event) {
        final PlayerDTO playerDTO = new PlayerDTO(event.getPlayer());

        Bukkit.getScheduler().runTaskAsynchronously(ChestShop.getPlugin(), new Runnable() {
            @Override
            public void run() {
                String playerName = NameUtil.stripUsername(playerDTO.getName());
                UUID uuid = NameManager.getUUID(playerName);

                if (uuid != null && !playerDTO.getUniqueId().equals(uuid)) {
                    // Kicking must happen on the main server thread.
                    Bukkit.getScheduler().runTask(ChestShop.getPlugin(), new Runnable() {
                        @Override
                        public void run() {
                            Bukkit.getPlayer(playerDTO.getUniqueId()).kickPlayer("[ChestShop]" +
                                    "Unfortunately, this username was already used by " +
                                    "another player.");

                        }
                    });
                }

                NameManager.storeUsername(playerDTO);
            }
        });
    }
}
| Revert PlayerKick Behavior
Partial revert of 'Do not let people with already used usernames connect to the server' commit.
See comments here https://github.com/Acrobot/ChestShop-3/commit/b0369d8e69c9035c29756756a3794c5277001882#commitcomment-14185778 and here.
https://github.com/Acrobot/ChestShop-3/commit/c169df2265ef5aa679e47bd4f1acbd64f40cb19e#commitcomment-12846919
I personally believe ChestShops should not dictate whether a player is allowed to connect to the server or not. I personally have had issues with ChestShops kicking legitimate players from the server in online mode. | src/main/java/com/Acrobot/ChestShop/Listeners/Player/PlayerConnect.java | Revert PlayerKick Behavior | <ide><path>rc/main/java/com/Acrobot/ChestShop/Listeners/Player/PlayerConnect.java
<ide> String playerName = NameUtil.stripUsername(playerDTO.getName());
<ide> UUID uuid = NameManager.getUUID(playerName);
<ide>
<del> if (uuid != null && !playerDTO.getUniqueId().equals(uuid)) {
<del> Bukkit.getScheduler().runTask(ChestShop.getPlugin(), new Runnable() {
<del> @Override
<del> public void run() {
<del> Bukkit.getPlayer(playerDTO.getUniqueId()).kickPlayer("[ChestShop]" +
<del> "Unfortunately, this username was already used by " +
<del> "another player.");
<del>
<del> }
<del> });
<del> }
<del>
<ide> NameManager.storeUsername(playerDTO);
<ide> }
<ide> }); |
|
Java | apache-2.0 | 3842b9a085db28e3e471d24236a8325af9921444 | 0 | apache/pdfbox,kalaspuffar/pdfbox,kalaspuffar/pdfbox,apache/pdfbox | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.examples.pdfa;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentCatalog;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDMetadata;
import org.apache.pdfbox.pdmodel.edit.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDTrueTypeFont;
import org.apache.pdfbox.pdmodel.graphics.color.PDOutputIntent;
import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.schema.PDFAIdentificationSchema;
import org.apache.xmpbox.type.BadFieldValueException;
import org.apache.xmpbox.xml.XmpSerializationException;
import org.apache.xmpbox.xml.XmpSerializer;
import javax.xml.transform.TransformerException;
/**
* This is an example that creates a simple PDF/A document.
*
*/
public class CreatePDFA
{
/**
 * Default constructor; the example is driven via {@link #doIt}.
 */
public CreatePDFA()
{
    super();
}
/**
 * Create a simple PDF/A document.
 *
 * This example is based on the HelloWorld example.
 *
 * As it is a simple case, to conform to the PDF/A norm the following are
 * added:
 * - the font used in the document (embedded, as PDF/A requires)
 * - a light XMP block with only the PDF/A identification schema (the only
 *   mandatory one)
 * - an output intent
 *
 * @param file The file to write the PDF to.
 * @param message The message to write in the file.
 * @param fontfile Path of the TrueType font file to embed.
 *
 * @throws IOException if the font cannot be loaded or the document cannot
 *         be written
 * @throws TransformerException if the XMP metadata cannot be serialized
 */
public void doIt( final String file, final String message, final String fontfile)
        throws IOException, TransformerException
{
    // the document
    PDDocument doc = null;
    try
    {
        doc = new PDDocument();

        PDPage page = new PDPage();
        doc.addPage( page );

        // load the font as this needs to be embedded as part of PDF/A
        PDFont font = PDTrueTypeFont.loadTTF(doc, new File(fontfile));

        // create a page with the message where needed
        PDPageContentStream contentStream = new PDPageContentStream(doc, page);
        contentStream.beginText();
        contentStream.setFont( font, 12 );
        contentStream.newLineAtOffset(100, 700);
        contentStream.showText(message);
        contentStream.endText();
        // Bug fix: the former trailing saveGraphicsState() call wrote an
        // unbalanced "q" operator (no matching restore) into the content
        // stream, which is invalid and can break PDF/A validation.
        contentStream.close();

        PDDocumentCatalog cat = doc.getDocumentCatalog();
        PDMetadata metadata = new PDMetadata(doc);
        cat.setMetadata(metadata);

        // mark the document as PDF/A-1b via the XMP identification schema
        XMPMetadata xmp = XMPMetadata.createXMPMetadata();
        try
        {
            PDFAIdentificationSchema pdfaid = xmp.createAndAddPFAIdentificationSchema();
            pdfaid.setConformance("B");
            pdfaid.setPart(1);
            pdfaid.setAboutAsSimple("PDFBox PDFA sample");
            XmpSerializer serializer = new XmpSerializer();
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            serializer.serialize(xmp, baos, true);
            metadata.importXMPMetadata( baos.toByteArray() );
        }
        catch(BadFieldValueException badFieldexception)
        {
            // can't happen here, as the provided value is valid
        }
        catch(XmpSerializationException xmpException)
        {
            System.err.println(xmpException.getMessage());
        }

        // attach an sRGB output intent so device-dependent colors are defined
        InputStream colorProfile = CreatePDFA.class.getResourceAsStream("/org/apache/pdfbox/resources/pdfa/sRGB Color Space Profile.icm");
        PDOutputIntent oi = new PDOutputIntent(doc, colorProfile);
        oi.setInfo("sRGB IEC61966-2.1");
        oi.setOutputCondition("sRGB IEC61966-2.1");
        oi.setOutputConditionIdentifier("sRGB IEC61966-2.1");
        oi.setRegistryName("http://www.color.org");
        cat.addOutputIntent(oi);

        doc.save( file );
    }
    finally
    {
        if( doc != null )
        {
            doc.close();
        }
    }
}
/**
 * This will create a hello world PDF/A document.
 * <br />
 * see usage() for commandline
 *
 * @param args Command line arguments: output file, message, TTF font path.
 * @throws IOException if the document cannot be created or saved
 * @throws TransformerException if the XMP metadata cannot be serialized
 */
public static void main(String[] args) throws IOException, TransformerException
{
    CreatePDFA app = new CreatePDFA();
    if( args.length != 3 )
    {
        app.usage();
    }
    else
    {
        app.doIt( args[0], args[1], args[2] );
    }
}
/**
 * Prints a usage message (to stderr) telling how to run this example.
 */
private void usage()
{
    System.err.println( "usage: " + this.getClass().getName() + " <output-file> <Message> <ttf-file>" );
}
}
| examples/src/main/java/org/apache/pdfbox/examples/pdfa/CreatePDFA.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.examples.pdfa;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentCatalog;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDMetadata;
import org.apache.pdfbox.pdmodel.edit.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDTrueTypeFont;
import org.apache.pdfbox.pdmodel.graphics.color.PDOutputIntent;
import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.schema.PDFAIdentificationSchema;
import org.apache.xmpbox.type.BadFieldValueException;
import org.apache.xmpbox.xml.XmpSerializationException;
import org.apache.xmpbox.xml.XmpSerializer;
import javax.xml.transform.TransformerException;
/**
* This is an example that creates a simple PDF/A document.
*
*/
public class CreatePDFA
{
/**
 * Default constructor; the example is driven via {@link #doIt}.
 */
public CreatePDFA()
{
    super();
}
/**
 * Create a simple PDF/A document.
 *
 * This example is based on the HelloWorld example.
 *
 * As it is a simple case, to conform to the PDF/A norm the following are
 * added:
 * - the font used in the document (embedded, as PDF/A requires)
 * - a light XMP block with only the PDF/A identification schema (the only
 *   mandatory one)
 * - an output intent
 *
 * @param file The file to write the PDF to.
 * @param message The message to write in the file.
 * @param fontfile Path of the TrueType font file to embed.
 *
 * @throws IOException if the font cannot be loaded or the document cannot
 *         be written
 * @throws TransformerException if the XMP metadata cannot be serialized
 */
public void doIt( final String file, final String message, final String fontfile)
        throws IOException, TransformerException
{
    // the document
    PDDocument doc = null;
    try
    {
        doc = new PDDocument();

        PDPage page = new PDPage();
        doc.addPage( page );

        // load the font as this needs to be embedded as part of PDF/A
        PDFont font = PDTrueTypeFont.loadTTF(doc, new File(fontfile));

        // create a page with the message where needed
        PDPageContentStream contentStream = new PDPageContentStream(doc, page);
        contentStream.beginText();
        contentStream.setFont( font, 12 );
        contentStream.newLineAtOffset(100, 700);
        contentStream.showText(message);
        contentStream.endText();
        // Bug fix: the former trailing saveGraphicsState() call wrote an
        // unbalanced "q" operator (no matching restore) into the content
        // stream, which is invalid and can break PDF/A validation.
        contentStream.close();

        PDDocumentCatalog cat = doc.getDocumentCatalog();
        PDMetadata metadata = new PDMetadata(doc);
        cat.setMetadata(metadata);

        // mark the document as PDF/A-1b via the XMP identification schema
        XMPMetadata xmp = XMPMetadata.createXMPMetadata();
        try
        {
            PDFAIdentificationSchema pdfaid = xmp.createAndAddPFAIdentificationSchema();
            pdfaid.setConformance("B");
            pdfaid.setPart(1);
            pdfaid.setAboutAsSimple("PDFBox PDFA sample");
            XmpSerializer serializer = new XmpSerializer();
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            serializer.serialize(xmp, baos, false);
            metadata.importXMPMetadata( baos.toByteArray() );
        }
        catch(BadFieldValueException badFieldexception)
        {
            // can't happen here, as the provided value is valid
        }
        catch(XmpSerializationException xmpException)
        {
            System.err.println(xmpException.getMessage());
        }

        // attach an sRGB output intent so device-dependent colors are defined
        InputStream colorProfile = CreatePDFA.class.getResourceAsStream("/org/apache/pdfbox/resources/pdfa/sRGB Color Space Profile.icm");
        PDOutputIntent oi = new PDOutputIntent(doc, colorProfile);
        oi.setInfo("sRGB IEC61966-2.1");
        oi.setOutputCondition("sRGB IEC61966-2.1");
        oi.setOutputConditionIdentifier("sRGB IEC61966-2.1");
        oi.setRegistryName("http://www.color.org");
        cat.addOutputIntent(oi);

        doc.save( file );
    }
    finally
    {
        if( doc != null )
        {
            doc.close();
        }
    }
}
/**
* This will create a hello world PDF/A document.
* <br />
* see usage() for commandline
*
* @param args Command line arguments.
*/
public static void main(String[] args) throws IOException, TransformerException
{
CreatePDFA app = new CreatePDFA();
if( args.length != 3 )
{
app.usage();
}
else
{
app.doIt( args[0], args[1], args[2] );
}
}
/**
* This will print out a message telling how to use this example.
*/
private void usage()
{
System.err.println( "usage: " + this.getClass().getName() + " <output-file> <Message> <ttf-file>" );
}
}
| PDFBOX-2621: enable xpacket serialization
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1653645 13f79535-47bb-0310-9956-ffa450edef68
| examples/src/main/java/org/apache/pdfbox/examples/pdfa/CreatePDFA.java | PDFBOX-2621: enable xpacket serialization | <ide><path>xamples/src/main/java/org/apache/pdfbox/examples/pdfa/CreatePDFA.java
<ide> pdfaid.setAboutAsSimple("PDFBox PDFA sample");
<ide> XmpSerializer serializer = new XmpSerializer();
<ide> ByteArrayOutputStream baos = new ByteArrayOutputStream();
<del> serializer.serialize(xmp, baos, false);
<add> serializer.serialize(xmp, baos, true);
<ide> metadata.importXMPMetadata( baos.toByteArray() );
<ide> }
<ide> catch(BadFieldValueException badFieldexception) |
|
Java | mit | a865a52562a7f29ff065e3c86e67013cf606f9d5 | 0 | singh88/jenkins,aduprat/jenkins,ChrisA89/jenkins,aquarellian/jenkins,aquarellian/jenkins,daniel-beck/jenkins,msrb/jenkins,vvv444/jenkins,petermarcoen/jenkins,shahharsh/jenkins,viqueen/jenkins,NehemiahMi/jenkins,mdonohue/jenkins,arunsingh/jenkins,kohsuke/hudson,SenolOzer/jenkins,my7seven/jenkins,Jochen-A-Fuerbacher/jenkins,dariver/jenkins,deadmoose/jenkins,vjuranek/jenkins,goldchang/jenkins,msrb/jenkins,ydubreuil/jenkins,jpederzolli/jenkins-1,godfath3r/jenkins,jhoblitt/jenkins,albers/jenkins,tfennelly/jenkins,morficus/jenkins,svanoort/jenkins,rashmikanta-1984/jenkins,viqueen/jenkins,wangyikai/jenkins,khmarbaise/jenkins,FTG-003/jenkins,MichaelPranovich/jenkins_sc,ajshastri/jenkins,rlugojr/jenkins,duzifang/my-jenkins,mrooney/jenkins,h4ck3rm1k3/jenkins,ns163/jenkins,SebastienGllmt/jenkins,keyurpatankar/hudson,gorcz/jenkins,nandan4/Jenkins,NehemiahMi/jenkins,seanlin816/jenkins,Vlatombe/jenkins,christ66/jenkins,hplatou/jenkins,paulwellnerbou/jenkins,mcanthony/jenkins,292388900/jenkins,Krasnyanskiy/jenkins,iqstack/jenkins,pselle/jenkins,paulmillar/jenkins,bpzhang/jenkins,jzjzjzj/jenkins,akshayabd/jenkins,noikiy/jenkins,CodeShane/jenkins,andresrc/jenkins,brunocvcunha/jenkins,6WIND/jenkins,stephenc/jenkins,tastatur/jenkins,goldchang/jenkins,hashar/jenkins,aquarellian/jenkins,bkmeneguello/jenkins,pjanouse/jenkins,pselle/jenkins,hplatou/jenkins,csimons/jenkins,stephenc/jenkins,verbitan/jenkins,singh88/jenkins,soenter/jenkins,dariver/jenkins,keyurpatankar/hudson,shahharsh/jenkins,guoxu0514/jenkins,ErikVerheul/jenkins,chbiel/jenkins,rlugojr/jenkins,vvv444/jenkins,jcarrothers-sap/jenkins,rlugojr/jenkins,Vlatombe/jenkins,csimons/jenkins,kohsuke/hudson,yonglehou/jenkins,bkmeneguello/jenkins,andresrc/jenkins,SenolOzer/jenkins,protazy/jenkins,ns163/jenkins,jcsirot/jenkins,bpzhang/jenkins,petermarcoen/jenkins,ndeloof/jenkins,dariver/jenkins,ErikVerheul/jenkins,mdonohue/jenkins,FTG-003/jenkins,recena/jenkins,arcivanov/jenkins,
noikiy/jenkins,vjuranek/jenkins,bkmeneguello/jenkins,paulmillar/jenkins,evernat/jenkins,Vlatombe/jenkins,SenolOzer/jenkins,aduprat/jenkins,amruthsoft9/Jenkis,paulwellnerbou/jenkins,everyonce/jenkins,tastatur/jenkins,evernat/jenkins,jk47/jenkins,csimons/jenkins,hplatou/jenkins,everyonce/jenkins,jcarrothers-sap/jenkins,duzifang/my-jenkins,Krasnyanskiy/jenkins,noikiy/jenkins,jpederzolli/jenkins-1,godfath3r/jenkins,scoheb/jenkins,synopsys-arc-oss/jenkins,jglick/jenkins,ndeloof/jenkins,dbroady1/jenkins,vjuranek/jenkins,rashmikanta-1984/jenkins,intelchen/jenkins,lordofthejars/jenkins,patbos/jenkins,verbitan/jenkins,rsandell/jenkins,liorhson/jenkins,samatdav/jenkins,dariver/jenkins,vijayto/jenkins,liorhson/jenkins,varmenise/jenkins,singh88/jenkins,luoqii/jenkins,lordofthejars/jenkins,paulwellnerbou/jenkins,dbroady1/jenkins,vlajos/jenkins,intelchen/jenkins,christ66/jenkins,Jochen-A-Fuerbacher/jenkins,lordofthejars/jenkins,petermarcoen/jenkins,mattclark/jenkins,olivergondza/jenkins,vvv444/jenkins,tfennelly/jenkins,gorcz/jenkins,alvarolobato/jenkins,my7seven/jenkins,Krasnyanskiy/jenkins,ErikVerheul/jenkins,arcivanov/jenkins,AustinKwang/jenkins,jcarrothers-sap/jenkins,tangkun75/jenkins,synopsys-arc-oss/jenkins,scoheb/jenkins,hemantojhaa/jenkins,elkingtonmcb/jenkins,hemantojhaa/jenkins,wuwen5/jenkins,aldaris/jenkins,ajshastri/jenkins,Ykus/jenkins,lilyJi/jenkins,batmat/jenkins,godfath3r/jenkins,vvv444/jenkins,nandan4/Jenkins,MarkEWaite/jenkins,jpbriend/jenkins,intelchen/jenkins,seanlin816/jenkins,maikeffi/hudson,mrooney/jenkins,mattclark/jenkins,wuwen5/jenkins,kohsuke/hudson,1and1/jenkins,vlajos/jenkins,rashmikanta-1984/jenkins,batmat/jenkins,protazy/jenkins,ns163/jenkins,bkmeneguello/jenkins,singh88/jenkins,chbiel/jenkins,guoxu0514/jenkins,jzjzjzj/jenkins,SenolOzer/jenkins,FTG-003/jenkins,csimons/jenkins,iqstack/jenkins,mcanthony/jenkins,NehemiahMi/jenkins,Vlatombe/jenkins,Wilfred/jenkins,stephenc/jenkins,arunsingh/jenkins,amruthsoft9/Jenkis,rlugojr/jenkins,christ66/jenkins,huy
brechts/hudson,bpzhang/jenkins,escoem/jenkins,amruthsoft9/Jenkis,alvarolobato/jenkins,varmenise/jenkins,shahharsh/jenkins,Krasnyanskiy/jenkins,SebastienGllmt/jenkins,gusreiber/jenkins,olivergondza/jenkins,ikedam/jenkins,Krasnyanskiy/jenkins,jenkinsci/jenkins,DoctorQ/jenkins,liorhson/jenkins,hemantojhaa/jenkins,akshayabd/jenkins,morficus/jenkins,amuniz/jenkins,amuniz/jenkins,vlajos/jenkins,christ66/jenkins,luoqii/jenkins,mattclark/jenkins,MarkEWaite/jenkins,maikeffi/hudson,sathiya-mit/jenkins,verbitan/jenkins,wuwen5/jenkins,dennisjlee/jenkins,gitaccountforprashant/gittest,Jimilian/jenkins,paulmillar/jenkins,huybrechts/hudson,lilyJi/jenkins,h4ck3rm1k3/jenkins,oleg-nenashev/jenkins,jk47/jenkins,jpbriend/jenkins,recena/jenkins,jcsirot/jenkins,varmenise/jenkins,SebastienGllmt/jenkins,synopsys-arc-oss/jenkins,lindzh/jenkins,vvv444/jenkins,aquarellian/jenkins,maikeffi/hudson,gusreiber/jenkins,luoqii/jenkins,NehemiahMi/jenkins,yonglehou/jenkins,amuniz/jenkins,duzifang/my-jenkins,arunsingh/jenkins,daniel-beck/jenkins,tastatur/jenkins,jcsirot/jenkins,ErikVerheul/jenkins,jglick/jenkins,intelchen/jenkins,Jimilian/jenkins,kzantow/jenkins,verbitan/jenkins,dbroady1/jenkins,paulmillar/jenkins,Wilfred/jenkins,lilyJi/jenkins,gorcz/jenkins,jk47/jenkins,wuwen5/jenkins,varmenise/jenkins,ydubreuil/jenkins,evernat/jenkins,daniel-beck/jenkins,gitaccountforprashant/gittest,jk47/jenkins,alvarolobato/jenkins,nandan4/Jenkins,guoxu0514/jenkins,petermarcoen/jenkins,gitaccountforprashant/gittest,jpbriend/jenkins,godfath3r/jenkins,paulwellnerbou/jenkins,arcivanov/jenkins,brunocvcunha/jenkins,goldchang/jenkins,mcanthony/jenkins,pjanouse/jenkins,DanielWeber/jenkins,msrb/jenkins,recena/jenkins,andresrc/jenkins,jglick/jenkins,albers/jenkins,vjuranek/jenkins,khmarbaise/jenkins,Vlatombe/jenkins,huybrechts/hudson,escoem/jenkins,hemantojhaa/jenkins,arcivanov/jenkins,mcanthony/jenkins,pjanouse/jenkins,batmat/jenkins,Jochen-A-Fuerbacher/jenkins,FTG-003/jenkins,petermarcoen/jenkins,liupugong/jenkins,DoctorQ/
jenkins,protazy/jenkins,vijayto/jenkins,huybrechts/hudson,Vlatombe/jenkins,FarmGeek4Life/jenkins,damianszczepanik/jenkins,MichaelPranovich/jenkins_sc,andresrc/jenkins,sathiya-mit/jenkins,paulmillar/jenkins,viqueen/jenkins,maikeffi/hudson,mdonohue/jenkins,h4ck3rm1k3/jenkins,jhoblitt/jenkins,protazy/jenkins,hemantojhaa/jenkins,ndeloof/jenkins,everyonce/jenkins,tfennelly/jenkins,jhoblitt/jenkins,wuwen5/jenkins,damianszczepanik/jenkins,escoem/jenkins,aduprat/jenkins,Ykus/jenkins,pselle/jenkins,olivergondza/jenkins,FarmGeek4Life/jenkins,goldchang/jenkins,SebastienGllmt/jenkins,soenter/jenkins,thomassuckow/jenkins,AustinKwang/jenkins,DoctorQ/jenkins,guoxu0514/jenkins,andresrc/jenkins,akshayabd/jenkins,my7seven/jenkins,batmat/jenkins,jenkinsci/jenkins,shahharsh/jenkins,tfennelly/jenkins,mcanthony/jenkins,guoxu0514/jenkins,FarmGeek4Life/jenkins,v1v/jenkins,6WIND/jenkins,MarkEWaite/jenkins,amuniz/jenkins,jzjzjzj/jenkins,damianszczepanik/jenkins,aldaris/jenkins,Krasnyanskiy/jenkins,vijayto/jenkins,olivergondza/jenkins,deadmoose/jenkins,christ66/jenkins,luoqii/jenkins,morficus/jenkins,elkingtonmcb/jenkins,ndeloof/jenkins,scoheb/jenkins,paulmillar/jenkins,luoqii/jenkins,rlugojr/jenkins,hashar/jenkins,tangkun75/jenkins,azweb76/jenkins,Wilfred/jenkins,alvarolobato/jenkins,jglick/jenkins,NehemiahMi/jenkins,pselle/jenkins,gitaccountforprashant/gittest,alvarolobato/jenkins,tangkun75/jenkins,hplatou/jenkins,liupugong/jenkins,ajshastri/jenkins,evernat/jenkins,Ykus/jenkins,jenkinsci/jenkins,Ykus/jenkins,hashar/jenkins,DoctorQ/jenkins,petermarcoen/jenkins,1and1/jenkins,evernat/jenkins,DanielWeber/jenkins,KostyaSha/jenkins,brunocvcunha/jenkins,rashmikanta-1984/jenkins,chbiel/jenkins,1and1/jenkins,azweb76/jenkins,hemantojhaa/jenkins,jpbriend/jenkins,noikiy/jenkins,olivergondza/jenkins,liupugong/jenkins,wangyikai/jenkins,jcarrothers-sap/jenkins,liorhson/jenkins,mdonohue/jenkins,yonglehou/jenkins,oleg-nenashev/jenkins,aduprat/jenkins,deadmoose/jenkins,DanielWeber/jenkins,ydubreuil/jenkins,K
ostyaSha/jenkins,KostyaSha/jenkins,AustinKwang/jenkins,viqueen/jenkins,rlugojr/jenkins,escoem/jenkins,stephenc/jenkins,csimons/jenkins,brunocvcunha/jenkins,tangkun75/jenkins,stephenc/jenkins,vvv444/jenkins,FarmGeek4Life/jenkins,SenolOzer/jenkins,6WIND/jenkins,ChrisA89/jenkins,CodeShane/jenkins,pjanouse/jenkins,1and1/jenkins,DanielWeber/jenkins,jcarrothers-sap/jenkins,goldchang/jenkins,DoctorQ/jenkins,godfath3r/jenkins,arcivanov/jenkins,soenter/jenkins,sathiya-mit/jenkins,gusreiber/jenkins,dariver/jenkins,maikeffi/hudson,aduprat/jenkins,azweb76/jenkins,hashar/jenkins,mdonohue/jenkins,ikedam/jenkins,escoem/jenkins,kzantow/jenkins,liupugong/jenkins,patbos/jenkins,protazy/jenkins,lordofthejars/jenkins,alvarolobato/jenkins,sathiya-mit/jenkins,292388900/jenkins,pjanouse/jenkins,bkmeneguello/jenkins,jhoblitt/jenkins,aldaris/jenkins,Jochen-A-Fuerbacher/jenkins,verbitan/jenkins,jpbriend/jenkins,soenter/jenkins,oleg-nenashev/jenkins,patbos/jenkins,fbelzunc/jenkins,akshayabd/jenkins,rsandell/jenkins,Vlatombe/jenkins,gusreiber/jenkins,6WIND/jenkins,AustinKwang/jenkins,oleg-nenashev/jenkins,my7seven/jenkins,evernat/jenkins,kzantow/jenkins,tangkun75/jenkins,shahharsh/jenkins,samatdav/jenkins,daniel-beck/jenkins,chbiel/jenkins,gorcz/jenkins,kohsuke/hudson,chbiel/jenkins,deadmoose/jenkins,gorcz/jenkins,morficus/jenkins,ajshastri/jenkins,MichaelPranovich/jenkins_sc,jzjzjzj/jenkins,varmenise/jenkins,292388900/jenkins,seanlin816/jenkins,mattclark/jenkins,bpzhang/jenkins,lindzh/jenkins,patbos/jenkins,Jimilian/jenkins,hashar/jenkins,gorcz/jenkins,vlajos/jenkins,v1v/jenkins,msrb/jenkins,nandan4/Jenkins,Ykus/jenkins,akshayabd/jenkins,FTG-003/jenkins,nandan4/Jenkins,jpederzolli/jenkins-1,everyonce/jenkins,akshayabd/jenkins,aquarellian/jenkins,vijayto/jenkins,amuniz/jenkins,jenkinsci/jenkins,lordofthejars/jenkins,ndeloof/jenkins,azweb76/jenkins,MarkEWaite/jenkins,vlajos/jenkins,jhoblitt/jenkins,jk47/jenkins,wuwen5/jenkins,h4ck3rm1k3/jenkins,elkingtonmcb/jenkins,protazy/jenkins,vijayto/jenki
ns,sathiya-mit/jenkins,hplatou/jenkins,iqstack/jenkins,v1v/jenkins,fbelzunc/jenkins,goldchang/jenkins,noikiy/jenkins,jglick/jenkins,deadmoose/jenkins,tastatur/jenkins,ikedam/jenkins,azweb76/jenkins,MichaelPranovich/jenkins_sc,Wilfred/jenkins,v1v/jenkins,christ66/jenkins,kzantow/jenkins,scoheb/jenkins,shahharsh/jenkins,fbelzunc/jenkins,ErikVerheul/jenkins,wuwen5/jenkins,rlugojr/jenkins,alvarolobato/jenkins,ChrisA89/jenkins,dbroady1/jenkins,kohsuke/hudson,albers/jenkins,wangyikai/jenkins,viqueen/jenkins,amuniz/jenkins,lindzh/jenkins,recena/jenkins,jk47/jenkins,guoxu0514/jenkins,dbroady1/jenkins,tfennelly/jenkins,synopsys-arc-oss/jenkins,goldchang/jenkins,MarkEWaite/jenkins,ikedam/jenkins,Wilfred/jenkins,tfennelly/jenkins,ajshastri/jenkins,1and1/jenkins,fbelzunc/jenkins,damianszczepanik/jenkins,daniel-beck/jenkins,liupugong/jenkins,huybrechts/hudson,liorhson/jenkins,khmarbaise/jenkins,yonglehou/jenkins,aquarellian/jenkins,damianszczepanik/jenkins,vvv444/jenkins,verbitan/jenkins,292388900/jenkins,ikedam/jenkins,h4ck3rm1k3/jenkins,patbos/jenkins,jcarrothers-sap/jenkins,jk47/jenkins,ns163/jenkins,CodeShane/jenkins,escoem/jenkins,DanielWeber/jenkins,luoqii/jenkins,elkingtonmcb/jenkins,Ykus/jenkins,christ66/jenkins,ns163/jenkins,singh88/jenkins,jglick/jenkins,lilyJi/jenkins,lindzh/jenkins,verbitan/jenkins,lilyJi/jenkins,singh88/jenkins,albers/jenkins,thomassuckow/jenkins,jcarrothers-sap/jenkins,kzantow/jenkins,maikeffi/hudson,jcsirot/jenkins,rashmikanta-1984/jenkins,mattclark/jenkins,rsandell/jenkins,jenkinsci/jenkins,varmenise/jenkins,dennisjlee/jenkins,bpzhang/jenkins,lilyJi/jenkins,amuniz/jenkins,jpbriend/jenkins,sathiya-mit/jenkins,msrb/jenkins,iqstack/jenkins,nandan4/Jenkins,AustinKwang/jenkins,elkingtonmcb/jenkins,wangyikai/jenkins,h4ck3rm1k3/jenkins,pjanouse/jenkins,iqstack/jenkins,lindzh/jenkins,liupugong/jenkins,jenkinsci/jenkins,ErikVerheul/jenkins,MichaelPranovich/jenkins_sc,samatdav/jenkins,jcsirot/jenkins,SenolOzer/jenkins,gitaccountforprashant/gittest,aduprat/
jenkins,soenter/jenkins,thomassuckow/jenkins,jhoblitt/jenkins,bkmeneguello/jenkins,jpederzolli/jenkins-1,gusreiber/jenkins,mrooney/jenkins,paulwellnerbou/jenkins,fbelzunc/jenkins,mdonohue/jenkins,dennisjlee/jenkins,wangyikai/jenkins,jcsirot/jenkins,gitaccountforprashant/gittest,jcsirot/jenkins,ajshastri/jenkins,mattclark/jenkins,ydubreuil/jenkins,elkingtonmcb/jenkins,hashar/jenkins,scoheb/jenkins,albers/jenkins,soenter/jenkins,rsandell/jenkins,ChrisA89/jenkins,liupugong/jenkins,MarkEWaite/jenkins,aldaris/jenkins,ns163/jenkins,v1v/jenkins,liorhson/jenkins,KostyaSha/jenkins,protazy/jenkins,azweb76/jenkins,svanoort/jenkins,andresrc/jenkins,MarkEWaite/jenkins,paulwellnerbou/jenkins,yonglehou/jenkins,morficus/jenkins,CodeShane/jenkins,shahharsh/jenkins,svanoort/jenkins,seanlin816/jenkins,jpbriend/jenkins,batmat/jenkins,soenter/jenkins,olivergondza/jenkins,azweb76/jenkins,my7seven/jenkins,albers/jenkins,thomassuckow/jenkins,mrooney/jenkins,escoem/jenkins,csimons/jenkins,DanielWeber/jenkins,v1v/jenkins,Wilfred/jenkins,rashmikanta-1984/jenkins,bpzhang/jenkins,SenolOzer/jenkins,maikeffi/hudson,pselle/jenkins,maikeffi/hudson,svanoort/jenkins,vjuranek/jenkins,Jimilian/jenkins,synopsys-arc-oss/jenkins,arunsingh/jenkins,tfennelly/jenkins,KostyaSha/jenkins,MichaelPranovich/jenkins_sc,svanoort/jenkins,kzantow/jenkins,jpederzolli/jenkins-1,tangkun75/jenkins,vijayto/jenkins,jzjzjzj/jenkins,viqueen/jenkins,jhoblitt/jenkins,albers/jenkins,bkmeneguello/jenkins,chbiel/jenkins,SebastienGllmt/jenkins,FarmGeek4Life/jenkins,AustinKwang/jenkins,vjuranek/jenkins,everyonce/jenkins,vijayto/jenkins,tastatur/jenkins,synopsys-arc-oss/jenkins,Jochen-A-Fuerbacher/jenkins,jzjzjzj/jenkins,noikiy/jenkins,synopsys-arc-oss/jenkins,singh88/jenkins,FTG-003/jenkins,tastatur/jenkins,samatdav/jenkins,huybrechts/hudson,scoheb/jenkins,NehemiahMi/jenkins,tangkun75/jenkins,mrooney/jenkins,FarmGeek4Life/jenkins,recena/jenkins,hashar/jenkins,ydubreuil/jenkins,hplatou/jenkins,ikedam/jenkins,aldaris/jenkins,dbroady1/
jenkins,lindzh/jenkins,ndeloof/jenkins,stephenc/jenkins,deadmoose/jenkins,brunocvcunha/jenkins,mcanthony/jenkins,scoheb/jenkins,kohsuke/hudson,dennisjlee/jenkins,morficus/jenkins,wangyikai/jenkins,lordofthejars/jenkins,msrb/jenkins,Jimilian/jenkins,noikiy/jenkins,jpederzolli/jenkins-1,AustinKwang/jenkins,dennisjlee/jenkins,KostyaSha/jenkins,SebastienGllmt/jenkins,rsandell/jenkins,lordofthejars/jenkins,thomassuckow/jenkins,lindzh/jenkins,1and1/jenkins,khmarbaise/jenkins,SebastienGllmt/jenkins,pselle/jenkins,intelchen/jenkins,varmenise/jenkins,arcivanov/jenkins,dbroady1/jenkins,guoxu0514/jenkins,dariver/jenkins,damianszczepanik/jenkins,recena/jenkins,everyonce/jenkins,brunocvcunha/jenkins,dennisjlee/jenkins,DoctorQ/jenkins,olivergondza/jenkins,damianszczepanik/jenkins,godfath3r/jenkins,yonglehou/jenkins,vlajos/jenkins,samatdav/jenkins,NehemiahMi/jenkins,jcarrothers-sap/jenkins,svanoort/jenkins,intelchen/jenkins,amruthsoft9/Jenkis,jglick/jenkins,stephenc/jenkins,damianszczepanik/jenkins,daniel-beck/jenkins,rashmikanta-1984/jenkins,mcanthony/jenkins,aduprat/jenkins,kohsuke/hudson,viqueen/jenkins,brunocvcunha/jenkins,thomassuckow/jenkins,keyurpatankar/hudson,FTG-003/jenkins,vjuranek/jenkins,samatdav/jenkins,kzantow/jenkins,patbos/jenkins,FarmGeek4Life/jenkins,keyurpatankar/hudson,aquarellian/jenkins,duzifang/my-jenkins,aldaris/jenkins,akshayabd/jenkins,rsandell/jenkins,gorcz/jenkins,huybrechts/hudson,gusreiber/jenkins,ndeloof/jenkins,ns163/jenkins,elkingtonmcb/jenkins,keyurpatankar/hudson,CodeShane/jenkins,ydubreuil/jenkins,keyurpatankar/hudson,arunsingh/jenkins,chbiel/jenkins,seanlin816/jenkins,goldchang/jenkins,jzjzjzj/jenkins,samatdav/jenkins,lilyJi/jenkins,6WIND/jenkins,Jimilian/jenkins,liorhson/jenkins,oleg-nenashev/jenkins,duzifang/my-jenkins,jenkinsci/jenkins,recena/jenkins,pselle/jenkins,msrb/jenkins,tastatur/jenkins,iqstack/jenkins,vlajos/jenkins,ChrisA89/jenkins,my7seven/jenkins,Wilfred/jenkins,ChrisA89/jenkins,keyurpatankar/hudson,ydubreuil/jenkins,ajshastri/j
enkins,aldaris/jenkins,Jochen-A-Fuerbacher/jenkins,Jimilian/jenkins,hplatou/jenkins,dariver/jenkins,ErikVerheul/jenkins,godfath3r/jenkins,amruthsoft9/Jenkis,daniel-beck/jenkins,my7seven/jenkins,6WIND/jenkins,ikedam/jenkins,1and1/jenkins,arunsingh/jenkins,Jochen-A-Fuerbacher/jenkins,amruthsoft9/Jenkis,duzifang/my-jenkins,gorcz/jenkins,KostyaSha/jenkins,mdonohue/jenkins,andresrc/jenkins,petermarcoen/jenkins,DanielWeber/jenkins,oleg-nenashev/jenkins,mrooney/jenkins,seanlin816/jenkins,khmarbaise/jenkins,292388900/jenkins,amruthsoft9/Jenkis,khmarbaise/jenkins,jzjzjzj/jenkins,duzifang/my-jenkins,iqstack/jenkins,KostyaSha/jenkins,evernat/jenkins,wangyikai/jenkins,gitaccountforprashant/gittest,rsandell/jenkins,arcivanov/jenkins,Ykus/jenkins,luoqii/jenkins,patbos/jenkins,jpederzolli/jenkins-1,hemantojhaa/jenkins,yonglehou/jenkins,shahharsh/jenkins,svanoort/jenkins,MichaelPranovich/jenkins_sc,Krasnyanskiy/jenkins,seanlin816/jenkins,fbelzunc/jenkins,nandan4/Jenkins,deadmoose/jenkins,DoctorQ/jenkins,292388900/jenkins,gusreiber/jenkins,dennisjlee/jenkins,paulmillar/jenkins,keyurpatankar/hudson,CodeShane/jenkins,mattclark/jenkins,thomassuckow/jenkins,DoctorQ/jenkins,everyonce/jenkins,khmarbaise/jenkins,h4ck3rm1k3/jenkins,CodeShane/jenkins,daniel-beck/jenkins,rsandell/jenkins,batmat/jenkins,292388900/jenkins,mrooney/jenkins,intelchen/jenkins,6WIND/jenkins,fbelzunc/jenkins,bpzhang/jenkins,kohsuke/hudson,ChrisA89/jenkins,sathiya-mit/jenkins,ikedam/jenkins,v1v/jenkins,paulwellnerbou/jenkins,oleg-nenashev/jenkins,morficus/jenkins,MarkEWaite/jenkins,csimons/jenkins,arunsingh/jenkins,pjanouse/jenkins,jenkinsci/jenkins,batmat/jenkins | /*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Stephen Connolly, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson;
import hudson.Proc.LocalProc;
import hudson.model.Computer;
import hudson.util.QuotedStringTokenizer;
import jenkins.model.Jenkins;
import hudson.model.TaskListener;
import hudson.model.Node;
import hudson.remoting.Callable;
import hudson.remoting.Channel;
import hudson.remoting.Pipe;
import hudson.remoting.RemoteInputStream;
import hudson.remoting.RemoteOutputStream;
import hudson.remoting.VirtualChannel;
import hudson.util.StreamCopyThread;
import hudson.util.ArgumentListBuilder;
import hudson.util.ProcessTree;
import org.apache.commons.io.input.NullInputStream;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import static org.apache.commons.io.output.NullOutputStream.NULL_OUTPUT_STREAM;
/**
* Starts a process.
*
* <p>
* This hides the difference between running programs locally vs remotely.
*
*
* <h2>'env' parameter</h2>
* <p>
* To allow important environment variables to be copied over to the remote machine,
* the 'env' parameter shouldn't contain default inherited environment variables
* (which often contains machine-specific information, like PATH, TIMEZONE, etc.)
*
* <p>
* {@link Launcher} is responsible for inheriting environment variables.
*
*
* @author Kohsuke Kawaguchi
* @see FilePath#createLauncher(TaskListener)
*/
public abstract class Launcher {

    // Receives the log output of this launcher (the command lines being executed, errors).
    protected final TaskListener listener;

    // Channel to the machine where processes are started; may be null if the target
    // node does not support remote execution (see getChannel()).
    protected final VirtualChannel channel;
public Launcher(TaskListener listener, VirtualChannel channel) {
this.listener = listener;
this.channel = channel;
}
/**
* Constructor for a decorator.
*/
protected Launcher(Launcher launcher) {
this(launcher.listener, launcher.channel);
}
    /**
     * Gets the channel that can be used to run a program remotely.
     *
     * @return
     *      null if the target node is not configured to support this.
     *      this is a transitional measure.
     *      Note that a launcher for the master is always non-null.
     */
    public VirtualChannel getChannel() {
        return channel;
    }
    /**
     * Gets the {@link TaskListener} that this launcher uses to
     * report the commands that it's executing.
     *
     * @return the listener supplied at construction time; never changes afterwards.
     */
    public TaskListener getListener() {
        return listener;
    }
/**
* If this {@link Launcher} is encapsulating an execution on a specific {@link Computer},
* return it.
*
* <p>
* Because of the way internal Hudson abstractions are set up (that is, {@link Launcher} only
* needs a {@link VirtualChannel} to do its job and isn't really required that the channel
* comes from an existing {@link Computer}), this method may not always the right {@link Computer} instance.
*
* @return
* null if this launcher is not created from a {@link Computer} object.
* @deprecated since 2008-11-16.
* See the javadoc for why this is inherently unreliable. If you are trying to
* figure out the current {@link Computer} from within a build, use
* {@link Computer#currentComputer()}
*/
public Computer getComputer() {
for( Computer c : Jenkins.getInstance().getComputers() )
if(c.getChannel()==channel)
return c;
return null;
}
/**
* Builder pattern for configuring a process to launch.
* @since 1.311
*/
public final class ProcStarter {
protected List<String> commands;
protected boolean[] masks;
protected FilePath pwd;
protected OutputStream stdout = NULL_OUTPUT_STREAM, stderr;
protected InputStream stdin = NULL_INPUT_STREAM;
protected String[] envs;
/**
* True to reverse the I/O direction.
*
* For example, if {@link #reverseStdout}==true, then we expose
* {@link InputStream} from {@link Proc} and expect the client to read from it,
* whereas normally we take {@link OutputStream} via {@link #stdout(OutputStream)}
* and feed stdout into that output.
*
* @since 1.399
*/
protected boolean reverseStdin, reverseStdout, reverseStderr;
/**
* Passes a white-space separated single-string command (like "cat abc def") and parse them
* as a command argument. This method also handles quotes.
*/
public ProcStarter cmdAsSingleString(String s) {
return cmds(QuotedStringTokenizer.tokenize(s));
}
public ProcStarter cmds(String... args) {
return cmds(Arrays.asList(args));
}
public ProcStarter cmds(File program, String... args) {
commands = new ArrayList<String>(args.length+1);
commands.add(program.getPath());
commands.addAll(Arrays.asList(args));
return this;
}
public ProcStarter cmds(List<String> args) {
commands = new ArrayList<String>(args);
return this;
}
public ProcStarter cmds(ArgumentListBuilder args) {
commands = args.toList();
masks = args.toMaskArray();
return this;
}
public List<String> cmds() {
return commands;
}
/**
* Hide parts of the command line from being printed to the log.
* @param masks true for each position in {@link #cmds(String[])} which should be masked, false to print
* @return this
* @see ArgumentListBuilder#add(String, boolean)
* @see #maskedPrintCommandLine(List, boolean[], FilePath)
*/
public ProcStarter masks(boolean... masks) {
this.masks = masks;
return this;
}
public boolean[] masks() {
return masks;
}
public ProcStarter pwd(FilePath workDir) {
this.pwd = workDir;
return this;
}
public ProcStarter pwd(File workDir) {
return pwd(new FilePath(workDir));
}
public ProcStarter pwd(String workDir) {
return pwd(new File(workDir));
}
public FilePath pwd() {
return pwd;
}
public ProcStarter stdout(OutputStream out) {
this.stdout = out;
return this;
}
/**
* Sends the stdout to the given {@link TaskListener}.
*/
public ProcStarter stdout(TaskListener out) {
return stdout(out.getLogger());
}
public OutputStream stdout() {
return stdout;
}
/**
* Controls where the stderr of the process goes.
* By default, it's bundled into stdout.
*/
public ProcStarter stderr(OutputStream err) {
this.stderr = err;
return this;
}
public OutputStream stderr() {
return stderr;
}
/**
* Controls where the stdin of the process comes from.
* By default, <tt>/dev/null</tt>.
*/
public ProcStarter stdin(InputStream in) {
this.stdin = in;
return this;
}
public InputStream stdin() {
return stdin;
}
/**
* Sets the environment variable overrides.
*
* <p>
* In adition to what the current process
* is inherited (if this is going to be launched from a slave agent, that
* becomes the "current" process), these variables will be also set.
*/
public ProcStarter envs(Map<String, String> overrides) {
this.envs = Util.mapToEnv(overrides);
return this;
}
/**
* @param overrides
* List of "VAR=VALUE". See {@link #envs(Map)} for the semantics.
*/
public ProcStarter envs(String... overrides) {
if (overrides != null) {
for (String override : overrides) {
if (override.indexOf('=') == -1) {
throw new IllegalArgumentException(override);
}
}
}
this.envs = overrides;
return this;
}
public String[] envs() {
return envs.clone();
}
/**
* Indicates that the caller will pump {@code stdout} from the child process
* via {@link Proc#getStdout()} (whereas by default you call {@link #stdout(OutputStream)}
* and let Jenkins pump stdout into your {@link OutputStream} of choosing.
*
* <p>
* When this method is called, {@link Proc#getStdout()} will read the combined output
* of {@code stdout/stderr} from the child process, unless {@link #readStderr()} is called
* separately, which lets the caller read those two streams separately.
*
* @since 1.399
*/
public ProcStarter readStdout() {
reverseStdout = true;
stdout = stderr = null;
return this;
}
/**
* In addition to the effect of {@link #readStdout()}, indicate that the caller will pump {@code stderr}
* from the child process separately from {@code stdout}. The stderr will be readable from
* {@link Proc#getStderr()} while {@link Proc#getStdout()} reads from stdout.
*
* @since 1.399
*/
public ProcStarter readStderr() {
reverseStdout = true;
reverseStderr = true;
return this;
}
/**
* Indicates that the caller will directly write to the child process {@link #stdin()} via {@link Proc#getStdin()}.
* (Whereas by default you call {@link #stdin(InputStream)}
* and let Jenkins pump your {@link InputStream} of choosing to stdin.)
* @since 1.399
*/
public ProcStarter writeStdin() {
reverseStdin = true;
stdin = null;
return this;
}
/**
* Starts the new process as configured.
*/
public Proc start() throws IOException {
return launch(this);
}
/**
* Starts the process and waits for its completion.
*/
public int join() throws IOException, InterruptedException {
return start().join();
}
/**
* Copies a {@link ProcStarter}.
*/
public ProcStarter copy() {
ProcStarter rhs = new ProcStarter().cmds(commands).pwd(pwd).masks(masks).stdin(stdin).stdout(stdout).stderr(stderr).envs(envs);
rhs.reverseStdin = this.reverseStdin;
rhs.reverseStderr = this.reverseStderr;
rhs.reverseStdout = this.reverseStdout;
return rhs;
}
}
/**
* Launches a process by using a {@linkplain ProcStarter builder-pattern} to configure
* the parameters.
*/
public final ProcStarter launch() {
return new ProcStarter();
}
/**
* @deprecated as of 1.311
* Use {@link #launch()} and its associated builder pattern
*/
public final Proc launch(String cmd, Map<String,String> env, OutputStream out, FilePath workDir) throws IOException {
return launch(cmd,Util.mapToEnv(env),out,workDir);
}
/**
* @deprecated as of 1.311
* Use {@link #launch()} and its associated builder pattern
*/
public final Proc launch(String[] cmd, Map<String, String> env, OutputStream out, FilePath workDir) throws IOException {
return launch(cmd, Util.mapToEnv(env), out, workDir);
}
/**
* @deprecated as of 1.311
* Use {@link #launch()} and its associated builder pattern
*/
public final Proc launch(String[] cmd, Map<String, String> env, InputStream in, OutputStream out) throws IOException {
return launch(cmd, Util.mapToEnv(env), in, out);
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd     The command and all it's arguments.
 * @param mask    Which of the command and arguments should be masked from the listener
 * @param env     Environment variable overrides.
 * @param out     stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @param workDir null if the working directory could be anything.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, Map<String, String> env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd, mask, Util.mapToEnv(env), out, workDir);
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd  The command and all it's arguments.
 * @param mask Which of the command and arguments should be masked from the listener
 * @param env  Environment variable overrides.
 * @param in   null if there's no input.
 * @param out  stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, Map<String, String> env, InputStream in, OutputStream out) throws IOException {
    return launch(cmd, mask, Util.mapToEnv(env), in, out);
}
/**
 * Launches a process described by a single command string; the string is tokenized before launch.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String cmd,String[] env,OutputStream out, FilePath workDir) throws IOException {
    return launch(Util.tokenize(cmd),env,out,workDir);
}
/**
 * Launches a process with no stdin ("NAME=VALUE"-style environment overrides).
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, String[] env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd, env, null, out, workDir);
}
/**
 * Launches a process with the given stdin and an unspecified working directory.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, String[] env, InputStream in, OutputStream out) throws IOException {
    return launch(cmd, env, in, out, null);
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd     The command and all it's arguments.
 * @param mask    Which of the command and arguments should be masked from the listener
 * @param env     Environment variable overrides.
 * @param out     stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @param workDir null if the working directory could be anything.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, String[] env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd, mask, env, null, out, workDir);
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd  The command and all it's arguments.
 * @param mask Which of the command and arguments should be masked from the listener
 * @param env  Environment variable overrides.
 * @param in   null if there's no input.
 * @param out  stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, String[] env, InputStream in, OutputStream out) throws IOException {
    return launch(cmd, mask, env, in, out, null);
}
/**
 * Launches a process via the builder, adapting the legacy parameter list.
 *
 * @param env
 *      Environment variable overrides.
 * @param in
 *      null if there's no input.
 * @param workDir
 *      null if the working directory could be anything.
 * @param out
 *      stdout and stderr of the process will be sent to this stream.
 *      the stream won't be closed.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public Proc launch(String[] cmd, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
    return launch(launch().cmds(cmd).envs(env).stdin(in).stdout(out).pwd(workDir));
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd     The command and all it's arguments.
 * @param mask    Which of the command and arguments should be masked from the listener
 * @param env     Environment variable overrides.
 * @param in      null if there's no input.
 * @param out     stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @param workDir null if the working directory could be anything.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public Proc launch(String[] cmd, boolean[] mask, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
    return launch(launch().cmds(cmd).masks(mask).envs(env).stdin(in).stdout(out).pwd(workDir));
}
/**
 * Primarily invoked from {@link ProcStarter#start()} to start a process with a specific launcher.
 *
 * @param starter the fully configured description of the process to launch.
 * @return a handle to the started process.
 * @throws IOException if the process could not be started.
 */
public abstract Proc launch(ProcStarter starter) throws IOException;
/**
 * Launches a specified process and connects its input/output to a {@link Channel}, then
 * return it.
 *
 * <p>
 * When the returned channel is terminated, the process will be killed.
 *
 * @param cmd
 *      the command line of the process to launch.
 * @param out
 *      Where the stderr from the launched process will be sent.
 * @param workDir
 *      The working directory of the new process, or null to inherit
 *      from the current process
 * @param envVars
 *      Environment variable overrides. In addition to what the current process
 *      is inherited (if this is going to be launched from a slave agent, that
 *      becomes the "current" process), these variables will be also set.
 * @return a channel whose remote side is the launched process.
 */
public abstract Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String,String> envVars) throws IOException, InterruptedException;
/**
 * Returns true if this {@link Launcher} is going to launch on Unix.
 */
public boolean isUnix() {
    // On Unix the PATH entry separator is ':'; on Windows it is ';'.
    final char pathSeparator = File.pathSeparatorChar;
    return pathSeparator == ':';
}
/**
 * Calls {@link ProcessTree#killAll(Map)} to kill processes.
 *
 * @param modelEnvVars passed through to {@link ProcessTree#killAll(Map)} to select the processes to kill.
 */
public abstract void kill(Map<String,String> modelEnvVars) throws IOException, InterruptedException;
/**
 * Prints out the command line to the listener so that users know what we are doing.
 *
 * @param cmd     the command and its arguments.
 * @param workDir the working directory shown as a bracketed prefix, or null for none.
 */
protected final void printCommandLine(String[] cmd, FilePath workDir) {
    final StringBuilder line = new StringBuilder();
    if (workDir != null) {
        final String remote = workDir.getRemote();
        line.append('[');
        // Show just the last path token unless full-path debugging is enabled.
        line.append(showFullPath ? remote : remote.replaceFirst("^.+[/\\\\]", ""));
        line.append("] ");
    }
    line.append('$');
    for (String token : cmd) {
        line.append(' ');
        if (token.indexOf(' ') >= 0) {
            // Quote tokens containing spaces; fall back to single quotes
            // when the token itself contains a double quote.
            final char quote = token.indexOf('"') >= 0 ? '\'' : '"';
            line.append(quote).append(token).append(quote);
        } else {
            line.append(token);
        }
    }
    listener.getLogger().println(line.toString());
}
/**
 * Prints out the command line to the listener with some portions masked to prevent sensitive information from being
 * recorded on the listener.
 *
 * @param cmd The commands
 * @param mask An array of booleans which control whether a cmd element should be masked (<code>true</code>) or
 *             remain unmasked (<code>false</code>).
 * @param workDir The work dir.
 */
protected final void maskedPrintCommandLine(List<String> cmd, boolean[] mask, FilePath workDir) {
    if (mask == null) {
        // Nothing to hide; print verbatim.
        printCommandLine(cmd.toArray(new String[cmd.size()]), workDir);
        return;
    }
    assert mask.length == cmd.size();
    final String[] masked = new String[cmd.size()];
    for (int i = 0; i < masked.length; i++) {
        masked[i] = mask[i] ? "********" : cmd.get(i);
    }
    printCommandLine(masked, workDir);
}
/**
 * Array-based convenience overload of {@link #maskedPrintCommandLine(List, boolean[], FilePath)}.
 */
protected final void maskedPrintCommandLine(String[] cmd, boolean[] mask, FilePath workDir) {
    final List<String> cmdList = Arrays.asList(cmd);
    maskedPrintCommandLine(cmdList, mask, workDir);
}
/**
 * Returns a decorated {@link Launcher} for the given node, applying every
 * registered {@link LauncherDecorator} in turn.
 */
public final Launcher decorateFor(Node node) {
    Launcher decorated = this;
    for (LauncherDecorator decorator : LauncherDecorator.all()) {
        decorated = decorator.decorate(decorated, node);
    }
    return decorated;
}
/**
 * Returns a decorated {@link Launcher} that puts the given set of arguments as a prefix to any commands
 * that it invokes.
 *
 * @param prefix command tokens to prepend (e.g. a wrapper executable and its options).
 * @since 1.299
 */
public final Launcher decorateByPrefix(final String... prefix) {
    final Launcher outer = this;
    return new Launcher(outer) {
        @Override
        public boolean isUnix() {
            return outer.isUnix();
        }
        @Override
        public Proc launch(ProcStarter starter) throws IOException {
            // Mutates the starter in place: prepend the prefix tokens ...
            starter.commands.addAll(0,Arrays.asList(prefix));
            if (starter.masks != null) {
                // ... and grow the mask array so it stays aligned with the command list.
                starter.masks = prefix(starter.masks);
            }
            return outer.launch(starter);
        }
        @Override
        public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String, String> envVars) throws IOException, InterruptedException {
            return outer.launchChannel(prefix(cmd),out,workDir,envVars);
        }
        @Override
        public void kill(Map<String, String> modelEnvVars) throws IOException, InterruptedException {
            outer.kill(modelEnvVars);
        }
        // Prepends the prefix tokens to a command-line array.
        private String[] prefix(String[] args) {
            String[] newArgs = new String[args.length+prefix.length];
            System.arraycopy(prefix,0,newArgs,0,prefix.length);
            System.arraycopy(args,0,newArgs,prefix.length,args.length);
            return newArgs;
        }
        // Grows the mask array; the prefix positions default to false,
        // i.e. the injected prefix tokens are never masked.
        private boolean[] prefix(boolean[] args) {
            boolean[] newArgs = new boolean[args.length+prefix.length];
            System.arraycopy(args,0,newArgs,prefix.length,args.length);
            return newArgs;
        }
    };
}
/**
 * Returns a decorated {@link Launcher} that automatically adds the specified environment
 * variables.
 *
 * Those that are specified in {@link ProcStarter#envs(String...)} will take precedence over
 * what's specified here.
 *
 * @since 1.489
 */
public final Launcher decorateByEnv(EnvVars _env) {
    final EnvVars env = new EnvVars(_env);
    final Launcher outer = this;
    return new Launcher(outer) {
        @Override
        public boolean isUnix() {
            return outer.isUnix();
        }
        @Override
        public Proc launch(ProcStarter starter) throws IOException {
            // Start from the decorator's variables, then layer the starter's own
            // overrides on top so they take precedence.
            EnvVars e = new EnvVars(env);
            if (starter.envs!=null) {
                // Renamed from 'env' to avoid shadowing the captured EnvVars above.
                for (String line : starter.envs) {
                    e.addLine(line);
                }
            }
            starter.envs = Util.mapToEnv(e);
            return outer.launch(starter);
        }
        @Override
        public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String, String> envVars) throws IOException, InterruptedException {
            EnvVars e = new EnvVars(env);
            e.putAll(envVars);
            return outer.launchChannel(cmd,out,workDir,e);
        }
        @Override
        public void kill(Map<String, String> modelEnvVars) throws IOException, InterruptedException {
            outer.kill(modelEnvVars);
        }
    };
}
/**
 * {@link Launcher} that launches process locally.
 */
public static class LocalLauncher extends Launcher {
    public LocalLauncher(TaskListener listener) {
        this(listener, FilePath.localChannel);
    }
    public LocalLauncher(TaskListener listener, VirtualChannel channel) {
        super(listener, channel);
    }
    @Override
    public Proc launch(ProcStarter ps) throws IOException {
        maskedPrintCommandLine(ps.commands, ps.masks, ps.pwd);
        EnvVars jobEnv = inherit(ps.envs);
        // replace variables in command line
        String[] jobCmd = new String[ps.commands.size()];
        for ( int idx = 0 ; idx < jobCmd.length; idx++ )
            jobCmd[idx] = jobEnv.expand(ps.commands.get(idx));
        // In "reverse" mode the caller pumps the streams itself, so hand
        // LocalProc the self-pump markers instead of our own streams.
        return new LocalProc(jobCmd, Util.mapToEnv(jobEnv),
                ps.reverseStdin ?LocalProc.SELFPUMP_INPUT:ps.stdin,
                ps.reverseStdout?LocalProc.SELFPUMP_OUTPUT:ps.stdout,
                ps.reverseStderr?LocalProc.SELFPUMP_OUTPUT:ps.stderr,
                toFile(ps.pwd));
    }
    // Converts a FilePath to a local java.io.File (null-safe).
    private File toFile(FilePath f) {
        return f==null ? null : new File(f.getRemote());
    }
    public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String,String> envVars) throws IOException {
        printCommandLine(cmd, workDir);
        ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.directory(toFile(workDir));
        if (envVars!=null) pb.environment().putAll(envVars);
        return launchChannel(out, pb);
    }
    @Override
    public void kill(Map<String, String> modelEnvVars) throws InterruptedException {
        ProcessTree.get().killAll(modelEnvVars);
    }
    /**
     * @param out
     *      Where the stderr from the launched process will be sent.
     */
    public Channel launchChannel(OutputStream out, ProcessBuilder pb) throws IOException {
        // The cookie lets us find (and later kill) the whole process tree.
        final EnvVars cookie = EnvVars.createCookie();
        pb.environment().putAll(cookie);
        final Process proc = pb.start();
        // Stderr is copied to 'out' on a separate thread; stdout/stdin carry the channel.
        final Thread t2 = new StreamCopyThread(pb.command()+": stderr copier", proc.getErrorStream(), out);
        t2.start();
        return new Channel("locally launched channel on "+ pb.command(),
                Computer.threadPoolForRemoting, proc.getInputStream(), proc.getOutputStream(), out) {
            /**
             * Kill the process when the channel is severed.
             */
            @Override
            public synchronized void terminate(IOException e) {
                super.terminate(e);
                ProcessTree pt = ProcessTree.get();
                try {
                    pt.killAll(proc,cookie);
                } catch (InterruptedException x) {
                    LOGGER.log(Level.INFO, "Interrupted", x);
                }
            }
            @Override
            public synchronized void close() throws IOException {
                super.close();
                // wait for all the output from the process to be picked up
                try {
                    t2.join();
                } catch (InterruptedException e) {
                    // process the interrupt later
                    Thread.currentThread().interrupt();
                }
            }
        };
    }
}
/**
 * A no-op {@link Launcher} placeholder: it has no channel, refuses to launch
 * anything, and ignores kill requests.
 */
@Restricted(NoExternalUse.class)
public static class DummyLauncher extends Launcher {
    public DummyLauncher(TaskListener listener) {
        super(listener, null);
    }
    @Override
    public Proc launch(ProcStarter starter) throws IOException {
        throw new IOException("Can not call launch on a dummy launcher.");
    }
    @Override
    public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String, String> envVars) throws IOException, InterruptedException {
        throw new IOException("Can not call launchChannel on a dummy launcher.");
    }
    @Override
    public void kill(Map<String, String> modelEnvVars) throws IOException, InterruptedException {
        // Kill method should do nothing.
    }
}
/**
 * Launches processes remotely by using the given channel.
 */
public static class RemoteLauncher extends Launcher {
    // Decided by the creator; we cannot probe the remote OS from here.
    private final boolean isUnix;
    public RemoteLauncher(TaskListener listener, VirtualChannel channel, boolean isUnix) {
        super(listener, channel);
        this.isUnix = isUnix;
    }
    public Proc launch(ProcStarter ps) throws IOException {
        // Wrap the local streams so they can travel over the remoting channel.
        final OutputStream out = ps.stdout == null ? null : new RemoteOutputStream(new CloseProofOutputStream(ps.stdout));
        final OutputStream err = ps.stderr==null ? null : new RemoteOutputStream(new CloseProofOutputStream(ps.stderr));
        final InputStream  in  = (ps.stdin==null || ps.stdin==NULL_INPUT_STREAM) ? null : new RemoteInputStream(ps.stdin,false);
        final String workDir = ps.pwd==null ? null : ps.pwd.getRemote();
        try {
            return new ProcImpl(getChannel().call(new RemoteLaunchCallable(ps.commands, ps.masks, ps.envs, in, ps.reverseStdin, out, ps.reverseStdout, err, ps.reverseStderr, workDir, listener)));
        } catch (InterruptedException e) {
            throw (IOException)new InterruptedIOException().initCause(e);
        }
    }
    public Channel launchChannel(String[] cmd, OutputStream err, FilePath _workDir, Map<String,String> envOverrides) throws IOException, InterruptedException {
        printCommandLine(cmd, _workDir);
        Pipe out = Pipe.createRemoteToLocal();
        final String workDir = _workDir==null ? null : _workDir.getRemote();
        OutputStream os = getChannel().call(new RemoteChannelLaunchCallable(cmd, out, err, workDir, envOverrides));
        return new Channel("remotely launched channel on "+channel,
            Computer.threadPoolForRemoting, out.getIn(), new BufferedOutputStream(os));
    }
    @Override
    public boolean isUnix() {
        return isUnix;
    }
    @Override
    public void kill(final Map<String,String> modelEnvVars) throws IOException, InterruptedException {
        getChannel().call(new KillTask(modelEnvVars));
    }
    // Executed on the remote side to kill matching processes there.
    private static final class KillTask implements Callable<Void,RuntimeException> {
        private final Map<String, String> modelEnvVars;
        public KillTask(Map<String, String> modelEnvVars) {
            this.modelEnvVars = modelEnvVars;
        }
        public Void call() throws RuntimeException {
            try {
                ProcessTree.get().killAll(modelEnvVars);
            } catch (InterruptedException e) {
                // we are asked to terminate early by the caller, so no need to do anything
            }
            return null;
        }
        private static final long serialVersionUID = 1L;
    }
    // Local proxy for a process running on the remote side.
    public static final class ProcImpl extends Proc {
        private final RemoteProcess process;
        private final IOTriplet io;
        public ProcImpl(RemoteProcess process) {
            this.process = process;
            this.io = process.getIOtriplet();
        }
        @Override
        public void kill() throws IOException, InterruptedException {
            process.kill();
        }
        @Override
        public int join() throws IOException, InterruptedException {
            return process.join();
        }
        @Override
        public boolean isAlive() throws IOException, InterruptedException {
            return process.isAlive();
        }
        @Override
        public InputStream getStdout() {
            return io.stdout;
        }
        @Override
        public InputStream getStderr() {
            return io.stderr;
        }
        @Override
        public OutputStream getStdin() {
            return io.stdin;
        }
    }
}
/**
 * A launcher which delegates to a provided inner launcher.
 * Allows subclasses to only implement methods they want to override.
 * Originally, this launcher has been implemented in
 * <a href="https://wiki.jenkins-ci.org/display/JENKINS/Custom+Tools+Plugin">
 * Custom Tools Plugin</a>.
 *
 * @author rcampbell
 * @author Oleg Nenashev, Synopsys Inc.
 * @since TODO: define version
 */
public static class DecoratedLauncher extends Launcher {
    // Assigned exactly once in the constructor; made final (the old '= null'
    // initializer was redundant and the field was never reassigned).
    private final Launcher inner;
    public DecoratedLauncher(Launcher inner) {
        super(inner);
        this.inner = inner;
    }
    @Override
    public Proc launch(ProcStarter starter) throws IOException {
        return inner.launch(starter);
    }
    @Override
    public Channel launchChannel(String[] cmd, OutputStream out,
            FilePath workDir, Map<String, String> envVars) throws IOException,
            InterruptedException {
        return inner.launchChannel(cmd, out, workDir, envVars);
    }
    @Override
    public void kill(Map<String, String> modelEnvVars) throws IOException,
            InterruptedException {
        inner.kill(modelEnvVars);
    }
    @Override
    public boolean isUnix() {
        return inner.isUnix();
    }
    @Override
    public Proc launch(String[] cmd, boolean[] mask, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
        return inner.launch(cmd, mask, env, in, out, workDir);
    }
    @Override
    public Computer getComputer() {
        return inner.getComputer();
    }
    @Override
    public TaskListener getListener() {
        return inner.getListener();
    }
    @Override
    public String toString() {
        return super.toString() + "; decorates " + inner.toString();
    }
    @Override
    public VirtualChannel getChannel() {
        return inner.getChannel();
    }
    @Override
    public Proc launch(String[] cmd, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
        return inner.launch(cmd, env, in, out, workDir);
    }
    /**
     * Gets nested launcher.
     * @return Inner launcher
     */
    public Launcher getInner() {
        return inner;
    }
}
/**
 * Serializable bundle of the three standard I/O streams of a remotely launched process.
 * Fields are populated only for the directions put in "reverse" mode via
 * {@link ProcStarter#readStdout()}, {@link ProcStarter#readStderr()} and {@link ProcStarter#writeStdin()}.
 */
public static class IOTriplet implements Serializable {
    // Readable ends of the child's stdout/stderr.
    InputStream stdout,stderr;
    // Writable end of the child's stdin.
    OutputStream stdin;
    private static final long serialVersionUID = 1L;
}
/**
 * Remoting interface of a remote process
 */
public interface RemoteProcess {
    /** Blocks until the process exits and returns its exit code. */
    int join() throws InterruptedException, IOException;
    /** Terminates the process. */
    void kill() throws IOException, InterruptedException;
    /** Checks whether the process is still running. */
    boolean isAlive() throws IOException, InterruptedException;
    /** Returns the exported standard I/O streams of the process. */
    IOTriplet getIOtriplet();
}
private static class RemoteLaunchCallable implements Callable<RemoteProcess,IOException> {
private final List<String> cmd;
private final boolean[] masks;
private final String[] env;
private final InputStream in;
private final OutputStream out;
private final OutputStream err;
private final String workDir;
private final TaskListener listener;
private final boolean reverseStdin, reverseStdout, reverseStderr;
RemoteLaunchCallable(List<String> cmd, boolean[] masks, String[] env, InputStream in, boolean reverseStdin, OutputStream out, boolean reverseStdout, OutputStream err, boolean reverseStderr, String workDir, TaskListener listener) {
this.cmd = new ArrayList<String>(cmd);
this.masks = masks;
this.env = env;
this.in = in;
this.out = out;
this.err = err;
this.workDir = workDir;
this.listener = listener;
this.reverseStdin = reverseStdin;
this.reverseStdout = reverseStdout;
this.reverseStderr = reverseStderr;
}
public RemoteProcess call() throws IOException {
Launcher.ProcStarter ps = new LocalLauncher(listener).launch();
ps.cmds(cmd).masks(masks).envs(env).stdin(in).stdout(out).stderr(err);
if(workDir!=null) ps.pwd(workDir);
if (reverseStdin) ps.writeStdin();
if (reverseStdout) ps.readStdout();
if (reverseStderr) ps.readStderr();
final Proc p = ps.start();
return Channel.current().export(RemoteProcess.class,new RemoteProcess() {
public int join() throws InterruptedException, IOException {
try {
return p.join();
} finally {
// make sure I/O is delivered to the remote before we return
try {
Channel.current().syncIO();
} catch (Throwable _) {
// this includes a failure to sync, slave.jar too old, etc
}
}
}
public void kill() throws IOException, InterruptedException {
p.kill();
}
public boolean isAlive() throws IOException, InterruptedException {
return p.isAlive();
}
public IOTriplet getIOtriplet() {
IOTriplet r = new IOTriplet();
if (reverseStdout) r.stdout = new RemoteInputStream(p.getStdout());
if (reverseStderr) r.stderr = new RemoteInputStream(p.getStderr());
if (reverseStdin) r.stdin = new RemoteOutputStream(p.getStdin());
return r;
}
});
}
private static final long serialVersionUID = 1L;
}
/**
 * Runs on the remote side to start a process whose stdio carries a remoting channel;
 * returns the (remoted) stdin of that process.
 */
private static class RemoteChannelLaunchCallable implements Callable<OutputStream,IOException> {
    private final String[] cmd;
    private final Pipe out;
    private final String workDir;
    private final OutputStream err;
    private final Map<String,String> envOverrides;
    public RemoteChannelLaunchCallable(String[] cmd, Pipe out, OutputStream err, String workDir, Map<String,String> envOverrides) {
        this.cmd = cmd;
        this.out = out;
        // Wrapped eagerly so the stream survives serialization to the remote side.
        this.err = new RemoteOutputStream(err);
        this.workDir = workDir;
        this.envOverrides = envOverrides;
    }
    public OutputStream call() throws IOException {
        Process p = Runtime.getRuntime().exec(cmd,
            Util.mapToEnv(inherit(envOverrides)),
            workDir == null ? null : new File(workDir));
        List<String> cmdLines = Arrays.asList(cmd);
        // Pump the child's stdout into the pipe and its stderr back to the caller.
        new StreamCopyThread("stdin copier for remote agent on "+cmdLines,
            p.getInputStream(), out.getOut()).start();
        new StreamCopyThread("stderr copier for remote agent on "+cmdLines,
            p.getErrorStream(), err).start();
        // TODO: don't we need to join?
        return new RemoteOutputStream(p.getOutputStream());
    }
    private static final long serialVersionUID = 1L;
}
/**
 * Expands the list of environment variables by inheriting current env variables.
 */
private static EnvVars inherit(String[] env) {
    // Convert the "NAME=VALUE" pairs into a map first.
    EnvVars overrides = new EnvVars();
    if (env != null) {
        for (String entry : env) {
            int eq = entry.indexOf('=');
            overrides.put(entry.substring(0, eq), entry.substring(eq + 1));
        }
    }
    // Then layer the overrides on top of the inherited environment.
    return inherit(overrides);
}
/**
 * Expands the list of environment variables by inheriting current env variables.
 */
private static EnvVars inherit(Map<String,String> overrides) {
    EnvVars merged = new EnvVars(EnvVars.masterEnvVars);
    merged.overrideExpandingAll(overrides);
    return merged;
}
/**
 * Debug option to display full current path instead of just the last token.
 */
public static boolean showFullPath = false;
// Shared empty stdin used when the caller supplies no input.
private static final NullInputStream NULL_INPUT_STREAM = new NullInputStream(0);
private static final Logger LOGGER = Logger.getLogger(Launcher.class.getName());
}
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Stephen Connolly, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson;
import hudson.Proc.LocalProc;
import hudson.model.Computer;
import hudson.util.QuotedStringTokenizer;
import jenkins.model.Jenkins;
import hudson.model.TaskListener;
import hudson.model.Node;
import hudson.remoting.Callable;
import hudson.remoting.Channel;
import hudson.remoting.Pipe;
import hudson.remoting.RemoteInputStream;
import hudson.remoting.RemoteOutputStream;
import hudson.remoting.VirtualChannel;
import hudson.util.StreamCopyThread;
import hudson.util.ArgumentListBuilder;
import hudson.util.ProcessTree;
import org.apache.commons.io.input.NullInputStream;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import static org.apache.commons.io.output.NullOutputStream.NULL_OUTPUT_STREAM;
/**
* Starts a process.
*
* <p>
* This hides the difference between running programs locally vs remotely.
*
*
* <h2>'env' parameter</h2>
* <p>
* To allow important environment variables to be copied over to the remote machine,
* the 'env' parameter shouldn't contain default inherited environment variables
* (which often contains machine-specific information, like PATH, TIMEZONE, etc.)
*
* <p>
* {@link Launcher} is responsible for inheriting environment variables.
*
*
* @author Kohsuke Kawaguchi
* @see FilePath#createLauncher(TaskListener)
*/
public abstract class Launcher {
protected final TaskListener listener;   // where launched command lines and diagnostics are reported
protected final VirtualChannel channel;  // channel to the machine that runs the processes; may be null
/**
 * Creates a launcher reporting to the given listener and running over the given channel.
 */
public Launcher(TaskListener listener, VirtualChannel channel) {
    this.listener = listener;
    this.channel = channel;
}
/**
 * Constructor for a decorator.
 * Copies the listener and channel from the launcher being decorated.
 */
protected Launcher(Launcher launcher) {
    this(launcher.listener, launcher.channel);
}
/**
 * Gets the channel that can be used to run a program remotely.
 *
 * @return
 *      null if the target node is not configured to support this.
 *      this is a transitional measure.
 *      Note that a launcher for the master is always non-null.
 */
public VirtualChannel getChannel() {
    return channel;
}
/**
 * Gets the {@link TaskListener} that this launcher uses to
 * report the commands that it's executing.
 */
public TaskListener getListener() {
    return listener;
}
/**
 * If this {@link Launcher} is encapsulating an execution on a specific {@link Computer},
 * return it.
 *
 * <p>
 * Because of the way internal Hudson abstractions are set up (that is, {@link Launcher} only
 * needs a {@link VirtualChannel} to do its job and isn't really required that the channel
 * comes from an existing {@link Computer}), this method may not always return the right
 * {@link Computer} instance.
 *
 * @return
 *      null if this launcher is not created from a {@link Computer} object.
 * @deprecated since 2008-11-16.
 *      See the javadoc for why this is inherently unreliable. If you are trying to
 *      figure out the current {@link Computer} from within a build, use
 *      {@link Computer#currentComputer()}
 */
@Deprecated
public Computer getComputer() {
    // Best-effort reverse lookup: find the computer whose channel is ours.
    for( Computer c : Jenkins.getInstance().getComputers() )
        if(c.getChannel()==channel)
            return c;
    return null;
}
/**
* Builder pattern for configuring a process to launch.
* @since 1.311
*/
public final class ProcStarter {
protected List<String> commands;
protected boolean[] masks;
protected FilePath pwd;
protected OutputStream stdout = NULL_OUTPUT_STREAM, stderr;
protected InputStream stdin = NULL_INPUT_STREAM;
protected String[] envs;
/**
* True to reverse the I/O direction.
*
* For example, if {@link #reverseStdout}==true, then we expose
* {@link InputStream} from {@link Proc} and expect the client to read from it,
* whereas normally we take {@link OutputStream} via {@link #stdout(OutputStream)}
* and feed stdout into that output.
*
* @since 1.399
*/
protected boolean reverseStdin, reverseStdout, reverseStderr;
/**
* Passes a white-space separated single-string command (like "cat abc def") and parse them
* as a command argument. This method also handles quotes.
*/
public ProcStarter cmdAsSingleString(String s) {
return cmds(QuotedStringTokenizer.tokenize(s));
}
public ProcStarter cmds(String... args) {
return cmds(Arrays.asList(args));
}
public ProcStarter cmds(File program, String... args) {
commands = new ArrayList<String>(args.length+1);
commands.add(program.getPath());
commands.addAll(Arrays.asList(args));
return this;
}
public ProcStarter cmds(List<String> args) {
commands = new ArrayList<String>(args);
return this;
}
public ProcStarter cmds(ArgumentListBuilder args) {
commands = args.toList();
masks = args.toMaskArray();
return this;
}
public List<String> cmds() {
return commands;
}
/**
* Hide parts of the command line from being printed to the log.
* @param masks true for each position in {@link #cmds(String[])} which should be masked, false to print
* @return this
* @see ArgumentListBuilder#add(String, boolean)
* @see #maskedPrintCommandLine(List, boolean[], FilePath)
*/
public ProcStarter masks(boolean... masks) {
this.masks = masks;
return this;
}
public boolean[] masks() {
return masks;
}
public ProcStarter pwd(FilePath workDir) {
this.pwd = workDir;
return this;
}
public ProcStarter pwd(File workDir) {
return pwd(new FilePath(workDir));
}
public ProcStarter pwd(String workDir) {
return pwd(new File(workDir));
}
public FilePath pwd() {
return pwd;
}
public ProcStarter stdout(OutputStream out) {
this.stdout = out;
return this;
}
/**
* Sends the stdout to the given {@link TaskListener}.
*/
public ProcStarter stdout(TaskListener out) {
return stdout(out.getLogger());
}
public OutputStream stdout() {
return stdout;
}
/**
* Controls where the stderr of the process goes.
* By default, it's bundled into stdout.
*/
public ProcStarter stderr(OutputStream err) {
this.stderr = err;
return this;
}
public OutputStream stderr() {
return stderr;
}
/**
* Controls where the stdin of the process comes from.
* By default, <tt>/dev/null</tt>.
*/
public ProcStarter stdin(InputStream in) {
this.stdin = in;
return this;
}
public InputStream stdin() {
return stdin;
}
/**
 * Sets the environment variable overrides.
 *
 * <p>
 * In addition to what the current process
 * is inherited (if this is going to be launched from a slave agent, that
 * becomes the "current" process), these variables will be also set.
 */
public ProcStarter envs(Map<String, String> overrides) {
    this.envs = Util.mapToEnv(overrides);
    return this;
}

/**
 * @param overrides
 *      List of "VAR=VALUE". See {@link #envs(Map)} for the semantics.
 */
public ProcStarter envs(String... overrides) {
    if (overrides != null) {
        for (String override : overrides) {
            // fail fast on malformed entries so the error points at the caller
            if (override.indexOf('=') == -1) {
                throw new IllegalArgumentException(override);
            }
        }
    }
    this.envs = overrides;
    return this;
}

/**
 * Gets a copy of the environment variable overrides ("VAR=VALUE" entries).
 *
 * @return a defensive copy of the overrides, or {@code null} if none were ever set.
 *         (Previously this method threw {@link NullPointerException} when no
 *         overrides had been configured.)
 */
public String[] envs() {
    return envs != null ? envs.clone() : null;
}
/**
 * Indicates that the caller will pump {@code stdout} from the child process
 * via {@link Proc#getStdout()} (whereas by default you call {@link #stdout(OutputStream)}
 * and let Jenkins pump stdout into your {@link OutputStream} of choosing.
 *
 * <p>
 * When this method is called, {@link Proc#getStdout()} will read the combined output
 * of {@code stdout/stderr} from the child process, unless {@link #readStderr()} is called
 * separately, which lets the caller read those two streams separately.
 *
 * @since 1.399
 */
public ProcStarter readStdout() {
    reverseStdout = true;
    // any previously-configured output destinations are discarded; the caller pulls instead
    stdout = null;
    stderr = null;
    return this;
}

/**
 * In addition to the effect of {@link #readStdout()}, indicate that the caller will pump {@code stderr}
 * from the child process separately from {@code stdout}. The stderr will be readable from
 * {@link Proc#getStderr()} while {@link Proc#getStdout()} reads from stdout.
 *
 * @since 1.399
 */
public ProcStarter readStderr() {
    reverseStdout = true;
    reverseStderr = true;
    return this;
}

/**
 * Indicates that the caller will directly write to the child process {@link #stdin()} via {@link Proc#getStdin()}.
 * (Whereas by default you call {@link #stdin(InputStream)}
 * and let Jenkins pump your {@link InputStream} of choosing to stdin.)
 *
 * @since 1.399
 */
public ProcStarter writeStdin() {
    reverseStdin = true;
    stdin = null;
    return this;
}
/**
 * Starts the new process as configured.
 */
public Proc start() throws IOException {
    return launch(this);
}

/**
 * Starts the process and waits for its completion.
 *
 * @return the value of {@link Proc#join()} for the completed process
 */
public int join() throws IOException, InterruptedException {
    final Proc proc = start();
    return proc.join();
}
/**
 * Copies a {@link ProcStarter}.
 */
public ProcStarter copy() {
    final ProcStarter clone = new ProcStarter();
    clone.cmds(commands)
         .pwd(pwd)
         .masks(masks)
         .stdin(stdin)
         .stdout(stdout)
         .stderr(stderr)
         .envs(envs);
    // NOTE(review): cmds(...) is handed the live 'commands' reference; if it stores
    // it without copying, both starters share the same list — confirm upstream.
    clone.reverseStdin = this.reverseStdin;
    clone.reverseStderr = this.reverseStderr;
    clone.reverseStdout = this.reverseStdout;
    return clone;
}
}
/**
 * Launches a process by using a {@linkplain ProcStarter builder-pattern} to configure
 * the parameters.
 */
public final ProcStarter launch() {
    // each call hands out a fresh, independently-configurable starter
    return new ProcStarter();
}
/**
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String cmd, Map<String,String> env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd,Util.mapToEnv(env),out,workDir);
}

/**
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, Map<String, String> env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd, Util.mapToEnv(env), out, workDir);
}

/**
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, Map<String, String> env, InputStream in, OutputStream out) throws IOException {
    return launch(cmd, Util.mapToEnv(env), in, out);
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd     The command and all its arguments.
 * @param mask    Which of the command and arguments should be masked from the listener
 * @param env     Environment variable overrides.
 * @param out     stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @param workDir null if the working directory could be anything.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, Map<String, String> env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd, mask, Util.mapToEnv(env), out, workDir);
}

/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd  The command and all its arguments.
 * @param mask Which of the command and arguments should be masked from the listener
 * @param env  Environment variable overrides.
 * @param in   null if there's no input.
 * @param out  stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, Map<String, String> env, InputStream in, OutputStream out) throws IOException {
    return launch(cmd, mask, Util.mapToEnv(env), in, out);
}
/**
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String cmd,String[] env,OutputStream out, FilePath workDir) throws IOException {
    return launch(Util.tokenize(cmd),env,out,workDir);
}

/**
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, String[] env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd, env, null, out, workDir);
}

/**
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, String[] env, InputStream in, OutputStream out) throws IOException {
    return launch(cmd, env, in, out, null);
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd     The command and all its arguments.
 * @param mask    Which of the command and arguments should be masked from the listener
 * @param env     Environment variable overrides.
 * @param out     stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @param workDir null if the working directory could be anything.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, String[] env, OutputStream out, FilePath workDir) throws IOException {
    return launch(cmd, mask, env, null, out, workDir);
}

/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd  The command and all its arguments.
 * @param mask Which of the command and arguments should be masked from the listener
 * @param env  Environment variable overrides.
 * @param in   null if there's no input.
 * @param out  stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public final Proc launch(String[] cmd, boolean[] mask, String[] env, InputStream in, OutputStream out) throws IOException {
    return launch(cmd, mask, env, in, out, null);
}
/**
 * @param env
 *      Environment variable overrides.
 * @param in
 *      null if there's no input.
 * @param workDir
 *      null if the working directory could be anything.
 * @param out
 *      stdout and stderr of the process will be sent to this stream.
 *      the stream won't be closed.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public Proc launch(String[] cmd, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
    // funnel the legacy arguments through the builder API
    return launch(launch().cmds(cmd).envs(env).stdin(in).stdout(out).pwd(workDir));
}
/**
 * Launch a command with optional censoring of arguments from the listener (Note: <strong>The censored portions will
 * remain visible through /proc, pargs, process explorer, etc. i.e. people logged in on the same machine</strong>
 * This version of the launch command just ensures that it is not visible from a build log which is exposed via the
 * web)
 *
 * @param cmd     The command and all its arguments.
 * @param mask    Which of the command and arguments should be masked from the listener
 * @param env     Environment variable overrides.
 * @param in      null if there's no input.
 * @param out     stdout and stderr of the process will be sent to this stream. the stream won't be closed.
 * @param workDir null if the working directory could be anything.
 * @return The process of the command.
 * @throws IOException When there are IO problems.
 *
 * @deprecated as of 1.311
 *      Use {@link #launch()} and its associated builder pattern
 */
@Deprecated
public Proc launch(String[] cmd, boolean[] mask, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
    // funnel the legacy arguments through the builder API
    return launch(launch().cmds(cmd).masks(mask).envs(env).stdin(in).stdout(out).pwd(workDir));
}
/**
 * Primarily invoked from {@link ProcStarter#start()} to start a process with a specific launcher.
 *
 * @param starter the fully-configured description of the process to launch
 * @return a handle to the started process
 * @throws IOException if the process could not be started
 */
public abstract Proc launch(ProcStarter starter) throws IOException;
/**
 * Launches a specified process and connects its input/output to a {@link Channel}, then
 * return it.
 *
 * <p>
 * When the returned channel is terminated, the process will be killed.
 *
 * @param cmd
 *      The command and its arguments to launch.
 * @param out
 *      Where the stderr from the launched process will be sent.
 * @param workDir
 *      The working directory of the new process, or null to inherit
 *      from the current process
 * @param envVars
 *      Environment variable overrides. In addition to what the current process
 *      is inherited (if this is going to be launched from a slave agent, that
 *      becomes the "current" process), these variables will be also set.
 * @return the channel connected to the process's stdin/stdout
 */
public abstract Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String,String> envVars) throws IOException, InterruptedException;
/**
 * Returns true if this {@link Launcher} is going to launch on Unix.
 */
public boolean isUnix() {
    // On Unix the PATH separator is ':'; on Windows it is ';'.
    final char pathSeparator = File.pathSeparatorChar;
    return pathSeparator == ':';
}
/**
 * Calls {@link ProcessTree#killAll(Map)} to kill processes.
 *
 * @param modelEnvVars environment variables used to identify which processes to kill
 * @throws IOException if killing the processes fails
 * @throws InterruptedException if the calling thread is interrupted while killing
 */
public abstract void kill(Map<String,String> modelEnvVars) throws IOException, InterruptedException;
/**
 * Prints out the command line to the listener so that users know what we are doing.
 */
protected final void printCommandLine(String[] cmd, FilePath workDir) {
    final StringBuilder line = new StringBuilder();
    if (workDir != null) {
        line.append('[');
        if (showFullPath) {
            line.append(workDir.getRemote());
        } else {
            // keep only the last path token for brevity
            line.append(workDir.getRemote().replaceFirst("^.+[/\\\\]", ""));
        }
        line.append("] ");
    }
    line.append('$');
    for (String word : cmd) {
        line.append(' ');
        if (word.indexOf(' ') >= 0) {
            // quote words containing spaces; fall back to single quotes when the
            // word itself already contains a double quote
            final char quote = (word.indexOf('"') >= 0) ? '\'' : '"';
            line.append(quote).append(word).append(quote);
        } else {
            line.append(word);
        }
    }
    listener.getLogger().println(line.toString());
}
/**
 * Prints out the command line to the listener with some portions masked to prevent sensitive information from being
 * recorded on the listener.
 *
 * @param cmd The commands
 * @param mask An array of booleans which control whether a cmd element should be masked (<code>true</code>) or
 *             remain unmasked (<code>false</code>).
 * @param workDir The work dir.
 */
protected final void maskedPrintCommandLine(List<String> cmd, boolean[] mask, FilePath workDir) {
    if (mask == null) {
        // nothing to hide; print verbatim
        printCommandLine(cmd.toArray(new String[cmd.size()]), workDir);
        return;
    }

    assert mask.length == cmd.size();
    final String[] display = new String[cmd.size()];
    for (int i = 0; i < display.length; i++) {
        display[i] = mask[i] ? "********" : cmd.get(i);
    }
    printCommandLine(display, workDir);
}

protected final void maskedPrintCommandLine(String[] cmd, boolean[] mask, FilePath workDir) {
    maskedPrintCommandLine(Arrays.asList(cmd), mask, workDir);
}
/**
 * Returns a decorated {@link Launcher} for the given node.
 */
public final Launcher decorateFor(Node node) {
    // thread this launcher through every registered decorator in turn
    Launcher decorated = this;
    for (LauncherDecorator d : LauncherDecorator.all()) {
        decorated = d.decorate(decorated, node);
    }
    return decorated;
}
/**
 * Returns a decorated {@link Launcher} that puts the given set of arguments as a prefix to any commands
 * that it invokes.
 *
 * @since 1.299
 */
public final Launcher decorateByPrefix(final String... prefix) {
    final Launcher outer = this;
    return new Launcher(outer) {
        @Override
        public boolean isUnix() {
            return outer.isUnix();
        }

        @Override
        public Proc launch(ProcStarter starter) throws IOException {
            // splice the prefix in front of the configured command line
            starter.commands.addAll(0, Arrays.asList(prefix));
            if (starter.masks != null) {
                // keep the mask array aligned with the now-longer command line
                starter.masks = prefix(starter.masks);
            }
            return outer.launch(starter);
        }

        @Override
        public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String, String> envVars) throws IOException, InterruptedException {
            return outer.launchChannel(prefix(cmd), out, workDir, envVars);
        }

        @Override
        public void kill(Map<String, String> modelEnvVars) throws IOException, InterruptedException {
            outer.kill(modelEnvVars);
        }

        /** Prepends the prefix arguments to the given command array. */
        private String[] prefix(String[] args) {
            final String[] combined = new String[args.length + prefix.length];
            System.arraycopy(prefix, 0, combined, 0, prefix.length);
            System.arraycopy(args, 0, combined, prefix.length, args.length);
            return combined;
        }

        /** Shifts the mask array; the new leading prefix slots stay false (unmasked). */
        private boolean[] prefix(boolean[] args) {
            final boolean[] combined = new boolean[args.length + prefix.length];
            System.arraycopy(args, 0, combined, prefix.length, args.length);
            return combined;
        }
    };
}
/**
 * Returns a decorated {@link Launcher} that automatically adds the specified environment
 * variables.
 *
 * Those that are specified in {@link ProcStarter#envs(String...)} will take precedence over
 * what's specified here.
 *
 * @since 1.489
 */
public final Launcher decorateByEnv(EnvVars _env) {
    final EnvVars env = new EnvVars(_env);
    final Launcher outer = this;
    return new Launcher(outer) {
        @Override
        public boolean isUnix() {
            return outer.isUnix();
        }

        @Override
        public Proc launch(ProcStarter starter) throws IOException {
            final EnvVars merged = new EnvVars(env);
            if (starter.envs != null) {
                // starter-specified entries are applied last, so they win
                for (String entry : starter.envs) {
                    merged.addLine(entry);
                }
            }
            starter.envs = Util.mapToEnv(merged);
            return outer.launch(starter);
        }

        @Override
        public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String, String> envVars) throws IOException, InterruptedException {
            final EnvVars merged = new EnvVars(env);
            merged.putAll(envVars);
            return outer.launchChannel(cmd, out, workDir, merged);
        }

        @Override
        public void kill(Map<String, String> modelEnvVars) throws IOException, InterruptedException {
            outer.kill(modelEnvVars);
        }
    };
}
/**
 * {@link Launcher} that launches process locally.
 */
public static class LocalLauncher extends Launcher {
    public LocalLauncher(TaskListener listener) {
        this(listener, FilePath.localChannel);
    }

    public LocalLauncher(TaskListener listener, VirtualChannel channel) {
        super(listener, channel);
    }

    @Override
    public Proc launch(ProcStarter ps) throws IOException {
        // echo the (possibly masked) command line to the listener before starting
        maskedPrintCommandLine(ps.commands, ps.masks, ps.pwd);

        // merge the starter's overrides into the current process environment
        EnvVars jobEnv = inherit(ps.envs);

        // replace variables in command line
        String[] jobCmd = new String[ps.commands.size()];
        for ( int idx = 0 ; idx < jobCmd.length; idx++ )
        jobCmd[idx] = jobEnv.expand(ps.commands.get(idx));

        // SELFPUMP_* sentinels tell LocalProc that the caller will pump the
        // corresponding stream via the returned Proc (see readStdout()/writeStdin())
        return new LocalProc(jobCmd, Util.mapToEnv(jobEnv),
        ps.reverseStdin ?LocalProc.SELFPUMP_INPUT:ps.stdin,
        ps.reverseStdout?LocalProc.SELFPUMP_OUTPUT:ps.stdout,
        ps.reverseStderr?LocalProc.SELFPUMP_OUTPUT:ps.stderr,
        toFile(ps.pwd));
    }

    // null-safe conversion of a FilePath to a local java.io.File
    private File toFile(FilePath f) {
        return f==null ? null : new File(f.getRemote());
    }

    public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String,String> envVars) throws IOException {
        printCommandLine(cmd, workDir);

        ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.directory(toFile(workDir));
        if (envVars!=null) pb.environment().putAll(envVars);

        return launchChannel(out, pb);
    }

    @Override
    public void kill(Map<String, String> modelEnvVars) throws InterruptedException {
        ProcessTree.get().killAll(modelEnvVars);
    }

    /**
     * @param out
     *      Where the stderr from the launched process will be sent.
     */
    public Channel launchChannel(OutputStream out, ProcessBuilder pb) throws IOException {
        // the cookie env vars let ProcessTree find (and later kill) this process tree
        final EnvVars cookie = EnvVars.createCookie();
        pb.environment().putAll(cookie);

        final Process proc = pb.start();

        // stderr is pumped to 'out' on a dedicated thread; stdout/stdin carry the channel traffic
        final Thread t2 = new StreamCopyThread(pb.command()+": stderr copier", proc.getErrorStream(), out);
        t2.start();

        return new Channel("locally launched channel on "+ pb.command(),
        Computer.threadPoolForRemoting, proc.getInputStream(), proc.getOutputStream(), out) {

        /**
         * Kill the process when the channel is severed.
         */
        @Override
        public synchronized void terminate(IOException e) {
            super.terminate(e);
            ProcessTree pt = ProcessTree.get();
            try {
                // kill via the cookie planted in the environment above
                pt.killAll(proc,cookie);
            } catch (InterruptedException x) {
                LOGGER.log(Level.INFO, "Interrupted", x);
            }
        }

        @Override
        public synchronized void close() throws IOException {
            super.close();
            // wait for all the output from the process to be picked up
            try {
                t2.join();
            } catch (InterruptedException e) {
                // process the interrupt later
                Thread.currentThread().interrupt();
            }
        }
        };
    }
}
/**
 * A {@link Launcher} stand-in that refuses to launch anything; useful where a
 * launcher instance is required but no real process may ever be started.
 */
@Restricted(NoExternalUse.class)
public static class DummyLauncher extends Launcher {

    public DummyLauncher(TaskListener listener) {
        super(listener, null);
    }

    @Override
    public Proc launch(ProcStarter starter) throws IOException {
        throw new IOException("Can not call launch on a dummy launcher.");
    }

    @Override
    public Channel launchChannel(String[] cmd, OutputStream out, FilePath workDir, Map<String, String> envVars) throws IOException, InterruptedException {
        throw new IOException("Can not call launchChannel on a dummy launcher.");
    }

    @Override
    public void kill(Map<String, String> modelEnvVars) throws IOException, InterruptedException {
        // Intentionally a no-op: there is never a real process to kill.
    }
}
/**
 * Launches processes remotely by using the given channel.
 */
public static class RemoteLauncher extends Launcher {
    // whether the remote side is a Unix machine (decided by the caller, not probed)
    private final boolean isUnix;

    public RemoteLauncher(TaskListener listener, VirtualChannel channel, boolean isUnix) {
        super(listener, channel);
        this.isUnix = isUnix;
    }

    public Proc launch(ProcStarter ps) throws IOException {
        // wrap local streams so they can be used from the remote side;
        // CloseProofOutputStream presumably shields the local stream from remote close() — TODO confirm
        final OutputStream out = ps.stdout == null ? null : new RemoteOutputStream(new CloseProofOutputStream(ps.stdout));
        final OutputStream err = ps.stderr==null ? null : new RemoteOutputStream(new CloseProofOutputStream(ps.stderr));
        // the NULL_INPUT_STREAM sentinel is translated to null so no stream is exported needlessly
        final InputStream in = (ps.stdin==null || ps.stdin==NULL_INPUT_STREAM) ? null : new RemoteInputStream(ps.stdin,false);
        final String workDir = ps.pwd==null ? null : ps.pwd.getRemote();

        try {
            // the actual fork happens on the remote side; we get back a remoted handle
            return new ProcImpl(getChannel().call(new RemoteLaunchCallable(ps.commands, ps.masks, ps.envs, in, ps.reverseStdin, out, ps.reverseStdout, err, ps.reverseStderr, workDir, listener)));
        } catch (InterruptedException e) {
            throw (IOException)new InterruptedIOException().initCause(e);
        }
    }

    public Channel launchChannel(String[] cmd, OutputStream err, FilePath _workDir, Map<String,String> envOverrides) throws IOException, InterruptedException {
        printCommandLine(cmd, _workDir);

        // process stdout flows back to us through this pipe
        Pipe out = Pipe.createRemoteToLocal();
        final String workDir = _workDir==null ? null : _workDir.getRemote();

        // the returned stream feeds the process stdin on the remote side
        OutputStream os = getChannel().call(new RemoteChannelLaunchCallable(cmd, out, err, workDir, envOverrides));

        return new Channel("remotely launched channel on "+channel,
        Computer.threadPoolForRemoting, out.getIn(), new BufferedOutputStream(os));
    }

    @Override
    public boolean isUnix() {
        return isUnix;
    }

    @Override
    public void kill(final Map<String,String> modelEnvVars) throws IOException, InterruptedException {
        // run the kill on the remote machine where the processes live
        getChannel().call(new KillTask(modelEnvVars));
    }

    /** Remotely executed task that kills all processes matching the given env vars. */
    private static final class KillTask implements Callable<Void,RuntimeException> {
        private final Map<String, String> modelEnvVars;

        public KillTask(Map<String, String> modelEnvVars) {
            this.modelEnvVars = modelEnvVars;
        }

        public Void call() throws RuntimeException {
            try {
                ProcessTree.get().killAll(modelEnvVars);
            } catch (InterruptedException e) {
                // we are asked to terminate early by the caller, so no need to do anything
            }
            return null;
        }

        private static final long serialVersionUID = 1L;
    }

    /** Local facade over a {@link RemoteProcess}; delegates every operation over the channel. */
    public static final class ProcImpl extends Proc {
        private final RemoteProcess process;
        // remoted stdin/stdout/stderr handles, fetched once at construction
        private final IOTriplet io;

        public ProcImpl(RemoteProcess process) {
            this.process = process;
            this.io = process.getIOtriplet();
        }

        @Override
        public void kill() throws IOException, InterruptedException {
            process.kill();
        }

        @Override
        public int join() throws IOException, InterruptedException {
            return process.join();
        }

        @Override
        public boolean isAlive() throws IOException, InterruptedException {
            return process.isAlive();
        }

        @Override
        public InputStream getStdout() {
            return io.stdout;
        }

        @Override
        public InputStream getStderr() {
            return io.stderr;
        }

        @Override
        public OutputStream getStdin() {
            return io.stdin;
        }
    }
}
/**
 * Bundle of the three remoted standard streams of a process, as returned by
 * {@link RemoteProcess#getIOtriplet()}. A field is only populated when the
 * corresponding reverse* mode was requested on the {@link ProcStarter}.
 */
public static class IOTriplet implements Serializable {
    // streams the caller reads from (the process's stdout and stderr)
    InputStream stdout,stderr;
    // stream the caller writes to (the process's stdin)
    OutputStream stdin;
    private static final long serialVersionUID = 1L;
}
/**
 * Remoting interface of a remote process
 */
public interface RemoteProcess {
    /** Waits for the process to complete and returns its exit code. */
    int join() throws InterruptedException, IOException;
    /** Kills the remote process. */
    void kill() throws IOException, InterruptedException;
    /** @return true while the remote process is still running. */
    boolean isAlive() throws IOException, InterruptedException;
    /** @return the remoted stdin/stdout/stderr handles of the process. */
    IOTriplet getIOtriplet();
}
private static class RemoteLaunchCallable implements Callable<RemoteProcess,IOException> {
private final List<String> cmd;
private final boolean[] masks;
private final String[] env;
private final InputStream in;
private final OutputStream out;
private final OutputStream err;
private final String workDir;
private final TaskListener listener;
private final boolean reverseStdin, reverseStdout, reverseStderr;
RemoteLaunchCallable(List<String> cmd, boolean[] masks, String[] env, InputStream in, boolean reverseStdin, OutputStream out, boolean reverseStdout, OutputStream err, boolean reverseStderr, String workDir, TaskListener listener) {
this.cmd = new ArrayList<String>(cmd);
this.masks = masks;
this.env = env;
this.in = in;
this.out = out;
this.err = err;
this.workDir = workDir;
this.listener = listener;
this.reverseStdin = reverseStdin;
this.reverseStdout = reverseStdout;
this.reverseStderr = reverseStderr;
}
public RemoteProcess call() throws IOException {
Launcher.ProcStarter ps = new LocalLauncher(listener).launch();
ps.cmds(cmd).masks(masks).envs(env).stdin(in).stdout(out).stderr(err);
if(workDir!=null) ps.pwd(workDir);
if (reverseStdin) ps.writeStdin();
if (reverseStdout) ps.readStdout();
if (reverseStderr) ps.readStderr();
final Proc p = ps.start();
return Channel.current().export(RemoteProcess.class,new RemoteProcess() {
public int join() throws InterruptedException, IOException {
try {
return p.join();
} finally {
// make sure I/O is delivered to the remote before we return
try {
Channel.current().syncIO();
} catch (Throwable _) {
// this includes a failure to sync, slave.jar too old, etc
}
}
}
public void kill() throws IOException, InterruptedException {
p.kill();
}
public boolean isAlive() throws IOException, InterruptedException {
return p.isAlive();
}
public IOTriplet getIOtriplet() {
IOTriplet r = new IOTriplet();
if (reverseStdout) r.stdout = new RemoteInputStream(p.getStdout());
if (reverseStderr) r.stderr = new RemoteInputStream(p.getStderr());
if (reverseStdin) r.stdin = new RemoteOutputStream(p.getStdin());
return r;
}
});
}
private static final long serialVersionUID = 1L;
}
/**
 * Remotely executed task that starts a process on the far side and wires its
 * stdout into a {@link Pipe} back to the caller; returns a remoted stream that
 * feeds the process stdin.
 */
private static class RemoteChannelLaunchCallable implements Callable<OutputStream,IOException> {
    private final String[] cmd;
    // carries the process stdout back to the requesting side
    private final Pipe out;
    private final String workDir;
    // remoted so the launched side can write stderr straight to the caller
    private final OutputStream err;
    private final Map<String,String> envOverrides;

    public RemoteChannelLaunchCallable(String[] cmd, Pipe out, OutputStream err, String workDir, Map<String,String> envOverrides) {
        this.cmd = cmd;
        this.out = out;
        this.err = new RemoteOutputStream(err);
        this.workDir = workDir;
        this.envOverrides = envOverrides;
    }

    public OutputStream call() throws IOException {
        // launch with the inherited+overridden environment in the requested directory
        Process p = Runtime.getRuntime().exec(cmd,
        Util.mapToEnv(inherit(envOverrides)),
        workDir == null ? null : new File(workDir));

        List<String> cmdLines = Arrays.asList(cmd);
        // NOTE(review): the thread label says "stdin copier" but this copies the
        // process *stdout* into the pipe — confirm whether the label is intentional.
        new StreamCopyThread("stdin copier for remote agent on "+cmdLines,
        p.getInputStream(), out.getOut()).start();
        new StreamCopyThread("stderr copier for remote agent on "+cmdLines,
        p.getErrorStream(), err).start();

        // TODO: don't we need to join?

        // the caller writes to this stream to feed the process stdin
        return new RemoteOutputStream(p.getOutputStream());
    }

    private static final long serialVersionUID = 1L;
}
/**
 * Expands the list of environment variables by inheriting current env variables.
 *
 * @param env entries of the form {@code "VAR=VALUE"}; may be {@code null}
 * @return the master environment overridden with the given entries
 * @throws IllegalArgumentException if an entry contains no {@code '='}
 *         (previously this surfaced as an obscure {@link StringIndexOutOfBoundsException})
 */
private static EnvVars inherit(String[] env) {
    // convert String[] to Map first
    EnvVars m = new EnvVars();
    if(env!=null) {
        for (String e : env) {
            int index = e.indexOf('=');
            if (index == -1) {
                // consistent with ProcStarter.envs(String...): reject malformed entries
                // with a clear error naming the offending entry
                throw new IllegalArgumentException(e);
            }
            m.put(e.substring(0,index), e.substring(index+1));
        }
    }
    // then do the inheritance
    return inherit(m);
}
/**
 * Expands the list of environment variables by inheriting current env variables.
 */
private static EnvVars inherit(Map<String,String> overrides) {
    // start from the master's environment, then layer the overrides on top
    final EnvVars merged = new EnvVars(EnvVars.masterEnvVars);
    merged.overrideExpandingAll(overrides);
    return merged;
}
/**
 * Debug option to display full current path instead of just the last token.
 */
public static boolean showFullPath = false;

// shared empty-stdin sentinel; RemoteLauncher.launch() treats it the same as null
private static final NullInputStream NULL_INPUT_STREAM = new NullInputStream(0);

private static final Logger LOGGER = Logger.getLogger(Launcher.class.getName());
}
| [FIXED JENKINS-19454] - Added DecoratedLauncher implementation to the core.
This launcher allows subclasses to implement only the methods they want to override.
Originally, this launcher has been implemented in Custom Tools Plugin, but there are many duplicates in other plugins => it would be useful to have it in Jenkins core.
Resolves https://issues.jenkins-ci.org/browse/JENKINS-19454
Signed-off-by: Oleg Nenashev <[email protected]>
| core/src/main/java/hudson/Launcher.java | [FIXED JENKINS-19454] - Added DecoratedLauncher implementation to the core. This launcher Allows subclasses to only implement methods they want to override. Originally, this launcher has been implemented in Custom Tools Plugin, but there are many duplicates in other plugins => it would be useful to have it in Jenkins core. | <ide><path>ore/src/main/java/hudson/Launcher.java
<ide> }
<ide> }
<ide> }
<add>
<add> /**
<add> * A launcher which delegates to a provided inner launcher.
<add> * Allows subclasses to only implement methods they want to override.
<add> * Originally, this launcher has been implemented in
<add> * <a href="https://wiki.jenkins-ci.org/display/JENKINS/Custom+Tools+Plugin">
<add> * Custom Tools Plugin</a>.
<add> *
<add> * @author rcampbell
<add> * @author Oleg Nenashev, Synopsys Inc.
<add> * @since TODO: define version
<add> */
<add> public static class DecoratedLauncher extends Launcher {
<add>
<add> private Launcher inner = null;
<add>
<add> public DecoratedLauncher(Launcher inner) {
<add> super(inner);
<add> this.inner = inner;
<add> }
<add>
<add> @Override
<add> public Proc launch(ProcStarter starter) throws IOException {
<add> return inner.launch(starter);
<add> }
<add>
<add> @Override
<add> public Channel launchChannel(String[] cmd, OutputStream out,
<add> FilePath workDir, Map<String, String> envVars) throws IOException,
<add> InterruptedException {
<add> return inner.launchChannel(cmd, out, workDir, envVars);
<add> }
<add>
<add> @Override
<add> public void kill(Map<String, String> modelEnvVars) throws IOException,
<add> InterruptedException {
<add> inner.kill(modelEnvVars);
<add> }
<add>
<add> @Override
<add> public boolean isUnix() {
<add> return inner.isUnix();
<add> }
<add>
<add> @Override
<add> public Proc launch(String[] cmd, boolean[] mask, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
<add> return inner.launch(cmd, mask, env, in, out, workDir);
<add> }
<add>
<add> @Override
<add> public Computer getComputer() {
<add> return inner.getComputer();
<add> }
<add>
<add> @Override
<add> public TaskListener getListener() {
<add> return inner.getListener();
<add> }
<add>
<add> @Override
<add> public String toString() {
<add> return super.toString() + "; decorates " + inner.toString();
<add> }
<add>
<add> @Override
<add> public VirtualChannel getChannel() {
<add> return inner.getChannel();
<add> }
<add>
<add> @Override
<add> public Proc launch(String[] cmd, String[] env, InputStream in, OutputStream out, FilePath workDir) throws IOException {
<add> return inner.launch(cmd, env, in, out, workDir);
<add> }
<add>
<add> /**
<add> * Gets nested launcher.
<add> * @return Inner launcher
<add> */
<add> public Launcher getInner() {
<add> return inner;
<add> }
<add> }
<ide>
<ide> public static class IOTriplet implements Serializable {
<ide> InputStream stdout,stderr; |
|
Java | mit | 5c29bab6706c003408a05984bd55dceccc9305ed | 0 | UCDGeeks/RealEstate_System | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
/**
*
* @author Sajeeka Mayomi
*/
public class HouseFileNew {
private String[] readArray;
private String[] writeArray;
public void read() {
int flag = SortedList.find(Integer.parseInt(writeArray[2]));
if (flag != -1) {
try {
FileReader inputFile = new FileReader(filePath);
BufferedReader reader = new BufferedReader(inputFile);
int i = 0;
while ((readArray[i] = reader.readLine()) != null) {
System.out.println(readArray[i++]);
}
reader.close();
} catch (IOException e) {
System.out.println("Cannot Reading the file Text.txt");
}
}
}
public void write() {
try {
File fileObject = new File(filePath);
FileOutputStream fileOutputStream = new FileOutputStream(fileObject);
OutputStreamWriter outputStreamWriter = new OutputStreamWriter(fileOutputStream);
Writer writerObject = new BufferedWriter(outputStreamWriter);
for (int i = 0; i < writeArray.length && writeArray[i] != null; i++) {
writerObject.write(writeArray[i] + "\n");
}
writerObject.close();
} catch (IOException e) {
System.out.println("Problem writing to the file " + filePath);
}
}
}
| src/models/HouseFileNew.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package controller;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
/**
*
* @author Sajeeka Mayomi
*/
public class HouseFileNew {

    // NOTE(review): never initialized — read() will NPE on assignment. TODO confirm intended size.
    private String[] readArray;
    // NOTE(review): never initialized — writeArray[2] below will NPE.
    private String[] writeArray;

    /**
     * Reads every line of {@code filePath} into {@code readArray}, echoing each line,
     * but only when the id in {@code writeArray[2]} is found by {@code SortedList.find}.
     * NOTE(review): 'filePath' is not declared anywhere in this class — this code does
     * not compile as-is; presumably a field was lost in an edit.
     */
    public void read() {
        int flag = SortedList.find(Integer.parseInt(writeArray[2]));
        if (flag != -1) {
            try {
                FileReader inputFile = new FileReader(filePath);
                BufferedReader reader = new BufferedReader(inputFile);
                int i = 0;
                while ((readArray[i] = reader.readLine()) != null) {
                    System.out.println(readArray[i++]);
                }
                reader.close();
            } catch (IOException e) {
                // message hard-codes "Text.txt" even though filePath is used above
                System.out.println("Cannot Reading the file Text.txt");
            }
        }
    }

    /**
     * Writes the non-null leading entries of {@code writeArray} to {@code filePath},
     * one per line, truncating any previous content.
     */
    public void write() {
        try {
            File fileObject = new File(filePath);
            FileOutputStream fileOutputStream = new FileOutputStream(fileObject);
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(fileOutputStream);
            Writer writerObject = new BufferedWriter(outputStreamWriter);
            for (int i = 0; i < writeArray.length && writeArray[i] != null; i++) {
                writerObject.write(writeArray[i] + "\n");
            }
            writerObject.close();
        } catch (IOException e) {
            System.out.println("Problem writing to the file " + filePath);
        }
    }
}
| One correction on a model and JSON Java library added to the folder. | src/models/HouseFileNew.java | One correction on a model and JSON Java library added to the folder. | <ide><path>rc/models/HouseFileNew.java
<ide> * To change this template file, choose Tools | Templates
<ide> * and open the template in the editor.
<ide> */
<del>package controller;
<add>package models;
<ide>
<ide> import java.io.BufferedReader;
<ide> import java.io.BufferedWriter; |
|
Java | apache-2.0 | b2d1cb13d3a8badedd02321eb115ad80b92a25b1 | 0 | gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa | /*
* Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package uk.ac.ebi.gxa.dao;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import oracle.jdbc.OracleTypes;
import oracle.sql.ARRAY;
import oracle.sql.ArrayDescriptor;
import oracle.sql.STRUCT;
import oracle.sql.StructDescriptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.jdbc.core.*;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.simple.SimpleJdbcCall;
import org.springframework.jdbc.core.support.AbstractSqlTypeValue;
import uk.ac.ebi.microarray.atlas.model.*;
import uk.ac.ebi.microarray.atlas.services.ExperimentDAO;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.*;
import static com.google.common.base.Joiner.on;
import static com.google.common.collect.Iterables.partition;
/**
* A data access object designed for retrieving common sorts of data from the atlas database. This DAO should be
* configured with a spring {@link JdbcTemplate} object which will be used to query the database.
*
* @author Tony Burdett
* @author Alexey Filippov
* @author Nataliya Sklyar
* @author Misha Kapushesky
* @author Pavel Kurnosov
* @author Andrey Zorin
* @author Robert Petryszak
* @author Olga Melnichuk
*/
public class AtlasDAO implements ExperimentDAO {
public static final int MAX_QUERY_PARAMS = 10;
private Logger log = LoggerFactory.getLogger(getClass());
private ArrayDesignDAOInterface arrayDesignDAO;
private BioEntityDAOInterface bioEntityDAO;
private JdbcTemplate template;
public void setArrayDesignDAO(ArrayDesignDAOInterface arrayDesignDAO) {
this.arrayDesignDAO = arrayDesignDAO;
}
public void setBioEntityDAO(BioEntityDAOInterface bioEntityDAO) {
this.bioEntityDAO = bioEntityDAO;
}
public void setJdbcTemplate(JdbcTemplate template) {
this.template = template;
}
public List<Experiment> getAllExperiments() {
List<Experiment> results = template.query("SELECT " + ExperimentMapper.FIELDS + " FROM a2_experiment " +
"ORDER BY (" +
" case when loaddate is null " +
" then (select min(loaddate) from a2_experiment) " +
" else loaddate end) desc, " +
" accession", new ExperimentMapper());
loadExperimentAssets(results);
return results;
}
/**
*
* @return All public curated experiments
*/
public Collection<Experiment> getPublicCuratedExperiments() {
return Collections2.filter(getAllExperiments(),
new Predicate<Experiment>() {
public boolean apply(uk.ac.ebi.microarray.atlas.model.Experiment exp) {
return !exp.isPrivate() && exp.isCurated();
}
});
}
/**
* Gets a single experiment from the Atlas Database, queried by the accession of the experiment.
*
* @param accession the experiment's accession number (usually in the format E-ABCD-1234)
* @return an object modelling this experiment
*/
public Experiment getExperimentByAccession(String accession) {
try {
Experiment result = template.queryForObject("SELECT " + ExperimentMapper.FIELDS + " FROM a2_experiment " +
"WHERE accession=?",
new Object[]{accession},
new ExperimentMapper());
loadExperimentAssets(result);
return result;
} catch (IncorrectResultSizeDataAccessException e) {
return null;
}
}
/**
* @param experimentId id of experiment to retrieve
* @return Experiment (without assets) matching experimentId
*/
public Experiment getShallowExperimentById(long experimentId) {
try {
return template.queryForObject("SELECT " +
ExperimentMapper.FIELDS +
"FROM a2_experiment WHERE experimentid=?",
new Object[]{experimentId},
new ExperimentMapper());
} catch (IncorrectResultSizeDataAccessException e) {
log.warn("Experiment id: " + experimentId + ": " + e.getMessage(), e);
return null;
}
}
private void loadExperimentAssets(List<Experiment> results) {
for (Experiment experiment : results) {
loadExperimentAssets(experiment);
}
}
private void loadExperimentAssets(Experiment experiment) {
experiment.addAssets(template.query("SELECT a.name, a.filename, a.description" + " FROM a2_experiment e " +
" JOIN a2_experimentasset a ON a.ExperimentID = e.ExperimentID " +
" WHERE e.accession=? ORDER BY a.ExperimentAssetID",
new Object[]{experiment.getAccession()},
new RowMapper<Asset>() {
public Asset mapRow(ResultSet resultSet, int i) throws SQLException {
return new Asset(resultSet.getString(1),
resultSet.getString(2),
resultSet.getString(3));
}
}));
}
public List<Experiment> getExperimentByArrayDesign(String accession) {
List<Experiment> results = template.query("SELECT " + ExperimentMapper.FIELDS + " FROM a2_experiment " +
"WHERE experimentid IN " +
" (SELECT experimentid FROM a2_assay a, a2_arraydesign ad " +
" WHERE a.arraydesignid=ad.arraydesignid AND ad.accession=?)",
new Object[]{accession},
new ExperimentMapper());
loadExperimentAssets(results);
return results;
}
/**
* @param experimentAccession the accession of experiment to retrieve assays for
* @return list of assays
* @deprecated Use id instead of accession
*/
@Deprecated
public List<Assay> getAssaysByExperimentAccession(String experimentAccession) {
List<Assay> assays = template.query("SELECT a.accession, e.accession, ad.accession, a.assayid " +
"FROM a2_assay a, a2_experiment e, a2_arraydesign ad " +
"WHERE e.experimentid=a.experimentid " +
"AND a.arraydesignid=ad.arraydesignid" + " " +
"AND e.accession=?",
new Object[]{experimentAccession},
new RowMapper<Assay>() {
public Assay mapRow(ResultSet resultSet, int i) throws SQLException {
Assay assay = new Assay();
assay.setAccession(resultSet.getString(1));
assay.setExperimentAccession(resultSet.getString(2));
assay.setArrayDesignAccession(resultSet.getString(3));
assay.setAssayID(resultSet.getLong(4));
return assay;
}
});
// populate the other info for these assays
if (!assays.isEmpty()) {
fillOutAssays(assays);
}
// and return
return assays;
}
/**
* @param experimentAccession the accession of experiment to retrieve samples for
* @param assayAccession the accession of the assay to retrieve samples for
* @return list of samples
* @deprecated Use ids instead of accessions
*/
@Deprecated
public List<Sample> getSamplesByAssayAccession(String experimentAccession, String assayAccession) {
List<Sample> samples = template.query("SELECT " + SampleMapper.FIELDS +
" FROM a2_sample s, a2_assay a, a2_assaysample ass, a2_experiment e, a2_organism org " +
"WHERE s.sampleid=ass.sampleid " +
"AND a.assayid=ass.assayid " +
"AND e.experimentid=a.experimentid " +
"AND s.organismid=org.organismid " +
"AND e.accession=? " +
"AND a.accession=? ", new Object[]{experimentAccession, assayAccession}, new SampleMapper());
// populate the other info for these samples
if (samples.size() > 0) {
fillOutSamples(samples);
}
return samples;
}
public List<Sample> getSamplesByExperimentAccession(String exptAccession) {
List<Sample> samples = template.query("SELECT " + SampleMapper.FIELDS +
" FROM a2_sample s, a2_assay a, a2_assaysample ass, a2_experiment e, a2_organism org " +
"WHERE s.sampleid=ass.sampleid " +
"AND a.assayid=ass.assayid " +
"AND a.experimentid=e.experimentid " +
"AND s.organismid=org.organismid " +
"AND e.accession=?", new Object[]{exptAccession}, new SampleMapper());
// populate the other info for these samples
if (samples.size() > 0) {
fillOutSamples(samples);
}
return samples;
}
public int getPropertyValueCount() {
return template.queryForInt("SELECT COUNT(DISTINCT name) FROM a2_propertyvalue");
}
public int getFactorValueCount() {
return template.queryForInt("SELECT COUNT(DISTINCT propertyvalueid) FROM a2_assayPV");
}
/**
* Returns all array designs in the underlying datasource. Note that, to reduce query times, this method does NOT
* prepopulate ArrayDesigns with their associated design elements (unlike other methods to retrieve array designs
* more specifically).
*
* @return the list of array designs, not prepopulated with design elements.
*/
public List<ArrayDesign> getAllArrayDesigns() {
return arrayDesignDAO.getAllArrayDesigns();
}
public ArrayDesign getArrayDesignByAccession(String accession) {
return arrayDesignDAO.getArrayDesignByAccession(accession);
}
/**
* @param accession Array design accession
* @return Array design (with no design element and gene ids filled in) corresponding to accession
*/
public ArrayDesign getArrayDesignShallowByAccession(String accession) {
return arrayDesignDAO.getArrayDesignShallowByAccession(accession);
}
public List<OntologyMapping> getOntologyMappingsByOntology(
String ontologyName) {
return template.query("SELECT DISTINCT accession, property, propertyvalue, ontologyterm, experimentid " +
"FROM a2_ontologymapping" + " " +
"WHERE ontologyname=?",
new Object[]{ontologyName},
new ExperimentPropertyMapper() {
public OntologyMapping mapRow(ResultSet resultSet, int i) throws SQLException {
OntologyMapping mapping = super.mapRow(resultSet, i);
mapping.setExperimentId(resultSet.getLong(5));
return mapping;
}
});
}
public List<Property> getAllProperties() {
return template.query("SELECT " + PropertyMapper.FIELDS + " " +
"FROM " + PropertyMapper.TABLES + " " +
"WHERE pv.propertyid=p.propertyid GROUP BY p.name, pv.name", new PropertyMapper());
}
public List<Property> getPropertiesByPropertyName(String propertyName) {
return template.query("SELECT " + PropertyMapper.FIELDS + " " +
"FROM " + PropertyMapper.TABLES + " " +
"WHERE pv.propertyid=p.propertyid AND p.name=? GROUP BY p.name, pv.name", new Object[]{propertyName}, new PropertyMapper());
}
public List<OntologyMapping> getExperimentsToAllProperties() {
return template.query("SELECT experiment, property, value, ontologyterm from cur_ontologymapping " +
"UNION " +
"SELECT distinct ap.experiment, ap.property, ap.value, null " +
"FROM cur_assayproperty ap where not exists " +
"(SELECT 1 from cur_ontologymapping cm " +
"WHERE cm.property = ap.property " +
"AND cm.value = ap.value " +
"AND cm.experiment = ap.experiment)",
new ExperimentPropertyMapper());
}
public AtlasStatistics getAtlasStatistics(final String dataRelease, final String lastReleaseDate) {
// manually count all experiments/genes/assays
AtlasStatistics stats = new AtlasStatistics();
stats.setDataRelease(dataRelease);
stats.setExperimentCount(template.queryForInt("SELECT COUNT(*) FROM a2_experiment"));
stats.setAssayCount(template.queryForInt("SELECT COUNT(*) FROM a2_assay"));
stats.setGeneCount(bioEntityDAO.getGeneCount());
stats.setNewExperimentCount(template.queryForInt("SELECT COUNT(*) FROM a2_experiment WHERE loaddate > to_date(?,'MM-YYYY')", lastReleaseDate));
stats.setPropertyValueCount(getPropertyValueCount());
stats.setFactorValueCount(getFactorValueCount());
return stats;
}
/*
DAO write methods
*/
public void writeLoadDetails(final String accession,
final LoadStage loadStage,
final LoadStatus loadStatus) {
writeLoadDetails(accession, loadStage, loadStatus, LoadType.EXPERIMENT);
}
public void writeLoadDetails(final String accession,
final LoadStage loadStage,
final LoadStatus loadStatus,
final LoadType loadType) {
// execute this procedure...
/*
create or replace procedure load_progress(
experiment_accession varchar
,stage varchar --load, netcdf, similarity, ranking, searchindex
,status varchar --done, pending
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.LOAD_PROGRESS")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("EXPERIMENT_ACCESSION")
.useInParameterNames("STAGE")
.useInParameterNames("STATUS")
.useInParameterNames("LOAD_TYPE")
.declareParameters(new SqlParameter("EXPERIMENT_ACCESSION", Types.VARCHAR))
.declareParameters(new SqlParameter("STAGE", Types.VARCHAR))
.declareParameters(new SqlParameter("STATUS", Types.VARCHAR))
.declareParameters(new SqlParameter("LOAD_TYPE", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource()
.addValue("EXPERIMENT_ACCESSION", accession)
.addValue("STAGE", loadStage.toString().toLowerCase())
.addValue("STATUS", loadStatus.toString().toLowerCase())
.addValue("LOAD_TYPE", loadType.toString().toLowerCase());
log.debug("Invoking load_progress stored procedure with parameters (" + accession + ", " + loadStage + ", " +
loadStatus + ", " + loadType + ")");
procedure.execute(params);
log.debug("load_progress stored procedure completed");
}
/**
* Writes the given experiment to the database, using the default transaction strategy configured for the
* datasource.
*
* @param experiment the experiment to write
*/
public void writeExperiment(final Experiment experiment) {
// execute this procedure...
/*
PROCEDURE "A2_EXPERIMENTSET" (
TheAccession varchar2
,TheDescription varchar2
,ThePerformer varchar2
,TheLab varchar2
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_EXPERIMENTSET")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("ACCESSION")
.useInParameterNames("DESCRIPTION")
.useInParameterNames("PERFORMER")
.useInParameterNames("LAB")
.useInParameterNames("PMID")
.useInParameterNames("ABSTRACT")
.declareParameters(new SqlParameter("ACCESSION", Types.VARCHAR))
.declareParameters(new SqlParameter("DESCRIPTION", Types.VARCHAR))
.declareParameters(new SqlParameter("PERFORMER", Types.VARCHAR))
.declareParameters(new SqlParameter("LAB", Types.VARCHAR))
.declareParameters(new SqlParameter("PMID", Types.VARCHAR))
.declareParameters(new SqlParameter("ABSTRACT", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("ACCESSION", experiment.getAccession())
.addValue("DESCRIPTION", experiment.getDescription())
.addValue("PERFORMER", experiment.getPerformer())
.addValue("LAB", experiment.getLab())
.addValue("PMID", experiment.getPubmedID())
.addValue("ABSTRACT", experiment.getArticleAbstract());
procedure.execute(params);
}
/**
* Writes the given assay to the database, using the default transaction strategy configured for the datasource.
*
* @param assay the assay to write
*/
public void writeAssay(final Assay assay) {
// execute this procedure...
/*
PROCEDURE "A2_ASSAYSET" (
TheAccession varchar2
,TheExperimentAccession varchar2
,TheArrayDesignAccession varchar2
,TheProperties PropertyTable
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_ASSAYSET")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("ACCESSION")
.useInParameterNames("EXPERIMENTACCESSION")
.useInParameterNames("ARRAYDESIGNACCESSION")
.useInParameterNames("PROPERTIES")
.declareParameters(
new SqlParameter("ACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("EXPERIMENTACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("ARRAYDESIGNACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("PROPERTIES", OracleTypes.ARRAY, "PROPERTYTABLE"));
// map parameters...
List<Property> props = assay.getProperties();
MapSqlParameterSource params = new MapSqlParameterSource();
StringBuffer sb = new StringBuffer();
sb.append("Properties listing for ").append(assay.getAccession()).append(":\n");
for (Property p : props) {
sb.append("\t").append(p.getName()).append("\t\t->\t\t").append(p.getValue()).append("\n");
}
log.debug(sb.toString());
SqlTypeValue propertiesParam =
props.isEmpty() ? null :
convertPropertiesToOracleARRAY(props);
params.addValue("ACCESSION", assay.getAccession())
.addValue("EXPERIMENTACCESSION", assay.getExperimentAccession())
.addValue("ARRAYDESIGNACCESSION", assay.getArrayDesignAccession())
.addValue("PROPERTIES", propertiesParam, OracleTypes.ARRAY, "PROPERTYTABLE");
log.debug("Invoking A2_ASSAYSET with the following parameters..." +
"\n\tassay accession: {}" +
"\n\texperiment: {}" +
"\n\tarray design: {}" +
"\n\tproperties count: {}" +
"\n\texpression value count: {}",
new Object[]{assay.getAccession(), assay.getExperimentAccession(), assay.getArrayDesignAccession(),
props.size(), 0});
// and execute
procedure.execute(params);
}
/**
* Writes the given sample to the database, using the default transaction strategy configured for the datasource.
*
* @param sample the sample to write
* @param experimentAccession experiment
*/
public void writeSample(final Sample sample, final String experimentAccession) {
// execute this procedure...
/*
PROCEDURE "A2_SAMPLESET" (
p_Accession varchar2
, p_Assays AccessionTable
, p_Properties PropertyTable
, p_Species varchar2
, p_Channel varchar2
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_SAMPLESET")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("EXPERIMENTACCESSION")
.useInParameterNames("SAMPLEACCESSION")
.useInParameterNames("ASSAYS")
.useInParameterNames("PROPERTIES")
.useInParameterNames("CHANNEL")
.declareParameters(
new SqlParameter("EXPERIMENTACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("SAMPLEACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("ASSAYS", OracleTypes.ARRAY, "ACCESSIONTABLE"))
.declareParameters(
new SqlParameter("PROPERTIES", OracleTypes.ARRAY, "PROPERTYTABLE"))
.declareParameters(
new SqlParameter("CHANNEL", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource();
SqlTypeValue accessionsParam = sample.getAssayAccessions().isEmpty() ? null :
convertAssayAccessionsToOracleARRAY(sample.getAssayAccessions());
SqlTypeValue propertiesParam = sample.hasNoProperties() ? null
: convertPropertiesToOracleARRAY(sample.getProperties());
params.addValue("EXPERIMENTACCESSION", experimentAccession)
.addValue("SAMPLEACCESSION", sample.getAccession())
.addValue("ASSAYS", accessionsParam, OracleTypes.ARRAY, "ACCESSIONTABLE")
.addValue("PROPERTIES", propertiesParam, OracleTypes.ARRAY, "PROPERTYTABLE")
.addValue("CHANNEL", sample.getChannel());
int assayCount = sample.getAssayAccessions().size();
int propertiesCount = sample.getPropertiesCount();
log.debug("Invoking A2_SAMPLESET with the following parameters..." +
"\n\texperiment accession: {}" +
"\n\tsample accession: {}" +
"\n\tassays count: {}" +
"\n\tproperties count: {}" +
"\n\tspecies: {}" +
"\n\tchannel: {}",
new Object[]{experimentAccession, sample.getAccession(), assayCount, propertiesCount,
sample.getSpecies(),
sample.getChannel()});
// and execute
procedure.execute(params);
}
/**
* Writes array designs and associated data back to the database.
*
* @param arrayDesignBundle an object encapsulating the array design data that must be written to the database
*/
public void writeArrayDesignBundle(ArrayDesignBundle arrayDesignBundle) {
// execute this procedure...
/*
PROCEDURE A2_ARRAYDESIGNSET(
Accession varchar2
,Type varchar2
,Name varchar2
,Provider varchar2
,DesignElements DesignElementTable
);
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_ARRAYDESIGNSET")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("ACCESSION")
.useInParameterNames("TYPE")
.useInParameterNames("NAME")
.useInParameterNames("PROVIDER")
.useInParameterNames("ENTRYPRIORITYLIST")
.useInParameterNames("DESIGNELEMENTS")
.declareParameters(
new SqlParameter("ACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("TYPE", Types.VARCHAR))
.declareParameters(
new SqlParameter("NAME", Types.VARCHAR))
.declareParameters(
new SqlParameter("PROVIDER", Types.VARCHAR))
.declareParameters(
new SqlParameter("ENTRYPRIORITYLIST", OracleTypes.ARRAY, "IDVALUETABLE"))
.declareParameters(
new SqlParameter("DESIGNELEMENTS", OracleTypes.ARRAY, "DESIGNELEMENTTABLE"));
SqlTypeValue designElementsParam =
arrayDesignBundle.getDesignElementNames().isEmpty() ? null :
convertDesignElementsToOracleARRAY(arrayDesignBundle);
SqlTypeValue geneIdentifierPriorityParam = convertToOracleARRAYofIDVALUE(
arrayDesignBundle.getGeneIdentifierNames());
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("ACCESSION", arrayDesignBundle.getAccession())
.addValue("TYPE", arrayDesignBundle.getType())
.addValue("NAME", arrayDesignBundle.getName())
.addValue("PROVIDER", arrayDesignBundle.getProvider())
.addValue("ENTRYPRIORITYLIST", geneIdentifierPriorityParam, OracleTypes.ARRAY, "IDVALUETABLE")
.addValue("DESIGNELEMENTS", designElementsParam, OracleTypes.ARRAY, "DESIGNELEMENTTABLE");
procedure.execute(params);
}
/*
DAO delete methods
*/
/**
* Deletes the experiment with the given accession from the database. If this experiment is not present, this does
* nothing.
*
* @param experimentAccession the accession of the experiment to remove
*/
public void deleteExperiment(final String experimentAccession) {
// execute this procedure...
/*
PROCEDURE A2_EXPERIMENTDELETE(
Accession varchar2
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_EXPERIMENTDELETE")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("ACCESSION")
.declareParameters(new SqlParameter("ACCESSION", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("ACCESSION", experimentAccession);
procedure.execute(params);
}
private void fillOutAssays(List<Assay> assays) {
// map assays to assay id
Map<Long, Assay> assaysByID = new HashMap<Long, Assay>();
for (Assay assay : assays) {
// index this assay
assaysByID.put(assay.getAssayID(), assay);
}
// maps properties to assays
ObjectPropertyMappper assayPropertyMapper = new ObjectPropertyMappper(assaysByID);
// query template for assays
NamedParameterJdbcTemplate namedTemplate = new NamedParameterJdbcTemplate(template);
// if we have more than 'MAX_QUERY_PARAMS' assays, split into smaller queries
final ArrayList<Long> assayIds = new ArrayList<Long>(assaysByID.keySet());
for (List<Long> assayIDsChunk : partition(assayIds, MAX_QUERY_PARAMS)) {
// now query for properties that map to one of the samples in the sublist
MapSqlParameterSource propertyParams = new MapSqlParameterSource();
propertyParams.addValue("assayids", assayIDsChunk);
namedTemplate.query("SELECT apv.assayid,\n" +
" p.name AS property,\n" +
" pv.name AS propertyvalue, 1,\n" +
" wm_concat(t.accession) AS efoTerms\n" +
" FROM a2_property p\n" +
" JOIN a2_propertyvalue pv ON pv.propertyid=p.propertyid\n" +
" JOIN a2_assaypv apv ON apv.propertyvalueid=pv.propertyvalueid\n" +
" LEFT JOIN a2_assaypvontology apvo ON apvo.assaypvid = apv.assaypvid\n" +
" LEFT JOIN a2_ontologyterm t ON apvo.ontologytermid = t.ontologytermid\n" +
" WHERE apv.assayid IN (:assayids)" +
" GROUP BY apvo.assaypvid, apv.assayid, p.name, pv.name", propertyParams, assayPropertyMapper);
}
}
private void fillOutSamples(List<Sample> samples) {
// map samples to sample id
Map<Long, Sample> samplesByID = new HashMap<Long, Sample>();
for (Sample sample : samples) {
samplesByID.put(sample.getSampleID(), sample);
}
// maps properties and assays to relevant sample
final Map<Long, Sample> samplesMap1 = samplesByID;
RowCallbackHandler assaySampleMapper = new RowCallbackHandler() {
Map<Long, Sample> samplesMap = samplesMap1;
public void processRow(ResultSet rs) throws SQLException {
long sampleID = rs.getLong(1);
samplesMap.get(sampleID).addAssayAccession(rs.getString(2));
}
};
ObjectPropertyMappper samplePropertyMapper = new ObjectPropertyMappper(samplesByID);
// query template for samples
NamedParameterJdbcTemplate namedTemplate = new NamedParameterJdbcTemplate(template);
// if we have more than 'MAX_QUERY_PARAMS' samples, split into smaller queries
List<Long> sampleIDs = new ArrayList<Long>(samplesByID.keySet());
for (List<Long> sampleIDsChunk : partition(sampleIDs, MAX_QUERY_PARAMS)) {
// now query for assays that map to one of these samples
MapSqlParameterSource assayParams = new MapSqlParameterSource();
assayParams.addValue("sampleids", sampleIDsChunk);
namedTemplate.query("SELECT s.sampleid, a.accession " +
"FROM a2_assay a, a2_assaysample s " +
"WHERE a.assayid=s.assayid " +
"AND s.sampleid IN (:sampleids)", assayParams, assaySampleMapper);
// now query for properties that map to one of these samples
log.trace("Querying for properties where sample IN (" + on(',').join(sampleIDsChunk) + ")");
MapSqlParameterSource propertyParams = new MapSqlParameterSource();
propertyParams.addValue("sampleids", sampleIDsChunk);
namedTemplate.query("SELECT spv.sampleid,\n" +
" p.name AS property,\n" +
" pv.name AS propertyvalue, 0,\n" +
" wm_concat(t.accession) AS efoTerms\n" +
" FROM a2_property p\n" +
" JOIN a2_propertyvalue pv ON pv.propertyid=p.propertyid\n" +
" JOIN a2_samplepv spv ON spv.propertyvalueid=pv.propertyvalueid\n" +
" LEFT JOIN a2_samplepvontology spvo ON spvo.SamplePVID = spv.SAMPLEPVID\n" +
" LEFT JOIN a2_ontologyterm t ON spvo.ontologytermid = t.ontologytermid\n" +
" WHERE spv.sampleid IN (:sampleids)" +
" GROUP BY spvo.SamplePVID, spv.SAMPLEID, p.name, pv.name ", propertyParams, samplePropertyMapper);
}
}
@Deprecated
private SqlTypeValue convertPropertiesToOracleARRAY(final List<Property> properties) {
return new AbstractSqlTypeValue() {
protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
// this should be creating an oracle ARRAY of properties
// the array of STRUCTS representing each property
Object[] propArrayValues;
if (properties != null && !properties.isEmpty()) {
propArrayValues = new Object[properties.size()];
// convert each property to an oracle STRUCT
int i = 0;
Object[] propStructValues = new Object[4];
for (Property property : properties) {
// array representing the values to go in the STRUCT
propStructValues[0] = property.getAccession();
propStructValues[1] = property.getName();
propStructValues[2] = property.getValue();
propStructValues[3] = property.getEfoTerms();
// descriptor for PROPERTY type
StructDescriptor structDescriptor = StructDescriptor.createDescriptor("PROPERTY", connection);
// each array value is a new STRUCT
propArrayValues[i++] = new STRUCT(structDescriptor, connection, propStructValues);
}
// created the array of STRUCTs, group into ARRAY
return createArray(connection, typeName, propArrayValues);
} else {
// throw an SQLException, as we cannot create a ARRAY with an empty array
throw new SQLException("Unable to create an ARRAY from an empty list of properties");
}
}
};
}
private Object createArray(Connection connection, String typeName, Object... propArrayValues) throws SQLException {
ArrayDescriptor arrayDescriptor = ArrayDescriptor.createDescriptor(typeName, connection);
return new ARRAY(arrayDescriptor, connection, propArrayValues);
}
private <T> SqlTypeValue convertToOracleARRAYofIDVALUE(final Collection<T> list) {
return new AbstractSqlTypeValue() {
protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
// this should be creating an oracle ARRAY of properties
// the array of STRUCTS representing each property
Object[] strArrayValues;
if (list != null && !list.isEmpty()) {
strArrayValues = new Object[list.size()];
// convert each property to an oracle STRUCT
int i = 0;
Object[] propStructValues = new Object[2];
for (T elt : list) {
// array representing the values to go in the STRUCT
propStructValues[0] = i;
propStructValues[1] = elt;
// descriptor for PROPERTY type
StructDescriptor structDescriptor = StructDescriptor.createDescriptor("IDVALUE", connection);
// each array value is a new STRUCT
strArrayValues[i++] = new STRUCT(structDescriptor, connection, propStructValues);
}
// created the array of STRUCTs, group into ARRAY
return createArray(connection, typeName, strArrayValues);
} else {
// throw an SQLException, as we cannot create a ARRAY with an empty array
throw new SQLException("Unable to create an ARRAY from an empty list");
}
}
};
}
private SqlTypeValue convertAssayAccessionsToOracleARRAY(final Set<String> assayAccessions) {
return new AbstractSqlTypeValue() {
protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
Object[] accessions;
if (assayAccessions != null && !assayAccessions.isEmpty()) {
accessions = new Object[assayAccessions.size()];
int i = 0;
for (String assayAccession : assayAccessions) {
accessions[i++] = assayAccession;
}
// created the array of STRUCTs, group into ARRAY
return createArray(connection, typeName, accessions);
} else {
// throw an SQLException, as we cannot create a ARRAY with an empty array
throw new SQLException("Unable to create an ARRAY from an empty list of accessions");
}
}
};
}
private SqlTypeValue convertDesignElementsToOracleARRAY(final ArrayDesignBundle arrayDesignBundle) {
return new AbstractSqlTypeValue() {
protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
List<Object> deArrayValues = new ArrayList<Object>();
StructDescriptor structDescriptor =
StructDescriptor.createDescriptor("DESIGNELEMENT2", connection);
// loop over all design element names
for (String designElementName : arrayDesignBundle.getDesignElementNames()) {
// loop over the mappings of database entry 'type' to the set of values
Map<String, List<String>> dbeMappings =
arrayDesignBundle.getDatabaseEntriesForDesignElement(designElementName);
for (Map.Entry<String, List<String>> entry : dbeMappings.entrySet()) {
// loop over the enumeration of database entry values
List<String> databaseEntryValues = entry.getValue();
for (String databaseEntryValue : databaseEntryValues) {
// create a new row in the table for each combination
Object[] deStructValues = new Object[3];
deStructValues[0] = designElementName;
deStructValues[1] = entry.getKey();
deStructValues[2] = databaseEntryValue;
deArrayValues.add(new STRUCT(structDescriptor, connection, deStructValues));
}
}
}
return createArray(connection, typeName, deArrayValues.toArray());
}
};
}
public int getCountAssaysForExperimentID(long experimentID) {
return template.queryForInt(
"SELECT COUNT(DISTINCT ASSAYID) FROM VWEXPERIMENTASSAY WHERE EXPERIMENTID=?",
experimentID);
}
public List<String> getSpeciesForExperiment(long experimentId) {
return bioEntityDAO.getSpeciesForExperiment(experimentId);
}
/**
 * Maps one row of the a2_experiment projection (see {@link #FIELDS}) onto an
 * {@link Experiment} bean; JDBC column indexes follow the FIELDS order exactly.
 */
private static class ExperimentMapper implements RowMapper<Experiment> {
    private static final String FIELDS = " accession, description, performer, lab, " +
            " experimentid, loaddate, pmid, abstract, releasedate, private, curated ";

    public Experiment mapRow(ResultSet rs, int rowNum) throws SQLException {
        final Experiment exp = new Experiment();
        // 1-based positional reads, in the same order as FIELDS
        exp.setAccession(rs.getString(1));
        exp.setDescription(rs.getString(2));
        exp.setPerformer(rs.getString(3));
        exp.setLab(rs.getString(4));
        exp.setExperimentID(rs.getLong(5));
        exp.setLoadDate(rs.getDate(6));
        exp.setPubmedID(rs.getString(7));
        exp.setArticleAbstract(rs.getString(8));
        exp.setReleaseDate(rs.getDate(9));
        exp.setPrivate(rs.getBoolean(10));
        exp.setCurated(rs.getBoolean(11));
        return exp;
    }
}
/**
 * Maps one ontology-mapping row onto an {@link OntologyMapping}.
 * Expected columns: (1) experiment accession, (2) property, (3) property value,
 * (4) ontology term. Subclassed anonymously where extra columns are selected.
 */
private static class ExperimentPropertyMapper implements RowMapper<OntologyMapping> {
public OntologyMapping mapRow(ResultSet resultSet, int i) throws SQLException {
OntologyMapping mapping = new OntologyMapping();
mapping.setExperimentAccession(resultSet.getString(1));
mapping.setProperty(resultSet.getString(2));
mapping.setPropertyValue(resultSet.getString(3));
mapping.setOntologyTerm(resultSet.getString(4));
return mapping;
}
}
/**
 * Spring {@link RowCallbackHandler} that attaches a {@link Property} built from
 * each row to the owning object (assay or sample), looked up by the id in column 1.
 * <p>
 * Expected result-set layout: (1) owner id, (2) property name, (3) property value.
 * NOTE(review): the queries in fillOutAssays/fillOutSamples also select a
 * factor-value flag and an efoTerms column (4 and 5) which are ignored here —
 * confirm whether they should be mapped onto the Property as well.
 */
static class ObjectPropertyMappper implements RowCallbackHandler {
    // final: the handler mutates the mapped objects, never the map reference itself
    private final Map<Long, ? extends ObjectWithProperties> objectsById;

    public ObjectPropertyMappper(Map<Long, ? extends ObjectWithProperties> objectsById) {
        this.objectsById = objectsById;
    }

    public void processRow(ResultSet rs) throws SQLException {
        Property property = new Property();
        long objectId = rs.getLong(1);
        property.setName(rs.getString(2));
        property.setValue(rs.getString(3));
        // assumes every id returned by the query is present in the map;
        // a missing id would throw a NullPointerException here
        objectsById.get(objectId).addProperty(property);
    }
}
/**
 * Maps one grouped property row (projection {@link #FIELDS} over {@link #TABLES})
 * onto a {@link Property} bean.
 */
private static class PropertyMapper implements RowMapper<Property> {
private static final String FIELDS = "min(p.propertyid), p.name, min(pv.propertyvalueid), pv.name";
private static final String TABLES = "a2_property p, a2_propertyvalue pv";
public Property mapRow(ResultSet resultSet, int i) throws SQLException {
Property property = new Property();
property.setPropertyId(resultSet.getLong(1));
// NOTE(review): accession is populated from the same column as the name
// (column 2, p.name) — presumably properties have no separate accession
// in this schema; confirm this is intentional.
property.setAccession(resultSet.getString(2));
property.setName(resultSet.getString(2));
property.setPropertyValueId(resultSet.getLong(3));
property.setValue(resultSet.getString(4));
return property;
}
}
/**
 * Stamps the experiment's release date with the current database time (sysdate).
 *
 * @param accession accession of the experiment to update
 */
public void setExperimentReleaseDate(String accession) {
template.update("Update a2_experiment set releasedate = (select sysdate from dual) where accession = ?", accession);
}
/**
 * Maps one sample row (see {@link #FIELDS}) onto a {@link Sample} bean.
 * Column order: accession, species, channel, sample id.
 */
private static class SampleMapper implements RowMapper<Sample> {
    private static final String FIELDS = "s.accession, org.name species, s.channel, s.sampleid ";

    public Sample mapRow(ResultSet rs, int rowNum) throws SQLException {
        final Sample sample = new Sample();
        sample.setAccession(rs.getString(1));
        sample.setSpecies(rs.getString(2));
        sample.setChannel(rs.getString(3));
        sample.setSampleID(rs.getLong(4));
        return sample;
    }
}
}
/*
* Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package uk.ac.ebi.gxa.dao;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import oracle.jdbc.OracleTypes;
import oracle.sql.ARRAY;
import oracle.sql.ArrayDescriptor;
import oracle.sql.STRUCT;
import oracle.sql.StructDescriptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.jdbc.core.*;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.simple.SimpleJdbcCall;
import org.springframework.jdbc.core.support.AbstractSqlTypeValue;
import uk.ac.ebi.microarray.atlas.model.*;
import uk.ac.ebi.microarray.atlas.services.ExperimentDAO;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.*;
import static com.google.common.base.Joiner.on;
import static com.google.common.collect.Iterables.partition;
/**
* A data access object designed for retrieving common sorts of data from the atlas database. This DAO should be
* configured with a spring {@link JdbcTemplate} object which will be used to query the database.
*
* @author Tony Burdett
* @author Alexey Filippov
* @author Nataliya Sklyar
* @author Misha Kapushesky
* @author Pavel Kurnosov
* @author Andrey Zorin
* @author Robert Petryszak
* @author Olga Melnichuk
*/
public class AtlasDAO implements ExperimentDAO {
// upper bound on ids per IN-list chunk when splitting large queries
public static final int MAX_QUERY_PARAMS = 10;
private Logger log = LoggerFactory.getLogger(getClass());
// collaborating DAOs and the Spring JDBC template, injected via the setters below
private ArrayDesignDAOInterface arrayDesignDAO;
private BioEntityDAOInterface bioEntityDAO;
private JdbcTemplate template;
/** Injects the array design DAO used for all array-design lookups. */
public void setArrayDesignDAO(ArrayDesignDAOInterface arrayDesignDAO) {
this.arrayDesignDAO = arrayDesignDAO;
}
/** Injects the bio-entity DAO used for gene and species lookups. */
public void setBioEntityDAO(BioEntityDAOInterface bioEntityDAO) {
this.bioEntityDAO = bioEntityDAO;
}
/** Injects the Spring JDBC template this DAO queries through. */
public void setJdbcTemplate(JdbcTemplate template) {
this.template = template;
}
/**
 * Fetches every experiment, newest load date first (null load dates sort with
 * the oldest), then by accession, and loads each experiment's assets.
 *
 * @return all experiments in the database
 */
public List<Experiment> getAllExperiments() {
List<Experiment> results = template.query("SELECT " + ExperimentMapper.FIELDS + " FROM a2_experiment " +
"ORDER BY (" +
" case when loaddate is null " +
" then (select min(loaddate) from a2_experiment) " +
" else loaddate end) desc, " +
" accession", new ExperimentMapper());
loadExperimentAssets(results);
return results;
}
/**
 * Fetches every experiment and keeps only those not flagged private.
 *
 * @return all experiments visible to the public
 */
public Collection<Experiment> getPublicExperiments() {
    final Predicate<Experiment> isPublic = new Predicate<Experiment>() {
        public boolean apply(Experiment experiment) {
            return !experiment.isPrivate();
        }
    };
    return Collections2.filter(getAllExperiments(), isPublic);
}
/**
 * Gets a single experiment from the Atlas database, queried by the accession
 * of the experiment.
 *
 * @param accession the experiment's accession number (usually in the format E-ABCD-1234)
 * @return the experiment, with its assets loaded, or null when no unique match exists
 */
public Experiment getExperimentByAccession(String accession) {
    final String query = "SELECT " + ExperimentMapper.FIELDS + " FROM a2_experiment " +
            "WHERE accession=?";
    try {
        Experiment experiment = template.queryForObject(query,
                new Object[]{accession},
                new ExperimentMapper());
        loadExperimentAssets(experiment);
        return experiment;
    } catch (IncorrectResultSizeDataAccessException e) {
        // zero rows (or, unexpectedly, more than one) — report as "not found"
        return null;
    }
}
/**
 * @param experimentId id of experiment to retrieve
 * @return Experiment (without assets) matching experimentId, or null when no
 *         unique row matches (the mismatch is logged at WARN)
 */
public Experiment getShallowExperimentById(long experimentId) {
try {
// NB: ExperimentMapper.FIELDS ends with a trailing space, so the
// concatenation below still forms valid SQL
return template.queryForObject("SELECT " +
ExperimentMapper.FIELDS +
"FROM a2_experiment WHERE experimentid=?",
new Object[]{experimentId},
new ExperimentMapper());
} catch (IncorrectResultSizeDataAccessException e) {
log.warn("Experiment id: " + experimentId + ": " + e.getMessage(), e);
return null;
}
}
/**
 * Loads the assets for each experiment in the list, one query per experiment.
 */
private void loadExperimentAssets(List<Experiment> results) {
    for (int i = 0; i < results.size(); i++) {
        loadExperimentAssets(results.get(i));
    }
}
/**
 * Loads the assets (name, filename, description) attached to one experiment,
 * ordered by asset id, and adds them to the experiment bean.
 */
private void loadExperimentAssets(Experiment experiment) {
experiment.addAssets(template.query("SELECT a.name, a.filename, a.description" + " FROM a2_experiment e " +
" JOIN a2_experimentasset a ON a.ExperimentID = e.ExperimentID " +
" WHERE e.accession=? ORDER BY a.ExperimentAssetID",
new Object[]{experiment.getAccession()},
new RowMapper<Asset>() {
public Asset mapRow(ResultSet resultSet, int i) throws SQLException {
// columns: name, filename, description
return new Asset(resultSet.getString(1),
resultSet.getString(2),
resultSet.getString(3));
}
}));
}
/**
 * Fetches every experiment that has at least one assay on the given array
 * design, assets included.
 *
 * @param accession array design accession
 * @return matching experiments
 */
public List<Experiment> getExperimentByArrayDesign(String accession) {
List<Experiment> results = template.query("SELECT " + ExperimentMapper.FIELDS + " FROM a2_experiment " +
"WHERE experimentid IN " +
" (SELECT experimentid FROM a2_assay a, a2_arraydesign ad " +
" WHERE a.arraydesignid=ad.arraydesignid AND ad.accession=?)",
new Object[]{accession},
new ExperimentMapper());
loadExperimentAssets(results);
return results;
}
/**
 * @param experimentAccession the accession of experiment to retrieve assays for
 * @return list of assays, each filled out with its properties
 * @deprecated Use id instead of accession
 */
@Deprecated
public List<Assay> getAssaysByExperimentAccession(String experimentAccession) {
List<Assay> assays = template.query("SELECT a.accession, e.accession, ad.accession, a.assayid " +
"FROM a2_assay a, a2_experiment e, a2_arraydesign ad " +
"WHERE e.experimentid=a.experimentid " +
"AND a.arraydesignid=ad.arraydesignid" + " " +
"AND e.accession=?",
new Object[]{experimentAccession},
new RowMapper<Assay>() {
// columns: assay accession, experiment accession, array design accession, assay id
public Assay mapRow(ResultSet resultSet, int i) throws SQLException {
Assay assay = new Assay();
assay.setAccession(resultSet.getString(1));
assay.setExperimentAccession(resultSet.getString(2));
assay.setArrayDesignAccession(resultSet.getString(3));
assay.setAssayID(resultSet.getLong(4));
return assay;
}
});
// populate the other info for these assays
if (!assays.isEmpty()) {
fillOutAssays(assays);
}
// and return
return assays;
}
/**
 * Retrieves the samples attached to one assay of one experiment, then fills
 * each sample out with its assay accessions and properties.
 *
 * @param experimentAccession the accession of experiment to retrieve samples for
 * @param assayAccession      the accession of the assay to retrieve samples for
 * @return list of samples
 * @deprecated Use ids instead of accessions
 */
@Deprecated
public List<Sample> getSamplesByAssayAccession(String experimentAccession, String assayAccession) {
    List<Sample> samples = template.query("SELECT " + SampleMapper.FIELDS +
            " FROM a2_sample s, a2_assay a, a2_assaysample ass, a2_experiment e, a2_organism org " +
            "WHERE s.sampleid=ass.sampleid " +
            "AND a.assayid=ass.assayid " +
            "AND e.experimentid=a.experimentid " +
            "AND s.organismid=org.organismid " +
            "AND e.accession=? " +
            "AND a.accession=? ", new Object[]{experimentAccession, assayAccession}, new SampleMapper());
    // populate the other info for these samples
    if (!samples.isEmpty()) {
        fillOutSamples(samples);
    }
    return samples;
}
/**
 * Retrieves every sample of an experiment, then fills each sample out with its
 * assay accessions and properties.
 *
 * @param exptAccession accession of the experiment
 * @return list of samples for that experiment
 */
public List<Sample> getSamplesByExperimentAccession(String exptAccession) {
    List<Sample> samples = template.query("SELECT " + SampleMapper.FIELDS +
            " FROM a2_sample s, a2_assay a, a2_assaysample ass, a2_experiment e, a2_organism org " +
            "WHERE s.sampleid=ass.sampleid " +
            "AND a.assayid=ass.assayid " +
            "AND a.experimentid=e.experimentid " +
            "AND s.organismid=org.organismid " +
            "AND e.accession=?", new Object[]{exptAccession}, new SampleMapper());
    // populate the other info for these samples
    if (!samples.isEmpty()) {
        fillOutSamples(samples);
    }
    return samples;
}
/** @return number of distinct property value names in the database */
public int getPropertyValueCount() {
return template.queryForInt("SELECT COUNT(DISTINCT name) FROM a2_propertyvalue");
}
/** @return number of distinct property values used as assay factor values */
public int getFactorValueCount() {
return template.queryForInt("SELECT COUNT(DISTINCT propertyvalueid) FROM a2_assayPV");
}
/**
 * Returns all array designs in the underlying datasource. Note that, to reduce query times, this method does NOT
 * prepopulate ArrayDesigns with their associated design elements (unlike other methods to retrieve array designs
 * more specifically).
 *
 * @return the list of array designs, not prepopulated with design elements.
 */
public List<ArrayDesign> getAllArrayDesigns() {
return arrayDesignDAO.getAllArrayDesigns();
}
/** Delegates to the array design DAO; returns the design matching the accession. */
public ArrayDesign getArrayDesignByAccession(String accession) {
return arrayDesignDAO.getArrayDesignByAccession(accession);
}
/**
 * @param accession Array design accession
 * @return Array design (with no design element and gene ids filled in) corresponding to accession
 */
public ArrayDesign getArrayDesignShallowByAccession(String accession) {
return arrayDesignDAO.getArrayDesignShallowByAccession(accession);
}
/**
 * Lists every distinct ontology mapping recorded for the given ontology,
 * including the owning experiment id (read by the anonymous mapper subclass).
 *
 * @param ontologyName name of the ontology to filter on
 * @return ontology mappings with experiment id populated
 */
public List<OntologyMapping> getOntologyMappingsByOntology(
String ontologyName) {
return template.query("SELECT DISTINCT accession, property, propertyvalue, ontologyterm, experimentid " +
"FROM a2_ontologymapping" + " " +
"WHERE ontologyname=?",
new Object[]{ontologyName},
new ExperimentPropertyMapper() {
public OntologyMapping mapRow(ResultSet resultSet, int i) throws SQLException {
// base mapper reads columns 1-4; column 5 adds the experiment id
OntologyMapping mapping = super.mapRow(resultSet, i);
mapping.setExperimentId(resultSet.getLong(5));
return mapping;
}
});
}
/** @return all property/property-value pairs, grouped by property and value name */
public List<Property> getAllProperties() {
return template.query("SELECT " + PropertyMapper.FIELDS + " " +
"FROM " + PropertyMapper.TABLES + " " +
"WHERE pv.propertyid=p.propertyid GROUP BY p.name, pv.name", new PropertyMapper());
}
/**
 * @param propertyName name of the property to filter on
 * @return property/property-value pairs restricted to the given property name
 */
public List<Property> getPropertiesByPropertyName(String propertyName) {
return template.query("SELECT " + PropertyMapper.FIELDS + " " +
"FROM " + PropertyMapper.TABLES + " " +
"WHERE pv.propertyid=p.propertyid AND p.name=? GROUP BY p.name, pv.name", new Object[]{propertyName}, new PropertyMapper());
}
/**
 * Lists every experiment/property/value triple, with its ontology term where a
 * curated mapping exists and null otherwise — the UNION's second branch keeps
 * assay properties that have no curated ontology mapping.
 */
public List<OntologyMapping> getExperimentsToAllProperties() {
return template.query("SELECT experiment, property, value, ontologyterm from cur_ontologymapping " +
"UNION " +
"SELECT distinct ap.experiment, ap.property, ap.value, null " +
"FROM cur_assayproperty ap where not exists " +
"(SELECT 1 from cur_ontologymapping cm " +
"WHERE cm.property = ap.property " +
"AND cm.value = ap.value " +
"AND cm.experiment = ap.experiment)",
new ExperimentPropertyMapper());
}
/**
 * Assembles headline Atlas statistics: experiment, assay, gene, new-experiment
 * and property/factor-value counts.
 *
 * @param dataRelease     label of the current data release
 * @param lastReleaseDate cut-off date ('MM-YYYY'); experiments loaded after it count as new
 * @return a populated AtlasStatistics bean
 */
public AtlasStatistics getAtlasStatistics(final String dataRelease, final String lastReleaseDate) {
    // manually count all experiments/genes/assays
    AtlasStatistics statistics = new AtlasStatistics();
    statistics.setDataRelease(dataRelease);
    statistics.setExperimentCount(template.queryForInt("SELECT COUNT(*) FROM a2_experiment"));
    statistics.setAssayCount(template.queryForInt("SELECT COUNT(*) FROM a2_assay"));
    statistics.setGeneCount(bioEntityDAO.getGeneCount());
    statistics.setNewExperimentCount(template.queryForInt("SELECT COUNT(*) FROM a2_experiment WHERE loaddate > to_date(?,'MM-YYYY')", lastReleaseDate));
    statistics.setPropertyValueCount(getPropertyValueCount());
    statistics.setFactorValueCount(getFactorValueCount());
    return statistics;
}
/*
DAO write methods
*/
/**
 * Records load progress for an experiment; the load type defaults to EXPERIMENT.
 */
public void writeLoadDetails(final String accession,
final LoadStage loadStage,
final LoadStatus loadStatus) {
writeLoadDetails(accession, loadStage, loadStatus, LoadType.EXPERIMENT);
}
/**
 * Records load progress by invoking the ATLASLDR.LOAD_PROGRESS stored procedure
 * with the accession, stage, status and load type (all enum names lower-cased).
 */
public void writeLoadDetails(final String accession,
final LoadStage loadStage,
final LoadStatus loadStatus,
final LoadType loadType) {
// execute this procedure...
/*
create or replace procedure load_progress(
experiment_accession varchar
,stage varchar --load, netcdf, similarity, ranking, searchindex
,status varchar --done, pending
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.LOAD_PROGRESS")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("EXPERIMENT_ACCESSION")
.useInParameterNames("STAGE")
.useInParameterNames("STATUS")
.useInParameterNames("LOAD_TYPE")
.declareParameters(new SqlParameter("EXPERIMENT_ACCESSION", Types.VARCHAR))
.declareParameters(new SqlParameter("STAGE", Types.VARCHAR))
.declareParameters(new SqlParameter("STATUS", Types.VARCHAR))
.declareParameters(new SqlParameter("LOAD_TYPE", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource()
.addValue("EXPERIMENT_ACCESSION", accession)
.addValue("STAGE", loadStage.toString().toLowerCase())
.addValue("STATUS", loadStatus.toString().toLowerCase())
.addValue("LOAD_TYPE", loadType.toString().toLowerCase());
log.debug("Invoking load_progress stored procedure with parameters (" + accession + ", " + loadStage + ", " +
loadStatus + ", " + loadType + ")");
procedure.execute(params);
log.debug("load_progress stored procedure completed");
}
/**
 * Writes the given experiment to the database, using the default transaction strategy configured for the
 * datasource. Delegates the insert/update to the ATLASLDR.A2_EXPERIMENTSET stored procedure.
 *
 * @param experiment the experiment to write
 */
public void writeExperiment(final Experiment experiment) {
// execute this procedure...
/*
PROCEDURE "A2_EXPERIMENTSET" (
TheAccession varchar2
,TheDescription varchar2
,ThePerformer varchar2
,TheLab varchar2
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_EXPERIMENTSET")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("ACCESSION")
.useInParameterNames("DESCRIPTION")
.useInParameterNames("PERFORMER")
.useInParameterNames("LAB")
.useInParameterNames("PMID")
.useInParameterNames("ABSTRACT")
.declareParameters(new SqlParameter("ACCESSION", Types.VARCHAR))
.declareParameters(new SqlParameter("DESCRIPTION", Types.VARCHAR))
.declareParameters(new SqlParameter("PERFORMER", Types.VARCHAR))
.declareParameters(new SqlParameter("LAB", Types.VARCHAR))
.declareParameters(new SqlParameter("PMID", Types.VARCHAR))
.declareParameters(new SqlParameter("ABSTRACT", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("ACCESSION", experiment.getAccession())
.addValue("DESCRIPTION", experiment.getDescription())
.addValue("PERFORMER", experiment.getPerformer())
.addValue("LAB", experiment.getLab())
.addValue("PMID", experiment.getPubmedID())
.addValue("ABSTRACT", experiment.getArticleAbstract());
procedure.execute(params);
}
/**
 * Writes the given assay to the database, using the default transaction strategy
 * configured for the datasource. Delegates to the ATLASLDR.A2_ASSAYSET stored
 * procedure; the assay's properties are marshalled into an Oracle PROPERTYTABLE
 * ARRAY (null when the assay has no properties).
 *
 * @param assay the assay to write
 */
public void writeAssay(final Assay assay) {
    // execute this procedure...
    /*
    PROCEDURE "A2_ASSAYSET" (
       TheAccession varchar2
      ,TheExperimentAccession  varchar2
      ,TheArrayDesignAccession varchar2
      ,TheProperties PropertyTable
    )
    */
    SimpleJdbcCall procedure =
            new SimpleJdbcCall(template)
                    .withProcedureName("ATLASLDR.A2_ASSAYSET")
                    .withoutProcedureColumnMetaDataAccess()
                    .useInParameterNames("ACCESSION")
                    .useInParameterNames("EXPERIMENTACCESSION")
                    .useInParameterNames("ARRAYDESIGNACCESSION")
                    .useInParameterNames("PROPERTIES")
                    .declareParameters(
                            new SqlParameter("ACCESSION", Types.VARCHAR))
                    .declareParameters(
                            new SqlParameter("EXPERIMENTACCESSION", Types.VARCHAR))
                    .declareParameters(
                            new SqlParameter("ARRAYDESIGNACCESSION", Types.VARCHAR))
                    .declareParameters(
                            new SqlParameter("PROPERTIES", OracleTypes.ARRAY, "PROPERTYTABLE"));
    // map parameters...
    List<Property> props = assay.getProperties();
    MapSqlParameterSource params = new MapSqlParameterSource();
    // StringBuilder: local, single-threaded — no need for StringBuffer's synchronization
    StringBuilder sb = new StringBuilder();
    sb.append("Properties listing for ").append(assay.getAccession()).append(":\n");
    for (Property p : props) {
        sb.append("\t").append(p.getName()).append("\t\t->\t\t").append(p.getValue()).append("\n");
    }
    log.debug(sb.toString());
    // Oracle ARRAYs cannot be empty — pass null instead when there are no properties
    SqlTypeValue propertiesParam =
            props.isEmpty() ? null :
                    convertPropertiesToOracleARRAY(props);
    params.addValue("ACCESSION", assay.getAccession())
            .addValue("EXPERIMENTACCESSION", assay.getExperimentAccession())
            .addValue("ARRAYDESIGNACCESSION", assay.getArrayDesignAccession())
            .addValue("PROPERTIES", propertiesParam, OracleTypes.ARRAY, "PROPERTYTABLE");
    log.debug("Invoking A2_ASSAYSET with the following parameters..." +
                    "\n\tassay accession: {}" +
                    "\n\texperiment: {}" +
                    "\n\tarray design: {}" +
                    "\n\tproperties count: {}" +
                    "\n\texpression value count: {}",
            new Object[]{assay.getAccession(), assay.getExperimentAccession(), assay.getArrayDesignAccession(),
                    props.size(), 0});
    // and execute
    procedure.execute(params);
}
/**
 * Writes the given sample to the database, using the default transaction strategy configured for the datasource.
 * Delegates to the ATLASLDR.A2_SAMPLESET stored procedure; assay accessions and
 * properties are marshalled into Oracle ARRAYs (null when empty, since Oracle
 * ARRAYs here cannot be empty).
 *
 * @param sample the sample to write
 * @param experimentAccession experiment
 */
public void writeSample(final Sample sample, final String experimentAccession) {
// execute this procedure...
/*
PROCEDURE "A2_SAMPLESET" (
p_Accession varchar2
, p_Assays AccessionTable
, p_Properties PropertyTable
, p_Species varchar2
, p_Channel varchar2
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_SAMPLESET")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("EXPERIMENTACCESSION")
.useInParameterNames("SAMPLEACCESSION")
.useInParameterNames("ASSAYS")
.useInParameterNames("PROPERTIES")
.useInParameterNames("CHANNEL")
.declareParameters(
new SqlParameter("EXPERIMENTACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("SAMPLEACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("ASSAYS", OracleTypes.ARRAY, "ACCESSIONTABLE"))
.declareParameters(
new SqlParameter("PROPERTIES", OracleTypes.ARRAY, "PROPERTYTABLE"))
.declareParameters(
new SqlParameter("CHANNEL", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource();
SqlTypeValue accessionsParam = sample.getAssayAccessions().isEmpty() ? null :
convertAssayAccessionsToOracleARRAY(sample.getAssayAccessions());
SqlTypeValue propertiesParam = sample.hasNoProperties() ? null
: convertPropertiesToOracleARRAY(sample.getProperties());
params.addValue("EXPERIMENTACCESSION", experimentAccession)
.addValue("SAMPLEACCESSION", sample.getAccession())
.addValue("ASSAYS", accessionsParam, OracleTypes.ARRAY, "ACCESSIONTABLE")
.addValue("PROPERTIES", propertiesParam, OracleTypes.ARRAY, "PROPERTYTABLE")
.addValue("CHANNEL", sample.getChannel());
int assayCount = sample.getAssayAccessions().size();
int propertiesCount = sample.getPropertiesCount();
log.debug("Invoking A2_SAMPLESET with the following parameters..." +
"\n\texperiment accession: {}" +
"\n\tsample accession: {}" +
"\n\tassays count: {}" +
"\n\tproperties count: {}" +
"\n\tspecies: {}" +
"\n\tchannel: {}",
new Object[]{experimentAccession, sample.getAccession(), assayCount, propertiesCount,
sample.getSpecies(),
sample.getChannel()});
// and execute
procedure.execute(params);
}
/**
 * Writes array designs and associated data back to the database. Delegates to
 * the ATLASLDR.A2_ARRAYDESIGNSET stored procedure; design elements and the gene
 * identifier priority list are marshalled into Oracle ARRAYs (design elements
 * pass null when the bundle has none).
 *
 * @param arrayDesignBundle an object encapsulating the array design data that must be written to the database
 */
public void writeArrayDesignBundle(ArrayDesignBundle arrayDesignBundle) {
// execute this procedure...
/*
PROCEDURE A2_ARRAYDESIGNSET(
Accession varchar2
,Type varchar2
,Name varchar2
,Provider varchar2
,DesignElements DesignElementTable
);
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_ARRAYDESIGNSET")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("ACCESSION")
.useInParameterNames("TYPE")
.useInParameterNames("NAME")
.useInParameterNames("PROVIDER")
.useInParameterNames("ENTRYPRIORITYLIST")
.useInParameterNames("DESIGNELEMENTS")
.declareParameters(
new SqlParameter("ACCESSION", Types.VARCHAR))
.declareParameters(
new SqlParameter("TYPE", Types.VARCHAR))
.declareParameters(
new SqlParameter("NAME", Types.VARCHAR))
.declareParameters(
new SqlParameter("PROVIDER", Types.VARCHAR))
.declareParameters(
new SqlParameter("ENTRYPRIORITYLIST", OracleTypes.ARRAY, "IDVALUETABLE"))
.declareParameters(
new SqlParameter("DESIGNELEMENTS", OracleTypes.ARRAY, "DESIGNELEMENTTABLE"));
SqlTypeValue designElementsParam =
arrayDesignBundle.getDesignElementNames().isEmpty() ? null :
convertDesignElementsToOracleARRAY(arrayDesignBundle);
SqlTypeValue geneIdentifierPriorityParam = convertToOracleARRAYofIDVALUE(
arrayDesignBundle.getGeneIdentifierNames());
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("ACCESSION", arrayDesignBundle.getAccession())
.addValue("TYPE", arrayDesignBundle.getType())
.addValue("NAME", arrayDesignBundle.getName())
.addValue("PROVIDER", arrayDesignBundle.getProvider())
.addValue("ENTRYPRIORITYLIST", geneIdentifierPriorityParam, OracleTypes.ARRAY, "IDVALUETABLE")
.addValue("DESIGNELEMENTS", designElementsParam, OracleTypes.ARRAY, "DESIGNELEMENTTABLE");
procedure.execute(params);
}
/*
DAO delete methods
*/
/**
 * Deletes the experiment with the given accession from the database. If this experiment is not present, this does
 * nothing. Delegates to the ATLASLDR.A2_EXPERIMENTDELETE stored procedure.
 *
 * @param experimentAccession the accession of the experiment to remove
 */
public void deleteExperiment(final String experimentAccession) {
// execute this procedure...
/*
PROCEDURE A2_EXPERIMENTDELETE(
Accession varchar2
)
*/
SimpleJdbcCall procedure =
new SimpleJdbcCall(template)
.withProcedureName("ATLASLDR.A2_EXPERIMENTDELETE")
.withoutProcedureColumnMetaDataAccess()
.useInParameterNames("ACCESSION")
.declareParameters(new SqlParameter("ACCESSION", Types.VARCHAR));
// map parameters...
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("ACCESSION", experimentAccession);
procedure.execute(params);
}
/**
 * Populates each assay in the list with its properties (including concatenated
 * EFO terms). Queries are chunked into groups of MAX_QUERY_PARAMS assay ids to
 * keep the IN-lists bounded.
 */
private void fillOutAssays(List<Assay> assays) {
// map assays to assay id
Map<Long, Assay> assaysByID = new HashMap<Long, Assay>();
for (Assay assay : assays) {
// index this assay
assaysByID.put(assay.getAssayID(), assay);
}
// maps properties to assays
ObjectPropertyMappper assayPropertyMapper = new ObjectPropertyMappper(assaysByID);
// query template for assays
NamedParameterJdbcTemplate namedTemplate = new NamedParameterJdbcTemplate(template);
// if we have more than 'MAX_QUERY_PARAMS' assays, split into smaller queries
final ArrayList<Long> assayIds = new ArrayList<Long>(assaysByID.keySet());
for (List<Long> assayIDsChunk : partition(assayIds, MAX_QUERY_PARAMS)) {
// now query for properties that map to one of the samples in the sublist
MapSqlParameterSource propertyParams = new MapSqlParameterSource();
propertyParams.addValue("assayids", assayIDsChunk);
namedTemplate.query("SELECT apv.assayid,\n" +
" p.name AS property,\n" +
" pv.name AS propertyvalue, 1,\n" +
" wm_concat(t.accession) AS efoTerms\n" +
" FROM a2_property p\n" +
" JOIN a2_propertyvalue pv ON pv.propertyid=p.propertyid\n" +
" JOIN a2_assaypv apv ON apv.propertyvalueid=pv.propertyvalueid\n" +
" LEFT JOIN a2_assaypvontology apvo ON apvo.assaypvid = apv.assaypvid\n" +
" LEFT JOIN a2_ontologyterm t ON apvo.ontologytermid = t.ontologytermid\n" +
" WHERE apv.assayid IN (:assayids)" +
" GROUP BY apvo.assaypvid, apv.assayid, p.name, pv.name", propertyParams, assayPropertyMapper);
}
}
/**
 * Populates each sample in the list with its linked assay accessions and its
 * properties. Queries are chunked into groups of MAX_QUERY_PARAMS sample ids
 * to keep the IN-lists bounded.
 *
 * @param samples samples to fill out; each is looked up and mutated by sample id
 */
private void fillOutSamples(List<Sample> samples) {
    // map samples to sample id; final so anonymous classes can capture it directly
    // (replaces the previous alias-variable-plus-shadow-field workaround)
    final Map<Long, Sample> samplesByID = new HashMap<Long, Sample>();
    for (Sample sample : samples) {
        samplesByID.put(sample.getSampleID(), sample);
    }
    // attaches each (sample id, assay accession) row to its owning sample
    RowCallbackHandler assaySampleMapper = new RowCallbackHandler() {
        public void processRow(ResultSet rs) throws SQLException {
            long sampleID = rs.getLong(1);
            samplesByID.get(sampleID).addAssayAccession(rs.getString(2));
        }
    };
    // attaches property rows to the relevant sample
    ObjectPropertyMappper samplePropertyMapper = new ObjectPropertyMappper(samplesByID);
    // query template for samples
    NamedParameterJdbcTemplate namedTemplate = new NamedParameterJdbcTemplate(template);
    // if we have more than 'MAX_QUERY_PARAMS' samples, split into smaller queries
    List<Long> sampleIDs = new ArrayList<Long>(samplesByID.keySet());
    for (List<Long> sampleIDsChunk : partition(sampleIDs, MAX_QUERY_PARAMS)) {
        // now query for assays that map to one of these samples
        MapSqlParameterSource assayParams = new MapSqlParameterSource();
        assayParams.addValue("sampleids", sampleIDsChunk);
        namedTemplate.query("SELECT s.sampleid, a.accession " +
                "FROM a2_assay a, a2_assaysample s " +
                "WHERE a.assayid=s.assayid " +
                "AND s.sampleid IN (:sampleids)", assayParams, assaySampleMapper);
        // now query for properties that map to one of these samples
        log.trace("Querying for properties where sample IN (" + on(',').join(sampleIDsChunk) + ")");
        MapSqlParameterSource propertyParams = new MapSqlParameterSource();
        propertyParams.addValue("sampleids", sampleIDsChunk);
        namedTemplate.query("SELECT spv.sampleid,\n" +
                " p.name AS property,\n" +
                " pv.name AS propertyvalue, 0,\n" +
                " wm_concat(t.accession) AS efoTerms\n" +
                " FROM a2_property p\n" +
                " JOIN a2_propertyvalue pv ON pv.propertyid=p.propertyid\n" +
                " JOIN a2_samplepv spv ON spv.propertyvalueid=pv.propertyvalueid\n" +
                " LEFT JOIN a2_samplepvontology spvo ON spvo.SamplePVID = spv.SAMPLEPVID\n" +
                " LEFT JOIN a2_ontologyterm t ON spvo.ontologytermid = t.ontologytermid\n" +
                " WHERE spv.sampleid IN (:sampleids)" +
                " GROUP BY spvo.SamplePVID, spv.SAMPLEID, p.name, pv.name ", propertyParams, samplePropertyMapper);
    }
}
/**
 * Wraps a list of {@link Property} values as an Oracle ARRAY of PROPERTY
 * STRUCTs (accession, name, value, efoTerms) for the loader stored procedures.
 * The descriptor is created once rather than per element, and each STRUCT gets
 * its own values array — consistent with convertDesignElementsToOracleARRAY.
 *
 * @param properties properties to marshal; must be non-null and non-empty
 * @return a lazily-evaluated SqlTypeValue; its createTypeValue throws
 *         SQLException when the list is empty, as an empty ARRAY cannot be built
 */
@Deprecated
private SqlTypeValue convertPropertiesToOracleARRAY(final List<Property> properties) {
    return new AbstractSqlTypeValue() {
        protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
            if (properties == null || properties.isEmpty()) {
                // we cannot create an ARRAY from an empty list
                throw new SQLException("Unable to create an ARRAY from an empty list of properties");
            }
            // descriptor for PROPERTY type — created once, reused for every element
            StructDescriptor structDescriptor = StructDescriptor.createDescriptor("PROPERTY", connection);
            // the array of STRUCTs representing each property
            Object[] propArrayValues = new Object[properties.size()];
            int i = 0;
            for (Property property : properties) {
                // fresh values array per STRUCT so no state is shared between elements
                Object[] propStructValues = new Object[4];
                propStructValues[0] = property.getAccession();
                propStructValues[1] = property.getName();
                propStructValues[2] = property.getValue();
                propStructValues[3] = property.getEfoTerms();
                propArrayValues[i++] = new STRUCT(structDescriptor, connection, propStructValues);
            }
            // created the array of STRUCTs, group into ARRAY
            return createArray(connection, typeName, propArrayValues);
        }
    };
}
/**
 * Builds an Oracle ARRAY of the named SQL type from the given element values.
 */
private Object createArray(Connection connection, String typeName, Object... propArrayValues) throws SQLException {
ArrayDescriptor arrayDescriptor = ArrayDescriptor.createDescriptor(typeName, connection);
return new ARRAY(arrayDescriptor, connection, propArrayValues);
}
/**
 * Wraps a collection as an Oracle ARRAY of IDVALUE STRUCTs, pairing each
 * element with its zero-based position as the id. The descriptor is created
 * once rather than per element, and each STRUCT gets its own values array.
 *
 * @param list elements to marshal; must be non-null and non-empty
 * @return a lazily-evaluated SqlTypeValue; its createTypeValue throws
 *         SQLException when the list is empty, as an empty ARRAY cannot be built
 */
private <T> SqlTypeValue convertToOracleARRAYofIDVALUE(final Collection<T> list) {
    return new AbstractSqlTypeValue() {
        protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
            if (list == null || list.isEmpty()) {
                // we cannot create an ARRAY from an empty list
                throw new SQLException("Unable to create an ARRAY from an empty list");
            }
            // descriptor for IDVALUE type — created once, reused for every element
            StructDescriptor structDescriptor = StructDescriptor.createDescriptor("IDVALUE", connection);
            // the array of STRUCTs representing each (index, value) pair
            Object[] strArrayValues = new Object[list.size()];
            int i = 0;
            for (T elt : list) {
                // fresh values array per STRUCT so no state is shared between elements
                Object[] propStructValues = new Object[2];
                propStructValues[0] = i;
                propStructValues[1] = elt;
                strArrayValues[i++] = new STRUCT(structDescriptor, connection, propStructValues);
            }
            // created the array of STRUCTs, group into ARRAY
            return createArray(connection, typeName, strArrayValues);
        }
    };
}
/**
 * Wraps a set of assay accession strings as an Oracle ARRAY (used for the
 * ACCESSIONTABLE parameter of A2_SAMPLESET).
 *
 * @param assayAccessions accessions to marshal; must be non-null and non-empty
 * @return a lazily-evaluated SqlTypeValue; its createTypeValue throws
 *         SQLException when the set is empty
 */
private SqlTypeValue convertAssayAccessionsToOracleARRAY(final Set<String> assayAccessions) {
    return new AbstractSqlTypeValue() {
        protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
            if (assayAccessions != null && !assayAccessions.isEmpty()) {
                // copy the accession strings into an Object[] for the ARRAY
                Object[] accessions = assayAccessions.toArray();
                return createArray(connection, typeName, accessions);
            } else {
                // throw an SQLException, as we cannot create a ARRAY with an empty array
                throw new SQLException("Unable to create an ARRAY from an empty list of accessions");
            }
        }
    };
}
private SqlTypeValue convertDesignElementsToOracleARRAY(final ArrayDesignBundle arrayDesignBundle) {
return new AbstractSqlTypeValue() {
protected Object createTypeValue(Connection connection, int sqlType, String typeName) throws SQLException {
List<Object> deArrayValues = new ArrayList<Object>();
StructDescriptor structDescriptor =
StructDescriptor.createDescriptor("DESIGNELEMENT2", connection);
// loop over all design element names
for (String designElementName : arrayDesignBundle.getDesignElementNames()) {
// loop over the mappings of database entry 'type' to the set of values
Map<String, List<String>> dbeMappings =
arrayDesignBundle.getDatabaseEntriesForDesignElement(designElementName);
for (Map.Entry<String, List<String>> entry : dbeMappings.entrySet()) {
// loop over the enumeration of database entry values
List<String> databaseEntryValues = entry.getValue();
for (String databaseEntryValue : databaseEntryValues) {
// create a new row in the table for each combination
Object[] deStructValues = new Object[3];
deStructValues[0] = designElementName;
deStructValues[1] = entry.getKey();
deStructValues[2] = databaseEntryValue;
deArrayValues.add(new STRUCT(structDescriptor, connection, deStructValues));
}
}
}
return createArray(connection, typeName, deArrayValues.toArray());
}
};
}
public int getCountAssaysForExperimentID(long experimentID) {
return template.queryForInt(
"SELECT COUNT(DISTINCT ASSAYID) FROM VWEXPERIMENTASSAY WHERE EXPERIMENTID=?",
experimentID);
}
    /**
     * Retrieves the species for the given experiment. Delegates to the
     * bio-entity DAO.
     *
     * @param experimentId the experiment identifier
     * @return the species list as returned by {@code bioEntityDAO}
     */
    public List<String> getSpeciesForExperiment(long experimentId) {
        return bioEntityDAO.getSpeciesForExperiment(experimentId);
    }
    /**
     * Maps one result-set row onto an {@link Experiment} bean. The
     * positional {@code getXxx(n)} calls below must stay in sync with the
     * column order declared in {@code FIELDS}.
     */
    private static class ExperimentMapper implements RowMapper<Experiment> {
        // Select-list shared by the experiment queries using this mapper;
        // its column order drives the positional indexes in mapRow
        private static final String FIELDS = " accession, description, performer, lab, " +
                " experimentid, loaddate, pmid, abstract, releasedate, private, curated ";
        public Experiment mapRow(ResultSet resultSet, int i) throws SQLException {
            Experiment experiment = new Experiment();
            experiment.setAccession(resultSet.getString(1));
            experiment.setDescription(resultSet.getString(2));
            experiment.setPerformer(resultSet.getString(3));
            experiment.setLab(resultSet.getString(4));
            experiment.setExperimentID(resultSet.getLong(5));
            experiment.setLoadDate(resultSet.getDate(6));
            experiment.setPubmedID(resultSet.getString(7));
            experiment.setArticleAbstract(resultSet.getString(8));
            experiment.setReleaseDate(resultSet.getDate(9));
            experiment.setPrivate(resultSet.getBoolean(10));
            experiment.setCurated(resultSet.getBoolean(11));
            return experiment;
        }
    }
    /**
     * Maps one result-set row onto an {@link OntologyMapping}: experiment
     * accession, property name, property value, ontology term - in that
     * column order.
     */
    private static class ExperimentPropertyMapper implements RowMapper<OntologyMapping> {
        public OntologyMapping mapRow(ResultSet resultSet, int i) throws SQLException {
            OntologyMapping mapping = new OntologyMapping();
            mapping.setExperimentAccession(resultSet.getString(1));
            mapping.setProperty(resultSet.getString(2));
            mapping.setPropertyValue(resultSet.getString(3));
            mapping.setOntologyTerm(resultSet.getString(4));
            return mapping;
        }
    }
    /**
     * Row callback that attaches a {@link Property} (name in column 2,
     * value in column 3) to the object whose id appears in column 1,
     * looked up in the supplied map.
     * NOTE(review): a row whose object id is absent from the map will
     * throw a NullPointerException in processRow - confirm callers always
     * pass a complete map. The class name keeps its historical triple-p
     * spelling for compatibility with existing references.
     */
    static class ObjectPropertyMappper implements RowCallbackHandler {
        // target objects keyed by database id; populated by the caller
        private Map<Long, ? extends ObjectWithProperties> objectsById;
        public ObjectPropertyMappper(Map<Long, ? extends ObjectWithProperties> objectsById) {
            this.objectsById = objectsById;
        }
        public void processRow(ResultSet rs) throws SQLException {
            Property property = new Property();
            long objectId = rs.getLong(1);
            property.setName(rs.getString(2));
            property.setValue(rs.getString(3));
            objectsById.get(objectId).addProperty(property);
        }
    }
    /**
     * Maps one result-set row onto a {@link Property} bean using the column
     * order declared in {@code FIELDS}: property id, property name,
     * property value id, property value.
     * NOTE(review): both accession and name are read from column 2 (the
     * property name) - presumably intentional (accession == name here),
     * but worth confirming against the schema.
     */
    private static class PropertyMapper implements RowMapper<Property> {
        // select-list and table fragments shared by queries using this mapper
        private static final String FIELDS = "min(p.propertyid), p.name, min(pv.propertyvalueid), pv.name";
        private static final String TABLES = "a2_property p, a2_propertyvalue pv";
        public Property mapRow(ResultSet resultSet, int i) throws SQLException {
            Property property = new Property();
            property.setPropertyId(resultSet.getLong(1));
            property.setAccession(resultSet.getString(2));
            property.setName(resultSet.getString(2));
            property.setPropertyValueId(resultSet.getLong(3));
            property.setValue(resultSet.getString(4));
            return property;
        }
    }
    /**
     * Stamps the release date of the given experiment with the current
     * database time (Oracle {@code sysdate}).
     *
     * @param accession the accession of the experiment to update
     */
    public void setExperimentReleaseDate(String accession) {
        template.update("Update a2_experiment set releasedate = (select sysdate from dual) where accession = ?", accession);
    }
    /**
     * Maps one result-set row onto a {@link Sample} bean using the column
     * order declared in {@code FIELDS}: accession, species, channel,
     * sample id.
     */
    private static class SampleMapper implements RowMapper<Sample> {
        // select-list fragment; "org.name species" aliases the organism name
        private static final String FIELDS = "s.accession, org.name species, s.channel, s.sampleid ";
        public Sample mapRow(ResultSet resultSet, int i) throws SQLException {
            Sample sample = new Sample();
            sample.setAccession(resultSet.getString(1));
            sample.setSpecies(resultSet.getString(2));
            sample.setChannel(resultSet.getString(3));
            sample.setSampleID(resultSet.getLong(4));
            return sample;
        }
    }
}
| Copied changed from release-2.0.7 branch to exclude non-curated experiments from Solr and bit indexes
| atlas-dao/src/main/java/uk/ac/ebi/gxa/dao/AtlasDAO.java | Copied changed from release-2.0.7 branch to exclude non-curated experiments from Solr and bit indexes | <ide><path>tlas-dao/src/main/java/uk/ac/ebi/gxa/dao/AtlasDAO.java
<ide>
<ide> /**
<ide> *
<del> * @return All public experiments
<del> */
<del> public Collection<Experiment> getPublicExperiments() {
<add> * @return All public curated experiments
<add> */
<add> public Collection<Experiment> getPublicCuratedExperiments() {
<ide> return Collections2.filter(getAllExperiments(),
<ide> new Predicate<Experiment>() {
<ide> public boolean apply(uk.ac.ebi.microarray.atlas.model.Experiment exp) {
<del> return !exp.isPrivate();
<add> return !exp.isPrivate() && exp.isCurated();
<ide> }
<ide> });
<ide> } |
|
JavaScript | mit | 3ce6adff2d783474eabc0aaefdb46b70a5805e9b | 0 | huangshuwei/vue-easytable | import { mount } from "@vue/test-utils";
import veTable from "@/ve-table";
import { later } from "../util";
describe("veTable edit", () => {
const TABLE_DATA = [
{
name: "John",
date: "1900-05-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Shanghai",
rowKey: 0,
},
{
name: "Dickerson",
date: "1910-06-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Beijing",
rowKey: 1,
},
{
name: "Larsen",
date: "2000-07-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Chongqing",
rowKey: 2,
},
{
name: "Geneva",
date: "2010-08-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Xiamen",
rowKey: 3,
},
{
name: "Jami",
date: "2020-09-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Shenzhen",
rowKey: 4,
},
];
const COLUMNS = [
{
field: "name",
key: "name",
title: "Name",
align: "left",
width: "15%",
edit: true,
},
{
field: "date",
key: "date",
title: "Date",
align: "left",
width: "15%",
edit: true,
},
{
field: "hobby",
key: "hobby",
title: "Hobby",
align: "center",
width: "30%",
edit: true,
},
{
field: "address",
key: "address",
title: "Address",
align: "left",
width: "40%",
edit: true,
},
];
it("double click edit", async () => {
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
doubleClickEdit: true,
// cell value change
cellValueChange: ({ row, column }) => {},
},
rowKeyFieldName: "rowKey",
},
});
// first cell
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
firstCell.trigger("dblclick");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
});
it("single click edit", async () => {
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {},
},
rowKeyFieldName: "rowKey",
},
});
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
firstCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
});
it("full row edit 存在bug", async () => {
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
doubleClickEdit: true,
fullRowEdit: true,
// cell value change
cellValueChange: ({ row, column }) => {},
},
rowKeyFieldName: "rowKey",
},
});
// first cell
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
firstCell.trigger("dblclick");
await later();
expect(wrapper.findAll(".ve-table-body-td-edit-input").length).toBe(4);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(1)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(wrapper.findAll(".ve-table-body-td-edit-input").length).toBe(0);
});
it("stop editing when cell lose focus", async () => {
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: false,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {},
},
rowKeyFieldName: "rowKey",
},
});
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
firstCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
});
it("startEditingCell instance method", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: true,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
wrapper.vm.startEditingCell({
rowKey: 0,
colKey: "name",
defaultValue: "AAA",
});
await later();
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(mockFn).toHaveBeenCalled();
expect(mockFn).toHaveBeenCalledWith(
{
address: "No.1 Century Avenue, Shanghai",
date: "1900-05-20",
hobby: "coding and coding repeat",
name: "AAA",
rowKey: 0,
},
{
_colspan: 1,
_keys: "name",
_level: 1,
_realTimeWidth: "15%",
_rowspan: 1,
align: "left",
edit: true,
field: "name",
key: "name",
title: "Name",
width: "15%",
},
);
});
it("stopEditingCell instance method", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: true,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
wrapper.vm.startEditingCell({
rowKey: 0,
colKey: "name",
defaultValue: "AAA",
});
await later();
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
wrapper.vm.stopEditingCell({
rowKey: 0,
colKey: "name",
});
await later();
expect(mockFn).toHaveBeenCalled();
expect(mockFn).toHaveBeenCalledWith(
{
address: "No.1 Century Avenue, Shanghai",
date: "1900-05-20",
hobby: "coding and coding repeat",
name: "AAA",
rowKey: 0,
},
{
_colspan: 1,
_keys: "name",
_level: 1,
_realTimeWidth: "15%",
_rowspan: 1,
align: "left",
edit: true,
field: "name",
key: "name",
title: "Name",
width: "15%",
},
);
});
it("stopAllEditingCell instance method", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: false,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
wrapper.vm.startEditingCell({
rowKey: 0,
colKey: "name",
defaultValue: "AAA",
});
wrapper.vm.startEditingCell({
rowKey: 1,
colKey: "date",
defaultValue: "BBB",
});
await later();
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(1)
.findAll(".ve-table-body-td")
.at(1);
expect(secondCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
wrapper.vm.stopAllEditingCell();
await later();
expect(mockFn).toHaveBeenCalledTimes(2);
});
it("cell value change", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: true,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
firstCell.trigger("click");
await later();
const textInput = firstCell.find(".ve-table-body-td-edit-input");
expect(textInput.exists()).toBe(true);
textInput.setValue("AAA");
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(mockFn).toHaveBeenCalled();
expect(mockFn).toHaveBeenCalledWith(
{
address: "No.1 Century Avenue, Shanghai",
date: "1900-05-20",
hobby: "coding and coding repeat",
name: "AAA",
rowKey: 0,
},
{
_colspan: 1,
_keys: "name",
_level: 1,
_realTimeWidth: "15%",
_rowspan: 1,
align: "left",
edit: true,
field: "name",
key: "name",
title: "Name",
width: "15%",
},
);
});
// full row edit
// rowValueChange
});
| tests/unit/specs/ve-table-edit.spec.js | import { mount } from "@vue/test-utils";
import veTable from "@/ve-table";
import { later } from "../util";
describe("veTable edit", () => {
const TABLE_DATA = [
{
name: "John",
date: "1900-05-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Shanghai",
rowKey: 0,
},
{
name: "Dickerson",
date: "1910-06-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Beijing",
rowKey: 1,
},
{
name: "Larsen",
date: "2000-07-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Chongqing",
rowKey: 2,
},
{
name: "Geneva",
date: "2010-08-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Xiamen",
rowKey: 3,
},
{
name: "Jami",
date: "2020-09-20",
hobby: "coding and coding repeat",
address: "No.1 Century Avenue, Shenzhen",
rowKey: 4,
},
];
const COLUMNS = [
{
field: "name",
key: "name",
title: "Name",
align: "left",
width: "15%",
edit: true,
},
{
field: "date",
key: "date",
title: "Date",
align: "left",
width: "15%",
edit: true,
},
{
field: "hobby",
key: "hobby",
title: "Hobby",
align: "center",
width: "30%",
edit: true,
},
{
field: "address",
key: "address",
title: "Address",
align: "left",
width: "40%",
edit: true,
},
];
it("double click edit", async () => {
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
doubleClickEdit: true,
// cell value change
cellValueChange: ({ row, column }) => {},
},
rowKeyFieldName: "rowKey",
},
});
// first cell
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
firstCell.trigger("dblclick");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
});
it("single click edit", async () => {
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {},
},
rowKeyFieldName: "rowKey",
},
});
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
firstCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
});
it("stop editing when cell lose focus", async () => {
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: false,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {},
},
rowKeyFieldName: "rowKey",
},
});
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
false,
);
firstCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
});
it("startEditingCell instance method", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: true,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
wrapper.vm.startEditingCell({
rowKey: 0,
colKey: "name",
defaultValue: "AAA",
});
await later();
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(mockFn).toHaveBeenCalled();
expect(mockFn).toHaveBeenCalledWith(
{
address: "No.1 Century Avenue, Shanghai",
date: "1900-05-20",
hobby: "coding and coding repeat",
name: "AAA",
rowKey: 0,
},
{
_colspan: 1,
_keys: "name",
_level: 1,
_realTimeWidth: "15%",
_rowspan: 1,
align: "left",
edit: true,
field: "name",
key: "name",
title: "Name",
width: "15%",
},
);
});
it("stopEditingCell instance method", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: true,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
wrapper.vm.startEditingCell({
rowKey: 0,
colKey: "name",
defaultValue: "AAA",
});
await later();
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
wrapper.vm.stopEditingCell({
rowKey: 0,
colKey: "name",
});
await later();
expect(mockFn).toHaveBeenCalled();
expect(mockFn).toHaveBeenCalledWith(
{
address: "No.1 Century Avenue, Shanghai",
date: "1900-05-20",
hobby: "coding and coding repeat",
name: "AAA",
rowKey: 0,
},
{
_colspan: 1,
_keys: "name",
_level: 1,
_realTimeWidth: "15%",
_rowspan: 1,
align: "left",
edit: true,
field: "name",
key: "name",
title: "Name",
width: "15%",
},
);
});
it("stopAllEditingCell instance method", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: false,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
wrapper.vm.startEditingCell({
rowKey: 0,
colKey: "name",
defaultValue: "AAA",
});
wrapper.vm.startEditingCell({
rowKey: 1,
colKey: "date",
defaultValue: "BBB",
});
await later();
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(1)
.findAll(".ve-table-body-td")
.at(1);
expect(secondCell.find(".ve-table-body-td-edit-input").exists()).toBe(
true,
);
wrapper.vm.stopAllEditingCell();
await later();
expect(mockFn).toHaveBeenCalledTimes(2);
expect(mockFn).toHaveBeenCalledWith(
{
address: "No.1 Century Avenue, Shanghai",
date: "1900-05-20",
hobby: "coding and coding repeat",
name: "AAA",
rowKey: 0,
},
{
_colspan: 1,
_keys: "name",
_level: 1,
_realTimeWidth: "15%",
_rowspan: 1,
align: "left",
edit: true,
field: "name",
key: "name",
title: "Name",
width: "15%",
},
);
});
it("cell value change", async () => {
const mockFn = jest.fn();
const wrapper = mount(veTable, {
propsData: {
columns: COLUMNS,
tableData: TABLE_DATA,
editOption: {
stopEditingWhenCellLoseFocus: true,
doubleClickEdit: false,
// cell value change
cellValueChange: ({ row, column }) => {
mockFn(row, column);
},
},
rowKeyFieldName: "rowKey",
},
});
// td
const firstCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(0);
firstCell.trigger("click");
await later();
const textInput = firstCell.find(".ve-table-body-td-edit-input");
expect(textInput.exists()).toBe(true);
textInput.setValue("AAA");
// second cell
const secondCell = wrapper
.findAll(".ve-table-body-tr")
.at(0)
.findAll(".ve-table-body-td")
.at(1);
secondCell.trigger("click");
await later();
expect(mockFn).toHaveBeenCalled();
expect(mockFn).toHaveBeenCalledWith(
{
address: "No.1 Century Avenue, Shanghai",
date: "1900-05-20",
hobby: "coding and coding repeat",
name: "AAA",
rowKey: 0,
},
{
_colspan: 1,
_keys: "name",
_level: 1,
_realTimeWidth: "15%",
_rowspan: 1,
align: "left",
edit: true,
field: "name",
key: "name",
title: "Name",
width: "15%",
},
);
});
// full row edit
// rowValueChange
});
| Update ve-table-edit.spec.js
| tests/unit/specs/ve-table-edit.spec.js | Update ve-table-edit.spec.js | <ide><path>ests/unit/specs/ve-table-edit.spec.js
<ide> expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
<ide> false,
<ide> );
<add> });
<add>
<add> it("full row edit 存在bug", async () => {
<add> const wrapper = mount(veTable, {
<add> propsData: {
<add> columns: COLUMNS,
<add> tableData: TABLE_DATA,
<add> editOption: {
<add> doubleClickEdit: true,
<add> fullRowEdit: true,
<add> // cell value change
<add> cellValueChange: ({ row, column }) => {},
<add> },
<add> rowKeyFieldName: "rowKey",
<add> },
<add> });
<add>
<add> // first cell
<add> const firstCell = wrapper
<add> .findAll(".ve-table-body-tr")
<add> .at(0)
<add> .findAll(".ve-table-body-td")
<add> .at(0);
<add>
<add> expect(firstCell.find(".ve-table-body-td-edit-input").exists()).toBe(
<add> false,
<add> );
<add>
<add> firstCell.trigger("dblclick");
<add>
<add> await later();
<add>
<add> expect(wrapper.findAll(".ve-table-body-td-edit-input").length).toBe(4);
<add>
<add> // second cell
<add> const secondCell = wrapper
<add> .findAll(".ve-table-body-tr")
<add> .at(1)
<add> .findAll(".ve-table-body-td")
<add> .at(1);
<add>
<add> secondCell.trigger("click");
<add>
<add> await later();
<add>
<add> expect(wrapper.findAll(".ve-table-body-td-edit-input").length).toBe(0);
<ide> });
<ide>
<ide> it("stop editing when cell lose focus", async () => {
<ide> await later();
<ide>
<ide> expect(mockFn).toHaveBeenCalledTimes(2);
<add> });
<add>
<add> it("cell value change", async () => {
<add> const mockFn = jest.fn();
<add>
<add> const wrapper = mount(veTable, {
<add> propsData: {
<add> columns: COLUMNS,
<add> tableData: TABLE_DATA,
<add> editOption: {
<add> stopEditingWhenCellLoseFocus: true,
<add> doubleClickEdit: false,
<add> // cell value change
<add> cellValueChange: ({ row, column }) => {
<add> mockFn(row, column);
<add> },
<add> },
<add> rowKeyFieldName: "rowKey",
<add> },
<add> });
<add>
<add> // td
<add> const firstCell = wrapper
<add> .findAll(".ve-table-body-tr")
<add> .at(0)
<add> .findAll(".ve-table-body-td")
<add> .at(0);
<add>
<add> firstCell.trigger("click");
<add>
<add> await later();
<add>
<add> const textInput = firstCell.find(".ve-table-body-td-edit-input");
<add>
<add> expect(textInput.exists()).toBe(true);
<add>
<add> textInput.setValue("AAA");
<add>
<add> // second cell
<add> const secondCell = wrapper
<add> .findAll(".ve-table-body-tr")
<add> .at(0)
<add> .findAll(".ve-table-body-td")
<add> .at(1);
<add>
<add> secondCell.trigger("click");
<add>
<add> await later();
<add>
<add> expect(mockFn).toHaveBeenCalled();
<ide> expect(mockFn).toHaveBeenCalledWith(
<ide> {
<ide> address: "No.1 Century Avenue, Shanghai",
<ide> );
<ide> });
<ide>
<del> it("cell value change", async () => {
<del> const mockFn = jest.fn();
<del>
<del> const wrapper = mount(veTable, {
<del> propsData: {
<del> columns: COLUMNS,
<del> tableData: TABLE_DATA,
<del> editOption: {
<del> stopEditingWhenCellLoseFocus: true,
<del> doubleClickEdit: false,
<del> // cell value change
<del> cellValueChange: ({ row, column }) => {
<del> mockFn(row, column);
<del> },
<del> },
<del> rowKeyFieldName: "rowKey",
<del> },
<del> });
<del>
<del> // td
<del> const firstCell = wrapper
<del> .findAll(".ve-table-body-tr")
<del> .at(0)
<del> .findAll(".ve-table-body-td")
<del> .at(0);
<del>
<del> firstCell.trigger("click");
<del>
<del> await later();
<del>
<del> const textInput = firstCell.find(".ve-table-body-td-edit-input");
<del>
<del> expect(textInput.exists()).toBe(true);
<del>
<del> textInput.setValue("AAA");
<del>
<del> // second cell
<del> const secondCell = wrapper
<del> .findAll(".ve-table-body-tr")
<del> .at(0)
<del> .findAll(".ve-table-body-td")
<del> .at(1);
<del>
<del> secondCell.trigger("click");
<del>
<del> await later();
<del>
<del> expect(mockFn).toHaveBeenCalled();
<del> expect(mockFn).toHaveBeenCalledWith(
<del> {
<del> address: "No.1 Century Avenue, Shanghai",
<del> date: "1900-05-20",
<del> hobby: "coding and coding repeat",
<del> name: "AAA",
<del> rowKey: 0,
<del> },
<del> {
<del> _colspan: 1,
<del> _keys: "name",
<del> _level: 1,
<del> _realTimeWidth: "15%",
<del> _rowspan: 1,
<del> align: "left",
<del> edit: true,
<del> field: "name",
<del> key: "name",
<del> title: "Name",
<del> width: "15%",
<del> },
<del> );
<del> });
<del>
<ide> // full row edit
<ide> // rowValueChange
<ide> }); |
|
Java | apache-2.0 | 4cb24085811f7d11c4ac329a9ee97087c2123d95 | 0 | j-a-w-r/jawr-main-repo,ic3fox/jawr,davidwebster48/jawr-main-repo,davidwebster48/jawr-main-repo,ic3fox/jawr,davidwebster48/jawr-main-repo,diorcety/jawr,ic3fox/jawr-main-repo,maximmold/jawr-main-repo,diorcety/jawr,j-a-w-r/jawr-main-repo,j-a-w-r/jawr-main-repo,maximmold/jawr-main-repo,ic3fox/jawr,ic3fox/jawr-main-repo,ic3fox/jawr-main-repo,ic3fox/jawr-main-repo,maximmold/jawr-main-repo,maximmold/jawr-main-repo,ic3fox/jawr,diorcety/jawr,j-a-w-r/jawr-main-repo,davidwebster48/jawr-main-repo | /**
* Copyright 2007-2012 Jordi Hernández Sellés, Ibrahim Chaehoi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.jawr.web.servlet;
import java.io.IOException;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.jawr.web.JawrConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
/**
*
* Main Jawr servlet. Maps logical URLs to script bundles, which are generated
* on the fly (may be cached), and served as a single file.
*
*
* @author Jordi Hernández Sellés
* @author Ibrahim Chaehoi
*/
public class JawrServlet extends HttpServlet implements ServletContextListener {

	/** The serial version UID */
	private static final long serialVersionUID = -4551240917172286444L;

	/** The logger */
	private static final Logger LOGGER = LoggerFactory
			.getLogger(JawrServlet.class);

	/** The marker used to flag servlet initialization failures as fatal */
	private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

	/** The request handler to which all bundle requests are delegated */
	protected JawrRequestHandler requestHandler;

	/*
	 * (non-Javadoc)
	 * 
	 * @see javax.servlet.GenericServlet#init()
	 */
	public void init() throws ServletException {
		try {
			String type = getServletConfig().getInitParameter(
					JawrConstant.TYPE_INIT_PARAMETER);
			// Binary resources get a dedicated handler; everything else
			// (JS/CSS bundles) uses the standard request handler.
			if (JawrConstant.BINARY_TYPE.equals(type)) {
				requestHandler = new JawrBinaryResourceRequestHandler(
						getServletContext(), getServletConfig());
			} else {
				requestHandler = new JawrRequestHandler(getServletContext(),
						getServletConfig());
			}
		} catch (ServletException e) {
			logInitializationFailure(e);
			throw e;
		} catch (Throwable e) {
			// Throwable is intentionally broad: any failure (including
			// Errors) must surface as a servlet initialization failure
			logInitializationFailure(e);
			throw new ServletException(e);
		}
	}

	/**
	 * Logs an initialization failure of this servlet with the FATAL marker.
	 * 
	 * @param e the cause of the failure
	 */
	private void logInitializationFailure(Throwable e) {
		LOGGER.error(FATAL, "Jawr servlet with name "
				+ getServletConfig().getServletName()
				+ " failed to initialize properly. ");
		LOGGER.error(FATAL, "Cause:");
		LOGGER.error(FATAL, e.getMessage(), e);
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see
	 * javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest
	 * , javax.servlet.http.HttpServletResponse)
	 */
	protected void doGet(HttpServletRequest req, HttpServletResponse resp)
			throws ServletException, IOException {
		requestHandler.doGet(req, resp);
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see javax.servlet.GenericServlet#destroy()
	 */
	public void destroy() {
		// The handler may be null if init() failed before creating it
		if (requestHandler != null) {
			requestHandler.destroy();
		}
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see
	 * javax.servlet.ServletContextListener#contextInitialized(javax.servlet
	 * .ServletContextEvent)
	 */
	@Override
	public void contextInitialized(ServletContextEvent sce) {
		// Nothing to do here: initialization happens in init(), driven by
		// the servlet configuration rather than the context lifecycle.
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see javax.servlet.ServletContextListener#contextDestroyed(javax.servlet.
	 * ServletContextEvent)
	 */
	@Override
	public void contextDestroyed(ServletContextEvent sce) {
		// Same cleanup as destroy(); guard against a failed init()
		if (requestHandler != null) {
			requestHandler.destroy();
		}
	}
}
| jawr-core/src/main/java/net/jawr/web/servlet/JawrServlet.java | /**
* Copyright 2007-2012 Jordi Hernández Sellés, Ibrahim Chaehoi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.jawr.web.servlet;
import java.io.IOException;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.jawr.web.JawrConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
/**
*
* Main Jawr servlet. Maps logical URLs to script bundles, which are generated
* on the fly (may be cached), and served as a single file.
*
*
* @author Jordi Hernández Sellés
* @author Ibrahim Chaehoi
*/
public class JawrServlet extends HttpServlet implements ServletContextListener {
/** The serial version UID */
private static final long serialVersionUID = -4551240917172286444L;
/** The logger */
private static final Logger LOGGER = LoggerFactory
.getLogger(JawrServlet.class);
/** The request handler */
protected JawrRequestHandler requestHandler;
/*
* (non-Javadoc)
*
* @see javax.servlet.GenericServlet#init()
*/
public void init() throws ServletException {
try {
String type = getServletConfig().getInitParameter(
JawrConstant.TYPE_INIT_PARAMETER);
if (JawrConstant.BINARY_TYPE.equals(type)) {
requestHandler = new JawrBinaryResourceRequestHandler(
getServletContext(), getServletConfig());
} else {
requestHandler = new JawrRequestHandler(getServletContext(),
getServletConfig());
}
} catch (ServletException e) {
Marker fatal = MarkerFactory.getMarker("FATAL");
LOGGER.error(fatal, "Jawr servlet with name "
+ getServletConfig().getServletName()
+ " failed to initialize properly. ");
LOGGER.error(fatal, "Cause:");
LOGGER.error(fatal, e.getMessage(), e);
throw e;
} catch (RuntimeException e) {
Marker fatal = MarkerFactory.getMarker("FATAL");
LOGGER.error(fatal, "Jawr servlet with name "
+ getServletConfig().getServletName()
+ " failed to initialize properly. ");
LOGGER.error(fatal, "Cause: ");
LOGGER.error(fatal, e.getMessage(), e);
throw new ServletException(e);
}
}
/*
* (non-Javadoc)
*
* @see
* javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest
* , javax.servlet.http.HttpServletResponse)
*/
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
requestHandler.doGet(req, resp);
}
/*
* (non-Javadoc)
*
* @see javax.servlet.GenericServlet#destroy()
*/
public void destroy() {
requestHandler.destroy();
}
/*
* (non-Javadoc)
*
* @see
* javax.servlet.ServletContextListener#contextInitialized(javax.servlet
* .ServletContextEvent)
*/
@Override
public void contextInitialized(ServletContextEvent sce) {
}
/*
* (non-Javadoc)
*
* @see javax.servlet.ServletContextListener#contextDestroyed(javax.servlet.
* ServletContextEvent)
*/
@Override
public void contextDestroyed(ServletContextEvent sce) {
requestHandler.destroy();
}
}
| log non runtime exceptions in JawrServlet, such as NoClassDefFoundError, which may be thrown if rhino, lesscss or some other jars are missing in a project
| jawr-core/src/main/java/net/jawr/web/servlet/JawrServlet.java | log non runtime exceptions in JawrServlet, such as NoClassDefFoundError, which may be thrown if rhino, lesscss or some other jars are missing in a project | <ide><path>awr-core/src/main/java/net/jawr/web/servlet/JawrServlet.java
<ide> LOGGER.error(fatal, "Cause:");
<ide> LOGGER.error(fatal, e.getMessage(), e);
<ide> throw e;
<del> } catch (RuntimeException e) {
<add> } catch (Throwable e) {
<ide> Marker fatal = MarkerFactory.getMarker("FATAL");
<ide> LOGGER.error(fatal, "Jawr servlet with name "
<ide> + getServletConfig().getServletName() |
|
JavaScript | mit | 244cedaea9aafbbd842ffff5b97f57b05f674ccd | 0 | ClinGen/clincoded,ClinGen/clincoded,ClinGen/clincoded,ClinGen/clincoded,ClinGen/clincoded | 'use strict';
var React = require('react');
var url = require('url');
var _ = require('underscore');
var moment = require('moment');
var panel = require('../libs/bootstrap/panel');
var form = require('../libs/bootstrap/form');
var globals = require('./globals');
var curator = require('./curator');
var RestMixin = require('./rest').RestMixin;
var methods = require('./methods');
var parseAndLogError = require('./mixins').parseAndLogError;
var CuratorHistory = require('./curator_history');
var modal = require('../libs/bootstrap/modal');
var Modal = modal.Modal;
var CurationMixin = curator.CurationMixin;
var RecordHeader = curator.RecordHeader;
var CurationPalette = curator.CurationPalette;
var PanelGroup = panel.PanelGroup;
var Panel = panel.Panel;
var Form = form.Form;
var FormMixin = form.FormMixin;
var Input = form.Input;
var InputMixin = form.InputMixin;
var queryKeyValue = globals.queryKeyValue;
var userMatch = globals.userMatch;
var ProvisionalCuration = React.createClass({
mixins: [FormMixin, RestMixin, CurationMixin, CuratorHistory],
contextTypes: {
navigate: React.PropTypes.func,
closeModal: React.PropTypes.func
},
queryValues: {},
getInitialState: function() {
return {
user: null, // login user uuid
gdm: null, // current gdm object, must be null initially.
provisional: null, // login user's existing provisional object, must be null initially.
//assessments: null, // list of all assessments, must be nul initially.
totalScore: null,
autoClassification: null
};
},
loadData: function() {
var gdmUuid = this.queryValues.gdmUuid;
// get gdm from db.
var uris = _.compact([
gdmUuid ? '/gdm/' + gdmUuid : '' // search for entire data set of the gdm
]);
this.getRestDatas(
uris
).then(datas => {
var stateObj = {};
stateObj.user = this.props.session.user_properties.uuid;
datas.forEach(function(data) {
switch(data['@type'][0]) {
case 'gdm':
stateObj.gdm = data;
break;
default:
break;
}
});
// Update the Curator Mixin OMIM state with the current GDM's OMIM ID.
if (stateObj.gdm && stateObj.gdm.omimId) {
this.setOmimIdState(stateObj.gdm.omimId);
}
// search for provisional owned by login user
if (stateObj.gdm.provisionalClassifications && stateObj.gdm.provisionalClassifications.length > 0) {
for (var i in stateObj.gdm.provisionalClassifications) {
var owner = stateObj.gdm.provisionalClassifications[i].submitted_by;
if (owner.uuid === stateObj.user) { // find
stateObj.provisional = stateObj.gdm.provisionalClassifications[i];
break;
}
}
}
stateObj.previousUrl = url;
this.setState(stateObj);
return Promise.resolve();
}).catch(function(e) {
console.log('OBJECT LOAD ERROR: %s — %s', e.statusText, e.url);
});
},
componentDidMount: function() {
this.loadData();
},
submitForm: function(e) {
// Don't run through HTML submit handler
e.preventDefault();
e.stopPropagation();
// Save all form values from the DOM.
this.saveAllFormValues();
if (this.validateDefault()) {
var calculate = queryKeyValue('calculate', this.props.href);
var edit = queryKeyValue('edit', this.props.href);
var newProvisional = this.state.provisional ? curator.flatten(this.state.provisional) : {};
newProvisional.totalScore = Number(this.state.totalScore);
newProvisional.autoClassification = this.state.autoClassification;
newProvisional.alteredClassification = this.getFormValue('alteredClassification');
newProvisional.reasons = this.getFormValue('reasons');
// check required item (reasons)
var formErr = false;
if (!newProvisional.reasons && newProvisional.autoClassification !== newProvisional.alteredClassification) {
formErr = true;
this.setFormErrors('reasons', 'Required when changing classification.');
}
if (!formErr) {
var backUrl = '/curation-central/?gdm=' + this.state.gdm.uuid;
backUrl += this.queryValues.pmid ? '&pmid=' + this.queryValues.pmid : '';
if (this.state.provisional) { // edit existing provisional
this.putRestData('/provisional/' + this.state.provisional.uuid, newProvisional).then(data => {
var provisionalClassification = data['@graph'][0];
// Record provisional classification history
var meta = {
provisionalClassification: {
gdm: this.state.gdm['@id'],
alteredClassification: provisionalClassification.alteredClassification
}
};
this.recordHistory('modify', provisionalClassification, meta);
this.resetAllFormValues();
window.history.go(-1);
}).catch(function(e) {
console.log('PROVISIONAL GENERATION ERROR = : %o', e);
});
}
else { // save a new calculation and provisional classification
this.postRestData('/provisional/', newProvisional).then(data => {
return data['@graph'][0];
}).then(savedProvisional => {
// Record provisional classification history
var meta = {
provisionalClassification: {
gdm: this.state.gdm['@id'],
alteredClassification: savedProvisional.alteredClassification
}
};
this.recordHistory('add', savedProvisional, meta);
var theGdm = curator.flatten(this.state.gdm);
if (theGdm.provisionalClassifications) {
theGdm.provisionalClassifications.push(savedProvisional['@id']);
}
else {
theGdm.provisionalClassifications = [savedProvisional['@id']];
}
return this.putRestData('/gdm/' + this.state.gdm.uuid, theGdm).then(data => {
return data['@graph'][0];
});
}).then(savedGdm => {
this.resetAllFormValues();
window.history.go(-1);
}).catch(function(e) {
console.log('PROVISIONAL GENERATION ERROR = %o', e);
});
}
}
}
},
cancelForm: function(e) {
// Changed modal cancel button from a form input to a html button
// as to avoid accepting enter/return key as a click event.
// Removed hack in this method.
window.history.go(-1);
},
render: function() {
this.queryValues.gdmUuid = queryKeyValue('gdm', this.props.href);
var calculate = queryKeyValue('calculate', this.props.href);
var edit = queryKeyValue('edit', this.props.href);
var session = (this.props.session && Object.keys(this.props.session).length) ? this.props.session : null;
var gdm = this.state.gdm ? this.state.gdm : null;
var provisional = this.state.provisional ? this.state.provisional : null;
var show_clsfctn = queryKeyValue('classification', this.props.href);
var summaryMatrix = queryKeyValue('summarymatrix', this.props.href);
var expMatrix = queryKeyValue('expmatrix', this.props.href);
return (
<div>
{ show_clsfctn === 'display' ?
Classification.call()
:
( gdm ?
<div>
<RecordHeader gdm={gdm} omimId={this.state.currOmimId} updateOmimId={this.updateOmimId} session={session} summaryPage={true} linkGdm={true} />
<div className="container">
{
(provisional && edit === 'yes') ?
EditCurrent.call(this)
:
( calculate === 'yes' ?
<div>
<h1>Curation Summary & Provisional Classification</h1>
{
provisional ?
<PanelGroup accordion>
<Panel title="Last Saved Summary & Provisional Classification" open>
<div className="row">
<div className="col-sm-5"><strong>Date Generated:</strong></div>
<div className="col-sm-7"><span>{moment(provisional.last_modified).format("YYYY MMM DD, h:mm a")}</span></div>
</div>
<div className="row">
<div className="col-sm-5">
<strong>Total Score:</strong>
</div>
<div className="col-sm-7"><span>{provisional.totalScore}</span></div>
</div>
<div className="row">
<div className="col-sm-5">
<strong>Calculated Clinical Validity Classification:</strong>
</div>
<div className="col-sm-7"><span>{provisional.autoClassification}</span></div>
</div>
<div className="row">
<div className="col-sm-5">
<strong>Selected Clinical Validity Classification:</strong>
</div>
<div className="col-sm-7"><span>{provisional.alteredClassification}</span></div>
</div>
<div className="row">
<div className="col-sm-5">
<strong>Reason(s):</strong>
</div>
<div className="col-sm-7"><span>{this.state.provisional.reasons}</span></div>
</div>
<div className="row"> </div>
</Panel>
</PanelGroup>
:
null
}
{NewCalculation.call(this)}
</div>
:
null
)
}
</div>
</div>
:
null
)
}
</div>
);
}
});
globals.curator_page.register(ProvisionalCuration, 'curator_page', 'provisional-curation');
// Generate Classification Description page for url ../provisional-curation/?gdm=GDMId&classification=display
var Classification = function() {
return (
<div className="container classification-cell">
<h1>Clinical Validity Classifications</h1>
<div className="classificationTable">
<table>
<tbody>
<tr className="greyRow">
<td colSpan='2' className="titleCell">Evidence Level</td>
<td className="titleCell">Evidence Description</td>
</tr>
<tr>
<td rowSpan='7' className="verticalCell">
<div className="verticalContent spptEvd">
Supportive Evidence
</div>
</td>
<td className="levelCell">DEFINITIVE</td>
<td>
The role of this gene in this particular disease hase been repeatedly demonstrated in both the research and clinical
diagnostic settings, and has been upheld over time (in general, at least 3 years). No convincing evidence has emerged
that contradicts the role of the gene in the specified disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="levelCell">STRONG</td>
<td>
The role of this gene in disease has been independently demonstrated in at least two separate studies providing
<strong>strong</strong> supporting evidence for this gene's role in disease, such as the following types of evidence:
<ul>
<li>Strong variant-level evidence demonstrating numerous unrelated probands with variants that provide convincing
evidence for disease causality¹</li>
<li>Compelling gene-level evidence from different types of supporting experimental data².</li>
</ul>
In addition, no convincing evidence has emerged that contradicts the role of the gene in the noted disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="levelCell">MODERATE</td>
<td>
There is <strong>moderate</strong> evidence to support a causal role for this gene in this diseaese, such as:
<ul>
<li>At least 3 unrelated probands with variants that provide convincing evidence for disease causality¹</li>
<li>Moderate experimental data² supporting the gene-disease association</li>
</ul>
The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged
that contradicts the role of the gene in the noded disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="levelCell">LIMITED</td>
<td>
There is <strong>limited</strong> evidence to support a causal role for this gene in this disease, such as:
<ul>
<li>Fewer than three observations of variants that provide convincing evidence for disease causality¹</li>
<li>Multiple variants reported in unrelated probands but <i>without</i> sufficient evidence that the variants alter function</li>
<li>Limited experimental data² supporting the gene-disease association</li>
</ul>
The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged that
contradicts the role of the gene in the noted disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td colSpan="2" className="levelCell">NO REPORTED<br />EVIDENCE</td>
<td>
No evidence reported for a causal role in disease. These genes might be "candidate" genes based on animal models or implication
in pathways known to be involved in human diseases, but no reports have implicated the gene in human disease cases.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="verticalCell">
<div className="verticalContent cntrdctEvd">
Contradictory Evidence
</div>
</td>
<td className="levelCell">
CONFLICTING<br />EVIDENCE<br />REPORTED
</td>
<td>
Although there has been an assertion of a gene-disease association, conflicting evidence for the role of this gene in disease has arisen
since the time of the initial report indicating a disease association. Depending on the quantity and quality of evidence disputing the
association, the gene/disease association may be further defined by the following two sub-categories:
<ol className="olTitle">
<li type="1">
Disputed
<ol className="olContent">
<li type="a">
Convincing evidence <i>disputing</i> a role for this gene in this disease has arisen since the initial report identifying an
association between the gene and disease.
</li>
<li type="a">
Refuting evidence need not outweigh existing evidence supporting the gene:disease association.
</li>
</ol>
</li>
<li type="1">
Refuted
<ol className="olContent">
<li type="a">
Evidence refuting the role of the gene in the specified disease has been reported and significantly outweighs any evidence
supporting the role.
</li>
<li type="a">
This designation is to be applied at the discretion of clinical domain experts after thorough review of available evidence
</li>
</ol>
</li>
</ol>
</td>
</tr>
<tr className="greyRow">
<td colSpan="3" className="levelCell">NOTES</td>
</tr>
<tr>
<td colSpan="3">
<p>
¹Variants that have evidence to disrupt function and/or have other strong genetic and population data (e.g. <i>de novo</i>
occurrence, absence in controls, etc) can be used as evidence in support of a variant's causality in this framework.
</p>
<p>²Examples of appropriate types of supporting experimental data based on those outlined in MacArthur et al. 2014.</p>
</td>
</tr>
</tbody>
</table>
</div>
</div>
);
};
// Description of the 4 levels of classification shown in the summary table.
// Renders the static "LIMITED" classification description panel.
var LimitedClassification = function() {
    return (
        <div>
            <p className="title underline-text title-p">LIMITED CLASSIFICATION</p>
            <p>There is <strong>limited</strong> evidence to support a causal role for this gene in this disease, such as:</p>
            <ul>
                <li>Fewer than three observations of variants that provide convincing evidence for disease causality&sup1;</li>
                <li>Multiple variants reported in unrelated probands but <i>without</i> sufficient evidence that the variants alter function</li>
                <li>Limited experimental data&sup2; supporting the gene-disease association</li>
            </ul>
            <p>The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged that contradicts the role of the gene in the noted disease.</p>
        </div>
    );
};
var ModerateClassification = function() {
return (
<div>
<p className="title underline-text title-p">MODERATE CLASSIFICATION</p>
<p>There is <strong>moderate</strong> evidence to support a causal role for this gene in this diseaese, such as:</p>
<ul>
<li>At least 3 unrelated probands with variants that provide convincing evidence for disease causality¹</li>
<li>Moderate experimental data² supporting the gene-disease association</li>
</ul>
<p>The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged that contradicts the role of the gene in the noded disease.</p>
</div>
);
};
// Renders the static "STRONG" classification description panel.
var StrongClassification = function() {
    return (
        <div>
            <p className="title underline-text title-p">STRONG CLASSIFICATION</p>
            <p>
                The role of this gene in disease has been independently demonstrated in at least two separate studies providing&nbsp;
                <strong>strong</strong> supporting evidence for this gene&#39;s role in disease, such as the following types of evidence:
            </p>
            <ul>
                <li>Strong variant-level evidence demonstrating numerous unrelated probands with variants that provide convincing evidence for disease causality&sup1;</li>
                <li>Compelling gene-level evidence from different types of supporting experimental data&sup2;.</li>
            </ul>
            <p>In addition, no convincing evidence has emerged that contradicts the role of the gene in the noted disease.</p>
        </div>
    );
};
var DefinitiveClassification = function() {
return (
<div>
<p className="title underline-text title-p">DEFINITIVE CLASSIFICATION</p>
<p>
The role of this gene in this particular disease hase been repeatedly demonstrated in both the research and clinical
diagnostic settings, and has been upheld over time (in general, at least 3 years). No convincing evidence has emerged
that contradicts the role of the gene in the specified disease.
</p>
</div>
);
};
// Edit page for url ../provisional-curation/?gdm=GDMId&edit=yes
// Renders the edit form for an existing provisional classification. Invoked
// with EditCurrent.call(this) from ProvisionalCuration.render, so `this` is
// the component instance.
var EditCurrent = function() {
    var alteredClassification = this.state.provisional.alteredClassification ? this.state.provisional.alteredClassification : 'none';
    // NOTE(review): direct mutation of this.state during render is a React
    // anti-pattern, but submitForm reads state.totalScore/autoClassification
    // when saving, so these assignments appear load-bearing — confirm before
    // refactoring to setState or local variables.
    this.state.totalScore = this.state.provisional.totalScore;
    this.state.autoClassification = this.state.provisional.autoClassification;

    return (
        <div>
            <h1>Edit Summary and Provisional Classification</h1>
            <Form submitHandler={this.submitForm} formClassName="form-horizontal form-std">
                <PanelGroup accordion>
                    <Panel title="Currently Saved Calculation and Classification" open>
                        <div className="row">
                            <div className="col-sm-5"><strong className="pull-right">Total Score:</strong></div>
                            <div className="col-sm-7"><span>{this.state.totalScore}</span></div>
                        </div>
                        <br />
                        <div className="row">
                            <div className="col-sm-5">
                                <strong className="pull-right">Calculated&nbsp;
                                    <a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>
                                    :
                                </strong>
                            </div>
                            <div className="col-sm-7"><span>{this.state.autoClassification}</span></div>
                        </div>
                        <br />
                        <div className="row">
                            <Input type="select" ref="alteredClassification" value={alteredClassification} labelClassName="col-sm-5 control-label" wrapperClassName="col-sm-7"
                                label={<strong>Select Provisional <a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>:</strong>}
                                groupClassName="form-group" handleChange={this.handleChange}>
                                <option value="Definitive">Definitive</option>
                                <option value="Strong">Strong</option>
                                <option value="Moderate">Moderate</option>
                                <option value="Limited">Limited</option>
                                <option value="No Reported Evidence">No Evidence</option>
                                <option value="Disputed">Disputed</option>
                                <option value="Refuted">Refuted</option>
                            </Input>
                        </div>
                        <div className="row">
                            <Input type="textarea" ref="reasons" label="Explain Reason(s) for Change:" rows="5" labelClassName="col-sm-5 control-label"
                                value={this.state.provisional && this.state.provisional.reasons} wrapperClassName="col-sm-7" groupClassName="form-group"
                                error={this.getFormError('reasons')} clearError={this.clrFormErrors.bind(null, 'reasons')}/>
                        </div>
                        <div className="row">
                            <div className="col-sm-5"><strong>Date Created:</strong></div>
                            <div className="col-sm-7">
                                <span>{moment(this.state.provisional.date_created).format("YYYY MMM DD, h:mm a")}</span>
                            </div>
                        </div>
                        <div className="row">
                            <div className="col-sm-5"><strong>Last Modified:</strong></div>
                            <div className="col-sm-7">
                                <span>{moment(this.state.provisional.last_modified).format("YYYY MMM DD, h:mm a")}</span>
                            </div>
                        </div>
                        <div><span>&nbsp;</span></div>
                        <br />
                    </Panel>
                </PanelGroup>
                <div className='modal-footer'>
                    <Input type="button" inputClassName="btn-default btn-inline-spacer" clickHandler={this.cancelForm} title="Cancel" />
                    <Input type="submit" inputClassName="btn-primary btn-inline-spacer pull-right" id="submit" title="Save" />
                </div>
            </Form>
        </div>
    );
};
// Walks a list of families (from a GDM annotation directly, or via a group)
// and accumulates the pieces of data the score calculation needs:
//   - userAssessments.segNot is bumped for each segregation entered by `user`
//   - segregationCount/segregationPoints accumulate LOD scores flagged for
//     inclusion (published score when lodPublished is true, estimated when false)
//   - individualMatched collects scored proband individuals from each family
// The individualsCollected, annotation, pathoVariantIdList and assessments
// parameters are accepted and returned unchanged, preserving the existing
// call signature.
// Returns a dictionary of all accumulated values for the caller to unpack.
var FamilyScraper = function(user, families, individualsCollected, annotation, pathoVariantIdList, userAssessments, assessments, segregationCount, segregationPoints, individualMatched) {
    for (var family of families) {
        var segregation = family.segregation;
        // Only segregations entered by the logged-in curator are tallied.
        if (segregation && family.submitted_by.uuid === user) {
            userAssessments['segNot'] += 1;
            // Fold this family's LOD score into the aggregate when flagged.
            if (segregation.includeLodScoreInAggregateCalculation) {
                var published = ("lodPublished" in segregation) ? segregation.lodPublished : null;
                if (published === true && segregation.publishedLodScore) {
                    segregationCount += 1;
                    segregationPoints += segregation.publishedLodScore;
                } else if (published === false && segregation.estimatedLodScore) {
                    segregationCount += 1;
                    segregationPoints += segregation.estimatedLodScore;
                }
            }
        }
        // Pull scored proband individuals out of the family.
        if (family.individualIncluded && family.individualIncluded.length) {
            individualMatched = IndividualScraper(family.individualIncluded, individualMatched);
        }
    }
    return {
        individualsCollected: individualsCollected,
        userAssessments: userAssessments,
        assessments: assessments,
        segregationCount: segregationCount,
        segregationPoints: segregationPoints,
        individualMatched: individualMatched
    };
};
// Appends every scored proband from `individuals` onto the accumulator array.
// A falsy `individuals` list is tolerated (nothing is added). Mutates and
// returns `individualMatched` so callers can chain or reassign.
var IndividualScraper = function(individuals, individualMatched) {
    if (individuals) {
        for (var i = 0; i < individuals.length; i++) {
            var candidate = individuals[i];
            var isScoredProband = candidate.proband === true && (candidate.scores && candidate.scores.length);
            if (isScoredProband) {
                individualMatched.push(candidate);
            }
        }
    }
    return individualMatched;
};
// Generate a new summary for url ../provisional-curation/?gdm=GDMId&calculate=yes
// Calculation rules are defined by Small GCWG. See ClinGen_Interface_4_2015.pptx and Clinical Validity Classifications for detail
var NewCalculation = function() {
var gdm = this.state.gdm;
const MAX_SCORE_CONSTANTS = {
VARIANT_IS_DE_NOVO: 12,
PREDICTED_OR_PROVEN_NULL_VARIANT: 10,
OTHER_VARIANT_TYPE_WITH_GENE_IMPACT: 7,
AUTOSOMAL_RECESSIVE: 12,
SEGREGATION: 7,
CASE_CONTROL: 12,
FUNCTIONAL: 2,
FUNCTIONAL_ALTERATION: 2,
MODELS_RESCUE: 4,
GENETIC_EVIDENCE: 12,
EXPERIMENTAL_EVIDENCE: 6,
TOTAL: 18
};
/*****************************************************/
/* VARIABLES FOR EVIDENCE SCORE TABLE */
/*****************************************************/
// variables for autosomal dominant data
let probandOtherVariantCount = 0, probandOtherVariantPoints = 0, probandOtherVariantPointsCounted = 0;
let probandNullVariantCount = 0, probandNullVariantPoints = 0, probandNullVariantPointsCounted = 0;
let variantDenovoCount = 0, variantDenovoPoints = 0, variantDenovoPointsCounted = 0;
// variables for autosomal recessive data
let autosomalRecessivePointsCounted = 0;
let twoVariantsProvenCount = 0, twoVariantsProvenPoints = 0;
let twoVariantsNotProvenCount = 0, twoVariantsNotProvenPoints = 0;
// variables for segregation data
// segregationPoints is actually the raw, unconverted score; segregationPointsCounted is calculated and displayed score
let segregationCount = 0, segregationPoints = 0, segregationPointsCounted = 0;
// variables for case-control data
let caseControlCount = 0, caseControlPoints = 0, caseControlPointsCounted;
// variables for Experimental data
let functionalPointsCounted = 0, functionalAlterationPointsCounted = 0, modelsRescuePointsCounted = 0;
let biochemicalFunctionCount = 0, biochemicalFunctionPoints = 0;
let proteinInteractionsCount = 0, proteinInteractionsPoints = 0;
let expressionCount = 0, expressionPoints = 0;
let patientCellsCount = 0, patientCellsPoints = 0;
let nonPatientCellsCount = 0, nonPatientCellsPoints = 0;
let animalModelCount = 0, animalModelPoints = 0;
let cellCultureCount = 0, cellCulturePoints = 0;
let rescueCount = 0, rescuePoints = 0;
let rescueEngineeredCount = 0, rescueEngineeredPoints = 0;
// variables for total counts
let geneticEvidenceTotalPoints = 0, experimentalEvidenceTotalPoints = 0, totalPoints = 0;
/*****************************************************/
/* Find all proband individuals that had been scored */
/*****************************************************/
let probandTotal = []; // Total proband combined
let probandFamily = []; // Total probands associated with families from all annotations
let probandIndividual = []; // Total proband individuals from all annotations
var h, i, j, k, l;
// initial values of assessments
var userAssessments = {
"variantSpt": 0,
"variantReview": 0,
"variantCntdct": 0,
"variantNot": 0,
"expSpt": 0,
"expReview": 0,
"expCntdct": 0,
"expNot": 0,
"segSpt": 0,
"segReview": 0,
"segCntdct": 0,
"segNot": 0
};
// Collect variants from user's pathogenicity
var gdmPathoList = gdm.variantPathogenicity;
var pathoVariantIdList = {
"support": [],
"review": [],
"contradict": []
};
gdmPathoList.forEach(gdmPatho => {
let variantUuid = gdmPatho.variant.uuid;
// Collect login user's variant assessments, separated as 3 different values.
if (gdmPatho.assessments && gdmPatho.assessments.length > 0) {
gdmPatho.assessments.forEach(assessment => {
if (assessment.submitted_by.uuid === this.state.user && assessment.value === 'Supports') {
pathoVariantIdList['support'].push(variantUuid);
}
else if (assessment.submitted_by.uuid === this.state.user && assessment.value === 'Review') {
pathoVariantIdList['review'].push(variantUuid);
}
else if (assessment.submitted_by.uuid === this.state.user && assessment.value === 'Contradicts') {
pathoVariantIdList['contradict'].push(variantUuid);
}
});
}
});
var exp_scores = [0, 0, 0];
var expType = {
"Expression": 0,
"Protein Interactions": 0,
"Biochemical Function": 0,
"Functional Alteration (Patient cells)": 0,
"Functional Alteration (Engineered equivalent)": 0,
"Model Systems (Animal model)": 0,
"Model Systems (Engineered equivalent)": 0,
"Rescue (Patient cells)": 0,
"Rescue (Engineered equivalent)": 0
};
var individualsCollected = {
"probandInd": [],
"allVariants": [],
"sptVariants": [],
"rvwVariants": [],
"cntdctVariants": []
};
var proband_variants = [];
let tempFamilyScraperValues = {};
let individualMatched = [];
let caseControlTotal = [];
// scan gdm
let annotations = gdm.annotations && gdm.annotations.length ? gdm.annotations : [];
annotations.forEach(annotation => {
let groups, families, individuals, assessments, experimentals;
// loop through groups
groups = annotation.groups && annotation.groups.length ? annotation.groups : [];
groups.forEach(group => {
// loop through families using FamilyScraper
families = group.familyIncluded && group.familyIncluded.length ? group.familyIncluded : [];
tempFamilyScraperValues = FamilyScraper(this.state.user, families, individualsCollected, annotation, pathoVariantIdList, userAssessments, assessments, segregationCount, segregationPoints, individualMatched);
individualsCollected = tempFamilyScraperValues['individualsCollected'];
userAssessments = tempFamilyScraperValues['userAssessments'];
assessments = tempFamilyScraperValues['assessments'];
segregationCount = tempFamilyScraperValues['segregationCount'];
segregationPoints = tempFamilyScraperValues['segregationPoints'];
individualMatched = tempFamilyScraperValues['individualMatched'];
// get proband individuals of group
if (group.individualIncluded && group.individualIncluded.length) {
individualMatched = IndividualScraper(group.individualIncluded, individualMatched);
}
/*
if (group.individualIncluded && group.individualIncluded.length) {
individualsCollected = filter(individualsCollected, group.individualIncluded, annotation.article, pathoVariantIdList);
}
*/
});
// loop through families using FamilyScraper
families = annotation.families && annotation.families.length ? annotation.families : [];
tempFamilyScraperValues = FamilyScraper(this.state.user, families, individualsCollected, annotation, pathoVariantIdList, userAssessments, assessments, segregationCount, segregationPoints, individualMatched);
individualsCollected = tempFamilyScraperValues['individualsCollected'];
userAssessments = tempFamilyScraperValues['userAssessments'];
assessments = tempFamilyScraperValues['assessments'];
segregationCount = tempFamilyScraperValues['segregationCount'];
segregationPoints = tempFamilyScraperValues['segregationPoints'];
individualMatched = tempFamilyScraperValues['individualMatched'];
// push all matched individuals from families and families of groups to probandFamily
individualMatched.forEach(item => {
probandFamily.push(item);
});
// loop through individuals
if (annotation.individuals && annotation.individuals.length) {
// get proband individuals
individualMatched = [];
individualMatched = IndividualScraper(annotation.individuals, individualMatched);
// push all matched individuals to probandIndividual
individualMatched.forEach(item => {
probandIndividual.push(item);
});
//individualsCollected = filter(individualsCollected, annotation.individuals, annotation.article, pathoVariantIdList);
}
// loop through case-controls
let caseControlMatched = [];
if (annotation.caseControlStudies && annotation.caseControlStudies.length) {
annotation.caseControlStudies.forEach(caseControl => {
if (caseControl.scores && caseControl.scores.length) {
caseControl.scores.forEach(score => {
if (score.submitted_by.uuid === this.state.user && score.score && score.score !== 'none') {
caseControlCount += 1;
caseControlPoints += parseFloat(score.score);
}
});
}
});
}
// loop through experimentals
experimentals = annotation.experimentalData && annotation.experimentalData.length ? annotation.experimentalData : [];
experimentals.forEach(experimental => {
// loop through scores, if any
if (experimental.scores && experimental.scores.length) {
experimental.scores.forEach(score => {
// only care about scores made by current user
if (score.submitted_by.uuid === this.state.user) {
// parse score of experimental
let experimentalScore = 0;
if (score.score && score.score !== 'none') {
experimentalScore = parseFloat(score.score); // Use the score selected by curator (if any)
} else if (score.calculatedScore && score.calculatedScore !== 'none') {
experimentalScore = parseFloat(score.calculatedScore); // Otherwise, use default score (if any)
}
userAssessments['expNot'] += 1;
// assign score to correct sub-type depending on experiment type and other variables
if (experimental.evidenceType && experimental.evidenceType === 'Biochemical Function') {
biochemicalFunctionCount += 1;
biochemicalFunctionPoints += experimentalScore;
} else if (experimental.evidenceType && experimental.evidenceType === 'Protein Interactions') {
proteinInteractionsCount += 1;
proteinInteractionsPoints += experimentalScore;
} else if (experimental.evidenceType && experimental.evidenceType === 'Expression') {
expressionCount += 1;
expressionPoints += experimentalScore;
} else if (experimental.evidenceType && experimental.evidenceType === 'Functional Alteration') {
if (experimental.functionalAlteration.cellMutationOrEngineeredEquivalent
&& experimental.functionalAlteration.cellMutationOrEngineeredEquivalent === 'Patient cells') {
patientCellsCount += 1;
patientCellsPoints += experimentalScore;
} else if (experimental.functionalAlteration.cellMutationOrEngineeredEquivalent
&& experimental.functionalAlteration.cellMutationOrEngineeredEquivalent === 'Engineered equivalent') {
nonPatientCellsCount += 1;
nonPatientCellsPoints += experimentalScore;
}
} else if (experimental.evidenceType && experimental.evidenceType === 'Model Systems') {
if (experimental.modelSystems.animalOrCellCulture
&& experimental.modelSystems.animalOrCellCulture === 'Animal model') {
animalModelCount += 1;
animalModelPoints += experimentalScore;
} else if (experimental.modelSystems.animalOrCellCulture
&& experimental.modelSystems.animalOrCellCulture === 'Engineered equivalent') {
cellCultureCount += 1;
cellCulturePoints += experimentalScore;
}
} else if (experimental.evidenceType && experimental.evidenceType === 'Rescue') {
if (experimental.rescue.patientCellOrEngineeredEquivalent
&& experimental.rescue.patientCellOrEngineeredEquivalent === 'Patient cells') {
rescueCount += 1;
rescuePoints += experimentalScore;
} else if (experimental.rescue.patientCellOrEngineeredEquivalent
&& experimental.rescue.patientCellOrEngineeredEquivalent === 'Engineered equivalent') {
rescueEngineeredCount += 1;
rescueEngineeredPoints += experimentalScore;
}
}
}
});
}
});
});
// combine all probands
probandTotal = probandFamily.concat(probandIndividual);
// scan probands
probandTotal.forEach(proband => {
proband.scores.forEach(score => {
if (score.submitted_by.uuid === this.state.user) {
// parse proband score
let probandScore = 0;
if (score.score && score.score !== 'none') {
probandScore += parseFloat(score.score);
} else if (score.calculatedScore && score.calculatedScore !== 'none') {
probandScore += parseFloat(score.calculatedScore);
}
// assign score to correct sub-type depending on score type
if (score.caseInfoType && score.caseInfoType === 'OTHER_VARIANT_TYPE_WITH_GENE_IMPACT' && score.scoreStatus === 'Score') {
probandOtherVariantCount += 1;
probandOtherVariantPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'PREDICTED_OR_PROVEN_NULL_VARIANT' && score.scoreStatus === 'Score') {
probandNullVariantCount += 1;
probandNullVariantPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'VARIANT_IS_DE_NOVO' && score.scoreStatus === 'Score') {
variantDenovoCount += 1;
variantDenovoPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'TWO_VARIANTS_WITH_GENE_IMPACT_IN_TRANS' && score.scoreStatus === 'Score') {
twoVariantsNotProvenCount += 1;
twoVariantsNotProvenPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'TWO_VARIANTS_IN_TRANS_WITH_ONE_DE_NOVO' && score.scoreStatus === 'Score') {
twoVariantsProvenCount += 1;
twoVariantsProvenPoints += probandScore;
}
}
});
});
// is the below few lines necessary? - MC
userAssessments['variantSpt'] = individualsCollected['sptVariants'].length;
userAssessments['variantReview'] = individualsCollected['rvwVariants'].length;
userAssessments['variantCntdct'] = individualsCollected['cntdctVariants'].length;
userAssessments['variantNot'] = individualsCollected['allVariants'].length - userAssessments['variantSpt'] - userAssessments['variantReview'] - userAssessments['variantCntdct'];
userAssessments['expNot'] = userAssessments['expNot'] - userAssessments['expSpt'] - userAssessments['expReview'] - userAssessments['expCntdct'];
userAssessments['segNot'] = userAssessments['segNot'] - userAssessments['segSpt'] - userAssessments['segReview'] - userAssessments['segCntdct'];
/**************************************************************************/
/* Comment block below may need to be removed/revised for new scoring matrix - MC
/**************************************************************************/
/*
// Collect articles and find the earliest publication year
var proband = 0;
var articleCollected = [];
var year = new Date();
var earliest = year.getFullYear();
individualsCollected['probandInd'].forEach(probandInd => {
if (probandInd.pmid && probandInd.pmid != '') {
proband += 1;
if (!in_array(probandInd.pmid, articleCollected)) {
articleCollected.push(probandInd.pmid);
earliest = get_earliest_year(earliest, probandInd.date);
}
}
});
// calculate scores
var currentYear = year.getFullYear();
var time = currentYear.valueOf() - earliest.valueOf();
var timeScore = 0, probandScore = 0, pubScore = 0, expScore = 0; // initialize scores to 0
if (time >= 3) {
timeScore = 2;
}
else if (time >= 1) {
timeScore = 1;
}
else {
timeScore = 0;
}
if (proband > 18) {
probandScore = 7;
}
else if (proband >15) {
probandScore = 6;
}
else if (proband > 12) {
probandScore = 5;
}
else if (proband > 9) {
probandScore = 4;
}
else if (proband > 6) {
probandScore = 3;
}
else if (proband > 3) {
probandScore = 2;
}
else if (proband >= 1) {
probandScore = 1;
}
else {
probandScore = 0;
}
if (articleCollected.length >= 5) {
pubScore = 5;
}
else {
pubScore = articleCollected.length;
}
if (articleCollected.length <= 2 && timeScore > 1) {
timeScore = 1;
}
var totalScore = probandScore + pubScore + timeScore + expScore;
// set calculated classification
var autoClassification = 'No Reported Evidence';
if (Math.floor(totalScore) >= 17){
autoClassification = 'Definitive';
}
else if (Math.floor(totalScore) >= 13) {
autoClassification = 'Strong';
}
else if (Math.floor(totalScore) >= 9) {
autoClassification = 'Moderate';
}
else if (Math.floor(totalScore) >= 2) {
autoClassification = 'Limited';
}
// save total score and calculated classification to state
this.state.totalScore = totalScore;
this.state.autoClassification = autoClassification;
// set score positons in html table
var probandRow = [], pubRow = [], timeRow = [];
for(i=0; i<8; i++) {
if (i === probandScore) {
probandRow.push(proband);
}
else {
probandRow.push('');
}
if (i === pubScore) {
pubRow.push(articleCollected.length);
}
else if (i < 6) {
pubRow.push('');
}
if (i === timeScore) {
timeRow.push(time);
}
else if (i < 3) {
timeRow.push('');
}
}
*/
// calculate segregation counted points
segregationPoints = +parseFloat(segregationPoints).toFixed(2);
if (segregationPoints >= 0.75 && segregationPoints <= 0.99) {
segregationPointsCounted = 1;
} else if (segregationPoints >= 1 && segregationPoints <= 1.24) {
segregationPointsCounted = .5;
} else if (segregationPoints >= 1.25 && segregationPoints <= 1.49) {
segregationPointsCounted = 2.5;
} else if (segregationPoints >= 1.5 && segregationPoints <= 1.74) {
segregationPointsCounted = 3;
} else if (segregationPoints >= 1.75 && segregationPoints <= 1.99) {
segregationPointsCounted = 3.5;
} else if (segregationPoints >= 2 && segregationPoints <= 2.49) {
segregationPointsCounted = 4;
} else if (segregationPoints >= 2.5 && segregationPoints <= 2.99) {
segregationPointsCounted = 4.5;
} else if (segregationPoints >= 3 && segregationPoints <= 3.49) {
segregationPointsCounted = 5;
} else if (segregationPoints >= 3.5 && segregationPoints <= 3.99) {
segregationPointsCounted = 5.5;
} else if (segregationPoints >= 4 && segregationPoints <= 4.49) {
segregationPointsCounted = 6;
} else if (segregationPoints >= 4.5 && segregationPoints <= 4.99) {
segregationPointsCounted = 6.5;
} else if (segregationPoints >= 5) {
segregationPointsCounted = MAX_SCORE_CONSTANTS.SEGREGATION;
}
// calculate other counted points
let tempPoints = 0;
probandOtherVariantPointsCounted = probandOtherVariantPoints < MAX_SCORE_CONSTANTS.OTHER_VARIANT_TYPE_WITH_GENE_IMPACT ? probandOtherVariantPoints : MAX_SCORE_CONSTANTS.OTHER_VARIANT_TYPE_WITH_GENE_IMPACT;
probandNullVariantPointsCounted = probandNullVariantPoints < MAX_SCORE_CONSTANTS.PREDICTED_OR_PROVEN_NULL_VARIANT ? probandNullVariantPoints : MAX_SCORE_CONSTANTS.PREDICTED_OR_PROVEN_NULL_VARIANT;
variantDenovoPointsCounted = variantDenovoPoints < MAX_SCORE_CONSTANTS.VARIANT_IS_DE_NOVO ? variantDenovoPoints : MAX_SCORE_CONSTANTS.VARIANT_IS_DE_NOVO;
tempPoints = twoVariantsProvenPoints + twoVariantsNotProvenPoints;
autosomalRecessivePointsCounted = tempPoints < MAX_SCORE_CONSTANTS.AUTOSOMAL_RECESSIVE ? tempPoints : MAX_SCORE_CONSTANTS.AUTOSOMAL_RECESSIVE;
caseControlPointsCounted = caseControlPoints < MAX_SCORE_CONSTANTS.CASE_CONTROL ? caseControlPoints : MAX_SCORE_CONSTANTS.CASE_CONTROL;
tempPoints = biochemicalFunctionPoints + proteinInteractionsPoints + expressionPoints;
functionalPointsCounted = tempPoints < MAX_SCORE_CONSTANTS.FUNCTIONAL ? tempPoints : MAX_SCORE_CONSTANTS.FUNCTIONAL;
tempPoints = patientCellsPoints + nonPatientCellsPoints;
functionalAlterationPointsCounted = tempPoints < MAX_SCORE_CONSTANTS.FUNCTIONAL_ALTERATION ? tempPoints : MAX_SCORE_CONSTANTS.FUNCTIONAL_ALTERATION;
tempPoints = animalModelPoints + cellCulturePoints + rescuePoints + rescueEngineeredPoints;
modelsRescuePointsCounted = tempPoints < MAX_SCORE_CONSTANTS.MODELS_RESCUE ? tempPoints : MAX_SCORE_CONSTANTS.MODELS_RESCUE;
tempPoints = probandOtherVariantPointsCounted + probandNullVariantPointsCounted + variantDenovoPointsCounted + autosomalRecessivePointsCounted + segregationPointsCounted + caseControlPointsCounted;
geneticEvidenceTotalPoints = tempPoints < MAX_SCORE_CONSTANTS.GENETIC_EVIDENCE ? tempPoints : MAX_SCORE_CONSTANTS.GENETIC_EVIDENCE;
tempPoints = functionalPointsCounted + functionalAlterationPointsCounted + modelsRescuePointsCounted;
experimentalEvidenceTotalPoints = tempPoints < MAX_SCORE_CONSTANTS.EXPERIMENTAL_EVIDENCE ? tempPoints : MAX_SCORE_CONSTANTS.EXPERIMENTAL_EVIDENCE;
totalPoints = geneticEvidenceTotalPoints + experimentalEvidenceTotalPoints;
return (
<div>
<Form submitHandler={this.submitForm} formClassName="form-horizontal form-std">
<PanelGroup accordion>
<Panel title="New Summary & Provisional Classification" open>
<div className="form-group">
<div>
The calculated values below are based on the set of saved evidence that existed when the "Generate New Summary"
button was clicked. To save these values and the calculated or selected Classification, click "Save" below - they
will then represent the new "Last Saved Summary & Provisional Classification".
</div>
<div><span> </span></div>
<br />
<div className="container">
<table className="summary-matrix">
<tbody>
<tr className="header large bg-color separator-below">
<td colSpan="5">Evidence Type</td>
<td>Count</td>
<td>Total Points</td>
<td>Points Counted</td>
</tr>
<tr>
<td rowSpan="8" className="header"><div className="rotate-text"><div>Genetic Evidence</div></div></td>
<td rowSpan="6" className="header"><div className="rotate-text"><div>Case-Level</div></div></td>
<td rowSpan="5" className="header"><div className="rotate-text"><div>Variant</div></div></td>
<td rowSpan="3" className="header">Autosomal Dominant Disease</td>
<td>Proband with other variant type with some evidence of gene impact</td>
<td>{probandOtherVariantCount}</td>
<td>{probandOtherVariantPoints}</td>
<td>{probandOtherVariantPointsCounted}</td>
</tr>
<tr>
<td>Proband with predicted or proven null variant</td>
<td>{probandNullVariantCount}</td>
<td>{probandNullVariantPoints}</td>
<td>{probandNullVariantPointsCounted}</td>
</tr>
<tr>
<td>Variant is <i>de novo</i></td>
<td>{variantDenovoCount}</td>
<td>{variantDenovoPoints}</td>
<td>{variantDenovoPointsCounted}</td>
</tr>
<tr>
<td rowSpan="2" className="header">Autosomal Recessive Disease</td>
                                    <td>Two variants (not predicted/proven null) with some evidence of gene impact in <i>trans</i></td>
<td>{twoVariantsNotProvenCount}</td>
<td>{twoVariantsNotProvenPoints}</td>
<td rowSpan="2">{autosomalRecessivePointsCounted}</td>
</tr>
<tr>
<td>Two variants in <i>trans</i> and at least one <i>de novo</i> or a predicted/proven null variant</td>
<td>{twoVariantsProvenCount}</td>
<td>{twoVariantsProvenPoints}</td>
</tr>
<tr>
<td colSpan="3" className="header">Segregation</td>
<td>{segregationCount}</td>
<td><span>{segregationPointsCounted}</span> (<abbr title="Combined LOD Score"><span>{segregationPoints}</span><strong>*</strong></abbr>)</td>
<td>{segregationPointsCounted}</td>
</tr>
<tr>
<td colSpan="4" className="header">Case-Control</td>
<td>{caseControlCount}</td>
<td>{caseControlPoints}</td>
<td>{caseControlPointsCounted}</td>
</tr>
<tr className="header separator-below">
<td colSpan="6">Genetic Evidence Total</td>
<td>{geneticEvidenceTotalPoints}</td>
</tr>
<tr>
<td rowSpan="10" className="header"><div className="rotate-text"><div>Experimental Evidence</div></div></td>
<td colSpan="3" rowSpan="3" className="header">Functional</td>
<td>Biochemical Functions</td>
<td>{biochemicalFunctionCount}</td>
<td>{biochemicalFunctionPoints}</td>
<td rowSpan="3">{functionalPointsCounted}</td>
</tr>
<tr>
<td>Protein Interactions</td>
<td>{proteinInteractionsCount}</td>
<td>{proteinInteractionsPoints}</td>
</tr>
<tr>
<td>Expression</td>
<td>{expressionCount}</td>
<td>{expressionPoints}</td>
</tr>
<tr>
<td colSpan="3" rowSpan="2" className="header">Functional Alteration</td>
<td>Patient Cells</td>
<td>{patientCellsCount}</td>
<td>{patientCellsPoints}</td>
<td rowSpan="2">{functionalAlterationPointsCounted}</td>
</tr>
<tr>
<td>Non-patient Cells</td>
<td>{nonPatientCellsCount}</td>
<td>{nonPatientCellsPoints}</td>
</tr>
<tr>
<td colSpan="3" rowSpan="4" className="header">Models & Rescue</td>
<td>Animal Model</td>
<td>{animalModelCount}</td>
<td>{animalModelPoints}</td>
<td rowSpan="4">{modelsRescuePointsCounted}</td>
</tr>
<tr>
<td>Cell Culture Model System</td>
<td>{cellCultureCount}</td>
<td>{cellCulturePoints}</td>
</tr>
<tr>
<td>Rescue in Animal Model</td>
<td>{rescueCount}</td>
<td>{rescuePoints}</td>
</tr>
<tr>
<td>Rescue in Engineered Equivalent</td>
<td>{rescueEngineeredCount}</td>
<td>{rescueEngineeredPoints}</td>
</tr>
<tr className="header separator-below">
<td colSpan="6">Experimental Evidence Total</td>
<td>{experimentalEvidenceTotalPoints}</td>
</tr>
<tr className="total-row header">
<td colSpan="7">Total Points</td>
<td>{totalPoints}</td>
</tr>
</tbody>
</table>
<strong>*</strong> – Combined LOD Score
</div>
<br />
<br />
<div className="row">
<div className="col-sm-5">
<strong className="pull-right">Calculated
<a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>:
</strong>
</div>
<div className="col-sm-7">
{this.state.autoClassification}
</div>
</div>
{ userAssessments.segCntdct>0 || userAssessments.variantCntdct || userAssessments.expCntdct ?
<div className="row">
<div className="col-sm-5"> </div>
<div className="col-sm-7">
<strong style={{'color':'#f00'}}>Note: One or more pieces of evidence in this record was assessed as "Contradicts".</strong>
</div>
</div>
: null
}
<br />
<Input type="select" ref="alteredClassification"
label={<strong>Select Provisional <a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>:</strong>}
labelClassName="col-sm-5 control-label"
wrapperClassName="col-sm-7" defaultValue={this.state.autoClassification}
groupClassName="form-group">
<option value="Definitive">Definitive</option>
<option value="Strong">Strong</option>
<option value="Moderate">Moderate</option>
<option value="Limited">Limited</option>
<option value="No Evidence">No Reported Evidence</option>
<option value="Disputed">Disputed</option>
<option value="Refuted">Refuted</option>
</Input>
<Input type="textarea" ref="reasons" label="Explain Reason(s) for Change:" rows="5" labelClassName="col-sm-5 control-label"
wrapperClassName="col-sm-7" groupClassName="form-group" error={this.getFormError('reasons')}
clearError={this.clrFormErrors.bind(null, 'reasons')} />
<div className="col-sm-5"><span className="pull-right"> </span></div>
<div className="col-sm-7">
<span>
                                Note: If your selected Clinical Validity Classification is different from the Calculated value, provide a reason to explain why you changed it.
</span>
</div>
</div>
</Panel>
</PanelGroup>
<div className='modal-footer'>
<Input type="button" inputClassName="btn-default btn-inline-spacer" clickHandler={this.cancelForm} title="Cancel" />
<Input type="submit" inputClassName="btn-primary btn-inline-spacer pull-right" id="submit" title="Save" />
</div>
</Form>
</div>
);
};
// Collect the experimental-evidence score objects whose scoreStatus is
// 'Score' from a list of evidence records. Order of the input is preserved.
function getExpScoreList(evidenceList) {
    let collected = [];
    evidenceList.forEach(evidence => {
        collected = collected.concat(evidence.scores.filter(score => score.scoreStatus === 'Score'));
    });
    return collected;
}
// Check whether an item exists in an array (list).
// Loose equality (==) is kept on purpose: callers compare ids that may
// arrive as either numbers or strings.
var in_array = function(item, list) {
    // Array.prototype.some avoids the for..in pitfall of iterating
    // inherited/non-index enumerable properties on arrays.
    return list.some(function(element) {
        return element == item;
    });
};
// Return the earlier of `earliest` and the 4-digit year found at the start
// of `dateStr`; return `earliest` unchanged when no leading year is present.
// Fixes a bug where the raw RegExp match array (e.g. ['1999']) was returned
// instead of a number, leaking an array into later arithmetic.
var get_earliest_year = function(earliest, dateStr) {
    var pattern = new RegExp(/^\d\d\d\d/);
    var match = pattern.exec(dateStr);
    if (match) {
        var theYear = parseInt(match[0], 10);
        if (theYear < earliest) {
            return theYear;
        }
    }
    return earliest;
};
// Function to separate proband individuals by assessment values.
// target: accumulator object holding the separated variant uuid lists and
//         the collected proband individuals ('probandInd'); mutated in place
//         and also returned
// branch: individual array from an annotation/group/family
// article: publication info object (pmid/date recorded per qualifying proband)
// idList: pathogenicity-assessment uuid lists keyed by 'support', 'review'
//         and 'contradict'
var filter = function(target, branch, article, idList) {
    var allVariants = target['allVariants'],
        sptVariants = target['sptVariants'],
        rvwVariants = target['rvwVariants'],
        cntdctVariants = target['cntdctVariants'],
        patho_spt = idList['support'],
        patho_rvw = idList['review'],
        patho_cntdct = idList['contradict'];
    branch.forEach(function(obj) {
        if (obj.proband && obj.variants && obj.variants.length > 0) {
            // counting at probands only
            var allSupported = true;
            for (var j in obj.variants) {
                // collect all distinct variants from proband individuals
                if (!in_array(obj.variants[j].uuid, allVariants)) {
                    allVariants.push(obj.variants[j].uuid);
                }
                // collect variant assessments, separated by 3 different values:
                // supported / review / contradict; a variant lacking 'support'
                // marks the whole proband as not fully supported.
                if (!in_array(obj.variants[j].uuid, patho_spt)) {
                    allSupported = false;
                    if (in_array(obj.variants[j].uuid, patho_rvw) && !in_array(obj.variants[j].uuid, rvwVariants)) {
                        rvwVariants.push(obj.variants[j].uuid);
                    }
                    else if (in_array(obj.variants[j].uuid, patho_cntdct) && !in_array(obj.variants[j].uuid, cntdctVariants)) {
                        cntdctVariants.push(obj.variants[j].uuid);
                    }
                }
                else {
                    if (!in_array(obj.variants[j].uuid, sptVariants)) {
                        sptVariants.push(obj.variants[j].uuid);
                    }
                }
            }
            // a proband is recorded (with its publication) only when every one
            // of its variants was assessed as 'Supports'
            if (allSupported) {
                target["probandInd"].push(
                    {
                        "evidence":obj.uuid,
                        "pmid":article.pmid,
                        "date": article.date
                    }
                );
            }
            target["allVariants"] = allVariants;
            target["sptVariants"] = sptVariants;
            target["rvwVariants"] = rvwVariants;
            target["cntdctVariants"] = cntdctVariants;
        }
    });
    return target;
};
// Display a history item for adding a family
var ProvisionalAddModHistory = React.createClass({
render: function() {
var history = this.props.history;
var meta = history.meta.provisionalClassification;
var gdm = meta.gdm;
return (
<div>
<span><a href={'/provisional-curation/?gdm=' + gdm.uuid + '&edit=yes'} title="View/edit provisional classification">Provisional classification</a> {meta.alteredClassification.toUpperCase()} added to </span>
<strong>{gdm.gene.symbol}-{gdm.disease.term}-</strong>
<i>{gdm.modeInheritance.indexOf('(') > -1 ? gdm.modeInheritance.substring(0, gdm.modeInheritance.indexOf('(') - 1) : gdm.modeInheritance}</i>
<span>; {moment(history.date_created).format("YYYY MMM DD, h:mm a")}</span>
</div>
);
}
});
globals.history_views.register(ProvisionalAddModHistory, 'provisionalClassification', 'add');
// Display a history item for modifying a family
var ProvisionalModifyHistory = React.createClass({
render: function() {
var history = this.props.history;
var meta = history.meta.provisionalClassification;
var gdm = meta.gdm;
return (
<div>
<span><a href={'/provisional-curation/?gdm=' + gdm.uuid + '&edit=yes'} title="View/edit provisional classification">Provisional classification</a> modified to {meta.alteredClassification.toUpperCase()} for </span>
<strong>{gdm.gene.symbol}-{gdm.disease.term}-</strong>
<i>{gdm.modeInheritance.indexOf('(') > -1 ? gdm.modeInheritance.substring(0, gdm.modeInheritance.indexOf('(') - 1) : gdm.modeInheritance}</i>
<span>; {moment(history.date_created).format("YYYY MMM DD, h:mm a")}</span>
</div>
);
}
});
globals.history_views.register(ProvisionalModifyHistory, 'provisionalClassification', 'modify');
// Display a history item for deleting a provisional classification.
// Placeholder rendering only — deletion details are not surfaced yet.
var ProvisionalDeleteHistory = React.createClass({
    render: function() {
        return <div>PROVISIONALDELETE</div>;
    }
});
globals.history_views.register(ProvisionalDeleteHistory, 'provisionalClassification', 'delete');
'use strict';
var React = require('react');
var url = require('url');
var _ = require('underscore');
var moment = require('moment');
var panel = require('../libs/bootstrap/panel');
var form = require('../libs/bootstrap/form');
var globals = require('./globals');
var curator = require('./curator');
var RestMixin = require('./rest').RestMixin;
var methods = require('./methods');
var parseAndLogError = require('./mixins').parseAndLogError;
var CuratorHistory = require('./curator_history');
var modal = require('../libs/bootstrap/modal');
var Modal = modal.Modal;
var CurationMixin = curator.CurationMixin;
var RecordHeader = curator.RecordHeader;
var CurationPalette = curator.CurationPalette;
var PanelGroup = panel.PanelGroup;
var Panel = panel.Panel;
var Form = form.Form;
var FormMixin = form.FormMixin;
var Input = form.Input;
var InputMixin = form.InputMixin;
var queryKeyValue = globals.queryKeyValue;
var userMatch = globals.userMatch;
// Page component for '/provisional-curation/': loads the target GDM
// (gene-disease-mode record), locates the login user's saved provisional
// classification (if any), and renders either the saved summary, the edit
// form, or a freshly calculated summary depending on URL query parameters.
var ProvisionalCuration = React.createClass({
    mixins: [FormMixin, RestMixin, CurationMixin, CuratorHistory],
    contextTypes: {
        navigate: React.PropTypes.func,
        closeModal: React.PropTypes.func
    },
    // Parsed URL query parameters; filled in during render().
    queryValues: {},
    getInitialState: function() {
        return {
            user: null, // login user uuid
            gdm: null, // current gdm object, must be null initially.
            provisional: null, // login user's existing provisional object, must be null initially.
            //assessments: null, // list of all assessments, must be null initially.
            totalScore: null,
            autoClassification: null
        };
    },
    // Fetch the GDM named in the URL, then derive the login user's uuid and
    // their existing provisional classification (if any) into component state.
    loadData: function() {
        var gdmUuid = this.queryValues.gdmUuid;
        // get gdm from db.
        var uris = _.compact([
            gdmUuid ? '/gdm/' + gdmUuid : '' // search for entire data set of the gdm
        ]);
        this.getRestDatas(
            uris
        ).then(datas => {
            var stateObj = {};
            stateObj.user = this.props.session.user_properties.uuid;
            datas.forEach(function(data) {
                switch(data['@type'][0]) {
                    case 'gdm':
                        stateObj.gdm = data;
                        break;
                    default:
                        break;
                }
            });
            // Update the Curator Mixin OMIM state with the current GDM's OMIM ID.
            if (stateObj.gdm && stateObj.gdm.omimId) {
                this.setOmimIdState(stateObj.gdm.omimId);
            }
            // search for provisional owned by login user
            if (stateObj.gdm.provisionalClassifications && stateObj.gdm.provisionalClassifications.length > 0) {
                for (var i in stateObj.gdm.provisionalClassifications) {
                    var owner = stateObj.gdm.provisionalClassifications[i].submitted_by;
                    if (owner.uuid === stateObj.user) { // find
                        stateObj.provisional = stateObj.gdm.provisionalClassifications[i];
                        break;
                    }
                }
            }
            stateObj.previousUrl = url;
            this.setState(stateObj);
            return Promise.resolve();
        }).catch(function(e) {
            console.log('OBJECT LOAD ERROR: %s — %s', e.statusText, e.url);
        });
    },
    componentDidMount: function() {
        this.loadData();
    },
    // Persist the calculated score and the (possibly curator-altered)
    // classification, either updating the user's existing provisional record
    // or creating a new one and attaching it to the GDM. On success, records
    // a history entry and navigates back.
    submitForm: function(e) {
        // Don't run through HTML submit handler
        e.preventDefault();
        e.stopPropagation();
        // Save all form values from the DOM.
        this.saveAllFormValues();
        if (this.validateDefault()) {
            // NOTE(review): `calculate` and `edit` appear unused in this method — confirm before removing.
            var calculate = queryKeyValue('calculate', this.props.href);
            var edit = queryKeyValue('edit', this.props.href);
            var newProvisional = this.state.provisional ? curator.flatten(this.state.provisional) : {};
            newProvisional.totalScore = Number(this.state.totalScore);
            newProvisional.autoClassification = this.state.autoClassification;
            newProvisional.alteredClassification = this.getFormValue('alteredClassification');
            newProvisional.reasons = this.getFormValue('reasons');
            // check required item (reasons): an explanation is mandatory when
            // the curator overrides the calculated classification.
            var formErr = false;
            if (!newProvisional.reasons && newProvisional.autoClassification !== newProvisional.alteredClassification) {
                formErr = true;
                this.setFormErrors('reasons', 'Required when changing classification.');
            }
            if (!formErr) {
                var backUrl = '/curation-central/?gdm=' + this.state.gdm.uuid;
                backUrl += this.queryValues.pmid ? '&pmid=' + this.queryValues.pmid : '';
                if (this.state.provisional) { // edit existing provisional
                    this.putRestData('/provisional/' + this.state.provisional.uuid, newProvisional).then(data => {
                        var provisionalClassification = data['@graph'][0];
                        // Record provisional classification history
                        var meta = {
                            provisionalClassification: {
                                gdm: this.state.gdm['@id'],
                                alteredClassification: provisionalClassification.alteredClassification
                            }
                        };
                        this.recordHistory('modify', provisionalClassification, meta);
                        this.resetAllFormValues();
                        window.history.go(-1);
                    }).catch(function(e) {
                        console.log('PROVISIONAL GENERATION ERROR = : %o', e);
                    });
                }
                else { // save a new calculation and provisional classification
                    this.postRestData('/provisional/', newProvisional).then(data => {
                        return data['@graph'][0];
                    }).then(savedProvisional => {
                        // Record provisional classification history
                        var meta = {
                            provisionalClassification: {
                                gdm: this.state.gdm['@id'],
                                alteredClassification: savedProvisional.alteredClassification
                            }
                        };
                        this.recordHistory('add', savedProvisional, meta);
                        // Link the new provisional record into the GDM's list
                        // and persist the updated GDM.
                        var theGdm = curator.flatten(this.state.gdm);
                        if (theGdm.provisionalClassifications) {
                            theGdm.provisionalClassifications.push(savedProvisional['@id']);
                        }
                        else {
                            theGdm.provisionalClassifications = [savedProvisional['@id']];
                        }
                        return this.putRestData('/gdm/' + this.state.gdm.uuid, theGdm).then(data => {
                            return data['@graph'][0];
                        });
                    }).then(savedGdm => {
                        this.resetAllFormValues();
                        window.history.go(-1);
                    }).catch(function(e) {
                        console.log('PROVISIONAL GENERATION ERROR = %o', e);
                    });
                }
            }
        }
    },
    cancelForm: function(e) {
        // Changed modal cancel button from a form input to a html button
        // as to avoid accepting enter/return key as a click event.
        // Removed hack in this method.
        window.history.go(-1);
    },
    // Render dispatch: ?classification=display shows the static classification
    // description page; ?edit=yes shows the edit form for an existing
    // provisional; ?calculate=yes shows the last saved summary (if any)
    // followed by a newly calculated one.
    render: function() {
        this.queryValues.gdmUuid = queryKeyValue('gdm', this.props.href);
        var calculate = queryKeyValue('calculate', this.props.href);
        var edit = queryKeyValue('edit', this.props.href);
        var session = (this.props.session && Object.keys(this.props.session).length) ? this.props.session : null;
        var gdm = this.state.gdm ? this.state.gdm : null;
        var provisional = this.state.provisional ? this.state.provisional : null;
        var show_clsfctn = queryKeyValue('classification', this.props.href);
        // NOTE(review): summaryMatrix and expMatrix are read but never used below — confirm before removing.
        var summaryMatrix = queryKeyValue('summarymatrix', this.props.href);
        var expMatrix = queryKeyValue('expmatrix', this.props.href);
        return (
            <div>
                { show_clsfctn === 'display' ?
                    Classification.call()
                    :
                    ( gdm ?
                        <div>
                            <RecordHeader gdm={gdm} omimId={this.state.currOmimId} updateOmimId={this.updateOmimId} session={session} summaryPage={true} linkGdm={true} />
                            <div className="container">
                                {
                                    (provisional && edit === 'yes') ?
                                        EditCurrent.call(this)
                                        :
                                        ( calculate === 'yes' ?
                                            <div>
                                                <h1>Curation Summary & Provisional Classification</h1>
                                                {
                                                    provisional ?
                                                        <PanelGroup accordion>
                                                            <Panel title="Last Saved Summary & Provisional Classification" open>
                                                                <div className="row">
                                                                    <div className="col-sm-5"><strong>Date Generated:</strong></div>
                                                                    <div className="col-sm-7"><span>{moment(provisional.last_modified).format("YYYY MMM DD, h:mm a")}</span></div>
                                                                </div>
                                                                <div className="row">
                                                                    <div className="col-sm-5">
                                                                        <strong>Total Score:</strong>
                                                                    </div>
                                                                    <div className="col-sm-7"><span>{provisional.totalScore}</span></div>
                                                                </div>
                                                                <div className="row">
                                                                    <div className="col-sm-5">
                                                                        <strong>Calculated Clinical Validity Classification:</strong>
                                                                    </div>
                                                                    <div className="col-sm-7"><span>{provisional.autoClassification}</span></div>
                                                                </div>
                                                                <div className="row">
                                                                    <div className="col-sm-5">
                                                                        <strong>Selected Clinical Validity Classification:</strong>
                                                                    </div>
                                                                    <div className="col-sm-7"><span>{provisional.alteredClassification}</span></div>
                                                                </div>
                                                                <div className="row">
                                                                    <div className="col-sm-5">
                                                                        <strong>Reason(s):</strong>
                                                                    </div>
                                                                    <div className="col-sm-7"><span>{this.state.provisional.reasons}</span></div>
                                                                </div>
                                                                <div className="row"> </div>
                                                            </Panel>
                                                        </PanelGroup>
                                                        :
                                                        null
                                                }
                                                {NewCalculation.call(this)}
                                            </div>
                                            :
                                            null
                                        )
                                }
                            </div>
                        </div>
                        :
                        null
                    )
                }
            </div>
        );
    }
});
// Register this page component under the 'provisional-curation' view name.
globals.curator_page.register(ProvisionalCuration, 'curator_page', 'provisional-curation');
// Generate Classification Description page for url ../provisional-curation/?gdm=GDMId&classification=display
var Classification = function() {
return (
<div className="container classification-cell">
<h1>Clinical Validity Classifications</h1>
<div className="classificationTable">
<table>
<tbody>
<tr className="greyRow">
<td colSpan='2' className="titleCell">Evidence Level</td>
<td className="titleCell">Evidence Description</td>
</tr>
<tr>
<td rowSpan='7' className="verticalCell">
<div className="verticalContent spptEvd">
Supportive Evidence
</div>
</td>
<td className="levelCell">DEFINITIVE</td>
<td>
The role of this gene in this particular disease hase been repeatedly demonstrated in both the research and clinical
diagnostic settings, and has been upheld over time (in general, at least 3 years). No convincing evidence has emerged
that contradicts the role of the gene in the specified disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="levelCell">STRONG</td>
<td>
The role of this gene in disease has been independently demonstrated in at least two separate studies providing
<strong>strong</strong> supporting evidence for this gene's role in disease, such as the following types of evidence:
<ul>
<li>Strong variant-level evidence demonstrating numerous unrelated probands with variants that provide convincing
evidence for disease causality¹</li>
<li>Compelling gene-level evidence from different types of supporting experimental data².</li>
</ul>
In addition, no convincing evidence has emerged that contradicts the role of the gene in the noted disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="levelCell">MODERATE</td>
<td>
There is <strong>moderate</strong> evidence to support a causal role for this gene in this diseaese, such as:
<ul>
<li>At least 3 unrelated probands with variants that provide convincing evidence for disease causality¹</li>
<li>Moderate experimental data² supporting the gene-disease association</li>
</ul>
The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged
that contradicts the role of the gene in the noded disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="levelCell">LIMITED</td>
<td>
There is <strong>limited</strong> evidence to support a causal role for this gene in this disease, such as:
<ul>
<li>Fewer than three observations of variants that provide convincing evidence for disease causality¹</li>
<li>Multiple variants reported in unrelated probands but <i>without</i> sufficient evidence that the variants alter function</li>
<li>Limited experimental data² supporting the gene-disease association</li>
</ul>
The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged that
contradicts the role of the gene in the noted disease.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td colSpan="2" className="levelCell">NO REPORTED<br />EVIDENCE</td>
<td>
No evidence reported for a causal role in disease. These genes might be "candidate" genes based on animal models or implication
in pathways known to be involved in human diseases, but no reports have implicated the gene in human disease cases.
</td>
</tr>
<tr className="narrow-line"></tr>
<tr>
<td className="verticalCell">
<div className="verticalContent cntrdctEvd">
Contradictory Evidence
</div>
</td>
<td className="levelCell">
CONFLICTING<br />EVIDENCE<br />REPORTED
</td>
<td>
Although there has been an assertion of a gene-disease association, conflicting evidence for the role of this gene in disease has arisen
since the time of the initial report indicating a disease association. Depending on the quantity and quality of evidence disputing the
association, the gene/disease association may be further defined by the following two sub-categories:
<ol className="olTitle">
<li type="1">
Disputed
<ol className="olContent">
<li type="a">
Convincing evidence <i>disputing</i> a role for this gene in this disease has arisen since the initial report identifying an
association between the gene and disease.
</li>
<li type="a">
Refuting evidence need not outweigh existing evidence supporting the gene:disease association.
</li>
</ol>
</li>
<li type="1">
Refuted
<ol className="olContent">
<li type="a">
Evidence refuting the role of the gene in the specified disease has been reported and significantly outweighs any evidence
supporting the role.
</li>
<li type="a">
This designation is to be applied at the discretion of clinical domain experts after thorough review of available evidence
</li>
</ol>
</li>
</ol>
</td>
</tr>
<tr className="greyRow">
<td colSpan="3" className="levelCell">NOTES</td>
</tr>
<tr>
<td colSpan="3">
<p>
¹Variants that have evidence to disrupt function and/or have other strong genetic and population data (e.g. <i>de novo</i>
occurrence, absence in controls, etc) can be used as evidence in support of a variant's causality in this framework.
</p>
<p>²Examples of appropriate types of supporting experimental data based on those outlined in MacArthur et al. 2014.</p>
</td>
</tr>
</tbody>
</table>
</div>
</div>
);
};
// Description of 4 levels of classification in summary table
// Renders the static description block for the "Limited" clinical validity
// classification level, shown in the summary table. Takes no arguments and
// returns a JSX element.
var LimitedClassification = function() {
    return (
        <div>
            <p className="title underline-text title-p">LIMITED CLASSIFICATION</p>
            <p>There is <strong>limited</strong> evidence to support a causal role for this gene in this disease, such as:</p>
            <ul>
                <li>Fewer than three observations of variants that provide convincing evidence for disease causality&sup1;</li>
                <li>Multiple variants reported in unrelated probands but <i>without</i> sufficient evidence that the variants alter function</li>
                <li>Limited experimental data&sup2; supporting the gene-disease association</li>
            </ul>
            <p>The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged that contradicts the role of the gene in the noted disease.</p>
        </div>
    );
};
var ModerateClassification = function() {
return (
<div>
<p className="title underline-text title-p">MODERATE CLASSIFICATION</p>
<p>There is <strong>moderate</strong> evidence to support a causal role for this gene in this diseaese, such as:</p>
<ul>
<li>At least 3 unrelated probands with variants that provide convincing evidence for disease causality¹</li>
<li>Moderate experimental data² supporting the gene-disease association</li>
</ul>
<p>The role of this gene in disease may not have been independently reported, but no convincing evidence has emerged that contradicts the role of the gene in the noded disease.</p>
</div>
);
};
// Renders the static description block for the "Strong" clinical validity
// classification level, shown in the summary table. Takes no arguments and
// returns a JSX element.
var StrongClassification = function() {
    return (
        <div>
            <p className="title underline-text title-p">STRONG CLASSIFICATION</p>
            <p>
                The role of this gene in disease has been independently demonstrated in at least two separate studies providing
                <strong>strong</strong> supporting evidence for this gene's role in disease, such as the following types of evidence:
            </p>
            <ul>
                <li>Strong variant-level evidence demonstrating numerous unrelated probands with variants that provide convincing evidence for disease causality&sup1;</li>
                <li>Compelling gene-level evidence from different types of supporting experimental data&sup2;.</li>
            </ul>
            <p>In addition, no convincing evidence has emerged that contradicts the role of the gene in the noted disease.</p>
        </div>
    );
};
var DefinitiveClassification = function() {
return (
<div>
<p className="title underline-text title-p">DEFINITIVE CLASSIFICATION</p>
<p>
The role of this gene in this particular disease hase been repeatedly demonstrated in both the research and clinical
diagnostic settings, and has been upheld over time (in general, at least 3 years). No convincing evidence has emerged
that contradicts the role of the gene in the specified disease.
</p>
</div>
);
};
// Edit page for url ../provisional-curation/?gdm=GDMId&edit=yes
var EditCurrent = function() {
var alteredClassification = this.state.provisional.alteredClassification ? this.state.provisional.alteredClassification : 'none';
this.state.totalScore = this.state.provisional.totalScore;
this.state.autoClassification = this.state.provisional.autoClassification;
return (
<div>
<h1>Edit Summary and Provisional Classification</h1>
<Form submitHandler={this.submitForm} formClassName="form-horizontal form-std">
<PanelGroup accordion>
<Panel title="Currently Saved Calculation and Classification" open>
<div className="row">
<div className="col-sm-5"><strong className="pull-right">Total Score:</strong></div>
<div className="col-sm-7"><span>{this.state.totalScore}</span></div>
</div>
<br />
<div className="row">
<div className="col-sm-5">
<strong className="pull-right">Calculated
<a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>
:
</strong>
</div>
<div className="col-sm-7"><span>{this.state.autoClassification}</span></div>
</div>
<br />
<div className="row">
<Input type="select" ref="alteredClassification" value={alteredClassification} labelClassName="col-sm-5 control-label" wrapperClassName="col-sm-7"
label={<strong>Select Provisional <a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>:</strong>}
groupClassName="form-group" handleChange={this.handleChange}>
<option value="Definitive">Definitive</option>
<option value="Strong">Strong</option>
<option value="Moderate">Moderate</option>
<option value="Limited">Limited</option>
<option value="No Reported Evidence">No Evidence</option>
<option value="Disputed">Disputed</option>
<option value="Refuted">Refuted</option>
</Input>
</div>
<div className="row">
<Input type="textarea" ref="reasons" label="Explain Reason(s) for Change:" rows="5" labelClassName="col-sm-5 control-label"
value={this.state.provisional && this.state.provisional.reasons} wrapperClassName="col-sm-7" groupClassName="form-group"
error={this.getFormError('reasons')} clearError={this.clrFormErrors.bind(null, 'reasons')}/>
</div>
<div className="row">
<div className="col-sm-5"><strong>Date Created:</strong></div>
<div className="col-sm-7">
<span>{moment(this.state.provisional.date_created).format("YYYY MMM DD, h:mm a")}</span>
</div>
</div>
<div className="row">
<div className="col-sm-5"><strong>Last Modified:</strong></div>
<div className="col-sm-7">
<span>{moment(this.state.provisional.last_modified).format("YYYY MMM DD, h:mm a")}</span>
</div>
</div>
<div><span> </span></div>
<br />
</Panel>
</PanelGroup>
<div className='modal-footer'>
<Input type="button" inputClassName="btn-default btn-inline-spacer" clickHandler={this.cancelForm} title="Cancel" />
<Input type="submit" inputClassName="btn-primary btn-inline-spacer pull-right" id="submit" title="Save" />
</div>
</Form>
</div>
);
};
// function for looping through family (of GDM or of group) and finding all relevant information needed for score calculations
// returns dictionary of relevant items that need to be updated within NewCalculation()
// Walk a list of family objects and accumulate (a) segregation statistics for
// segregations entered by the given user and (b) any scored proband
// individuals nested inside the families. The accumulator arguments are
// updated and handed back in a single result object so the caller can
// re-assign its local copies (see NewCalculation). The `individualsCollected`
// and `assessments` arguments are currently passed through untouched.
var FamilyScraper = function(user, families, individualsCollected, annotation, pathoVariantIdList, userAssessments, assessments, segregationCount, segregationPoints, individualMatched) {
    for (var idx = 0; idx < families.length; idx++) {
        var family = families[idx];
        var segregation = family.segregation;
        // Only segregations created by the current user are tallied
        // (may change later - MC).
        if (segregation && family.submitted_by.uuid === user) {
            userAssessments['segNot'] += 1;
            // Fold this family's LOD score into the aggregate, preferring the
            // published score when the curator flagged it as published.
            if (segregation.includeLodScoreInAggregateCalculation) {
                var hasLodFlag = "lodPublished" in segregation;
                if (hasLodFlag && segregation.lodPublished === true && segregation.publishedLodScore) {
                    segregationCount += 1;
                    segregationPoints += segregation.publishedLodScore;
                } else if (hasLodFlag && segregation.lodPublished === false && segregation.estimatedLodScore) {
                    segregationCount += 1;
                    segregationPoints += segregation.estimatedLodScore;
                }
            }
        }
        // Collect scored proband individuals belonging to this family.
        if (family.individualIncluded && family.individualIncluded.length) {
            individualMatched = IndividualScraper(family.individualIncluded, individualMatched);
        }
    }
    return {
        individualsCollected: individualsCollected,
        userAssessments: userAssessments,
        assessments: assessments,
        segregationCount: segregationCount,
        segregationPoints: segregationPoints,
        individualMatched: individualMatched
    };
};
// Append to `individualMatched` every individual that is a proband and has at
// least one score, then return the (mutated) array. A falsy `individuals`
// argument is treated as an empty list.
var IndividualScraper = function(individuals, individualMatched) {
    (individuals || []).forEach(function(individual) {
        var hasScores = individual.scores && individual.scores.length;
        if (individual.proband === true && hasScores) {
            individualMatched.push(individual);
        }
    });
    return individualMatched;
};
// Generate a new summary for url ../provisional-curation/?gdm=GDMId&calculate=yes
// Calculation rules are defined by Small GCWG. See ClinGen_Interface_4_2015.pptx and Clinical Validity Classifications for detail
var NewCalculation = function() {
var gdm = this.state.gdm;
const MAX_SCORE_CONSTANTS = {
VARIANT_IS_DE_NOVO: 12,
PREDICTED_OR_PROVEN_NULL_VARIANT: 10,
OTHER_VARIANT_TYPE_WITH_GENE_IMPACT: 7,
AUTOSOMAL_RECESSIVE: 12,
SEGREGATION: 7,
CASE_CONTROL: 12,
FUNCTIONAL: 2,
FUNCTIONAL_ALTERATION: 2,
MODELS_RESCUE: 4,
GENETIC_EVIDENCE: 12,
EXPERIMENTAL_EVIDENCE: 6,
TOTAL: 18
};
/*****************************************************/
/* VARIABLES FOR EVIDENCE SCORE TABLE */
/*****************************************************/
// variables for autosomal dominant data
let probandOtherVariantCount = 0, probandOtherVariantPoints = 0, probandOtherVariantPointsCounted = 0;
let probandNullVariantCount = 0, probandNullVariantPoints = 0, probandNullVariantPointsCounted = 0;
let variantDenovoCount = 0, variantDenovoPoints = 0, variantDenovoPointsCounted = 0;
// variables for autosomal recessive data
let autosomalRecessivePointsCounted = 0;
let twoVariantsProvenCount = 0, twoVariantsProvenPoints = 0;
let twoVariantsNotProvenCount = 0, twoVariantsNotProvenPoints = 0;
// variables for segregation data
// segregationPoints is actually the raw, unconverted score; segregationPointsCounted is calculated and displayed score
let segregationCount = 0, segregationPoints = 0, segregationPointsCounted = 0;
// variables for case-control data
let caseControlCount = 0, caseControlPoints = 0, caseControlPointsCounted;
// variables for Experimental data
let functionalPointsCounted = 0, functionalAlterationPointsCounted = 0, modelsRescuePointsCounted = 0;
let biochemicalFunctionCount = 0, biochemicalFunctionPoints = 0;
let proteinInteractionsCount = 0, proteinInteractionsPoints = 0;
let expressionCount = 0, expressionPoints = 0;
let patientCellsCount = 0, patientCellsPoints = 0;
let nonPatientCellsCount = 0, nonPatientCellsPoints = 0;
let animalModelCount = 0, animalModelPoints = 0;
let cellCultureCount = 0, cellCulturePoints = 0;
let rescueCount = 0, rescuePoints = 0;
let rescueEngineeredCount = 0, rescueEngineeredPoints = 0;
// variables for total counts
let geneticEvidenceTotalPoints = 0, experimentalEvidenceTotalPoints = 0, totalPoints = 0;
/*****************************************************/
/* Find all proband individuals that had been scored */
/*****************************************************/
let probandTotal = []; // Total proband combined
let probandFamily = []; // Total probands associated with families from all annotations
let probandIndividual = []; // Total proband individuals from all annotations
var h, i, j, k, l;
// initial values of assessments
var userAssessments = {
"variantSpt": 0,
"variantReview": 0,
"variantCntdct": 0,
"variantNot": 0,
"expSpt": 0,
"expReview": 0,
"expCntdct": 0,
"expNot": 0,
"segSpt": 0,
"segReview": 0,
"segCntdct": 0,
"segNot": 0
};
// Collect variants from user's pathogenicity
var gdmPathoList = gdm.variantPathogenicity;
var pathoVariantIdList = {
"support": [],
"review": [],
"contradict": []
};
gdmPathoList.forEach(gdmPatho => {
let variantUuid = gdmPatho.variant.uuid;
// Collect login user's variant assessments, separated as 3 different values.
if (gdmPatho.assessments && gdmPatho.assessments.length > 0) {
gdmPatho.assessments.forEach(assessment => {
if (assessment.submitted_by.uuid === this.state.user && assessment.value === 'Supports') {
pathoVariantIdList['support'].push(variantUuid);
}
else if (assessment.submitted_by.uuid === this.state.user && assessment.value === 'Review') {
pathoVariantIdList['review'].push(variantUuid);
}
else if (assessment.submitted_by.uuid === this.state.user && assessment.value === 'Contradicts') {
pathoVariantIdList['contradict'].push(variantUuid);
}
});
}
});
var exp_scores = [0, 0, 0];
var expType = {
"Expression": 0,
"Protein Interactions": 0,
"Biochemical Function": 0,
"Functional Alteration (Patient cells)": 0,
"Functional Alteration (Engineered equivalent)": 0,
"Model Systems (Animal model)": 0,
"Model Systems (Engineered equivalent)": 0,
"Rescue (Patient cells)": 0,
"Rescue (Engineered equivalent)": 0
};
var individualsCollected = {
"probandInd": [],
"allVariants": [],
"sptVariants": [],
"rvwVariants": [],
"cntdctVariants": []
};
var proband_variants = [];
let tempFamilyScraperValues = {};
let individualMatched = [];
let caseControlTotal = [];
// scan gdm
let annotations = gdm.annotations && gdm.annotations.length ? gdm.annotations : [];
annotations.forEach(annotation => {
let groups, families, individuals, assessments, experimentals;
// loop through groups
groups = annotation.groups && annotation.groups.length ? annotation.groups : [];
groups.forEach(group => {
// loop through families using FamilyScraper
families = group.familyIncluded && group.familyIncluded.length ? group.familyIncluded : [];
tempFamilyScraperValues = FamilyScraper(this.state.user, families, individualsCollected, annotation, pathoVariantIdList, userAssessments, assessments, segregationCount, segregationPoints, individualMatched);
individualsCollected = tempFamilyScraperValues['individualsCollected'];
userAssessments = tempFamilyScraperValues['userAssessments'];
assessments = tempFamilyScraperValues['assessments'];
segregationCount = tempFamilyScraperValues['segregationCount'];
segregationPoints = tempFamilyScraperValues['segregationPoints'];
individualMatched = tempFamilyScraperValues['individualMatched'];
// get proband individuals of group
if (group.individualIncluded && group.individualIncluded.length) {
individualMatched = IndividualScraper(group.individualIncluded, individualMatched);
}
/*
if (group.individualIncluded && group.individualIncluded.length) {
individualsCollected = filter(individualsCollected, group.individualIncluded, annotation.article, pathoVariantIdList);
}
*/
});
// loop through families using FamilyScraper
families = annotation.families && annotation.families.length ? annotation.families : [];
tempFamilyScraperValues = FamilyScraper(this.state.user, families, individualsCollected, annotation, pathoVariantIdList, userAssessments, assessments, segregationCount, segregationPoints, individualMatched);
individualsCollected = tempFamilyScraperValues['individualsCollected'];
userAssessments = tempFamilyScraperValues['userAssessments'];
assessments = tempFamilyScraperValues['assessments'];
segregationCount = tempFamilyScraperValues['segregationCount'];
segregationPoints = tempFamilyScraperValues['segregationPoints'];
individualMatched = tempFamilyScraperValues['individualMatched'];
// push all matched individuals from families and families of groups to probandFamily
individualMatched.forEach(item => {
probandFamily.push(item);
});
// loop through individuals
if (annotation.individuals && annotation.individuals.length) {
// get proband individuals
individualMatched = [];
individualMatched = IndividualScraper(annotation.individuals, individualMatched);
// push all matched individuals to probandIndividual
individualMatched.forEach(item => {
probandIndividual.push(item);
});
//individualsCollected = filter(individualsCollected, annotation.individuals, annotation.article, pathoVariantIdList);
}
// loop through case-controls
let caseControlMatched = [];
if (annotation.caseControlStudies && annotation.caseControlStudies.length) {
annotation.caseControlStudies.forEach(caseControl => {
if (caseControl.scores && caseControl.scores.length) {
caseControl.scores.forEach(score => {
if (score.submitted_by.uuid === this.state.user && score.score && score.score !== 'none') {
caseControlCount += 1;
caseControlPoints += parseFloat(score.score);
}
});
}
});
}
// loop through experimentals
experimentals = annotation.experimentalData && annotation.experimentalData.length ? annotation.experimentalData : [];
experimentals.forEach(experimental => {
// loop through scores, if any
if (experimental.scores && experimental.scores.length) {
experimental.scores.forEach(score => {
// only care about scores made by current user
if (score.submitted_by.uuid === this.state.user) {
// parse score of experimental
let experimentalScore = 0;
if (score.score && score.score !== 'none') {
experimentalScore = parseFloat(score.score); // Use the score selected by curator (if any)
} else if (score.calculatedScore && score.calculatedScore !== 'none') {
experimentalScore = parseFloat(score.calculatedScore); // Otherwise, use default score (if any)
}
userAssessments['expNot'] += 1;
// assign score to correct sub-type depending on experiment type and other variables
if (experimental.evidenceType && experimental.evidenceType === 'Biochemical Function') {
biochemicalFunctionCount += 1;
biochemicalFunctionPoints += experimentalScore;
} else if (experimental.evidenceType && experimental.evidenceType === 'Protein Interactions') {
proteinInteractionsCount += 1;
proteinInteractionsPoints += experimentalScore;
} else if (experimental.evidenceType && experimental.evidenceType === 'Expression') {
expressionCount += 1;
expressionPoints += experimentalScore;
} else if (experimental.evidenceType && experimental.evidenceType === 'Functional Alteration') {
if (experimental.functionalAlteration.cellMutationOrEngineeredEquivalent
&& experimental.functionalAlteration.cellMutationOrEngineeredEquivalent === 'Patient cells') {
patientCellsCount += 1;
patientCellsPoints += experimentalScore;
} else if (experimental.functionalAlteration.cellMutationOrEngineeredEquivalent
&& experimental.functionalAlteration.cellMutationOrEngineeredEquivalent === 'Engineered equivalent') {
nonPatientCellsCount += 1;
nonPatientCellsPoints += experimentalScore;
}
} else if (experimental.evidenceType && experimental.evidenceType === 'Model Systems') {
if (experimental.modelSystems.animalOrCellCulture
&& experimental.modelSystems.animalOrCellCulture === 'Animal model') {
animalModelCount += 1;
animalModelPoints += experimentalScore;
} else if (experimental.modelSystems.animalOrCellCulture
&& experimental.modelSystems.animalOrCellCulture === 'Engineered equivalent') {
cellCultureCount += 1;
cellCulturePoints += experimentalScore;
}
} else if (experimental.evidenceType && experimental.evidenceType === 'Rescue') {
if (experimental.rescue.patientCellOrEngineeredEquivalent
&& experimental.rescue.patientCellOrEngineeredEquivalent === 'Patient cells') {
rescueCount += 1;
rescuePoints += experimentalScore;
} else if (experimental.rescue.patientCellOrEngineeredEquivalent
&& experimental.rescue.patientCellOrEngineeredEquivalent === 'Engineered equivalent') {
rescueEngineeredCount += 1;
rescueEngineeredPoints += experimentalScore;
}
}
}
});
}
});
});
// combine all probands
probandTotal = probandFamily.concat(probandIndividual);
// scan probands
probandTotal.forEach(proband => {
proband.scores.forEach(score => {
if (score.submitted_by.uuid === this.state.user) {
// parse proband score
let probandScore = 0;
if (score.score && score.score !== 'none') {
probandScore += parseFloat(score.score);
} else if (score.calculatedScore && score.calculatedScore !== 'none') {
probandScore += parseFloat(score.calculatedScore);
}
// assign score to correct sub-type depending on score type
if (score.caseInfoType && score.caseInfoType === 'OTHER_VARIANT_TYPE_WITH_GENE_IMPACT' && score.scoreStatus === 'Score') {
probandOtherVariantCount += 1;
probandOtherVariantPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'PREDICTED_OR_PROVEN_NULL_VARIANT' && score.scoreStatus === 'Score') {
probandNullVariantCount += 1;
probandNullVariantPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'VARIANT_IS_DE_NOVO' && score.scoreStatus === 'Score') {
variantDenovoCount += 1;
variantDenovoPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'TWO_VARIANTS_WITH_GENE_IMPACT_IN_TRANS' && score.scoreStatus === 'Score') {
twoVariantsNotProvenCount += 1;
twoVariantsNotProvenPoints += probandScore;
} else if (score.caseInfoType && score.caseInfoType === 'TWO_VARIANTS_IN_TRANS_WITH_ONE_DE_NOVO' && score.scoreStatus === 'Score') {
twoVariantsProvenCount += 1;
twoVariantsProvenPoints += probandScore;
}
}
});
});
// is the below few lines necessary? - MC
userAssessments['variantSpt'] = individualsCollected['sptVariants'].length;
userAssessments['variantReview'] = individualsCollected['rvwVariants'].length;
userAssessments['variantCntdct'] = individualsCollected['cntdctVariants'].length;
userAssessments['variantNot'] = individualsCollected['allVariants'].length - userAssessments['variantSpt'] - userAssessments['variantReview'] - userAssessments['variantCntdct'];
userAssessments['expNot'] = userAssessments['expNot'] - userAssessments['expSpt'] - userAssessments['expReview'] - userAssessments['expCntdct'];
userAssessments['segNot'] = userAssessments['segNot'] - userAssessments['segSpt'] - userAssessments['segReview'] - userAssessments['segCntdct'];
/**************************************************************************/
/* Comment block below may need to be removed/revised for new scoring matrix - MC
/**************************************************************************/
/*
// Collect articles and find the earliest publication year
var proband = 0;
var articleCollected = [];
var year = new Date();
var earliest = year.getFullYear();
individualsCollected['probandInd'].forEach(probandInd => {
if (probandInd.pmid && probandInd.pmid != '') {
proband += 1;
if (!in_array(probandInd.pmid, articleCollected)) {
articleCollected.push(probandInd.pmid);
earliest = get_earliest_year(earliest, probandInd.date);
}
}
});
// calculate scores
var currentYear = year.getFullYear();
var time = currentYear.valueOf() - earliest.valueOf();
var timeScore = 0, probandScore = 0, pubScore = 0, expScore = 0; // initialize scores to 0
if (time >= 3) {
timeScore = 2;
}
else if (time >= 1) {
timeScore = 1;
}
else {
timeScore = 0;
}
if (proband > 18) {
probandScore = 7;
}
else if (proband >15) {
probandScore = 6;
}
else if (proband > 12) {
probandScore = 5;
}
else if (proband > 9) {
probandScore = 4;
}
else if (proband > 6) {
probandScore = 3;
}
else if (proband > 3) {
probandScore = 2;
}
else if (proband >= 1) {
probandScore = 1;
}
else {
probandScore = 0;
}
if (articleCollected.length >= 5) {
pubScore = 5;
}
else {
pubScore = articleCollected.length;
}
if (articleCollected.length <= 2 && timeScore > 1) {
timeScore = 1;
}
var totalScore = probandScore + pubScore + timeScore + expScore;
// set calculated classification
var autoClassification = 'No Reported Evidence';
if (Math.floor(totalScore) >= 17){
autoClassification = 'Definitive';
}
else if (Math.floor(totalScore) >= 13) {
autoClassification = 'Strong';
}
else if (Math.floor(totalScore) >= 9) {
autoClassification = 'Moderate';
}
else if (Math.floor(totalScore) >= 2) {
autoClassification = 'Limited';
}
// save total score and calculated classification to state
this.state.totalScore = totalScore;
this.state.autoClassification = autoClassification;
// set score positons in html table
var probandRow = [], pubRow = [], timeRow = [];
for(i=0; i<8; i++) {
if (i === probandScore) {
probandRow.push(proband);
}
else {
probandRow.push('');
}
if (i === pubScore) {
pubRow.push(articleCollected.length);
}
else if (i < 6) {
pubRow.push('');
}
if (i === timeScore) {
timeRow.push(time);
}
else if (i < 3) {
timeRow.push('');
}
}
*/
// calculate segregation counted points
if (segregationPoints >= 0.75 && segregationPoints <= 0.99) {
segregationPointsCounted = 1;
} else if (segregationPoints >= 1 && segregationPoints <= 1.24) {
segregationPointsCounted = .5;
} else if (segregationPoints >= 1.25 && segregationPoints <= 1.49) {
segregationPointsCounted = 2.5;
} else if (segregationPoints >= 1.5 && segregationPoints <= 1.74) {
segregationPointsCounted = 3;
} else if (segregationPoints >= 1.75 && segregationPoints <= 1.99) {
segregationPointsCounted = 3.5;
} else if (segregationPoints >= 2 && segregationPoints <= 2.49) {
segregationPointsCounted = 4;
} else if (segregationPoints >= 2.5 && segregationPoints <= 2.99) {
segregationPointsCounted = 4.5;
} else if (segregationPoints >= 3 && segregationPoints <= 3.49) {
segregationPointsCounted = 5;
} else if (segregationPoints >= 3.5 && segregationPoints <= 3.99) {
segregationPointsCounted = 5.5;
} else if (segregationPoints >= 4 && segregationPoints <= 4.49) {
segregationPointsCounted = 6;
} else if (segregationPoints >= 4.5 && segregationPoints <= 4.99) {
segregationPointsCounted = 6.5;
} else if (segregationPoints >= 5) {
segregationPointsCounted = MAX_SCORE_CONSTANTS.SEGREGATION;
}
// calculate other counted points
let tempPoints = 0;
probandOtherVariantPointsCounted = probandOtherVariantPoints < MAX_SCORE_CONSTANTS.OTHER_VARIANT_TYPE_WITH_GENE_IMPACT ? probandOtherVariantPoints : MAX_SCORE_CONSTANTS.OTHER_VARIANT_TYPE_WITH_GENE_IMPACT;
probandNullVariantPointsCounted = probandNullVariantPoints < MAX_SCORE_CONSTANTS.PREDICTED_OR_PROVEN_NULL_VARIANT ? probandNullVariantPoints : MAX_SCORE_CONSTANTS.PREDICTED_OR_PROVEN_NULL_VARIANT;
variantDenovoPointsCounted = variantDenovoPoints < MAX_SCORE_CONSTANTS.VARIANT_IS_DE_NOVO ? variantDenovoPoints : MAX_SCORE_CONSTANTS.VARIANT_IS_DE_NOVO;
tempPoints = twoVariantsProvenPoints + twoVariantsNotProvenPoints;
autosomalRecessivePointsCounted = tempPoints < MAX_SCORE_CONSTANTS.AUTOSOMAL_RECESSIVE ? tempPoints : MAX_SCORE_CONSTANTS.AUTOSOMAL_RECESSIVE;
caseControlPointsCounted = caseControlPoints < MAX_SCORE_CONSTANTS.CASE_CONTROL ? caseControlPoints : MAX_SCORE_CONSTANTS.CASE_CONTROL;
tempPoints = biochemicalFunctionPoints + proteinInteractionsPoints + expressionPoints;
functionalPointsCounted = tempPoints < MAX_SCORE_CONSTANTS.FUNCTIONAL ? tempPoints : MAX_SCORE_CONSTANTS.FUNCTIONAL;
tempPoints = patientCellsPoints + nonPatientCellsPoints;
functionalAlterationPointsCounted = tempPoints < MAX_SCORE_CONSTANTS.FUNCTIONAL_ALTERATION ? tempPoints : MAX_SCORE_CONSTANTS.FUNCTIONAL_ALTERATION;
tempPoints = animalModelPoints + cellCulturePoints + rescuePoints + rescueEngineeredPoints;
modelsRescuePointsCounted = tempPoints < MAX_SCORE_CONSTANTS.MODELS_RESCUE ? tempPoints : MAX_SCORE_CONSTANTS.MODELS_RESCUE;
tempPoints = probandOtherVariantPointsCounted + probandNullVariantPointsCounted + variantDenovoPointsCounted + autosomalRecessivePointsCounted + segregationPointsCounted + caseControlPointsCounted;
geneticEvidenceTotalPoints = tempPoints < MAX_SCORE_CONSTANTS.GENETIC_EVIDENCE ? tempPoints : MAX_SCORE_CONSTANTS.GENETIC_EVIDENCE;
tempPoints = functionalPointsCounted + functionalAlterationPointsCounted + modelsRescuePointsCounted;
experimentalEvidenceTotalPoints = tempPoints < MAX_SCORE_CONSTANTS.EXPERIMENTAL_EVIDENCE ? tempPoints : MAX_SCORE_CONSTANTS.EXPERIMENTAL_EVIDENCE;
totalPoints = geneticEvidenceTotalPoints + experimentalEvidenceTotalPoints;
return (
<div>
<Form submitHandler={this.submitForm} formClassName="form-horizontal form-std">
<PanelGroup accordion>
<Panel title="New Summary & Provisional Classification" open>
<div className="form-group">
<div>
The calculated values below are based on the set of saved evidence that existed when the "Generate New Summary"
button was clicked. To save these values and the calculated or selected Classification, click "Save" below - they
will then represent the new "Last Saved Summary & Provisional Classification".
</div>
<div><span> </span></div>
<br />
<div className="container">
<table className="summary-matrix">
<tbody>
<tr className="header large bg-color separator-below">
<td colSpan="5">Evidence Type</td>
<td>Count</td>
<td>Total Points</td>
<td>Points Counted</td>
</tr>
<tr>
<td rowSpan="8" className="header"><div className="rotate-text"><div>Genetic Evidence</div></div></td>
<td rowSpan="6" className="header"><div className="rotate-text"><div>Case-Level</div></div></td>
<td rowSpan="5" className="header"><div className="rotate-text"><div>Variant</div></div></td>
<td rowSpan="3" className="header">Autosomal Dominant Disease</td>
<td>Proband with other variant type with some evidence of gene impact</td>
<td>{probandOtherVariantCount}</td>
<td>{probandOtherVariantPoints}</td>
<td>{probandOtherVariantPointsCounted}</td>
</tr>
<tr>
<td>Proband with predicted or proven null variant</td>
<td>{probandNullVariantCount}</td>
<td>{probandNullVariantPoints}</td>
<td>{probandNullVariantPointsCounted}</td>
</tr>
<tr>
<td>Variant is <i>de novo</i></td>
<td>{variantDenovoCount}</td>
<td>{variantDenovoPoints}</td>
<td>{variantDenovoPointsCounted}</td>
</tr>
<tr>
<td rowSpan="2" className="header">Autosomal Recessive Disease</td>
<td>Two variants (not prediced/proven null) with some evidence of gene impact in <i>trans</i></td>
<td>{twoVariantsNotProvenCount}</td>
<td>{twoVariantsNotProvenPoints}</td>
<td rowSpan="2">{autosomalRecessivePointsCounted}</td>
</tr>
<tr>
<td>Two variants in <i>trans</i> and at least one <i>de novo</i> or a predicted/proven null variant</td>
<td>{twoVariantsProvenCount}</td>
<td>{twoVariantsProvenPoints}</td>
</tr>
<tr>
<td colSpan="3" className="header">Segregation</td>
<td>{segregationCount}</td>
<td><span>{segregationPointsCounted}</span> (<abbr title="Combined LOD Score"><span>{segregationPoints}</span><strong>*</strong></abbr>)</td>
<td>{segregationPointsCounted}</td>
</tr>
<tr>
<td colSpan="4" className="header">Case-Control</td>
<td>{caseControlCount}</td>
<td>{caseControlPoints}</td>
<td>{caseControlPointsCounted}</td>
</tr>
<tr className="header separator-below">
<td colSpan="6">Genetic Evidence Total</td>
<td>{geneticEvidenceTotalPoints}</td>
</tr>
<tr>
<td rowSpan="10" className="header"><div className="rotate-text"><div>Experimental Evidence</div></div></td>
<td colSpan="3" rowSpan="3" className="header">Functional</td>
<td>Biochemical Functions</td>
<td>{biochemicalFunctionCount}</td>
<td>{biochemicalFunctionPoints}</td>
<td rowSpan="3">{functionalPointsCounted}</td>
</tr>
<tr>
<td>Protein Interactions</td>
<td>{proteinInteractionsCount}</td>
<td>{proteinInteractionsPoints}</td>
</tr>
<tr>
<td>Expression</td>
<td>{expressionCount}</td>
<td>{expressionPoints}</td>
</tr>
<tr>
<td colSpan="3" rowSpan="2" className="header">Functional Alteration</td>
<td>Patient Cells</td>
<td>{patientCellsCount}</td>
<td>{patientCellsPoints}</td>
<td rowSpan="2">{functionalAlterationPointsCounted}</td>
</tr>
<tr>
<td>Non-patient Cells</td>
<td>{nonPatientCellsCount}</td>
<td>{nonPatientCellsPoints}</td>
</tr>
<tr>
<td colSpan="3" rowSpan="4" className="header">Models & Rescue</td>
<td>Animal Model</td>
<td>{animalModelCount}</td>
<td>{animalModelPoints}</td>
<td rowSpan="4">{modelsRescuePointsCounted}</td>
</tr>
<tr>
<td>Cell Culture Model System</td>
<td>{cellCultureCount}</td>
<td>{cellCulturePoints}</td>
</tr>
<tr>
<td>Rescue in Animal Model</td>
<td>{rescueCount}</td>
<td>{rescuePoints}</td>
</tr>
<tr>
<td>Rescue in Engineered Equivalent</td>
<td>{rescueEngineeredCount}</td>
<td>{rescueEngineeredPoints}</td>
</tr>
<tr className="header separator-below">
<td colSpan="6">Experimental Evidence Total</td>
<td>{experimentalEvidenceTotalPoints}</td>
</tr>
<tr className="total-row header">
<td colSpan="7">Total Points</td>
<td>{totalPoints}</td>
</tr>
</tbody>
</table>
<strong>*</strong> – Combined LOD Score
</div>
<br />
<br />
<div className="row">
<div className="col-sm-5">
<strong className="pull-right">Calculated
<a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>:
</strong>
</div>
<div className="col-sm-7">
{this.state.autoClassification}
</div>
</div>
{ userAssessments.segCntdct>0 || userAssessments.variantCntdct || userAssessments.expCntdct ?
<div className="row">
<div className="col-sm-5"> </div>
<div className="col-sm-7">
<strong style={{'color':'#f00'}}>Note: One or more pieces of evidence in this record was assessed as "Contradicts".</strong>
</div>
</div>
: null
}
<br />
<Input type="select" ref="alteredClassification"
label={<strong>Select Provisional <a href="/provisional-curation/?classification=display" target="_block">Clinical Validity Classification</a>:</strong>}
labelClassName="col-sm-5 control-label"
wrapperClassName="col-sm-7" defaultValue={this.state.autoClassification}
groupClassName="form-group">
<option value="Definitive">Definitive</option>
<option value="Strong">Strong</option>
<option value="Moderate">Moderate</option>
<option value="Limited">Limited</option>
<option value="No Evidence">No Reported Evidence</option>
<option value="Disputed">Disputed</option>
<option value="Refuted">Refuted</option>
</Input>
<Input type="textarea" ref="reasons" label="Explain Reason(s) for Change:" rows="5" labelClassName="col-sm-5 control-label"
wrapperClassName="col-sm-7" groupClassName="form-group" error={this.getFormError('reasons')}
clearError={this.clrFormErrors.bind(null, 'reasons')} />
<div className="col-sm-5"><span className="pull-right"> </span></div>
<div className="col-sm-7">
<span>
Note: If your selected Clinical Validity Classification is different from the Calculated value, provide a reason to expain why you changed it.
</span>
</div>
</div>
</Panel>
</PanelGroup>
<div className='modal-footer'>
<Input type="button" inputClassName="btn-default btn-inline-spacer" clickHandler={this.cancelForm} title="Cancel" />
<Input type="submit" inputClassName="btn-primary btn-inline-spacer pull-right" id="submit" title="Save" />
</div>
</Form>
</div>
);
};
// Method to return a list of experimental evidence scores
// by score status
function getExpScoreList(evidenceList) {
let newArray = [];
evidenceList.forEach(evidence => {
evidence.scores.forEach(item => {
if (item.scoreStatus === 'Score') {
newArray.push(item);
}
});
});
return newArray;
}
// Function to check if an itme exists in an array(list)
var in_array = function(item, list) {
for(var i in list){
if (list[i] == item) {
return true;
}
}
return false;
};
// Function to get earliest year of selected publications
var get_earliest_year = function(earliest, dateStr) {
var pattern = new RegExp(/^\d\d\d\d/);
var theYear = pattern.exec(dateStr);
if (theYear && theYear.valueOf() < earliest.valueOf()) {
return theYear;
}
return earliest;
};
// Funtion to separate proband individuals by assessment values
// target: object containing separated proband individuals
// branch: individual array in annotation/group/family
// article: object containing publication info
// idList: Assessment array
var filter = function(target, branch, article, idList) {
var allVariants = target['allVariants'],
sptVariants = target['sptVariants'],
rvwVariants = target['rvwVariants'],
cntdctVariants = target['cntdctVariants'],
patho_spt = idList['support'],
patho_rvw = idList['review'],
patho_cntdct = idList['contradict'];
branch.forEach(function(obj) {
if (obj.proband && obj.variants && obj.variants.length > 0) {
// counting at probands only
var allSupported = true;
for (var j in obj.variants) {
// collect all distinct variants from proband individuals
if (!in_array(obj.variants[j].uuid, allVariants)) {
allVariants.push(obj.variants[j].uuid);
}
// collect variant assessments, separated by 3 different values.
if (!in_array(obj.variants[j].uuid, patho_spt)) {
allSupported = false;
if (in_array(obj.variants[j].uuid, patho_rvw) && !in_array(obj.variants[j].uuid, rvwVariants)) {
rvwVariants.push(obj.variants[j].uuid);
}
else if (in_array(obj.variants[j].uuid, patho_cntdct) && !in_array(obj.variants[j].uuid, cntdctVariants)) {
cntdctVariants.push(obj.variants[j].uuid);
}
}
else {
if (!in_array(obj.variants[j].uuid, sptVariants)) {
sptVariants.push(obj.variants[j].uuid);
}
}
}
if (allSupported) {
target["probandInd"].push(
{
"evidence":obj.uuid,
"pmid":article.pmid,
"date": article.date
}
);
}
target["allVariants"] = allVariants;
target["sptVariants"] = sptVariants;
target["rvwVariants"] = rvwVariants;
target["cntdctVariants"] = cntdctVariants;
}
});
return target;
};
// Display a history item for adding a family
var ProvisionalAddModHistory = React.createClass({
render: function() {
var history = this.props.history;
var meta = history.meta.provisionalClassification;
var gdm = meta.gdm;
return (
<div>
<span><a href={'/provisional-curation/?gdm=' + gdm.uuid + '&edit=yes'} title="View/edit provisional classification">Provisional classification</a> {meta.alteredClassification.toUpperCase()} added to </span>
<strong>{gdm.gene.symbol}-{gdm.disease.term}-</strong>
<i>{gdm.modeInheritance.indexOf('(') > -1 ? gdm.modeInheritance.substring(0, gdm.modeInheritance.indexOf('(') - 1) : gdm.modeInheritance}</i>
<span>; {moment(history.date_created).format("YYYY MMM DD, h:mm a")}</span>
</div>
);
}
});
globals.history_views.register(ProvisionalAddModHistory, 'provisionalClassification', 'add');
// Display a history item for modifying a family
var ProvisionalModifyHistory = React.createClass({
render: function() {
var history = this.props.history;
var meta = history.meta.provisionalClassification;
var gdm = meta.gdm;
return (
<div>
<span><a href={'/provisional-curation/?gdm=' + gdm.uuid + '&edit=yes'} title="View/edit provisional classification">Provisional classification</a> modified to {meta.alteredClassification.toUpperCase()} for </span>
<strong>{gdm.gene.symbol}-{gdm.disease.term}-</strong>
<i>{gdm.modeInheritance.indexOf('(') > -1 ? gdm.modeInheritance.substring(0, gdm.modeInheritance.indexOf('(') - 1) : gdm.modeInheritance}</i>
<span>; {moment(history.date_created).format("YYYY MMM DD, h:mm a")}</span>
</div>
);
}
});
globals.history_views.register(ProvisionalModifyHistory, 'provisionalClassification', 'modify');
// Display a history item for deleting a family
var ProvisionalDeleteHistory = React.createClass({
render: function() {
return <div>PROVISIONALDELETE</div>;
}
});
globals.history_views.register(ProvisionalDeleteHistory, 'provisionalClassification', 'delete');
| rounding of segregation points
| src/clincoded/static/components/provisional_curation.js | rounding of segregation points | <ide><path>rc/clincoded/static/components/provisional_curation.js
<ide> */
<ide>
<ide> // calculate segregation counted points
<add> segregationPoints = +parseFloat(segregationPoints).toFixed(2);
<ide> if (segregationPoints >= 0.75 && segregationPoints <= 0.99) {
<ide> segregationPointsCounted = 1;
<ide> } else if (segregationPoints >= 1 && segregationPoints <= 1.24) { |
|
Java | lgpl-2.1 | ba39aff73fc2402e7c20986e131e5f2007d27ca4 | 0 | viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer | /*
* CypressFX2Biasgen.java
*
* Created on December 1, 2005, 2:00 PM
*
* To change this template, choose Tools | Options and locate the template under
* the Source Creation and Management node. Right-click the template and choose
* Open. You can then make changes to the template in the Source Editor.
*/
package net.sf.jaer.hardwareinterface.usb.cypressfx2;
import net.sf.jaer.aemonitor.AEPacketRaw;
import net.sf.jaer.hardwareinterface.HardwareInterfaceException;
import net.sf.jaer.hardwareinterface.HasUpdatableFirmware;
import de.thesycon.usbio.UsbIoBuf;
import de.thesycon.usbio.UsbIoInterface;
import de.thesycon.usbio.structs.USBIO_CLASS_OR_VENDOR_REQUEST;
import de.thesycon.usbio.structs.USBIO_DATA_BUFFER;
import javax.swing.JOptionPane;
/**
* The hardware interface for the DVS128 (second Tmpdiff128 board, with CPLD) retina boards.
*
* @author tobi/rapha
*/
public class CypressFX2DVS128HardwareInterface extends CypressFX2Biasgen implements HasUpdatableFirmware {
public final static String FIRMWARE_FILENAME_DVS128_XSVF="/net/sf/jaer/hardwareinterface/usb/cypressfx2/dvs128CPLD.xsvf";
/** Creates a new instance of CypressFX2Biasgen */
protected CypressFX2DVS128HardwareInterface(int devNumber) {
super(devNumber);
}
/**
* Starts reader buffer pool thread and enables in endpoints for AEs. This method is overridden to construct
our own reader with its translateEvents method
*/
@Override
public void startAEReader() throws HardwareInterfaceException { // raphael: changed from private to protected, because i need to access this method
setAeReader(new RetinaAEReader(this));
allocateAEBuffers();
getAeReader().startThread(3); // arg is number of errors before giving up
HardwareInterfaceException.clearException();
}
synchronized public void resetTimestamps() {
log.info(this + ".resetTimestamps(): zeroing timestamps");
try {
this.sendVendorRequest(this.VENDOR_REQUEST_RESET_TIMESTAMPS);
} catch (HardwareInterfaceException e) {
e.printStackTrace();
}
}
/** This reader understands the format of raw USB data and translates to the AEPacketRaw */
public class RetinaAEReader extends CypressFX2.AEReader{
public RetinaAEReader(CypressFX2 cypress) throws HardwareInterfaceException{
super(cypress);
}
/** Does the translation, timestamp unwrapping and reset
* @param b the raw buffer
*/
@Override
protected void translateEvents(UsbIoBuf b){
// System.out.println("buf has "+b.BytesTransferred+" bytes");
synchronized(aePacketRawPool){
AEPacketRaw buffer=aePacketRawPool.writeBuffer();
// if(buffer.overrunOccuredFlag) return; // don't bother if there's already an overrun, consumer must get the events to clear this flag before there is more room for new events
int shortts;
int NumberOfWrapEvents;
NumberOfWrapEvents=0;
byte[] aeBuffer=b.BufferMem;
// byte lsb,msb;
int bytesSent=b.BytesTransferred;
if(bytesSent%4!=0){
log.warning("CypressFX2.AEReader.translateEvents(): warning: "+bytesSent+" bytes sent, which is not multiple of 4");
bytesSent=(bytesSent/4)*4; // truncate off any extra part-event
}
int[] addresses=buffer.getAddresses();
int[] timestamps=buffer.getTimestamps();
// write the start of the packet
buffer.lastCaptureIndex=eventCounter;
for(int i=0;i<bytesSent;i+=4){
// if(eventCounter>aeBufferSize-1){
// buffer.overrunOccuredFlag=true;
// // log.warning("overrun");
// return; // return, output event buffer is full and we cannot add any more events to it.
// //no more events will be translated until the existing events have been consumed by acquireAvailableEventsFromDriver
// }
if((aeBuffer[i+3]&0x80)==0x80){ // timestamp bit 15 is one -> wrap
// now we need to increment the wrapAdd
wrapAdd+=0x4000L; //uses only 14 bit timestamps
//System.out.println("received wrap event, index:" + eventCounter + " wrapAdd: "+ wrapAdd);
NumberOfWrapEvents++;
} else if ((aeBuffer[i+3]&0x40)==0x40 ) { // timestamp bit 14 is one -> wrapAdd reset
// this firmware version uses reset events to reset timestamps
this.resetTimestamps();
// log.info("got reset event, timestamp " + (0xffff&((short)aeBuffer[i]&0xff | ((short)aeBuffer[i+1]&0xff)<<8)));
} else if ((eventCounter>aeBufferSize-1) || (buffer.overrunOccuredFlag)) { // just do nothing, throw away events
buffer.overrunOccuredFlag=true;
} else {
// address is LSB MSB
addresses[eventCounter]=(int)((aeBuffer[i]&0xFF) | ((aeBuffer[i+1]&0xFF)<<8));
// same for timestamp, LSB MSB
shortts=(aeBuffer[i+2]&0xff | ((aeBuffer[i+3]&0xff)<<8)); // this is 15 bit value of timestamp in TICK_US tick
timestamps[eventCounter]=(int)(TICK_US*(shortts+wrapAdd)); //*TICK_US; //add in the wrap offset and convert to 1us tick
// this is USB2AERmini2 or StereoRetina board which have 1us timestamp tick
eventCounter++;
buffer.setNumEvents(eventCounter);
}
} // end for
// write capture size
buffer.lastCaptureLength=eventCounter-buffer.lastCaptureIndex;
// if (NumberOfWrapEvents!=0) {
//System.out.println("Number of wrap events received: "+ NumberOfWrapEvents);
//}
//System.out.println("wrapAdd : "+ wrapAdd);
} // sync on aePacketRawPool
}
}
/** set the pixel array reset
* @param value true to reset the pixels, false to let them run normally
*/
synchronized public void setArrayReset(boolean value) {
arrayResetEnabled=value;
// send vendor request for device to reset array
if(gUsbIo==null){
throw new RuntimeException("device must be opened before sending this vendor request");
}
// make vendor request structure and populate it
USBIO_CLASS_OR_VENDOR_REQUEST VendorRequest=new USBIO_CLASS_OR_VENDOR_REQUEST();
VendorRequest.Flags=UsbIoInterface.USBIO_SHORT_TRANSFER_OK;
VendorRequest.Type=UsbIoInterface.RequestTypeVendor;
VendorRequest.Recipient=UsbIoInterface.RecipientDevice;
VendorRequest.RequestTypeReservedBits=0;
VendorRequest.Request=VENDOR_REQUEST_SET_ARRAY_RESET;
VendorRequest.Index=0;
VendorRequest.Value=(short)(value?1:0); // this is the request bit, if value true, send value 1, false send value 0
USBIO_DATA_BUFFER dataBuffer=new USBIO_DATA_BUFFER(0); // no data, value is in request value
dataBuffer.setNumberOfBytesToTransfer(dataBuffer.Buffer().length);
int status=gUsbIo.classOrVendorOutRequest(dataBuffer,VendorRequest);
if(status!=USBIO_ERR_SUCCESS){
System.err.println("CypressFX2.resetPixelArray: couldn't send vendor request to reset array");
}
}
public boolean isArrayReset(){
return arrayResetEnabled;
}
/** Updates the firmware by downloading to the board's EEPROM. The firmware filename is hardcoded. TODO fix this hardcoding */
public void updateFirmware() throws HardwareInterfaceException {
Thread T = new Thread("FirmwareUpdater") {
@Override
public void run() {
try {
setEventAcquisitionEnabled(false);
writeCPLDfirmware(FIRMWARE_FILENAME_DVS128_XSVF);
log.info("New firmware written to CPLD");
byte[] fw;
try {
// TODO fix hardcoded firmware file
fw = loadBinaryFirmwareFile(CypressFX2.FIRMWARE_FILENAME_DVS128_IIC);
} catch (java.io.IOException e) {
e.printStackTrace();
throw new HardwareInterfaceException("Could not load firmware file ");
}
setEventAcquisitionEnabled(false);
writeEEPROM(0, fw);
log.info("New firmware written to EEPROM");
setEventAcquisitionEnabled(true);
JOptionPane.showMessageDialog(chip.getAeViewer(), "Update successful - unplug and replug the device to activate new firmware", "Firmware update complete", JOptionPane.INFORMATION_MESSAGE);
} catch (Exception e) {
log.warning("Firmware update failed: " + e.getMessage());
JOptionPane.showMessageDialog(chip.getAeViewer(), "Update failed: " + e.toString(), "Firmware update failed", JOptionPane.WARNING_MESSAGE);
}
}
};
T.start();
}
}
| src/net/sf/jaer/hardwareinterface/usb/cypressfx2/CypressFX2DVS128HardwareInterface.java | /*
* CypressFX2Biasgen.java
*
* Created on December 1, 2005, 2:00 PM
*
* To change this template, choose Tools | Options and locate the template under
* the Source Creation and Management node. Right-click the template and choose
* Open. You can then make changes to the template in the Source Editor.
*/
package net.sf.jaer.hardwareinterface.usb.cypressfx2;
import net.sf.jaer.aemonitor.AEPacketRaw;
import net.sf.jaer.hardwareinterface.HardwareInterfaceException;
import net.sf.jaer.hardwareinterface.HasUpdatableFirmware;
import de.thesycon.usbio.UsbIoBuf;
import de.thesycon.usbio.UsbIoInterface;
import de.thesycon.usbio.structs.USBIO_CLASS_OR_VENDOR_REQUEST;
import de.thesycon.usbio.structs.USBIO_DATA_BUFFER;
import javax.swing.JOptionPane;
/**
* The hardware interface for the DVS128 (second Tmpdiff128 board, with CPLD) retina boards.
*
* @author tobi/rapha
*/
public class CypressFX2DVS128HardwareInterface extends CypressFX2Biasgen implements HasUpdatableFirmware {
public final static String FIRMWARE_FILENAME_DVS128_XSVF="/net/sf/jaer/hardwareinterface/usb/cypressfx2/dvs128CPLD.xsvf";
/** Creates a new instance of CypressFX2Biasgen */
protected CypressFX2DVS128HardwareInterface(int devNumber) {
super(devNumber);
}
/**
* Starts reader buffer pool thread and enables in endpoints for AEs. This method is overridden to construct
our own reader with its translateEvents method
*/
@Override
public void startAEReader() throws HardwareInterfaceException { // raphael: changed from private to protected, because i need to access this method
setAeReader(new RetinaAEReader(this));
allocateAEBuffers();
getAeReader().startThread(3); // arg is number of errors before giving up
HardwareInterfaceException.clearException();
}
synchronized public void resetTimestamps() {
log.info(this + ".resetTimestamps(): zeroing timestamps");
try {
this.sendVendorRequest(this.VENDOR_REQUEST_RESET_TIMESTAMPS);
} catch (HardwareInterfaceException e) {
e.printStackTrace();
}
}
/** This reader understands the format of raw USB data and translates to the AEPacketRaw */
public class RetinaAEReader extends CypressFX2.AEReader{
public RetinaAEReader(CypressFX2 cypress) throws HardwareInterfaceException{
super(cypress);
}
/** Does the translation, timestamp unwrapping and reset
* @param b the raw buffer
*/
@Override
protected void translateEvents(UsbIoBuf b){
// System.out.println("buf has "+b.BytesTransferred+" bytes");
synchronized(aePacketRawPool){
AEPacketRaw buffer=aePacketRawPool.writeBuffer();
// if(buffer.overrunOccuredFlag) return; // don't bother if there's already an overrun, consumer must get the events to clear this flag before there is more room for new events
int shortts;
int NumberOfWrapEvents;
NumberOfWrapEvents=0;
byte[] aeBuffer=b.BufferMem;
// byte lsb,msb;
int bytesSent=b.BytesTransferred;
if(bytesSent%4!=0){
log.warning("CypressFX2.AEReader.translateEvents(): warning: "+bytesSent+" bytes sent, which is not multiple of 4");
bytesSent=(bytesSent/4)*4; // truncate off any extra part-event
}
int[] addresses=buffer.getAddresses();
int[] timestamps=buffer.getTimestamps();
// write the start of the packet
buffer.lastCaptureIndex=eventCounter;
for(int i=0;i<bytesSent;i+=4){
// if(eventCounter>aeBufferSize-1){
// buffer.overrunOccuredFlag=true;
// // log.warning("overrun");
// return; // return, output event buffer is full and we cannot add any more events to it.
// //no more events will be translated until the existing events have been consumed by acquireAvailableEventsFromDriver
// }
if((aeBuffer[i+3]&0x80)==0x80){ // timestamp bit 15 is one -> wrap
// now we need to increment the wrapAdd
wrapAdd+=0x4000L; //uses only 14 bit timestamps
//System.out.println("received wrap event, index:" + eventCounter + " wrapAdd: "+ wrapAdd);
NumberOfWrapEvents++;
/* } else if ((aeBuffer[i+3]&0x40)==0x40 ) { // timestamp bit 14 is one -> wrapAdd reset
// this firmware version uses reset events to reset timestamps
this.resetTimestamps();
// log.info("got reset event, timestamp " + (0xffff&((short)aeBuffer[i]&0xff | ((short)aeBuffer[i+1]&0xff)<<8)));
*/ } else if ((eventCounter>aeBufferSize-1) || (buffer.overrunOccuredFlag)) { // just do nothing, throw away events
buffer.overrunOccuredFlag=true;
} else {
// address is LSB MSB
addresses[eventCounter]=(int)((aeBuffer[i]&0xFF) | ((aeBuffer[i+1]&0xFF)<<8));
// same for timestamp, LSB MSB
shortts=(aeBuffer[i+2]&0xff | ((aeBuffer[i+3]&0xff)<<8)); // this is 15 bit value of timestamp in TICK_US tick
timestamps[eventCounter]=(int)(TICK_US*(shortts+wrapAdd)); //*TICK_US; //add in the wrap offset and convert to 1us tick
// this is USB2AERmini2 or StereoRetina board which have 1us timestamp tick
eventCounter++;
buffer.setNumEvents(eventCounter);
}
} // end for
// write capture size
buffer.lastCaptureLength=eventCounter-buffer.lastCaptureIndex;
// if (NumberOfWrapEvents!=0) {
//System.out.println("Number of wrap events received: "+ NumberOfWrapEvents);
//}
//System.out.println("wrapAdd : "+ wrapAdd);
} // sync on aePacketRawPool
}
}
/** set the pixel array reset
* @param value true to reset the pixels, false to let them run normally
*/
synchronized public void setArrayReset(boolean value) {
arrayResetEnabled=value;
// send vendor request for device to reset array
if(gUsbIo==null){
throw new RuntimeException("device must be opened before sending this vendor request");
}
// make vendor request structure and populate it
USBIO_CLASS_OR_VENDOR_REQUEST VendorRequest=new USBIO_CLASS_OR_VENDOR_REQUEST();
VendorRequest.Flags=UsbIoInterface.USBIO_SHORT_TRANSFER_OK;
VendorRequest.Type=UsbIoInterface.RequestTypeVendor;
VendorRequest.Recipient=UsbIoInterface.RecipientDevice;
VendorRequest.RequestTypeReservedBits=0;
VendorRequest.Request=VENDOR_REQUEST_SET_ARRAY_RESET;
VendorRequest.Index=0;
VendorRequest.Value=(short)(value?1:0); // this is the request bit, if value true, send value 1, false send value 0
USBIO_DATA_BUFFER dataBuffer=new USBIO_DATA_BUFFER(0); // no data, value is in request value
dataBuffer.setNumberOfBytesToTransfer(dataBuffer.Buffer().length);
int status=gUsbIo.classOrVendorOutRequest(dataBuffer,VendorRequest);
if(status!=USBIO_ERR_SUCCESS){
System.err.println("CypressFX2.resetPixelArray: couldn't send vendor request to reset array");
}
}
public boolean isArrayReset(){
return arrayResetEnabled;
}
/** Updates the firmware by downloading to the board's EEPROM. The firmware filename is hardcoded. TODO fix this hardcoding */
public void updateFirmware() throws HardwareInterfaceException {
Thread T = new Thread("FirmwareUpdater") {
@Override
public void run() {
try {
setEventAcquisitionEnabled(false);
writeCPLDfirmware(FIRMWARE_FILENAME_DVS128_XSVF);
log.info("New firmware written to CPLD");
byte[] fw;
try {
// TODO fix hardcoded firmware file
fw = loadBinaryFirmwareFile(CypressFX2.FIRMWARE_FILENAME_DVS128_IIC);
} catch (java.io.IOException e) {
e.printStackTrace();
throw new HardwareInterfaceException("Could not load firmware file ");
}
setEventAcquisitionEnabled(false);
writeEEPROM(0, fw);
log.info("New firmware written to EEPROM");
setEventAcquisitionEnabled(true);
JOptionPane.showMessageDialog(chip.getAeViewer(), "Update successful - unplug and replug the device to activate new firmware", "Firmware update complete", JOptionPane.INFORMATION_MESSAGE);
} catch (Exception e) {
log.warning("Firmware update failed: " + e.getMessage());
JOptionPane.showMessageDialog(chip.getAeViewer(), "Update failed: " + e.toString(), "Firmware update failed", JOptionPane.WARNING_MESSAGE);
}
}
};
T.start();
}
}
| uncommented timestamp reset which was commented for debugging new dvs128 board
git-svn-id: e3d3b427d532171a6bd7557d8a4952a393b554a2@1128 b7f4320f-462c-0410-a916-d9f35bb82d52
| src/net/sf/jaer/hardwareinterface/usb/cypressfx2/CypressFX2DVS128HardwareInterface.java | uncommented timestamp reset which was commented for debugging new dvs128 board | <ide><path>rc/net/sf/jaer/hardwareinterface/usb/cypressfx2/CypressFX2DVS128HardwareInterface.java
<ide>
<ide> //System.out.println("received wrap event, index:" + eventCounter + " wrapAdd: "+ wrapAdd);
<ide> NumberOfWrapEvents++;
<del> /* } else if ((aeBuffer[i+3]&0x40)==0x40 ) { // timestamp bit 14 is one -> wrapAdd reset
<add> } else if ((aeBuffer[i+3]&0x40)==0x40 ) { // timestamp bit 14 is one -> wrapAdd reset
<ide> // this firmware version uses reset events to reset timestamps
<ide> this.resetTimestamps();
<ide> // log.info("got reset event, timestamp " + (0xffff&((short)aeBuffer[i]&0xff | ((short)aeBuffer[i+1]&0xff)<<8)));
<del> */ } else if ((eventCounter>aeBufferSize-1) || (buffer.overrunOccuredFlag)) { // just do nothing, throw away events
<add> } else if ((eventCounter>aeBufferSize-1) || (buffer.overrunOccuredFlag)) { // just do nothing, throw away events
<ide> buffer.overrunOccuredFlag=true;
<ide> } else {
<ide> // address is LSB MSB |
|
JavaScript | apache-2.0 | ea5234290d5ba525ee08b7029377204e795c5daa | 0 | westlywright/ui,rancher/ui,vincent99/ui,rancher/ui,rancherio/ui,rancher/ui,rancherio/ui,westlywright/ui,westlywright/ui,vincent99/ui,rancherio/ui,vincent99/ui | import { get, observer, set } from '@ember/object';
import Route from '@ember/routing/route';
import { cancel, next, schedule } from '@ember/runloop';
import { inject as service } from '@ember/service';
import C from 'ui/utils/constants';
export default Route.extend({
  // Injected services used throughout the route.
  access: service(),
  cookies: service(),
  language: service('user-language'),
  modal: service(),
  prefs: service(),
  settings: service(),

  // Previous route/params bookkeeping (consumed by the goToPrevious helper).
  previousParams: null,
  previousRoute: null,

  // State for the global loading indicator (driven by the `loading` action):
  // whether the overlay is currently visible, a monotonically increasing id
  // so only the latest transition may hide it, and the deferred-hide timer.
  loadingShown: false,
  loadingId: 0,
  hideTimer: null,

  // Presumably the language active before a shift+l toggle — confirm in the
  // langToggle action handler.
  previousLang: null,

  // Keyboard shortcut: shift+l fires the 'langToggle' action.
  shortcuts: { 'shift+l': 'langToggle', },
beforeModel() {
this.updateWindowTitle();
return (async() => {
if (!window.Prettycron) {
window.Prettycron = await import('prettycron');
}
// Find out if auth is enabled
return get(this, 'access').detect().finally(() => {
return get(this, 'language').initLanguage();
});
})();
},
model(params, transition) {
transition.finally(() => {
this.controllerFor('application').setProperties({
state: null,
code: null,
error_description: null,
});
})
if (params.isPopup) {
this.controllerFor('application').set('isPopup', true);
}
},
actions: {
    didTransition() {
      // Notify interested parties (e.g. an embedding frame) that a route
      // transition has completed.
      this.notifyAction('did-transition');
    },
loading(transition) {
this.incrementProperty('loadingId');
let id = get(this, 'loadingId');
cancel(get(this, 'hideTimer'));
// console.log('Loading', id);
this.notifyAction('need-to-load');
if ( !get(this, 'loadingShown') ) {
set(this, 'loadingShown', true);
// console.log('Loading Show', id);
this.notifyLoading(true);
schedule('afterRender', () => {
$('#loading-underlay').stop().show().fadeIn({// eslint-disable-line
duration: 100,
queue: false,
easing: 'linear',
complete: schedule('afterRender', function() { // eslint-disable-line
$('#loading-overlay').stop().show().fadeIn({duration: 200, queue: false, easing: 'linear'}); // eslint-disable-line
})
});
});
}
transition.finally(() => {
var self = this;
function hide() {
// console.log('Loading hide', id);
set(self, 'loadingShown', false);
schedule('afterRender', () => {
$('#loading-overlay').stop().fadeOut({// eslint-disable-line
duration: 200,
queue: false,
easing: 'linear',
complete: schedule('afterRender', function() { // eslint-disable-line
$('#loading-underlay').stop().fadeOut({duration: 100, queue: false, easing: 'linear'}); // eslint-disable-line
setTimeout(() => self.notifyLoading(false), 200);
})
});
});
}
if ( get(this, 'loadingId') === id ) {
if ( transition.isAborted ) {
// console.log('Loading aborted', id, get(this, 'loadingId'));
set(this, 'hideTimer', next(hide));
} else {
// console.log('Loading finished', id, get(this, 'loadingId'));
hide();
}
}
});
return true;
},
error(err, transition) {
/* if we dont abort the transition we'll call the model calls again and fail transition correctly*/
transition.abort();
const status = parseInt(err.status, 10);
if ( err && [401, 403].includes(status) ) {
this.send('logout', transition);
return;
}
this.controllerFor('application').set('error', err);
this.transitionTo('failWhale');
// console.log('Application Error', (err ? err.stack : undefined));
},
goToPrevious(def) {
this.goToPrevious(def);
},
finishLogin() {
this.finishLogin();
},
logout(transition, errorMsg) {
let session = get(this, 'session');
let access = get(this, 'access');
access.clearToken().finally(() => {
let url = `${ window.location.origin }/login`;
get(this, 'tab-session').clear();
set(this, `session.${ C.SESSION.CONTAINER_ROUTE }`, undefined);
set(this, `session.${ C.SESSION.ISTIO_ROUTE }`, undefined);
set(this, `session.${ C.SESSION.CLUSTER_ROUTE }`, undefined);
set(this, `session.${ C.SESSION.PROJECT_ROUTE }`, undefined);
if ( transition && !session.get(C.SESSION.BACK_TO) ) {
session.set(C.SESSION.BACK_TO, window.location.href);
}
if ( get(this, 'modal.modalVisible') ) {
get(this, 'modal').toggleModal();
}
if ( errorMsg ) {
url = `${ url }?errorMsg=${ errorMsg }`;
}
window.location.replace(url);
});
},
langToggle() {
let svc = get(this, 'language');
let cur = svc.getLocale();
if ( cur === 'none' ) {
svc.sideLoadLanguage(get(this, 'previousLang') || 'en-us');
} else {
set(this, 'previousLang', cur);
svc.sideLoadLanguage('none');
}
},
},
updateWindowTitle: observer('settings.appName', function() {
document.title = get(this, 'settings.appName');
}),
finishLogin() {
let session = get(this, 'session');
let backTo = session.get(C.SESSION.BACK_TO);
session.set(C.SESSION.BACK_TO, undefined);
if ( backTo ) {
// console.log('Going back to', backTo);
window.location.href = backTo;
} else {
this.replaceWith('authenticated');
}
},
notifyLoading(isLoading) {
this.notifyAction('loading', isLoading);
},
notifyAction(action, state) {
// If embedded, notify outer frame
const isEmbedded = window !== window.top;
if (isEmbedded) {
window.top.postMessage({
action,
state
});
}
}
});
| app/application/route.js | import { cancel, next, schedule } from '@ember/runloop';
import { inject as service } from '@ember/service';
import Route from '@ember/routing/route';
import C from 'ui/utils/constants';
import { get, set, observer } from '@ember/object';
export default Route.extend({
access: service(),
cookies: service(),
language: service('user-language'),
modal: service(),
prefs: service(),
settings: service(),
previousParams: null,
previousRoute: null,
loadingShown: false,
loadingId: 0,
hideTimer: null,
previousLang: null,
shortcuts: { 'shift+l': 'langToggle', },
beforeModel() {
this.updateWindowTitle();
return (async() => {
if (!window.Prettycron) {
window.Prettycron = await import('prettycron');
}
// Find out if auth is enabled
return get(this, 'access').detect().finally(() => {
return get(this, 'language').initLanguage();
});
})();
},
model(params, transition) {
transition.finally(() => {
this.controllerFor('application').setProperties({
state: null,
code: null,
error_description: null,
});
})
if (params.isPopup) {
this.controllerFor('application').set('isPopup', true);
}
},
actions: {
didTransition() {
this.notifyAction('did-transition');
},
loading(transition) {
this.incrementProperty('loadingId');
let id = get(this, 'loadingId');
cancel(get(this, 'hideTimer'));
this.notifyAction('need-to-load');
if ( !get(this, 'loadingShown') ) {
set(this, 'loadingShown', true);
this.notifyLoading(true);
schedule('afterRender', () => {
$('#loading-underlay').stop().show().fadeIn({// eslint-disable-line
duration: 100,
queue: false,
easing: 'linear',
complete: schedule('afterRender', function() { // eslint-disable-line
$('#loading-overlay').stop().show().fadeIn({duration: 200, queue: false, easing: 'linear'}); // eslint-disable-line
})
});
});
}
transition.finally(() => {
var self = this;
function hide() {
// console.log('Loading hide', id);
set(self, 'loadingShown', false);
schedule('afterRender', () => {
$('#loading-overlay').stop().fadeOut({// eslint-disable-line
duration: 200,
queue: false,
easing: 'linear',
complete: schedule('afterRender', function() { // eslint-disable-line
$('#loading-underlay').stop().fadeOut({duration: 100, queue: false, easing: 'linear'}); // eslint-disable-line
setTimeout(() => self.notifyLoading(false), 200);
})
});
});
}
if ( get(this, 'loadingId') === id ) {
if ( transition.isAborted ) {
// console.log('Loading aborted', id, get(this, 'loadingId'));
set(this, 'hideTimer', next(hide));
} else {
// console.log('Loading finished', id, get(this, 'loadingId'));
hide();
}
}
});
return true;
},
error(err, transition) {
/* if we dont abort the transition we'll call the model calls again and fail transition correctly*/
transition.abort();
const status = parseInt(err.status, 10);
if ( err && [401, 403].includes(status) ) {
this.send('logout', transition);
return;
}
this.controllerFor('application').set('error', err);
this.transitionTo('failWhale');
// console.log('Application Error', (err ? err.stack : undefined));
},
goToPrevious(def) {
this.goToPrevious(def);
},
finishLogin() {
this.finishLogin();
},
logout(transition, errorMsg) {
let session = get(this, 'session');
let access = get(this, 'access');
access.clearToken().finally(() => {
let url = `${ window.location.origin }/login`;
get(this, 'tab-session').clear();
set(this, `session.${ C.SESSION.CONTAINER_ROUTE }`, undefined);
set(this, `session.${ C.SESSION.ISTIO_ROUTE }`, undefined);
set(this, `session.${ C.SESSION.CLUSTER_ROUTE }`, undefined);
set(this, `session.${ C.SESSION.PROJECT_ROUTE }`, undefined);
if ( transition && !session.get(C.SESSION.BACK_TO) ) {
session.set(C.SESSION.BACK_TO, window.location.href);
}
if ( get(this, 'modal.modalVisible') ) {
get(this, 'modal').toggleModal();
}
if ( errorMsg ) {
url = `${ url }?errorMsg=${ errorMsg }`;
}
window.location.replace(url);
});
},
langToggle() {
let svc = get(this, 'language');
let cur = svc.getLocale();
if ( cur === 'none' ) {
svc.sideLoadLanguage(get(this, 'previousLang') || 'en-us');
} else {
set(this, 'previousLang', cur);
svc.sideLoadLanguage('none');
}
},
},
updateWindowTitle: observer('settings.appName', function() {
document.title = get(this, 'settings.appName');
}),
finishLogin() {
let session = get(this, 'session');
let backTo = session.get(C.SESSION.BACK_TO);
session.set(C.SESSION.BACK_TO, undefined);
if ( backTo ) {
// console.log('Going back to', backTo);
window.location.href = backTo;
} else {
this.replaceWith('authenticated');
}
},
notifyLoading(isLoading) {
this.notifyAction('loading', isLoading);
},
notifyAction(action, state) {
// If embedded, notify outer frame
const isEmbedded = window !== window.top;
if (isEmbedded) {
window.top.postMessage({
action,
state
});
}
}
});
| add back in comments
| app/application/route.js | add back in comments | <ide><path>pp/application/route.js
<add>import { get, observer, set } from '@ember/object';
<add>import Route from '@ember/routing/route';
<ide> import { cancel, next, schedule } from '@ember/runloop';
<ide> import { inject as service } from '@ember/service';
<del>import Route from '@ember/routing/route';
<ide> import C from 'ui/utils/constants';
<del>import { get, set, observer } from '@ember/object';
<ide>
<ide> export default Route.extend({
<ide> access: service(),
<ide>
<ide> cancel(get(this, 'hideTimer'));
<ide>
<add> // console.log('Loading', id);
<ide> this.notifyAction('need-to-load');
<ide>
<ide> if ( !get(this, 'loadingShown') ) {
<ide> set(this, 'loadingShown', true);
<add> // console.log('Loading Show', id);
<ide> this.notifyLoading(true);
<ide>
<ide> schedule('afterRender', () => { |
|
Java | apache-2.0 | 0341548da8968f659da396ea37e75ab6d72dc9be | 0 | MalharJenkins/incubator-apex-core,apache/incubator-apex-core,PramodSSImmaneni/incubator-apex-core,tushargosavi/incubator-apex-core,chinmaykolhatkar/incubator-apex-core,tweise/apex-core,devtagare/incubator-apex-core,vrozov/incubator-apex-core,deepak-narkhede/apex-core,tushargosavi/apex-core,brightchen/apex-core,PramodSSImmaneni/apex-core,klynchDS/incubator-apex-core,vrozov/apex-core,simplifi-it/otterx,ishark/incubator-apex-core,brightchen/apex-core,vrozov/incubator-apex-core,ishark/incubator-apex-core,klynchDS/incubator-apex-core,apache/incubator-apex-core,amberarrow/incubator-apex-core,tweise/apex-core,deepak-narkhede/apex-core,tweise/apex-core,sandeshh/incubator-apex-core,sandeshh/apex-core,PramodSSImmaneni/apex-core,sandeshh/incubator-apex-core,brightchen/incubator-apex-core,vrozov/apex-core,aniruddhas/incubator-apex-core,brightchen/apex-core,aniruddhas/incubator-apex-core,sandeshh/incubator-apex-core,chinmaykolhatkar/incubator-apex-core,mt0803/incubator-apex-core,tushargosavi/incubator-apex-core,amberarrow/incubator-apex-core,PramodSSImmaneni/incubator-apex-core,devtagare/incubator-apex-core,tushargosavi/apex-core,vrozov/apex-core,sandeshh/apex-core,deepak-narkhede/apex-core,ishark/incubator-apex-core,mt0803/incubator-apex-core,simplifi-it/otterx,simplifi-it/otterx,devtagare/incubator-apex-core,mattqzhang/apex-core,brightchen/incubator-apex-core,PramodSSImmaneni/apex-core,andyperlitch/incubator-apex-core,mattqzhang/apex-core,tushargosavi/incubator-apex-core,andyperlitch/incubator-apex-core,mattqzhang/apex-core,MalharJenkins/incubator-apex-core,vrozov/incubator-apex-core,tweise/incubator-apex-core,apache/incubator-apex-core,chinmaykolhatkar/incubator-apex-core,tushargosavi/apex-core,tweise/incubator-apex-core,PramodSSImmaneni/incubator-apex-core,tweise/incubator-apex-core,sandeshh/apex-core | /**
* Copyright (c) 2012-2012 Malhar, Inc.
* All rights reserved.
*/
package com.malhartech.stram;
import java.io.ByteArrayOutputStream;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.malhartech.api.InputOperator;
import com.malhartech.api.Operator;
import com.malhartech.api.StorageAgent;
import com.malhartech.bufferserver.util.Codec;
import com.malhartech.engine.Node;
import com.malhartech.engine.OperatorContext;
import com.malhartech.stram.OperatorDeployInfo.InputDeployInfo;
import com.malhartech.stram.OperatorDeployInfo.OutputDeployInfo;
import com.malhartech.stram.PhysicalPlan.PTContainer;
import com.malhartech.stram.PhysicalPlan.PTInput;
import com.malhartech.stram.PhysicalPlan.PTOperator;
import com.malhartech.stram.PhysicalPlan.PTOperator.State;
import com.malhartech.stram.PhysicalPlan.PTOutput;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.ContainerHeartbeatResponse;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StramToNodeRequest;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingContainerContext;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingNodeHeartbeat;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingNodeHeartbeat.DNodeState;
import com.malhartech.stram.plan.logical.LogicalPlan;
import com.malhartech.stram.plan.logical.LogicalPlan.InputPortMeta;
import com.malhartech.stram.plan.logical.LogicalPlan.StreamMeta;
import com.malhartech.stram.webapp.ContainerInfo;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
/**
*
* Representation of a child container in the master<p>
* <br>
*/
public class StramChildAgent {
private static final Logger LOG = LoggerFactory.getLogger(StramChildAgent.class);
public static class ContainerStartRequest {
final PTContainer container;
ContainerStartRequest(PTContainer container) {
this.container = container;
}
}
static class MovingAverageLong {
private final int periods;
private final long[] values;
private int index = 0;
private boolean filled = false;
MovingAverageLong(int periods) {
this.periods = periods;
this.values = new long[periods];
}
void add(long val) {
values[index++] = val;
if (index == periods) {
filled = true;
}
index %= periods;
}
long getAvg() {
long sum = 0;
for (int i=0; i<periods; i++) {
sum += values[i];
}
if (!filled) {
return index == 0 ? 0 : sum/index;
} else {
return sum/periods;
}
}
}
// Generics don't work with numbers. Hence this mess.
static class MovingAverageDouble {
private final int periods;
private final double[] values;
private int index = 0;
private boolean filled = false;
MovingAverageDouble(int periods) {
this.periods = periods;
this.values = new double[periods];
}
void add(double val) {
values[index++] = val;
if (index == periods) {
filled = true;
}
index %= periods;
}
double getAvg() {
double sum = 0;
for (int i=0; i<periods; i++) {
sum += values[i];
}
if (!filled) {
return index == 0 ? 0 : sum/index;
} else {
return sum/periods;
}
}
}
protected class OperatorStatus
{
StreamingNodeHeartbeat lastHeartbeat;
final PTOperator operator;
long totalTuplesProcessed;
long totalTuplesEmitted;
long currentWindowId;
MovingAverageLong tuplesProcessedPSMA10 = new MovingAverageLong(10);
MovingAverageLong tuplesEmittedPSMA10 = new MovingAverageLong(10);
MovingAverageDouble cpuPercentageMA10 = new MovingAverageDouble(10);
MovingAverageLong latencyMA = new MovingAverageLong(10);
List<String> recordingNames; // null if recording is not in progress
Map<String, PortStatus> inputPortStatusList = new HashMap<String, PortStatus>();
Map<String, PortStatus> outputPortStatusList = new HashMap<String, PortStatus>();
private OperatorStatus(PTOperator operator) {
this.operator = operator;
for (PTInput ptInput: operator.inputs) {
PortStatus inputPortStatus = new PortStatus();
inputPortStatus.portName = ptInput.portName;
inputPortStatusList.put(ptInput.portName, inputPortStatus);
}
for (PTOutput ptOutput: operator.outputs) {
PortStatus outputPortStatus = new PortStatus();
outputPortStatus.portName = ptOutput.portName;
outputPortStatusList.put(ptOutput.portName, outputPortStatus);
}
}
public boolean isIdle()
{
if ((lastHeartbeat != null && DNodeState.IDLE.name().equals(lastHeartbeat.getState()))) {
return true;
}
return false;
}
}
public class PortStatus
{
String portName;
long totalTuples = 0;
MovingAverageLong tuplesPSMA10 = new MovingAverageLong(10);
MovingAverageLong bufferServerBytesPSMA10 = new MovingAverageLong(10); // TBD
}
public StramChildAgent(PTContainer container, StreamingContainerContext initCtx) {
this.container = container;
this.initCtx = initCtx;
this.operators = new HashMap<Integer, OperatorStatus>(container.operators.size());
this.memoryMBFree = this.container.getAllocatedMemoryMB();
}
boolean shutdownRequested = false;
boolean isComplete = false;
long lastHeartbeatMillis = 0;
//long lastCheckpointRequestMillis = 0;
long createdMillis = System.currentTimeMillis();
final PTContainer container;
Map<Integer, OperatorStatus> operators;
final StreamingContainerContext initCtx;
Runnable onAck = null;
String jvmName;
int memoryMBFree;
private final ConcurrentLinkedQueue<StramToNodeRequest> operatorRequests = new ConcurrentLinkedQueue<StramToNodeRequest>();
public StreamingContainerContext getInitContext() {
return initCtx;
}
public boolean hasPendingWork() {
return this.onAck != null || !container.pendingDeploy.isEmpty() || !container.pendingUndeploy.isEmpty();
}
private void ackPendingRequest() {
if (onAck != null) {
onAck.run();
onAck = null;
}
}
protected OperatorStatus updateOperatorStatus(StreamingNodeHeartbeat shb) {
OperatorStatus status = this.operators.get(shb.getNodeId());
if (status == null) {
for (PTOperator operator : container.operators) {
if (operator.getId() == shb.getNodeId()) {
status = new OperatorStatus(operator);
operators.put(shb.getNodeId(), status);
}
}
}
if (status != null && !container.pendingDeploy.isEmpty()) {
if (status.operator.getState() == PTOperator.State.PENDING_DEPLOY) {
// remove operator from deploy list only if not scheduled of undeploy (or redeploy) again
if (!container.pendingUndeploy.contains(status.operator) && container.pendingDeploy.remove(status.operator)) {
LOG.debug("{} marking deployed: {} remote status {}", new Object[] {container.containerId, status.operator, shb.getState()});
status.operator.setState(PTOperator.State.ACTIVE);
}
}
LOG.debug("{} pendingDeploy {}", container.containerId, container.pendingDeploy);
}
return status;
}
public void addOperatorRequest(StramToNodeRequest r) {
LOG.info("Adding operator request {} {}", container.containerId, r);
this.operatorRequests.add(r);
}
@SuppressWarnings("ReturnOfCollectionOrArrayField")
protected ConcurrentLinkedQueue<StramToNodeRequest> getOperatorRequests() {
return this.operatorRequests;
}
public ContainerHeartbeatResponse pollRequest() {
ackPendingRequest();
if (!this.container.pendingUndeploy.isEmpty()) {
ContainerHeartbeatResponse rsp = new ContainerHeartbeatResponse();
final Set<PTOperator> toUndeploy = Sets.newHashSet(this.container.pendingUndeploy);
List<OperatorDeployInfo> nodeList = getUndeployInfoList(toUndeploy);
rsp.undeployRequest = nodeList;
rsp.hasPendingRequests = (!this.container.pendingDeploy.isEmpty());
this.onAck = new Runnable() {
@Override
public void run() {
// remove operators from undeploy list to not request it again
container.pendingUndeploy.removeAll(toUndeploy);
for (PTOperator operator : toUndeploy) {
operator.setState(PTOperator.State.INACTIVE);
}
LOG.debug("{} undeploy complete: {} deploy: {}", new Object[] {container.containerId, toUndeploy, container.pendingDeploy});
}
};
return rsp;
}
if (!this.container.pendingDeploy.isEmpty()) {
Set<PTOperator> deployOperators = this.container.plan.getOperatorsForDeploy(this.container);
LOG.debug("container {} deployable operators: {}", container.containerId, deployOperators);
ContainerHeartbeatResponse rsp = new ContainerHeartbeatResponse();
List<OperatorDeployInfo> deployList = getDeployInfoList(deployOperators);
if (deployList != null && !deployList.isEmpty()) {
rsp.deployRequest = deployList;
rsp.nodeRequests = Lists.newArrayList();
for (PTOperator o : deployOperators) {
rsp.nodeRequests.addAll(o.deployRequests);
}
}
rsp.hasPendingRequests = false;
return rsp;
}
return null;
}
boolean isIdle() {
if (this.hasPendingWork()) {
// container may have no active operators but deploy request pending
return false;
}
for (OperatorStatus operatorStatus : this.operators.values()) {
if (!operatorStatus.isIdle()) {
return false;
}
}
return true;
}
// this method is only used for testing
public List<OperatorDeployInfo> getDeployInfo() {
return getDeployInfoList(container.pendingDeploy);
}
/**
* Create deploy info for StramChild.
* @param operators
* @return StreamingContainerContext
*/
private List<OperatorDeployInfo> getDeployInfoList(Set<PTOperator> operators) {
if (container.bufferServerAddress == null) {
throw new IllegalStateException("No buffer server address assigned");
}
Map<OperatorDeployInfo, PTOperator> nodes = new LinkedHashMap<OperatorDeployInfo, PTOperator>();
Map<String, OutputDeployInfo> publishers = new LinkedHashMap<String, OutputDeployInfo>();
for (PTOperator node : operators) {
if (node.getState() != State.NEW && node.getState() != State.INACTIVE) {
LOG.debug("Skipping deploy for operator {} state {}", node, node.getState());
continue;
}
node.setState(State.PENDING_DEPLOY);
OperatorDeployInfo ndi = createOperatorDeployInfo(node);
long checkpointWindowId = node.getRecoveryCheckpoint();
if (checkpointWindowId > 0) {
LOG.debug("Operator {} recovery checkpoint {}", node.getId(), Codec.getStringWindowId(checkpointWindowId));
ndi.checkpointWindowId = checkpointWindowId;
}
nodes.put(ndi, node);
ndi.inputs = new ArrayList<InputDeployInfo>(node.inputs.size());
ndi.outputs = new ArrayList<OutputDeployInfo>(node.outputs.size());
for (PTOutput out : node.outputs) {
final StreamMeta streamMeta = out.logicalStream;
// buffer server or inline publisher
OutputDeployInfo portInfo = new OutputDeployInfo();
portInfo.declaredStreamId = streamMeta.getId();
portInfo.portName = out.portName;
portInfo.contextAttributes = streamMeta.getSource().getAttributes();
if (ndi.type == OperatorDeployInfo.OperatorType.UNIFIER) {
// input attributes of the downstream operator
for (InputPortMeta sink : streamMeta.getSinks()) {
portInfo.contextAttributes = sink.getAttributes();
break;
}
}
if (!out.isDownStreamInline()) {
portInfo.bufferServerHost = node.container.bufferServerAddress.getHostName();
portInfo.bufferServerPort = node.container.bufferServerAddress.getPort();
if (streamMeta.getCodecClass() != null) {
portInfo.serDeClassName = streamMeta.getCodecClass().getName();
}
}
ndi.outputs.add(portInfo);
publishers.put(node.getId() + "/" + streamMeta.getId(), portInfo);
}
}
// after we know all publishers within container, determine subscribers
for (Map.Entry<OperatorDeployInfo, PTOperator> nodeEntry : nodes.entrySet()) {
OperatorDeployInfo ndi = nodeEntry.getKey();
PTOperator node = nodeEntry.getValue();
for (PTInput in : node.inputs) {
final StreamMeta streamMeta = in.logicalStream;
if (streamMeta.getSource() == null) {
throw new AssertionError("source is null: " + in);
}
PTOutput sourceOutput = in.source;
InputDeployInfo inputInfo = new InputDeployInfo();
inputInfo.declaredStreamId = streamMeta.getId();
inputInfo.portName = in.portName;
for (Map.Entry<InputPortMeta, StreamMeta> e : node.getOperatorMeta().getInputStreams().entrySet()) {
if (e.getValue() == streamMeta) {
inputInfo.contextAttributes = e.getKey().getAttributes();
}
}
if (inputInfo.contextAttributes == null && ndi.type == OperatorDeployInfo.OperatorType.UNIFIER) {
inputInfo.contextAttributes = in.source.logicalStream.getSource().getAttributes();
}
inputInfo.sourceNodeId = sourceOutput.source.getId();
inputInfo.sourcePortName = sourceOutput.portName;
if (in.partitions != null && in.partitions.mask != 0) {
inputInfo.partitionMask = in.partitions.mask;
inputInfo.partitionKeys = in.partitions.partitions;
}
if (sourceOutput.source.container == node.container) {
// inline input (both operators in same container)
OutputDeployInfo outputInfo = publishers.get(sourceOutput.source.getId() + "/" + streamMeta.getId());
if (outputInfo == null) {
throw new AssertionError("Missing publisher for inline stream " + sourceOutput);
}
} else {
// buffer server input
InetSocketAddress addr = sourceOutput.source.container.bufferServerAddress;
if (addr == null) {
throw new AssertionError("upstream address not assigned: " + sourceOutput);
}
inputInfo.bufferServerHost = addr.getHostName();
inputInfo.bufferServerPort = addr.getPort();
if (streamMeta.getCodecClass() != null) {
inputInfo.serDeClassName = streamMeta.getCodecClass().getName();
}
}
ndi.inputs.add(inputInfo);
}
}
return new ArrayList<OperatorDeployInfo>(nodes.keySet());
}
/**
* Create operator undeploy request for StramChild. Since the physical plan
* could have been modified (dynamic partitioning etc.), stream information
* cannot be provided. StramChild keeps track of connected streams and removes
* them along with the operator instance.
*
* @param operators
* @return
*/
private List<OperatorDeployInfo> getUndeployInfoList(Set<PTOperator> operators) {
List<OperatorDeployInfo> undeployList = new ArrayList<OperatorDeployInfo>(operators.size());
for (PTOperator node : operators) {
node.setState(State.PENDING_UNDEPLOY);
OperatorDeployInfo ndi = createOperatorDeployInfo(node);
long checkpointWindowId = node.getRecoveryCheckpoint();
if (checkpointWindowId > 0) {
LOG.debug("Operator {} recovery checkpoint {}", node.getId(), Codec.getStringWindowId(checkpointWindowId));
ndi.checkpointWindowId = checkpointWindowId;
}
undeployList.add(ndi);
}
return undeployList;
}
/**
* Create deploy info for operator.
* <p>
*
* @param dnodeId
* @param nodeDecl
* @return {@link com.malhartech.stram.OperatorDeployInfo}
*
*/
private OperatorDeployInfo createOperatorDeployInfo(PTOperator node)
{
OperatorDeployInfo ndi = new OperatorDeployInfo();
Operator operator = node.getOperatorMeta().getOperator();
ndi.type = (operator instanceof InputOperator) ? OperatorDeployInfo.OperatorType.INPUT : OperatorDeployInfo.OperatorType.GENERIC;
if (node.merge != null) {
operator = node.merge;
ndi.type = OperatorDeployInfo.OperatorType.UNIFIER;
} else if (node.partition != null) {
operator = node.partition.getOperator();
}
StorageAgent agent = node.getOperatorMeta().getAttributes().attr(OperatorContext.STORAGE_AGENT).get();
if (agent == null) {
String appPath = getInitContext().applicationAttributes.attrValue(LogicalPlan.STRAM_APP_PATH, "app-dfs-path-not-configured");
agent = new HdfsBackupAgent(new Configuration(), appPath + "/" + LogicalPlan.SUBDIR_CHECKPOINTS);
}
try {
OutputStream stream = agent.getSaveStream(node.getId(), -1);
Node.storeOperator(stream, operator);
stream.close();
}
catch (Exception e) {
throw new RuntimeException("Failed to serialize and distribute " + operator + "(" + operator.getClass() + ")", e);
}
ndi.declaredId = node.getOperatorMeta().getId();
ndi.id = node.getId();
ndi.contextAttributes = node.getOperatorMeta().getAttributes();
return ndi;
}
public ContainerInfo getContainerInfo() {
ContainerInfo ci = new ContainerInfo();
ci.id = container.containerId;
ci.host = container.host;
ci.state = container.getState().name();
ci.jvmName = this.jvmName;
ci.numOperators = container.operators.size();
ci.memoryMBAllocated = container.getAllocatedMemoryMB();
ci.lastHeartbeat = lastHeartbeatMillis;
ci.memoryMBFree = this.memoryMBFree;
return ci;
}
}
| engine/src/main/java/com/malhartech/stram/StramChildAgent.java | /**
* Copyright (c) 2012-2012 Malhar, Inc.
* All rights reserved.
*/
package com.malhartech.stram;
import java.io.ByteArrayOutputStream;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.malhartech.api.InputOperator;
import com.malhartech.api.Operator;
import com.malhartech.api.StorageAgent;
import com.malhartech.bufferserver.util.Codec;
import com.malhartech.engine.Node;
import com.malhartech.engine.OperatorContext;
import com.malhartech.stram.OperatorDeployInfo.InputDeployInfo;
import com.malhartech.stram.OperatorDeployInfo.OutputDeployInfo;
import com.malhartech.stram.PhysicalPlan.PTContainer;
import com.malhartech.stram.PhysicalPlan.PTInput;
import com.malhartech.stram.PhysicalPlan.PTOperator;
import com.malhartech.stram.PhysicalPlan.PTOperator.State;
import com.malhartech.stram.PhysicalPlan.PTOutput;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.ContainerHeartbeatResponse;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StramToNodeRequest;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingContainerContext;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingNodeHeartbeat;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingNodeHeartbeat.DNodeState;
import com.malhartech.stram.plan.logical.LogicalPlan;
import com.malhartech.stram.plan.logical.LogicalPlan.InputPortMeta;
import com.malhartech.stram.plan.logical.LogicalPlan.StreamMeta;
import com.malhartech.stram.webapp.ContainerInfo;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
/**
*
* Representation of a child container in the master<p>
* <br>
*/
public class StramChildAgent {
private static final Logger LOG = LoggerFactory.getLogger(StramChildAgent.class);
public static class ContainerStartRequest {
final PTContainer container;
ContainerStartRequest(PTContainer container) {
this.container = container;
}
}
static class MovingAverageLong {
private final int periods;
private final long[] values;
private int index = 0;
private boolean filled = false;
MovingAverageLong(int periods) {
this.periods = periods;
this.values = new long[periods];
}
void add(long val) {
values[index++] = val;
if (index == periods) {
filled = true;
}
index %= periods;
}
long getAvg() {
long sum = 0;
for (int i=0; i<periods; i++) {
sum += values[i];
}
if (!filled) {
return index == 0 ? 0 : sum/index;
} else {
return sum/periods;
}
}
}
// Generics don't work with numbers. Hence this mess.
static class MovingAverageDouble {
private final int periods;
private final double[] values;
private int index = 0;
private boolean filled = false;
MovingAverageDouble(int periods) {
this.periods = periods;
this.values = new double[periods];
}
void add(double val) {
values[index++] = val;
if (index == periods) {
filled = true;
}
index %= periods;
}
double getAvg() {
double sum = 0;
for (int i=0; i<periods; i++) {
sum += values[i];
}
if (!filled) {
return index == 0 ? 0 : sum/index;
} else {
return sum/periods;
}
}
}
protected class OperatorStatus
{
StreamingNodeHeartbeat lastHeartbeat;
final PTOperator operator;
long totalTuplesProcessed;
long totalTuplesEmitted;
long currentWindowId;
MovingAverageLong tuplesProcessedPSMA10 = new MovingAverageLong(10);
MovingAverageLong tuplesEmittedPSMA10 = new MovingAverageLong(10);
MovingAverageDouble cpuPercentageMA10 = new MovingAverageDouble(10);
MovingAverageLong latencyMA = new MovingAverageLong(10);
List<String> recordingNames; // null if recording is not in progress
Map<String, PortStatus> inputPortStatusList = new HashMap<String, PortStatus>();
Map<String, PortStatus> outputPortStatusList = new HashMap<String, PortStatus>();
private OperatorStatus(PTOperator operator) {
this.operator = operator;
for (PTInput ptInput: operator.inputs) {
PortStatus inputPortStatus = new PortStatus();
inputPortStatus.portName = ptInput.portName;
inputPortStatusList.put(ptInput.portName, inputPortStatus);
}
for (PTOutput ptOutput: operator.outputs) {
PortStatus outputPortStatus = new PortStatus();
outputPortStatus.portName = ptOutput.portName;
outputPortStatusList.put(ptOutput.portName, outputPortStatus);
}
}
public boolean isIdle()
{
if ((lastHeartbeat != null && DNodeState.IDLE.name().equals(lastHeartbeat.getState()))) {
return true;
}
return false;
}
}
public class PortStatus
{
String portName;
long totalTuples = 0;
MovingAverageLong tuplesPSMA10 = new MovingAverageLong(10);
MovingAverageLong bufferServerBytesPSMA10 = new MovingAverageLong(10); // TBD
}
public StramChildAgent(PTContainer container, StreamingContainerContext initCtx) {
this.container = container;
this.initCtx = initCtx;
this.operators = new HashMap<Integer, OperatorStatus>(container.operators.size());
this.memoryMBFree = this.container.getAllocatedMemoryMB();
}
  // set when a shutdown has been requested for this container
  boolean shutdownRequested = false;
  boolean isComplete = false;
  // timestamp (ms) of the last heartbeat received from the container
  long lastHeartbeatMillis = 0;
  //long lastCheckpointRequestMillis = 0;
  long createdMillis = System.currentTimeMillis();
  final PTContainer container;
  // operator id -> status, populated lazily from heartbeats
  Map<Integer, OperatorStatus> operators;
  final StreamingContainerContext initCtx;
  // callback to run once the container acknowledges the previous response
  Runnable onAck = null;
  String jvmName;
  int memoryMBFree;
  // requests queued for delivery to operators in this container
  private final ConcurrentLinkedQueue<StramToNodeRequest> operatorRequests = new ConcurrentLinkedQueue<StramToNodeRequest>();
  /** @return the context handed to the container on initialization */
  public StreamingContainerContext getInitContext() {
    return initCtx;
  }
public boolean hasPendingWork() {
return this.onAck != null || !container.pendingDeploy.isEmpty() || !container.pendingUndeploy.isEmpty();
}
private void ackPendingRequest() {
if (onAck != null) {
onAck.run();
onAck = null;
}
}
protected OperatorStatus updateOperatorStatus(StreamingNodeHeartbeat shb) {
OperatorStatus status = this.operators.get(shb.getNodeId());
if (status == null) {
for (PTOperator operator : container.operators) {
if (operator.getId() == shb.getNodeId()) {
status = new OperatorStatus(operator);
operators.put(shb.getNodeId(), status);
}
}
}
if (status != null && !container.pendingDeploy.isEmpty()) {
if (status.operator.getState() == PTOperator.State.PENDING_DEPLOY) {
// remove operator from deploy list only if not scheduled of undeploy (or redeploy) again
if (!container.pendingUndeploy.contains(status.operator) && container.pendingDeploy.remove(status.operator)) {
LOG.debug("{} marking deployed: {} remote status {}", new Object[] {container.containerId, status.operator, shb.getState()});
status.operator.setState(PTOperator.State.ACTIVE);
}
}
LOG.debug("{} pendingDeploy {}", container.containerId, container.pendingDeploy);
}
return status;
}
  /**
   * Queue a request for delivery to an operator in this container on the
   * next heartbeat exchange.
   *
   * @param r request to enqueue
   */
  public void addOperatorRequest(StramToNodeRequest r) {
    LOG.info("Adding operator request {} {}", container.containerId, r);
    this.operatorRequests.add(r);
  }
  /**
   * @return the live queue of pending operator requests (not a copy; the
   *         caller drains it during heartbeat processing)
   */
  @SuppressWarnings("ReturnOfCollectionOrArrayField")
  protected ConcurrentLinkedQueue<StramToNodeRequest> getOperatorRequests() {
    return this.operatorRequests;
  }
  /**
   * Build the next heartbeat response for the container: undeploy requests
   * take precedence over deploy requests so operators are removed before
   * replacements are installed.
   *
   * @return the response to send, or null if there is nothing to request
   */
  public ContainerHeartbeatResponse pollRequest() {
    // complete the previous request/response cycle before issuing a new one
    ackPendingRequest();

    if (!this.container.pendingUndeploy.isEmpty()) {
      ContainerHeartbeatResponse rsp = new ContainerHeartbeatResponse();
      // snapshot: pendingUndeploy may change before the container acks
      final Set<PTOperator> toUndeploy = Sets.newHashSet(this.container.pendingUndeploy);
      List<OperatorDeployInfo> nodeList = getUndeployInfoList(toUndeploy);
      rsp.undeployRequest = nodeList;
      rsp.hasPendingRequests = (!this.container.pendingDeploy.isEmpty());
      // deferred until the container confirms the undeploy
      this.onAck = new Runnable() {
        @Override
        public void run() {
          // remove operators from undeploy list to not request it again
          container.pendingUndeploy.removeAll(toUndeploy);
          for (PTOperator operator : toUndeploy) {
            operator.setState(PTOperator.State.INACTIVE);
          }
          LOG.debug("{} undeploy complete: {} deploy: {}", new Object[] {container.containerId, toUndeploy, container.pendingDeploy});
        }
      };
      return rsp;
    }
    if (!this.container.pendingDeploy.isEmpty()) {
      // ask the plan which of the pending operators are deployable right now
      Set<PTOperator> deployOperators = this.container.plan.getOperatorsForDeploy(this.container);
      LOG.debug("container {} deployable operators: {}", container.containerId, deployOperators);
      ContainerHeartbeatResponse rsp = new ContainerHeartbeatResponse();
      List<OperatorDeployInfo> deployList = getDeployInfoList(deployOperators);
      if (deployList != null && !deployList.isEmpty()) {
        rsp.deployRequest = deployList;
        rsp.nodeRequests = Lists.newArrayList();
        for (PTOperator o : deployOperators) {
          rsp.nodeRequests.addAll(o.deployRequests);
        }
      }
      rsp.hasPendingRequests = false;
      return rsp;
    }
    return null;
  }
boolean isIdle() {
if (this.hasPendingWork()) {
// container may have no active operators but deploy request pending
return false;
}
for (OperatorStatus operatorStatus : this.operators.values()) {
if (!operatorStatus.isIdle()) {
return false;
}
}
return true;
}
  /**
   * This method is only used for testing.
   *
   * @return deploy info for the operators currently pending deploy
   */
  public List<OperatorDeployInfo> getDeployInfo() {
    return getDeployInfoList(container.pendingDeploy);
  }
  /**
   * Create deploy info for StramChild.
   * <p>
   * First pass creates the per-operator deploy records and registers every
   * output (publisher); second pass wires each input either inline (same
   * container) or through the buffer server.
   *
   * @param operators candidate operators (only NEW/INACTIVE are deployed)
   * @return list of per-operator deploy records
   */
  private List<OperatorDeployInfo> getDeployInfoList(Set<PTOperator> operators) {

    if (container.bufferServerAddress == null) {
      throw new IllegalStateException("No buffer server address assigned");
    }

    // LinkedHashMap preserves deploy ordering in the returned list
    Map<OperatorDeployInfo, PTOperator> nodes = new LinkedHashMap<OperatorDeployInfo, PTOperator>();
    // keyed by "<operatorId>/<streamId>" for subscriber lookup in pass two
    Map<String, OutputDeployInfo> publishers = new LinkedHashMap<String, OutputDeployInfo>();

    for (PTOperator node : operators) {
      if (node.getState() != State.NEW && node.getState() != State.INACTIVE) {
        LOG.debug("Skipping deploy for operator {} state {}", node, node.getState());
        continue;
      }
      node.setState(State.PENDING_DEPLOY);
      OperatorDeployInfo ndi = createOperatorDeployInfo(node);

      long checkpointWindowId = node.getRecoveryCheckpoint();
      if (checkpointWindowId > 0) {
        LOG.debug("Operator {} recovery checkpoint {}", node.getId(), Codec.getStringWindowId(checkpointWindowId));
        ndi.checkpointWindowId = checkpointWindowId;
      }
      nodes.put(ndi, node);
      ndi.inputs = new ArrayList<InputDeployInfo>(node.inputs.size());
      ndi.outputs = new ArrayList<OutputDeployInfo>(node.outputs.size());

      for (PTOutput out : node.outputs) {
        final StreamMeta streamMeta = out.logicalStream;
        // buffer server or inline publisher
        OutputDeployInfo portInfo = new OutputDeployInfo();
        portInfo.declaredStreamId = streamMeta.getId();
        portInfo.portName = out.portName;
        portInfo.contextAttributes = streamMeta.getSource().getAttributes();

        if (ndi.type == OperatorDeployInfo.OperatorType.UNIFIER) {
          // input attributes of the downstream operator
          for (InputPortMeta sink : streamMeta.getSinks()) {
            portInfo.contextAttributes = sink.getAttributes();
            break;
          }
        }

        if (!out.isDownStreamInline()) {
          // non-inline stream: publish through this container's buffer server
          portInfo.bufferServerHost = node.container.bufferServerAddress.getHostName();
          portInfo.bufferServerPort = node.container.bufferServerAddress.getPort();
          if (streamMeta.getCodecClass() != null) {
            portInfo.serDeClassName = streamMeta.getCodecClass().getName();
          }
        }

        ndi.outputs.add(portInfo);
        publishers.put(node.getId() + "/" + streamMeta.getId(), portInfo);
      }
    }

    // after we know all publishers within container, determine subscribers

    for (Map.Entry<OperatorDeployInfo, PTOperator> nodeEntry : nodes.entrySet()) {
      OperatorDeployInfo ndi = nodeEntry.getKey();
      PTOperator node = nodeEntry.getValue();
      for (PTInput in : node.inputs) {
        final StreamMeta streamMeta = in.logicalStream;
        if (streamMeta.getSource() == null) {
          throw new AssertionError("source is null: " + in);
        }
        PTOutput sourceOutput = in.source;

        InputDeployInfo inputInfo = new InputDeployInfo();
        inputInfo.declaredStreamId = streamMeta.getId();
        inputInfo.portName = in.portName;
        // find the logical input port matching this stream to copy attributes
        for (Map.Entry<InputPortMeta, StreamMeta> e : node.getOperatorMeta().getInputStreams().entrySet()) {
          if (e.getValue() == streamMeta) {
            inputInfo.contextAttributes = e.getKey().getAttributes();
          }
        }
        if (inputInfo.contextAttributes == null && ndi.type == OperatorDeployInfo.OperatorType.UNIFIER) {
          // unifier inputs fall back to the upstream source's attributes
          inputInfo.contextAttributes = in.source.logicalStream.getSource().getAttributes();
        }

        inputInfo.sourceNodeId = sourceOutput.source.getId();
        inputInfo.sourcePortName = sourceOutput.portName;
        if (in.partitions != null && in.partitions.mask != 0) {
          inputInfo.partitionMask = in.partitions.mask;
          inputInfo.partitionKeys = in.partitions.partitions;
        }

        if (sourceOutput.source.container == node.container) {
          // inline input (both operators in same container)
          OutputDeployInfo outputInfo = publishers.get(sourceOutput.source.getId() + "/" + streamMeta.getId());
          if (outputInfo == null) {
            throw new AssertionError("Missing publisher for inline stream " + sourceOutput);
          }
        } else {
          // buffer server input
          InetSocketAddress addr = sourceOutput.source.container.bufferServerAddress;
          if (addr == null) {
            throw new AssertionError("upstream address not assigned: " + sourceOutput);
          }
          inputInfo.bufferServerHost = addr.getHostName();
          inputInfo.bufferServerPort = addr.getPort();
          if (streamMeta.getCodecClass() != null) {
            inputInfo.serDeClassName = streamMeta.getCodecClass().getName();
          }
        }
        ndi.inputs.add(inputInfo);
      }
    }

    return new ArrayList<OperatorDeployInfo>(nodes.keySet());
  }
/**
* Create operator undeploy request for StramChild. Since the physical plan
* could have been modified (dynamic partitioning etc.), stream information
* cannot be provided. StramChild keeps track of connected streams and removes
* them along with the operator instance.
*
* @param operators
* @return
*/
private List<OperatorDeployInfo> getUndeployInfoList(Set<PTOperator> operators) {
List<OperatorDeployInfo> undeployList = new ArrayList<OperatorDeployInfo>(operators.size());
for (PTOperator node : operators) {
node.setState(State.PENDING_UNDEPLOY);
OperatorDeployInfo ndi = createOperatorDeployInfo(node);
long checkpointWindowId = node.getRecoveryCheckpoint();
if (checkpointWindowId > 0) {
LOG.debug("Operator {} recovery checkpoint {}", node.getId(), Codec.getStringWindowId(checkpointWindowId));
ndi.checkpointWindowId = checkpointWindowId;
}
undeployList.add(ndi);
}
return undeployList;
}
/**
* Create deploy info for operator.
* <p>
*
* @param dnodeId
* @param nodeDecl
* @return {@link com.malhartech.stram.OperatorDeployInfo}
*
*/
private OperatorDeployInfo createOperatorDeployInfo(PTOperator node)
{
OperatorDeployInfo ndi = new OperatorDeployInfo();
Operator operator = node.getOperatorMeta().getOperator();
ndi.type = (operator instanceof InputOperator) ? OperatorDeployInfo.OperatorType.INPUT : OperatorDeployInfo.OperatorType.GENERIC;
if (node.merge != null) {
operator = node.merge;
ndi.type = OperatorDeployInfo.OperatorType.UNIFIER;
} else if (node.partition != null) {
operator = node.partition.getOperator();
}
StorageAgent agent = node.getOperatorMeta().getAttributes().attr(OperatorContext.STORAGE_AGENT).get();
if (agent == null) {
String appPath = getInitContext().applicationAttributes.attrValue(LogicalPlan.STRAM_APP_PATH, "app-dfs-path-not-configured");
agent = new HdfsBackupAgent(new Configuration(), appPath + "/" + LogicalPlan.SUBDIR_CHECKPOINTS);
}
try {
OutputStream stream = agent.getSaveStream(node.getId(), -1);
Node.storeOperator(stream, operator);
stream.close();
}
catch (Exception e) {
throw new RuntimeException("Failed to initialize " + operator + "(" + operator.getClass() + ")", e);
}
ndi.declaredId = node.getOperatorMeta().getId();
ndi.id = node.getId();
ndi.contextAttributes = node.getOperatorMeta().getAttributes();
return ndi;
}
  /**
   * Snapshot of this container's runtime state for reporting (e.g. web
   * service / CLI views).
   *
   * @return a freshly populated {@code ContainerInfo}
   */
  public ContainerInfo getContainerInfo() {
    ContainerInfo ci = new ContainerInfo();
    ci.id = container.containerId;
    ci.host = container.host;
    ci.state = container.getState().name();
    ci.jvmName = this.jvmName;
    ci.numOperators = container.operators.size();
    ci.memoryMBAllocated = container.getAllocatedMemoryMB();
    // timestamp (ms) of the last heartbeat received from this container
    ci.lastHeartbeat = lastHeartbeatMillis;
    ci.memoryMBFree = this.memoryMBFree;
    return ci;
  }
}
| debug
| engine/src/main/java/com/malhartech/stram/StramChildAgent.java | debug | <ide><path>ngine/src/main/java/com/malhartech/stram/StramChildAgent.java
<ide> stream.close();
<ide> }
<ide> catch (Exception e) {
<del> throw new RuntimeException("Failed to initialize " + operator + "(" + operator.getClass() + ")", e);
<add> throw new RuntimeException("Failed to serialize and distribute " + operator + "(" + operator.getClass() + ")", e);
<ide> }
<ide> ndi.declaredId = node.getOperatorMeta().getId();
<ide> ndi.id = node.getId(); |
|
Java | apache-2.0 | 4bc10269960026df015834c88757c4a60f814ddc | 0 | cfieber/clouddriver,ajordens/clouddriver,cfieber/clouddriver,duftler/clouddriver,ajordens/clouddriver,cfieber/clouddriver,duftler/clouddriver,ajordens/clouddriver,spinnaker/clouddriver,duftler/clouddriver,spinnaker/clouddriver,spinnaker/clouddriver,ajordens/clouddriver,duftler/clouddriver | /*
* Copyright 2017 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.spinnaker.clouddriver.kubernetes.v2.caching.view.provider;
import com.netflix.spinnaker.cats.cache.Cache;
import com.netflix.spinnaker.cats.cache.CacheData;
import com.netflix.spinnaker.cats.cache.RelationshipCacheFilter;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Helpers for reading Kubernetes entries and their relationships out of the
 * cats cache.
 */
public class KubernetesCacheUtils {
  private final Cache cache;

  public KubernetesCacheUtils(Cache cache) {
    this.cache = cache;
  }

  /** @return all cache entries of the given type */
  public Collection<CacheData> getAllKeys(String type) {
    return cache.getAll(type);
  }

  /** @return the entry stored under {@code key} for {@code type}, or null */
  public CacheData getSingleEntry(String type, String key) {
    return cache.get(type, key);
  }

  /**
   * Loads all entries of type {@code to} reachable from the given keys of
   * type {@code from} via cached relationships.
   */
  public Collection<CacheData> getTransitiveRelationship(String from, List<String> sourceKeys, String to) {
    Collection<CacheData> sourceData = cache.getAll(from, sourceKeys, RelationshipCacheFilter.include(to));
    if (sourceData == null) {
      return Collections.emptyList();
    }

    return cache.getAll(to, sourceData.stream()
        .filter(Objects::nonNull)
        .map(CacheData::getRelationships)
        .filter(Objects::nonNull)
        .map(r -> r.get(to))
        // an entry may have no relationships of the requested type
        .filter(Objects::nonNull)
        .flatMap(Collection::stream)
        .collect(Collectors.toList()));
  }

  /**
   * Loads the entries related to {@code sources} under the given
   * relationship type.
   */
  public Collection<CacheData> loadRelationshipsFromCache(Collection<CacheData> sources, String relationshipType) {
    List<String> keys = sources.stream()
        .filter(Objects::nonNull)
        .map(CacheData::getRelationships)
        .filter(Objects::nonNull)
        .map(r -> r.get(relationshipType))
        // an entry may have no relationships of the requested type
        .filter(Objects::nonNull)
        .flatMap(Collection::stream)
        .collect(Collectors.toList());

    return cache.getAll(relationshipType, keys);
  }
}
| clouddriver-kubernetes/src/main/groovy/com/netflix/spinnaker/clouddriver/kubernetes/v2/caching/view/provider/KubernetesCacheUtils.java | /*
* Copyright 2017 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.spinnaker.clouddriver.kubernetes.v2.caching.view.provider;
import com.netflix.spinnaker.cats.cache.Cache;
import com.netflix.spinnaker.cats.cache.CacheData;
import com.netflix.spinnaker.cats.cache.RelationshipCacheFilter;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Helpers for reading Kubernetes entries and their relationships out of the
 * cats cache.
 */
public class KubernetesCacheUtils {
  private final Cache cache;

  public KubernetesCacheUtils(Cache cache) {
    this.cache = cache;
  }

  /** @return all cache entries of the given type */
  public Collection<CacheData> getAllKeys(String type) {
    return cache.getAll(type);
  }

  /** @return the entry stored under {@code key} for {@code type}, or null */
  public CacheData getSingleEntry(String type, String key) {
    return cache.get(type, key);
  }

  /**
   * Loads all entries of type {@code to} reachable from the given keys of
   * type {@code from} via cached relationships.
   */
  public Collection<CacheData> getTransitiveRelationship(String from, List<String> sourceKeys, String to) {
    Collection<CacheData> sourceData = cache.getAll(from, sourceKeys, RelationshipCacheFilter.include(to));
    if (sourceData == null) {
      return Collections.emptyList();
    }

    return cache.getAll(to, sourceData.stream()
        // guard against null entries and entries without relationships of this type
        .filter(cd -> cd != null && cd.getRelationships() != null)
        .map(cd -> cd.getRelationships().get(to))
        .filter(rel -> rel != null)
        .flatMap(Collection::stream)
        .collect(Collectors.toList()));
  }

  /**
   * Loads the entries related to {@code sources} under the given
   * relationship type.
   */
  public Collection<CacheData> loadRelationshipsFromCache(Collection<CacheData> sources, String relationshipType) {
    List<String> keys = sources.stream()
        // guard against null entries and entries without relationships of this type
        .filter(cd -> cd != null && cd.getRelationships() != null)
        .map(cd -> cd.getRelationships().get(relationshipType))
        .filter(rel -> rel != null)
        .flatMap(Collection::stream)
        .collect(Collectors.toList());

    return cache.getAll(relationshipType, keys);
  }
}
| fix(provider/kubernetes): v2 guard against empty cache relationships (#1932)
| clouddriver-kubernetes/src/main/groovy/com/netflix/spinnaker/clouddriver/kubernetes/v2/caching/view/provider/KubernetesCacheUtils.java | fix(provider/kubernetes): v2 guard against empty cache relationships (#1932) | <ide><path>louddriver-kubernetes/src/main/groovy/com/netflix/spinnaker/clouddriver/kubernetes/v2/caching/view/provider/KubernetesCacheUtils.java
<ide> import java.util.Collection;
<ide> import java.util.Collections;
<ide> import java.util.List;
<add>import java.util.Objects;
<ide> import java.util.stream.Collectors;
<ide>
<ide> public class KubernetesCacheUtils {
<ide> }
<ide>
<ide> return cache.getAll(to, sourceData.stream()
<del> .map(cd -> cd.getRelationships().get(to))
<add> .filter(Objects::nonNull)
<add> .map(CacheData::getRelationships)
<add> .filter(Objects::nonNull)
<add> .map(r -> r.get(to))
<ide> .flatMap(Collection::stream)
<ide> .collect(Collectors.toList()));
<ide> }
<ide>
<ide> public Collection<CacheData> loadRelationshipsFromCache(Collection<CacheData> sources, String relationshipType) {
<ide> List<String> keys = sources.stream()
<del> .map(cd -> cd.getRelationships().get(relationshipType))
<add> .filter(Objects::nonNull)
<add> .map(CacheData::getRelationships)
<add> .filter(Objects::nonNull)
<add> .map(r -> r.get(relationshipType))
<ide> .flatMap(Collection::stream)
<ide> .collect(Collectors.toList());
<ide> |
|
JavaScript | mit | 80410c5f89ef5e949726e06a494a4559fb61fc5b | 0 | maxbbn/kissy-xtemplate,kissyteam/kissy-xtemplate,kissyteam/kissy-xtemplate | /**
*
* @author: 橘子<[email protected]>
* @time: 12/18/13 18:08
* @description:
*/
var fs = require('fs');
var path = require('path');
var iconv = require('iconv-lite');
var jsBeautify = require('js-beautify').js_beautify;
var mkdirp = require('mkdirp');
var pkgInfo = require('../package.json');
/**
 * Compiler wrapper around a bundled KISSY seed + xtemplate compiler.
 *
 * @param {Object} [cfg] options:
 *   - version: xtemplate version to use (default '1.4.2'); must have a
 *     matching directory under ./xtemplate/
 *   - type: 'xtpl' (default) or 'tpl'
 *   - inputCharset / outputCharset: iconv charsets (default 'utf8')
 * @constructor
 */
function XTemplate(cfg){
    cfg = cfg || {};
    var defVersion = '1.4.2';
    cfg.version = cfg.version || defVersion;
    // anything other than 'tpl' normalizes to 'xtpl'
    cfg.type = cfg.type === 'tpl' ? 'tpl' : 'xtpl';
    if(!fs.existsSync(path.resolve(__dirname, './xtemplate/', cfg.version))){
        throw new Error('version ' + cfg.version + ' not supported yet. Please submit a new issue at ' + pkgInfo.bugs.url);
    }
    cfg.inputCharset = cfg.inputCharset || 'utf8';
    cfg.outputCharset = cfg.outputCharset || 'utf8';
    this.kissy = require('./kissy/seed');
    this.kissy.Config.packages = {};
    // '1.4.2' -> '1.42' so KISSY sees a numeric-style version
    this.kissy.version = cfg.version.replace(/(\d\.\d)\.(\d)/, '$1$2');
    // drop previously registered xtemplate modules so a different version
    // can be loaded into the same (cached) KISSY instance
    if(this.kissy.Env && this.kissy.Env.mods){
        for(var modName in this.kissy.Env.mods){
            if(this.kissy.Env.mods.hasOwnProperty(modName) && /^xtemplate\//.test(modName)){
                delete this.kissy.Env.mods[modName];
            }
        }
    }
    this.kissy.config({
        packages: {
            xtemplate: {
                base: path.resolve(__dirname, './xtemplate/' + cfg.version),
                ignorePackageNameInUri: true
            }
        }
    });
    this.compiler = this.kissy.nodeRequire('xtemplate/compiler');
    this.cfg = cfg;
}
/**
 * Pretty-print generated module code with js-beautify (4-space indent,
 * collapsed braces).
 *
 * @param {String} str raw generated JavaScript
 * @returns {String} beautified JavaScript
 */
XTemplate.prototype._beautify = function (str) {
    return jsBeautify(str, {
        'indent_size': '4',
        'indent_char': ' ',
        'preserve_newlines': true,
        'brace_style': 'collapse',
        'keep_array_indentation': false,
        'space_after_anon_function': true
    });
};
/**
 * Compile an xtpl template buffer into a KISSY module source string.
 *
 * Versions below 1.5.0 return the compiled function directly; 1.5.0+
 * additionally assign module.name to t.TPL_NAME before returning it.
 *
 * @param {Buffer} tplContent raw template bytes
 * @param {String} inputCharset charset to decode tplContent with
 * @param {String} outputCharset charset to encode the result with
 * @returns {Buffer} encoded module source
 */
XTemplate.prototype._compile = function (tplContent, inputCharset, outputCharset) {
    tplContent = iconv.decode(tplContent, inputCharset);
    var isBlow150 = parseInt(this.cfg.version.replace(/\./g, ''), 10) < 150;
    // NOTE: the _beautify(...) call closes after compileToStr(...), so the
    // trailing ';', the TPL_NAME assignment and '});' are appended raw
    // (un-beautified) to the beautified prefix.
    var moduleCode = this._beautify(
        '/** Compiled By KISSY-XTemplate */\n' +
        'KISSY.add(function(S,require,exports,module){\n' +
        '/*jshint quotmark:false, loopfunc:true, indent:false, asi:true, unused:false, boss:true*/\n' +
        (isBlow150 ? 'return ' : 'var t = ') + this.compiler.compileToStr(tplContent)) + ';\n' +
        (isBlow150 ? '' : 't.TPL_NAME = module.name;\nreturn t;\n') +
        '});';
    return iconv.encode(moduleCode, outputCharset);
};
/**
 * Wrap a raw template as a KISSY module that registers the template string.
 *
 * @param {Buffer} tplContent raw template bytes
 * @param {String} inputCharset charset to decode tplContent with
 * @param {String} outputCharset charset to encode the result with
 * @returns {Buffer} encoded module source
 */
XTemplate.prototype._tpl2mod = function (tplContent, inputCharset, outputCharset){
    tplContent = iconv.decode(tplContent, inputCharset);
    // Escape backslashes first ('\\\\' emits a literal \\), then newlines and
    // single quotes, so the template survives embedding in a single-quoted
    // JS string literal. (Replacing \ with '\\' was a no-op and left
    // backslashes unescaped in the generated code.)
    tplContent = tplContent.replace(/\\/g, '\\\\')
        .replace(/\r?\n/g, '\\n')
        .replace(/'/g, '\\\'');
    var moduleCode = '' +
        '/*\n' +
        ' Generated by kissy-tpl2mod.' +
        '*/\n' +
        "KISSY.add('" + tplContent + "');";
    return iconv.encode(moduleCode, outputCharset);
};
/**
 * Asynchronously compile a template file, an array of files, or every file
 * in a directory into KISSY modules under dest.
 *
 * dest may be a directory (module name derived from the source basename,
 * '.xtpl.html' -> '-xtpl.js') or an explicit '.js' file path.
 *
 * @param {String|Array} src file, directory, or array of either
 * @param {String} dest target directory or .js file path
 */
XTemplate.prototype.compile = function(src, dest){
    var self = this;
    if(Array.isArray(src)){
        src.forEach(function(s){
            self.compile(s, dest);
        });
    }else{
        fs.exists(src, function(exists){
            if(!exists){
                throw new Error('file ' + src + ' not exists.');
            }else{
                // NOTE(review): the err argument of fs.stat/fs.readFile below
                // is not checked before use — verify intended behavior.
                fs.stat(src, function(err, stat){
                    if(stat.isDirectory()){
                        // recurse into each entry of the directory
                        fs.readdir(src, function(err, files){
                            if(err) throw new Error(err);
                            files.forEach(function(file){
                                self.compile(path.resolve(src, file), dest);
                            });
                        });
                    }else{
                        fs.readFile(src, function(err, moduleCode){
                            // 'xtpl' mode compiles everything; otherwise pick
                            // the transform by file suffix
                            if (self.cfg.type === 'xtpl' || src.match(/\.xtpl\.html$/)) {
                                moduleCode = self._compile(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
                            } else if (src.match(/\.tpl\.html$/)) {
                                moduleCode = self._tpl2mod(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
                            }
                            fs.exists(dest, function(destExists){
                                var generate = function(destPath){
                                    fs.stat(destPath, function(err, destStat){
                                        var modulePath;
                                        // directory dest: derive module filename from src
                                        if(path.extname(dest) !== '.js' && destStat.isDirectory()){
                                            modulePath = path.resolve(dest, path.basename(src, '.html').replace(/\.(x?tpl)$/, '-$1') + '.js');
                                        }else{
                                            modulePath = destPath;
                                        }
                                        fs.writeFile(modulePath, moduleCode);
                                    });
                                };
                                // create the destination directory on demand
                                if(path.extname(dest) !== '.js' && !destExists){
                                    mkdirp(dest, function(err){
                                        if(err) throw new Error(err);
                                        generate(dest);
                                    });
                                }else{
                                    generate(dest);
                                }
                            });
                        });
                    }
                });
            }
        });
    }
};
/**
 * Synchronous variant of compile(): compile a file, array, or directory of
 * templates into KISSY modules under dest, logging each generated module.
 *
 * @param {String|Array} src file, directory, or array of either
 * @param {String} dest target directory or .js file path
 */
XTemplate.prototype.compileSync = function(src, dest){
    var self = this;
    if(Array.isArray(src)){
        src.forEach(function(s){
            self.compileSync(s, dest);
        });
    }else{
        if(fs.existsSync(src)){
            if(fs.statSync(src).isDirectory()){
                // recurse into each entry of the directory
                var files = fs.readdirSync(src);
                if(files.length){
                    files.forEach(function(filePath){
                        self.compileSync(path.resolve(src, filePath), dest);
                    });
                }
            }else{
                var moduleCode = fs.readFileSync(src);
                // 'xtpl' mode compiles everything; otherwise pick the
                // transform by file suffix
                if (self.cfg.type === 'xtpl' || src.match(/\.xtpl\.html$/)) {
                    moduleCode = self._compile(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
                } else if (src.match(/\.tpl\.html$/)) {
                    moduleCode = self._tpl2mod(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
                }
                var modulePath;
                // create the destination directory on demand
                if(path.extname(dest) !== '.js' && !fs.existsSync(dest)){
                    mkdirp.sync(dest);
                }
                // directory dest: derive module filename from src
                if(path.extname(dest) !== '.js' && fs.statSync(dest).isDirectory()){
                    modulePath = path.resolve(dest, path.basename(src, '.html').replace(/\.(x?tpl)$/, '-$1') + '.js');
                }else{
                    modulePath = dest;
                }
                fs.writeFileSync(modulePath, moduleCode);
                console.info('generate module: ' + modulePath);
            }
        }else{
            throw new Error('file ' + src + ' not exists.');
        }
    }
};

module.exports = XTemplate;
*
* @author: 橘子<[email protected]>
* @time: 12/18/13 18:08
* @description:
*/
var fs = require('fs');
var path = require('path');
var iconv = require('iconv-lite');
var jsBeautify = require('js-beautify').js_beautify;
var mkdirp = require('mkdirp');
var pkgInfo = require('../package.json');
function XTemplate(cfg){
cfg = cfg || {};
var defVersion = '1.4.2';
cfg.version = cfg.version || defVersion;
if(!fs.existsSync(path.resolve(__dirname, './xtemplate/', cfg.version))){
throw new Error('version ' + cfg.version + ' not supported yet. Please submit a new issue at ' + pkgInfo.bugs.url);
}
cfg.inputCharset = cfg.inputCharset || 'utf8';
cfg.outputCharset = cfg.outputCharset || 'utf8';
this.kissy = require('./kissy/seed');
this.kissy.Config.packages = {};
this.kissy.version = cfg.version.replace(/(\d\.\d)\.(\d)/, '$1$2');
if(this.kissy.Env && this.kissy.Env.mods){
for(var modName in this.kissy.Env.mods){
if(this.kissy.Env.mods.hasOwnProperty(modName) && /^xtemplate\//.test(modName)){
delete this.kissy.Env.mods[modName];
}
}
}
this.kissy.config({
packages: {
xtemplate: {
base: path.resolve(__dirname, './xtemplate/' + cfg.version),
ignorePackageNameInUri: true
}
}
});
this.compiler = this.kissy.nodeRequire('xtemplate/compiler');
this.cfg = cfg;
}
XTemplate.prototype._beautify = function (str) {
var opts = {
'indent_size': '4',
'indent_char': ' ',
'preserve_newlines': true,
'brace_style': 'collapse',
'keep_array_indentation': false,
'space_after_anon_function': true
};
return jsBeautify(str, opts);
};
XTemplate.prototype._compile = function (tplContent, inputCharset, outputCharset) {
tplContent = iconv.decode(tplContent, inputCharset);
var isBlow150 = parseInt(this.cfg.version.replace(/\./g, ''), 10) < 150;
var moduleCode = this._beautify(
'/** Compiled By KISSY-XTemplate */\n' +
'KISSY.add(function(S,require,exports,module){\n' +
'/*jshint quotmark:false, loopfunc:true, indent:false, asi:true, unused:false, boss:true*/\n' +
(isBlow150 ? 'return ' : 'var t = ') + this.compiler.compileToStr(tplContent)) + ';\n' +
(isBlow150 ? '' : 't.TPL_NAME = module.name;\nreturn t;\n') +
'});';
return iconv.encode(moduleCode, outputCharset);
};
XTemplate.prototype._tpl2mod = function (tplContent, inputCharset, outputCharset){
tplContent = iconv.decode(tplContent, inputCharset);
tplContent = tplContent.replace(/\\/g, '\\')
.replace(/\r?\n/g, '\\n')
.replace(/'/g, '\\\'');
var moduleCode = '' +
'/*\n' +
' Generated by kissy-tpl2mod.' +
'*/\n' +
"KISSY.add('" + tplContent + "');";
return iconv.encode(moduleCode, outputCharset);
};
XTemplate.prototype.compile = function(src, dest){
var self = this;
if(Array.isArray(src)){
src.forEach(function(s){
self.compile(s, dest);
});
}else{
fs.exists(src, function(exists){
if(!exists){
throw new Error('file ' + src + ' not exists.');
}else{
fs.stat(src, function(err, stat){
if(stat.isDirectory()){
fs.readdir(src, function(err, files){
if(err) throw new Error(err);
files.forEach(function(file){
self.compile(path.resolve(src, file), dest);
});
});
}else{
fs.readFile(src, function(err, moduleCode){
if (src.match(/\.xtpl\.html$/)) {
moduleCode = self._compile(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
} else if (src.match(/\.tpl\.html$/)) {
moduleCode = self._tpl2mod(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
}
fs.exists(dest, function(destExists){
var generate = function(destPath){
fs.stat(destPath, function(err, destStat){
var modulePath;
if(path.extname(dest) !== '.js' && destStat.isDirectory()){
modulePath = path.resolve(dest, path.basename(src, '.html').replace(/\.(x?tpl)$/, '-$1') + '.js');
}else{
modulePath = destPath;
}
fs.writeFile(modulePath, moduleCode);
});
};
if(path.extname(dest) !== '.js' && !destExists){
mkdirp(dest, function(err){
if(err) throw new Error(err);
generate(dest);
});
}else{
generate(dest);
}
});
});
}
});
}
});
}
};
XTemplate.prototype.compileSync = function(src, dest){
var self = this;
if(Array.isArray(src)){
src.forEach(function(s){
self.compileSync(s, dest);
});
}else{
if(fs.existsSync(src)){
if(fs.statSync(src).isDirectory()){
var files = fs.readdirSync(src);
if(files.length){
files.forEach(function(filePath){
self.compileSync(path.resolve(src, filePath), dest);
});
}
}else{
var moduleCode = fs.readFileSync(src);
if (src.match(/\.xtpl\.html$/)) {
moduleCode = self._compile(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
} else if (src.match(/\.tpl\.html$/)) {
moduleCode = self._tpl2mod(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
}
var modulePath;
if(path.extname(dest) !== '.js' && !fs.existsSync(dest)){
mkdirp.sync(dest);
}
if(path.extname(dest) !== '.js' && fs.statSync(dest).isDirectory()){
modulePath = path.resolve(dest, path.basename(src, '.html').replace(/\.(x?tpl)$/, '-$1') + '.js');
}else{
modulePath = dest;
}
fs.writeFileSync(modulePath, moduleCode);
console.info('generate module: ' + modulePath);
}
}else{
throw new Error('file ' + src + ' not exists.');
}
}
};
module.exports = XTemplate; | add type support and default type is xtpl
| lib/xtemplate.js | add type support and default type is xtpl | <ide><path>ib/xtemplate.js
<ide> cfg = cfg || {};
<ide> var defVersion = '1.4.2';
<ide> cfg.version = cfg.version || defVersion;
<add> cfg.type = cfg.type === 'tpl' ? 'tpl' : 'xtpl';
<ide> if(!fs.existsSync(path.resolve(__dirname, './xtemplate/', cfg.version))){
<ide> throw new Error('version ' + cfg.version + ' not supported yet. Please submit a new issue at ' + pkgInfo.bugs.url);
<ide> }
<ide> });
<ide> }else{
<ide> fs.readFile(src, function(err, moduleCode){
<del> if (src.match(/\.xtpl\.html$/)) {
<add> if (self.cfg.type === 'xtpl' || src.match(/\.xtpl\.html$/)) {
<ide> moduleCode = self._compile(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
<ide> } else if (src.match(/\.tpl\.html$/)) {
<ide> moduleCode = self._tpl2mod(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
<ide> }
<ide> }else{
<ide> var moduleCode = fs.readFileSync(src);
<del> if (src.match(/\.xtpl\.html$/)) {
<add> if (self.cfg.type === 'xtpl' || src.match(/\.xtpl\.html$/)) {
<ide> moduleCode = self._compile(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset);
<ide> } else if (src.match(/\.tpl\.html$/)) {
<ide> moduleCode = self._tpl2mod(moduleCode, self.cfg.inputCharset, self.cfg.outputCharset); |
|
JavaScript | apache-2.0 | 14fb4e6e6bfffedf28e518bb45875634e757435d | 0 | fhchina/ui,ninja/ui,fhchina/ui | /*!
Ninja UI jQuery Plugin vdevelopment
http://ninjaui.com/
Copyright 2008-2011 Jamie Hoover
Licensed per the terms of the Apache License v2.0
http://ninjaui.com/license
*/
/*globals CFInstall: false*/
/*jshint bitwise: true, browser: true, curly: true, eqeqeq: true, forin: true, immed: true, indent: 2, jquery: true, maxerr: 3, newcap: true, noarg: true, noempty: true, nomen: true, nonew: true, onevar: true, plusplus: false, regexp: true, strict: true, undef: true, white: true*/
(function ($, window, document, undefined) {
'use strict';
var
browser = $.browser,
defaults,
objects,
methods,
time,
version = $.fn.jquery.split('.'),
versionMinor = parseFloat(version[1]),
versionIncrement = parseFloat(version[2] || '0');
if (versionMinor === 4 && versionIncrement < 3 || versionMinor < 4) {
$.error('Ninja UI requires jQuery 1.4.3 or higher.');
}
if (browser.msie && parseFloat(browser.version) < '9') {
$('<script/>', {
defer: '',
src: 'http://ajax.googleapis.com/ajax/libs/chrome-frame/1.0.3/CFInstall.min.js'
}).appendTo('head');
$(document).ready(function () {
CFInstall.check({
mode: 'overlay'
});
});
}
$('<link/>', {
rel: 'stylesheet',
href: '../src/ninjaui.css'
}).appendTo('head');
time = $.now();
function uniqueId() {
return time ++;
}
methods = {
    // Bind a handler for, or trigger, the 'attach.ninja' event.
    attach: function (callback) {
      return this.each(function () {
        var $object = $(this).ninja();
        if ($.isFunction(callback)) {
          $object.bind('attach.ninja', callback);
        } else {
          $object.trigger('attach.ninja');
        }
      });
    },
    // Bind a handler for, or trigger, the 'delist.ninja' event (closes lists).
    delist: function (callback) {
      return this.each(function () {
        var $object = $(this).ninja();
        if ($.isFunction(callback)) {
          $object.bind('delist.ninja', callback);
        } else {
          $object.trigger('delist.ninja');
        }
      });
    },
    // Bind a handler for 'deselect.ninja', or trigger it — but only on
    // objects that are currently selected and not disabled.
    deselect: function (callback) {
      return this.each(function () {
        var $object = $(this).ninja();
        if ($.isFunction(callback)) {
          $object.bind('deselect.ninja', callback);
        } else if ($object.is('.ninja-state-select') && !$object.is('.ninja-state-disable')) {
          $object.trigger('deselect.ninja');
        }
      });
    },
    // Bind a handler for 'detach.ninja', or trigger it and remove the
    // element from the DOM via jQuery's native detach (data/events kept).
    detach: function (callback) {
      return this.each(function () {
        var $object = $(this).ninja();
        if ($.isFunction(callback)) {
          $object.bind('detach.ninja', callback);
        } else {
          $object.trigger('detach.ninja');
          $.fn.detach.apply($object);
        }
      });
    },
    // Bind a handler for 'disable.ninja', or dim the object to 50% opacity,
    // mark it disabled, and trigger the event.
    disable: function (callback) {
      return this.each(function () {
        var $object = $(this).ninja();
        if ($.isFunction(callback)) {
          $object.bind('disable.ninja', callback);
        } else {
          $object.fadeTo('fast', 0.5).addClass('ninja-state-disable').trigger('disable.ninja');
        }
      });
    },
    // Bind a handler for 'enable.ninja', or restore full opacity, clear the
    // disabled state, and trigger the event.
    enable: function (callback) {
      return this.each(function () {
        var $object = $(this).ninja();
        if ($.isFunction(callback)) {
          $object.bind('enable.ninja', callback);
        } else {
          $object.fadeTo('fast', 1).removeClass('ninja-state-disable').trigger('enable.ninja');
        }
      });
    },
    // Attach a tooltip-style hint to each object: shown on focus/mouseenter,
    // positioned below the object (flipped to the right edge when it would
    // overflow the window), removed on blur/mouseleave/select.
    hint: function (options) {
      return this.each(function () {
        options = $.extend({}, defaults, options);
        var
          $object = $(this),
          $hint = $('<span/>', {
            'class': 'ninja-object-hint',
            css: $.extend(options.css, {
              minWidth: $object.width()
            }),
            html: options.html
          }),
          // SVG arrow pointing from the hint up at the object
          $stem = $('<svg class="ninja-object-stem" height="1" width="1" version="1.1" viewBox="0 0 8 8" xmlns="http://www.w3.org/2000/svg"><g><polygon points="4,1 8,8 1,8" stroke-width="0"/><line x1="4" x2="0" y2="8"/><line x1="4" x2="8" y2="8"/></g></svg>').appendTo($hint);
        if (options.css) {
          $stem.find('g').css(options.css);
        }
        $object.bind({
          'focus.ninja mouseenter.ninja': function () {
            var offset = $object.offset();
            // place the hint 5px below the object, then pick left/right
            // alignment based on available window width
            $hint.css({
              top: offset.top + $object.outerHeight() + 5
            }).appendTo('body');
            if (offset.left + $hint.outerWidth() > $(window).width()) {
              $hint.css({
                right: 0
              });
              $stem.css({
                right: ($object.outerWidth() / 2) - 4
              });
            } else {
              $hint.css({
                left: offset.left + (($object.outerWidth() - $hint.outerWidth()) / 2)
              });
              $stem.css({
                left: ($hint.outerWidth() / 2) - 4
              });
            }
          },
          'blur.ninja mouseleave.ninja select.ninja': function () {
            $hint.detach();
          }
        });
      });
    },
list: function (options) {
return this.each(function () {
options = $.extend({}, defaults, options);
var
$hover = null,
$object = $(this).ninja(),
$list = $('<div/>', {
'class': 'ninja-object-list'
}),
offset = $object.offset(),
scrollTop = $(window).scrollTop(),
bottom = offset.top + $object.outerHeight(),
right = offset.left + $object.outerWidth();
if ($object.is('.ninja-object-autocomplete')) {
$object.next('.ninja-object-autocomplete-spin').hide();
}
if (options.choices.length) {
$object.bind({
'delist.ninja': function () {
$(document).unbind('click.ninja keydown.ninja keyup.ninja');
$list.detach();
if ($hover) {
$hover.removeClass('ninja-state-hover');
}
}
});
$('body').append($list);
if (bottom > (scrollTop + $(window).height())) {
$list.css({
bottom: $object.outerHeight()
});
} else {
$list.css({
top: bottom
});
}
if (right > $(window).width()) {
$list.css({
right: right
});
} else {
$list.css({
left: offset.left
});
}
$(document).bind({
'keydown.ninja': function (event) {
if ($.inArray(event.keyCode, [9, 38, 40]) > -1) {/* down or up */
event.preventDefault();/* prevents page scrolling and tabbing when a list is active */
}
},
'keyup.ninja': function (event) {
if ($.inArray(event.keyCode, [9, 13, 27, 38, 40]) > -1) {/* tab, return, escape, down or up */
if (event.keyCode === 13) {/* return */
if ($hover) {
$hover.click();
}
} else if (event.keyCode === 27) {/* escape */
$object.delist();
} else if ($.inArray(event.keyCode, [9, 40]) > -1 && !event.shiftKey) {/* tab or down arrow */
if ($hover) {
if ($hover.nextAll('.ninja-object-item').length) {
$hover.nextAll('.ninja-object-item:first').trigger('mouseenter.ninja');
} else {
$list.find('.ninja-object-item:first').trigger('mouseenter.ninja');
}
} else {
$list.find('.ninja-object-item:first').trigger('mouseenter.ninja');
}
} else if (event.keyCode === 38 || (event.shiftKey && event.keyCode === 9)) {/* shift+tab or up arrow */
if ($hover) {
if ($hover.prevAll('.ninja-object-item').length) {
$hover.prevAll('.ninja-object-item:first').trigger('mouseenter.ninja');
} else {
$list.find('.ninja-object-item:last').trigger('mouseenter.ninja');
}
} else {
$list.find('.ninja-object-item:last').trigger('mouseenter.ninja');
}
}
return false;
}
},
'click.ninja': function (event) {
$object.delist();
}
});
if (options.query) {
options.choices = $.map(options.choices, function (item) {
item.value = item.value || item.html || item;
if (item.html) {
item.html = item.html.toString().replace(new RegExp(options.query, 'gi'), '<b>' + options.query + '</b>');
}
return item;
});
}
$.each(options.choices, function (i, choice) {
var $choice;
if (choice.spacer) {
$choice = $('<div/>', {
'class': 'ninja-object-rule'
});
} else {
$choice = $('<button/>', {
'class': 'ninja-object-item'
});
$choice.bind({
'mouseleave.ninja': function () {
$hover.removeClass('ninja-state-hover');
},
'click.ninja': function () {
$object.trigger('delist.ninja').focus();
if ($object.is('input[type=text]')) {
$object.val(choice.value);
}
if ($.isFunction(choice.select)) {
choice.select();
}
},
'mouseenter.ninja': function () {
if ($hover) {
$hover.trigger('mouseleave.ninja');
}
$hover = $choice.addClass('ninja-state-hover');
}
});
}
$choice.html(choice.html || choice).appendTo($list);
});
}
});
},
placeholder: function (placeholder) {
return this.each(function () {
var
$object = $(this).ninja(),
value;
if ($object.is('input[type=search], input[type=text]')) {
if ('placeholder' in $object) {
$object.attr('placeholder', placeholder);
} else {
$object.bind({
'blur.ninja': function () {
value = $object.val();
if (value === '' || value === placeholder) {
$object.addClass('ninja-state-placeholder');
if (value === '') {
$object.val(placeholder);
}
}
},
'focus.ninja': function () {
if ($object.val() === placeholder) {
$object.removeClass('ninja-state-placeholder').val('');
}
}
}).trigger('blur.ninja');
}
}
});
},
select: function (event) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(event)) {
$object.bind('select.ninja', event);
} else if (!$object.is('.ninja-state-disable')) {
$object.trigger('select.ninja');
}
});
},
source: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('source.ninja', callback);
} else if ($object.val() !== '') {
$object.trigger('source.ninja');
}
});
}
};
objects = {
autocomplete: function (options) {
options = $.extend({}, defaults, options);
var
timer,
$x,
$spin = $('<span/>', {
'class': 'ninja-object-autocomplete-spin'
}),
$input = $('<input/>', {
'class': 'ninja-object-autocomplete',
type: 'text'
}).bind({
'keyup.ninja': function (event) {
clearTimeout(timer);
if ($.inArray(event.keyCode, [9, 13, 27, 37, 38, 39, 40]) === -1 && $input.val() !== '') {/* not tab, return, escape, left , up, right or down */
timer = setTimeout(function () {
if ($input.next('.ninja-object-autocomplete-spin').is(':hidden')) {
$spin.show();
} else {
$spin.html($.ninja.icon({
name: 'spin'
}));
$input.after($spin);
}
$input.source();
}, 1000);
}
},
'select.ninja': function (event) {
if (event.html) {
$input.val($.trim(event.html.toString().replace(new RegExp('/<\/?[^>]+>/', 'gi'), '')));
} else {
event.html = $input.val();
}
},
'source.ninja': function (event) {
$input.delist();
event.query = $input.val();
}
});
$x = $.ninja.icon({
name: 'x'
}).bind('click.ninja', function () {
$input.val('').focus();
$x.css({
visibility: 'hidden'
});
});
if (options.placeholder) {
$input.ninja().placeholder(options.placeholder);
}
return $input.ninja();
},
button: function (options) {
options = $.extend({}, defaults, options);
var $button = $('<button/>', {
'class': 'ninja-object-button',
css: options.css,
html: options.html
});
$button.bind({
'click.ninja': function (event) {
if (!$button.is('.ninja-state-disable')) {
if ($button.is('.ninja-state-select')) {
$button.trigger('deselect.ninja');
} else {
$button.trigger('select.ninja');
}
}
event.stopImmediatePropagation();
},
'deselect.ninja': function () {
$button.removeClass('ninja-state-select');
},
'disable.ninja': function () {
$button.attr({
disabled: 'disabled'
});
},
'enable.ninja': function () {
$button.attr({
disabled: false
});
},
'select.ninja': function () {
$button.addClass('ninja-state-select');
}
});
if (options.select) {
$button.trigger('select.ninja');
}
if (options.disable) {
$button.ninja().disable();
}
return $button.ninja();
},
dialog: function (options) {
options = $.extend({}, defaults, {
$parent: $('body')
}, options);
var
$dialog = $('<span/>', {
'class': 'ninja-object-dialog',
css: options.css,
html: options.html
}),
$button = $.ninja.icon({
name: 'X'
}).bind('click.ninja', function () {
$dialog.detach();
}).appendTo($dialog),
$blocker = $('<div/>', {
'class': 'ninja-object-blocker'
}).bind('click.ninja', function (event) {
if ($.inArray($dialog[0], $(event.target).parents()) === -1) {
$dialog.detach();
}
});
$dialog.bind({
'attach.ninja': function (event) {
options.$parent.append($blocker, $dialog);
$blocker.height(options.$parent.height());
$dialog.css({
left: ($(window).width() / 2) - ($dialog.width() / 2),
top: ($(window).height() / 2) - ($dialog.height() / 2) + $(window).scrollTop()
});
$(document).bind({
'keyup.ninja': function (event) {
if (event.keyCode === 27) {/* escape */
$dialog.detach();
}
}
});
},
'detach.ninja remove.ninja': function () {
$(document).unbind('click.ninja keydown.ninja');
$blocker.detach();
}
});
return $dialog.ninja();
},
drawer: function (options) {
  // A collapsible drawer: a handle button that slides a content tray
  // open/closed, swapping a right arrow (closed) for a down arrow (open).
  options = $.extend({}, defaults, options);
  var
    $drawer = $('<div/>', {
      'class': 'ninja-object-drawer',
      css: options.css
    }),
    $tray = $('<div/>', {
      'class': 'ninja-object-tray',
      html: options.html
    }).appendTo($drawer),
    $arrowDown = $.ninja.icon({
      name: 'drawer-select'
    }),
    $arrowRight = $.ninja.icon({
      name: 'drawer'
    }),
    $handle = $.ninja.button($.extend({}, options, {
      select: options.select,
      html: options.title
    })).bind({
      'deselect.ninja': function () {
        // Collapse, then swap the down arrow for the right arrow once
        // the slide animation has finished.
        $tray.slideUp('fast', function () {
          $arrowDown.detach();
          $handle.prepend($arrowRight);
        });
      },
      'select.ninja': function () {
        $arrowRight.detach();
        $handle.prepend($arrowDown);
        $tray.slideDown('fast');
      }
    }).prependTo($drawer);
  // Initial state: open when options.select is truthy, otherwise closed.
  if (options.select) {
    $handle.prepend($arrowDown);
  } else {
    $handle.prepend($arrowRight);
    $tray.hide();
  }
  return $drawer.ninja();
},
icon: function (options) {
options = $.extend({}, defaults, {
name: 'spin'
}, options);
var
$icon,
border = ' fill="none" stroke-width="2"',
defs = '',
g = '',
id = uniqueId(),
idMask = id + 'Mask',
idSymbol = id + 'Symbol',
idVector = id + 'Vector',
mask = '',
maskBackground = '<rect fill="#fff" x="0" y="0" width="16" height="16"/>',
onload = '',
points = '',
rotate = '';
if ($.inArray(options.name, ['drawer', 'drawer-select']) > -1) {
if (options.name === 'drawer-select') {
points = '4,4 12,4 8,12';
} else {
points = '4,4 12,8 4,12';
}
g = '<polygon points="' + points + '"/>';
} else if (options.name === 'camera') {
defs = '<defs><mask id="' + idMask + '">' + maskBackground + '<circle cx="8" cy="9" r="5"/></mask></defs>';
g = '<rect x="0" y="4" width="16" height="11" rx="2" ry="2" mask="url(#' + idMask + ')"/><polygon points="4,8 4,4 6,1 10,1 12,4 12,8" mask="url(#' + idMask + ')"/><circle cx="8" cy="9" r="3"/>';
} else if ($.inArray(options.name, ['X', 'x', '-', '+']) > -1) {
if (options.name === '-') {
mask = '<rect x="4" y="7" width="8" height="2"/>';
} else {
if (options.name !== '+') {
rotate = ' transform="rotate(45 8 8)"';
}
mask = '<polygon points="7,4 9,4 9,7 12,7 12,9 9,9 9,12 7,12 7,9 4,9 4,7 7,7"' + rotate + '/>';
}
if (options.name === 'X') {
g = '<circle cx="8" cy="8" r="7"/><polygon points="7,4 9,4 9,7 12,7 12,9 9,9 9,12 7,12 7,9 4,9 4,7 7,7"' + rotate + '/>';
} else {
defs = '<defs><mask id="' + idMask + '">' + maskBackground + mask + '</mask></defs>';
g = '<circle cx="8" cy="8" mask="url(#' + idMask + ')" r="8"/>';
}
} else if (options.name === 'go') {
g = '<circle' + border + ' cx="8" cy="8" r="7"/><circle cx="8" cy="8" r="5"/>';
} else if (options.name === 'home') {
g = '<polygon points="0,10 0,8 8,0 16,8 16,10 14,10 14,16 10,16 10,10 6,10 6,16 2,16 2,10"/><rect x="11" y="16" width="4" height="8"/>';
} else if (options.name === 'mail') {
g = '<polygon points="0,2 8,10 16,2"/><polygon points="16,4 12,8 16,12"/><polygon points="0,14 5,9 8,12 11,9 16,14"/><polygon points="0,4 4,8 0,12"/>';
} else if (options.name === 'menu') {
g = '<polygon points="5,7 8,2 11,7"/><polygon points="5,9 8,14 11,9"/>';
} else if (options.name === 'search') {
g = '<circle' + border + ' cx="7" cy="7" r="5"/><polygon points="9,11 11,9 16,14 14,16"/>';
} else if (options.name === 'star') {
g = '<polygon points="0,6 6,6 8,0 10,6 16,6 11,10 13,16 8,12 3,16 5,10"/>';
} else if (options.name === 'stop') {
g = '<polygon' + border + ' points="1,11 1,5 5,1 11,1 15,5 15,11 11,15 5,15"/><polygon points="3,10 3,6 6,3 10,3 13,6 13,10 10,13 6,13"/>';
} else if (options.name === 'yield') {
g = '<polygon' + border + ' points="8,1 15,15 1,15"/><polygon points="8,5 12,13 4,13"/>';
} else if (options.name === 'spin') {
onload = ' onload="var frame=0;setInterval(function(){frame=frame+30;if(frame===360){frame=0}document.getElementById(\'' + idVector + '\').setAttributeNS(null,\'transform\',\'rotate(\'+frame+\' 8 8)\');},100)"';
defs = '<defs><rect id="' + idSymbol + '" x="7" width="2" height="4"/></defs>';
g = '<use xlink:href="#' + idSymbol + '" style="opacity:.1" transform="rotate(30 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.2" transform="rotate(60 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.3" transform="rotate(90 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.4" transform="rotate(120 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.5" transform="rotate(150 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.6" transform="rotate(180 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.7" transform="rotate(210 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.8" transform="rotate(240 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.9" transform="rotate(270 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.9.5" transform="rotate(300 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.9.75" transform="rotate(330 8 8)"/><use xlink:href="#' + idSymbol + '"/>';
}
$icon = $('<svg aria-label="' + options.name + '" class="ninja-object-icon" height="1" width="1"' + onload + ' role="img" version="1.1" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg"><title>' + options.name + '</title>' + defs + '<g id="' + idVector + '" stroke-width="0">' + g + '</g></svg>');
if (options.css) {
$icon.find('g').css(options.css);
}
return $icon;
},
menu: function (options) {
options = $.extend({}, defaults, options);
var $menu = $.ninja.button($.extend({}, options, {
html: options.html
})).addClass('ninja-object-menu').append($.ninja.icon({
name: 'menu'
})).select(function () {
$menu.blur().list(options);
}).deselect(function () {
$menu.delist();
}).delist(function () {
$menu.deselect();
});
return $menu;
},
rating: function (options) {
  // A star-rating widget. Shows the group average until the user picks
  // an individual rating; fires a 'select' event carrying the 1-based
  // star count.
  options = $.extend({}, defaults, {
    average: 0,
    select: 0,
    stars: 5
  }, options);
  var
    i,
    $rating = $('<span/>', {
      'class': 'ninja-object-rating'
    }).bind({
      'mouseleave.ninja': function () {
        // Restore the resting display: average stars (only while the
        // user has not selected anything) plus any individual selection.
        $rating.find('.ninja-object-star').each(function (ii, star) {
          var $star = $(star);
          if (options.select === 0) {
            if (ii < options.average) {
              $star.addClass('ninja-state-average');
            } else {
              $star.removeClass('ninja-state-average');
            }
          }
          if (ii < options.select) {
            $star.addClass('ninja-state-individual');
          } else {
            $star.removeClass('ninja-state-individual');
          }
        });
      }
    });
  // Build the requested number of star buttons.
  for (i = 0; i < options.stars; i++) {
    $('<button/>', {
      'class': 'ninja-object-star',
      html: $.ninja.icon({
        'name': 'star'
      })
    }).appendTo($rating);
  }
  $rating.find('.ninja-object-star').each(function (i, star) {
    i++;// convert to 1-based star number for the select event
    var $star = $(star);
    $star.bind({
      'click.ninja select.ninja': function () {
        options.select = i;
        // Repaint via the mouseleave handler, then notify listeners.
        $rating.trigger('mouseleave.ninja').trigger({
          type: 'select',
          stars: i
        });
      },
      'mouseenter.ninja': function () {
        // Hover preview: light up every star up to the hovered one and
        // suppress the average display while previewing.
        $rating.find('.ninja-object-star').each(function (ii, star) {
          var $star = $(star).removeClass('ninja-state-average');
          if (ii < i) {
            $star.addClass('ninja-state-individual');
          } else {
            $star.removeClass('ninja-state-individual');
          }
        });
      }
    });
  });
  // Paint the initial state.
  $rating.trigger('mouseleave.ninja');
  return $rating.ninja();
},
slider: function (options) {
  // A discrete slider: options.choices defines the slots; dragging
  // (mouse or touch), clicking the groove, or arrow keys move the
  // button between slots. A choice's select() callback fires when the
  // user commits a slot.
  options = $.extend({}, defaults, {
    slot: 0,
    width: 200
  }, options);
  var
    drag = false,
    slots = options.choices.length - 1,
    increment = options.width / slots,// pixels between adjacent slots
    left = options.slot * increment,
    offsetX = 0,// pointer offset within the button at drag start
    touch,
    trackWidth = options.width + 16,
    $button = $('<button/>', {
      'class': 'ninja-object-slider-button',
      css: { left: left }
    }),
    $choice = $('<span/>', {
      'class': 'ninja-object-slider-choice',
      html: options.choices[options.slot].html
    }),
    $level = $('<div/>', {
      'class': 'ninja-object-slider-level',
      css: { width: left }
    }),
    $slider = $('<span/>', {
      'class': 'ninja-object-slider'
    }).bind({
      'change.ninja select.ninja': function (event) {
        // event.sliderX is a raw slot index; clamp it into range, then
        // move the button, fill level and label to that slot.
        var slot;
        if (event.sliderX < 0) {
          slot = 0;
        } else if (event.sliderX > slots) {
          slot = slots;
        } else {
          slot = event.sliderX;
        }
        event.choice = options.choices[slot];
        $choice.html(event.choice.html);
        left = slot * increment;
        $button.css({ left: left });
        $level.css({ width: left });
      },
      'select.ninja': function (event) {
        // Runs after the shared handler above (bind order), so
        // event.choice is already set.
        if (event.choice.select) {
          event.choice.select(event);
        }
      }
    }).append($choice),
    $track = $('<div/>', {
      'class': 'ninja-object-slider-track',
      css: { width: trackWidth }
    }).appendTo($slider),
    $groove = $('<div/>', {
      'class': 'ninja-object-slider-groove'
    }).bind('click.ninja', function (event) {
      // Clicking the groove jumps straight to the nearest slot.
      $button.trigger({
        type: 'select.ninja',
        sliderX: Math.round((event.pageX - $track.offset().left) / increment)
      });
    }).append($level).appendTo($track);
  if (options.title) {
    $choice.before($('<span/>', {
      'class': 'ninja-object-slider-title',
      text: options.title + ': '
    }));
  }
  $button.bind({
    'keyup.ninja': function (event) {
      if ($.inArray(event.keyCode, [37, 39]) > -1) {/* right or left */
        // Keyboard stepping: derive the current slot from the button's
        // position, move one slot, and update the display directly.
        var
          choice,
          slot = Math.round($button.position().left / increment);
        if (slot > 0 && event.keyCode === 37) {/* left arrow */
          slot--;
        } else if (slot < slots && event.keyCode === 39) {/* right arrow */
          slot++;
        }
        choice = options.choices[slot];
        $choice.html(choice.html);
        left = slot * increment;
        $button.css({ left: left });
        $level.css({ width: left });
        return false;
      }
    },
    'mousedown.ninja touchstart.ninja': function () {
      $button.addClass('ninja-state-select');
    },
    'mousedown.ninja': function (event) {
      event.preventDefault();
      offsetX = event.pageX - $button.position().left;
      drag = true;
      // Track the drag on the document so it survives leaving the button.
      $(document).bind({
        'mousemove.ninja': function (event) {
          if (!drag) {
            return;
          }
          $slider.trigger({
            type: 'change.ninja',
            sliderX: Math.round((event.pageX - offsetX) / increment)
          });
        },
        'mouseup.ninja': function (event) {
          // Commit the slot and tear down the document-level handlers.
          $button.removeClass('ninja-state-select');
          drag = false;
          $button.trigger({
            type: 'select.ninja',
            sliderX: Math.round((event.pageX - offsetX) / increment)
          });
          $(document).unbind('mousemove.ninja mouseup.ninja');
        }
      });
    },
    'touchstart.ninja': function (event) {
      event.preventDefault();
      touch = event.originalEvent.targetTouches[0] || event.originalEvent.changedTouches[0];
      offsetX = touch.pageX - $button.position().left;
    },
    'touchmove.ninja': function (event) {
      event.preventDefault();
      touch = event.originalEvent.targetTouches[0] || event.originalEvent.changedTouches[0];
      $slider.trigger({
        type: 'change.ninja',
        sliderX: Math.round((touch.pageX - offsetX) / increment)
      });
    },
    'touchend.ninja': function (event) {
      // Commit on touch release, using the last recorded touch point.
      $button.removeClass('ninja-state-select');
      event.preventDefault();
      $button.trigger({
        type: 'select.ninja',
        sliderX: Math.round((touch.pageX - offsetX) / increment)
      });
    }
  }).appendTo($track);
  return $slider.ninja();
},
tabs: function (options) {
  // A tab strip (horizontal by default, vertical via options.vertical).
  // Exactly one tab is selected at a time; options.choice is the
  // 1-based index of the initially-selected tab.
  options = $.extend({}, defaults, {
    choice: 1
  }, options);
  var $tabs = $('<span/>');
  if (options.vertical) {
    $tabs.addClass('ninja-object-tabs-vertical');
  } else {
    $tabs.addClass('ninja-object-tabs-horizontal');
  }
  $.each(options.choices, function (i, choice) {
    // Each choice may be a plain string or an object with html/select.
    var $tab = $('<button/>', {
      'class': 'ninja-object-tab',
      css: options.css,
      html: choice.html || choice
    }).bind({
      'click.ninja': function () {
        // Clicking an already-selected or disabled tab is a no-op.
        if (!$tab.is('.ninja-state-disable') && !$tab.is('.ninja-state-select')) {
          $tab.trigger('select.ninja');
        }
      },
      'disable.ninja': function () {
        $tab.attr({
          disabled: 'disabled'
        });
      },
      'enable.ninja': function () {
        $tab.attr({
          disabled: false
        });
      },
      'select.ninja': function () {
        // Deselect siblings, mark this tab, then run the choice's
        // select callback if one was provided.
        $tabs.children().not($tab).removeClass('ninja-state-select');
        $tab.addClass('ninja-state-select');
        if ($.isFunction(choice.select)) {
          choice.select();
        }
      }
    }).appendTo($tabs);
    if (i === options.choice - 1) {
      $tab.select();
    }
  });
  return $tabs.ninja();
},
version: function () {
return 'development';
}
};
// Expose the widget factories (button, dialog, icon, ...) as $.ninja.*.
$.ninja = objects;
// Mix the instance methods (select, disable, hint, ...) into any jQuery
// set via .ninja().
$.fn.ninja = function () {
  return this.extend(methods);
};
}(jQuery, window, document));
| src/ninjaui.js | /*!
Ninja UI jQuery Plugin vdevelopment
http://ninjaui.com/
Copyright 2008-2011 Jamie Hoover
Licensed per the terms of the Apache License v2.0
http://ninjaui.com/license
*/
/*globals CFInstall: false*/
/*jshint bitwise: true, browser: true, curly: true, eqeqeq: true, forin: true, immed: true, indent: 2, jquery: true, maxerr: 3, newcap: true, noarg: true, noempty: true, nomen: true, nonew: true, onevar: true, plusplus: false, regexp: true, strict: true, undef: true, white: true*/
(function ($, window, document, undefined) {
'use strict';
var
browser = $.browser,
defaults,
objects,
methods,
time,
version = $.fn.jquery.split('.'),
versionMinor = parseFloat(version[1]),
versionIncrement = parseFloat(version[2] || '0');
if (versionMinor === 4 && versionIncrement < 3 || versionMinor < 4) {
$.error('Ninja UI requires jQuery 1.4.3 or higher.');
}
if (browser.msie && parseFloat(browser.version) < '9') {
$('<script/>', {
defer: '',
src: 'http://ajax.googleapis.com/ajax/libs/chrome-frame/1.0.3/CFInstall.min.js'
}).appendTo('head');
$(document).ready(function () {
CFInstall.check({
mode: 'overlay'
});
});
}
$('<link/>', {
rel: 'stylesheet',
href: '../src/ninjaui.css'
}).appendTo('head');
time = $.now();
function uniqueId() {
return time ++;
}
methods = {
attach: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('attach.ninja', callback);
} else {
$object.trigger('attach.ninja');
}
});
},
delist: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('delist.ninja', callback);
} else {
$object.trigger('delist.ninja');
}
});
},
deselect: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('deselect.ninja', callback);
} else if ($object.is('.ninja-state-select') && !$object.is('.ninja-state-disable')) {
$object.trigger('deselect.ninja');
}
});
},
detach: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('detach.ninja', callback);
} else {
$object.trigger('detach.ninja');
$.fn.detach.apply($object);
}
});
},
disable: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('disable.ninja', callback);
} else {
$object.fadeTo('fast', 0.5).addClass('ninja-state-disable').trigger('disable.ninja');
}
});
},
enable: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('enable.ninja', callback);
} else {
$object.fadeTo('fast', 1).removeClass('ninja-state-disable').trigger('enable.ninja');
}
});
},
hint: function (options) {
return this.each(function () {
options = $.extend({}, defaults, options);
var
$object = $(this),
$hint = $('<span/>', {
'class': 'ninja-object-hint',
css: $.extend(options.css, {
minWidth: $object.width()
}),
html: options.html
}),
$stem = $('<svg class="ninja-object-stem" height="1" width="1" version="1.1" viewBox="0 0 8 8" xmlns="http://www.w3.org/2000/svg"><g><polygon points="4,1 8,8 1,8" stroke-width="0"/><line x1="4" x2="0" y2="8"/><line x1="4" x2="8" y2="8"/></g></svg>').appendTo($hint);
if (options.css) {
$stem.find('g').css(options.css);
}
$object.bind({
'deselect.ninja focus.ninja mouseenter.ninja': function () {
var offset = $object.offset();
$hint.css({
top: offset.top + $object.outerHeight() + 5
}).appendTo('body');
if (offset.left + $hint.outerWidth() > $(window).width()) {
$hint.css({
right: 0
});
$stem.css({
right: ($object.outerWidth() / 2) - 4
});
} else {
$hint.css({
left: offset.left + (($object.outerWidth() - $hint.outerWidth()) / 2)
});
$stem.css({
left: ($hint.outerWidth() / 2) - 4
});
}
},
'blur.ninja mouseleave.ninja select.ninja': function () {
$hint.detach();
}
});
});
},
list: function (options) {
return this.each(function () {
options = $.extend({}, defaults, options);
var
$hover = null,
$object = $(this).ninja(),
$list = $('<div/>', {
'class': 'ninja-object-list'
}),
offset = $object.offset(),
scrollTop = $(window).scrollTop(),
bottom = offset.top + $object.outerHeight(),
right = offset.left + $object.outerWidth();
if ($object.is('.ninja-object-autocomplete')) {
$object.next('.ninja-object-autocomplete-spin').hide();
}
if (options.choices.length) {
$object.bind({
'delist.ninja': function () {
$(document).unbind('click.ninja keydown.ninja keyup.ninja');
$list.detach();
if ($hover) {
$hover.removeClass('ninja-state-hover');
}
}
});
$('body').append($list);
if (bottom > (scrollTop + $(window).height())) {
$list.css({
bottom: $object.outerHeight()
});
} else {
$list.css({
top: bottom
});
}
if (right > $(window).width()) {
$list.css({
right: right
});
} else {
$list.css({
left: offset.left
});
}
$(document).bind({
'keydown.ninja': function (event) {
if ($.inArray(event.keyCode, [9, 38, 40]) > -1) {/* down or up */
event.preventDefault();/* prevents page scrolling and tabbing when a list is active */
}
},
'keyup.ninja': function (event) {
if ($.inArray(event.keyCode, [9, 13, 27, 38, 40]) > -1) {/* tab, return, escape, down or up */
if (event.keyCode === 13) {/* return */
if ($hover) {
$hover.click();
}
} else if (event.keyCode === 27) {/* escape */
$object.delist();
} else if ($.inArray(event.keyCode, [9, 40]) > -1 && !event.shiftKey) {/* tab or down arrow */
if ($hover) {
if ($hover.nextAll('.ninja-object-item').length) {
$hover.nextAll('.ninja-object-item:first').trigger('mouseenter.ninja');
} else {
$list.find('.ninja-object-item:first').trigger('mouseenter.ninja');
}
} else {
$list.find('.ninja-object-item:first').trigger('mouseenter.ninja');
}
} else if (event.keyCode === 38 || (event.shiftKey && event.keyCode === 9)) {/* shift+tab or up arrow */
if ($hover) {
if ($hover.prevAll('.ninja-object-item').length) {
$hover.prevAll('.ninja-object-item:first').trigger('mouseenter.ninja');
} else {
$list.find('.ninja-object-item:last').trigger('mouseenter.ninja');
}
} else {
$list.find('.ninja-object-item:last').trigger('mouseenter.ninja');
}
}
return false;
}
},
'click.ninja': function (event) {
$object.delist();
}
});
if (options.query) {
options.choices = $.map(options.choices, function (item) {
item.value = item.value || item.html || item;
if (item.html) {
item.html = item.html.toString().replace(new RegExp(options.query, 'gi'), '<b>' + options.query + '</b>');
}
return item;
});
}
$.each(options.choices, function (i, choice) {
var $choice;
if (choice.spacer) {
$choice = $('<div/>', {
'class': 'ninja-object-rule'
});
} else {
$choice = $('<button/>', {
'class': 'ninja-object-item'
});
$choice.bind({
'mouseleave.ninja': function () {
$hover.removeClass('ninja-state-hover');
},
'click.ninja': function () {
$object.trigger('delist.ninja').focus();
if ($object.is('input[type=text]')) {
$object.val(choice.value);
}
if ($.isFunction(choice.select)) {
choice.select();
}
},
'mouseenter.ninja': function () {
if ($hover) {
$hover.trigger('mouseleave.ninja');
}
$hover = $choice.addClass('ninja-state-hover');
}
});
}
$choice.html(choice.html || choice).appendTo($list);
});
}
});
},
placeholder: function (placeholder) {
return this.each(function () {
var
$object = $(this).ninja(),
value;
if ($object.is('input[type=search], input[type=text]')) {
if ('placeholder' in $object) {
$object.attr('placeholder', placeholder);
} else {
$object.bind({
'blur.ninja': function () {
value = $object.val();
if (value === '' || value === placeholder) {
$object.addClass('ninja-state-placeholder');
if (value === '') {
$object.val(placeholder);
}
}
},
'focus.ninja': function () {
if ($object.val() === placeholder) {
$object.removeClass('ninja-state-placeholder').val('');
}
}
}).trigger('blur.ninja');
}
}
});
},
select: function (event) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(event)) {
$object.bind('select.ninja', event);
} else if (!$object.is('.ninja-state-disable')) {
$object.trigger('select.ninja');
}
});
},
source: function (callback) {
return this.each(function () {
var $object = $(this).ninja();
if ($.isFunction(callback)) {
$object.bind('source.ninja', callback);
} else if ($object.val() !== '') {
$object.trigger('source.ninja');
}
});
}
};
objects = {
autocomplete: function (options) {
options = $.extend({}, defaults, options);
var
timer,
$x,
$spin = $('<span/>', {
'class': 'ninja-object-autocomplete-spin'
}),
$input = $('<input/>', {
'class': 'ninja-object-autocomplete',
type: 'text'
}).bind({
'keyup.ninja': function (event) {
clearTimeout(timer);
if ($.inArray(event.keyCode, [9, 13, 27, 37, 38, 39, 40]) === -1 && $input.val() !== '') {/* not tab, return, escape, left , up, right or down */
timer = setTimeout(function () {
if ($input.next('.ninja-object-autocomplete-spin').is(':hidden')) {
$spin.show();
} else {
$spin.html($.ninja.icon({
name: 'spin'
}));
$input.after($spin);
}
$input.source();
}, 1000);
}
},
'select.ninja': function (event) {
if (event.html) {
$input.val($.trim(event.html.toString().replace(new RegExp('/<\/?[^>]+>/', 'gi'), '')));
} else {
event.html = $input.val();
}
},
'source.ninja': function (event) {
$input.delist();
event.query = $input.val();
}
});
$x = $.ninja.icon({
name: 'x'
}).bind('click.ninja', function () {
$input.val('').focus();
$x.css({
visibility: 'hidden'
});
});
if (options.placeholder) {
$input.ninja().placeholder(options.placeholder);
}
return $input.ninja();
},
button: function (options) {
options = $.extend({}, defaults, options);
var $button = $('<button/>', {
'class': 'ninja-object-button',
css: options.css,
html: options.html
});
$button.bind({
'click.ninja': function (event) {
if (!$button.is('.ninja-state-disable')) {
if ($button.is('.ninja-state-select')) {
$button.trigger('deselect.ninja');
} else {
$button.trigger('select.ninja');
}
}
event.stopImmediatePropagation();
},
'deselect.ninja': function () {
$button.removeClass('ninja-state-select');
},
'disable.ninja': function () {
$button.attr({
disabled: 'disabled'
});
},
'enable.ninja': function () {
$button.attr({
disabled: false
});
},
'select.ninja': function () {
$button.addClass('ninja-state-select');
}
});
if (options.select) {
$button.trigger('select.ninja');
}
if (options.disable) {
$button.ninja().disable();
}
return $button.ninja();
},
dialog: function (options) {
options = $.extend({}, defaults, {
$parent: $('body')
}, options);
var
$dialog = $('<span/>', {
'class': 'ninja-object-dialog',
css: options.css,
html: options.html
}),
$button = $.ninja.icon({
name: 'X'
}).bind('click.ninja', function () {
$dialog.detach();
}).appendTo($dialog),
$blocker = $('<div/>', {
'class': 'ninja-object-blocker'
}).bind('click.ninja', function (event) {
if ($.inArray($dialog[0], $(event.target).parents()) === -1) {
$dialog.detach();
}
});
$dialog.bind({
'attach.ninja': function (event) {
options.$parent.append($blocker, $dialog);
$blocker.height(options.$parent.height());
$dialog.css({
left: ($(window).width() / 2) - ($dialog.width() / 2),
top: ($(window).height() / 2) - ($dialog.height() / 2) + $(window).scrollTop()
});
$(document).bind({
'keyup.ninja': function (event) {
if (event.keyCode === 27) {/* escape */
$dialog.detach();
}
}
});
},
'detach.ninja remove.ninja': function () {
$(document).unbind('click.ninja keydown.ninja');
$blocker.detach();
}
});
return $dialog.ninja();
},
drawer: function (options) {
options = $.extend({}, defaults, options);
var
$drawer = $('<div/>', {
'class': 'ninja-object-drawer',
css: options.css
}),
$tray = $('<div/>', {
'class': 'ninja-object-tray',
html: options.html
}).appendTo($drawer),
$arrowDown = $.ninja.icon({
name: 'drawer-select'
}),
$arrowRight = $.ninja.icon({
name: 'drawer'
}),
$handle = $.ninja.button($.extend({}, options, {
select: options.select,
html: options.title
})).bind({
'deselect.ninja': function () {
$tray.slideUp('fast', function () {
$arrowDown.detach();
$handle.prepend($arrowRight);
});
},
'select.ninja': function () {
$arrowRight.detach();
$handle.prepend($arrowDown);
$tray.slideDown('fast');
}
}).prependTo($drawer);
if (options.select) {
$handle.prepend($arrowDown);
} else {
$handle.prepend($arrowRight);
$tray.hide();
}
return $drawer.ninja();
},
icon: function (options) {
options = $.extend({}, defaults, {
name: 'spin'
}, options);
var
$icon,
border = ' fill="none" stroke-width="2"',
defs = '',
g = '',
id = uniqueId(),
idMask = id + 'Mask',
idSymbol = id + 'Symbol',
idVector = id + 'Vector',
mask = '',
maskBackground = '<rect fill="#fff" x="0" y="0" width="16" height="16"/>',
onload = '',
points = '',
rotate = '';
if ($.inArray(options.name, ['drawer', 'drawer-select']) > -1) {
if (options.name === 'drawer-select') {
points = '4,4 12,4 8,12';
} else {
points = '4,4 12,8 4,12';
}
g = '<polygon points="' + points + '"/>';
} else if (options.name === 'camera') {
defs = '<defs><mask id="' + idMask + '">' + maskBackground + '<circle cx="8" cy="9" r="5"/></mask></defs>';
g = '<rect x="0" y="4" width="16" height="11" rx="2" ry="2" mask="url(#' + idMask + ')"/><polygon points="4,8 4,4 6,1 10,1 12,4 12,8" mask="url(#' + idMask + ')"/><circle cx="8" cy="9" r="3"/>';
} else if ($.inArray(options.name, ['X', 'x', '-', '+']) > -1) {
if (options.name === '-') {
mask = '<rect x="4" y="7" width="8" height="2"/>';
} else {
if (options.name !== '+') {
rotate = ' transform="rotate(45 8 8)"';
}
mask = '<polygon points="7,4 9,4 9,7 12,7 12,9 9,9 9,12 7,12 7,9 4,9 4,7 7,7"' + rotate + '/>';
}
if (options.name === 'X') {
g = '<circle cx="8" cy="8" r="7"/><polygon points="7,4 9,4 9,7 12,7 12,9 9,9 9,12 7,12 7,9 4,9 4,7 7,7"' + rotate + '/>';
} else {
defs = '<defs><mask id="' + idMask + '">' + maskBackground + mask + '</mask></defs>';
g = '<circle cx="8" cy="8" mask="url(#' + idMask + ')" r="8"/>';
}
} else if (options.name === 'go') {
g = '<circle' + border + ' cx="8" cy="8" r="7"/><circle cx="8" cy="8" r="5"/>';
} else if (options.name === 'home') {
g = '<polygon points="0,10 0,8 8,0 16,8 16,10 14,10 14,16 10,16 10,10 6,10 6,16 2,16 2,10"/><rect x="11" y="16" width="4" height="8"/>';
} else if (options.name === 'mail') {
g = '<polygon points="0,2 8,10 16,2"/><polygon points="16,4 12,8 16,12"/><polygon points="0,14 5,9 8,12 11,9 16,14"/><polygon points="0,4 4,8 0,12"/>';
} else if (options.name === 'menu') {
g = '<polygon points="5,7 8,2 11,7"/><polygon points="5,9 8,14 11,9"/>';
} else if (options.name === 'search') {
g = '<circle' + border + ' cx="7" cy="7" r="5"/><polygon points="9,11 11,9 16,14 14,16"/>';
} else if (options.name === 'star') {
g = '<polygon points="0,6 6,6 8,0 10,6 16,6 11,10 13,16 8,12 3,16 5,10"/>';
} else if (options.name === 'stop') {
g = '<polygon' + border + ' points="1,11 1,5 5,1 11,1 15,5 15,11 11,15 5,15"/><polygon points="3,10 3,6 6,3 10,3 13,6 13,10 10,13 6,13"/>';
} else if (options.name === 'yield') {
g = '<polygon' + border + ' points="8,1 15,15 1,15"/><polygon points="8,5 12,13 4,13"/>';
} else if (options.name === 'spin') {
onload = ' onload="var frame=0;setInterval(function(){frame=frame+30;if(frame===360){frame=0}document.getElementById(\'' + idVector + '\').setAttributeNS(null,\'transform\',\'rotate(\'+frame+\' 8 8)\');},100)"';
defs = '<defs><rect id="' + idSymbol + '" x="7" width="2" height="4"/></defs>';
g = '<use xlink:href="#' + idSymbol + '" style="opacity:.1" transform="rotate(30 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.2" transform="rotate(60 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.3" transform="rotate(90 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.4" transform="rotate(120 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.5" transform="rotate(150 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.6" transform="rotate(180 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.7" transform="rotate(210 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.8" transform="rotate(240 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.9" transform="rotate(270 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.9.5" transform="rotate(300 8 8)"/><use xlink:href="#' + idSymbol + '" style="opacity:.9.75" transform="rotate(330 8 8)"/><use xlink:href="#' + idSymbol + '"/>';
}
$icon = $('<svg aria-label="' + options.name + '" class="ninja-object-icon" height="1" width="1"' + onload + ' role="img" version="1.1" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg"><title>' + options.name + '</title>' + defs + '<g id="' + idVector + '" stroke-width="0">' + g + '</g></svg>');
if (options.css) {
$icon.find('g').css(options.css);
}
return $icon;
},
menu: function (options) {
options = $.extend({}, defaults, options);
var $menu = $.ninja.button($.extend({}, options, {
html: options.html
})).addClass('ninja-object-menu').append($.ninja.icon({
name: 'menu'
})).select(function () {
$menu.blur().list(options);
}).deselect(function () {
$menu.delist();
}).delist(function () {
$menu.deselect();
});
return $menu;
},
  rating: function (options) {
    // Star-rating widget.  options.stars sets the star count,
    // options.average the crowd average shown when the user has not voted,
    // options.select the user's own (1-based) vote.
    options = $.extend({}, defaults, {
      average: 0,
      select: 0,
      stars: 5
    }, options);
    var
      i,
      $rating = $('<span/>', {
        'class': 'ninja-object-rating'
      }).bind({
        // Restores the "resting" display: average highlight when the user
        // has not voted, individual highlight up to the user's vote.
        'mouseleave.ninja': function () {
          $rating.find('.ninja-object-star').each(function (ii, star) {
            var $star = $(star);
            if (options.select === 0) {
              if (ii < options.average) {
                $star.addClass('ninja-state-average');
              } else {
                $star.removeClass('ninja-state-average');
              }
            }
            if (ii < options.select) {
              $star.addClass('ninja-state-individual');
            } else {
              $star.removeClass('ninja-state-individual');
            }
          });
        }
      });
    // Create the star buttons.
    for (i = 0; i < options.stars; i++) {
      $('<button/>', {
        'class': 'ninja-object-star',
        html: $.ninja.icon({
          'name': 'star'
        })
      }).appendTo($rating);
    }
    $rating.find('.ninja-object-star').each(function (i, star) {
      // Shift to a 1-based star number; each handler below closes over
      // this star's own value of i.
      i++;
      var $star = $(star);
      $star.bind({
        'click.ninja select.ninja': function () {
          // Record the vote, redraw, and notify listeners of the new count.
          options.select = i;
          $rating.trigger('mouseleave.ninja').trigger({
            type: 'select',
            stars: i
          });
        },
        'mouseenter.ninja': function () {
          // Preview: highlight stars up to the hovered one.
          $rating.find('.ninja-object-star').each(function (ii, star) {
            var $star = $(star).removeClass('ninja-state-average');
            if (ii < i) {
              $star.addClass('ninja-state-individual');
            } else {
              $star.removeClass('ninja-state-individual');
            }
          });
        }
      });
    });
    // Paint the initial state.
    $rating.trigger('mouseleave.ninja');
    return $rating.ninja();
  },
  slider: function (options) {
    // Horizontal slider snapping to options.choices slots.  Supports mouse
    // drag, touch drag, click-on-groove and left/right arrow keys.
    // 'change.ninja' updates the visual position; 'select.ninja' also runs
    // the chosen slot's select callback.
    options = $.extend({}, defaults, {
      slot: 0,
      width: 200
    }, options);
    var
      drag = false,
      slots = options.choices.length - 1,
      // Pixel distance between adjacent slots.
      increment = options.width / slots,
      left = options.slot * increment,
      offsetX = 0,
      touch,
      trackWidth = options.width + 16,
      $button = $('<button/>', {
        'class': 'ninja-object-slider-button',
        css: { left: left }
      }),
      // Label showing the currently chosen slot's html.
      $choice = $('<span/>', {
        'class': 'ninja-object-slider-choice',
        html: options.choices[options.slot].html
      }),
      // Filled portion of the groove, left of the button.
      $level = $('<div/>', {
        'class': 'ninja-object-slider-level',
        css: { width: left }
      }),
      $slider = $('<span/>', {
        'class': 'ninja-object-slider'
      }).bind({
        // Clamp the requested slot, update the label, button and level.
        'change.ninja select.ninja': function (event) {
          var slot;
          if (event.sliderX < 0) {
            slot = 0;
          } else if (event.sliderX > slots) {
            slot = slots;
          } else {
            slot = event.sliderX;
          }
          event.choice = options.choices[slot];
          $choice.html(event.choice.html);
          left = slot * increment;
          $button.css({ left: left });
          $level.css({ width: left });
        },
        // On select (not plain change) also fire the slot's callback.
        'select.ninja': function (event) {
          if (event.choice.select) {
            event.choice.select(event);
          }
        }
      }).append($choice),
      $track = $('<div/>', {
        'class': 'ninja-object-slider-track',
        css: { width: trackWidth }
      }).appendTo($slider),
      // Clicking the groove jumps straight to the nearest slot.
      $groove = $('<div/>', {
        'class': 'ninja-object-slider-groove'
      }).bind('click.ninja', function (event) {
        $button.trigger({
          type: 'select.ninja',
          sliderX: Math.round((event.pageX - $track.offset().left) / increment)
        });
      }).append($level).appendTo($track);
    if (options.title) {
      $choice.before($('<span/>', {
        'class': 'ninja-object-slider-title',
        text: options.title + ': '
      }));
    }
    $button.bind({
      // Keyboard: arrow keys move one slot at a time.
      'keyup.ninja': function (event) {
        if ($.inArray(event.keyCode, [37, 39]) > -1) {/* right or left */
          var
            choice,
            slot = Math.round($button.position().left / increment);
          if (slot > 0 && event.keyCode === 37) {/* left arrow */
            slot--;
          } else if (slot < slots && event.keyCode === 39) {/* right arrow */
            slot++;
          }
          choice = options.choices[slot];
          $choice.html(choice.html);
          left = slot * increment;
          $button.css({ left: left });
          $level.css({ width: left });
          return false;
        }
      },
      'mousedown.ninja touchstart.ninja': function () {
        $button.addClass('ninja-state-select');
      },
      // Mouse drag: track movement on the document so dragging continues
      // even when the pointer leaves the button.
      'mousedown.ninja': function (event) {
        event.preventDefault();
        offsetX = event.pageX - $button.position().left;
        drag = true;
        $(document).bind({
          'mousemove.ninja': function (event) {
            if (!drag) {
              return;
            }
            $slider.trigger({
              type: 'change.ninja',
              sliderX: Math.round((event.pageX - offsetX) / increment)
            });
          },
          'mouseup.ninja': function (event) {
            // Commit the drop position and tear down the drag handlers.
            $button.removeClass('ninja-state-select');
            drag = false;
            $button.trigger({
              type: 'select.ninja',
              sliderX: Math.round((event.pageX - offsetX) / increment)
            });
            $(document).unbind('mousemove.ninja mouseup.ninja');
          }
        });
      },
      // Touch drag: same flow via touch events; the last touch point is
      // remembered so touchend can compute the final slot.
      'touchstart.ninja': function (event) {
        event.preventDefault();
        touch = event.originalEvent.targetTouches[0] || event.originalEvent.changedTouches[0];
        offsetX = touch.pageX - $button.position().left;
      },
      'touchmove.ninja': function (event) {
        event.preventDefault();
        touch = event.originalEvent.targetTouches[0] || event.originalEvent.changedTouches[0];
        $slider.trigger({
          type: 'change.ninja',
          sliderX: Math.round((touch.pageX - offsetX) / increment)
        });
      },
      'touchend.ninja': function (event) {
        $button.removeClass('ninja-state-select');
        event.preventDefault();
        $button.trigger({
          type: 'select.ninja',
          sliderX: Math.round((touch.pageX - offsetX) / increment)
        });
      }
    }).appendTo($track);
    return $slider.ninja();
  },
tabs: function (options) {
options = $.extend({}, defaults, {
choice: 1
}, options);
var $tabs = $('<span/>');
if (options.vertical) {
$tabs.addClass('ninja-object-tabs-vertical');
} else {
$tabs.addClass('ninja-object-tabs-horizontal');
}
$.each(options.choices, function (i, choice) {
var $tab = $('<button/>', {
'class': 'ninja-object-tab',
css: options.css,
html: choice.html || choice
}).bind({
'click.ninja': function () {
if (!$tab.is('.ninja-state-disable') && !$tab.is('.ninja-state-select')) {
$tab.trigger('select.ninja');
}
},
'disable.ninja': function () {
$tab.attr({
disabled: 'disabled'
});
},
'enable.ninja': function () {
$tab.attr({
disabled: false
});
},
'select.ninja': function () {
$tabs.children().not($tab).removeClass('ninja-state-select');
$tab.addClass('ninja-state-select');
if ($.isFunction(choice.select)) {
choice.select();
}
}
}).appendTo($tabs);
if (i === options.choice - 1) {
$tab.select();
}
});
return $tabs.ninja();
},
  version: function () {
    // Development placeholder; presumably replaced with a real version
    // string by the release build — TODO confirm against the build script.
    return 'development';
  }
};
$.ninja = objects;
$.fn.ninja = function () {
return this.extend(methods);
};
}(jQuery, window, document));
| Hint bug in iOS
Keeps hint hidden after touching a button the second time.
| src/ninjaui.js | Hint bug in iOS | <ide><path>rc/ninjaui.js
<ide> $stem.find('g').css(options.css);
<ide> }
<ide> $object.bind({
<del> 'deselect.ninja focus.ninja mouseenter.ninja': function () {
<add> 'focus.ninja mouseenter.ninja': function () {
<ide> var offset = $object.offset();
<ide> $hint.css({
<ide> top: offset.top + $object.outerHeight() + 5 |
|
JavaScript | mit | d2f17624cdce1ccc26c6d8153fd3f95ca1204cc9 | 0 | Step7750/ScheduleStorm,Step7750/ScheduleStorm | class Generator {
constructor(classes) {
// chosen classes
this.classes = jQuery.extend(true, {}, classes);
this.convertTimes();
this.addCourseInfo();
//console.log(this.classes);
// Generates the schedules
this.schedGen();
}
/*
Spawns a web worker that generates possible schedules given classes
*/
    schedGen() {
        var self = this;
        // Used by the delayed loader below so the spinner is only shown
        // when generation takes noticeably long.
        self.doneGenerating = false;
        // Instantiate the generator
        // operative() runs this object inside a web worker so the UI thread
        // stays responsive while schedules are brute-forced.
        var schedgenerator = operative({
            possibleschedules: [],
            combinations: [],
            classes: {},
            // Worker entry point: builds course combinations, expands them
            // into conflict-free schedules, and returns them via callback.
            init: function(classes, callback) {
                this.classes = classes;
                this.findCombinations();
                this.iterateCombos();
                callback(this.possibleschedules);
            },
            /*
                Iterates through every group combinations to find possible non-conflicting schedules
            */
            iterateCombos: function () {
                // reset possible schedules
                this.possibleschedules = [];
                if (this.combinations.length > 0) {
                    // there must be more than 0 combos for a schedule
                    for (var combos in this.combinations[0]) {
                        // create a copy to work with (generateSchedules mutates it)
                        var combocopy = JSON.parse(JSON.stringify(this.combinations[0][combos]));
                        // generate the schedules
                        this.generateSchedules([], combocopy);
                        this.possibleschedulescopy = JSON.parse(JSON.stringify(this.possibleschedules));
                        if (this.combinations.length > 1) {
                            console.log("Processing further groups");
                            this.possibleschedules = [];
                            // We have to add the other groups: extend every schedule
                            // found so far with each combination of the next group.
                            for (var group = 1; group < this.combinations.length; group++) {
                                for (var newcombo in this.combinations[group]) {
                                    // for every previous schedule
                                    // TODO: If this starts to become slow, we might want to apply some heuristics
                                    for (var possibleschedule in this.possibleschedulescopy) {
                                        var combocopy = JSON.parse(JSON.stringify(this.combinations[group][newcombo]));
                                        this.generateSchedules(this.possibleschedulescopy[possibleschedule], combocopy);
                                    }
                                }
                                if (group < (this.combinations.length-1)) {
                                    // clear the schedules (we don't want partially working schedules)
                                    this.possibleschedulescopy = JSON.parse(JSON.stringify(this.possibleschedules));
                                    this.possibleschedules = [];
                                }
                            }
                        }
                    }
                }
            },
            /*
                Pushes every combination given the type of groups
            */
            findCombinations: function () {
                this.combinations = [];
                for (var group in this.classes) {
                    var thisgroup = this.classes[group];
                    // type = how many of the group's courses must be taken
                    // (0 means "all of them").
                    var type = thisgroup["type"];
                    // figure out the length of the courses
                    var coursekeys = Object.keys(thisgroup["courses"]);
                    if (coursekeys.length > 0) {
                        // there must be courses selected
                        if (type == 0 || type > coursekeys.length) {
                            // they selected all of or they wanted more courses than chosen
                            type = coursekeys.length;
                        }
                        // convert the courses to an array
                        var thesecourses = [];
                        for (var course in thisgroup["courses"]) {
                            thisgroup["courses"][course]["name"] = course;
                            thesecourses.push(thisgroup["courses"][course]);
                        }
                        // push the combinations (all size-`type` subsets)
                        this.combinations.push(this.k_combinations(thesecourses, type));
                    }
                }
            },
            generateSchedules: function (schedule, queue) {
                /*
                    Given a wanted class queue and current schedule, this method will recursively find every schedule that doesn't conflict
                */
                var timeconflict = false;
                if (queue.length == 0) {
                    // we found a successful schedule, push it
                    // we need to make a copy since the higher depths will undo the actions
                    this.possibleschedules.push(JSON.parse(JSON.stringify(schedule)));
                }
                else {
                    if (schedule.length > 1) {
                        // TODO: REFACTOR NEEDED
                        // Check whether the most recent index has a time conflict with any of the others
                        for (var x = 0; x < schedule.length-1; x++) {
                            var thistimes = schedule[x]["times"];
                            for (var time in thistimes) {
                                var thistime = thistimes[time];
                                // compare to last
                                for (var othertime in schedule[schedule.length-1]["times"]) {
                                    var othertime = schedule[schedule.length-1]["times"][othertime];
                                    // check if any of the days between them are the same
                                    for (var day in thistime[0]) {
                                        var day = thistime[0][day];
                                        if (othertime[0].indexOf(day) > -1) {
                                            // same day, check for time conflict
                                            if (this.isConflicting(thistime[1], othertime[1])) {
                                                timeconflict = true;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    if (schedule.length > 1) {
                        // if there are group numbers, make sure all classes are in the same group
                        // Some Unis require your tutorials to match the specific lecture etc...
                        // we only need to look at the most recent and second most recent groups
                        // since classes that belong to the same course are appended consecutively
                        if (schedule[schedule.length-1]["name"] == schedule[schedule.length-2]["name"]) {
                            // make sure they have the same group number
                            if (schedule[schedule.length-1]["group"] != schedule[schedule.length-2]["group"]) {
                                // we have a conflict
                                timeconflict = true;
                            }
                        }
                    }
                    if (timeconflict == false) {
                        // we can continue
                        if (Object.keys(queue[0]["types"]).length > 0) {
                            // find an open type (e.g. lecture, tutorial, lab)
                            // still to fulfill for the course at the queue head
                            var foundType = false;
                            for (var type in queue[0]["types"]) {
                                if (queue[0]["types"][type] == true) {
                                    // they chose a general class to fulfill
                                    foundType = type;
                                    break;
                                }
                                else if (queue[0]["types"][type] != false) {
                                    // they chose a specific class to fulfill
                                    // add the specific class
                                    // find the class
                                    for (var classv in queue[0]["obj"]["classes"]) {
                                        var thisclass = queue[0]["obj"]["classes"][classv];
                                        if (thisclass["id"] == queue[0]["types"][type]) {
                                            // we found the class obj, add it to the schedule
                                            schedule.push(thisclass);
                                            // remove the type from the queue
                                            delete queue[0]["types"][type];
                                            // recursively call the generator
                                            this.generateSchedules(schedule, queue);
                                            // remove the class (backtrack)
                                            schedule.pop();
                                            // add the type again
                                            queue[0]["types"][type] = thisclass["id"];
                                            break;
                                        }
                                    }
                                    break;
                                }
                            }
                            if (foundType != false) {
                                // remove the type
                                delete queue[0]["types"][foundType];
                                // we need to iterate through the classes, find which ones match this type
                                for (var classv in queue[0]["obj"]["classes"]) {
                                    var thisclass = queue[0]["obj"]["classes"][classv];
                                    if (thisclass["type"] == foundType) {
                                        // Push the class
                                        schedule.push(thisclass);
                                        // recursively go down a depth
                                        this.generateSchedules(schedule, queue);
                                        // pop the class we added (backtrack)
                                        schedule.pop();
                                    }
                                }
                                queue[0]["types"][foundType] = true;
                            }
                        }
                        else {
                            // we've already found all the types for this class, move on to the next
                            // remove this course
                            var thisitem = queue.shift();
                            this.generateSchedules(schedule, queue);
                            // add the item back
                            queue.unshift(thisitem);
                        }
                    }
                }
            },
            isConflicting: function (time1, time2) {
                // time1 and time2 are arrays with the first index being the total minutes
                // since 12:00AM that day of the starttime and the second being the endtime
                // ex. [570, 645] and [590, 740]
                // We check whether the end time of time2 is greater than the start time of time1
                // and whether the end time of time1 is greater than the start time of time2
                // if so, there is a conflict
                if (time1[1] > time2[0] && time2[1] > time1[0]) {
                    return true;
                }
                else {
                    return false;
                }
            },
            k_combinations: function (set, k) {
                /**
                 * Copyright 2012 Akseli Palén.
                 * Created 2012-07-15.
                 * Licensed under the MIT license.
                 *
                 * <license>
                 * Permission is hereby granted, free of charge, to any person obtaining
                 * a copy of this software and associated documentation files
                 * (the "Software"), to deal in the Software without restriction,
                 * including without limitation the rights to use, copy, modify, merge,
                 * publish, distribute, sublicense, and/or sell copies of the Software,
                 * and to permit persons to whom the Software is furnished to do so,
                 * subject to the following conditions:
                 *
                 * The above copyright notice and this permission notice shall be
                 * included in all copies or substantial portions of the Software.
                 *
                 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
                 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
                 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
                 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
                 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
                 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
                 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
                 * SOFTWARE.
                 * </license>
                 *
                 * Implements functions to calculate combinations of elements in JS Arrays.
                 *
                 * Functions:
                 *   k_combinations(set, k) -- Return all k-sized combinations in a set
                 *   combinations(set) -- Return all combinations of the set
                 */
                var i, j, combs, head, tailcombs;
                // There is no way to take e.g. sets of 5 elements from
                // a set of 4.
                if (k > set.length || k <= 0) {
                    return [];
                }
                // K-sized set has only one K-sized subset.
                if (k == set.length) {
                    return [set];
                }
                // There is N 1-sized subsets in a N-sized set.
                if (k == 1) {
                    combs = [];
                    for (i = 0; i < set.length; i++) {
                        combs.push([set[i]]);
                    }
                    return combs;
                }
                // Assert {1 < k < set.length}
                // Algorithm description:
                // To get k-combinations of a set, we want to join each element
                // with all (k-1)-combinations of the other elements. The set of
                // these k-sized sets would be the desired result. However, as we
                // represent sets with lists, we need to take duplicates into
                // account. To avoid producing duplicates and also unnecessary
                // computing, we use the following approach: each element i
                // divides the list into three: the preceding elements, the
                // current element i, and the subsequent elements. For the first
                // element, the list of preceding elements is empty. For element i,
                // we compute the (k-1)-computations of the subsequent elements,
                // join each with the element i, and store the joined to the set of
                // computed k-combinations. We do not need to take the preceding
                // elements into account, because they have already been the i:th
                // element so they are already computed and stored. When the length
                // of the subsequent list drops below (k-1), we cannot find any
                // (k-1)-combs, hence the upper limit for the iteration:
                combs = [];
                for (i = 0; i < set.length - k + 1; i++) {
                    // head is a list that includes only our current element.
                    head = set.slice(i, i + 1);
                    // We take smaller combinations from the subsequent elements
                    tailcombs = this.k_combinations(set.slice(i + 1), k - 1);
                    // For each (k-1)-combination we join it with the current
                    // and store it to the set of k-combinations.
                    for (j = 0; j < tailcombs.length; j++) {
                        combs.push(head.concat(tailcombs[j]));
                    }
                }
                return combs;
            }
        });
        window.calendar.doneLoading(function () {
            // only show the loader if the generation is taking longer than 500ms
            // since the animations for it would take longer than the actual gen
            setTimeout(function () {
                if (self.doneGenerating == false) window.calendar.startLoading("Generating Schedules...");
            }, 500);
            // Spawn the generator
            schedgenerator.init(self.classes, function(result) {
                console.log("Web worker finished generating schedules");
                self.possibleschedules = result;
                self.doneGenerating = true;
                console.log(result);
                // Now score and sort them
                self.schedSorter();
            });
        })
    }
/*
Spawns a web worker that sorts and scores the current possibleschedules
*/
    schedSorter() {
        var self = this;
        // Get the user's scoring preferences (slider snapshot on `this`).
        this.getPreferences();
        // Instantiate the sorter
        // operative() runs the scoring/sorting in a web worker off the UI thread.
        var schedSort = operative({
            possibleschedules: [],
            // Worker entry point: scores every schedule, sorts descending by
            // score, and returns the sorted list via callback.  After this,
            // each schedule array has its numeric score at index 0.
            init: function(schedules, morningSlider, nightSlider, consecutiveSlider, rmpSlider, rmpData, rmpAvg, callback) {
                // Set local variables in the blob
                this.morningSlider = morningSlider;
                this.nightSlider = nightSlider;
                this.consecutiveSlider = consecutiveSlider;
                this.rmpSlider = rmpSlider;
                this.rmpData = rmpData;
                this.rmpAvg = rmpAvg;
                // Add the scores for each schedules
                for (var schedule in schedules) {
                    var thisschedule = schedules[schedule];
                    // add the score to the first index
                    thisschedule.unshift(this.scoreSchedule(thisschedule));
                }
                // Now sort (descending by the score at index 0)
                schedules.sort(this.compareSchedules);
                callback(schedules);
            },
            /*
                Compare function for the sorting algorithm
            */
            compareSchedules: function (a, b) {
                if (a[0] > b[0]) {
                    return -1;
                }
                if (b[0] > a[0]) {
                    return 1;
                }
                // a must be equal to b
                return 0;
            },
            /*
                Returns a numerical score given a schedule that defines how "good" it is given the user's preferences
            */
            scoreSchedule: function (schedule) {
                var thisscore = 0;
                var totalrating = 0;
                var totalteachers = 0;
                for (var classv in schedule) {
                    var thisclass = schedule[classv];
                    // add a score based upon the teachers
                    totalteachers += thisclass["teachers"].length;
                    for (var teacher in thisclass["teachers"]) {
                        teacher = thisclass["teachers"][teacher];
                        // Only trust RateMyProf ratings backed by >2 reviews.
                        if (this.rmpData[teacher] != undefined && this.rmpData[teacher]["numratings"] > 2) {
                            totalrating += this.rmpData[teacher]["rating"];
                        }
                        else {
                            // just give them an average rating
                            totalrating += this.rmpAvg;
                        }
                    }
                }
                // NOTE(review): if a schedule has no teachers at all this is
                // 0/0 = NaN — confirm every class carries a teachers array.
                var avgrmp = totalrating/totalteachers * 3;
                if (this.rmpSlider > 0) {
                    // make this value worth more to the total score
                    avgrmp *= (1 + this.rmpSlider/20);
                }
                //console.log("AVG RMP: " + avgrmp);
                thisscore += avgrmp;
                // We want to transform the data into a usable format for easily seeing how far apart each class is
                var formattedschedule = this.formatScheduleInOrder(schedule);
                var classtimescore = 0.0;
                for (var day in formattedschedule) {
                    var day = formattedschedule[day];
                    // Min/max time of the classes today
                    var mintime = 9999999;
                    var maxtime = 0;
                    for (var x = 0; x < day.length; x++) {
                        var time = day[x];
                        if (time[0] < mintime) {
                            mintime = time[0];
                        }
                        if (time[1] > maxtime) {
                            maxtime = time[1];
                        }
                        // check if it starts in the morning (by 12:00PM)
                        if (time[0] <= 720) {
                            classtimescore += this.morningSlider/50;
                        }
                        // check if it starts in the night (5:00PM or later)
                        if (time[0] >= 1020) {
                            classtimescore += this.nightSlider/50;
                        }
                        // check for consecutive classes
                        // make sure there is a class next
                        if ((x+1) < day.length && this.consecutiveSlider != 0) {
                            // get the time of the next class
                            var nexttime = day[x+1];
                            // get the difference between the end of class1 and start of class2
                            var timediff = nexttime[0] - time[1];
                            var thisconsecscore = 0;
                            if (this.consecutiveSlider > 0) {
                                var thisconsecscore = 0.2;
                            }
                            else {
                                var thisconsecscore = -0.2;
                            }
                            // Larger gaps push the score toward the user's
                            // (anti-)consecutive preference.
                            thisconsecscore += (timediff/10) * (0.006 * -(this.consecutiveSlider/10));
                            //console.log("Consecutive: " + thisconsecscore);
                            classtimescore += thisconsecscore;
                        }
                    }
                    // we want there to be less time spent at school overall for a given day
                    // the longer the difference, the more penalty there is on the score depending on how much the user values time slots
                    var timediff = maxtime - mintime;
                    if (timediff > 0) {
                        if (this.rmpSlider < 0) {
                            // multiply the value
                            thisscore -= timediff/60 * (1 + -(this.rmpSlider/40));
                        }
                        else {
                            thisscore -= timediff/60 * 1.5;
                        }
                    }
                }
                // The user prioritizes time slots over professors, multiply this value
                if (this.rmpSlider < 0) {
                    // make this value worth more to the total score
                    classtimescore *= 1 + -this.rmpSlider/20;
                }
                thisscore += classtimescore;
                //console.log("Classes score: " + classtimescore);
                //console.log(formattedschedule);
                return thisscore;
            },
            /*
                Formats a given schedule so that it is an array of days with an array of sorted times of each event
            */
            formatScheduleInOrder: function (schedule) {
                // formats a list of events to the appropriate duration
                // the schedule must not have any conflicting events
                var formated = [];
                //console.log(schedule);
                for (var classv in schedule) {
                    var thisclass = schedule[classv];
                    // for each time ([dayIndexes, [start, end]])
                    for (var time in thisclass["times"]) {
                        var thistime = thisclass["times"][time];
                        // for each day in this time
                        for (var day in thistime[0]) {
                            var day = thistime[0][day];
                            // check whether the day index is an array
                            if (!(formated[day] instanceof Array)) {
                                // make it an array
                                formated[day] = [];
                            }
                            if (formated[day].length == 0) {
                                //console.log("Appending " + thistime[1] + " to " + day);
                                // just append the time
                                formated[day].push(thistime[1]);
                            }
                            else {
                                // insertion into the day's time-sorted list
                                // NOTE(review): this pushes into formated[day]
                                // while iterating it with for-in and only breaks
                                // on the splice branch — looks like it can visit
                                // the freshly pushed element; verify with a
                                // schedule whose times arrive out of order.
                                for (var formatedtime in formated[day]) {
                                    // check if the end time of this event is less than the start time of the next event
                                    var thisformatedtime = formated[day][formatedtime];
                                    if (thistime[1][1] < thisformatedtime[0]) {
                                        //console.log("Adding " + thistime[1] + " to " + day);
                                        formated[day].splice(parseInt(formatedtime), 0, thistime[1]);
                                        break;
                                    }
                                    else {
                                        if (formated[day][parseInt(formatedtime)+1] == undefined) {
                                            //console.log("Pushing " + thistime[1] + " to the end of " + day);
                                            // push it to the end
                                            formated[day].push(thistime[1]);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                return formated
            }
        });
        // Spawn the web worker
        schedSort.init(this.possibleschedules, this.morningSlider, this.nightSlider, this.consecutiveSlider, this.rmpSlider, window.classList.rmpdata, window.classList.rmpavg,
            function(result) {
                console.log("Web worker finished sorting schedules");
                console.log(result);
                // Replace the reference with the sorted schedules
                self.possibleschedules = result;
                window.calendar.doneLoading(function () {
                    self.processSchedules(result);
                });
            }
        );
    }
/*
Adds additional course info to each class for easier processing after schedules have been generated
*/
addCourseInfo() {
for (var group in this.classes) {
var thisgroup = this.classes[group];
var thiscourses = thisgroup["courses"];
for (var course in thiscourses) {
var thiscourse = thiscourses[course];
// convert the times of each class
var classobj = thiscourse["obj"]["classes"];
for (var classv in classobj) {
var thisclass = classobj[classv];
thisclass["name"] = course;
}
}
}
}
/*
Converts the times on the desired classes to an easily processable format
*/
convertTimes() {
for (var group in this.classes) {
var thisgroup = this.classes[group];
var thiscourses = thisgroup["courses"];
for (var course in thiscourses) {
var thiscourse = thiscourses[course];
// convert the times of each class
var classobj = thiscourse["obj"]["classes"];
for (var classv in classobj) {
var thisclass = classobj[classv];
// convert time
for (var time in thisclass["times"]) {
thisclass["times"][time] = Generator.convertTime(thisclass["times"][time]);
}
}
}
}
}
/*
Converts a time to total minutes since 12:00AM on that day
*/
static convertToTotalMinutes(time) {
// Format XX:XXPM or AM
var type = time.slice(-2);
var hours = parseInt(time.split(":")[0]);
if (type == "PM" && hours < 12) {
hours += 12;
}
var minutes = time.split(":")[1];
minutes = minutes.substr(0, minutes.length-2);
minutes = parseInt(minutes);
return hours * 60 + minutes;
}
/*
Converts the total minutes from 12:00AM on a given day to the timestamp
*/
static totalMinutesToTime(time) {
var minutes = time % 60;
var hours = Math.floor(time/60);
return hours + ":" + minutes;
}
/*
Converts a time of the form Mo 12:00PM-1:00PM to an array of days and total minutes
*/
static convertTime(time) {
// first index are the days (integer with Monday being 0)
// second index is the array with time
var newtime = [];
// Map the days
var map = {
"Mo": 0,
"Tu": 1,
"We": 2,
"Th": 3,
"Fr": 4,
"Sa": 5,
"Su": 6
}
if (time.indexOf(" - ") > -1) {
var timesplit = time.split(" - ");
var endtime = Generator.convertToTotalMinutes(timesplit[1]);
var starttime = Generator.convertToTotalMinutes(timesplit[0].split(" ")[1]);
// get the days
var days = timesplit[0].split(" ")[0];
var dayarray = [];
for (var day in map) {
if (days.indexOf(day) > -1) {
dayarray.push(map[day]);
}
}
}
else {
// We don't know how to process this time
// This can happen with courses like web based courses with a time of "TBA"
newtime.push([-1]);
newtime.push([0, 0]);
}
newtime.push(dayarray);
newtime.push([starttime, endtime]);
return newtime;
}
/*
Processes a list of successful scored schedules and sets up the calendar
*/
processSchedules(schedules) {
// update the total
window.calendar.setTotalGenerated(schedules.length);
// update current
if (schedules.length == 0) window.calendar.setCurrentIndex(-1);
else if (schedules.length > 0) window.calendar.setCurrentIndex(0);
window.calendar.clearEvents();
if (schedules.length > 0) {
// populate the first one
window.calendar.displaySchedule(schedules[0]);
}
}
/*
Returns the schedule at the specified index
*/
getSchedule(index) {
if ((this.possibleschedules.length-1) >= index) {
return this.possibleschedules[index];
}
else {
return false;
}
}
/*
Sets the local preference values with the current state of the sliders
*/
    getPreferences() {
        // Snapshots the preference sliders onto this instance so a whole
        // scoring pass works from one consistent set of values.
        this.morningSlider = preferences.getMorningValue();
        this.nightSlider = preferences.getNightValue();
        this.consecutiveSlider = preferences.getConsecutiveValue();
        this.rmpSlider = preferences.getRMPValue();
    }
} | js/Generator.js | class Generator {
    constructor(classes) {
        // Deep copy of the chosen classes; mutated in place by the
        // normalization passes below without touching the caller's object.
        this.classes = jQuery.extend(true, {}, classes);
        // Convert time strings to minute tuples, then stamp course names.
        this.convertTimes();
        this.addCourseInfo();
        console.log(this.classes); // NOTE(review): debug logging left enabled
        // Kick off schedule generation in a web worker.
        this.schedGen();
    }
/*
Spawns a web worker that generates possible schedules given classes
*/
schedGen() {
var self = this;
self.doneGenerating = false;
// Instantiate the generator
var schedgenerator = operative({
possibleschedules: [],
combinations: [],
classes: {},
init: function(classes, callback) {
this.classes = classes;
this.findCombinations();
this.iterateCombos();
callback(this.possibleschedules);
},
/*
Iterates through every group combinations to find possible non-conflicting schedules
*/
iterateCombos: function () {
// reset possible schedules
this.possibleschedules = [];
if (this.combinations.length > 0) {
// there must be more than 0 combos for a schedule
for (var combos in this.combinations[0]) {
// create a copy to work with
var combocopy = JSON.parse(JSON.stringify(this.combinations[0][combos]));
this.generateSchedules([], combocopy);
this.possibleschedulescopy = JSON.parse(JSON.stringify(this.possibleschedules));
if (this.combinations.length > 1) {
console.log("Processing further groups");
this.possibleschedules = [];
// We have to add the other groups
for (var group = 1; group < this.combinations.length; group++) {
for (var newcombo in this.combinations[group]) {
// for every previous schedule
// TODO: If this starts to become slow, we might want to apply some heuristics
for (var possibleschedule in this.possibleschedulescopy) {
var combocopy = JSON.parse(JSON.stringify(this.combinations[group][newcombo]));
this.generateSchedules(this.possibleschedulescopy[possibleschedule], combocopy);
}
}
if (group < (this.combinations.length-1)) {
// clear the schedules (we don't want partially working schedules)
this.possibleschedulescopy = JSON.parse(JSON.stringify(this.possibleschedules));
this.possibleschedules = [];
}
}
}
}
}
},
/*
Pushes every combination given the type of groups
*/
findCombinations: function () {
this.combinations = [];
for (var group in this.classes) {
var thisgroup = this.classes[group];
var type = thisgroup["type"];
// figure out the length of the courses
var coursekeys = Object.keys(thisgroup["courses"]);
if (coursekeys.length > 0) {
// there must be courses selected
if (type == 0 || type > coursekeys.length) {
// they selected all of or they wanted more courses than chosen
type = coursekeys.length;
}
// convert the courses to an array
var thesecourses = [];
for (var course in thisgroup["courses"]) {
thisgroup["courses"][course]["name"] = course;
thesecourses.push(thisgroup["courses"][course]);
}
// push the combinations
this.combinations.push(this.k_combinations(thesecourses, type));
}
}
},
generateSchedules: function (schedule, queue) {
/*
Given a wanted class queue and current schedule, this method will recursively find every schedule that doesn't conflict
*/
var timeconflict = false;
if (queue.length == 0) {
// we found a successful schedule, push it
this.possibleschedules.push(schedule);
}
else {
if (schedule.length > 1) {
// TODO: REFACTOR NEEDED
// Check whether the most recent index has a time conflict with any of the others
for (var x = 0; x < schedule.length-1; x++) {
var thistimes = schedule[x]["times"];
for (var time in thistimes) {
var thistime = thistimes[time];
// compare to last
for (var othertime in schedule[schedule.length-1]["times"]) {
var othertime = schedule[schedule.length-1]["times"][othertime];
// check if any of the days between them are the same
for (var day in thistime[0]) {
var day = thistime[0][day];
if (othertime[0].indexOf(day) > -1) {
// same day, check for time conflict
if (this.isConflicting(thistime[1], othertime[1])) {
timeconflict = true;
}
}
}
}
}
}
}
if (schedule.length > 1) {
// if there are group numbers, make sure all classes are in the same group
// Some Unis require your tutorials to match the specific lecture etc...
// we only need to look at the most recent and second most recent groups
// since classes that belong to the same course are appended consecutively
if (schedule[schedule.length-1]["name"] == schedule[schedule.length-2]["name"]) {
// make sure they have the same group number
if (schedule[schedule.length-1]["group"] != schedule[schedule.length-2]["group"]) {
// we have a conflict
timeconflict = true;
}
}
}
if (timeconflict == false) {
// we can continue
if (Object.keys(queue[0]["types"]).length > 0) {
// find an open type
var foundType = false;
for (var type in queue[0]["types"]) {
if (queue[0]["types"][type] == true) {
// they chose a general class to fulfill
foundType = type;
break;
}
else if (queue[0]["types"][type] != false) {
// they chose a specific class to fulfill
// add the specific class
// find the class
for (var classv in queue[0]["obj"]["classes"]) {
var thisclass = queue[0]["obj"]["classes"][classv];
if (thisclass["id"] == queue[0]["types"][type]) {
// we found the class obj, add it to the schedule
schedule.push(thisclass);
// remove the type from the queue
delete queue[0]["types"][type];
// recursively call the generator
this.generateSchedules(JSON.parse(JSON.stringify(schedule)), JSON.parse(JSON.stringify(queue)));
break;
}
}
break;
}
}
if (foundType != false) {
// remove the type
delete queue[0]["types"][foundType];
// we need to iterate through the classes, find which ones match this type
for (var classv in queue[0]["obj"]["classes"]) {
var thisclass = queue[0]["obj"]["classes"][classv];
if (thisclass["type"] == foundType) {
// Create a copy of the schedule and push this class
var thisschedule = JSON.parse(JSON.stringify(schedule));
thisschedule.push(thisclass);
this.generateSchedules(thisschedule, JSON.parse(JSON.stringify(queue)));
}
}
}
}
else {
// we've already found all the types for this class, move on to the next
// remove this course
queue.shift();
this.generateSchedules(schedule, queue);
}
}
}
},
isConflicting: function (time1, time2) {
// time1 and time2 are arrays with the first index being the total minutes
// since 12:00AM that day of the starttime and the second being the endtime
// ex. [570, 645] and [590, 740]
// We check whether the end time of time2 is greater than the start time of time1
// and whether the end time of time1 is greater than the start time of time2
// if so, there is a conflict
if (time1[1] > time2[0] && time2[1] > time1[0]) {
return true;
}
else {
return false;
}
},
k_combinations: function (set, k) {
/**
* Copyright 2012 Akseli Palén.
* Created 2012-07-15.
* Licensed under the MIT license.
*
* <license>
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* </lisence>
*
* Implements functions to calculate combinations of elements in JS Arrays.
*
* Functions:
* k_combinations(set, k) -- Return all k-sized combinations in a set
* combinations(set) -- Return all combinations of the set
*/
var i, j, combs, head, tailcombs;
// There is no way to take e.g. sets of 5 elements from
// a set of 4.
if (k > set.length || k <= 0) {
return [];
}
// K-sized set has only one K-sized subset.
if (k == set.length) {
return [set];
}
// There is N 1-sized subsets in a N-sized set.
if (k == 1) {
combs = [];
for (i = 0; i < set.length; i++) {
combs.push([set[i]]);
}
return combs;
}
// Assert {1 < k < set.length}
// Algorithm description:
// To get k-combinations of a set, we want to join each element
// with all (k-1)-combinations of the other elements. The set of
// these k-sized sets would be the desired result. However, as we
// represent sets with lists, we need to take duplicates into
// account. To avoid producing duplicates and also unnecessary
// computing, we use the following approach: each element i
// divides the list into three: the preceding elements, the
// current element i, and the subsequent elements. For the first
// element, the list of preceding elements is empty. For element i,
// we compute the (k-1)-computations of the subsequent elements,
// join each with the element i, and store the joined to the set of
// computed k-combinations. We do not need to take the preceding
// elements into account, because they have already been the i:th
// element so they are already computed and stored. When the length
// of the subsequent list drops below (k-1), we cannot find any
// (k-1)-combs, hence the upper limit for the iteration:
combs = [];
for (i = 0; i < set.length - k + 1; i++) {
// head is a list that includes only our current element.
head = set.slice(i, i + 1);
// We take smaller combinations from the subsequent elements
tailcombs = this.k_combinations(set.slice(i + 1), k - 1);
// For each (k-1)-combination we join it with the current
// and store it to the set of k-combinations.
for (j = 0; j < tailcombs.length; j++) {
combs.push(head.concat(tailcombs[j]));
}
}
return combs;
}
});
window.calendar.doneLoading(function () {
// only show the loader if the generation is taking longer than 500ms
// since the animations for it would take longer than the actual gen
setTimeout(function () {
if (self.doneGenerating == false) window.calendar.startLoading("Generating Schedules...");
}, 500);
// Spawn the generator
schedgenerator.init(self.classes, function(result) {
console.log("Web worker finished generating schedules");
self.possibleschedules = result;
self.doneGenerating = true;
// Now score and sort them
self.schedSorter();
});
})
}
/*
Spawns a web worker that sorts and scores the current possibleschedules
*/
schedSorter() {
var self = this;
// Get the user's scoring preferences
this.getPreferences();
// Instantiate the sorter
var schedSort = operative({
possibleschedules: [],
init: function(schedules, morningSlider, nightSlider, consecutiveSlider, rmpSlider, rmpData, rmpAvg, callback) {
// Set local variables in the blob
this.morningSlider = morningSlider;
this.nightSlider = nightSlider;
this.consecutiveSlider = consecutiveSlider;
this.rmpSlider = rmpSlider;
this.rmpData = rmpData;
this.rmpAvg = rmpAvg;
// Add the scores for each schedules
for (var schedule in schedules) {
var thisschedule = schedules[schedule];
// add the score to the first index
thisschedule.unshift(this.scoreSchedule(thisschedule));
}
// Now sort
schedules.sort(this.compareSchedules);
callback(schedules);
},
/*
Compare function for the sorting algorithm
*/
compareSchedules: function (a, b) {
if (a[0] > b[0]) {
return -1;
}
if (b[0] > a[0]) {
return 1;
}
// a must be equal to b
return 0;
},
/*
Returns a numerical score given a schedule that defines how "good" it is given the user's preferences
*/
scoreSchedule: function (schedule) {
var thisscore = 0;
var totalrating = 0;
var totalteachers = 0;
for (var classv in schedule) {
var thisclass = schedule[classv];
// add a score based upon the teachers
totalteachers += thisclass["teachers"].length;
for (var teacher in thisclass["teachers"]) {
teacher = thisclass["teachers"][teacher];
if (this.rmpData[teacher] != undefined && this.rmpData[teacher]["numratings"] > 2) {
totalrating += this.rmpData[teacher]["rating"];
}
else {
// just give them an average rating
totalrating += this.rmpAvg;
}
}
}
var avgrmp = totalrating/totalteachers * 3;
if (this.rmpSlider > 0) {
// make this value worth more to the total score
avgrmp *= (1 + this.rmpSlider/20);
}
//console.log("AVG RMP: " + avgrmp);
thisscore += avgrmp;
// We want to transform the data into a usuable format for easily seeing how apart each class is
var formattedschedule = this.formatScheduleInOrder(schedule);
var classtimescore = 0.0;
for (var day in formattedschedule) {
var day = formattedschedule[day];
// Min/max time of the classes today
var mintime = 9999999;
var maxtime = 0;
for (var x = 0; x < day.length; x++) {
var time = day[x];
if (time[0] < mintime) {
mintime = time[0];
}
if (time[1] > maxtime) {
maxtime = time[1];
}
// check if it starts in the mourning
if (time[0] <= 720) {
classtimescore += this.morningSlider/50;
}
// check if it starts in the night
if (time[0] >= 1020) {
classtimescore += this.nightSlider/50;
}
// check for consecutive classes
// make sure there is a class next
if ((x+1) < day.length && this.consecutiveSlider != 0) {
// get the time of the next class
var nexttime = day[x+1];
// get the difference between the end of class1 and start of class2
var timediff = nexttime[0] - time[1];
var thisconsecscore = 0;
if (this.consecutiveSlider > 0) {
var thisconsecscore = 0.2;
}
else {
var thisconsecscore = -0.2;
}
thisconsecscore += (timediff/10) * (0.006 * -(this.consecutiveSlider/10));
//console.log("Consecutive: " + thisconsecscore);
classtimescore += thisconsecscore;
}
}
// we want there to be less time spent at school overall for a given day
// the longer the difference, the more penalty there is on the score depending on how much the user values time slots
var timediff = maxtime - mintime;
if (timediff > 0) {
if (this.rmpSlider < 0) {
// multiply the value
thisscore -= timediff/60 * (1 + -(this.rmpSlider/40));
}
else {
thisscore -= timediff/60 * 1.5;
}
}
}
// The user prioritizes time slots over professors, multiply this value
if (this.rmpSlider < 0) {
// make this value worth more to the total score
classtimescore *= 1 + -this.rmpSlider/20;
}
thisscore += classtimescore;
//console.log("Classes score: " + classtimescore);
//console.log(formattedschedule);
return thisscore;
},
/*
Formats a given schedule so that it is an array of days with an array of sorted times of each event
*/
formatScheduleInOrder: function (schedule) {
// formats a list of events to the appropriate duration
// the schedule must not have any conflicting events
var formated = [];
//console.log(schedule);
for (var classv in schedule) {
var thisclass = schedule[classv];
// for each time
for (var time in thisclass["times"]) {
var thistime = thisclass["times"][time];
// for each day in this time
for (var day in thistime[0]) {
var day = thistime[0][day];
// check whether the day index is an array
if (!(formated[day] instanceof Array)) {
// make it an array
formated[day] = [];
}
if (formated[day].length == 0) {
//console.log("Appending " + thistime[1] + " to " + day);
// just append the time
formated[day].push(thistime[1]);
}
else {
// iterate through each time already there
for (var formatedtime in formated[day]) {
// check if the end time of this event is less than the start time of the next event
var thisformatedtime = formated[day][formatedtime];
if (thistime[1][1] < thisformatedtime[0]) {
//console.log("Adding " + thistime[1] + " to " + day);
formated[day].splice(parseInt(formatedtime), 0, thistime[1]);
break;
}
else {
if (formated[day][parseInt(formatedtime)+1] == undefined) {
//console.log("Pushing " + thistime[1] + " to the end of " + day);
// push it to the end
formated[day].push(thistime[1]);
}
}
}
}
}
}
}
return formated
}
});
// Spawn the web worker
schedSort.init(this.possibleschedules, this.morningSlider, this.nightSlider, this.consecutiveSlider, this.rmpSlider, window.classList.rmpdata, window.classList.rmpavg,
function(result) {
console.log("Web worker finished sorting schedules");
console.log(result);
// Replace the reference with the sorted schedules
self.possibleschedules = result;
window.calendar.doneLoading(function () {
self.processSchedules(result);
});
}
);
}
/*
Adds additional course info to each class for easier processing after schedules have been generated
*/
addCourseInfo() {
for (var group in this.classes) {
var thisgroup = this.classes[group];
var thiscourses = thisgroup["courses"];
for (var course in thiscourses) {
var thiscourse = thiscourses[course];
// convert the times of each class
var classobj = thiscourse["obj"]["classes"];
for (var classv in classobj) {
var thisclass = classobj[classv];
thisclass["name"] = course;
}
}
}
}
/*
Converts the times on the desired classes to an easily processable format
*/
convertTimes() {
for (var group in this.classes) {
var thisgroup = this.classes[group];
var thiscourses = thisgroup["courses"];
for (var course in thiscourses) {
var thiscourse = thiscourses[course];
// convert the times of each class
var classobj = thiscourse["obj"]["classes"];
for (var classv in classobj) {
var thisclass = classobj[classv];
// convert time
for (var time in thisclass["times"]) {
thisclass["times"][time] = Generator.convertTime(thisclass["times"][time]);
}
}
}
}
}
/*
Converts a time to total minutes since 12:00AM on that day
*/
static convertToTotalMinutes(time) {
// Format XX:XXPM or AM
var type = time.slice(-2);
var hours = parseInt(time.split(":")[0]);
if (type == "PM" && hours < 12) {
hours += 12;
}
var minutes = time.split(":")[1];
minutes = minutes.substr(0, minutes.length-2);
minutes = parseInt(minutes);
return hours * 60 + minutes;
}
/*
Converts the total minutes from 12:00AM on a given day to the timestamp
*/
static totalMinutesToTime(time) {
var minutes = time % 60;
var hours = Math.floor(time/60);
return hours + ":" + minutes;
}
/*
Converts a time of the form Mo 12:00PM-1:00PM to an array of days and total minutes
*/
static convertTime(time) {
// first index are the days (integer with Monday being 0)
// second index is the array with time
var newtime = [];
// Map the days
var map = {
"Mo": 0,
"Tu": 1,
"We": 2,
"Th": 3,
"Fr": 4,
"Sa": 5,
"Su": 6
}
if (time.indexOf(" - ") > -1) {
var timesplit = time.split(" - ");
var endtime = Generator.convertToTotalMinutes(timesplit[1]);
var starttime = Generator.convertToTotalMinutes(timesplit[0].split(" ")[1]);
// get the days
var days = timesplit[0].split(" ")[0];
var dayarray = [];
for (var day in map) {
if (days.indexOf(day) > -1) {
dayarray.push(map[day]);
}
}
}
else {
// We don't know how to process this time
// This can happen with courses like web based courses with a time of "TBA"
newtime.push([-1]);
newtime.push([0, 0]);
}
newtime.push(dayarray);
newtime.push([starttime, endtime]);
return newtime;
}
/*
Processes a list of successful scored schedules and sets up the calendar
*/
processSchedules(schedules) {
// update the total
window.calendar.setTotalGenerated(schedules.length);
// update current
if (schedules.length == 0) window.calendar.setCurrentIndex(-1);
else if (schedules.length > 0) window.calendar.setCurrentIndex(0);
window.calendar.clearEvents();
if (schedules.length > 0) {
// populate the first one
window.calendar.displaySchedule(schedules[0]);
}
}
/*
Returns the schedule at the specified index
*/
getSchedule(index) {
if ((this.possibleschedules.length-1) >= index) {
return this.possibleschedules[index];
}
else {
return false;
}
}
/*
Sets the local preference values with the current state of the sliders
*/
getPreferences() {
this.morningSlider = preferences.getMorningValue();
this.nightSlider = preferences.getNightValue();
this.consecutiveSlider = preferences.getConsecutiveValue();
this.rmpSlider = preferences.getRMPValue();
}
} | Heavily optimized the generator algorithm
Removed the needless deep copying and replaced it with just undoing
actions throughout the hierarchy.
On some sample data, while generating ~9000 successful non-conflicting
schedules, there was a performance increase of around 5x.
| js/Generator.js | Heavily optimized the generator algorithm | <ide><path>s/Generator.js
<ide> this.convertTimes();
<ide> this.addCourseInfo();
<ide>
<del> console.log(this.classes);
<add> //console.log(this.classes);
<ide>
<ide> // Generates the schedules
<ide> this.schedGen();
<ide> for (var combos in this.combinations[0]) {
<ide> // create a copy to work with
<ide> var combocopy = JSON.parse(JSON.stringify(this.combinations[0][combos]));
<add>
<add> // geenrate the schedules
<ide> this.generateSchedules([], combocopy);
<ide>
<ide> this.possibleschedulescopy = JSON.parse(JSON.stringify(this.possibleschedules));
<ide>
<ide> if (queue.length == 0) {
<ide> // we found a successful schedule, push it
<del> this.possibleschedules.push(schedule);
<add> // we need to make a copy since the higher depths will undo the actions
<add> this.possibleschedules.push(JSON.parse(JSON.stringify(schedule)));
<ide> }
<ide> else {
<ide> if (schedule.length > 1) {
<ide> delete queue[0]["types"][type];
<ide>
<ide> // recursively call the generator
<del> this.generateSchedules(JSON.parse(JSON.stringify(schedule)), JSON.parse(JSON.stringify(queue)));
<add> this.generateSchedules(schedule, queue);
<add>
<add> // remove the class
<add> schedule.pop();
<add>
<add> // add the type again
<add> queue[0]["types"][type] = thisclass["id"];
<ide>
<ide> break;
<ide> }
<ide> var thisclass = queue[0]["obj"]["classes"][classv];
<ide>
<ide> if (thisclass["type"] == foundType) {
<del> // Create a copy of the schedule and push this class
<del> var thisschedule = JSON.parse(JSON.stringify(schedule));
<del> thisschedule.push(thisclass);
<del>
<del> this.generateSchedules(thisschedule, JSON.parse(JSON.stringify(queue)));
<add> // Push the class
<add> schedule.push(thisclass);
<add>
<add> // recursively go down a depth
<add> this.generateSchedules(schedule, queue);
<add>
<add> // pop the class we added
<add> schedule.pop();
<ide> }
<ide> }
<add>
<add> queue[0]["types"][foundType] = true;
<ide> }
<ide> }
<ide> else {
<ide> // we've already found all the types for this class, move on to the next
<ide> // remove this course
<del> queue.shift();
<add> var thisitem = queue.shift();
<ide>
<ide> this.generateSchedules(schedule, queue);
<add>
<add> // add the item back
<add> queue.unshift(thisitem);
<ide> }
<ide> }
<ide> }
<ide>
<ide> self.possibleschedules = result;
<ide> self.doneGenerating = true;
<add>
<add> console.log(result);
<ide>
<ide> // Now score and sort them
<ide> self.schedSorter(); |
|
Java | agpl-3.0 | f6437ad15c5a3579a885105b004e01c512936934 | 0 | XVManage/Panel,XVManage/Panel | package de.doridian.xvmanage;
import de.doridian.xvmanage.models.VMNode;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import javax.net.ssl.SSLSocketFactory;
import java.io.*;
import java.net.Socket;
public class XVMAPI {
private static final String API_KEY = Configuration.getString("apiKey");
public static JSONObject apiCall(VMNode node, JSONObject payload) throws IOException {
payload.put("key", API_KEY);
try {
Socket socket = SSLSocketFactory.getDefault().createSocket(node.getIp(), 1532);
socket.setTcpNoDelay(true);
DataInputStream socketInput = new DataInputStream(socket.getInputStream());
DataOutputStream socketOutput = new DataOutputStream(socket.getOutputStream());
ByteArrayOutputStream outputWriting = new ByteArrayOutputStream();
OutputStreamWriter requestWriter = new OutputStreamWriter(outputWriting);
payload.writeJSONString(requestWriter);
requestWriter.close();
byte[] buf = outputWriting.toByteArray();
socketOutput.writeInt(buf.length);
socketOutput.write(buf);
socketOutput.flush();
int len = socketInput.readInt();
buf = new byte[len];
socketInput.readFully(buf);
InputStreamReader responseReader = new InputStreamReader(new ByteArrayInputStream(buf));
JSONParser jsonParser = new JSONParser();
JSONObject responseObject = (JSONObject)jsonParser.parse(responseReader);
socket.close();
return responseObject;
} catch (Exception e) {
e.printStackTrace();
throw new IOException(e);
}
}
}
| src/de/doridian/xvmanage/XVMAPI.java | package de.doridian.xvmanage;
import de.doridian.xvmanage.models.VMNode;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.CipherOutputStream;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.*;
import java.net.Socket;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
public class XVMAPI {
private static final String API_KEY = Configuration.getString("apiKey");
public static JSONObject apiCall(VMNode node, JSONObject payload) throws IOException {
payload.put("key", API_KEY);
try {
Socket socket = new Socket(node.getIp(), 1532);
socket.setTcpNoDelay(true);
DataInputStream socketInput = new DataInputStream(socket.getInputStream());
DataOutputStream socketOutput = new DataOutputStream(socket.getOutputStream());
ByteArrayOutputStream outputWriting = new ByteArrayOutputStream();
OutputStreamWriter requestWriter = new OutputStreamWriter(new GZIPOutputStream(new CipherOutputStream(outputWriting, getCipher(true))));
payload.writeJSONString(requestWriter);
requestWriter.close();
byte[] buf = outputWriting.toByteArray();
socketOutput.writeInt(buf.length);
socketOutput.write(buf);
socketOutput.flush();
int len = socketInput.readInt();
buf = new byte[len];
socketInput.readFully(buf);
InputStreamReader responseReader = new InputStreamReader(new GZIPInputStream(new CipherInputStream(new ByteArrayInputStream(buf), getCipher(false))));
JSONParser jsonParser = new JSONParser();
JSONObject responseObject = (JSONObject)jsonParser.parse(responseReader);
socket.close();
return responseObject;
} catch (Exception e) {
e.printStackTrace();
throw new IOException(e);
}
}
private static final SecretKeySpec secretKey = new SecretKeySpec(XVMUtils.decodeHex(Configuration.getString("apiPSK")), "AES");
private static final IvParameterSpec initVector = new IvParameterSpec(XVMUtils.decodeHex(Configuration.getString("apiIV")), 0, 16);
private static Cipher getCipher(boolean encrypt) throws Exception {
Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
cipher.init(encrypt ? Cipher.ENCRYPT_MODE : Cipher.DECRYPT_MODE, secretKey, initVector);
return cipher;
}
}
| Strip the silly encryption from the panel
| src/de/doridian/xvmanage/XVMAPI.java | Strip the silly encryption from the panel | <ide><path>rc/de/doridian/xvmanage/XVMAPI.java
<ide> import org.json.simple.JSONObject;
<ide> import org.json.simple.parser.JSONParser;
<ide>
<del>import javax.crypto.Cipher;
<del>import javax.crypto.CipherInputStream;
<del>import javax.crypto.CipherOutputStream;
<del>import javax.crypto.spec.IvParameterSpec;
<del>import javax.crypto.spec.SecretKeySpec;
<add>import javax.net.ssl.SSLSocketFactory;
<ide> import java.io.*;
<ide> import java.net.Socket;
<del>import java.util.zip.GZIPInputStream;
<del>import java.util.zip.GZIPOutputStream;
<ide>
<ide> public class XVMAPI {
<ide> private static final String API_KEY = Configuration.getString("apiKey");
<ide> payload.put("key", API_KEY);
<ide>
<ide> try {
<del> Socket socket = new Socket(node.getIp(), 1532);
<add> Socket socket = SSLSocketFactory.getDefault().createSocket(node.getIp(), 1532);
<ide> socket.setTcpNoDelay(true);
<ide>
<ide> DataInputStream socketInput = new DataInputStream(socket.getInputStream());
<ide>
<ide> ByteArrayOutputStream outputWriting = new ByteArrayOutputStream();
<ide>
<del> OutputStreamWriter requestWriter = new OutputStreamWriter(new GZIPOutputStream(new CipherOutputStream(outputWriting, getCipher(true))));
<add> OutputStreamWriter requestWriter = new OutputStreamWriter(outputWriting);
<ide> payload.writeJSONString(requestWriter);
<ide> requestWriter.close();
<ide>
<ide> buf = new byte[len];
<ide> socketInput.readFully(buf);
<ide>
<del> InputStreamReader responseReader = new InputStreamReader(new GZIPInputStream(new CipherInputStream(new ByteArrayInputStream(buf), getCipher(false))));
<add> InputStreamReader responseReader = new InputStreamReader(new ByteArrayInputStream(buf));
<ide>
<ide> JSONParser jsonParser = new JSONParser();
<ide>
<ide> throw new IOException(e);
<ide> }
<ide> }
<del>
<del> private static final SecretKeySpec secretKey = new SecretKeySpec(XVMUtils.decodeHex(Configuration.getString("apiPSK")), "AES");
<del> private static final IvParameterSpec initVector = new IvParameterSpec(XVMUtils.decodeHex(Configuration.getString("apiIV")), 0, 16);
<del>
<del> private static Cipher getCipher(boolean encrypt) throws Exception {
<del> Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
<del> cipher.init(encrypt ? Cipher.ENCRYPT_MODE : Cipher.DECRYPT_MODE, secretKey, initVector);
<del> return cipher;
<del> }
<ide> } |
|
JavaScript | agpl-3.0 | 3a75dfbf7e42ba5534f32d32cb8c3fa296c69f3e | 0 | civicrm/org.civicrm.civicase,civicrm/org.civicrm.civicase,civicrm/org.civicrm.civicase | (function(angular, $, _) {
// "civicaseActivityContactFilters" is a basic skeletal directive.
// Example usage: <div civicase-activity-contact-filters="apiv3ActivityParams"></div>
angular.module('civicase').directive('civicaseActivityContactFilters', function() {
return {
restrict: 'AE',
replace: true,
templateUrl: '~/civicase/ActivityContactFilters.html',
scope: {
filters: '=civicaseActivityContactFilters'
},
link: function($scope, $el, $attr) {
var ts = $scope.ts = CRM.ts('civicase');
$scope.$watch('filters', function(){
// Ensure "All" checkbox renders.
if ($scope.filters['@involvingContact'] === undefined) {
$scope.filters['@involvingContact'] = '';
}
});
$scope.$on('civicaseActivityFeed.query', function(event, filters, params) {
switch (filters['@involvingContact']) {
case 'myActivities':
params.contact_id = 'user_contact_id';
break;
case 'delegated':
if (_.isEmpty(params.assignee_contact_id)) {
params.assignee_contact_id = {'!=': 'user_contact_id'};
}
break;
default:
break;
}
});
}
};
});
})(angular, CRM.$, CRM._);
| ang/civicase/ActivityContactFilters.js | (function(angular, $, _) {
// "civicaseActivityContactFilters" is a basic skeletal directive.
// Example usage: <div civicase-activity-contact-filters="apiv3ActivityParams"></div>
angular.module('civicase').directive('civicaseActivityContactFilters', function() {
return {
restrict: 'AE',
replace: true,
templateUrl: '~/civicase/ActivityContactFilters.html',
scope: {
filters: '=civicaseActivityContactFilters'
},
link: function($scope, $el, $attr) {
var ts = $scope.ts = CRM.ts('civicase');
$scope.$watch('filters', function(){
// Ensure "All" checkbox renders.
if ($scope.filters['@involvingContact'] === undefined) {
$scope.filters['@involvingContact'] = '';
}
});
$scope.$on('civicaseActivityFeed.query', function(event, filters, params) {
switch (filters['@involvingContact']) {
case 'myActivities':
params.contact_id = 'user_contact_id';
break;
case 'delegated':
params.assignee_contact_id = {'!=': 'user_contact_id'};
break;
default:
break;
}
});
}
};
});
})(angular, CRM.$, CRM._);
| CRM-20083 - "Delegated" - Defer to "Assignee Contact" filter if defined
| ang/civicase/ActivityContactFilters.js | CRM-20083 - "Delegated" - Defer to "Assignee Contact" filter if defined | <ide><path>ng/civicase/ActivityContactFilters.js
<ide> break;
<ide>
<ide> case 'delegated':
<del> params.assignee_contact_id = {'!=': 'user_contact_id'};
<add> if (_.isEmpty(params.assignee_contact_id)) {
<add> params.assignee_contact_id = {'!=': 'user_contact_id'};
<add> }
<ide> break;
<ide>
<ide> default: |
|
Java | apache-2.0 | 8bc4821b45d7a7cf09d1ed3f93300eb8d3b9c065 | 0 | brk3/glimmr,brk3/glimmr | package com.bourke.glimmr.activities;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.TextView;
import com.actionbarsherlock.app.ActionBar;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuItem;
import com.actionbarsherlock.view.Window;
import com.bourke.glimmr.common.Constants;
import com.bourke.glimmr.common.GsonHelper;
import com.bourke.glimmr.common.ViewPagerDisable;
import com.bourke.glimmr.fragments.viewer.CommentsFragment;
import com.bourke.glimmr.fragments.viewer.ExifInfoFragment;
import com.bourke.glimmr.fragments.viewer.PhotoViewerFragment;
import com.bourke.glimmr.R;
import com.googlecode.flickrjandroid.people.User;
import com.googlecode.flickrjandroid.photos.Photo;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Activity for viewing photos.
*
* Receives a list of photos via an intent and shows the first one specified by
* a startIndex in a zoomable ImageView.
*/
public class PhotoViewerActivity extends BaseActivity
implements PhotoViewerFragment.IPhotoViewerCallbacks {
private static final String TAG = "Glimmr/PhotoViewerActivity";
private List<Photo> mPhotos = new ArrayList<Photo>();
private PhotoViewerPagerAdapter mAdapter;
private ViewPagerDisable mPager;
private List<WeakReference<Fragment>> mFragList =
new ArrayList<WeakReference<Fragment>>();
private int mCurrentAdapterIndex = 0;
private CommentsFragment mCommentsFragment;
private ExifInfoFragment mExifFragment;
private boolean mCommentsFragmentShowing = false;
private boolean mExifFragmentShowing = false;
private ActionBarTitle mActionbarTitle;
/**
* Start the PhotoViewerActivity with a list of photos to view and an index
* to start at in the list.
*/
public static void startPhotoViewer(BaseActivity activity,
List<Photo> photos, int pos) {
if (photos == null) {
Log.e(TAG, "Cannot start PhotoViewer, photos is null");
return;
}
GsonHelper gsonHelper = new GsonHelper(activity);
boolean photolistStoreResult =
gsonHelper.marshallObject(photos, Constants.PHOTOVIEWER_LIST_FILE);
if (!photolistStoreResult) {
Log.e(TAG, "Error marshalling photos, cannot start viewer");
return;
}
Intent photoViewer = new Intent(activity, PhotoViewerActivity.class);
photoViewer.putExtra(Constants.KEY_PHOTOVIEWER_START_INDEX, pos);
activity.startActivity(photoViewer);
}
private void handleIntent(Intent intent) {
if (intent == null) {
Log.e(TAG, "Error: null intent received in handleIntent");
return;
}
GsonHelper gsonHelper = new GsonHelper(this);
String json = gsonHelper.loadJson(Constants.PHOTOVIEWER_LIST_FILE);
if (json.length() == 0) {
Log.e(TAG, String.format("Error reading %s",
Constants.PHOTOVIEWER_LIST_FILE));
return;
}
Type collectionType = new TypeToken<Collection<Photo>>(){}.getType();
mPhotos = new Gson().fromJson(json.toString(), collectionType);
Bundle bundle = intent.getExtras();
int startIndex;
if (bundle != null) {
startIndex = bundle.getInt(Constants.KEY_PHOTOVIEWER_START_INDEX);
} else {
Log.e(TAG, "handleIntent: bundle is null, cannot get startIndex");
startIndex = 0;
}
if (mPhotos != null) {
if (Constants.DEBUG) {
Log.d(getLogTag(), "Got list of photo urls, size: "
+ mPhotos.size());
}
mAdapter =
new PhotoViewerPagerAdapter(getSupportFragmentManager());
mAdapter.onPageSelected(startIndex);
mPager = (ViewPagerDisable) findViewById(R.id.pager);
mPager.setAdapter(mAdapter);
mPager.setOnPageChangeListener(mAdapter);
mPager.setCurrentItem(startIndex);
} else {
Log.e(getLogTag(), "Photos from intent are null");
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
if (Constants.DEBUG) Log.d(getLogTag(), "onCreate");
/* Must be called before adding content */
requestWindowFeature(Window.FEATURE_ACTION_BAR_OVERLAY);
super.onCreate(savedInstanceState);
setContentView(R.layout.photoviewer_activity);
/* Configure the actionbar. Set custom layout to show photo
* author/title in actionbar for large screens */
mActionBar.setBackgroundDrawable(getResources().getDrawable(
R.drawable.ab_bg_black));
mActionBar.setDisplayHomeAsUpEnabled(true);
mActionbarTitle = new ActionBarTitle(this);
if (getResources().getBoolean(R.bool.sw600dp)) {
mActionbarTitle.init(mActionBar);
}
handleIntent(getIntent());
}
@Override
public void onSaveInstanceState(Bundle savedInstanceState) {
super.onSaveInstanceState(savedInstanceState);
savedInstanceState.putBoolean(Constants.KEY_PHOTOVIEWER_ACTIONBAR_SHOW,
mActionBar.isShowing());
}
@Override
public void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
boolean overlayOn = savedInstanceState.getBoolean(
Constants.KEY_PHOTOVIEWER_ACTIONBAR_SHOW, true);
if (overlayOn) {
mActionBar.show();
getWindow().addFlags(
WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
getWindow().clearFlags(
WindowManager.LayoutParams.FLAG_FULLSCREEN);
} else {
mActionBar.hide();
getWindow().addFlags(
WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().clearFlags(
WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
}
}
@Override
public User getUser() {
return mUser;
}
@Override
protected void onNewIntent(Intent intent) {
setIntent(intent);
handleIntent(intent);
}
public void onCommentsButtonClick(Photo photo) {
if (getResources().getBoolean(R.bool.sw600dp)) {
boolean animateTransition = true;
if (mExifFragmentShowing) {
setExifFragmentVisibility(photo, false, animateTransition);
}
if (mCommentsFragmentShowing) {
setCommentsFragmentVisibility(photo, false, animateTransition);
} else {
setCommentsFragmentVisibility(photo, true, animateTransition);
}
} else {
CommentsFragment commentsDialogFrag =
CommentsFragment.newInstance(photo);
commentsDialogFrag.show(getSupportFragmentManager(),
"CommentsDialogFragment");
}
}
public void onExifButtonClick(Photo photo) {
if (getResources().getBoolean(R.bool.sw600dp)) {
boolean animateTransition = true;
if (mCommentsFragmentShowing) {
setCommentsFragmentVisibility(photo, false, animateTransition);
}
if (mExifFragmentShowing) {
setExifFragmentVisibility(photo, false, animateTransition);
} else {
setExifFragmentVisibility(photo, true, animateTransition);
}
} else {
ExifInfoFragment exifInfoDialogFrag =
ExifInfoFragment.newInstance(photo);
exifInfoDialogFrag.show(getSupportFragmentManager(),
"ExifInfoDialogFragment");
}
}
/**
* Overlay fragments are hidden/dismissed automatically onBackPressed, so
* just need to update the state variables.
*/
@Override
public void onBackPressed() {
super.onBackPressed();
mCommentsFragmentShowing = false;
mExifFragmentShowing = false;
}
private void setCommentsFragmentVisibility(Photo photo, boolean show,
boolean animate) {
FragmentTransaction ft =
getSupportFragmentManager().beginTransaction();
if (animate) {
ft.setCustomAnimations(android.R.anim.fade_in,
android.R.anim.fade_out);
}
if (show) {
if (photo != null) {
mCommentsFragment = CommentsFragment.newInstance(photo);
ft.replace(R.id.commentsFragment, mCommentsFragment);
ft.addToBackStack(null);
} else {
Log.e(TAG, "setCommentsFragmentVisibility: photo is null");
}
} else {
ft.hide(mCommentsFragment);
getSupportFragmentManager().popBackStack();
}
mCommentsFragmentShowing = show;
ft.commit();
}
private void setExifFragmentVisibility(Photo photo, boolean show,
boolean animate) {
FragmentTransaction ft =
getSupportFragmentManager().beginTransaction();
if (animate) {
ft.setCustomAnimations(android.R.anim.fade_in,
android.R.anim.fade_out);
}
if (show) {
if (photo != null) {
mExifFragment = ExifInfoFragment.newInstance(photo);
ft.replace(R.id.exifFragment, mExifFragment);
ft.addToBackStack(null);
} else {
Log.e(TAG, "setExifFragmentVisibility: photo is null");
}
} else {
ft.hide(mExifFragment);
getSupportFragmentManager().popBackStack();
}
mExifFragmentShowing = show;
ft.commit();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getSupportMenuInflater().inflate(R.menu.photoviewer_activity_menu,
menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Photo currentlyShowing = mPhotos.get(mCurrentAdapterIndex);
switch (item.getItemId()) {
case R.id.menu_view_comments:
onCommentsButtonClick(currentlyShowing);
return true;
case R.id.menu_view_exif:
onExifButtonClick(currentlyShowing);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
protected String getLogTag() {
return TAG;
}
@Override
public void onAttachFragment(Fragment fragment) {
if (fragment instanceof PhotoViewerFragment) {
mFragList.add(new WeakReference(fragment));
}
}
@Override
public void onVisibilityChanged(final boolean on) {
/* If overlay is being switched off and exif/comments fragments are
* showing, dismiss(hide) these and return */
if (!on) {
boolean animateTransition = true;
if (mExifFragmentShowing) {
setExifFragmentVisibility(null, false, true);
return;
}
if (mCommentsFragmentShowing) {
setCommentsFragmentVisibility(null, false, true);
return;
}
}
for (WeakReference<Fragment> ref : mFragList) {
PhotoViewerFragment f = (PhotoViewerFragment) ref.get();
if (f != null) {
f.setOverlayVisibility(on);
}
}
}
@Override
public void onZoomed(final boolean isZoomed) {
mPager.setPagingEnabled(!isZoomed);
}
class PhotoViewerPagerAdapter extends FragmentStatePagerAdapter
implements ViewPager.OnPageChangeListener {
public PhotoViewerPagerAdapter(FragmentManager fm) {
super(fm);
}
@Override
public Fragment getItem(int position) {
return PhotoViewerFragment.newInstance(
mPhotos.get(position), PhotoViewerActivity.this);
}
@Override
public void onPageScrolled(int position, float positionOffset,
int positionOffsetPixels) {
}
@Override
public void onPageSelected(int position) {
/*
* If comments fragment is showing update it for the current photo
*/
if (mCommentsFragment != null && mCommentsFragmentShowing) {
getSupportFragmentManager().popBackStack();
boolean animateTransition = false;
boolean show = true;
setCommentsFragmentVisibility(mPhotos.get(position), show,
animateTransition);
/* Likewise for exif */
} else if (mExifFragment != null && mExifFragmentShowing) {
getSupportFragmentManager().popBackStack();
boolean animateTransition = false;
boolean show = true;
setExifFragmentVisibility(mPhotos.get(position), show,
animateTransition);
}
mCurrentAdapterIndex = position;
/* If sw600dp then show the title/author in the actionbar,
* otherwise the fragment will overlay them on the photo */
Photo currentlyShowing = mPhotos.get(mCurrentAdapterIndex);
if (getResources().getBoolean(R.bool.sw600dp)) {
String photoTitle = currentlyShowing.getTitle();
if (photoTitle == null || photoTitle.length() == 0) {
photoTitle = getString(R.string.untitled);
}
String authorText = String.format("%s %s",
getString(R.string.by),
currentlyShowing.getOwner().getUsername());
mActionbarTitle.setPhotoTitle(photoTitle);
mActionbarTitle.setAuthorText(authorText);
}
}
@Override
public void onPageScrollStateChanged(int state) {
}
@Override
public int getCount() {
return mPhotos.size();
}
}
class ActionBarTitle {
private TextView mPhotoTitle;
private TextView mPhotoAuthor;
private Context mContext;
public ActionBarTitle(Context context) {
mContext = context;
}
public void init(ActionBar actionbar) {
LayoutInflater inflator = (LayoutInflater)
mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View v = inflator.inflate(R.layout.photoviewer_action_bar, null);
mPhotoTitle = (TextView) v.findViewById(R.id.photoTitle);
mPhotoAuthor = (TextView) v.findViewById(R.id.photoAuthor);
setFont(mPhotoTitle, Constants.FONT_ROBOTOLIGHT);
setFont(mPhotoAuthor, Constants.FONT_ROBOTOTHIN);
actionbar.setDisplayShowCustomEnabled(true);
actionbar.setDisplayShowTitleEnabled(false);
actionbar.setCustomView(v);
}
public void setPhotoTitle(String title) {
mPhotoTitle.setText(title);
}
public void setAuthorText(String author) {
mPhotoAuthor.setText(author);
}
}
}
| src/com/bourke/glimmr/activities/PhotoViewerActivity.java | package com.bourke.glimmr.activities;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.TextView;
import com.actionbarsherlock.app.ActionBar;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuItem;
import com.actionbarsherlock.view.Window;
import com.bourke.glimmr.common.Constants;
import com.bourke.glimmr.common.GsonHelper;
import com.bourke.glimmr.common.ViewPagerDisable;
import com.bourke.glimmr.fragments.viewer.CommentsFragment;
import com.bourke.glimmr.fragments.viewer.ExifInfoFragment;
import com.bourke.glimmr.fragments.viewer.PhotoViewerFragment;
import com.bourke.glimmr.R;
import com.googlecode.flickrjandroid.people.User;
import com.googlecode.flickrjandroid.photos.Photo;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Activity for viewing photos.
*
* Receives a list of photos via an intent and shows the first one specified by
* a startIndex in a zoomable ImageView.
*/
public class PhotoViewerActivity extends BaseActivity
implements PhotoViewerFragment.IPhotoViewerCallbacks {
private static final String TAG = "Glimmr/PhotoViewerActivity";
private List<Photo> mPhotos = new ArrayList<Photo>();
private PhotoViewerPagerAdapter mAdapter;
private ViewPagerDisable mPager;
private List<WeakReference<Fragment>> mFragList =
new ArrayList<WeakReference<Fragment>>();
private int mCurrentAdapterIndex = 0;
private CommentsFragment mCommentsFragment;
private ExifInfoFragment mExifFragment;
private boolean mCommentsFragmentShowing = false;
private boolean mExifFragmentShowing = false;
private ActionBarTitle mActionbarTitle;
/**
* Start the PhotoViewerActivity with a list of photos to view and an index
* to start at in the list.
*/
public static void startPhotoViewer(BaseActivity activity,
List<Photo> photos, int pos) {
if (photos == null) {
Log.e(TAG, "Cannot start PhotoViewer, photos is null");
return;
}
GsonHelper gsonHelper = new GsonHelper(activity);
boolean photolistStoreResult =
gsonHelper.marshallObject(photos, Constants.PHOTOVIEWER_LIST_FILE);
if (!photolistStoreResult) {
Log.e(TAG, "Error marshalling photos, cannot start viewer");
return;
}
Intent photoViewer = new Intent(activity, PhotoViewerActivity.class);
photoViewer.putExtra(Constants.KEY_PHOTOVIEWER_START_INDEX, pos);
activity.startActivity(photoViewer);
}
private void handleIntent(Intent intent) {
if (intent == null) {
Log.e(TAG, "Error: null intent received in handleIntent");
return;
}
GsonHelper gsonHelper = new GsonHelper(this);
String json = gsonHelper.loadJson(Constants.PHOTOVIEWER_LIST_FILE);
if (json.length() == 0) {
Log.e(TAG, String.format("Error reading %s",
Constants.PHOTOVIEWER_LIST_FILE));
return;
}
Type collectionType = new TypeToken<Collection<Photo>>(){}.getType();
mPhotos = new Gson().fromJson(json.toString(), collectionType);
Bundle bundle = intent.getExtras();
int startIndex;
if (bundle != null) {
startIndex = bundle.getInt(Constants.KEY_PHOTOVIEWER_START_INDEX);
} else {
Log.e(TAG, "handleIntent: bundle is null, cannot get startIndex");
startIndex = 0;
}
if (mPhotos != null) {
if (Constants.DEBUG) {
Log.d(getLogTag(), "Got list of photo urls, size: "
+ mPhotos.size());
}
mAdapter =
new PhotoViewerPagerAdapter(getSupportFragmentManager());
mPager = (ViewPagerDisable) findViewById(R.id.pager);
mPager.setAdapter(mAdapter);
mPager.setOnPageChangeListener(mAdapter);
mPager.setCurrentItem(startIndex);
} else {
Log.e(getLogTag(), "Photos from intent are null");
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
if (Constants.DEBUG) Log.d(getLogTag(), "onCreate");
/* Must be called before adding content */
requestWindowFeature(Window.FEATURE_ACTION_BAR_OVERLAY);
super.onCreate(savedInstanceState);
setContentView(R.layout.photoviewer_activity);
/* Configure the actionbar. Set custom layout to show photo
* author/title in actionbar for large screens */
mActionBar.setBackgroundDrawable(getResources().getDrawable(
R.drawable.ab_bg_black));
mActionBar.setDisplayHomeAsUpEnabled(true);
mActionbarTitle = new ActionBarTitle(this);
if (getResources().getBoolean(R.bool.sw600dp)) {
mActionbarTitle.init(mActionBar);
}
handleIntent(getIntent());
}
@Override
public void onSaveInstanceState(Bundle savedInstanceState) {
super.onSaveInstanceState(savedInstanceState);
savedInstanceState.putBoolean(Constants.KEY_PHOTOVIEWER_ACTIONBAR_SHOW,
mActionBar.isShowing());
}
@Override
public void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
boolean overlayOn = savedInstanceState.getBoolean(
Constants.KEY_PHOTOVIEWER_ACTIONBAR_SHOW, true);
if (overlayOn) {
mActionBar.show();
getWindow().addFlags(
WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
getWindow().clearFlags(
WindowManager.LayoutParams.FLAG_FULLSCREEN);
} else {
mActionBar.hide();
getWindow().addFlags(
WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().clearFlags(
WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
}
}
@Override
public User getUser() {
return mUser;
}
@Override
protected void onNewIntent(Intent intent) {
setIntent(intent);
handleIntent(intent);
}
public void onCommentsButtonClick(Photo photo) {
if (getResources().getBoolean(R.bool.sw600dp)) {
boolean animateTransition = true;
if (mExifFragmentShowing) {
setExifFragmentVisibility(photo, false, animateTransition);
}
if (mCommentsFragmentShowing) {
setCommentsFragmentVisibility(photo, false, animateTransition);
} else {
setCommentsFragmentVisibility(photo, true, animateTransition);
}
} else {
CommentsFragment commentsDialogFrag =
CommentsFragment.newInstance(photo);
commentsDialogFrag.show(getSupportFragmentManager(),
"CommentsDialogFragment");
}
}
public void onExifButtonClick(Photo photo) {
if (getResources().getBoolean(R.bool.sw600dp)) {
boolean animateTransition = true;
if (mCommentsFragmentShowing) {
setCommentsFragmentVisibility(photo, false, animateTransition);
}
if (mExifFragmentShowing) {
setExifFragmentVisibility(photo, false, animateTransition);
} else {
setExifFragmentVisibility(photo, true, animateTransition);
}
} else {
ExifInfoFragment exifInfoDialogFrag =
ExifInfoFragment.newInstance(photo);
exifInfoDialogFrag.show(getSupportFragmentManager(),
"ExifInfoDialogFragment");
}
}
/**
* Overlay fragments are hidden/dismissed automatically onBackPressed, so
* just need to update the state variables.
*/
@Override
public void onBackPressed() {
super.onBackPressed();
mCommentsFragmentShowing = false;
mExifFragmentShowing = false;
}
private void setCommentsFragmentVisibility(Photo photo, boolean show,
boolean animate) {
FragmentTransaction ft =
getSupportFragmentManager().beginTransaction();
if (animate) {
ft.setCustomAnimations(android.R.anim.fade_in,
android.R.anim.fade_out);
}
if (show) {
if (photo != null) {
mCommentsFragment = CommentsFragment.newInstance(photo);
ft.replace(R.id.commentsFragment, mCommentsFragment);
ft.addToBackStack(null);
} else {
Log.e(TAG, "setCommentsFragmentVisibility: photo is null");
}
} else {
ft.hide(mCommentsFragment);
getSupportFragmentManager().popBackStack();
}
mCommentsFragmentShowing = show;
ft.commit();
}
private void setExifFragmentVisibility(Photo photo, boolean show,
boolean animate) {
FragmentTransaction ft =
getSupportFragmentManager().beginTransaction();
if (animate) {
ft.setCustomAnimations(android.R.anim.fade_in,
android.R.anim.fade_out);
}
if (show) {
if (photo != null) {
mExifFragment = ExifInfoFragment.newInstance(photo);
ft.replace(R.id.exifFragment, mExifFragment);
ft.addToBackStack(null);
} else {
Log.e(TAG, "setExifFragmentVisibility: photo is null");
}
} else {
ft.hide(mExifFragment);
getSupportFragmentManager().popBackStack();
}
mExifFragmentShowing = show;
ft.commit();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getSupportMenuInflater().inflate(R.menu.photoviewer_activity_menu,
menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Photo currentlyShowing = mPhotos.get(mCurrentAdapterIndex);
switch (item.getItemId()) {
case R.id.menu_view_comments:
onCommentsButtonClick(currentlyShowing);
return true;
case R.id.menu_view_exif:
onExifButtonClick(currentlyShowing);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
protected String getLogTag() {
return TAG;
}
@Override
public void onAttachFragment(Fragment fragment) {
if (fragment instanceof PhotoViewerFragment) {
mFragList.add(new WeakReference(fragment));
}
}
@Override
public void onVisibilityChanged(final boolean on) {
/* If overlay is being switched off and exif/comments fragments are
* showing, dismiss(hide) these and return */
if (!on) {
boolean animateTransition = true;
if (mExifFragmentShowing) {
setExifFragmentVisibility(null, false, true);
return;
}
if (mCommentsFragmentShowing) {
setCommentsFragmentVisibility(null, false, true);
return;
}
}
for (WeakReference<Fragment> ref : mFragList) {
PhotoViewerFragment f = (PhotoViewerFragment) ref.get();
if (f != null) {
f.setOverlayVisibility(on);
}
}
}
@Override
public void onZoomed(final boolean isZoomed) {
mPager.setPagingEnabled(!isZoomed);
}
class PhotoViewerPagerAdapter extends FragmentStatePagerAdapter
implements ViewPager.OnPageChangeListener {
public PhotoViewerPagerAdapter(FragmentManager fm) {
super(fm);
}
@Override
public Fragment getItem(int position) {
return PhotoViewerFragment.newInstance(
mPhotos.get(position), PhotoViewerActivity.this);
}
@Override
public void onPageScrolled(int position, float positionOffset,
int positionOffsetPixels) {
}
@Override
public void onPageSelected(int position) {
/*
* If comments fragment is showing update it for the current photo
*/
if (mCommentsFragment != null && mCommentsFragmentShowing) {
getSupportFragmentManager().popBackStack();
boolean animateTransition = false;
boolean show = true;
setCommentsFragmentVisibility(mPhotos.get(position), show,
animateTransition);
/* Likewise for exif */
} else if (mExifFragment != null && mExifFragmentShowing) {
getSupportFragmentManager().popBackStack();
boolean animateTransition = false;
boolean show = true;
setExifFragmentVisibility(mPhotos.get(position), show,
animateTransition);
}
mCurrentAdapterIndex = position;
/* If sw600dp then show the title/author in the actionbar,
* otherwise the fragment will overlay them on the photo */
Photo currentlyShowing = mPhotos.get(mCurrentAdapterIndex);
if (getResources().getBoolean(R.bool.sw600dp)) {
String photoTitle = currentlyShowing.getTitle();
if (photoTitle == null || photoTitle.length() == 0) {
photoTitle = getString(R.string.untitled);
}
String authorText = String.format("%s %s",
getString(R.string.by),
currentlyShowing.getOwner().getUsername());
mActionbarTitle.setPhotoTitle(photoTitle);
mActionbarTitle.setAuthorText(authorText);
}
}
@Override
public void onPageScrollStateChanged(int state) {
}
@Override
public int getCount() {
return mPhotos.size();
}
}
class ActionBarTitle {
private TextView mPhotoTitle;
private TextView mPhotoAuthor;
private Context mContext;
public ActionBarTitle(Context context) {
mContext = context;
}
public void init(ActionBar actionbar) {
LayoutInflater inflator = (LayoutInflater)
mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View v = inflator.inflate(R.layout.photoviewer_action_bar, null);
mPhotoTitle = (TextView) v.findViewById(R.id.photoTitle);
mPhotoAuthor = (TextView) v.findViewById(R.id.photoAuthor);
setFont(mPhotoTitle, Constants.FONT_ROBOTOLIGHT);
setFont(mPhotoAuthor, Constants.FONT_ROBOTOTHIN);
actionbar.setDisplayShowCustomEnabled(true);
actionbar.setDisplayShowTitleEnabled(false);
actionbar.setCustomView(v);
}
public void setPhotoTitle(String title) {
mPhotoTitle.setText(title);
}
public void setAuthorText(String author) {
mPhotoAuthor.setText(author);
}
}
}
| Fix actionbar not showing photo info on first page
| src/com/bourke/glimmr/activities/PhotoViewerActivity.java | Fix actionbar not showing photo info on first page | <ide><path>rc/com/bourke/glimmr/activities/PhotoViewerActivity.java
<ide> }
<ide> mAdapter =
<ide> new PhotoViewerPagerAdapter(getSupportFragmentManager());
<add> mAdapter.onPageSelected(startIndex);
<ide> mPager = (ViewPagerDisable) findViewById(R.id.pager);
<ide> mPager.setAdapter(mAdapter);
<ide> mPager.setOnPageChangeListener(mAdapter); |
|
JavaScript | bsd-3-clause | f0dd5f9128e49cfee77c0455c2e3e8e71f8cdc92 | 0 | toboid/hapi,camerow/hapi,credosam/hapi,Eric013/hapi,StudyForFun/hapi,elnaz/hapi,rmoorman/hapi,youprofit/hapi,DJMcK/hapi,toboid/hapi,SimonLab/hapi,Marsup/hapi,youprofit/hapi,udhayam/hapi,rmoorman/hapi,deepakshrma/hapi,analytically/hapi,pandeysoni/hapi,ldesplat/hapi,cesarmarinhorj/hapi,aulvi/hapi,zhoujia123/hapi,jrmce/hapi,StudyForFun/hapi,ldesplat/hapi,jefflembeck/hapi,hulbert/hapi,vertiman/hapi,danielb2/hapi,jefflembeck/hapi,evdevgit/hapi,Eric013/hapi,zhoujia123/hapi,Marsup/hapi,ryanmurakami/hapi,papimomi/hapi,papimomi/hapi,Binarytales/hapi,rodfernandez/hapi,shalomabitan/hapi,shalomabitan/hapi,pandeysoni/hapi,camerow/hapi,jrmce/hapi,credosam/hapi,rodfernandez/hapi,aulvi/hapi,ClaudeHarry/hapi,julianduque/hapi,HugoRLopes/hapi,SimonLab/hapi,bells17/hapi,cesarmarinhorj/hapi,udhayam/hapi,danielb2/hapi,ryanmurakami/hapi,bbondy/hapi,HugoRLopes/hapi,julianduque/hapi,vertiman/hapi,ClaudeHarry/hapi,bbondy/hapi,wookiehangover/hapi,evdevgit/hapi,DJMcK/hapi,analytically/hapi,deepakshrma/hapi,pandeysoni/hapi,hulbert/hapi,wzrdtales/hapi,wzrdtales/hapi,wookiehangover/hapi,elnaz/hapi,Binarytales/hapi,elnaz/hapi,bells17/hapi | // Load modules
var Request = require('request');
var Utils = require('./utils');
var Boom = require('boom');
// Declare internals
var internals = {};
// Create and configure server instance
exports = module.exports = internals.Proxy = function (options, route) {
Utils.assert(options, 'Missing options');
Utils.assert(!!options.host ^ !!options.mapUri, 'Must have either options.host or options.mapUri');
Utils.assert(!options.passThrough || !route.settings.cache.mode.server, 'Cannot use pass-through proxy mode with caching');
Utils.assert(!options.mapUri || typeof options.mapUri === 'function', 'options.mapUri must be a function');
Utils.assert(!options.postResponse || typeof options.postResponse === 'function', 'options.postResponse must be a function');
Utils.assert(!options.hasOwnProperty('isCustomPostResponse'), 'Cannot manually set options.isCustomPostResponse');
this.settings = {};
this.settings.mapUri = options.mapUri || internals.mapUri(options.protocol, options.host, options.port);
this.settings.xforward = options.xforward || false;
this.settings.passThrough = options.passThrough || false;
this.settings.isCustomPostResponse = !!options.postResponse;
this.settings.postResponse = options.postResponse || internals.postResponse; // function (request, settings, response, payload)
return this;
};
internals.Proxy.prototype.httpClient = Request;
internals.Proxy.prototype.handler = function () {
var self = this;
return function (request) {
self.settings.mapUri(request, function (err, uri) {
if (err) {
return request.reply(err);
}
var req = request.raw.req;
var options = {
uri: uri,
method: request.method,
headers: {},
jar: false
};
if (self.settings.passThrough) { // Never set with cache
options.headers = Utils.clone(req.headers);
delete options.headers.host;
}
if (self.settings.xforward) {
options.headers['x-forwarded-for'] = (options.headers['x-forwarded-for'] ? options.headers['x-forwarded-for'] + ',' : '') + req.connection.remoteAddress || req.socket.remoteAddress;
options.headers['x-forwarded-port'] = (options.headers['x-forwarded-port'] ? options.headers['x-forwarded-port'] + ',' : '') + req.connection.remotePort || req.socket.remotePort;
options.headers['x-forwarded-proto'] = (options.headers['x-forwarded-proto'] ? options.headers['x-forwarded-proto'] + ',' : '') + self.settings.protocol;
}
var isGet = (request.method === 'get' || request.method === 'head');
if (self.settings.isCustomPostResponse || // Custom response method
(isGet && request.route.cache.mode.server)) { // GET/HEAD with Cache
// Callback interface
delete options.headers['accept-encoding']; // Remove until Request supports unzip/deflate
self.httpClient(options, function (err, response, payload) {
// Request handles all redirect responses (3xx) and will return an err if redirection fails
if (err) {
return request.reply(Boom.internal('Proxy error', err));
}
return self.settings.postResponse(request, self.settings, response, payload);
});
}
else {
// Stream interface
if (!isGet &&
request.rawPayload) {
options.headers['Content-Type'] = req.headers['content-type'];
options.body = request.rawPayload;
}
var reqStream = self.httpClient(options);
reqStream.on('response', function (resStream) {
request.reply(resStream); // Request._respond will pass-through headers and status code
});
if (!isGet &&
request.route.payload === 'stream') {
request.raw.req.pipe(reqStream);
}
}
});
};
};
internals.mapUri = function (protocol, host, port) {
protocol = protocol || 'http';
port = port || (protocol === 'http' ? 80 : 443);
var baseUrl = protocol + '://' + host + ':' + port;
return function (request, next) {
return next(null, baseUrl + request.path + (request.url.search || ''));
};
};
internals.postResponse = function (request, settings, response, payload) {
var contentType = response.headers['content-type'];
var statusCode = response.statusCode;
if (statusCode >= 400) {
return request.reply(Boom.passThrough(statusCode, payload, contentType));
}
response = request.reply.payload(payload);
if (contentType) {
response.type(contentType);
}
return response.send();
}; | lib/proxy.js | // Load modules
var Request = require('request');
var Utils = require('./utils');
var Boom = require('boom');
// Declare internals
var internals = {};
// Create and configure server instance
exports = module.exports = internals.Proxy = function (options, route) {
Utils.assert(options, 'Missing options');
Utils.assert(!!options.host ^ !!options.mapUri, 'Must have either options.host or options.mapUri');
Utils.assert(!options.passThrough || !route.settings.cache.mode.server, 'Cannot use pass-through proxy mode with caching');
Utils.assert(!options.mapUri || typeof options.mapUri === 'function', 'options.mapUri must be a function');
Utils.assert(!options.postResponse || typeof options.postResponse === 'function', 'options.postResponse must be a function');
Utils.assert(!options.hasOwnProperty('isCustomPostResponse'), 'Cannot manually set options.isCustomPostResponse');
this.settings = {};
this.settings.mapUri = options.mapUri || internals.mapUri(options.protocol, options.host, options.port);
this.settings.xforward = options.xforward || false;
this.settings.passThrough = options.passThrough || false;
this.settings.isCustomPostResponse = !!options.postResponse;
this.settings.postResponse = options.postResponse || internals.postResponse; // function (request, settings, response, payload)
return this;
};
internals.Proxy.prototype.httpClient = Request;
internals.Proxy.prototype.handler = function () {
var self = this;
return function (request) {
self.settings.mapUri(request, function (err, uri) {
if (err) {
return request.reply(err);
}
var req = request.raw.req;
var options = {
uri: uri,
method: request.method,
headers: {},
jar: false,
streams2: true
};
if (self.settings.passThrough) { // Never set with cache
options.headers = Utils.clone(req.headers);
delete options.headers.host;
}
if (self.settings.xforward) {
options.headers['x-forwarded-for'] = (options.headers['x-forwarded-for'] ? options.headers['x-forwarded-for'] + ',' : '') + req.connection.remoteAddress || req.socket.remoteAddress;
options.headers['x-forwarded-port'] = (options.headers['x-forwarded-port'] ? options.headers['x-forwarded-port'] + ',' : '') + req.connection.remotePort || req.socket.remotePort;
options.headers['x-forwarded-proto'] = (options.headers['x-forwarded-proto'] ? options.headers['x-forwarded-proto'] + ',' : '') + self.settings.protocol;
}
var isGet = (request.method === 'get' || request.method === 'head');
if (self.settings.isCustomPostResponse || // Custom response method
(isGet && request.route.cache.mode.server)) { // GET/HEAD with Cache
// Callback interface
delete options.headers['accept-encoding']; // Remove until Request supports unzip/deflate
self.httpClient(options, function (err, response, payload) {
// Request handles all redirect responses (3xx) and will return an err if redirection fails
if (err) {
return request.reply(Boom.internal('Proxy error', err));
}
return self.settings.postResponse(request, self.settings, response, payload);
});
}
else {
// Stream interface
if (!isGet &&
request.rawPayload) {
options.headers['Content-Type'] = req.headers['content-type'];
options.body = request.rawPayload;
}
var reqStream = self.httpClient(options);
reqStream.on('response', function (resStream) {
request.reply(resStream); // Request._respond will pass-through headers and status code
});
if (!isGet &&
request.route.payload === 'stream') {
request.raw.req.pipe(reqStream);
}
}
});
};
};
internals.mapUri = function (protocol, host, port) {
protocol = protocol || 'http';
port = port || (protocol === 'http' ? 80 : 443);
var baseUrl = protocol + '://' + host + ':' + port;
return function (request, next) {
return next(null, baseUrl + request.path + (request.url.search || ''));
};
};
internals.postResponse = function (request, settings, response, payload) {
var contentType = response.headers['content-type'];
var statusCode = response.statusCode;
if (statusCode >= 400) {
return request.reply(Boom.passThrough(statusCode, payload, contentType));
}
response = request.reply.payload(payload);
if (contentType) {
response.type(contentType);
}
return response.send();
}; | Cleanup
| lib/proxy.js | Cleanup | <ide><path>ib/proxy.js
<ide> uri: uri,
<ide> method: request.method,
<ide> headers: {},
<del> jar: false,
<del> streams2: true
<add> jar: false
<ide> };
<ide>
<ide> if (self.settings.passThrough) { // Never set with cache |
|
Java | apache-2.0 | 4181df0e2529d290e92a488ce438873828b061c9 | 0 | ThiagoGarciaAlves/intellij-community,signed/intellij-community,semonte/intellij-community,semonte/intellij-community,asedunov/intellij-community,ibinti/intellij-community,ibinti/intellij-community,ibinti/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,da1z/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,allotria/intellij-community,apixandru/intellij-community,semonte/intellij-community,FHannes/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,da1z/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,semonte/intellij-community,apixandru/intellij-community,apixandru/intellij-community,xfournet/intellij-community,FHannes/intellij-community,signed/intellij-community,FHannes/intellij-community,allotria/intellij-community,signed/intellij-community,FHannes/intellij-community,signed/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,asedunov/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,signed/intellij-community,ibinti/intellij-community,ibinti/intellij-community,signed/intellij-community,semonte/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,signed/intellij-community,allotria/intellij-community,signed/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,apixandru/intellij-community,da1z/inte
llij-community,mglukhikh/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,ibinti/intellij-community,da1z/intellij-community,semonte/intellij-community,asedunov/intellij-community,allotria/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,signed/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,semonte/intellij-community,xfournet/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,suncycheng/intellij-community,allotria/intellij-community,FHannes/intellij-community,apixandru/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,allotria/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,da1z/intellij-community,apixandru/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,signed/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,FHannes/intellij-community,apixandru/intellij-community,FHannes/intellij-community,allotria/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,da1z/intellij-community,signed/intellij-community,FHannes/intellij-community,signed/intellij-community,da1z/intellij-community,vvv1559/intellij-com
munity,xfournet/intellij-community,suncycheng/intellij-community,semonte/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,signed/intellij-community,asedunov/intellij-community,xfournet/intellij-community,xfournet/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,ibinti/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,semonte/intellij-community,semonte/intellij-community,allotria/intellij-community,FHannes/intellij-community | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.lookup.impl;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.completion.impl.CamelHumpMatcher;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.codeInsight.lookup.*;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.UISettings;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.LangBundle;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.FontPreferences;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.JBList;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.util.CollectConsumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.ui.accessibility.AccessibleContextUtil;
import com.intellij.util.ui.update.Activatable;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.Collection;
import java.util.List;
import java.util.Map;
public class LookupImpl extends LightweightHint implements LookupEx, Disposable {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.lookup.impl.LookupImpl");
private final LookupOffsets myOffsets;
private final Project myProject;
private final Editor myEditor;
private final Object myLock = new Object();
private final JBList myList = new JBList(new CollectionListModel<LookupElement>()) {
@Override
protected void processKeyEvent(@NotNull final KeyEvent e) {
final char keyChar = e.getKeyChar();
if (keyChar == KeyEvent.VK_ENTER || keyChar == KeyEvent.VK_TAB) {
IdeFocusManager.getInstance(myProject).requestFocus(myEditor.getContentComponent(), true).doWhenDone(
() -> IdeEventQueue.getInstance().getKeyEventDispatcher().dispatchKeyEvent(e));
return;
}
super.processKeyEvent(e);
}
@NotNull
@Override
protected ExpandableItemsHandler<Integer> createExpandableItemsHandler() {
return new CompletionExtender(this);
}
};
final LookupCellRenderer myCellRenderer;
private final List<LookupListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
private PrefixChangeListener myPrefixChangeListener = new PrefixChangeListener.Adapter() {};
private final LookupPreview myPreview = new LookupPreview(this);
// keeping our own copy of editor's font preferences, which can be used in non-EDT threads (to avoid race conditions)
private final FontPreferences myFontPreferences = new FontPreferences();
private long myStampShown = 0;
private boolean myShown = false;
private boolean myDisposed = false;
private boolean myHidden = false;
private boolean mySelectionTouched;
private FocusDegree myFocusDegree = FocusDegree.FOCUSED;
private volatile boolean myCalculating;
private final Advertiser myAdComponent;
volatile int myLookupTextWidth = 50;
private boolean myChangeGuard;
private volatile LookupArranger myArranger;
private LookupArranger myPresentableArranger;
private final Map<LookupElement, Font> myCustomFonts = ContainerUtil.createConcurrentWeakMap(10, 0.75f, Runtime.getRuntime().availableProcessors(),
ContainerUtil.identityStrategy());
private boolean myStartCompletionWhenNothingMatches;
boolean myResizePending;
private boolean myFinishing;
boolean myUpdating;
private LookupUi myUi;
public LookupImpl(Project project, Editor editor, @NotNull LookupArranger arranger) {
super(new JPanel(new BorderLayout()));
setForceShowAsPopup(true);
setCancelOnClickOutside(false);
setResizable(true);
AbstractPopup.suppressMacCornerFor(getComponent());
myProject = project;
myEditor = InjectedLanguageUtil.getTopLevelEditor(editor);
myArranger = arranger;
myPresentableArranger = arranger;
myEditor.getColorsScheme().getFontPreferences().copyTo(myFontPreferences);
DaemonCodeAnalyzer.getInstance(myProject).disableUpdateByTimer(this);
myCellRenderer = new LookupCellRenderer(this);
myList.setCellRenderer(myCellRenderer);
myList.setFocusable(false);
myList.setFixedCellWidth(50);
// a new top level frame just got the focus. This is important to prevent screen readers
// from announcing the title of the top level frame when the list is shown (or hidden),
// as they usually do when a new top-level frame receives the focus.
AccessibleContextUtil.setParent(myList, myEditor.getContentComponent());
myList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
myList.setBackground(LookupCellRenderer.BACKGROUND_COLOR);
myList.getExpandableItemsHandler();
myAdComponent = new Advertiser();
myOffsets = new LookupOffsets(myEditor);
final CollectionListModel<LookupElement> model = getListModel();
addEmptyItem(model);
updateListHeight(model);
addListeners();
}
private CollectionListModel<LookupElement> getListModel() {
//noinspection unchecked
return (CollectionListModel<LookupElement>)myList.getModel();
}
public void setArranger(LookupArranger arranger) {
myArranger = arranger;
}
public FocusDegree getFocusDegree() {
return myFocusDegree;
}
@Override
public boolean isFocused() {
return getFocusDegree() == FocusDegree.FOCUSED;
}
public void setFocusDegree(FocusDegree focusDegree) {
myFocusDegree = focusDegree;
}
public boolean isCalculating() {
return myCalculating;
}
public void setCalculating(final boolean calculating) {
myCalculating = calculating;
if (myUi != null) {
myUi.setCalculating(calculating);
}
}
public void markSelectionTouched() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
mySelectionTouched = true;
myList.repaint();
}
@TestOnly
public void setSelectionTouched(boolean selectionTouched) {
mySelectionTouched = selectionTouched;
}
@TestOnly
public int getSelectedIndex() {
return myList.getSelectedIndex();
}
protected void repaintLookup(boolean onExplicitAction, boolean reused, boolean selectionVisible, boolean itemsChanged) {
myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
}
public void resort(boolean addAgain) {
final List<LookupElement> items = getItems();
withLock(() -> {
myPresentableArranger.prefixChanged(this);
getListModel().removeAll();
return null;
});
if (addAgain) {
for (final LookupElement item : items) {
addItem(item, itemMatcher(item));
}
}
refreshUi(true, true);
}
public boolean addItem(LookupElement item, PrefixMatcher matcher) {
LookupElementPresentation presentation = renderItemApproximately(item);
if (containsDummyIdentifier(presentation.getItemText()) ||
containsDummyIdentifier(presentation.getTailText()) ||
containsDummyIdentifier(presentation.getTypeText())) {
return false;
}
updateLookupWidth(item, presentation);
withLock(() -> {
myArranger.registerMatcher(item, matcher);
myArranger.addElement(item, presentation);
return null;
});
return true;
}
private static boolean containsDummyIdentifier(@Nullable final String s) {
return s != null && s.contains(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED);
}
public void updateLookupWidth(LookupElement item) {
updateLookupWidth(item, renderItemApproximately(item));
}
private void updateLookupWidth(LookupElement item, LookupElementPresentation presentation) {
final Font customFont = myCellRenderer.getFontAbleToDisplay(presentation);
if (customFont != null) {
myCustomFonts.put(item, customFont);
}
int maxWidth = myCellRenderer.updateMaximumWidth(presentation, item);
myLookupTextWidth = Math.max(maxWidth, myLookupTextWidth);
}
@Nullable
public Font getCustomFont(LookupElement item, boolean bold) {
Font font = myCustomFonts.get(item);
return font == null ? null : bold ? font.deriveFont(Font.BOLD) : font;
}
public void requestResize() {
ApplicationManager.getApplication().assertIsDispatchThread();
myResizePending = true;
}
public Collection<LookupElementAction> getActionsFor(LookupElement element) {
final CollectConsumer<LookupElementAction> consumer = new CollectConsumer<>();
for (LookupActionProvider provider : LookupActionProvider.EP_NAME.getExtensions()) {
provider.fillActions(element, this, consumer);
}
if (!consumer.getResult().isEmpty()) {
consumer.consume(new ShowHideIntentionIconLookupAction());
}
return consumer.getResult();
}
public JList getList() {
return myList;
}
@Override
public List<LookupElement> getItems() {
return withLock(() -> ContainerUtil.findAll(getListModel().toList(), element -> !(element instanceof EmptyLookupItem)));
}
public String getAdditionalPrefix() {
return myOffsets.getAdditionalPrefix();
}
void appendPrefix(char c) {
checkValid();
myOffsets.appendPrefix(c);
withLock(() -> {
myPresentableArranger.prefixChanged(this);
return null;
});
requestResize();
refreshUi(false, true);
ensureSelectionVisible(true);
myPrefixChangeListener.afterAppend(c);
}
public void setStartCompletionWhenNothingMatches(boolean startCompletionWhenNothingMatches) {
myStartCompletionWhenNothingMatches = startCompletionWhenNothingMatches;
}
public boolean isStartCompletionWhenNothingMatches() {
return myStartCompletionWhenNothingMatches;
}
public void ensureSelectionVisible(boolean forceTopSelection) {
if (isSelectionVisible() && !forceTopSelection) {
return;
}
if (!forceTopSelection) {
ScrollingUtil.ensureIndexIsVisible(myList, myList.getSelectedIndex(), 1);
return;
}
// selected item should be at the top of the visible list
int top = myList.getSelectedIndex();
if (top > 0) {
top--; // show one element above the selected one to give the hint that there are more available via scrolling
}
int firstVisibleIndex = myList.getFirstVisibleIndex();
if (firstVisibleIndex == top) {
return;
}
ScrollingUtil.ensureRangeIsVisible(myList, top, top + myList.getLastVisibleIndex() - firstVisibleIndex);
}
boolean truncatePrefix(boolean preserveSelection) {
if (!myOffsets.truncatePrefix()) {
return false;
}
if (preserveSelection) {
markSelectionTouched();
}
boolean shouldUpdate = withLock(() -> {
myPresentableArranger.prefixChanged(this);
return myPresentableArranger == myArranger;
});
requestResize();
if (shouldUpdate) {
refreshUi(false, true);
ensureSelectionVisible(true);
}
return true;
}
private boolean updateList(boolean onExplicitAction, boolean reused) {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
checkValid();
CollectionListModel<LookupElement> listModel = getListModel();
Pair<List<LookupElement>, Integer> pair = withLock(() -> myPresentableArranger.arrangeItems(this, onExplicitAction || reused));
List<LookupElement> items = pair.first;
Integer toSelect = pair.second;
if (toSelect == null || toSelect < 0 || items.size() > 0 && toSelect >= items.size()) {
LOG.error("Arranger " + myPresentableArranger + " returned invalid selection index=" + toSelect + "; items=" + items);
toSelect = 0;
}
myOffsets.checkMinPrefixLengthChanges(items, this);
List<LookupElement> oldModel = listModel.toList();
listModel.removeAll();
if (!items.isEmpty()) {
listModel.add(items);
}
else {
addEmptyItem(listModel);
}
updateListHeight(listModel);
myList.setSelectedIndex(toSelect);
return !ContainerUtil.equalsIdentity(oldModel, items);
}
protected boolean isSelectionVisible() {
return ScrollingUtil.isIndexFullyVisible(myList, myList.getSelectedIndex());
}
private boolean checkReused() {
return withLock(() -> {
if (myPresentableArranger != myArranger) {
myPresentableArranger = myArranger;
myOffsets.clearAdditionalPrefix();
myPresentableArranger.prefixChanged(this);
return true;
}
return false;
});
}
private void updateListHeight(ListModel model) {
myList.setFixedCellHeight(myCellRenderer.getListCellRendererComponent(myList, model.getElementAt(0), 0, false, false).getPreferredSize().height);
myList.setVisibleRowCount(Math.min(model.getSize(), UISettings.getInstance().getMaxLookupListHeight()));
}
private void addEmptyItem(CollectionListModel<LookupElement> model) {
LookupElement item = new EmptyLookupItem(myCalculating ? " " : LangBundle.message("completion.no.suggestions"), false);
model.add(item);
updateLookupWidth(item);
requestResize();
}
private static LookupElementPresentation renderItemApproximately(LookupElement item) {
final LookupElementPresentation p = new LookupElementPresentation();
item.renderElement(p);
return p;
}
@NotNull
@Override
public String itemPattern(@NotNull LookupElement element) {
if (element instanceof EmptyLookupItem) return "";
return myPresentableArranger.itemPattern(element);
}
@Override
@NotNull
public PrefixMatcher itemMatcher(@NotNull LookupElement item) {
if (item instanceof EmptyLookupItem) {
return new CamelHumpMatcher("");
}
return myPresentableArranger.itemMatcher(item);
}
public void finishLookup(final char completionChar) {
finishLookup(completionChar, (LookupElement)myList.getSelectedValue());
}
public void finishLookup(char completionChar, @Nullable final LookupElement item) {
LOG.assertTrue(!ApplicationManager.getApplication().isWriteAccessAllowed(), "finishLookup should be called without a write action");
final PsiFile file = getPsiFile();
boolean writableOk = file == null || FileModificationService.getInstance().prepareFileForWrite(file);
if (myDisposed) { // ensureFilesWritable could close us by showing a dialog
return;
}
if (!writableOk) {
doHide(false, true);
fireItemSelected(null, completionChar);
return;
}
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookupInWritableFile(completionChar, item), null, null);
}
void finishLookupInWritableFile(char completionChar, @Nullable LookupElement item) {
//noinspection deprecation,unchecked
if (item == null ||
!item.isValid() ||
item instanceof EmptyLookupItem ||
item.getObject() instanceof DeferredUserLookupValue &&
item.as(LookupItem.CLASS_CONDITION_KEY) != null &&
!((DeferredUserLookupValue)item.getObject()).handleUserSelection(item.as(LookupItem.CLASS_CONDITION_KEY), myProject)) {
doHide(false, true);
fireItemSelected(null, completionChar);
return;
}
if (myDisposed) { // DeferredUserLookupValue could close us in any way
return;
}
final String prefix = itemPattern(item);
boolean plainMatch = ContainerUtil.or(item.getAllLookupStrings(), s -> StringUtil.containsIgnoreCase(s, prefix));
if (!plainMatch) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_CAMEL_HUMPS);
}
myFinishing = true;
ApplicationManager.getApplication().runWriteAction(() -> {
myEditor.getDocument().startGuardedBlockChecking();
try {
insertLookupString(item, getPrefixLength(item));
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
}
});
if (myDisposed) { // any document listeners could close us
return;
}
doHide(false, true);
fireItemSelected(item, completionChar);
}
public int getPrefixLength(LookupElement item) {
return myOffsets.getPrefixLength(item, this);
}
protected void insertLookupString(LookupElement item, final int prefix) {
final String lookupString = getCaseCorrectedLookupString(item);
final Editor hostEditor = getTopLevelEditor();
hostEditor.getCaretModel().runForEachCaret(new CaretAction() {
@Override
public void perform(Caret caret) {
EditorModificationUtil.deleteSelectedText(hostEditor);
final int caretOffset = hostEditor.getCaretModel().getOffset();
int offset = insertLookupInDocumentWindowIfNeeded(caretOffset, prefix, lookupString);
hostEditor.getCaretModel().moveToOffset(offset);
hostEditor.getSelectionModel().removeSelection();
}
});
myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
}
private int insertLookupInDocumentWindowIfNeeded(int caretOffset, int prefix, String lookupString) {
DocumentWindow document = getInjectedDocument(caretOffset);
if (document == null) return insertLookupInDocument(caretOffset, myEditor.getDocument(), prefix, lookupString);
PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
int offset = document.hostToInjected(caretOffset);
int lookupStart = Math.min(offset, Math.max(offset - prefix, 0));
int diff = -1;
if (file != null) {
List<TextRange> ranges = InjectedLanguageManager.getInstance(myProject)
.intersectWithAllEditableFragments(file, TextRange.create(lookupStart, offset));
if (!ranges.isEmpty()) {
diff = ranges.get(0).getStartOffset() - lookupStart;
if (ranges.size() == 1 && diff == 0) diff = -1;
}
}
if (diff == -1) return insertLookupInDocument(caretOffset, myEditor.getDocument(), prefix, lookupString);
return document.injectedToHost(
insertLookupInDocument(offset, document, prefix - diff, diff == 0 ? lookupString : lookupString.substring(diff))
);
}
private static int insertLookupInDocument(int caretOffset, Document document, int prefix, String lookupString) {
int lookupStart = Math.min(caretOffset, Math.max(caretOffset - prefix, 0));
int len = document.getTextLength();
LOG.assertTrue(lookupStart >= 0 && lookupStart <= len,
"ls: " + lookupStart + " caret: " + caretOffset + " prefix:" + prefix + " doc: " + len);
LOG.assertTrue(caretOffset >= 0 && caretOffset <= len, "co: " + caretOffset + " doc: " + len);
document.replaceString(lookupStart, caretOffset, lookupString);
return lookupStart + lookupString.length();
}
private String getCaseCorrectedLookupString(LookupElement item) {
String lookupString = item.getLookupString();
if (item.isCaseSensitive()) {
return lookupString;
}
final String prefix = itemPattern(item);
final int length = prefix.length();
if (length == 0 || !itemMatcher(item).prefixMatches(prefix)) return lookupString;
boolean isAllLower = true;
boolean isAllUpper = true;
boolean sameCase = true;
for (int i = 0; i < length && (isAllLower || isAllUpper || sameCase); i++) {
final char c = prefix.charAt(i);
boolean isLower = Character.isLowerCase(c);
boolean isUpper = Character.isUpperCase(c);
// do not take this kind of symbols into account ('_', '@', etc.)
if (!isLower && !isUpper) continue;
isAllLower = isAllLower && isLower;
isAllUpper = isAllUpper && isUpper;
sameCase = sameCase && i < lookupString.length() && isLower == Character.isLowerCase(lookupString.charAt(i));
}
if (sameCase) return lookupString;
if (isAllLower) return lookupString.toLowerCase();
if (isAllUpper) return StringUtil.toUpperCase(lookupString);
return lookupString;
}
@Override
public int getLookupStart() {
return myOffsets.getLookupStart(disposeTrace);
}
public int getLookupOriginalStart() {
return myOffsets.getLookupOriginalStart();
}
public boolean performGuardedChange(Runnable change) {
checkValid();
assert !myChangeGuard : "already in change";
myEditor.getDocument().startGuardedBlockChecking();
myChangeGuard = true;
boolean result;
try {
result = myOffsets.performGuardedChange(change);
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
myChangeGuard = false;
}
if (!result || myDisposed) {
hideLookup(false);
return false;
}
if (isVisible()) {
HintManagerImpl.updateLocation(this, myEditor, myUi.calculatePosition().getLocation());
}
checkValid();
return true;
}
@Override
public boolean vetoesHiding() {
return myChangeGuard;
}
public boolean isAvailableToUser() {
if (ApplicationManager.getApplication().isUnitTestMode()) {
return myShown;
}
return isVisible();
}
public boolean isShown() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
return myShown;
}
public boolean showLookup() {
ApplicationManager.getApplication().assertIsDispatchThread();
checkValid();
LOG.assertTrue(!myShown);
myShown = true;
myStampShown = System.currentTimeMillis();
if (ApplicationManager.getApplication().isUnitTestMode()) return true;
if (!myEditor.getContentComponent().isShowing()) {
hideLookup(false);
return false;
}
myAdComponent.showRandomText();
myUi = new LookupUi(this, myAdComponent, myList, myProject);
myUi.setCalculating(myCalculating);
Point p = myUi.calculatePosition().getLocation();
try {
HintManagerImpl.getInstanceImpl().showEditorHint(this, myEditor, p, HintManager.HIDE_BY_ESCAPE | HintManager.UPDATE_BY_SCROLLING, 0, false,
HintManagerImpl.createHintHint(myEditor, p, this, HintManager.UNDER).setAwtTooltip(false));
}
catch (Exception e) {
LOG.error(e);
}
if (!isVisible() || !myList.isShowing()) {
hideLookup(false);
return false;
}
return true;
}
public Advertiser getAdvertiser() {
return myAdComponent;
}
public boolean mayBeNoticed() {
return myStampShown > 0 && System.currentTimeMillis() - myStampShown > 300;
}
private void addListeners() {
myEditor.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent e) {
if (!myChangeGuard && !myFinishing) {
hideLookup(false);
}
}
}, this);
final CaretListener caretListener = new CaretAdapter() {
@Override
public void caretPositionChanged(CaretEvent e) {
if (!myChangeGuard && !myFinishing) {
hideLookup(false);
}
}
};
final SelectionListener selectionListener = new SelectionListener() {
@Override
public void selectionChanged(final SelectionEvent e) {
if (!myChangeGuard && !myFinishing) {
hideLookup(false);
}
}
};
final EditorMouseListener mouseListener = new EditorMouseAdapter() {
@Override
public void mouseClicked(EditorMouseEvent e){
e.consume();
hideLookup(false);
}
};
myEditor.getCaretModel().addCaretListener(caretListener);
myEditor.getSelectionModel().addSelectionListener(selectionListener);
myEditor.addEditorMouseListener(mouseListener);
Disposer.register(this, new Disposable() {
@Override
public void dispose() {
myEditor.getCaretModel().removeCaretListener(caretListener);
myEditor.getSelectionModel().removeSelectionListener(selectionListener);
myEditor.removeEditorMouseListener(mouseListener);
}
});
JComponent editorComponent = myEditor.getContentComponent();
if (editorComponent.isShowing()) {
Disposer.register(this, new UiNotifyConnector(editorComponent, new Activatable() {
@Override
public void showNotify() {
}
@Override
public void hideNotify() {
hideLookup(false);
}
}));
}
myList.addListSelectionListener(new ListSelectionListener() {
private LookupElement oldItem = null;
@Override
public void valueChanged(@NotNull ListSelectionEvent e){
if (!myUpdating) {
final LookupElement item = getCurrentItem();
fireCurrentItemChanged(oldItem, item);
oldItem = item;
}
}
});
new ClickListener() {
@Override
public boolean onClick(@NotNull MouseEvent e, int clickCount) {
setFocusDegree(FocusDegree.FOCUSED);
markSelectionTouched();
if (clickCount == 2){
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookup(NORMAL_SELECT_CHAR), "", null);
}
return true;
}
}.installOn(myList);
}
@Override
@Nullable
public LookupElement getCurrentItem(){
LookupElement item = (LookupElement)myList.getSelectedValue();
return item instanceof EmptyLookupItem ? null : item;
}
@Override
public void setCurrentItem(LookupElement item){
markSelectionTouched();
myList.setSelectedValue(item, false);
}
@Override
public void addLookupListener(LookupListener listener){
myListeners.add(listener);
}
@Override
public void removeLookupListener(LookupListener listener){
myListeners.remove(listener);
}
@Override
public Rectangle getCurrentItemBounds(){
int index = myList.getSelectedIndex();
if (index < 0) {
LOG.error("No selected element, size=" + getListModel().getSize() + "; items" + getItems());
}
Rectangle itmBounds = myList.getCellBounds(index, index);
if (itmBounds == null){
LOG.error("No bounds for " + index + "; size=" + getListModel().getSize());
return null;
}
Point layeredPanePoint=SwingUtilities.convertPoint(myList,itmBounds.x,itmBounds.y,getComponent());
itmBounds.x = layeredPanePoint.x;
itmBounds.y = layeredPanePoint.y;
return itmBounds;
}
public void fireItemSelected(@Nullable final LookupElement item, char completionChar){
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
myArranger.itemSelected(item, completionChar);
if (!myListeners.isEmpty()){
LookupEvent event = new LookupEvent(this, item, completionChar);
for (LookupListener listener : myListeners) {
try {
listener.itemSelected(event);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
}
/**
 * Notifies every listener that the lookup was cancelled. A listener that
 * throws is logged and does not stop notification of the remaining listeners.
 */
private void fireLookupCanceled(final boolean explicitly) {
  if (myListeners.isEmpty()) {
    return;
  }
  final LookupEvent cancelEvent = new LookupEvent(this, explicitly);
  for (LookupListener subscriber : myListeners) {
    try {
      subscriber.lookupCanceled(cancelEvent);
    }
    catch (Throwable t) {
      LOG.error(t);
    }
  }
}
// Fires currentItemChanged only when the selection actually changed and there
// are listeners; the inline preview is always refreshed for the new item.
private void fireCurrentItemChanged(@Nullable LookupElement oldItem, @Nullable LookupElement currentItem) {
if (oldItem != currentItem && !myListeners.isEmpty()) {
LookupEvent event = new LookupEvent(this, currentItem, (char)0);
for (LookupListener listener : myListeners) {
listener.currentItemChanged(event);
}
}
myPreview.updatePreview(currentItem);
}
/**
 * Tries to extend the typed prefix to the longest prefix common to all
 * currently shown items (using their case-corrected lookup strings), and if
 * it is longer than what is already typed, rewrites the document via
 * {@link #replacePrefix}.
 *
 * @param explicitlyInvoked true when the user explicitly requested the
 *                          expansion; this also focuses the lookup
 * @return true if the prefix was extended
 */
public boolean fillInCommonPrefix(boolean explicitlyInvoked) {
  if (explicitlyInvoked) {
    setFocusDegree(FocusDegree.FOCUSED);
  }
  if (explicitlyInvoked && myCalculating) return false;
  if (!explicitlyInvoked && mySelectionTouched) return false;

  ListModel listModel = getListModel();
  // With zero or one item (including the "no suggestions" placeholder) there
  // is no common prefix to fill in. This single early return also covers the
  // former size()==0 and size()==1 checks, which were unreachable dead code.
  if (listModel.getSize() <= 1) return false;

  final LookupElement firstItem = (LookupElement)listModel.getElementAt(0);
  final PrefixMatcher firstItemMatcher = itemMatcher(firstItem);
  final String oldPrefix = firstItemMatcher.getPrefix();
  final String presentPrefix = oldPrefix + getAdditionalPrefix();
  String commonPrefix = getCaseCorrectedLookupString(firstItem);

  for (int i = 1; i < listModel.getSize(); i++) {
    LookupElement item = (LookupElement)listModel.getElementAt(i);
    if (item instanceof EmptyLookupItem) return false;
    // All items must share the same matcher prefix, otherwise replacing the
    // typed text would be ambiguous.
    if (!oldPrefix.equals(itemMatcher(item).getPrefix())) return false;

    final String lookupString = getCaseCorrectedLookupString(item);
    final int length = Math.min(commonPrefix.length(), lookupString.length());
    if (length < commonPrefix.length()) {
      commonPrefix = commonPrefix.substring(0, length);
    }
    // Shrink the candidate to the first mismatching character.
    for (int j = 0; j < length; j++) {
      if (commonPrefix.charAt(j) != lookupString.charAt(j)) {
        commonPrefix = lookupString.substring(0, j);
        break;
      }
    }
    // Give up once the common part no longer extends what is already typed.
    if (commonPrefix.length() == 0 || commonPrefix.length() < presentPrefix.length()) {
      return false;
    }
  }

  if (commonPrefix.equals(presentPrefix)) {
    return false;
  }

  // The extended prefix must still match every item.
  for (int i = 0; i < listModel.getSize(); i++) {
    LookupElement item = (LookupElement)listModel.getElementAt(i);
    if (!itemMatcher(item).cloneWithPrefix(commonPrefix).prefixMatches(item)) {
      return false;
    }
  }

  myOffsets.setInitialPrefix(presentPrefix, explicitlyInvoked);
  replacePrefix(presentPrefix, commonPrefix);
  return true;
}
/**
 * Replaces {@code presentPrefix} immediately before the caret with
 * {@code newPrefix} in the document (inside a guarded change), resets the
 * additional prefix, notifies the arranger, and refreshes the UI. Returns
 * silently if the guarded change fails.
 */
public void replacePrefix(final String presentPrefix, final String newPrefix) {
if (!performGuardedChange(() -> {
EditorModificationUtil.deleteSelectedText(myEditor);
int offset = myEditor.getCaretModel().getOffset();
final int start = offset - presentPrefix.length();
myEditor.getDocument().replaceString(start, offset, newPrefix);
myOffsets.clearAdditionalPrefix();
myEditor.getCaretModel().moveToOffset(start + newPrefix.length());
})) {
return;
}
withLock(() -> {
myPresentableArranger.prefixReplaced(this, newPrefix);
return null;
});
refreshUi(true, true);
}
/** PSI file for the (possibly injected) editor's document, or null if none. */
@Override
@Nullable
public PsiFile getPsiFile() {
return PsiDocumentManager.getInstance(myProject).getPsiFile(getEditor().getDocument());
}
/** True when this lookup is driven by code completion (decided by the arranger's type). */
@Override
public boolean isCompletion() {
return myArranger instanceof CompletionLookupArranger;
}
/**
 * Returns the PSI element just before the lookup start offset (or the first
 * element when the lookup starts at offset 0), mapping the offset into the
 * injected fragment when the editor is an {@link EditorWindow}.
 */
@Override
public PsiElement getPsiElement() {
PsiFile file = getPsiFile();
if (file == null) return null;
int offset = getLookupStart();
Editor editor = getEditor();
if (editor instanceof EditorWindow) {
// Convert the host-editor offset into the injected document's coordinates.
offset = editor.logicalPositionToOffset(((EditorWindow)editor).hostToInjected(myEditor.offsetToLogicalPosition(offset)));
}
if (offset > 0) return file.findElementAt(offset - 1);
return file.findElementAt(0);
}
/** Returns the cached injected document window containing {@code offset}, or null if there is none. */
@Nullable
private DocumentWindow getInjectedDocument(int offset) {
PsiFile hostFile = PsiDocumentManager.getInstance(myProject).getPsiFile(myEditor.getDocument());
if (hostFile != null) {
// inspired by com.intellij.codeInsight.editorActions.TypedHandler.injectedEditorIfCharTypedIsSignificant()
for (DocumentWindow documentWindow : InjectedLanguageUtil.getCachedInjectedDocuments(hostFile)) {
if (documentWindow.isValid() && documentWindow.containsRange(offset, offset)) {
return documentWindow;
}
}
}
return null;
}
/**
 * Returns the injected editor when the caret currently sits inside an
 * injected fragment; otherwise the host editor.
 */
@Override
@NotNull
public Editor getEditor() {
DocumentWindow documentWindow = getInjectedDocument(myEditor.getCaretModel().getOffset());
if (documentWindow != null) {
PsiFile injectedFile = PsiDocumentManager.getInstance(myProject).getPsiFile(documentWindow);
return InjectedLanguageUtil.getInjectedEditorForInjectedFile(myEditor, injectedFile);
}
return myEditor;
}
/** Always the host (top-level) editor, never an injected one. */
@Override
@NotNull
public Editor getTopLevelEditor() {
return myEditor;
}
@NotNull
@Override
public Project getProject() {
return myProject;
}
/** True when the lookup UI chose to render above the caret (false before the UI exists). */
@Override
public boolean isPositionedAboveCaret(){
return myUi != null && myUi.isPositionedAboveCaret();
}
/** True once the user (or code) has interacted with the selection. */
@Override
public boolean isSelectionTouched() {
return mySelectionTouched;
}
/** Advertisement texts currently shown at the bottom of the lookup. */
@Override
public List<String> getAdvertisements() {
return myAdComponent.getAdvertisements();
}
@Override
public void hide(){
hideLookup(true);
}
/** Hides and disposes the lookup (EDT only); no-op when already hidden. */
public void hideLookup(boolean explicitly) {
ApplicationManager.getApplication().assertIsDispatchThread();
if (myHidden) return;
doHide(true, explicitly);
}
/**
 * Core hide routine: marks the lookup hidden, hides the hint, disposes this
 * instance, then optionally fires lookupCanceled. Hiding an already-disposed
 * lookup is a programming error and is logged with the original dispose trace.
 */
private void doHide(final boolean fireCanceled, final boolean explicitly) {
if (myDisposed) {
LOG.error(disposeTrace);
}
else {
myHidden = true;
try {
super.hide();
Disposer.dispose(this);
assert myDisposed;
}
catch (Throwable e) {
LOG.error(e);
}
}
if (fireCanceled) {
fireLookupCanceled(explicitly);
}
}
/** Restores the original prefix text tracked by {@link LookupOffsets}. */
public void restorePrefix() {
myOffsets.restorePrefix();
}
// Dispose diagnostics: the stack trace captured at dispose time, also kept in
// a static field so "already disposed" errors can report the last culprit.
private static String staticDisposeTrace = null;
private String disposeTrace = null;
public static String getLastLookupDisposeTrace() {
return staticDisposeTrace;
}
/**
 * Disposes the lookup (EDT only, must already be hidden): releases offset
 * markers, marks the instance disposed and records the dispose stack trace
 * for later diagnostics. Double dispose is logged, not thrown.
 */
@Override
public void dispose() {
assert ApplicationManager.getApplication().isDispatchThread();
assert myHidden;
if (myDisposed) {
LOG.error(disposeTrace);
return;
}
myOffsets.disposeMarkers();
myDisposed = true;
disposeTrace = DebugUtil.currentStackTrace() + "\n============";
//noinspection AssignmentToStaticFieldFromInstanceMethod
staticDisposeTrace = disposeTrace;
}
/**
 * Re-arranges and redisplays the items. Sets {@code myUpdating} around the
 * list update so selection events are suppressed, then fires a single
 * currentItemChanged if the selection effectively changed.
 *
 * @param mayCheckReused   whether to check for an arranger swap first
 * @param onExplicitAction true when triggered by an explicit user action
 */
public void refreshUi(boolean mayCheckReused, boolean onExplicitAction) {
assert !myUpdating;
LookupElement prevItem = getCurrentItem();
myUpdating = true;
try {
final boolean reused = mayCheckReused && checkReused();
boolean selectionVisible = isSelectionVisible();
boolean itemsChanged = updateList(onExplicitAction, reused);
if (isVisible()) {
LOG.assertTrue(!ApplicationManager.getApplication().isUnitTestMode());
myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
}
}
finally {
myUpdating = false;
fireCurrentItemChanged(prevItem, getCurrentItem());
}
}
/** Swaps in an empty copy of the arranger (lookup session reuse) and schedules a resize. */
public void markReused() {
withLock(() -> myArranger = myArranger.createEmptyCopy());
requestResize();
}
/** Adds an advertisement line (unless it still contains the completion dummy identifier). */
public void addAdvertisement(@NotNull final String text, final @Nullable Color bgColor) {
if (containsDummyIdentifier(text)) {
return;
}
myAdComponent.addAdvertisement(text, bgColor);
requestResize();
}
public boolean isLookupDisposed() {
return myDisposed;
}
/** Throws (with the recorded dispose trace) if this lookup was already disposed. */
public void checkValid() {
if (myDisposed) {
throw new AssertionError("Disposed at: " + disposeTrace);
}
}
/** Shows {@code hint} to the right of the currently selected row. */
@Override
public void showItemPopup(JBPopup hint) {
final Rectangle bounds = getCurrentItemBounds();
hint.show(new RelativePoint(getComponent(), new Point(bounds.x + bounds.width, bounds.y)));
}
/**
 * Shows the per-item actions popup for the current item.
 *
 * @return false when the lookup is not visible, nothing is selected, or the
 *         item has no actions
 */
@Override
public boolean showElementActions() {
if (!isVisible()) return false;
final LookupElement element = getCurrentItem();
if (element == null) {
return false;
}
final Collection<LookupElementAction> actions = getActionsFor(element);
if (actions.isEmpty()) {
return false;
}
showItemPopup(JBPopupFactory.getInstance().createListPopup(new LookupActionsStep(actions, this, element)));
return true;
}
/** Relevance debugging data from the presentable arranger, computed under the lookup lock. */
@NotNull
public Map<LookupElement, List<Pair<String, Object>>> getRelevanceObjects(@NotNull Iterable<LookupElement> items, boolean hideSingleValued) {
return withLock(() -> myPresentableArranger.getRelevanceObjects(items, hideSingleValued));
}
/**
 * Runs {@code computable} under {@code myLock}. On the EDT, thread
 * prioritizing is stopped first so a background holder of the lock is not
 * starved while the EDT waits for it.
 */
private <T> T withLock(Computable<T> computable) {
if (ApplicationManager.getApplication().isDispatchThread()) {
HeavyProcessLatch.INSTANCE.stopThreadPrioritizing();
}
synchronized (myLock) {
return computable.compute();
}
}
/** Replaces the single prefix-change listener (not additive). */
@SuppressWarnings("unused")
public void setPrefixChangeListener(PrefixChangeListener listener) {
myPrefixChangeListener = listener;
}
FontPreferences getFontPreferences() {
return myFontPreferences;
}
// How strongly the lookup owns keyboard focus.
public enum FocusDegree { FOCUSED, SEMI_FOCUSED, UNFOCUSED }
}
| platform/lang-impl/src/com/intellij/codeInsight/lookup/impl/LookupImpl.java | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.lookup.impl;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.completion.impl.CamelHumpMatcher;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.codeInsight.lookup.*;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.UISettings;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.LangBundle;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.FontPreferences;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.JBList;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.util.CollectConsumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.ui.accessibility.AccessibleContextUtil;
import com.intellij.util.ui.update.Activatable;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.Collection;
import java.util.List;
import java.util.Map;
public class LookupImpl extends LightweightHint implements LookupEx, Disposable {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.lookup.impl.LookupImpl");
// Tracks the lookup start offset and typed prefix via document markers.
private final LookupOffsets myOffsets;
private final Project myProject;
// Always the host (top-level) editor; injected editors are resolved on demand in getEditor().
private final Editor myEditor;
// Guards arranger state; see withLock().
private final Object myLock = new Object();
// Suggestion list. Enter/Tab are re-dispatched to the editor so the normal
// completion key handlers run there instead of in the list.
private final JBList myList = new JBList(new CollectionListModel<LookupElement>()) {
@Override
protected void processKeyEvent(@NotNull final KeyEvent e) {
final char keyChar = e.getKeyChar();
if (keyChar == KeyEvent.VK_ENTER || keyChar == KeyEvent.VK_TAB) {
IdeFocusManager.getInstance(myProject).requestFocus(myEditor.getContentComponent(), true).doWhenDone(
() -> IdeEventQueue.getInstance().getKeyEventDispatcher().dispatchKeyEvent(e));
return;
}
super.processKeyEvent(e);
}
@NotNull
@Override
protected ExpandableItemsHandler<Integer> createExpandableItemsHandler() {
return new CompletionExtender(this);
}
};
final LookupCellRenderer myCellRenderer;
private final List<LookupListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
private PrefixChangeListener myPrefixChangeListener = new PrefixChangeListener.Adapter() {};
private final LookupPreview myPreview = new LookupPreview(this);
// keeping our own copy of editor's font preferences, which can be used in non-EDT threads (to avoid race conditions)
private final FontPreferences myFontPreferences = new FontPreferences();
// Timestamp of showLookup(), used by mayBeNoticed().
private long myStampShown = 0;
private boolean myShown = false;
private boolean myDisposed = false;
private boolean myHidden = false;
private boolean mySelectionTouched;
private FocusDegree myFocusDegree = FocusDegree.FOCUSED;
private volatile boolean myCalculating;
private final Advertiser myAdComponent;
// Widest rendered item so far; written from addItem, read by the UI.
volatile int myLookupTextWidth = 50;
// True while a performGuardedChange() document edit is in flight.
private boolean myChangeGuard;
// myArranger receives new items; myPresentableArranger is what the UI shows.
// They diverge until checkReused() swaps the presentable one in.
private volatile LookupArranger myArranger;
private LookupArranger myPresentableArranger;
// Fallback fonts per item for strings the default font cannot display.
private final Map<LookupElement, Font> myCustomFonts = ContainerUtil.createConcurrentWeakMap(10, 0.75f, Runtime.getRuntime().availableProcessors(),
ContainerUtil.identityStrategy());
private boolean myStartCompletionWhenNothingMatches;
boolean myResizePending;
// True while the chosen item is being inserted; suppresses auto-hide listeners.
private boolean myFinishing;
// True while refreshUi() rebuilds the list; suppresses selection events.
boolean myUpdating;
private LookupUi myUi;
/**
 * Builds the lookup hint for the given editor: configures the popup shell,
 * the suggestion list and its renderer, the advertiser and offset tracking,
 * seeds the model with the "no suggestions" placeholder, and wires listeners.
 */
public LookupImpl(Project project, Editor editor, @NotNull LookupArranger arranger) {
super(new JPanel(new BorderLayout()));
setForceShowAsPopup(true);
setCancelOnClickOutside(false);
setResizable(true);
AbstractPopup.suppressMacCornerFor(getComponent());
myProject = project;
// Normalize to the host editor even if an injected editor was passed in.
myEditor = InjectedLanguageUtil.getTopLevelEditor(editor);
myArranger = arranger;
myPresentableArranger = arranger;
// Snapshot font preferences for safe access from non-EDT threads.
myEditor.getColorsScheme().getFontPreferences().copyTo(myFontPreferences);
DaemonCodeAnalyzer.getInstance(myProject).disableUpdateByTimer(this);
myCellRenderer = new LookupCellRenderer(this);
myList.setCellRenderer(myCellRenderer);
myList.setFocusable(false);
myList.setFixedCellWidth(50);
// a new top level frame just got the focus. This is important to prevent screen readers
// from announcing the title of the top level frame when the list is shown (or hidden),
// as they usually do when a new top-level frame receives the focus.
AccessibleContextUtil.setParent(myList, myEditor.getContentComponent());
myList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
myList.setBackground(LookupCellRenderer.BACKGROUND_COLOR);
myList.getExpandableItemsHandler();
myAdComponent = new Advertiser();
myOffsets = new LookupOffsets(myEditor);
final CollectionListModel<LookupElement> model = getListModel();
addEmptyItem(model);
updateListHeight(model);
addListeners();
}
/** The list's model, always a {@code CollectionListModel<LookupElement>} (set in the field initializer). */
private CollectionListModel<LookupElement> getListModel() {
//noinspection unchecked
return (CollectionListModel<LookupElement>)myList.getModel();
}
public void setArranger(LookupArranger arranger) {
myArranger = arranger;
}
public FocusDegree getFocusDegree() {
return myFocusDegree;
}
@Override
public boolean isFocused() {
return getFocusDegree() == FocusDegree.FOCUSED;
}
public void setFocusDegree(FocusDegree focusDegree) {
myFocusDegree = focusDegree;
}
public boolean isCalculating() {
return myCalculating;
}
/** Flags that items are still being computed; forwarded to the UI if it exists. */
public void setCalculating(final boolean calculating) {
myCalculating = calculating;
if (myUi != null) {
myUi.setCalculating(calculating);
}
}
/** Marks the selection as user-touched and repaints (EDT only outside tests). */
public void markSelectionTouched() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
mySelectionTouched = true;
myList.repaint();
}
@TestOnly
public void setSelectionTouched(boolean selectionTouched) {
mySelectionTouched = selectionTouched;
}
@TestOnly
public int getSelectedIndex() {
return myList.getSelectedIndex();
}
protected void repaintLookup(boolean onExplicitAction, boolean reused, boolean selectionVisible, boolean itemsChanged) {
myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
}
/**
 * Clears the model (notifying the arranger of the prefix change), optionally
 * re-adds all current items, then refreshes the UI.
 */
public void resort(boolean addAgain) {
final List<LookupElement> items = getItems();
withLock(() -> {
myPresentableArranger.prefixChanged(this);
getListModel().removeAll();
return null;
});
if (addAgain) {
for (final LookupElement item : items) {
addItem(item, itemMatcher(item));
}
}
refreshUi(true, true);
}
/**
 * Registers an element with its matcher in the arranger, after widening the
 * lookup to fit its rendered presentation.
 *
 * @return false when the presentation still contains the completion dummy
 *         identifier (the item is rejected)
 */
public boolean addItem(LookupElement item, PrefixMatcher matcher) {
LookupElementPresentation presentation = renderItemApproximately(item);
if (containsDummyIdentifier(presentation.getItemText()) ||
containsDummyIdentifier(presentation.getTailText()) ||
containsDummyIdentifier(presentation.getTypeText())) {
return false;
}
updateLookupWidth(item, presentation);
withLock(() -> {
myArranger.registerMatcher(item, matcher);
myArranger.addElement(item, presentation);
return null;
});
return true;
}
/** True when {@code s} still carries the in-progress completion dummy identifier. */
private static boolean containsDummyIdentifier(@Nullable final String s) {
  if (s == null) {
    return false;
  }
  return s.contains(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED);
}
public void updateLookupWidth(LookupElement item) {
updateLookupWidth(item, renderItemApproximately(item));
}
/**
 * Records a fallback font for the item if the default font cannot display its
 * presentation, and widens the lookup to fit the rendered width.
 */
private void updateLookupWidth(LookupElement item, LookupElementPresentation presentation) {
final Font customFont = myCellRenderer.getFontAbleToDisplay(presentation);
if (customFont != null) {
myCustomFonts.put(item, customFont);
}
int maxWidth = myCellRenderer.updateMaximumWidth(presentation, item);
myLookupTextWidth = Math.max(maxWidth, myLookupTextWidth);
}
/** Returns the fallback font recorded for {@code item} (optionally bolded), or null if none. */
@Nullable
public Font getCustomFont(LookupElement item, boolean bold) {
  Font registered = myCustomFonts.get(item);
  if (registered == null) {
    return null;
  }
  if (bold) {
    return registered.deriveFont(Font.BOLD);
  }
  return registered;
}
/** Flags that the popup should be resized on the next UI refresh (EDT only). */
public void requestResize() {
ApplicationManager.getApplication().assertIsDispatchThread();
myResizePending = true;
}
/**
 * Collects actions for {@code element} from all LookupActionProvider
 * extensions; when any exist, the show/hide-intention-icon action is appended.
 */
public Collection<LookupElementAction> getActionsFor(LookupElement element) {
final CollectConsumer<LookupElementAction> consumer = new CollectConsumer<>();
for (LookupActionProvider provider : LookupActionProvider.EP_NAME.getExtensions()) {
provider.fillActions(element, this, consumer);
}
if (!consumer.getResult().isEmpty()) {
consumer.consume(new ShowHideIntentionIconLookupAction());
}
return consumer.getResult();
}
public JList getList() {
return myList;
}
/** All real items in display order, excluding the "no suggestions" placeholder. */
@Override
public List<LookupElement> getItems() {
return withLock(() -> ContainerUtil.findAll(getListModel().toList(), element -> !(element instanceof EmptyLookupItem)));
}
public String getAdditionalPrefix() {
return myOffsets.getAdditionalPrefix();
}
/**
 * Appends a typed character to the prefix: updates offsets, notifies the
 * arranger, refreshes the UI with the selection scrolled to top, and finally
 * informs the prefix-change listener.
 */
void appendPrefix(char c) {
checkValid();
myOffsets.appendPrefix(c);
withLock(() -> {
myPresentableArranger.prefixChanged(this);
return null;
});
requestResize();
refreshUi(false, true);
ensureSelectionVisible(true);
myPrefixChangeListener.afterAppend(c);
}
public void setStartCompletionWhenNothingMatches(boolean startCompletionWhenNothingMatches) {
myStartCompletionWhenNothingMatches = startCompletionWhenNothingMatches;
}
public boolean isStartCompletionWhenNothingMatches() {
return myStartCompletionWhenNothingMatches;
}
/**
 * Scrolls so the selection is visible. With {@code forceTopSelection}, the
 * selected row is pinned near the top of the viewport (one row above it is
 * kept visible as a scrolling hint); otherwise a minimal scroll is done only
 * when the selection is off-screen.
 */
public void ensureSelectionVisible(boolean forceTopSelection) {
if (isSelectionVisible() && !forceTopSelection) {
return;
}
if (!forceTopSelection) {
ScrollingUtil.ensureIndexIsVisible(myList, myList.getSelectedIndex(), 1);
return;
}
// selected item should be at the top of the visible list
int top = myList.getSelectedIndex();
if (top > 0) {
top--; // show one element above the selected one to give the hint that there are more available via scrolling
}
int firstVisibleIndex = myList.getFirstVisibleIndex();
if (firstVisibleIndex == top) {
return;
}
ScrollingUtil.ensureRangeIsVisible(myList, top, top + myList.getLastVisibleIndex() - firstVisibleIndex);
}
/**
 * Removes the last typed prefix character (backspace). The UI is refreshed
 * only when the presentable arranger is the current one; returns false when
 * there was nothing to truncate.
 */
boolean truncatePrefix(boolean preserveSelection) {
if (!myOffsets.truncatePrefix()) {
return false;
}
if (preserveSelection) {
markSelectionTouched();
}
boolean shouldUpdate = withLock(() -> {
myPresentableArranger.prefixChanged(this);
return myPresentableArranger == myArranger;
});
requestResize();
if (shouldUpdate) {
refreshUi(false, true);
ensureSelectionVisible(true);
}
return true;
}
/**
 * Asks the arranger for the current item order and selection, rebuilds the
 * list model (inserting the "no suggestions" placeholder when empty), and
 * applies the selection. An out-of-range selection index from the arranger is
 * logged and clamped to 0.
 *
 * @return true when the visible item set changed (by identity)
 */
private boolean updateList(boolean onExplicitAction, boolean reused) {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
checkValid();
CollectionListModel<LookupElement> listModel = getListModel();
Pair<List<LookupElement>, Integer> pair = withLock(() -> myPresentableArranger.arrangeItems(this, onExplicitAction || reused));
List<LookupElement> items = pair.first;
Integer toSelect = pair.second;
if (toSelect == null || toSelect < 0 || items.size() > 0 && toSelect >= items.size()) {
LOG.error("Arranger " + myPresentableArranger + " returned invalid selection index=" + toSelect + "; items=" + items);
toSelect = 0;
}
myOffsets.checkMinPrefixLengthChanges(items, this);
List<LookupElement> oldModel = listModel.toList();
listModel.removeAll();
if (!items.isEmpty()) {
listModel.add(items);
}
else {
addEmptyItem(listModel);
}
updateListHeight(listModel);
myList.setSelectedIndex(toSelect);
return !ContainerUtil.equalsIdentity(oldModel, items);
}
/** True when the selected row is fully inside the viewport. */
protected boolean isSelectionVisible() {
return ScrollingUtil.isIndexFullyVisible(myList, myList.getSelectedIndex());
}
/**
 * If a new arranger was installed (lookup reuse), promotes it to the
 * presentable one, clears the additional prefix, and reports the prefix
 * change. Returns true when a swap happened.
 */
private boolean checkReused() {
return withLock(() -> {
if (myPresentableArranger != myArranger) {
myPresentableArranger = myArranger;
myOffsets.clearAdditionalPrefix();
myPresentableArranger.prefixChanged(this);
return true;
}
return false;
});
}
/** Sizes rows from the first element's rendered height and caps visible rows by the UI setting. */
private void updateListHeight(ListModel model) {
myList.setFixedCellHeight(myCellRenderer.getListCellRendererComponent(myList, model.getElementAt(0), 0, false, false).getPreferredSize().height);
myList.setVisibleRowCount(Math.min(model.getSize(), UISettings.getInstance().getMaxLookupListHeight()));
}
/** Adds the placeholder row: blank while computing, "no suggestions" otherwise. */
private void addEmptyItem(CollectionListModel<LookupElement> model) {
LookupElement item = new EmptyLookupItem(myCalculating ? " " : LangBundle.message("completion.no.suggestions"), false);
model.add(item);
updateLookupWidth(item);
requestResize();
}
/** Renders {@code item} into a fresh, uncached presentation. */
private static LookupElementPresentation renderItemApproximately(LookupElement item) {
  final LookupElementPresentation presentation = new LookupElementPresentation();
  item.renderElement(presentation);
  return presentation;
}
/** The pattern the arranger associates with {@code element}; empty for the placeholder item. */
@NotNull
@Override
public String itemPattern(@NotNull LookupElement element) {
if (element instanceof EmptyLookupItem) return "";
return myPresentableArranger.itemPattern(element);
}
/** The matcher registered for {@code item}; an empty CamelHump matcher for the placeholder item. */
@Override
@NotNull
public PrefixMatcher itemMatcher(@NotNull LookupElement item) {
if (item instanceof EmptyLookupItem) {
return new CamelHumpMatcher("");
}
return myPresentableArranger.itemMatcher(item);
}
public void finishLookup(final char completionChar) {
finishLookup(completionChar, (LookupElement)myList.getSelectedValue());
}
/**
 * Finishes the lookup with {@code item}. Must be called without a write
 * action; ensures the file is writable first (a failed check hides the
 * lookup and fires a null selection), then performs the insertion inside a
 * command.
 */
public void finishLookup(char completionChar, @Nullable final LookupElement item) {
LOG.assertTrue(!ApplicationManager.getApplication().isWriteAccessAllowed(), "finishLookup should be called without a write action");
final PsiFile file = getPsiFile();
boolean writableOk = file == null || FileModificationService.getInstance().prepareFileForWrite(file);
if (myDisposed) { // ensureFilesWritable could close us by showing a dialog
return;
}
if (!writableOk) {
doHide(false, true);
fireItemSelected(null, completionChar);
return;
}
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookupInWritableFile(completionChar, item), null, null);
}
/**
 * Second stage of finishing: validates the item (invalid/placeholder/deferred
 * items hide the lookup with a null selection), records camel-hump feature
 * usage for non-plain matches, inserts the lookup string inside a write
 * action with guarded-block checking, then hides and fires itemSelected.
 * Bails out whenever a step disposed the lookup.
 */
void finishLookupInWritableFile(char completionChar, @Nullable LookupElement item) {
//noinspection deprecation,unchecked
if (item == null ||
!item.isValid() ||
item instanceof EmptyLookupItem ||
item.getObject() instanceof DeferredUserLookupValue &&
item.as(LookupItem.CLASS_CONDITION_KEY) != null &&
!((DeferredUserLookupValue)item.getObject()).handleUserSelection(item.as(LookupItem.CLASS_CONDITION_KEY), myProject)) {
doHide(false, true);
fireItemSelected(null, completionChar);
return;
}
if (myDisposed) { // DeferredUserLookupValue could close us in any way
return;
}
final String prefix = itemPattern(item);
boolean plainMatch = ContainerUtil.or(item.getAllLookupStrings(), s -> StringUtil.containsIgnoreCase(s, prefix));
if (!plainMatch) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_CAMEL_HUMPS);
}
myFinishing = true;
ApplicationManager.getApplication().runWriteAction(() -> {
myEditor.getDocument().startGuardedBlockChecking();
try {
insertLookupString(item, getPrefixLength(item));
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
}
});
if (myDisposed) { // any document listeners could close us
return;
}
doHide(false, true);
fireItemSelected(item, completionChar);
}
/** Length of the prefix to replace for {@code item}, as tracked by {@link LookupOffsets}. */
public int getPrefixLength(LookupElement item) {
return myOffsets.getPrefixLength(item, this);
}
/**
 * Replaces the typed prefix with the (case-corrected) lookup string at every
 * caret of the host editor, then scrolls the caret back into view.
 */
protected void insertLookupString(LookupElement item, final int prefix) {
  final String lookupString = getCaseCorrectedLookupString(item);
  final Editor hostEditor = getTopLevelEditor();
  hostEditor.getCaretModel().runForEachCaret(caret -> {
    EditorModificationUtil.deleteSelectedText(hostEditor);
    final int caretOffset = hostEditor.getCaretModel().getOffset();
    int insertionEnd = insertLookupInDocumentWindowIfNeeded(caretOffset, prefix, lookupString);
    hostEditor.getCaretModel().moveToOffset(insertionEnd);
    hostEditor.getSelectionModel().removeSelection();
  });
  myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
}
/**
 * Inserts the lookup string either into the host document or, when the caret
 * is inside an injected fragment, into the injected document — trimming the
 * replaced range and string to the editable fragment. Returns the host-offset
 * position after the inserted text.
 */
private int insertLookupInDocumentWindowIfNeeded(int caretOffset, int prefix, String lookupString) {
DocumentWindow document = getInjectedDocument(caretOffset);
if (document == null) return insertLookupInDocument(caretOffset, myEditor.getDocument(), prefix, lookupString);
PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
int offset = document.hostToInjected(caretOffset);
int lookupStart = Math.min(offset, Math.max(offset - prefix, 0));
// diff: how far the editable fragment start cuts into the prefix; -1 means "use the host document".
int diff = -1;
if (file != null) {
List<TextRange> ranges = InjectedLanguageManager.getInstance(myProject)
.intersectWithAllEditableFragments(file, TextRange.create(lookupStart, offset));
if (!ranges.isEmpty()) {
diff = ranges.get(0).getStartOffset() - lookupStart;
if (ranges.size() == 1 && diff == 0) diff = -1;
}
}
if (diff == -1) return insertLookupInDocument(caretOffset, myEditor.getDocument(), prefix, lookupString);
return document.injectedToHost(
insertLookupInDocument(offset, document, prefix - diff, diff == 0 ? lookupString : lookupString.substring(diff))
);
}
/**
 * Replaces up to {@code prefix} characters before {@code caretOffset} with
 * {@code lookupString} in {@code document}; returns the offset just after the
 * inserted text. Bounds are asserted against the document length.
 */
private static int insertLookupInDocument(int caretOffset, Document document, int prefix, String lookupString) {
int lookupStart = Math.min(caretOffset, Math.max(caretOffset - prefix, 0));
int len = document.getTextLength();
LOG.assertTrue(lookupStart >= 0 && lookupStart <= len,
"ls: " + lookupStart + " caret: " + caretOffset + " prefix:" + prefix + " doc: " + len);
LOG.assertTrue(caretOffset >= 0 && caretOffset <= len, "co: " + caretOffset + " doc: " + len);
document.replaceString(lookupStart, caretOffset, lookupString);
return lookupStart + lookupString.length();
}
/**
 * Adjusts the case of the item's lookup string to how the user typed the
 * prefix: an all-lowercase prefix lowercases the string, an all-uppercase
 * prefix uppercases it; otherwise (mixed case, case-sensitive item, empty or
 * non-matching prefix) the string is returned unchanged.
 */
private String getCaseCorrectedLookupString(LookupElement item) {
  String lookupString = item.getLookupString();
  if (item.isCaseSensitive()) {
    return lookupString;
  }

  final String prefix = itemPattern(item);
  final int length = prefix.length();
  if (length == 0 || !itemMatcher(item).prefixMatches(prefix)) return lookupString;

  boolean isAllLower = true;
  boolean isAllUpper = true;
  boolean sameCase = true;
  for (int i = 0; i < length && (isAllLower || isAllUpper || sameCase); i++) {
    final char c = prefix.charAt(i);
    boolean isLower = Character.isLowerCase(c);
    boolean isUpper = Character.isUpperCase(c);
    // do not take this kind of symbols into account ('_', '@', etc.)
    if (!isLower && !isUpper) continue;
    isAllLower = isAllLower && isLower;
    isAllUpper = isAllUpper && isUpper;
    sameCase = sameCase && isLower == Character.isLowerCase(lookupString.charAt(i));
  }
  if (sameCase) return lookupString;
  // Use the locale-independent StringUtil conversion for both directions. The
  // previous String.toLowerCase() used the default locale (e.g. Turkish
  // dotless-i mapping) and was inconsistent with the uppercase branch below.
  if (isAllLower) return StringUtil.toLowerCase(lookupString);
  if (isAllUpper) return StringUtil.toUpperCase(lookupString);
  return lookupString;
}
/** Current lookup start offset; the dispose trace is passed along for error reporting. */
@Override
public int getLookupStart() {
return myOffsets.getLookupStart(disposeTrace);
}
public int getLookupOriginalStart() {
return myOffsets.getLookupOriginalStart();
}
/**
 * Runs a document-mutating {@code change} with guarded-block checking and the
 * change-guard flag set (so the auto-hide document listener ignores it). On
 * failure or if the lookup got disposed, hides the lookup and returns false;
 * otherwise repositions the visible hint and returns true. Not reentrant.
 */
public boolean performGuardedChange(Runnable change) {
checkValid();
assert !myChangeGuard : "already in change";
myEditor.getDocument().startGuardedBlockChecking();
myChangeGuard = true;
boolean result;
try {
result = myOffsets.performGuardedChange(change);
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
myChangeGuard = false;
}
if (!result || myDisposed) {
hideLookup(false);
return false;
}
if (isVisible()) {
HintManagerImpl.updateLocation(this, myEditor, myUi.calculatePosition().getLocation());
}
checkValid();
return true;
}
/** The hint must not be hidden while a guarded document change is in flight. */
@Override
public boolean vetoesHiding() {
return myChangeGuard;
}
/** Visible to the user; in unit-test mode this falls back to the shown flag. */
public boolean isAvailableToUser() {
if (ApplicationManager.getApplication().isUnitTestMode()) {
return myShown;
}
return isVisible();
}
public boolean isShown() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
return myShown;
}
/**
 * Shows the lookup as an editor hint (EDT only, at most once). Builds the
 * LookupUi, computes the position and displays via HintManager. Returns
 * false (after hiding) when the editor is not showing or the hint failed to
 * appear; always true in unit-test mode.
 */
public boolean showLookup() {
ApplicationManager.getApplication().assertIsDispatchThread();
checkValid();
LOG.assertTrue(!myShown);
myShown = true;
myStampShown = System.currentTimeMillis();
if (ApplicationManager.getApplication().isUnitTestMode()) return true;
if (!myEditor.getContentComponent().isShowing()) {
hideLookup(false);
return false;
}
myAdComponent.showRandomText();
myUi = new LookupUi(this, myAdComponent, myList, myProject);
myUi.setCalculating(myCalculating);
Point p = myUi.calculatePosition().getLocation();
try {
HintManagerImpl.getInstanceImpl().showEditorHint(this, myEditor, p, HintManager.HIDE_BY_ESCAPE | HintManager.UPDATE_BY_SCROLLING, 0, false,
HintManagerImpl.createHintHint(myEditor, p, this, HintManager.UNDER).setAwtTooltip(false));
}
catch (Exception e) {
LOG.error(e);
}
if (!isVisible() || !myList.isShowing()) {
hideLookup(false);
return false;
}
return true;
}
public Advertiser getAdvertiser() {
return myAdComponent;
}
/** True once the lookup has been visible for more than 300 ms. */
public boolean mayBeNoticed() {
return myStampShown > 0 && System.currentTimeMillis() - myStampShown > 300;
}
/**
 * Wires all auto-hide and interaction listeners: document/caret/selection/
 * mouse changes in the editor hide the lookup (unless the change came from
 * the lookup itself), hiding the editor component hides the lookup, list
 * selection changes fire currentItemChanged, and a double click finishes the
 * lookup. Editor listeners are unregistered when this lookup is disposed.
 */
private void addListeners() {
myEditor.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent e) {
// Any document change not initiated by the lookup closes it.
if (!myChangeGuard && !myFinishing) {
hideLookup(false);
}
}
}, this);
final CaretListener caretListener = new CaretAdapter() {
@Override
public void caretPositionChanged(CaretEvent e) {
if (!myChangeGuard && !myFinishing) {
hideLookup(false);
}
}
};
final SelectionListener selectionListener = new SelectionListener() {
@Override
public void selectionChanged(final SelectionEvent e) {
if (!myChangeGuard && !myFinishing) {
hideLookup(false);
}
}
};
final EditorMouseListener mouseListener = new EditorMouseAdapter() {
@Override
public void mouseClicked(EditorMouseEvent e){
e.consume();
hideLookup(false);
}
};
myEditor.getCaretModel().addCaretListener(caretListener);
myEditor.getSelectionModel().addSelectionListener(selectionListener);
myEditor.addEditorMouseListener(mouseListener);
// Editor listeners have no parent-disposable overloads here; remove them manually on dispose.
Disposer.register(this, new Disposable() {
@Override
public void dispose() {
myEditor.getCaretModel().removeCaretListener(caretListener);
myEditor.getSelectionModel().removeSelectionListener(selectionListener);
myEditor.removeEditorMouseListener(mouseListener);
}
});
JComponent editorComponent = myEditor.getContentComponent();
if (editorComponent.isShowing()) {
Disposer.register(this, new UiNotifyConnector(editorComponent, new Activatable() {
@Override
public void showNotify() {
}
@Override
public void hideNotify() {
// Editor went away (e.g. tab closed) — the lookup must not outlive it.
hideLookup(false);
}
}));
}
myList.addListSelectionListener(new ListSelectionListener() {
private LookupElement oldItem = null;
@Override
public void valueChanged(@NotNull ListSelectionEvent e){
// Selection events during refreshUi() are suppressed via myUpdating.
if (!myUpdating) {
final LookupElement item = getCurrentItem();
fireCurrentItemChanged(oldItem, item);
oldItem = item;
}
}
});
new ClickListener() {
@Override
public boolean onClick(@NotNull MouseEvent e, int clickCount) {
setFocusDegree(FocusDegree.FOCUSED);
markSelectionTouched();
// Double click selects the item under the mouse.
if (clickCount == 2){
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookup(NORMAL_SELECT_CHAR), "", null);
}
return true;
}
}.installOn(myList);
}
/** @return the selected element, or null if the selection is the "empty" placeholder item. */
@Override
@Nullable
public LookupElement getCurrentItem(){
  LookupElement item = (LookupElement)myList.getSelectedValue();
  return item instanceof EmptyLookupItem ? null : item;
}

/** Selects {@code item} in the list and marks the selection as user-touched. */
@Override
public void setCurrentItem(LookupElement item){
  markSelectionTouched();
  myList.setSelectedValue(item, false);
}

@Override
public void addLookupListener(LookupListener listener){
  myListeners.add(listener);
}

@Override
public void removeLookupListener(LookupListener listener){
  myListeners.remove(listener);
}
/**
 * @return the bounds of the selected cell converted into the lookup component's coordinate
 * space, or null (after logging) when the list reports no cell bounds.
 */
@Override
public Rectangle getCurrentItemBounds(){
  int index = myList.getSelectedIndex();
  if (index < 0) {
    // NOTE(review): execution deliberately continues with index == -1 after this error;
    // getCellBounds(-1, -1) then presumably returns null and takes the branch below.
    LOG.error("No selected element, size=" + getListModel().getSize() + "; items" + getItems());
  }
  Rectangle itmBounds = myList.getCellBounds(index, index);
  if (itmBounds == null){
    LOG.error("No bounds for " + index + "; size=" + getListModel().getSize());
    return null;
  }
  // Convert from the list's coordinates to the lookup root component's coordinates.
  Point layeredPanePoint=SwingUtilities.convertPoint(myList,itmBounds.x,itmBounds.y,getComponent());
  itmBounds.x = layeredPanePoint.x;
  itmBounds.y = layeredPanePoint.y;
  return itmBounds;
}
/**
 * Notifies the arranger and all registered listeners that {@code item} was selected with
 * {@code completionChar}. All documents are committed first so listeners see up-to-date PSI.
 */
public void fireItemSelected(@Nullable final LookupElement item, char completionChar){
  PsiDocumentManager.getInstance(myProject).commitAllDocuments();
  myArranger.itemSelected(item, completionChar);
  if (!myListeners.isEmpty()){
    LookupEvent event = new LookupEvent(this, item, completionChar);
    for (LookupListener listener : myListeners) {
      try {
        // A misbehaving listener must not prevent the others from being notified.
        listener.itemSelected(event);
      }
      catch (Throwable e) {
        LOG.error(e);
      }
    }
  }
}

/** Notifies all listeners that the lookup was cancelled (explicitly by the user or not). */
private void fireLookupCanceled(final boolean explicitly) {
  if (!myListeners.isEmpty()){
    LookupEvent event = new LookupEvent(this, explicitly);
    for (LookupListener listener : myListeners) {
      try {
        listener.lookupCanceled(event);
      }
      catch (Throwable e) {
        LOG.error(e);
      }
    }
  }
}

/** Fires currentItemChanged when the selection actually changed, then updates the preview. */
private void fireCurrentItemChanged(@Nullable LookupElement oldItem, @Nullable LookupElement currentItem) {
  if (oldItem != currentItem && !myListeners.isEmpty()) {
    LookupEvent event = new LookupEvent(this, currentItem, (char)0);
    for (LookupListener listener : myListeners) {
      listener.currentItemChanged(event);
    }
  }
  // The preview is updated unconditionally, even when the item did not change.
  myPreview.updatePreview(currentItem);
}
/**
 * Extends the typed prefix to the longest common prefix of all lookup items, editing the
 * document accordingly.
 *
 * <p>Returns false (doing nothing) when: invoked explicitly while items are still being
 * calculated; invoked implicitly after the user touched the selection; there are fewer than
 * two items; the items disagree on their current prefix; the common prefix is empty, shorter
 * than what is already typed, or equal to it; or some item would no longer match it.
 *
 * @param explicitlyInvoked true when the user explicitly requested prefix completion
 * @return true if the prefix was actually replaced
 */
public boolean fillInCommonPrefix(boolean explicitlyInvoked) {
  if (explicitlyInvoked) {
    setFocusDegree(FocusDegree.FOCUSED);
  }
  if (explicitlyInvoked && myCalculating) return false;
  if (!explicitlyInvoked && mySelectionTouched) return false;
  ListModel listModel = getListModel();
  // At least two items are needed for a "common" prefix to make sense. This single check
  // also covers the former redundant size == 0 and size == 1 re-checks, which were
  // unreachable after it and have been removed.
  if (listModel.getSize() <= 1) return false;
  final LookupElement firstItem = (LookupElement)listModel.getElementAt(0);
  final PrefixMatcher firstItemMatcher = itemMatcher(firstItem);
  final String oldPrefix = firstItemMatcher.getPrefix();
  final String presentPrefix = oldPrefix + getAdditionalPrefix();
  String commonPrefix = getCaseCorrectedLookupString(firstItem);
  for (int i = 1; i < listModel.getSize(); i++) {
    LookupElement item = (LookupElement)listModel.getElementAt(i);
    if (item instanceof EmptyLookupItem) return false;
    if (!oldPrefix.equals(itemMatcher(item).getPrefix())) return false;
    final String lookupString = getCaseCorrectedLookupString(item);
    // Truncate commonPrefix to the shared leading part of it and this item's string.
    final int length = Math.min(commonPrefix.length(), lookupString.length());
    if (length < commonPrefix.length()) {
      commonPrefix = commonPrefix.substring(0, length);
    }
    for (int j = 0; j < length; j++) {
      if (commonPrefix.charAt(j) != lookupString.charAt(j)) {
        commonPrefix = lookupString.substring(0, j);
        break;
      }
    }
    if (commonPrefix.length() == 0 || commonPrefix.length() < presentPrefix.length()) {
      return false;
    }
  }
  if (commonPrefix.equals(presentPrefix)) {
    return false;
  }
  // Every item must still match after the prefix is extended.
  for (int i = 0; i < listModel.getSize(); i++) {
    LookupElement item = (LookupElement)listModel.getElementAt(i);
    if (!itemMatcher(item).cloneWithPrefix(commonPrefix).prefixMatches(item)) {
      return false;
    }
  }
  myOffsets.setInitialPrefix(presentPrefix, explicitlyInvoked);
  replacePrefix(presentPrefix, commonPrefix);
  return true;
}
/**
 * Replaces {@code presentPrefix} immediately before the caret with {@code newPrefix} in the
 * document, updates the arranger's notion of the prefix, and refreshes the UI. Does nothing
 * when the guarded document change is rejected.
 */
public void replacePrefix(final String presentPrefix, final String newPrefix) {
  if (!performGuardedChange(() -> {
    EditorModificationUtil.deleteSelectedText(myEditor);
    int offset = myEditor.getCaretModel().getOffset();
    final int start = offset - presentPrefix.length();
    myEditor.getDocument().replaceString(start, offset, newPrefix);
    myOffsets.clearAdditionalPrefix();
    myEditor.getCaretModel().moveToOffset(start + newPrefix.length());
  })) {
    return;
  }
  withLock(() -> {
    myPresentableArranger.prefixReplaced(this, newPrefix);
    return null;
  });
  refreshUi(true, true);
}
/** @return the PSI file for the (possibly injected) editor's document, or null. */
@Override
@Nullable
public PsiFile getPsiFile() {
  return PsiDocumentManager.getInstance(myProject).getPsiFile(getEditor().getDocument());
}

/** @return true when this lookup was created by code completion (judged by its arranger). */
@Override
public boolean isCompletion() {
  return myArranger instanceof CompletionLookupArranger;
}

/**
 * @return the PSI element just before the lookup start offset (translated into the injected
 * file when the editor is an EditorWindow), or the element at 0 for an offset of 0.
 */
@Override
public PsiElement getPsiElement() {
  PsiFile file = getPsiFile();
  if (file == null) return null;
  int offset = getLookupStart();
  Editor editor = getEditor();
  if (editor instanceof EditorWindow) {
    // Translate the host editor offset into the injected fragment's own offsets.
    offset = editor.logicalPositionToOffset(((EditorWindow)editor).hostToInjected(myEditor.offsetToLogicalPosition(offset)));
  }
  if (offset > 0) return file.findElementAt(offset - 1);
  return file.findElementAt(0);
}
/**
 * @return the cached injected document whose host range contains {@code offset}, or null
 * when the offset is not inside any injected fragment.
 */
@Nullable
private DocumentWindow getInjectedDocument(int offset) {
  PsiFile hostFile = PsiDocumentManager.getInstance(myProject).getPsiFile(myEditor.getDocument());
  if (hostFile != null) {
    // inspired by com.intellij.codeInsight.editorActions.TypedHandler.injectedEditorIfCharTypedIsSignificant()
    for (DocumentWindow documentWindow : InjectedLanguageUtil.getCachedInjectedDocuments(hostFile)) {
      if (documentWindow.isValid() && documentWindow.containsRange(offset, offset)) {
        return documentWindow;
      }
    }
  }
  return null;
}
/**
 * @return the editor the lookup is conceptually attached to: the injected editor when the
 * caret sits inside an injected fragment, otherwise the host editor.
 */
@Override
@NotNull
public Editor getEditor() {
  DocumentWindow documentWindow = getInjectedDocument(myEditor.getCaretModel().getOffset());
  if (documentWindow != null) {
    PsiFile injectedFile = PsiDocumentManager.getInstance(myProject).getPsiFile(documentWindow);
    return InjectedLanguageUtil.getInjectedEditorForInjectedFile(myEditor, injectedFile);
  }
  return myEditor;
}

/** @return the host (top-level) editor, never an injected one. */
@Override
@NotNull
public Editor getTopLevelEditor() {
  return myEditor;
}

@NotNull
@Override
public Project getProject() {
  return myProject;
}
@Override
public boolean isPositionedAboveCaret(){
  // myUi is created in show(); before that the lookup has no on-screen position.
  return myUi != null && myUi.isPositionedAboveCaret();
}

@Override
public boolean isSelectionTouched() {
  return mySelectionTouched;
}

@Override
public List<String> getAdvertisements() {
  return myAdComponent.getAdvertisements();
}

/** Hides the lookup, treating it as an explicit (user-driven) cancellation. */
@Override
public void hide(){
  hideLookup(true);
}

/**
 * Hides and disposes the lookup unless it is already hidden. Must be called on the EDT.
 *
 * @param explicitly whether the user explicitly cancelled; forwarded to lookupCanceled events
 */
public void hideLookup(boolean explicitly) {
  ApplicationManager.getApplication().assertIsDispatchThread();
  if (myHidden) return;
  doHide(true, explicitly);
}
/**
 * Performs the actual hide + dispose. Logs the earlier dispose trace (instead of throwing)
 * when invoked after disposal.
 *
 * @param fireCanceled whether to fire lookupCanceled to listeners afterwards
 * @param explicitly   forwarded to the cancellation event
 */
private void doHide(final boolean fireCanceled, final boolean explicitly) {
  if (myDisposed) {
    LOG.error(disposeTrace);
  }
  else {
    myHidden = true;
    try {
      super.hide();
      Disposer.dispose(this);
      assert myDisposed;
    }
    catch (Throwable e) {
      LOG.error(e);
    }
  }
  // Note: fired even on the already-disposed path above.
  if (fireCanceled) {
    fireLookupCanceled(explicitly);
  }
}
/** Restores the document prefix to its state before the lookup modified it. */
public void restorePrefix() {
  myOffsets.restorePrefix();
}

// Stack trace captured at dispose() time, used to diagnose use-after-dispose. The static
// copy holds the trace of whichever lookup was disposed most recently.
private static String staticDisposeTrace = null;
private String disposeTrace = null;

public static String getLastLookupDisposeTrace() {
  return staticDisposeTrace;
}
/** Disposes the lookup. Must run on the EDT, and only after the lookup has been hidden. */
@Override
public void dispose() {
  assert ApplicationManager.getApplication().isDispatchThread();
  assert myHidden;
  if (myDisposed) {
    LOG.error(disposeTrace);
    return;
  }
  myOffsets.disposeMarkers();
  myDisposed = true;
  // Remember where disposal happened so later misuse can report a meaningful trace.
  disposeTrace = DebugUtil.currentStackTrace() + "\n============";
  //noinspection AssignmentToStaticFieldFromInstanceMethod
  staticDisposeTrace = disposeTrace;
}
/**
 * Rebuilds the list model (and the popup UI when visible) after the items or prefix changed.
 * myUpdating suppresses per-cell selection events during the rebuild; a single
 * currentItemChanged event is fired at the end instead.
 *
 * @param mayCheckReused   whether to check if the lookup was reused for a new session
 * @param onExplicitAction whether the refresh was triggered by an explicit user action
 */
public void refreshUi(boolean mayCheckReused, boolean onExplicitAction) {
  assert !myUpdating;
  LookupElement prevItem = getCurrentItem();
  myUpdating = true;
  try {
    final boolean reused = mayCheckReused && checkReused();
    boolean selectionVisible = isSelectionVisible();
    boolean itemsChanged = updateList(onExplicitAction, reused);
    if (isVisible()) {
      LOG.assertTrue(!ApplicationManager.getApplication().isUnitTestMode());
      myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
    }
  }
  finally {
    myUpdating = false;
    fireCurrentItemChanged(prevItem, getCurrentItem());
  }
}
/** Marks this lookup as reused for a new session: swaps in an empty arranger and resizes. */
public void markReused() {
  withLock(() -> myArranger = myArranger.createEmptyCopy());
  requestResize();
}

/**
 * Adds an advertisement line below the list, unless the text still contains the completion
 * dummy identifier (which would expose an internal artifact to the user).
 */
public void addAdvertisement(@NotNull final String text, final @Nullable Color bgColor) {
  if (containsDummyIdentifier(text)) {
    return;
  }
  myAdComponent.addAdvertisement(text, bgColor);
  requestResize();
}

public boolean isLookupDisposed() {
  return myDisposed;
}

/** @throws AssertionError (carrying the dispose stack trace) if already disposed */
public void checkValid() {
  if (myDisposed) {
    throw new AssertionError("Disposed at: " + disposeTrace);
  }
}
/** Shows {@code hint} at the right edge of the currently selected lookup cell. */
@Override
public void showItemPopup(JBPopup hint) {
  final Rectangle bounds = getCurrentItemBounds();
  hint.show(new RelativePoint(getComponent(), new Point(bounds.x + bounds.width, bounds.y)));
}

/**
 * Shows the popup with the current item's element actions.
 *
 * @return false if the lookup is not visible, nothing is selected, or the item has no actions
 */
@Override
public boolean showElementActions() {
  if (!isVisible()) return false;
  final LookupElement element = getCurrentItem();
  if (element == null) {
    return false;
  }
  final Collection<LookupElementAction> actions = getActionsFor(element);
  if (actions.isEmpty()) {
    return false;
  }
  showItemPopup(JBPopupFactory.getInstance().createListPopup(new LookupActionsStep(actions, this, element)));
  return true;
}
/** @return per-element relevance data from the arranger, computed under the lookup lock. */
@NotNull
public Map<LookupElement, List<Pair<String, Object>>> getRelevanceObjects(@NotNull Iterable<LookupElement> items, boolean hideSingleValued) {
  return withLock(() -> myPresentableArranger.getRelevanceObjects(items, hideSingleValued));
}

/**
 * Runs {@code computable} under the lookup's lock. On the EDT, background-thread
 * prioritization is stopped first — presumably to avoid the EDT waiting on a deprioritized
 * lock holder; see HeavyProcessLatch for the exact semantics.
 */
private <T> T withLock(Computable<T> computable) {
  if (ApplicationManager.getApplication().isDispatchThread()) {
    HeavyProcessLatch.INSTANCE.stopThreadPrioritizing();
  }
  synchronized (myLock) {
    return computable.compute();
  }
}

@SuppressWarnings("unused")
public void setPrefixChangeListener(PrefixChangeListener listener) {
  myPrefixChangeListener = listener;
}

FontPreferences getFontPreferences() {
  return myFontPreferences;
}

// How strongly the lookup currently owns keyboard focus.
public enum FocusDegree { FOCUSED, SEMI_FOCUSED, UNFOCUSED }
}
| EA-92531 - SIOOBE: LookupImpl.getCaseCorrectedLookupString
| platform/lang-impl/src/com/intellij/codeInsight/lookup/impl/LookupImpl.java | EA-92531 - SIOOBE: LookupImpl.getCaseCorrectedLookupString | <ide><path>latform/lang-impl/src/com/intellij/codeInsight/lookup/impl/LookupImpl.java
<ide> if (!isLower && !isUpper) continue;
<ide> isAllLower = isAllLower && isLower;
<ide> isAllUpper = isAllUpper && isUpper;
<del> sameCase = sameCase && isLower == Character.isLowerCase(lookupString.charAt(i));
<add> sameCase = sameCase && i < lookupString.length() && isLower == Character.isLowerCase(lookupString.charAt(i));
<ide> }
<ide> if (sameCase) return lookupString;
<ide> if (isAllLower) return lookupString.toLowerCase(); |
|
Java | apache-2.0 | c0ae86e969b40688563fdd9196ad3071122ec466 | 0 | Nanoware/Terasology,kartikey0303/Terasology,MovingBlocks/Terasology,Malanius/Terasology,Nanoware/Terasology,Nanoware/Terasology,kartikey0303/Terasology,Malanius/Terasology,MovingBlocks/Terasology,MovingBlocks/Terasology | /*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.logic;
import org.terasology.rendering.nui.Color;
/**
 * Makes the game render the specified text at the current location of the entity.
 */
public class FloatingTextComponent implements VisualComponent {
    // The text to render.
    public String text;
    // Fill color of the text.
    public Color textColor = Color.WHITE;
    // Color of the drop shadow rendered behind the text.
    public Color textShadowColor = Color.BLACK;
    // Render scale multiplier (1 = default size).
    public float scale = 1f;
    // NOTE(review): semantics not visible from this file — presumably whether the text is
    // hidden by geometry between the camera and the entity; confirm with the renderer.
    public boolean isOccluded;
}
| engine/src/main/java/org/terasology/rendering/logic/FloatingTextComponent.java | /*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.logic;
import org.terasology.rendering.nui.Color;
/**
 * Makes the game render the specified text at the current location of the entity.
 */
public class FloatingTextComponent implements VisualComponent {
    // The text to render.
    public String text;
    // Fill color of the text.
    public Color textColor = Color.WHITE;
    // Color of the drop shadow rendered behind the text.
    public Color textShadowColor = Color.BLACK;
    // Render scale multiplier (1 = default size).
    public float scale = 1f;
}
| Add isOccluded to FloatingTextComponent
| engine/src/main/java/org/terasology/rendering/logic/FloatingTextComponent.java | Add isOccluded to FloatingTextComponent | <ide><path>ngine/src/main/java/org/terasology/rendering/logic/FloatingTextComponent.java
<ide> public Color textColor = Color.WHITE;
<ide> public Color textShadowColor = Color.BLACK;
<ide> public float scale = 1f;
<add> public boolean isOccluded;
<ide> } |
|
Java | bsd-3-clause | d9be215146532c6ccb57324b08343378be26fd7a | 0 | flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij | /*
* Copyright 2017 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
package io.flutter.run.daemon;
import com.google.common.base.Charsets;
import com.google.gson.*;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import io.flutter.settings.FlutterSettings;
import io.flutter.utils.StdoutJsonParser;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Function;
/**
* Sends JSON commands to a flutter daemon process, assigning a new id to each one.
*
* <p>Also handles dispatching incoming responses and events.
*
* <p>The protocol is specified in
* <a href="https://github.com/flutter/flutter/wiki/The-flutter-daemon-mode"
* >The Flutter Daemon Mode</a>.
*/
public class DaemonApi {
private static final int STDERR_LINES_TO_KEEP = 100;
@NotNull private final Consumer<String> callback;
private final AtomicInteger nextId = new AtomicInteger();
private final Map<Integer, Command> pending = new LinkedHashMap<>();
private final StdoutJsonParser stdoutParser = new StdoutJsonParser();
/**
* A ring buffer holding the last few lines that the process sent to stderr.
*/
private final Deque<String> stderr = new ArrayDeque<>();
/**
* Creates an Api that sends JSON to a callback.
*/
DaemonApi(@NotNull Consumer<String> callback) {
this.callback = callback;
}
/**
* Creates an Api that sends JSON to a process.
*/
DaemonApi(@NotNull ProcessHandler process) {
this((String json) -> sendCommand(json, process));
}
// app domain
/**
 * Restarts the given app.
 *
 * @param fullRestart true for a full restart, false for a hot reload
 * @param pause       whether the app should pause after restarting
 */
CompletableFuture<RestartResult> restartApp(@NotNull String appId, boolean fullRestart, boolean pause) {
  return send("app.restart", new AppRestart(appId, fullRestart, pause));
}

/** Stops the given app; completes with the daemon's boolean result. */
CompletableFuture<Boolean> stopApp(@NotNull String appId) {
  return send("app.stop", new AppStop(appId));
}

/** Fails every in-flight command with an IOException and clears the pending map. */
void cancelPending() {
  // Snapshot under the lock but complete the futures outside it, so completion
  // callbacks cannot re-enter or block while `pending` is held.
  final List<Command> commands;
  synchronized (pending) {
    commands = new ArrayList<>(pending.values());
    pending.clear();
  }
  for (Command command : commands) {
    command.completeExceptionally(new IOException("Application terminated"));
  }
}

/**
 * Used to invoke an arbitrary service protocol extension.
 */
CompletableFuture<JsonObject> callAppServiceExtension(@NotNull String appId,
                                                     @NotNull String methodName,
                                                     @NotNull Map<String, Object> params) {
  return send("app.callServiceExtension", new AppServiceExtension(appId, methodName, params));
}

// device domain

/** Sends "device.enable", asking the daemon to start emitting device events. */
CompletableFuture enableDeviceEvents() {
  return send("device.enable", null);
}
/**
 * Receive responses and events from a process until it shuts down.
 * Stdout is reassembled into complete lines and parsed as daemon JSON messages; stderr is
 * kept in a small ring buffer (see {@link #getStderrTail()}) for later diagnostics.
 */
void listen(@NotNull ProcessHandler process, @NotNull DaemonEvent.Listener listener) {
  process.addProcessListener(new ProcessAdapter() {
    @Override
    public void onTextAvailable(@NotNull ProcessEvent event, @NotNull Key outputType) {
      if (outputType.equals(ProcessOutputTypes.STDERR)) {
        // Append text to last line in buffer.
        final String last = stderr.peekLast();
        if (last != null && !last.endsWith("\n")) {
          stderr.removeLast();
          stderr.add(last + event.getText());
        }
        else {
          stderr.add(event.getText());
        }
        // Trim buffer size.
        while (stderr.size() > STDERR_LINES_TO_KEEP) {
          stderr.removeFirst();
        }
      }
      else if (outputType.equals(ProcessOutputTypes.STDOUT)) {
        final String text = event.getText();
        if (FlutterSettings.getInstance().isVerboseLogging()) {
          LOG.info("[<-- " + text.trim() + "]");
        }
        // The daemon interleaves JSON messages with ordinary program output;
        // StdoutJsonParser yields only complete lines for parsing.
        stdoutParser.appendOutput(text);
        for (String line : stdoutParser.getAvailableLines()) {
          final JsonObject obj = parseAndValidateDaemonEvent(line);
          if (obj != null) {
            dispatch(obj, listener);
          }
        }
      }
    }
    @Override
    public void processWillTerminate(@NotNull ProcessEvent event, boolean willBeDestroyed) {
      listener.processWillTerminate();
    }
    @Override
    public void processTerminated(@NotNull ProcessEvent event) {
      listener.processTerminated(event.getExitCode());
    }
  });
  // All hooked up and ready to receive events.
  process.startNotify();
}
/**
 * Parses some JSON and handles it as either a command's response or an event.
 * A message carrying an "id" completes (or, on "error", fails) the pending command with
 * that id; a message without one is forwarded to {@code eventListener} as a daemon event.
 */
void dispatch(@NotNull JsonObject obj, @Nullable DaemonEvent.Listener eventListener) {
  final JsonPrimitive idField = obj.getAsJsonPrimitive("id");
  if (idField == null) {
    // It's an event.
    if (eventListener != null) {
      DaemonEvent.dispatch(obj, eventListener);
    }
  }
  else {
    final Command cmd = takePending(idField.getAsInt());
    if (cmd == null) {
      return;
    }
    final JsonElement error = obj.get("error");
    if (error != null) {
      cmd.completeExceptionally(new IOException("error from " + cmd.method + ": " + error));
    }
    else {
      cmd.complete(obj.get("result"));
    }
  }
}
/**
 * Removes and returns the pending command with the given id, or logs a warning and
 * returns null when no such request was sent.
 */
@Nullable
private Command takePending(int id) {
  final Command cmd;
  synchronized (pending) {
    cmd = pending.remove(id);
  }
  if (cmd == null) {
    LOG.warn("received a response for a request that wasn't sent: " + id);
    return null;
  }
  return cmd;
}
/**
 * Assigns the next id, registers the command as pending, and hands its JSON to the callback.
 *
 * @return a future completed when the daemon answers (or failed if the command is cancelled)
 */
private <T> CompletableFuture<T> send(String method, @Nullable Params<T> params) {
  // Synchronize on nextId to ensure that we send one command at a time and they are numbered in the order they are sent.
  synchronized (nextId) {
    final int id = nextId.getAndIncrement();
    final Command<T> command = new Command<>(method, params, id);
    final String json = command.toString();
    // Register before sending, so a fast response cannot arrive for an unknown id.
    synchronized (pending) {
      pending.put(id, command);
    }
    callback.accept(json);
    return command.done;
  }
}
/**
 * Parse the given string; if it is valid JSON - and a valid Daemon message - then return
 * the parsed JsonObject.
 *
 * <p>A valid message is a one-element JSON array "[{...}]" whose object carries either an
 * "event" name together with a "params" object, or a numeric "id". Anything else → null.
 */
@Nullable
public static JsonObject parseAndValidateDaemonEvent(String message) {
  // Cheap shape checks before invoking the JSON parser.
  if (!message.startsWith("[{")) {
    return null;
  }
  message = message.trim();
  if (!message.endsWith("}]")) {
    return null;
  }
  // Strip the enclosing array brackets and parse the single object inside.
  message = message.substring(1, message.length() - 1);
  final JsonObject obj;
  try {
    final JsonParser jsonParser = new JsonParser();
    final JsonElement element = jsonParser.parse(message);
    obj = element.getAsJsonObject();
  }
  catch (JsonSyntaxException e) {
    return null;
  }
  // obj must contain either an "id" (int), or an "event" field
  final JsonPrimitive eventField = obj.getAsJsonPrimitive("event");
  if (eventField != null) {
    final String eventName = eventField.getAsString();
    if (eventName == null) {
      return null;
    }
    final JsonObject params = obj.getAsJsonObject("params");
    return params == null ? null : obj;
  }
  else {
    // "id" must parse as an int; the parsed value itself is not needed here, so the
    // former unused local has been dropped — only the NumberFormatException matters.
    final JsonPrimitive idField = obj.getAsJsonPrimitive("id");
    if (idField == null || !idField.isNumber()) {
      return null;
    }
    try {
      idField.getAsInt();
      return obj;
    }
    catch (NumberFormatException e) {
      return null;
    }
  }
}
/** Writes one JSON command to the process's stdin, wrapped in "[...]" with a trailing newline. */
private static void sendCommand(String json, ProcessHandler handler) {
  final PrintWriter stdin = getStdin(handler);
  if (stdin == null) {
    LOG.warn("can't write command to Flutter process: " + json);
    return;
  }
  stdin.write('[');
  stdin.write(json);
  stdin.write("]\n");
  if (FlutterSettings.getInstance().isVerboseLogging()) {
    LOG.info("[--> " + json + "]");
  }
  // PrintWriter never throws IOException; checkError() flushes the stream and reports
  // whether any write so far has failed.
  if (stdin.checkError()) {
    LOG.warn("can't write command to Flutter process: " + json);
  }
}

/** @return a UTF-8 writer over the process's stdin, or null when the process has no input stream. */
@Nullable
private static PrintWriter getStdin(ProcessHandler processHandler) {
  final OutputStream stdin = processHandler.getProcessInput();
  if (stdin == null) return null;
  return new PrintWriter(new OutputStreamWriter(stdin, Charsets.UTF_8));
}
/**
 * Returns the last lines written to stderr.
 *
 * <p>NOTE(review): {@code stderr} is an ArrayDeque mutated on the process-listener thread;
 * this getter may be invoked from another thread without synchronization — confirm callers.
 */
public String getStderrTail() {
  // String.join accepts any Iterable<CharSequence>; no intermediate array copy needed.
  return String.join("", stderr);
}
/** Result of an "app.restart" request; fields are populated from the daemon's JSON by Gson. */
public static class RestartResult {
  private int code;  // 0 means success
  private String message;
  private String hintMessage;
  private String hintId;

  /** @return true if the restart succeeded (code == 0). */
  public boolean ok() {
    return code == 0;
  }
  public int getCode() {
    return code;
  }
  public String getMessage() {
    return message;
  }
  public String getHintMessage() {
    return hintMessage;
  }
  public String getHintId() {
    return hintId;
  }
  /** @return true if the daemon's hint id says a full restart is recommended. */
  public boolean isRestartRecommended() {
    return "restartRecommended".equals(hintId);
  }
  @Override
  public String toString() {
    return getCode() + ":" + getMessage();
  }
}
/**
 * A pending command to a Flutter process.
 * Gson serializes method/params/id to the wire (transient fields are skipped); the future
 * is completed when the matching response is dispatched.
 */
private static class Command<T> {
  final @NotNull String method;
  final @Nullable JsonElement params;
  final int id;
  // Converts the raw "result" JSON into T; null when the caller expects no result.
  transient final @Nullable Function<JsonElement, T> parseResult;
  transient final CompletableFuture<T> done = new CompletableFuture<>();

  Command(@NotNull String method, @Nullable Params<T> params, int id) {
    this.method = method;
    // GSON has trouble with params as a field, because it has both a generic type and subclasses.
    // But it handles it okay at top-level.
    this.params = GSON.toJsonTree(params);
    this.id = id;
    this.parseResult = params == null ? null : params::parseResult;
  }

  /** Completes {@link #done} with the parsed result; a parse failure completes it exceptionally. */
  void complete(@Nullable JsonElement result) {
    if (parseResult == null) {
      done.complete(null);
      return;
    }
    try {
      done.complete(parseResult.apply(result));
    }
    catch (Exception e) {
      LOG.warn("Unable to parse response from Flutter daemon. Command was: " + this, e);
      done.completeExceptionally(e);
    }
  }

  void completeExceptionally(Throwable t) {
    done.completeExceptionally(t);
  }

  /** @return the JSON wire form of this command. */
  @Override
  public String toString() {
    return GSON.toJson(this);
  }
}
/** Typed parameter bag for one daemon method; also knows how to parse that method's result. */
private abstract static class Params<T> {
  @Nullable
  abstract T parseResult(@Nullable JsonElement result);
}
/** Params for "app.restart". Fields are read reflectively by Gson, hence "unused". */
@SuppressWarnings("unused")
private static class AppRestart extends Params<RestartResult> {
  @NotNull final String appId;
  final boolean fullRestart;
  final boolean pause;

  AppRestart(@NotNull String appId, boolean fullRestart, boolean pause) {
    this.appId = appId;
    this.fullRestart = fullRestart;
    this.pause = pause;
  }

  @Override
  RestartResult parseResult(JsonElement result) {
    return GSON.fromJson(result, RestartResult.class);
  }
}

/** Params for "app.stop". */
@SuppressWarnings("unused")
private static class AppStop extends Params<Boolean> {
  @NotNull final String appId;

  AppStop(@NotNull String appId) {
    this.appId = appId;
  }

  @Override
  Boolean parseResult(JsonElement result) {
    return GSON.fromJson(result, Boolean.class);
  }
}

/** Params for "app.callServiceExtension". */
@SuppressWarnings("unused")
private static class AppServiceExtension extends Params<JsonObject> {
  final String appId;
  final String methodName;
  final Map<String, Object> params;

  AppServiceExtension(String appId, String methodName, Map<String, Object> params) {
    this.appId = appId;
    this.methodName = methodName;
    this.params = params;
  }

  @Override
  JsonObject parseResult(JsonElement result) {
    // Non-object results are wrapped so callers always receive a JsonObject.
    if (result instanceof JsonObject) {
      return (JsonObject)result;
    }
    final JsonObject obj = new JsonObject();
    obj.add("result", result);
    return obj;
  }
}

private static final Gson GSON = new Gson();
private static final Logger LOG = Logger.getInstance(DaemonApi.class);
}
| src/io/flutter/run/daemon/DaemonApi.java | /*
* Copyright 2017 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
package io.flutter.run.daemon;
import com.google.common.base.Charsets;
import com.google.gson.*;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import io.flutter.settings.FlutterSettings;
import io.flutter.utils.StdoutJsonParser;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Function;
/**
* Sends JSON commands to a flutter daemon process, assigning a new id to each one.
*
* <p>Also handles dispatching incoming responses and events.
*
* <p>The protocol is specified in
* <a href="https://github.com/flutter/flutter/wiki/The-flutter-daemon-mode"
* >The Flutter Daemon Mode</a>.
*/
public class DaemonApi {
private static final int STDERR_LINES_TO_KEEP = 100;
@NotNull private final Consumer<String> callback;
private final AtomicInteger nextId = new AtomicInteger();
private final Map<Integer, Command> pending = new LinkedHashMap<>();
private final StdoutJsonParser stdoutParser = new StdoutJsonParser();
/**
* A ring buffer holding the last few lines that the process sent to stderr.
*/
private final Deque<String> stderr = new ArrayDeque<>();
/**
* Creates an Api that sends JSON to a callback.
*/
DaemonApi(@NotNull Consumer<String> callback) {
this.callback = callback;
}
/**
* Creates an Api that sends JSON to a process.
*/
DaemonApi(@NotNull ProcessHandler process) {
this((String json) -> sendCommand(json, process));
}
// app domain
CompletableFuture<RestartResult> restartApp(@NotNull String appId, boolean fullRestart, boolean pause) {
return send("app.restart", new AppRestart(appId, fullRestart, pause));
}
CompletableFuture<Boolean> stopApp(@NotNull String appId) {
return send("app.stop", new AppStop(appId));
}
void cancelPending() {
final List<Command> commands;
synchronized (pending) {
commands = new ArrayList<>(pending.values());
pending.clear();
}
for (Command command : commands) {
command.completeExceptionally(new IOException("Application terminated"));
}
}
/**
* Used to invoke an arbitrary service protocol extension.
*/
CompletableFuture<JsonObject> callAppServiceExtension(@NotNull String appId,
@NotNull String methodName,
@NotNull Map<String, Object> params) {
return send("app.callServiceExtension", new AppServiceExtension(appId, methodName, params));
}
// device domain
CompletableFuture enableDeviceEvents() {
return send("device.enable", null);
}
/**
* Receive responses and events from a process until it shuts down.
*/
void listen(@NotNull ProcessHandler process, @NotNull DaemonEvent.Listener listener) {
process.addProcessListener(new ProcessAdapter() {
@Override
public void onTextAvailable(@NotNull ProcessEvent event, @NotNull Key outputType) {
if (outputType.equals(ProcessOutputTypes.STDERR)) {
// Append text to last line in buffer.
final String last = stderr.peekLast();
if (last != null && !last.endsWith("\n")) {
stderr.removeLast();
stderr.add(last + event.getText());
}
else {
stderr.add(event.getText());
}
// Trim buffer size.
while (stderr.size() > STDERR_LINES_TO_KEEP) {
stderr.removeFirst();
}
}
else if (outputType.equals(ProcessOutputTypes.STDOUT)) {
final String text = event.getText();
if (FlutterSettings.getInstance().isVerboseLogging()) {
LOG.info("[<-- " + text.trim() + "]");
}
stdoutParser.appendOutput(text);
for (String line : stdoutParser.getAvailableLines()) {
final JsonObject obj = parseAndValidateDaemonEvent(line);
if (obj != null) {
dispatch(obj, listener);
}
}
}
}
@Override
public void processWillTerminate(@NotNull ProcessEvent event, boolean willBeDestroyed) {
listener.processWillTerminate();
}
@Override
public void processTerminated(@NotNull ProcessEvent event) {
listener.processTerminated(event.getExitCode());
}
});
// All hooked up and ready to receive events.
process.startNotify();
}
/**
 * Parses some JSON and handles it as either a command's response or an event.
 * A message carrying an "id" completes (or, on "error", fails) the pending command with
 * that id; a message without one is forwarded to {@code eventListener} as a daemon event.
 */
void dispatch(@NotNull JsonObject obj, @Nullable DaemonEvent.Listener eventListener) {
  final JsonPrimitive idField = obj.getAsJsonPrimitive("id");
  if (idField == null) {
    // It's an event.
    if (eventListener != null) {
      DaemonEvent.dispatch(obj, eventListener);
    }
  }
  else {
    final Command cmd = takePending(idField.getAsInt());
    if (cmd == null) {
      return;
    }
    final JsonElement error = obj.get("error");
    if (error != null) {
      // Name the failing command and surface the daemon's error payload; the former
      // "unexpected response: <whole object>" wording was misleading (an error response
      // is expected protocol) and omitted which command had failed.
      cmd.completeExceptionally(new IOException("error from " + cmd.method + ": " + error));
    }
    else {
      cmd.complete(obj.get("result"));
    }
  }
}
@Nullable
private Command takePending(int id) {
final Command cmd;
synchronized (pending) {
cmd = pending.remove(id);
}
if (cmd == null) {
LOG.warn("received a response for a request that wasn't sent: " + id);
return null;
}
return cmd;
}
private <T> CompletableFuture<T> send(String method, @Nullable Params<T> params) {
// Synchronize on nextId to ensure that we send one command at a time and they are numbered in the order they are sent.
synchronized (nextId) {
final int id = nextId.getAndIncrement();
final Command<T> command = new Command<>(method, params, id);
final String json = command.toString();
synchronized (pending) {
pending.put(id, command);
}
callback.accept(json);
return command.done;
}
}
  /**
   * Parse the given string; if it is valid JSON - and a valid Daemon message - then return
   * the parsed JsonObject.
   *
   * <p>A daemon message is a single-element JSON array ("[{...}]") whose object either
   * carries an "event" name plus a "params" object, or a numeric response "id".
   * Anything else yields null.
   */
  public static JsonObject parseAndValidateDaemonEvent(String message) {
    // NOTE(review): the prefix is checked before trimming but the suffix after, so a
    // message with leading whitespace is rejected — presumably a deliberate fast path
    // for ordinary stdout lines; confirm against the daemon's output framing.
    if (!message.startsWith("[{")) {
      return null;
    }
    message = message.trim();
    if (!message.endsWith("}]")) {
      return null;
    }
    // Strip the surrounding array brackets, leaving the single object.
    message = message.substring(1, message.length() - 1);
    final JsonObject obj;
    try {
      final JsonParser jsonParser = new JsonParser();
      final JsonElement element = jsonParser.parse(message);
      // NOTE(review): getAsJsonObject throws an uncaught IllegalStateException if the
      // payload parses but is not an object — verify callers tolerate this.
      obj = element.getAsJsonObject();
    }
    catch (JsonSyntaxException e) {
      return null;
    }
    // obj must contain either an "id" (int), or an "event" field
    final JsonPrimitive eventField = obj.getAsJsonPrimitive("event");
    if (eventField != null) {
      final String eventName = eventField.getAsString();
      if (eventName == null) {
        return null;
      }
      // An event is only valid when it also carries a "params" object.
      final JsonObject params = obj.getAsJsonObject("params");
      return params == null ? null : obj;
    }
    else {
      // id
      final JsonPrimitive idField = obj.getAsJsonPrimitive("id");
      if (idField == null || !idField.isNumber()) {
        return null;
      }
      try {
        final int id = idField.getAsInt();
        return obj;
      }
      catch (NumberFormatException e) {
        return null;
      }
    }
  }
  /**
   * Writes one command to the daemon's stdin, framed as "[&lt;json&gt;]\n".
   * Failures are logged, not thrown.
   */
  private static void sendCommand(String json, ProcessHandler handler) {
    final PrintWriter stdin = getStdin(handler);
    if (stdin == null) {
      LOG.warn("can't write command to Flutter process: " + json);
      return;
    }
    stdin.write('[');
    stdin.write(json);
    stdin.write("]\n");
    if (FlutterSettings.getInstance().isVerboseLogging()) {
      LOG.info("[--> " + json + "]");
    }
    // checkError() flushes the writer and reports any earlier write failure.
    if (stdin.checkError()) {
      LOG.warn("can't write command to Flutter process: " + json);
    }
  }
@Nullable
private static PrintWriter getStdin(ProcessHandler processHandler) {
final OutputStream stdin = processHandler.getProcessInput();
if (stdin == null) return null;
return new PrintWriter(new OutputStreamWriter(stdin, Charsets.UTF_8));
}
/**
* Returns the last lines written to stderr.
*/
public String getStderrTail() {
final String[] lines = stderr.toArray(new String[]{});
return String.join("", lines);
}
  /**
   * Result of an app.restart request.
   *
   * <p>Fields are populated reflectively by GSON from the daemon's JSON response;
   * do not rename them.
   */
  public static class RestartResult {
    private int code;
    private String message;
    private String hintMessage;
    private String hintId;
    /** True when the daemon reported success (code 0). */
    public boolean ok() {
      return code == 0;
    }
    public int getCode() {
      return code;
    }
    public String getMessage() {
      return message;
    }
    public String getHintMessage() {
      return hintMessage;
    }
    public String getHintId() {
      return hintId;
    }
    /** True when the daemon hinted that a full restart would be preferable. */
    public boolean isRestartRecommended() {
      return "restartRecommended".equals(hintId);
    }
    @Override
    public String toString() {
      return getCode() + ":" + getMessage();
    }
  }
  /**
   * A pending command to a Flutter process.
   *
   * <p>Serialized to JSON by GSON, so the non-transient field names are part of the
   * wire protocol; the transient fields hold client-side completion state only.
   */
  private static class Command<T> {
    final @NotNull String method;
    final @Nullable JsonElement params;
    final int id;
    // Converts the raw "result" element into T; null means "no typed result expected".
    transient final @Nullable Function<JsonElement, T> parseResult;
    // Completed when the daemon's response (or an error) arrives.
    transient final CompletableFuture<T> done = new CompletableFuture<>();
    Command(@NotNull String method, @Nullable Params<T> params, int id) {
      this.method = method;
      // GSON has trouble with params as a field, because it has both a generic type and subclasses.
      // But it handles it okay at top-level.
      this.params = GSON.toJsonTree(params);
      this.id = id;
      this.parseResult = params == null ? null : params::parseResult;
    }
    /** Completes {@link #done} with the parsed result (null when no parser is set). */
    void complete(@Nullable JsonElement result) {
      if (parseResult == null) {
        done.complete(null);
        return;
      }
      try {
        done.complete(parseResult.apply(result));
      }
      catch (Exception e) {
        LOG.warn("Unable to parse response from Flutter daemon. Command was: " + this, e);
        done.completeExceptionally(e);
      }
    }
    void completeExceptionally(Throwable t) {
      done.completeExceptionally(t);
    }
    @Override
    public String toString() {
      return GSON.toJson(this);
    }
  }
  /**
   * The typed parameter payload of a daemon command; also knows how to convert the
   * daemon's raw "result" element into the command's return type.
   */
  private abstract static class Params<T> {
    /** Parses the daemon's "result" element; may return null. */
    @Nullable
    abstract T parseResult(@Nullable JsonElement result);
  }
  /** Parameters for "app.restart"; field names are serialized by GSON into the wire payload. */
  @SuppressWarnings("unused")
  private static class AppRestart extends Params<RestartResult> {
    @NotNull final String appId;
    final boolean fullRestart;
    final boolean pause;
    AppRestart(@NotNull String appId, boolean fullRestart, boolean pause) {
      this.appId = appId;
      this.fullRestart = fullRestart;
      this.pause = pause;
    }
    @Override
    RestartResult parseResult(JsonElement result) {
      return GSON.fromJson(result, RestartResult.class);
    }
  }
  /** Parameters for "app.stop"; the result is a plain boolean success flag. */
  @SuppressWarnings("unused")
  private static class AppStop extends Params<Boolean> {
    @NotNull final String appId;
    AppStop(@NotNull String appId) {
      this.appId = appId;
    }
    @Override
    Boolean parseResult(JsonElement result) {
      return GSON.fromJson(result, Boolean.class);
    }
  }
  /** Parameters for "app.callServiceExtension"; serialized by GSON. */
  @SuppressWarnings("unused")
  private static class AppServiceExtension extends Params<JsonObject> {
    final String appId;
    final String methodName;
    final Map<String, Object> params;
    AppServiceExtension(String appId, String methodName, Map<String, Object> params) {
      this.appId = appId;
      this.methodName = methodName;
      this.params = params;
    }
    @Override
    JsonObject parseResult(JsonElement result) {
      if (result instanceof JsonObject) {
        return (JsonObject)result;
      }
      // Non-object results are wrapped so callers always receive an object.
      final JsonObject obj = new JsonObject();
      obj.add("result", result);
      return obj;
    }
  }
  // Shared, thread-safe JSON codec and logger for this class.
  private static final Gson GSON = new Gson();
  private static final Logger LOG = Logger.getInstance(DaemonApi.class);
}
| add more details to logging output (#2526)
| src/io/flutter/run/daemon/DaemonApi.java | add more details to logging output (#2526) | <ide><path>rc/io/flutter/run/daemon/DaemonApi.java
<ide>
<ide> final JsonElement error = obj.get("error");
<ide> if (error != null) {
<del> cmd.completeExceptionally(new IOException("unexpected response: " + obj));
<add> cmd.completeExceptionally(new IOException("error from " + cmd.method + ": " + error));
<ide> }
<ide> else {
<ide> cmd.complete(obj.get("result")); |
|
Java | apache-2.0 | 51735f5fe010f6797ee93510bd5c7ef0689dcb5b | 0 | apache/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.rdb;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.locks.Lock;
import java.util.zip.Deflater;
import java.util.zip.GZIPOutputStream;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.sql.DataSource;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.cache.CacheValue;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.Document;
import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
import org.apache.jackrabbit.oak.plugins.document.Revision;
import org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
import org.apache.jackrabbit.oak.plugins.document.UpdateUtils;
import org.apache.jackrabbit.oak.plugins.document.cache.CachingDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Objects;
import com.google.common.cache.Cache;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Striped;
/**
* Implementation of {@link CachingDocumentStore} for relational databases.
*
* <h3>Supported Databases</h3>
* <p>
* The code is supposed to be sufficiently generic to run with a variety of
* database implementations. However, the tables are created when required to
* simplify testing, and <em>that</em> code specifically supports these
* databases:
* <ul>
* <li>h2</li>
* <li>IBM DB2</li>
* <li>Postgres</li>
* <li>MariaDB (MySQL) (experimental)</li>
* <li>Oracle (experimental)</li>
* </ul>
*
* <h3>Table Layout</h3>
* <p>
* Data for each of the DocumentStore's {@link Collection}s is stored in its own
* database table (with a name matching the collection).
* <p>
* The tables essentially implement key/value storage, where the key usually is
* derived from an Oak path, and the value is a serialization of a
* {@link Document} (or a part of one). Additional fields are used for queries,
* debugging, and concurrency control:
* <table style="text-align: left;">
* <thead>
* <tr>
* <th>Column</th>
* <th>Type</th>
* <th>Description</th>
* </tr>
* </thead> <tbody>
* <tr>
* <th>ID</th>
* <td>varchar(512) not null primary key</td>
* <td>the document's key</td>
* </tr>
* <tr>
* <th>MODIFIED</th>
* <td>bigint</td>
* <td>low-resolution timestamp
* </tr>
* <tr>
* <th>HASBINARY</th>
* <td>smallint</td>
* <td>flag indicating whether the document has binary properties
* </tr>
* <tr>
* <th>MODCOUNT</th>
* <td>bigint</td>
* <td>modification counter, used for avoiding overlapping updates</td>
* </tr>
* <tr>
* <th>DSIZE</th>
* <td>bigint</td>
* <td>the approximate size of the document's JSON serialization (for debugging purposes)</td>
* </tr>
* <tr>
* <th>DATA</th>
* <td>varchar(16384)</td>
* <td>the document's JSON serialization (only used for small document sizes, in
* which case BDATA (below) is not set), or a sequence of JSON serialized update operations
* to be applied against the last full serialization</td>
* </tr>
* <tr>
* <th>BDATA</th>
* <td>blob</td>
* <td>the document's JSON serialization (usually GZIPped, only used for "large"
* documents)</td>
* </tr>
* </tbody>
* </table>
* <p>
* The names of database tables can be prefixed; the purpose is mainly for testing, as
* tables can also be dropped automatically when the store is disposed (this only happens
* for those tables that have been created on demand)
* <p>
* <em>Note that the database needs to be created/configured to support all Unicode
* characters in text fields, and to collate by Unicode code point (in DB2: "identity collation",
* in Postgres: "C").
* THIS IS NOT THE DEFAULT!</em>
* <p>
* <em>For MySQL, the database parameter "max_allowed_packet" needs to be increased to support ~2M blobs.</em>
*
* <h3>Caching</h3>
* <p>
* The cache borrows heavily from the {@link MongoDocumentStore} implementation;
* however it does not support the off-heap mechanism yet.
*
* <h3>Queries</h3>
* <p>
* The implementation currently supports only two indexed properties: "_modified" and
* "_bin". Attempts to use a different indexed property will cause a {@link DocumentStoreException}.
*/
public class RDBDocumentStore implements CachingDocumentStore {
    /**
     * Creates a {@linkplain RDBDocumentStore} instance using the provided
     * {@link DataSource}, {@link DocumentMK.Builder}, and {@link RDBOptions}.
     *
     * @throws DocumentStoreException when connection setup or table creation fails
     */
    public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder, RDBOptions options) {
        try {
            initialize(ds, builder, options);
        } catch (Exception ex) {
            throw new DocumentStoreException("initializing RDB document store", ex);
        }
    }
    /**
     * Creates a {@linkplain RDBDocumentStore} instance using the provided
     * {@link DataSource}, {@link DocumentMK.Builder}, and default {@link RDBOptions}.
     */
    public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder) {
        this(ds, builder, new RDBOptions());
    }
    // Convenience overload: read with unlimited cache age.
    @Override
    public <T extends Document> T find(Collection<T> collection, String id) {
        return find(collection, id, Integer.MAX_VALUE);
    }
    // Cache-aware read; maxCacheAge == 0 forces a fresh read from the database.
    @Override
    public <T extends Document> T find(final Collection<T> collection, final String id, int maxCacheAge) {
        return readDocumentCached(collection, id, maxCacheAge);
    }
    // Range query without an indexed-property condition.
    @Override
    public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, int limit) {
        return query(collection, fromKey, toKey, null, 0, limit);
    }
    // Range query; only the indexed properties listed in INDEXEDPROPERTIES are supported.
    @Override
    public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, String indexedProperty,
            long startValue, int limit) {
        return internalQuery(collection, fromKey, toKey, indexedProperty, startValue, limit);
    }
    @Override
    public <T extends Document> void remove(Collection<T> collection, String id) {
        // Delete first, then invalidate, so a concurrent read cannot re-cache
        // the document after the invalidation.
        delete(collection, id);
        invalidateCache(collection, id);
    }
@Override
public <T extends Document> void remove(Collection<T> collection, List<String> ids) {
for (String id : ids) {
invalidateCache(collection, id);
}
delete(collection, ids);
}
    // Bulk insert; returns false (rather than throwing) when any key already exists.
    @Override
    public <T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps) {
        return internalCreate(collection, updateOps);
    }
    // Applies the same update to every listed key (batched where possible).
    @Override
    public <T extends Document> void update(Collection<T> collection, List<String> keys, UpdateOp updateOp) {
        internalUpdate(collection, keys, updateOp);
    }
    // Upsert: creates the document when absent, otherwise applies the update unconditionally.
    @Override
    public <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update) {
        return internalCreateOrUpdate(collection, update, true, false);
    }
    // Conditional update of an existing document; returns the previous version or null.
    @Override
    public <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update) {
        return internalCreateOrUpdate(collection, update, false, true);
    }
    // Drops every cached NODES document.
    @Override
    public void invalidateCache() {
        nodesCache.invalidateAll();
    }
    // Drops a single cached entry; only NODES documents are cached. The per-id
    // lock serializes invalidation against concurrent cache loads for the same id.
    @Override
    public <T extends Document> void invalidateCache(Collection<T> collection, String id) {
        if (collection == Collection.NODES) {
            Lock lock = getAndLock(id);
            try {
                nodesCache.invalidate(new StringValue(id));
            } finally {
                lock.unlock();
            }
        }
    }
@Override
public void dispose() {
if (!this.tablesToBeDropped.isEmpty()) {
LOG.debug("attempting to drop: " + this.tablesToBeDropped);
for (String tname : this.tablesToBeDropped) {
Connection con = null;
try {
con = getConnection();
try {
Statement stmt = con.createStatement();
stmt.execute("drop table " + tname);
stmt.close();
con.commit();
} catch (SQLException ex) {
LOG.debug("attempting to drop: " + tname);
}
} catch (SQLException ex) {
LOG.debug("attempting to drop: " + tname);
} finally {
try {
if (con != null) {
con.close();
}
} catch (SQLException ex) {
LOG.debug("on close ", ex);
}
}
}
}
this.ds = null;
}
@Override
public <T extends Document> T getIfCached(Collection<T> collection, String id) {
if (collection != Collection.NODES) {
return null;
} else {
NodeDocument doc = nodesCache.getIfPresent(new StringValue(id));
return castAsT(doc);
}
}
    // Exposes hit/miss statistics of the NODES document cache.
    @Override
    public CacheStats getCacheStats() {
        return this.cacheStats;
    }
// implementation
private static final String MODIFIED = "_modified";
private static final String MODCOUNT = "_modCount";
private static final String ID = "_id";
private static final Logger LOG = LoggerFactory.getLogger(RDBDocumentStore.class);
private final Comparator<Revision> comparator = StableRevisionComparator.REVERSE;
private Exception callStack;
private DataSource ds;
// from options
private String tablePrefix = "";
private Set<String> tablesToBeDropped = new HashSet<String>();
// ratio between Java characters and UTF-8 encoding
// a) single characters will fit into 3 bytes
// b) a surrogate pair (two Java characters) will fit into 4 bytes
// thus...
private static int CHAR2OCTETRATIO = 3;
// capacity of DATA column
private int dataLimitInOctets = 16384;
// number of retries for updates
private static int RETRIES = 10;
// for DBs that prefer "concat" over "||"
private boolean needsConcat = false;
// for DBs that prefer "limit" over "fetch first"
private boolean needsLimit = false;
// for DBs that do not support CASE in SELECT (currently all)
private boolean allowsCaseInSelect = true;
// set of supported indexed properties
private static Set<String> INDEXEDPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { MODIFIED,
NodeDocument.HAS_BINARY_FLAG }));
// set of properties not serialized to JSON
private static Set<String> COLUMNPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { ID,
NodeDocument.HAS_BINARY_FLAG, MODIFIED, MODCOUNT }));
private RDBDocumentSerializer SR = new RDBDocumentSerializer(this, COLUMNPROPERTIES);
private void initialize(DataSource ds, DocumentMK.Builder builder, RDBOptions options) throws Exception {
this.tablePrefix = options.getTablePrefix();
if (tablePrefix.length() > 0 && !tablePrefix.endsWith("_")) {
tablePrefix += "_";
}
this.ds = ds;
this.callStack = LOG.isDebugEnabled() ? new Exception("call stack of RDBDocumentStore creation") : null;
this.nodesCache = builder.buildDocumentCache(this);
this.cacheStats = new CacheStats(nodesCache, "Document-Documents", builder.getWeigher(), builder.getDocumentCacheSize());
Connection con = ds.getConnection();
String dbtype = con.getMetaData().getDatabaseProductName();
if ("Oracle".equals(dbtype)) {
// https://issues.apache.org/jira/browse/OAK-1914
// for some reason, the default for NLS_SORT is incorrect
Statement stmt = con.createStatement();
stmt.execute("ALTER SESSION SET NLS_SORT='BINARY'");
stmt.close();
con.commit();
} else if ("MySQL".equals(dbtype)) {
this.needsConcat = true;
this.needsLimit = true;
}
try {
con.setAutoCommit(false);
createTableFor(con, dbtype, Collection.CLUSTER_NODES, options.isDropTablesOnClose());
createTableFor(con, dbtype, Collection.NODES, options.isDropTablesOnClose());
createTableFor(con, dbtype, Collection.SETTINGS, options.isDropTablesOnClose());
} finally {
con.close();
}
}
    /**
     * Ensures the table backing {@code col} exists; when a probe query fails the
     * table is created with vendor-specific DDL. As a side effect the probe
     * discovers the capacity of the DATA column for NODES (Oracle is pinned to
     * its 4000-octet VARCHAR limit).
     */
    private void createTableFor(Connection con, String dbtype, Collection<? extends Document> col, boolean dropTablesOnClose)
            throws SQLException {
        String tableName = getTable(col);
        try {
            // Probe for table existence with a harmless single-row lookup.
            PreparedStatement stmt = con.prepareStatement("select DATA from " + tableName + " where ID = ?");
            stmt.setString(1, "0:/");
            ResultSet rs = stmt.executeQuery();
            if (col.equals(Collection.NODES)) {
                // try to discover size of DATA column
                ResultSetMetaData met = rs.getMetaData();
                this.dataLimitInOctets = met.getPrecision(1);
            }
            // NOTE(review): rs/stmt are not closed explicitly here; presumably they are
            // released when the caller closes the connection — confirm.
        } catch (SQLException ex) {
            // table does not appear to exist
            con.rollback();
            LOG.info("Attempting to create table " + tableName + " in " + dbtype);
            Statement stmt = con.createStatement();
            // the code below likely will need to be extended for new
            // database types
            if ("PostgreSQL".equals(dbtype)) {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA bytea)");
            } else if ("DB2".equals(dbtype) || (dbtype != null && dbtype.startsWith("DB2/"))) {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA blob)");
            } else if ("MySQL".equals(dbtype)) {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA mediumblob)");
            } else if ("Oracle".equals(dbtype)) {
                // see https://issues.apache.org/jira/browse/OAK-1914
                this.dataLimitInOctets = 4000;
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED number, HASBINARY number, MODCOUNT number, CMODCOUNT number, DSIZE number, DATA varchar(4000), BDATA blob)");
            } else {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA blob)");
            }
            stmt.close();
            con.commit();
            // remember on-demand tables so dispose() can drop them
            if (dropTablesOnClose) {
                tablesToBeDropped.add(tableName);
            }
        }
    }
    // Diagnostic aid: flags stores that were garbage-collected without dispose().
    @Override
    public void finalize() {
        if (this.ds != null && this.callStack != null) {
            LOG.debug("finalizing RDBDocumentStore that was not disposed", this.callStack);
        }
    }
    /**
     * Cache-aware read. Non-NODES collections always hit the database; NODES
     * reads consult the cache (honoring {@code maxCacheAge}) and reload under
     * the per-id lock when the entry is absent or too old.
     */
    private <T extends Document> T readDocumentCached(final Collection<T> collection, final String id, int maxCacheAge) {
        if (collection != Collection.NODES) {
            return readDocumentUncached(collection, id, null);
        } else {
            CacheValue cacheKey = new StringValue(id);
            NodeDocument doc = null;
            if (maxCacheAge > 0) {
                // first try without lock
                doc = nodesCache.getIfPresent(cacheKey);
                if (doc != null) {
                    if (maxCacheAge == Integer.MAX_VALUE || System.currentTimeMillis() - doc.getLastCheckTime() < maxCacheAge) {
                        return castAsT(unwrap(doc));
                    }
                }
            }
            try {
                Lock lock = getAndLock(id);
                final NodeDocument cachedDoc = doc;
                try {
                    if (maxCacheAge == 0) {
                        // the caller demands a fresh read
                        invalidateCache(collection, id);
                    }
                    while (true) {
                        doc = nodesCache.get(cacheKey, new Callable<NodeDocument>() {
                            @Override
                            public NodeDocument call() throws Exception {
                                // the stale cached copy lets the DB read be short-circuited
                                // when the stored MODCOUNT is unchanged
                                NodeDocument doc = (NodeDocument) readDocumentUncached(collection, id, cachedDoc);
                                if (doc != null) {
                                    doc.seal();
                                }
                                return wrap(doc);
                            }
                        });
                        if (maxCacheAge == 0 || maxCacheAge == Integer.MAX_VALUE) {
                            break;
                        }
                        if (System.currentTimeMillis() - doc.getLastCheckTime() < maxCacheAge) {
                            break;
                        }
                        // too old: invalidate, try again
                        invalidateCache(collection, id);
                    }
                } finally {
                    lock.unlock();
                }
                return castAsT(unwrap(doc));
            } catch (ExecutionException e) {
                throw new IllegalStateException("Failed to load document with " + id, e);
            }
        }
    }
    /**
     * Inserts the given documents in transactional chunks of {@code CHUNKSIZE}.
     *
     * @return true when all inserts succeeded, false on the first failing chunk
     *         (e.g. a key that already exists)
     */
    @CheckForNull
    private <T extends Document> boolean internalCreate(Collection<T> collection, List<UpdateOp> updates) {
        try {
            // try up to CHUNKSIZE ops in one transaction
            for (List<UpdateOp> chunks : Lists.partition(updates, CHUNKSIZE)) {
                List<T> docs = new ArrayList<T>();
                for (UpdateOp update : chunks) {
                    T doc = collection.newDocument(this);
                    update.increment(MODCOUNT, 1);
                    if (hasChangesToCollisions(update)) {
                        update.increment(NodeDocument.COLLISIONSMODCOUNT, 1);
                    }
                    UpdateUtils.applyChanges(doc, update, comparator);
                    if (!update.getId().equals(doc.getId())) {
                        throw new DocumentStoreException("ID mismatch - UpdateOp: " + update.getId() + ", ID property: "
                                + doc.getId());
                    }
                    docs.add(doc);
                }
                insertDocuments(collection, docs);
                // cache only documents that were actually persisted
                for (T doc : docs) {
                    addToCache(collection, doc);
                }
            }
            return true;
        } catch (DocumentStoreException ex) {
            // create() is specified to report failure via its return value
            return false;
        }
    }
    /**
     * Shared upsert path behind createOrUpdate/findAndUpdate. Tries an insert
     * when the document is absent (and creation is allowed), otherwise applies
     * the update with optimistic retries.
     *
     * @return the previous version of the document, or null
     */
    @CheckForNull
    private <T extends Document> T internalCreateOrUpdate(Collection<T> collection, UpdateOp update, boolean allowCreate,
            boolean checkConditions) {
        T oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
        if (oldDoc == null) {
            if (!allowCreate) {
                return null;
            } else if (!update.isNew()) {
                throw new DocumentStoreException("Document does not exist: " + update.getId());
            }
            T doc = collection.newDocument(this);
            if (checkConditions && !UpdateUtils.checkConditions(doc, update)) {
                return null;
            }
            update.increment(MODCOUNT, 1);
            if (hasChangesToCollisions(update)) {
                update.increment(NodeDocument.COLLISIONSMODCOUNT, 1);
            }
            UpdateUtils.applyChanges(doc, update, comparator);
            try {
                insertDocuments(collection, Collections.singletonList(doc));
                addToCache(collection, doc);
                return oldDoc;
            } catch (DocumentStoreException ex) {
                // may have failed due to a race condition; try update instead
                // this is an edge case, so it's ok to bypass the cache
                // (avoiding a race condition where the DB is already updated
                // but the cache is not)
                oldDoc = readDocumentUncached(collection, update.getId(), null);
                if (oldDoc == null) {
                    // something else went wrong
                    LOG.error("insert failed, but document " + update.getId() + " is not present, aborting", ex);
                    throw (ex);
                }
                return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
            }
        } else {
            return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
        }
    }
    /**
     * @return previous version of document or <code>null</code>
     */
    // Optimistic-concurrency update: retries up to maxRetries times, re-reading
    // the document whenever the MODCOUNT-guarded write loses a race.
    @CheckForNull
    private <T extends Document> T internalUpdate(Collection<T> collection, UpdateOp update, T oldDoc, boolean checkConditions,
            int maxRetries) {
        T doc = applyChanges(collection, oldDoc, update, checkConditions);
        if (doc == null) {
            // conditions not met
            return null;
        } else {
            Lock l = getAndLock(update.getId());
            try {
                boolean success = false;
                int retries = maxRetries;
                while (!success && retries > 0) {
                    long lastmodcount = (Long) oldDoc.get(MODCOUNT);
                    success = updateDocument(collection, doc, update, lastmodcount);
                    if (!success) {
                        retries -= 1;
                        oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
                        if (oldDoc != null) {
                            long newmodcount = (Long) oldDoc.get(MODCOUNT);
                            if (lastmodcount == newmodcount) {
                                // cached copy did not change so it probably was updated by
                                // a different instance, get a fresh one
                                oldDoc = readDocumentUncached(collection, update.getId(), null);
                            }
                        }
                        if (oldDoc == null) {
                            // document was there but is now gone
                            LOG.error("failed to apply update because document is gone in the meantime: " + update.getId());
                            return null;
                        }
                        // re-apply the update on top of the newly read document
                        doc = applyChanges(collection, oldDoc, update, checkConditions);
                        if (doc == null) {
                            return null;
                        }
                    } else {
                        if (collection == Collection.NODES) {
                            applyToCache((NodeDocument) oldDoc, (NodeDocument) doc);
                        }
                    }
                }
                if (!success) {
                    throw new DocumentStoreException("failed update of " + doc.getId() + " (race?) after " + maxRetries + " retries");
                }
                return oldDoc;
            } finally {
                l.unlock();
            }
        }
    }
    /**
     * Produces a sealed copy of {@code oldDoc} with {@code update} applied,
     * including MODCOUNT and collision-counter bookkeeping; returns null when
     * the update's conditions are not satisfied.
     */
    @CheckForNull
    private <T extends Document> T applyChanges(Collection<T> collection, T oldDoc, UpdateOp update, boolean checkConditions) {
        T doc = collection.newDocument(this);
        oldDoc.deepCopy(doc);
        if (checkConditions && !UpdateUtils.checkConditions(doc, update)) {
            return null;
        }
        if (hasChangesToCollisions(update)) {
            update.increment(NodeDocument.COLLISIONSMODCOUNT, 1);
        }
        update.increment(MODCOUNT, 1);
        UpdateUtils.applyChanges(doc, update, comparator);
        doc.seal();
        return doc;
    }
    /**
     * Batch update: when the operation can be expressed as an appended delta
     * (and does not depend on the previous state), one SQL statement per chunk
     * updates all ids; otherwise, or when the batch fails, each id falls back
     * to the one-at-a-time path.
     */
    @CheckForNull
    private <T extends Document> void internalUpdate(Collection<T> collection, List<String> ids, UpdateOp update) {
        if (isAppendableUpdate(update) && !requiresPreviousState(update)) {
            long modified = getModifiedFromUpdate(update);
            String appendData = SR.asString(update);
            for (List<String> chunkedIds : Lists.partition(ids, CHUNKSIZE)) {
                // remember what we already have in the cache
                Map<String, NodeDocument> cachedDocs = Collections.emptyMap();
                if (collection == Collection.NODES) {
                    cachedDocs = new HashMap<String, NodeDocument>();
                    for (String key : chunkedIds) {
                        cachedDocs.put(key, nodesCache.getIfPresent(new StringValue(key)));
                    }
                }
                Connection connection = null;
                String tableName = getTable(collection);
                boolean success = false;
                try {
                    connection = getConnection();
                    success = dbBatchedAppendingUpdate(connection, tableName, chunkedIds, modified, appendData);
                    connection.commit();
                } catch (SQLException ex) {
                    success = false;
                } finally {
                    closeConnection(connection);
                }
                if (success) {
                    for (Entry<String, NodeDocument> entry : cachedDocs.entrySet()) {
                        if (entry.getValue() == null) {
                            // make sure concurrently loaded document is invalidated
                            nodesCache.invalidate(new StringValue(entry.getKey()));
                        } else {
                            // NOTE(review): unchecked casts rely on collection == NODES here
                            T oldDoc = (T)(entry.getValue());
                            T newDoc = applyChanges(collection, (T)(entry.getValue()), update, true);
                            applyToCache((NodeDocument) oldDoc, (NodeDocument) newDoc);
                        }
                    }
                } else {
                    // batch failed: retry each id individually
                    for (String id : chunkedIds) {
                        UpdateOp up = update.copy();
                        up = up.shallowCopy(id);
                        internalCreateOrUpdate(collection, up, false, true);
                    }
                }
            }
        } else {
            // not batchable: apply per id
            for (String id : ids) {
                UpdateOp up = update.copy();
                up = up.shallowCopy(id);
                internalCreateOrUpdate(collection, up, false, true);
            }
        }
    }
    /**
     * Executes a key-range query (optionally filtered by one of the supported
     * indexed properties) and funnels each row through the cache.
     *
     * @throws DocumentStoreException for unsupported indexed properties or SQL errors
     */
    private <T extends Document> List<T> internalQuery(Collection<T> collection, String fromKey, String toKey,
            String indexedProperty, long startValue, int limit) {
        Connection connection = null;
        String tableName = getTable(collection);
        List<T> result = new ArrayList<T>();
        if (indexedProperty != null && (!INDEXEDPROPERTIES.contains(indexedProperty))) {
            String message = "indexed property " + indexedProperty + " not supported, query was '>= '" + startValue + "'; supported properties are "+ INDEXEDPROPERTIES;
            LOG.info(message);
            throw new DocumentStoreException(message);
        }
        try {
            connection = getConnection();
            long now = System.currentTimeMillis();
            List<RDBRow> dbresult = dbQuery(connection, tableName, fromKey, toKey, indexedProperty, startValue, limit);
            for (RDBRow r : dbresult) {
                // reconcile each row with the cache (single timestamp for the whole batch)
                T doc = runThroughCache(collection, r, now);
                result.add(doc);
            }
        } catch (Exception ex) {
            LOG.error("SQL exception on query", ex);
            throw new DocumentStoreException(ex);
        } finally {
            closeConnection(connection);
        }
        return result;
    }
private <T extends Document> String getTable(Collection<T> collection) {
if (collection == Collection.CLUSTER_NODES) {
return this.tablePrefix + "CLUSTERNODES";
} else if (collection == Collection.NODES) {
return this.tablePrefix + "NODES";
} else if (collection == Collection.SETTINGS) {
return this.tablePrefix + "SETTINGS";
} else {
throw new IllegalArgumentException("Unknown collection: " + collection.toString());
}
}
    /**
     * Reads a document straight from the database. When a cached copy is
     * supplied, its MODCOUNT lets the read be answered without deserializing
     * an unchanged row.
     */
    @CheckForNull
    private <T extends Document> T readDocumentUncached(Collection<T> collection, String id, NodeDocument cachedDoc) {
        Connection connection = null;
        String tableName = getTable(collection);
        try {
            long lastmodcount = -1;
            if (cachedDoc != null && cachedDoc.getModCount() != null) {
                lastmodcount = cachedDoc.getModCount().longValue();
            }
            connection = getConnection();
            RDBRow row = dbRead(connection, tableName, id, lastmodcount);
            if (row == null) {
                return null;
            }
            else {
                if (lastmodcount == row.getModcount()) {
                    // we can re-use the cached document
                    cachedDoc.markUpToDate(System.currentTimeMillis());
                    return (T)cachedDoc;
                }
                else {
                    return SR.fromRow(collection, row);
                }
            }
        } catch (Exception ex) {
            throw new DocumentStoreException(ex);
        } finally {
            closeConnection(connection);
        }
    }
    // Deletes a single row; wraps SQL failures in DocumentStoreException.
    private <T extends Document> void delete(Collection<T> collection, String id) {
        Connection connection = null;
        String tableName = getTable(collection);
        try {
            connection = getConnection();
            dbDelete(connection, tableName, Collections.singletonList(id));
            connection.commit();
        } catch (Exception ex) {
            throw new DocumentStoreException(ex);
        } finally {
            closeConnection(connection);
        }
    }
    // Deletes rows in batches, one transaction per batch.
    // NOTE(review): the batch size 64 is a magic number, presumably bounded by
    // SQL IN-clause limits; it is independent of CHUNKSIZE — confirm intent.
    private <T extends Document> void delete(Collection<T> collection, List<String> ids) {
        for (List<String> sublist : Lists.partition(ids, 64)) {
            Connection connection = null;
            String tableName = getTable(collection);
            try {
                connection = getConnection();
                dbDelete(connection, tableName, sublist);
                connection.commit();
            } catch (Exception ex) {
                throw new DocumentStoreException(ex);
            } finally {
                closeConnection(connection);
            }
        }
    }
private <T extends Document> boolean updateDocument(@Nonnull Collection<T> collection, @Nonnull T document,
@Nonnull UpdateOp update, Long oldmodcount) {
Connection connection = null;
String tableName = getTable(collection);
try {
connection = getConnection();
Long modified = (Long) document.get(MODIFIED);
Number flag = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
Boolean hasBinary = flag == null ? false : flag.intValue() == NodeDocument.HAS_BINARY_VAL;
Long modcount = (Long) document.get(MODCOUNT);
Long cmodcount = (Long) document.get(NodeDocument.COLLISIONSMODCOUNT);
boolean success = false;
// every 16th update is a full rewrite
if (isAppendableUpdate(update) && modcount % 16 != 0) {
String appendData = SR.asString(update);
if (appendData.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
try {
success = dbAppendingUpdate(connection, tableName, document.getId(), modified, hasBinary, modcount,
cmodcount, oldmodcount, appendData);
connection.commit();
} catch (SQLException ex) {
continueIfStringOverflow(ex);
connection.rollback();
success = false;
}
}
}
if (! success) {
String data = SR.asString(document);
success = dbUpdate(connection, tableName, document.getId(), modified, hasBinary, modcount, cmodcount,
oldmodcount, data);
connection.commit();
}
return success;
} catch (SQLException ex) {
try {
if (connection != null) {
connection.rollback();
}
} catch (SQLException e) {
// TODO
}
throw new DocumentStoreException(ex);
} finally {
closeConnection(connection);
}
}
private static void continueIfStringOverflow(SQLException ex) throws SQLException {
String state = ex.getSQLState();
if ("22001".equals(state) /* everybody */|| ("72000".equals(state) && 1489 == ex.getErrorCode()) /* Oracle */) {
// ok
} else {
throw (ex);
}
}
    /**
     * Whether the update can be handled by appending a serialized diff to the
     * DATA column. Currently we use append for all updates, but this might
     * change in the future.
     */
    private static boolean isAppendableUpdate(UpdateOp update) {
        return true;
    }
/* check whether this update operation requires knowledge about the previous state */
private static boolean requiresPreviousState(UpdateOp update) {
for (Map.Entry<Key, Operation> change : update.getChanges().entrySet()) {
Operation op = change.getValue();
if (op.type == UpdateOp.Operation.Type.CONTAINS_MAP_ENTRY) return true;
}
return false;
}
private static long getModifiedFromUpdate(UpdateOp update) {
for (Map.Entry<Key, Operation> change : update.getChanges().entrySet()) {
Operation op = change.getValue();
if (op.type == UpdateOp.Operation.Type.MAX || op.type == UpdateOp.Operation.Type.SET) {
if (MODIFIED.equals(change.getKey().getName())) {
return Long.parseLong(op.value.toString());
}
}
}
return 0L;
}
private <T extends Document> void insertDocuments(Collection<T> collection, List<T> documents) {
Connection connection = null;
String tableName = getTable(collection);
List<String> ids = new ArrayList<String>();
try {
connection = getConnection();
for (T document : documents) {
String data = SR.asString(document);
Long modified = (Long) document.get(MODIFIED);
Number flag = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
Boolean hasBinary = flag == null ? false : flag.intValue() == NodeDocument.HAS_BINARY_VAL;
Long modcount = (Long) document.get(MODCOUNT);
Long cmodcount = (Long) document.get(NodeDocument.COLLISIONSMODCOUNT);
dbInsert(connection, tableName, document.getId(), modified, hasBinary, modcount, cmodcount, data);
}
connection.commit();
} catch (SQLException ex) {
LOG.debug("insert of " + ids + " failed", ex);
try {
if (connection != null) {
connection.rollback();
}
} catch (SQLException e) {
// TODO
}
throw new DocumentStoreException(ex);
} finally {
closeConnection(connection);
}
}
    // configuration
    // Whether to SKIP GZIP compression of the BDATA column (set via the
    // system property ...RDBDocumentStore.NOGZIP); compression is on by default
    private static boolean NOGZIP = Boolean.getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOGZIP");
    // Number of documents to insert at once for batch create
    private static int CHUNKSIZE = Integer.getInteger("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.CHUNKSIZE", 64);
private static byte[] asBytes(String data) {
byte[] bytes;
try {
bytes = data.getBytes("UTF-8");
} catch (UnsupportedEncodingException ex) {
LOG.error("UTF-8 not supported??", ex);
throw new DocumentStoreException(ex);
}
if (NOGZIP) {
return bytes;
} else {
try {
ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length());
GZIPOutputStream gos = new GZIPOutputStream(bos) {
{
// TODO: make this configurable
this.def.setLevel(Deflater.BEST_SPEED);
}
};
gos.write(bytes);
gos.close();
return bos.toByteArray();
} catch (IOException ex) {
LOG.error("Error while gzipping contents", ex);
throw new DocumentStoreException(ex);
}
}
}
@CheckForNull
private RDBRow dbRead(Connection connection, String tableName, String id, long lastmodcount) throws SQLException {
PreparedStatement stmt;
boolean useCaseStatement = lastmodcount != -1 && allowsCaseInSelect;
if (useCaseStatement) {
// either we don't have a previous version of the document
// or the database does not support CASE in SELECT
stmt = connection.prepareStatement("select MODIFIED, MODCOUNT, HASBINARY, DATA, BDATA from " + tableName
+ " where ID = ?");
} else {
// the case statement causes the actual row data not to be
// sent in case we already have it
stmt = connection
.prepareStatement("select MODIFIED, MODCOUNT, HASBINARY, case MODCOUNT when ? then null else DATA end as DATA, "
+ "case MODCOUNT when ? then null else BDATA end as BDATA from " + tableName + " where ID = ?");
}
try {
if (useCaseStatement) {
stmt.setString(1, id);
}
else {
stmt.setLong(1, lastmodcount);
stmt.setLong(2, lastmodcount);
stmt.setString(3, id);
}
ResultSet rs = stmt.executeQuery();
if (rs.next()) {
long modified = rs.getLong(1);
long modcount = rs.getLong(2);
long hasBinary = rs.getLong(3);
String data = rs.getString(4);
byte[] bdata = rs.getBytes(5);
return new RDBRow(id, hasBinary == 1, modified, modcount, data, bdata);
} else {
return null;
}
} catch (SQLException ex) {
LOG.error("attempting to read " + id + " (id length is " + id.length() + ")", ex);
// DB2 throws an SQLException for invalid keys; handle this more
// gracefully
if ("22001".equals(ex.getSQLState())) {
connection.rollback();
return null;
} else {
throw (ex);
}
} finally {
stmt.close();
}
}
    /**
     * Range query over (minId, maxId) exclusive, ordered by ID, optionally
     * filtered on an indexed property ("_modified" >= startValue, or
     * "_bin" == 1) and limited to {@code limit} rows.
     *
     * @throws DocumentStoreException for unsupported indexed-property values,
     *             or when a returned ID falls outside the requested range
     *             (indicates a broken database collation)
     */
    private List<RDBRow> dbQuery(Connection connection, String tableName, String minId, String maxId, String indexedProperty,
            long startValue, int limit) throws SQLException {
        String t = "select ID, MODIFIED, MODCOUNT, HASBINARY, DATA, BDATA from " + tableName + " where ID > ? and ID < ?";
        if (indexedProperty != null) {
            if (MODIFIED.equals(indexedProperty)) {
                t += " and MODIFIED >= ?";
            } else if (NodeDocument.HAS_BINARY_FLAG.equals(indexedProperty)) {
                if (startValue != NodeDocument.HAS_BINARY_VAL) {
                    throw new DocumentStoreException("unsupported value for property " + NodeDocument.HAS_BINARY_FLAG);
                }
                t += " and HASBINARY = 1";
            }
        }
        t += " order by ID";
        // some databases (MySQL) use LIMIT, others FETCH FIRST ... ROWS ONLY
        if (limit != Integer.MAX_VALUE) {
            t += this.needsLimit ? (" LIMIT " + limit) : (" FETCH FIRST " + limit + " ROWS ONLY");
        }
        PreparedStatement stmt = connection.prepareStatement(t);
        List<RDBRow> result = new ArrayList<RDBRow>();
        try {
            int si = 1;
            stmt.setString(si++, minId);
            stmt.setString(si++, maxId);
            if (MODIFIED.equals(indexedProperty)) {
                stmt.setLong(si++, startValue);
            }
            if (limit != Integer.MAX_VALUE) {
                stmt.setFetchSize(limit);
            }
            ResultSet rs = stmt.executeQuery();
            while (rs.next() && result.size() < limit) {
                String id = rs.getString(1);
                // sanity check: IDs outside the range indicate the database
                // does not collate by Unicode code point as required
                if (id.compareTo(minId) < 0 || id.compareTo(maxId) > 0) {
                    throw new DocumentStoreException("unexpected query result: '" + minId + "' < '" + id + "' < '" + maxId + "' - broken DB collation?");
                }
                long modified = rs.getLong(2);
                long modcount = rs.getLong(3);
                long hasBinary = rs.getLong(4);
                String data = rs.getString(5);
                byte[] bdata = rs.getBytes(6);
                result.add(new RDBRow(id, hasBinary == 1, modified, modcount, data, bdata));
            }
        } finally {
            stmt.close();
        }
        return result;
    }
    /**
     * Full-rewrite UPDATE of a single row; replaces DATA/BDATA entirely.
     * Small serializations go into DATA (with BDATA nulled); larger ones are
     * stored (usually GZIPped) in BDATA with DATA set to the marker
     * "\"blob\"". When {@code oldmodcount} is non-null the update is
     * conditional on the row still carrying that MODCOUNT (optimistic lock).
     *
     * @return whether exactly one row was updated
     */
    private boolean dbUpdate(Connection connection, String tableName, String id, Long modified, Boolean hasBinary, Long modcount, Long cmodcount, Long oldmodcount,
            String data) throws SQLException {
        String t = "update " + tableName + " set MODIFIED = ?, HASBINARY = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, BDATA = ? where ID = ?";
        if (oldmodcount != null) {
            t += " and MODCOUNT = ?";
        }
        PreparedStatement stmt = connection.prepareStatement(t);
        try {
            int si = 1;
            stmt.setObject(si++, modified, Types.BIGINT);
            stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, modcount, Types.BIGINT);
            stmt.setObject(si++, cmodcount == null ? 0 : cmodcount, Types.BIGINT);
            stmt.setObject(si++, data.length(), Types.BIGINT);
            // conservative size estimate: CHAR2OCTETRATIO octets per char
            if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                stmt.setString(si++, data);
                stmt.setBinaryStream(si++, null, 0);
            } else {
                stmt.setString(si++, "\"blob\"");
                byte[] bytes = asBytes(data);
                stmt.setBytes(si++, bytes);
            }
            stmt.setString(si++, id);
            if (oldmodcount != null) {
                stmt.setObject(si++, oldmodcount, Types.BIGINT);
            }
            int result = stmt.executeUpdate();
            if (result != 1) {
                LOG.debug("DB update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
            }
            return result == 1;
        } finally {
            stmt.close();
        }
    }
    /**
     * Appends a serialized change (",&lt;json&gt;") to the DATA column of a
     * single row instead of rewriting the whole document; also updates
     * MODIFIED (via GREATEST), HASBINARY, MODCOUNT, CMODCOUNT and DSIZE.
     * Conditional on {@code oldmodcount} when given.
     *
     * @return whether exactly one row was updated
     */
    private boolean dbAppendingUpdate(Connection connection, String tableName, String id, Long modified, Boolean hasBinary, Long modcount, Long cmodcount, Long oldmodcount,
            String appendData) throws SQLException {
        StringBuilder t = new StringBuilder();
        t.append("update " + tableName + " set MODIFIED = GREATEST(MODIFIED, ?), HASBINARY = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = DSIZE + ?, ");
        // some databases (MySQL) need CONCAT instead of the || operator
        t.append(this.needsConcat ? "DATA = CONCAT(DATA, ?) " : "DATA = DATA || CAST(? AS varchar(" + this.dataLimitInOctets
                + ")) ");
        t.append("where ID = ?");
        if (oldmodcount != null) {
            t.append(" and MODCOUNT = ?");
        }
        PreparedStatement stmt = connection.prepareStatement(t.toString());
        try {
            int si = 1;
            stmt.setObject(si++, modified, Types.BIGINT);
            stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, modcount, Types.BIGINT);
            stmt.setObject(si++, cmodcount == null ? 0 : cmodcount, Types.BIGINT);
            stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
            // leading comma separates this diff from the previous contents
            stmt.setString(si++, "," + appendData);
            stmt.setString(si++, id);
            if (oldmodcount != null) {
                stmt.setObject(si++, oldmodcount, Types.BIGINT);
            }
            int result = stmt.executeUpdate();
            if (result != 1) {
                LOG.debug("DB append update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
            }
            return result == 1;
        }
        finally {
            stmt.close();
        }
    }
    /**
     * Appends the same serialized change to multiple rows (IN-list of ids) in
     * one statement, bumping MODIFIED (via GREATEST), MODCOUNT and DSIZE.
     *
     * @return whether all {@code ids.size()} rows were updated
     */
    private boolean dbBatchedAppendingUpdate(Connection connection, String tableName, List<String> ids, Long modified, String appendData) throws SQLException {
        StringBuilder t = new StringBuilder();
        t.append("update " + tableName + " set MODIFIED = GREATEST(MODIFIED, ?), MODCOUNT = MODCOUNT + 1, DSIZE = DSIZE + ?, ");
        // some databases (MySQL) need CONCAT instead of the || operator
        t.append(this.needsConcat ? "DATA = CONCAT(DATA, ?) " : "DATA = DATA || CAST(? AS varchar(" + this.dataLimitInOctets
                + ")) ");
        t.append("where ID in (");
        for (int i = 0; i < ids.size(); i++) {
            if (i != 0) {
                t.append(',');
            }
            t.append('?');
        }
        t.append(")");
        PreparedStatement stmt = connection.prepareStatement(t.toString());
        try {
            int si = 1;
            stmt.setObject(si++, modified, Types.BIGINT);
            stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
            // leading comma separates this diff from the previous contents
            stmt.setString(si++, "," + appendData);
            for (String id : ids) {
                stmt.setString(si++, id);
            }
            int result = stmt.executeUpdate();
            if (result != ids.size()) {
                LOG.debug("DB update failed: only " + result + " of " + ids.size() + " updated. Table: " + tableName + ", IDs:" + ids);
            }
            return result == ids.size();
        }
        finally {
            stmt.close();
        }
    }
    /**
     * Inserts a single row. Small serializations go into DATA (with BDATA
     * nulled); larger ones are stored in BDATA with DATA set to the marker
     * "\"blob\"".
     *
     * @return whether exactly one row was inserted
     */
    private boolean dbInsert(Connection connection, String tableName, String id, Long modified, Boolean hasBinary, Long modcount,
            Long cmodcount, String data) throws SQLException {
        PreparedStatement stmt = connection.prepareStatement("insert into " + tableName
                + "(ID, MODIFIED, HASBINARY, MODCOUNT, CMODCOUNT, DSIZE, DATA, BDATA) values (?, ?, ?, ?, ?, ?, ?, ?)");
        try {
            int si = 1;
            stmt.setString(si++, id);
            stmt.setObject(si++, modified, Types.BIGINT);
            stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, modcount, Types.BIGINT);
            stmt.setObject(si++, cmodcount == null ? 0 : cmodcount, Types.BIGINT);
            stmt.setObject(si++, data.length(), Types.BIGINT);
            // conservative size estimate: CHAR2OCTETRATIO octets per char
            if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                stmt.setString(si++, data);
                stmt.setBinaryStream(si++, null, 0);
            } else {
                stmt.setString(si++, "\"blob\"");
                byte[] bytes = asBytes(data);
                stmt.setBytes(si++, bytes);
            }
            int result = stmt.executeUpdate();
            if (result != 1) {
                LOG.debug("DB insert failed for " + tableName + "/" + id);
            }
            return result == 1;
        } finally {
            stmt.close();
        }
    }
private void dbDelete(Connection connection, String tableName, List<String> ids) throws SQLException {
PreparedStatement stmt;
int cnt = ids.size();
if (cnt == 1) {
stmt = connection.prepareStatement("delete from " + tableName + " where ID=?");
} else {
StringBuilder inClause = new StringBuilder();
for (int i = 0; i < cnt; i++) {
inClause.append('?');
if (i != cnt - 1) {
inClause.append(',');
}
}
stmt = connection.prepareStatement("delete from " + tableName + " where ID in (" + inClause.toString() + ")");
}
try {
for (int i = 0; i < cnt; i++) {
stmt.setString(i + 1, ids.get(i));
}
int result = stmt.executeUpdate();
if (result != cnt) {
LOG.debug("DB delete failed for " + tableName + "/" + ids);
}
} finally {
stmt.close();
}
}
    @Override
    public void setReadWriteMode(String readWriteMode) {
        // intentionally ignored: this store does not support switching
        // read/write modes
    }
    // Unchecked cast helper; callers only pass NodeDocuments obtained for
    // Collection.NODES, where T is NodeDocument.
    @SuppressWarnings("unchecked")
    private static <T extends Document> T castAsT(NodeDocument doc) {
        return (T) doc;
    }
    // Memory Cache
    // cache of NODES documents, keyed by document id wrapped as StringValue
    private Cache<CacheValue, NodeDocument> nodesCache;
    // statistics facade exposed via getCacheStats()
    private CacheStats cacheStats;
    // striped locks (64 stripes) guarding per-id cache consistency
    private final Striped<Lock> locks = Striped.lock(64);
private Lock getAndLock(String key) {
Lock l = locks.get(key);
l.lock();
return l;
}
@CheckForNull
private static NodeDocument unwrap(@Nonnull NodeDocument doc) {
return doc == NodeDocument.NULL ? null : doc;
}
@Nonnull
private static NodeDocument wrap(@CheckForNull NodeDocument doc) {
return doc == null ? NodeDocument.NULL : doc;
}
    /**
     * Adds a document to the {@link #nodesCache} iff there is no document in
     * the cache with the document key. This method does not acquire a lock from
     * {@link #locks}! The caller must ensure a lock is held for the given
     * document.
     *
     * @param doc
     *            the document to add to the cache.
     * @return either the given <code>doc</code> or the document already present
     *         in the cache.
     */
    @Nonnull
    private NodeDocument addToCache(@Nonnull final NodeDocument doc) {
        if (doc == NodeDocument.NULL) {
            throw new IllegalArgumentException("doc must not be NULL document");
        }
        doc.seal();
        // make sure we only cache the document if it wasn't
        // changed and cached by some other thread in the
        // meantime. That is, use get() with a Callable,
        // which is only used when the document isn't there
        try {
            CacheValue key = new StringValue(doc.getId());
            for (;;) {
                NodeDocument cached = nodesCache.get(key, new Callable<NodeDocument>() {
                    @Override
                    public NodeDocument call() {
                        return doc;
                    }
                });
                if (cached != NodeDocument.NULL) {
                    return cached;
                } else {
                    // a cached NULL sentinel means "document does not exist";
                    // drop it and retry the loop so our real document wins
                    nodesCache.invalidate(key);
                }
            }
        } catch (ExecutionException e) {
            // will never happen because call() just returns
            // the already available doc
            throw new IllegalStateException(e);
        }
    }
@Nonnull
private void applyToCache(@Nonnull final NodeDocument oldDoc, @Nonnull final NodeDocument newDoc) {
NodeDocument cached = addToCache(newDoc);
if (cached == newDoc) {
// successful
return;
} else if (oldDoc == null) {
// this is an insert and some other thread was quicker
// loading it into the cache -> return now
return;
} else {
CacheValue key = new StringValue(newDoc.getId());
// this is an update (oldDoc != null)
if (Objects.equal(cached.getModCount(), oldDoc.getModCount())) {
nodesCache.put(key, newDoc);
} else {
// the cache entry was modified by some other thread in
// the meantime. the updated cache entry may or may not
// include this update. we cannot just apply our update
// on top of the cached entry.
// therefore we must invalidate the cache entry
nodesCache.invalidate(key);
}
}
}
private <T extends Document> void addToCache(Collection<T> collection, T doc) {
if (collection == Collection.NODES) {
Lock lock = getAndLock(doc.getId());
try {
addToCache((NodeDocument) doc);
} finally {
lock.unlock();
}
}
}
    /**
     * Converts a query-result row into a document while keeping the nodes
     * cache consistent: a cached document with an equal or newer MODCOUNT wins
     * over the row; otherwise the freshly deserialized document replaces the
     * cache entry (checked again under the per-id stripe lock).
     */
    private <T extends Document> T runThroughCache(Collection<T> collection, RDBRow row, long now) {
        if (collection != Collection.NODES) {
            // not in the cache anyway
            return SR.fromRow(collection, row);
        }
        String id = row.getId();
        CacheValue cacheKey = new StringValue(id);
        NodeDocument inCache = nodesCache.getIfPresent(cacheKey);
        Number modCount = row.getModcount();
        // do not overwrite document in cache if the
        // existing one in the cache is newer
        if (inCache != null && inCache != NodeDocument.NULL) {
            // check mod count
            Number cachedModCount = inCache.getModCount();
            if (cachedModCount == null) {
                throw new IllegalStateException("Missing " + Document.MOD_COUNT);
            }
            if (modCount.longValue() <= cachedModCount.longValue()) {
                // we can use the cached document
                inCache.markUpToDate(now);
                return (T) inCache;
            }
        }
        NodeDocument fresh = (NodeDocument) SR.fromRow(collection, row);
        fresh.seal();
        // re-check under the stripe lock: the cache may have changed since
        // the optimistic check above
        Lock lock = getAndLock(id);
        try {
            inCache = nodesCache.getIfPresent(cacheKey);
            if (inCache != null && inCache != NodeDocument.NULL) {
                // check mod count
                Number cachedModCount = inCache.getModCount();
                if (cachedModCount == null) {
                    throw new IllegalStateException("Missing " + Document.MOD_COUNT);
                }
                if (modCount.longValue() > cachedModCount.longValue()) {
                    nodesCache.put(cacheKey, fresh);
                } else {
                    fresh = inCache;
                }
            }
            else {
                nodesCache.put(cacheKey, fresh);
            }
        } finally {
            lock.unlock();
        }
        return (T) fresh;
    }
private Connection getConnection() throws SQLException {
Connection c = this.ds.getConnection();
c.setAutoCommit(false);
return c;
}
private void closeConnection(Connection c) {
if (c != null) {
try {
c.close();
} catch (SQLException ex) {
// log me
}
}
}
private boolean hasChangesToCollisions(UpdateOp update) {
for (Entry<Key, Operation> e : checkNotNull(update).getChanges().entrySet()) {
Key k = e.getKey();
Operation op = e.getValue();
if (op.type == Operation.Type.SET_MAP_ENTRY) {
if (NodeDocument.COLLISIONS.equals(k.getName())) {
return true;
}
}
}
return false;
}
}
| oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.rdb;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.locks.Lock;
import java.util.zip.Deflater;
import java.util.zip.GZIPOutputStream;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.sql.DataSource;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.cache.CacheValue;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.Document;
import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
import org.apache.jackrabbit.oak.plugins.document.Revision;
import org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
import org.apache.jackrabbit.oak.plugins.document.UpdateUtils;
import org.apache.jackrabbit.oak.plugins.document.cache.CachingDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Objects;
import com.google.common.cache.Cache;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Striped;
/**
* Implementation of {@link CachingDocumentStore} for relational databases.
*
* <h3>Supported Databases</h3>
* <p>
* The code is supposed to be sufficiently generic to run with a variety of
* database implementations. However, the tables are created when required to
* simplify testing, and <em>that</em> code specifically supports these
* databases:
* <ul>
* <li>h2</li>
* <li>IBM DB2</li>
* <li>Postgres</li>
* <li>MariaDB (MySQL) (experimental)</li>
* <li>Oracle (experimental)</li>
* </ul>
*
* <h3>Table Layout</h3>
* <p>
* Data for each of the DocumentStore's {@link Collection}s is stored in its own
* database table (with a name matching the collection).
* <p>
* The tables essentially implement key/value storage, where the key usually is
* derived from an Oak path, and the value is a serialization of a
* {@link Document} (or a part of one). Additional fields are used for queries,
* debugging, and concurrency control:
* <table style="text-align: left;">
* <thead>
* <tr>
* <th>Column</th>
* <th>Type</th>
* <th>Description</th>
* </tr>
* </thead> <tbody>
* <tr>
* <th>ID</th>
* <td>varchar(512) not null primary key</td>
* <td>the document's key</td>
* </tr>
* <tr>
* <th>MODIFIED</th>
* <td>bigint</td>
* <td>low-resolution timestamp
* </tr>
* <tr>
* <th>HASBINARY</th>
* <td>smallint</td>
* <td>flag indicating whether the document has binary properties
* </tr>
* <tr>
* <th>MODCOUNT</th>
* <td>bigint</td>
* <td>modification counter, used for avoiding overlapping updates</td>
* </tr>
* <tr>
* <th>DSIZE</th>
* <td>bigint</td>
* <td>the approximate size of the document's JSON serialization (for debugging purposes)</td>
* </tr>
* <tr>
* <th>DATA</th>
* <td>varchar(16384)</td>
* <td>the document's JSON serialization (only used for small document sizes, in
* which case BDATA (below) is not set), or a sequence of JSON serialized update operations
* to be applied against the last full serialization</td>
* </tr>
* <tr>
* <th>BDATA</th>
* <td>blob</td>
* <td>the document's JSON serialization (usually GZIPped, only used for "large"
* documents)</td>
* </tr>
* </tbody>
* </table>
* <p>
* The names of database tables can be prefixed; the purpose is mainly for testing, as
* tables can also be dropped automatically when the store is disposed (this only happens
* for those tables that have been created on demand)
* <p>
* <em>Note that the database needs to be created/configured to support all Unicode
* characters in text fields, and to collate by Unicode code point (in DB2: "identity collation",
* in Postgres: "C").
* THIS IS NOT THE DEFAULT!</em>
* <p>
* <em>For MySQL, the database parameter "max_allowed_packet" needs to be increased to support ~2M blobs.</em>
*
* <h3>Caching</h3>
* <p>
* The cache borrows heavily from the {@link MongoDocumentStore} implementation;
* however it does not support the off-heap mechanism yet.
*
* <h3>Queries</h3>
* <p>
* The implementation currently supports only two indexed properties: "_modified" and
* "_bin". Attempts to use a different indexed property will cause a {@link DocumentStoreException}.
*/
public class RDBDocumentStore implements CachingDocumentStore {
    /**
     * Creates a {@linkplain RDBDocumentStore} instance using the provided
     * {@link DataSource}, {@link DocumentMK.Builder}, and {@link RDBOptions}.
     *
     * @throws DocumentStoreException when initialization (including on-demand
     *             table creation) fails
     */
    public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder, RDBOptions options) {
        try {
            initialize(ds, builder, options);
        } catch (Exception ex) {
            throw new DocumentStoreException("initializing RDB document store", ex);
        }
    }
    /**
     * Creates a {@linkplain RDBDocumentStore} instance using the provided
     * {@link DataSource}, {@link DocumentMK.Builder}, and default {@link RDBOptions}.
     */
    public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder) {
        this(ds, builder, new RDBOptions());
    }
    @Override
    public <T extends Document> T find(Collection<T> collection, String id) {
        // no cache-age restriction: any cached copy is acceptable
        return find(collection, id, Integer.MAX_VALUE);
    }
    @Override
    public <T extends Document> T find(final Collection<T> collection, final String id, int maxCacheAge) {
        return readDocumentCached(collection, id, maxCacheAge);
    }
    @Override
    public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, int limit) {
        // range query without a condition on an indexed property
        return query(collection, fromKey, toKey, null, 0, limit);
    }
    @Override
    public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, String indexedProperty,
            long startValue, int limit) {
        return internalQuery(collection, fromKey, toKey, indexedProperty, startValue, limit);
    }
    @Override
    public <T extends Document> void remove(Collection<T> collection, String id) {
        // delete first, then evict from the cache
        delete(collection, id);
        invalidateCache(collection, id);
    }
    @Override
    public <T extends Document> void remove(Collection<T> collection, List<String> ids) {
        // NOTE(review): this variant invalidates the cache *before* deleting,
        // while the single-id variant deletes first; a concurrent read could
        // re-populate the cache with a document about to be deleted. Confirm
        // whether the ordering difference is intentional.
        for (String id : ids) {
            invalidateCache(collection, id);
        }
        delete(collection, ids);
    }
    @Override
    public <T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps) {
        return internalCreate(collection, updateOps);
    }
    @Override
    public <T extends Document> void update(Collection<T> collection, List<String> keys, UpdateOp updateOp) {
        internalUpdate(collection, keys, updateOp);
    }
    @Override
    public <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update) {
        // allowCreate = true, checkConditions = false
        return internalCreateOrUpdate(collection, update, true, false);
    }
    @Override
    public <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update) {
        // allowCreate = false, checkConditions = true
        return internalCreateOrUpdate(collection, update, false, true);
    }
@Override
public void invalidateCache() {
nodesCache.invalidateAll();
}
@Override
public <T extends Document> void invalidateCache(Collection<T> collection, String id) {
if (collection == Collection.NODES) {
Lock lock = getAndLock(id);
try {
nodesCache.invalidate(new StringValue(id));
} finally {
lock.unlock();
}
}
}
@Override
public void dispose() {
if (!this.tablesToBeDropped.isEmpty()) {
LOG.debug("attempting to drop: " + this.tablesToBeDropped);
for (String tname : this.tablesToBeDropped) {
Connection con = null;
try {
con = getConnection();
try {
Statement stmt = con.createStatement();
stmt.execute("drop table " + tname);
stmt.close();
con.commit();
} catch (SQLException ex) {
LOG.debug("attempting to drop: " + tname);
}
} catch (SQLException ex) {
LOG.debug("attempting to drop: " + tname);
} finally {
try {
if (con != null) {
con.close();
}
} catch (SQLException ex) {
LOG.debug("on close ", ex);
}
}
}
}
this.ds = null;
}
@Override
public <T extends Document> T getIfCached(Collection<T> collection, String id) {
if (collection != Collection.NODES) {
return null;
} else {
NodeDocument doc = nodesCache.getIfPresent(new StringValue(id));
return castAsT(doc);
}
}
    @Override
    public CacheStats getCacheStats() {
        return this.cacheStats;
    }
    // implementation
    private static final String MODIFIED = "_modified";
    private static final String MODCOUNT = "_modCount";
    private static final String ID = "_id";
    private static final Logger LOG = LoggerFactory.getLogger(RDBDocumentStore.class);
    private final Comparator<Revision> comparator = StableRevisionComparator.REVERSE;
    // non-null only when debug logging was enabled at creation; used by
    // finalize() to report stores that were never disposed
    private Exception callStack;
    private DataSource ds;
    // from options
    private String tablePrefix = "";
    private Set<String> tablesToBeDropped = new HashSet<String>();
    // ratio between Java characters and UTF-8 encoding
    // a) single characters will fit into 3 bytes
    // b) a surrogate pair (two Java characters) will fit into 4 bytes
    // thus...
    private static int CHAR2OCTETRATIO = 3;
    // capacity of DATA column (re-discovered from table metadata in initialize)
    private int dataLimitInOctets = 16384;
    // number of retries for updates
    private static int RETRIES = 10;
    // for DBs that prefer "concat" over "||"
    private boolean needsConcat = false;
    // for DBs that prefer "limit" over "fetch first"
    private boolean needsLimit = false;
    // whether the DB supports CASE expressions in SELECT
    private boolean allowsCaseInSelect = true;
    // set of supported indexed properties
    private static Set<String> INDEXEDPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { MODIFIED,
            NodeDocument.HAS_BINARY_FLAG }));
    // set of properties not serialized to JSON
    private static Set<String> COLUMNPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { ID,
            NodeDocument.HAS_BINARY_FLAG, MODIFIED, MODCOUNT }));
    private RDBDocumentSerializer SR = new RDBDocumentSerializer(this, COLUMNPROPERTIES);
    /**
     * Initializes the store: normalizes the table prefix, builds the document
     * cache, applies database-specific session tweaks, and creates the
     * CLUSTER_NODES/NODES/SETTINGS tables when they do not exist yet.
     */
    private void initialize(DataSource ds, DocumentMK.Builder builder, RDBOptions options) throws Exception {
        this.tablePrefix = options.getTablePrefix();
        if (tablePrefix.length() > 0 && !tablePrefix.endsWith("_")) {
            tablePrefix += "_";
        }
        this.ds = ds;
        this.callStack = LOG.isDebugEnabled() ? new Exception("call stack of RDBDocumentStore creation") : null;
        this.nodesCache = builder.buildDocumentCache(this);
        this.cacheStats = new CacheStats(nodesCache, "Document-Documents", builder.getWeigher(), builder.getDocumentCacheSize());
        Connection con = ds.getConnection();
        String dbtype = con.getMetaData().getDatabaseProductName();
        if ("Oracle".equals(dbtype)) {
            // https://issues.apache.org/jira/browse/OAK-1914
            // for some reason, the default for NLS_SORT is incorrect
            Statement stmt = con.createStatement();
            stmt.execute("ALTER SESSION SET NLS_SORT='BINARY'");
            stmt.close();
            con.commit();
        } else if ("MySQL".equals(dbtype)) {
            // MySQL needs CONCAT instead of "||" and LIMIT instead of FETCH FIRST
            this.needsConcat = true;
            this.needsLimit = true;
        }
        try {
            con.setAutoCommit(false);
            createTableFor(con, dbtype, Collection.CLUSTER_NODES, options.isDropTablesOnClose());
            createTableFor(con, dbtype, Collection.NODES, options.isDropTablesOnClose());
            createTableFor(con, dbtype, Collection.SETTINGS, options.isDropTablesOnClose());
        } finally {
            con.close();
        }
    }
/**
 * Ensures the backing table for the given collection exists: probes it by
 * selecting the root document ("0:/") and, when the probe fails with an
 * SQLException, creates the table using the DDL dialect for {@code dbtype}.
 * For the NODES table the probe also discovers the size of the DATA column
 * and records it in {@code dataLimitInOctets}.
 *
 * @param con open connection with auto-commit off; committed or rolled back here
 * @param dbtype database product name used to pick the DDL dialect
 * @param col the collection whose table is checked/created
 * @param dropTablesOnClose when true, a table created here is remembered in
 *            {@code tablesToBeDropped} so it can be dropped on close
 * @throws SQLException if table creation fails
 */
private void createTableFor(Connection con, String dbtype, Collection<? extends Document> col, boolean dropTablesOnClose)
        throws SQLException {
    String tableName = getTable(col);
    try {
        PreparedStatement stmt = con.prepareStatement("select DATA from " + tableName + " where ID = ?");
        try {
            stmt.setString(1, "0:/");
            ResultSet rs = stmt.executeQuery();
            if (col.equals(Collection.NODES)) {
                // try to discover size of DATA column
                ResultSetMetaData met = rs.getMetaData();
                this.dataLimitInOctets = met.getPrecision(1);
            }
        } finally {
            // fix: the probe statement (and its result set) used to leak
            stmt.close();
        }
    } catch (SQLException ex) {
        // table does not appear to exist
        con.rollback();
        LOG.info("Attempting to create table " + tableName + " in " + dbtype);
        Statement stmt = con.createStatement();
        try {
            // the code below likely will need to be extended for new
            // database types
            if ("PostgreSQL".equals(dbtype)) {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA bytea)");
            } else if ("DB2".equals(dbtype) || (dbtype != null && dbtype.startsWith("DB2/"))) {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA blob)");
            } else if ("MySQL".equals(dbtype)) {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA mediumblob)");
            } else if ("Oracle".equals(dbtype)) {
                // see https://issues.apache.org/jira/browse/OAK-1914
                this.dataLimitInOctets = 4000;
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED number, HASBINARY number, MODCOUNT number, CMODCOUNT number, DSIZE number, DATA varchar(4000), BDATA blob)");
            } else {
                stmt.execute("create table "
                        + tableName
                        + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA blob)");
            }
        } finally {
            // fix: close the DDL statement even when execute() throws
            stmt.close();
        }
        con.commit();
        if (dropTablesOnClose) {
            tablesToBeDropped.add(tableName);
        }
    }
}
/**
 * Emits a diagnostic when this store is finalized without having been
 * disposed: {@code ds} still being set indicates the store was never shut
 * down, and {@code callStack} (when recorded) points at the creator.
 */
@Override
public void finalize() {
    // both fields set => store leaked without dispose(); log creator's stack
    if (this.ds != null && this.callStack != null) {
        LOG.debug("finalizing RDBDocumentStore that was not disposed", this.callStack);
    }
}
/**
 * Reads a document, serving NODES documents from {@code nodesCache} when the
 * cached copy is younger than {@code maxCacheAge} milliseconds. Non-NODES
 * collections are always read uncached. {@code maxCacheAge == 0} forces a
 * fresh read (cache invalidated first); {@code Integer.MAX_VALUE} accepts any
 * cached copy regardless of age.
 */
private <T extends Document> T readDocumentCached(final Collection<T> collection, final String id, int maxCacheAge) {
    if (collection != Collection.NODES) {
        // only NODES documents are cached
        return readDocumentUncached(collection, id, null);
    } else {
        CacheValue cacheKey = new StringValue(id);
        NodeDocument doc = null;
        if (maxCacheAge > 0) {
            // first try without lock
            doc = nodesCache.getIfPresent(cacheKey);
            if (doc != null) {
                if (maxCacheAge == Integer.MAX_VALUE || System.currentTimeMillis() - doc.getLastCheckTime() < maxCacheAge) {
                    // cached copy is fresh enough; NULL sentinel unwrapped to null
                    return castAsT(unwrap(doc));
                }
            }
        }
        try {
            // per-id lock so only one thread loads/refreshes a given document
            Lock lock = getAndLock(id);
            final NodeDocument cachedDoc = doc;
            try {
                if (maxCacheAge == 0) {
                    // caller insists on a fresh read
                    invalidateCache(collection, id);
                }
                while (true) {
                    // loads via readDocumentUncached only when absent from cache;
                    // cachedDoc lets the DB read skip sending unchanged row data
                    doc = nodesCache.get(cacheKey, new Callable<NodeDocument>() {
                        @Override
                        public NodeDocument call() throws Exception {
                            NodeDocument doc = (NodeDocument) readDocumentUncached(collection, id, cachedDoc);
                            if (doc != null) {
                                doc.seal();
                            }
                            return wrap(doc);
                        }
                    });
                    if (maxCacheAge == 0 || maxCacheAge == Integer.MAX_VALUE) {
                        break;
                    }
                    if (System.currentTimeMillis() - doc.getLastCheckTime() < maxCacheAge) {
                        break;
                    }
                    // too old: invalidate, try again
                    invalidateCache(collection, id);
                }
            } finally {
                lock.unlock();
            }
            return castAsT(unwrap(doc));
        } catch (ExecutionException e) {
            throw new IllegalStateException("Failed to load document with " + id, e);
        }
    }
}
/**
 * Creates the given documents, processing the updates in transactions of at
 * most {@code CHUNKSIZE} operations and adding successfully inserted NODES
 * documents to the cache.
 * <p>
 * Note: the previous {@code @CheckForNull} annotation was meaningless on a
 * primitive-{@code boolean} method and has been removed.
 *
 * @return {@code true} if all inserts succeeded, {@code false} on the first
 *         failed chunk (e.g. when a document already exists)
 */
private <T extends Document> boolean internalCreate(Collection<T> collection, List<UpdateOp> updates) {
    try {
        // try up to CHUNKSIZE ops in one transaction
        for (List<UpdateOp> chunks : Lists.partition(updates, CHUNKSIZE)) {
            List<T> docs = new ArrayList<T>();
            for (UpdateOp update : chunks) {
                T doc = collection.newDocument(this);
                update.increment(MODCOUNT, 1);
                if (hasChangesToCollisions(update)) {
                    update.increment(NodeDocument.COLLISIONSMODCOUNT, 1);
                }
                UpdateUtils.applyChanges(doc, update, comparator);
                // sanity check: the op's id must match the document it produced
                if (!update.getId().equals(doc.getId())) {
                    throw new DocumentStoreException("ID mismatch - UpdateOp: " + update.getId() + ", ID property: "
                            + doc.getId());
                }
                docs.add(doc);
            }
            insertDocuments(collection, docs);
            for (T doc : docs) {
                addToCache(collection, doc);
            }
        }
        return true;
    } catch (DocumentStoreException ex) {
        // deliberate: a failed create is reported to the caller as "false",
        // not as an exception
        return false;
    }
}
/**
 * Creates the document if absent (when {@code allowCreate}), otherwise
 * applies {@code update} to the existing one via {@link #internalUpdate}.
 * A lost insert race falls back to an update against a fresh DB read.
 *
 * @return the previous version of the document, or {@code null} when it was
 *         newly created or conditions were not met
 */
@CheckForNull
private <T extends Document> T internalCreateOrUpdate(Collection<T> collection, UpdateOp update, boolean allowCreate,
        boolean checkConditions) {
    T oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
    if (oldDoc == null) {
        if (!allowCreate) {
            return null;
        } else if (!update.isNew()) {
            throw new DocumentStoreException("Document does not exist: " + update.getId());
        }
        T doc = collection.newDocument(this);
        if (checkConditions && !UpdateUtils.checkConditions(doc, update)) {
            return null;
        }
        update.increment(MODCOUNT, 1);
        if (hasChangesToCollisions(update)) {
            update.increment(NodeDocument.COLLISIONSMODCOUNT, 1);
        }
        UpdateUtils.applyChanges(doc, update, comparator);
        try {
            insertDocuments(collection, Collections.singletonList(doc));
            addToCache(collection, doc);
            // oldDoc is null here: "created" is signalled by returning null
            return oldDoc;
        } catch (DocumentStoreException ex) {
            // may have failed due to a race condition; try update instead
            // this is an edge case, so it's ok to bypass the cache
            // (avoiding a race condition where the DB is already updated
            // but the cache is not)
            oldDoc = readDocumentUncached(collection, update.getId(), null);
            if (oldDoc == null) {
                // something else went wrong
                LOG.error("insert failed, but document " + update.getId() + " is not present, aborting", ex);
                throw (ex);
            }
            return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
        }
    } else {
        return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
    }
}
/**
 * Applies {@code update} to {@code oldDoc} with optimistic concurrency:
 * the DB write is conditional on MODCOUNT, and on conflict the document is
 * re-read and the changes re-applied, up to {@code maxRetries} attempts.
 *
 * @return previous version of document or <code>null</code>
 */
@CheckForNull
private <T extends Document> T internalUpdate(Collection<T> collection, UpdateOp update, T oldDoc, boolean checkConditions,
        int maxRetries) {
    T doc = applyChanges(collection, oldDoc, update, checkConditions);
    if (doc == null) {
        // conditions not met
        return null;
    } else {
        // per-id lock serializes writers of the same document in this process
        Lock l = getAndLock(update.getId());
        try {
            boolean success = false;
            int retries = maxRetries;
            while (!success && retries > 0) {
                long lastmodcount = (Long) oldDoc.get(MODCOUNT);
                // conditional update: only succeeds if MODCOUNT is unchanged
                success = updateDocument(collection, doc, update, lastmodcount);
                if (!success) {
                    retries -= 1;
                    oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
                    if (oldDoc != null) {
                        long newmodcount = (Long) oldDoc.get(MODCOUNT);
                        if (lastmodcount == newmodcount) {
                            // cached copy did not change so it probably was updated by
                            // a different instance, get a fresh one
                            oldDoc = readDocumentUncached(collection, update.getId(), null);
                        }
                    }
                    if (oldDoc == null) {
                        // document was there but is now gone
                        LOG.error("failed to apply update because document is gone in the meantime: " + update.getId());
                        return null;
                    }
                    // re-apply changes onto the fresher base document
                    doc = applyChanges(collection, oldDoc, update, checkConditions);
                    if (doc == null) {
                        return null;
                    }
                } else {
                    if (collection == Collection.NODES) {
                        applyToCache((NodeDocument) oldDoc, (NodeDocument) doc);
                    }
                }
            }
            if (!success) {
                throw new DocumentStoreException("failed update of " + doc.getId() + " (race?) after " + maxRetries + " retries");
            }
            return oldDoc;
        } finally {
            l.unlock();
        }
    }
}
/**
 * Produces a new, sealed document: a deep copy of {@code oldDoc} with the
 * given update applied. MODCOUNT (and COLLISIONSMODCOUNT when collisions are
 * touched) is incremented on the update op before applying it.
 *
 * @return the updated copy, or {@code null} when conditions are checked and
 *         not met
 */
@CheckForNull
private <T extends Document> T applyChanges(Collection<T> collection, T oldDoc, UpdateOp update, boolean checkConditions) {
    T copy = collection.newDocument(this);
    oldDoc.deepCopy(copy);
    boolean conditionsOk = !checkConditions || UpdateUtils.checkConditions(copy, update);
    if (!conditionsOk) {
        return null;
    }
    if (hasChangesToCollisions(update)) {
        update.increment(NodeDocument.COLLISIONSMODCOUNT, 1);
    }
    update.increment(MODCOUNT, 1);
    UpdateUtils.applyChanges(copy, update, comparator);
    copy.seal();
    return copy;
}
/**
 * Applies one update to many documents. When the update is appendable and
 * needs no previous state, it is executed as a single batched SQL append per
 * chunk of ids and the cache is fixed up afterwards; otherwise (or when the
 * batch fails) it falls back to per-document create-or-update.
 * <p>
 * Note: the previous {@code @CheckForNull} annotation was meaningless on a
 * {@code void} method and has been removed.
 */
@SuppressWarnings("unchecked")
private <T extends Document> void internalUpdate(Collection<T> collection, List<String> ids, UpdateOp update) {
    if (isAppendableUpdate(update) && !requiresPreviousState(update)) {
        long modified = getModifiedFromUpdate(update);
        String appendData = SR.asString(update);
        for (List<String> chunkedIds : Lists.partition(ids, CHUNKSIZE)) {
            // remember what we already have in the cache
            Map<String, NodeDocument> cachedDocs = Collections.emptyMap();
            if (collection == Collection.NODES) {
                cachedDocs = new HashMap<String, NodeDocument>();
                for (String key : chunkedIds) {
                    cachedDocs.put(key, nodesCache.getIfPresent(new StringValue(key)));
                }
            }
            Connection connection = null;
            String tableName = getTable(collection);
            boolean success = false;
            try {
                connection = getConnection();
                success = dbBatchedAppendingUpdate(connection, tableName, chunkedIds, modified, appendData);
                connection.commit();
            } catch (SQLException ex) {
                // batch failed; fall back to per-document updates below
                success = false;
            } finally {
                closeConnection(connection);
            }
            if (success) {
                for (Entry<String, NodeDocument> entry : cachedDocs.entrySet()) {
                    if (entry.getValue() == null) {
                        // make sure concurrently loaded document is invalidated
                        nodesCache.invalidate(new StringValue(entry.getKey()));
                    } else {
                        // replay the update on the cached copy so the cache
                        // matches what the batched SQL wrote
                        T oldDoc = (T) (entry.getValue());
                        T newDoc = applyChanges(collection, (T) (entry.getValue()), update, true);
                        applyToCache((NodeDocument) oldDoc, (NodeDocument) newDoc);
                    }
                }
            } else {
                for (String id : chunkedIds) {
                    UpdateOp up = update.copy();
                    up = up.shallowCopy(id);
                    internalCreateOrUpdate(collection, up, false, true);
                }
            }
        }
    } else {
        // update needs previous state (or isn't appendable): apply one by one
        for (String id : ids) {
            UpdateOp up = update.copy();
            up = up.shallowCopy(id);
            internalCreateOrUpdate(collection, up, false, true);
        }
    }
}
/**
 * Runs a range query (fromKey, toKey exclusive on both ends at the SQL
 * level) with an optional filter on one indexed property, and funnels each
 * row through the cache so fresher cached copies win.
 *
 * @throws DocumentStoreException when the property is not indexed or the
 *             query fails
 */
private <T extends Document> List<T> internalQuery(Collection<T> collection, String fromKey, String toKey,
        String indexedProperty, long startValue, int limit) {
    Connection connection = null;
    String tableName = getTable(collection);
    List<T> result = new ArrayList<T>();
    if (indexedProperty != null && (!INDEXEDPROPERTIES.contains(indexedProperty))) {
        String message = "indexed property " + indexedProperty + " not supported, query was '>= '" + startValue + "'; supported properties are "+ INDEXEDPROPERTIES;
        LOG.info(message);
        throw new DocumentStoreException(message);
    }
    try {
        connection = getConnection();
        // single timestamp for the whole result set's freshness bookkeeping
        long now = System.currentTimeMillis();
        List<RDBRow> dbresult = dbQuery(connection, tableName, fromKey, toKey, indexedProperty, startValue, limit);
        for (RDBRow r : dbresult) {
            T doc = runThroughCache(collection, r, now);
            result.add(doc);
        }
    } catch (Exception ex) {
        LOG.error("SQL exception on query", ex);
        throw new DocumentStoreException(ex);
    } finally {
        closeConnection(connection);
    }
    return result;
}
/**
 * Maps a collection to its (prefixed) table name.
 *
 * @throws IllegalArgumentException for collections this store does not back
 */
private <T extends Document> String getTable(Collection<T> collection) {
    String suffix;
    if (collection == Collection.CLUSTER_NODES) {
        suffix = "CLUSTERNODES";
    } else if (collection == Collection.NODES) {
        suffix = "NODES";
    } else if (collection == Collection.SETTINGS) {
        suffix = "SETTINGS";
    } else {
        throw new IllegalArgumentException("Unknown collection: " + collection.toString());
    }
    return this.tablePrefix + suffix;
}
/**
 * Reads one document straight from the database. When {@code cachedDoc} is
 * given, its MODCOUNT is sent along so the DB can skip returning unchanged
 * row data; if the row's modcount matches, the cached document is reused
 * (and marked up to date) instead of being re-parsed.
 *
 * @return the document, or {@code null} if the row does not exist
 */
@CheckForNull
private <T extends Document> T readDocumentUncached(Collection<T> collection, String id, NodeDocument cachedDoc) {
    Connection connection = null;
    String tableName = getTable(collection);
    try {
        long lastmodcount = -1;
        if (cachedDoc != null && cachedDoc.getModCount() != null) {
            lastmodcount = cachedDoc.getModCount().longValue();
        }
        connection = getConnection();
        RDBRow row = dbRead(connection, tableName, id, lastmodcount);
        if (row == null) {
            return null;
        }
        else {
            if (lastmodcount == row.getModcount()) {
                // we can re-use the cached document
                cachedDoc.markUpToDate(System.currentTimeMillis());
                return (T)cachedDoc;
            }
            else {
                return SR.fromRow(collection, row);
            }
        }
    } catch (Exception ex) {
        throw new DocumentStoreException(ex);
    } finally {
        closeConnection(connection);
    }
}
/**
 * Deletes a single document by id; SQL failures surface as
 * DocumentStoreException.
 */
private <T extends Document> void delete(Collection<T> collection, String id) {
    String tableName = getTable(collection);
    Connection con = null;
    try {
        con = getConnection();
        dbDelete(con, tableName, Collections.singletonList(id));
        con.commit();
    } catch (Exception ex) {
        throw new DocumentStoreException(ex);
    } finally {
        closeConnection(con);
    }
}
/**
 * Deletes many documents, committing in batches of 64 ids; SQL failures
 * surface as DocumentStoreException.
 */
private <T extends Document> void delete(Collection<T> collection, List<String> ids) {
    String tableName = getTable(collection);
    for (List<String> batch : Lists.partition(ids, 64)) {
        Connection con = null;
        try {
            con = getConnection();
            dbDelete(con, tableName, batch);
            con.commit();
        } catch (Exception ex) {
            throw new DocumentStoreException(ex);
        } finally {
            closeConnection(con);
        }
    }
}
/**
 * Writes an updated document conditionally on {@code oldmodcount}.
 * Normally the update is appended to the DATA column; every 16th
 * modification (and on string-overflow) the full document is rewritten.
 *
 * @return {@code true} when exactly one row was updated (i.e. the
 *         optimistic MODCOUNT condition held)
 */
private <T extends Document> boolean updateDocument(@Nonnull Collection<T> collection, @Nonnull T document,
        @Nonnull UpdateOp update, Long oldmodcount) {
    Connection connection = null;
    String tableName = getTable(collection);
    try {
        connection = getConnection();
        Long modified = (Long) document.get(MODIFIED);
        Number flag = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
        Boolean hasBinary = flag == null ? false : flag.intValue() == NodeDocument.HAS_BINARY_VAL;
        Long modcount = (Long) document.get(MODCOUNT);
        Long cmodcount = (Long) document.get(NodeDocument.COLLISIONSMODCOUNT);
        boolean success = false;
        // every 16th update is a full rewrite
        if (isAppendableUpdate(update) && modcount % 16 != 0) {
            String appendData = SR.asString(update);
            // only append when the serialized delta still fits the column
            if (appendData.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                try {
                    success = dbAppendingUpdate(connection, tableName, document.getId(), modified, hasBinary, modcount,
                            cmodcount, oldmodcount, appendData);
                    connection.commit();
                } catch (SQLException ex) {
                    // rethrows unless the failure was a string overflow;
                    // on overflow we roll back and fall through to a rewrite
                    continueIfStringOverflow(ex);
                    connection.rollback();
                    success = false;
                }
            }
        }
        if (! success) {
            // full rewrite of the document row
            String data = SR.asString(document);
            success = dbUpdate(connection, tableName, document.getId(), modified, hasBinary, modcount, cmodcount,
                    oldmodcount, data);
            connection.commit();
        }
        return success;
    } catch (SQLException ex) {
        try {
            if (connection != null) {
                connection.rollback();
            }
        } catch (SQLException e) {
            // TODO
        }
        throw new DocumentStoreException(ex);
    } finally {
        closeConnection(connection);
    }
}
/**
 * Swallows the exception when it represents a string-value overflow
 * (SQL state 22001, or Oracle's 72000/1489), so the caller can retry with a
 * full rewrite; any other exception is rethrown.
 */
private static void continueIfStringOverflow(SQLException ex) throws SQLException {
    String state = ex.getSQLState();
    boolean isOverflow = "22001".equals(state) /* everybody */
            || ("72000".equals(state) && 1489 == ex.getErrorCode()) /* Oracle */;
    if (!isOverflow) {
        throw (ex);
    }
}
/* currently we use append for all updates, but this might change in the future */
/** Returns whether the update may be applied by appending to the DATA column. */
private static boolean isAppendableUpdate(UpdateOp update) {
    return true;
}
/* check whether this update operation requires knowledge about the previous state */
private static boolean requiresPreviousState(UpdateOp update) {
    // CONTAINS_MAP_ENTRY conditions can only be evaluated against the
    // existing document, so such updates cannot be applied blindly
    for (Operation op : update.getChanges().values()) {
        if (op.type == UpdateOp.Operation.Type.CONTAINS_MAP_ENTRY) {
            return true;
        }
    }
    return false;
}
/**
 * Extracts the value the update assigns (SET or MAX) to the MODIFIED
 * property; returns 0 when the update does not touch MODIFIED.
 */
private static long getModifiedFromUpdate(UpdateOp update) {
    for (Map.Entry<Key, Operation> change : update.getChanges().entrySet()) {
        Operation op = change.getValue();
        boolean setsValue = op.type == UpdateOp.Operation.Type.MAX || op.type == UpdateOp.Operation.Type.SET;
        if (setsValue && MODIFIED.equals(change.getKey().getName())) {
            return Long.parseLong(op.value.toString());
        }
    }
    return 0L;
}
/**
 * Inserts all given documents in a single transaction; on failure the
 * transaction is rolled back (best effort) and a DocumentStoreException is
 * thrown.
 */
private <T extends Document> void insertDocuments(Collection<T> collection, List<T> documents) {
    Connection connection = null;
    String tableName = getTable(collection);
    List<String> ids = new ArrayList<String>();
    try {
        connection = getConnection();
        for (T document : documents) {
            // fix: record the id so the failure log below actually lists the
            // ids involved (previously the list stayed empty)
            ids.add(document.getId());
            String data = SR.asString(document);
            Long modified = (Long) document.get(MODIFIED);
            Number flag = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
            Boolean hasBinary = flag == null ? false : flag.intValue() == NodeDocument.HAS_BINARY_VAL;
            Long modcount = (Long) document.get(MODCOUNT);
            Long cmodcount = (Long) document.get(NodeDocument.COLLISIONSMODCOUNT);
            dbInsert(connection, tableName, document.getId(), modified, hasBinary, modcount, cmodcount, data);
        }
        connection.commit();
    } catch (SQLException ex) {
        LOG.debug("insert of " + ids + " failed", ex);
        try {
            if (connection != null) {
                connection.rollback();
            }
        } catch (SQLException e) {
            // best effort rollback; the original failure is rethrown below
        }
        throw new DocumentStoreException(ex);
    } finally {
        closeConnection(connection);
    }
}
// configuration (read once from system properties at class load time)

// Whether to use GZIP compression for the BDATA column
private static boolean NOGZIP = Boolean.getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOGZIP");
// Number of documents to insert at once for batch create
private static int CHUNKSIZE = Integer.getInteger("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.CHUNKSIZE", 64);
// Whether to use cache for query results
// NOTE(review): these switches are mutable statics; presumably intended as
// constants (could be final) — confirm before changing
private static boolean NOQUERYFROMCACHE = Boolean.getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOQUERYFROMCACHE");
/**
 * Serializes a string to the bytes stored in the BDATA column: UTF-8,
 * GZIP-compressed at BEST_SPEED unless the NOGZIP switch is set.
 */
private static byte[] asBytes(String data) {
    byte[] utf8;
    try {
        utf8 = data.getBytes("UTF-8");
    } catch (UnsupportedEncodingException ex) {
        LOG.error("UTF-8 not supported??", ex);
        throw new DocumentStoreException(ex);
    }
    if (NOGZIP) {
        return utf8;
    }
    try {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream(data.length());
        GZIPOutputStream gzip = new GZIPOutputStream(buffer) {
            {
                // TODO: make this configurable
                this.def.setLevel(Deflater.BEST_SPEED);
            }
        };
        gzip.write(utf8);
        gzip.close();
        return buffer.toByteArray();
    } catch (IOException ex) {
        LOG.error("Error while gzipping contents", ex);
        throw new DocumentStoreException(ex);
    }
}
/**
 * Reads one row by id. When the caller supplies a previous MODCOUNT
 * ({@code lastmodcount != -1}) and the database supports CASE in SELECT,
 * a CASE expression suppresses the DATA/BDATA payload for an unchanged row,
 * so callers can reuse their cached copy.
 * <p>
 * Fix: the two branches were inverted — the plain statement was prepared
 * when the CASE optimization should apply, and vice versa (the comments were
 * also attached to the wrong branches). The parameter binding has been
 * swapped to match.
 *
 * @return the row, or {@code null} when it does not exist (or the id is
 *         rejected as invalid by the database)
 */
@CheckForNull
private RDBRow dbRead(Connection connection, String tableName, String id, long lastmodcount) throws SQLException {
    PreparedStatement stmt;
    boolean useCaseStatement = lastmodcount != -1 && allowsCaseInSelect;
    if (useCaseStatement) {
        // the case statement causes the actual row data not to be
        // sent in case we already have it
        stmt = connection
                .prepareStatement("select MODIFIED, MODCOUNT, HASBINARY, case MODCOUNT when ? then null else DATA end as DATA, "
                        + "case MODCOUNT when ? then null else BDATA end as BDATA from " + tableName + " where ID = ?");
    } else {
        // either we don't have a previous version of the document
        // or the database does not support CASE in SELECT
        stmt = connection.prepareStatement("select MODIFIED, MODCOUNT, HASBINARY, DATA, BDATA from " + tableName
                + " where ID = ?");
    }
    try {
        if (useCaseStatement) {
            stmt.setLong(1, lastmodcount);
            stmt.setLong(2, lastmodcount);
            stmt.setString(3, id);
        }
        else {
            stmt.setString(1, id);
        }
        ResultSet rs = stmt.executeQuery();
        if (rs.next()) {
            long modified = rs.getLong(1);
            long modcount = rs.getLong(2);
            long hasBinary = rs.getLong(3);
            String data = rs.getString(4);
            byte[] bdata = rs.getBytes(5);
            return new RDBRow(id, hasBinary == 1, modified, modcount, data, bdata);
        } else {
            return null;
        }
    } catch (SQLException ex) {
        LOG.error("attempting to read " + id + " (id length is " + id.length() + ")", ex);
        // DB2 throws an SQLException for invalid keys; handle this more
        // gracefully
        if ("22001".equals(ex.getSQLState())) {
            connection.rollback();
            return null;
        } else {
            throw (ex);
        }
    } finally {
        stmt.close();
    }
}
/**
 * Executes a range query between {@code minId} and {@code maxId}
 * (exclusive), optionally filtered by MODIFIED >= startValue or by the
 * has-binary flag, ordered by ID and limited via the dialect-appropriate
 * LIMIT / FETCH FIRST clause.
 */
private List<RDBRow> dbQuery(Connection connection, String tableName, String minId, String maxId, String indexedProperty,
        long startValue, int limit) throws SQLException {
    String t = "select ID, MODIFIED, MODCOUNT, HASBINARY, DATA, BDATA from " + tableName + " where ID > ? and ID < ?";
    if (indexedProperty != null) {
        if (MODIFIED.equals(indexedProperty)) {
            t += " and MODIFIED >= ?";
        } else if (NodeDocument.HAS_BINARY_FLAG.equals(indexedProperty)) {
            // only "has binary" queries are supported for this flag
            if (startValue != NodeDocument.HAS_BINARY_VAL) {
                throw new DocumentStoreException("unsupported value for property " + NodeDocument.HAS_BINARY_FLAG);
            }
            t += " and HASBINARY = 1";
        }
    }
    t += " order by ID";
    if (limit != Integer.MAX_VALUE) {
        // MySQL uses LIMIT, others use the SQL standard FETCH FIRST
        t += this.needsLimit ? (" LIMIT " + limit) : (" FETCH FIRST " + limit + " ROWS ONLY");
    }
    PreparedStatement stmt = connection.prepareStatement(t);
    List<RDBRow> result = new ArrayList<RDBRow>();
    try {
        int si = 1;
        stmt.setString(si++, minId);
        stmt.setString(si++, maxId);
        if (MODIFIED.equals(indexedProperty)) {
            stmt.setLong(si++, startValue);
        }
        if (limit != Integer.MAX_VALUE) {
            stmt.setFetchSize(limit);
        }
        ResultSet rs = stmt.executeQuery();
        while (rs.next() && result.size() < limit) {
            String id = rs.getString(1);
            // defensive check: a row outside the requested range indicates a
            // collation mismatch between Java and the database
            if (id.compareTo(minId) < 0 || id.compareTo(maxId) > 0) {
                throw new DocumentStoreException("unexpected query result: '" + minId + "' < '" + id + "' < '" + maxId + "' - broken DB collation?");
            }
            long modified = rs.getLong(2);
            long modcount = rs.getLong(3);
            long hasBinary = rs.getLong(4);
            String data = rs.getString(5);
            byte[] bdata = rs.getBytes(6);
            result.add(new RDBRow(id, hasBinary == 1, modified, modcount, data, bdata));
        }
    } finally {
        stmt.close();
    }
    return result;
}
/**
 * Rewrites a full document row, conditionally on MODCOUNT when
 * {@code oldmodcount} is given. Data that fits in the DATA column is stored
 * as a string; otherwise the literal {@code "blob"} marker goes into DATA
 * and the (possibly compressed) payload into BDATA.
 *
 * @return {@code true} when exactly one row was updated
 */
private boolean dbUpdate(Connection connection, String tableName, String id, Long modified, Boolean hasBinary, Long modcount, Long cmodcount, Long oldmodcount,
        String data) throws SQLException {
    String t = "update " + tableName + " set MODIFIED = ?, HASBINARY = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, BDATA = ? where ID = ?";
    if (oldmodcount != null) {
        // optimistic concurrency: only update the expected version
        t += " and MODCOUNT = ?";
    }
    PreparedStatement stmt = connection.prepareStatement(t);
    try {
        int si = 1;
        stmt.setObject(si++, modified, Types.BIGINT);
        stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
        stmt.setObject(si++, modcount, Types.BIGINT);
        stmt.setObject(si++, cmodcount == null ? 0 : cmodcount, Types.BIGINT);
        stmt.setObject(si++, data.length(), Types.BIGINT);
        if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
            stmt.setString(si++, data);
            stmt.setBinaryStream(si++, null, 0);
        } else {
            // payload too large for DATA: store marker + binary blob instead
            stmt.setString(si++, "\"blob\"");
            byte[] bytes = asBytes(data);
            stmt.setBytes(si++, bytes);
        }
        stmt.setString(si++, id);
        if (oldmodcount != null) {
            stmt.setObject(si++, oldmodcount, Types.BIGINT);
        }
        int result = stmt.executeUpdate();
        if (result != 1) {
            LOG.debug("DB update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
        }
        return result == 1;
    } finally {
        stmt.close();
    }
}
/**
 * Appends a serialized update (prefixed with a comma) to the DATA column of
 * one row, conditionally on MODCOUNT when {@code oldmodcount} is given.
 * Uses CONCAT on databases that need it (MySQL), the standard || operator
 * otherwise.
 *
 * @return {@code true} when exactly one row was updated
 */
private boolean dbAppendingUpdate(Connection connection, String tableName, String id, Long modified, Boolean hasBinary, Long modcount, Long cmodcount, Long oldmodcount,
        String appendData) throws SQLException {
    StringBuilder t = new StringBuilder();
    t.append("update " + tableName + " set MODIFIED = GREATEST(MODIFIED, ?), HASBINARY = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = DSIZE + ?, ");
    t.append(this.needsConcat ? "DATA = CONCAT(DATA, ?) " : "DATA = DATA || CAST(? AS varchar(" + this.dataLimitInOctets
            + ")) ");
    t.append("where ID = ?");
    if (oldmodcount != null) {
        // optimistic concurrency: only update the expected version
        t.append(" and MODCOUNT = ?");
    }
    PreparedStatement stmt = connection.prepareStatement(t.toString());
    try {
        int si = 1;
        stmt.setObject(si++, modified, Types.BIGINT);
        stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
        stmt.setObject(si++, modcount, Types.BIGINT);
        stmt.setObject(si++, cmodcount == null ? 0 : cmodcount, Types.BIGINT);
        // +1 accounts for the comma separator prepended below
        stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
        stmt.setString(si++, "," + appendData);
        stmt.setString(si++, id);
        if (oldmodcount != null) {
            stmt.setObject(si++, oldmodcount, Types.BIGINT);
        }
        int result = stmt.executeUpdate();
        if (result != 1) {
            LOG.debug("DB append update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
        }
        return result == 1;
    }
    finally {
        stmt.close();
    }
}
/**
 * Appends the same serialized update to many rows in one statement
 * ({@code where ID in (...)}), bumping MODCOUNT by one per row.
 *
 * @return {@code true} only when every given id was updated
 */
private boolean dbBatchedAppendingUpdate(Connection connection, String tableName, List<String> ids, Long modified, String appendData) throws SQLException {
    StringBuilder t = new StringBuilder();
    t.append("update " + tableName + " set MODIFIED = GREATEST(MODIFIED, ?), MODCOUNT = MODCOUNT + 1, DSIZE = DSIZE + ?, ");
    t.append(this.needsConcat ? "DATA = CONCAT(DATA, ?) " : "DATA = DATA || CAST(? AS varchar(" + this.dataLimitInOctets
            + ")) ");
    t.append("where ID in (");
    // one placeholder per id
    for (int i = 0; i < ids.size(); i++) {
        if (i != 0) {
            t.append(',');
        }
        t.append('?');
    }
    t.append(")");
    PreparedStatement stmt = connection.prepareStatement(t.toString());
    try {
        int si = 1;
        stmt.setObject(si++, modified, Types.BIGINT);
        // +1 accounts for the comma separator prepended below
        stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
        stmt.setString(si++, "," + appendData);
        for (String id : ids) {
            stmt.setString(si++, id);
        }
        int result = stmt.executeUpdate();
        if (result != ids.size()) {
            LOG.debug("DB update failed: only " + result + " of " + ids.size() + " updated. Table: " + tableName + ", IDs:" + ids);
        }
        return result == ids.size();
    }
    finally {
        stmt.close();
    }
}
/**
 * Inserts a single document row. Data that fits in the DATA column is
 * stored as a string; otherwise the literal {@code "blob"} marker goes into
 * DATA and the (possibly compressed) payload into BDATA.
 *
 * @return {@code true} when exactly one row was inserted
 */
private boolean dbInsert(Connection connection, String tableName, String id, Long modified, Boolean hasBinary, Long modcount,
        Long cmodcount, String data) throws SQLException {
    PreparedStatement stmt = connection.prepareStatement("insert into " + tableName
            + "(ID, MODIFIED, HASBINARY, MODCOUNT, CMODCOUNT, DSIZE, DATA, BDATA) values (?, ?, ?, ?, ?, ?, ?, ?)");
    try {
        int si = 1;
        stmt.setString(si++, id);
        stmt.setObject(si++, modified, Types.BIGINT);
        stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
        stmt.setObject(si++, modcount, Types.BIGINT);
        stmt.setObject(si++, cmodcount == null ? 0 : cmodcount, Types.BIGINT);
        stmt.setObject(si++, data.length(), Types.BIGINT);
        if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
            stmt.setString(si++, data);
            stmt.setBinaryStream(si++, null, 0);
        } else {
            // payload too large for DATA: store marker + binary blob instead
            stmt.setString(si++, "\"blob\"");
            byte[] bytes = asBytes(data);
            stmt.setBytes(si++, bytes);
        }
        int result = stmt.executeUpdate();
        if (result != 1) {
            LOG.debug("DB insert failed for " + tableName + "/" + id);
        }
        return result == 1;
    } finally {
        stmt.close();
    }
}
/**
 * Deletes the given ids with a single statement (equality for one id, an
 * {@code in (...)} clause otherwise); a count mismatch is logged at debug
 * level but not treated as an error.
 */
private void dbDelete(Connection connection, String tableName, List<String> ids) throws SQLException {
    int cnt = ids.size();
    String sql;
    if (cnt == 1) {
        sql = "delete from " + tableName + " where ID=?";
    } else {
        StringBuilder inClause = new StringBuilder();
        for (int i = 0; i < cnt; i++) {
            if (i > 0) {
                inClause.append(',');
            }
            inClause.append('?');
        }
        sql = "delete from " + tableName + " where ID in (" + inClause.toString() + ")";
    }
    PreparedStatement stmt = connection.prepareStatement(sql);
    try {
        for (int i = 0; i < cnt; i++) {
            stmt.setString(i + 1, ids.get(i));
        }
        int result = stmt.executeUpdate();
        if (result != cnt) {
            LOG.debug("DB delete failed for " + tableName + "/" + ids);
        }
    } finally {
        stmt.close();
    }
}
/** No-op for the RDB store: read/write mode switching is not supported. */
@Override
public void setReadWriteMode(String readWriteMode) {
    // ignored
}
// Narrows a NodeDocument to the caller's generic document type; callers use
// this only on the NODES code paths where T is NodeDocument.
@SuppressWarnings("unchecked")
private static <T extends Document> T castAsT(NodeDocument doc) {
    return (T) doc;
}
// Memory Cache
// in-memory cache of NODES documents, keyed by document id
private Cache<CacheValue, NodeDocument> nodesCache;
private CacheStats cacheStats;
// 64 lock stripes guarding per-id consistency between DB and nodesCache
private final Striped<Lock> locks = Striped.lock(64);
/** Acquires and returns the stripe lock for the given key; caller unlocks. */
private Lock getAndLock(String key) {
    Lock lock = locks.get(key);
    lock.lock();
    return lock;
}
/** Maps the NULL sentinel document back to {@code null}. */
@CheckForNull
private static NodeDocument unwrap(@Nonnull NodeDocument doc) {
    if (doc == NodeDocument.NULL) {
        return null;
    }
    return doc;
}
/** Maps {@code null} to the NULL sentinel document for cache storage. */
@Nonnull
private static NodeDocument wrap(@CheckForNull NodeDocument doc) {
    if (doc == null) {
        return NodeDocument.NULL;
    }
    return doc;
}
/**
 * Adds a document to the {@link #nodesCache} iff there is no document in
 * the cache with the document key. This method does not acquire a lock from
 * {@link #locks}! The caller must ensure a lock is held for the given
 * document.
 *
 * @param doc
 *            the document to add to the cache.
 * @return either the given <code>doc</code> or the document already present
 *         in the cache.
 */
@Nonnull
private NodeDocument addToCache(@Nonnull final NodeDocument doc) {
    if (doc == NodeDocument.NULL) {
        throw new IllegalArgumentException("doc must not be NULL document");
    }
    // cached documents must be immutable
    doc.seal();
    // make sure we only cache the document if it wasn't
    // changed and cached by some other thread in the
    // meantime. That is, use get() with a Callable,
    // which is only used when the document isn't there
    try {
        CacheValue key = new StringValue(doc.getId());
        for (;;) {
            NodeDocument cached = nodesCache.get(key, new Callable<NodeDocument>() {
                @Override
                public NodeDocument call() {
                    return doc;
                }
            });
            if (cached != NodeDocument.NULL) {
                return cached;
            } else {
                // a NULL sentinel slipped in; evict it and retry the get()
                nodesCache.invalidate(key);
            }
        }
    } catch (ExecutionException e) {
        // will never happen because call() just returns
        // the already available doc
        throw new IllegalStateException(e);
    }
}
/**
 * Propagates a successful DB update into {@code nodesCache}: installs
 * {@code newDoc} when nothing was cached, replaces the cached entry when it
 * still matches {@code oldDoc}'s mod count, and invalidates it otherwise
 * (a concurrent writer may or may not already include this update, so the
 * cached copy can no longer be trusted).
 * <p>
 * Annotation fixes: the previous {@code @Nonnull} on the {@code void}
 * return was meaningless and has been removed, and {@code oldDoc} is now
 * {@code @CheckForNull} to match the null check below (null means "insert").
 */
private void applyToCache(@CheckForNull final NodeDocument oldDoc, @Nonnull final NodeDocument newDoc) {
    NodeDocument cached = addToCache(newDoc);
    if (cached == newDoc) {
        // successful
        return;
    } else if (oldDoc == null) {
        // this is an insert and some other thread was quicker
        // loading it into the cache -> return now
        return;
    } else {
        CacheValue key = new StringValue(newDoc.getId());
        // this is an update (oldDoc != null)
        if (Objects.equal(cached.getModCount(), oldDoc.getModCount())) {
            nodesCache.put(key, newDoc);
        } else {
            // the cache entry was modified by some other thread in
            // the meantime. the updated cache entry may or may not
            // include this update. we cannot just apply our update
            // on top of the cached entry.
            // therefore we must invalidate the cache entry
            nodesCache.invalidate(key);
        }
    }
}
/**
 * Caches the document under its id lock; documents of collections other
 * than NODES are not cached.
 */
private <T extends Document> void addToCache(Collection<T> collection, T doc) {
    if (collection != Collection.NODES) {
        return;
    }
    Lock lock = getAndLock(doc.getId());
    try {
        addToCache((NodeDocument) doc);
    } finally {
        lock.unlock();
    }
}
/**
 * Converts a query-result row into a document while keeping the cache
 * consistent: a cached copy with an equal-or-newer mod count wins (and is
 * marked up to date); otherwise the freshly parsed document is installed
 * under the id lock, unless an even newer copy appeared meanwhile.
 */
private <T extends Document> T runThroughCache(Collection<T> collection, RDBRow row, long now) {
    if (collection != Collection.NODES) {
        // not in the cache anyway
        return SR.fromRow(collection, row);
    }
    String id = row.getId();
    CacheValue cacheKey = new StringValue(id);
    NodeDocument inCache = nodesCache.getIfPresent(cacheKey);
    Number modCount = row.getModcount();
    if (! NOQUERYFROMCACHE) {
        // do not overwrite document in cache if the
        // existing one in the cache is newer
        if (inCache != null && inCache != NodeDocument.NULL) {
            // check mod count
            Number cachedModCount = inCache.getModCount();
            if (cachedModCount == null) {
                throw new IllegalStateException("Missing " + Document.MOD_COUNT);
            }
            if (modCount.longValue() <= cachedModCount.longValue()) {
                // we can use the cached document
                inCache.markUpToDate(now);
                return (T) inCache;
            }
        }
    }
    // the row is newer than (or missing from) the cache: parse and seal it
    NodeDocument fresh = (NodeDocument) SR.fromRow(collection, row);
    fresh.seal();
    // re-check under the id lock before replacing the cache entry
    Lock lock = getAndLock(id);
    try {
        inCache = nodesCache.getIfPresent(cacheKey);
        if (inCache != null && inCache != NodeDocument.NULL) {
            // check mod count
            Number cachedModCount = inCache.getModCount();
            if (cachedModCount == null) {
                throw new IllegalStateException("Missing " + Document.MOD_COUNT);
            }
            if (modCount.longValue() > cachedModCount.longValue()) {
                nodesCache.put(cacheKey, fresh);
            } else {
                // a newer copy was cached concurrently; prefer it
                fresh = inCache;
            }
        }
        else {
            nodesCache.put(cacheKey, fresh);
        }
    } finally {
        lock.unlock();
    }
    return (T) fresh;
}
/** Obtains a connection from the data source with auto-commit disabled. */
private Connection getConnection() throws SQLException {
    Connection connection = this.ds.getConnection();
    connection.setAutoCommit(false);
    return connection;
}
/**
 * Quietly closes the connection; {@code null} is ignored and close failures
 * are logged at debug level (implements the former "log me" TODO).
 */
private void closeConnection(Connection c) {
    if (c != null) {
        try {
            c.close();
        } catch (SQLException ex) {
            LOG.debug("error while closing connection", ex);
        }
    }
}
/**
 * Returns whether the update sets an entry in the COLLISIONS map (such
 * updates additionally bump COLLISIONSMODCOUNT).
 */
private boolean hasChangesToCollisions(UpdateOp update) {
    for (Entry<Key, Operation> e : checkNotNull(update).getChanges().entrySet()) {
        Operation op = e.getValue();
        boolean setsCollisionEntry = op.type == Operation.Type.SET_MAP_ENTRY
                && NodeDocument.COLLISIONS.equals(e.getKey().getName());
        if (setsCollisionEntry) {
            return true;
        }
    }
    return false;
}
}
| OAK-1941 - remove system-property-based switch to disable cache updates from queries
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1641406 13f79535-47bb-0310-9956-ffa450edef68
| oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java | OAK-1941 - remove system-property-based switch to disable cache updates from queries | <ide><path>ak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
<ide> private static boolean NOGZIP = Boolean.getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOGZIP");
<ide> // Number of documents to insert at once for batch create
<ide> private static int CHUNKSIZE = Integer.getInteger("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.CHUNKSIZE", 64);
<del> // Whether to use cache for query results
<del> private static boolean NOQUERYFROMCACHE = Boolean.getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOQUERYFROMCACHE");
<ide>
<ide> private static byte[] asBytes(String data) {
<ide> byte[] bytes;
<ide> NodeDocument inCache = nodesCache.getIfPresent(cacheKey);
<ide> Number modCount = row.getModcount();
<ide>
<del> if (! NOQUERYFROMCACHE) {
<del> // do not overwrite document in cache if the
<del> // existing one in the cache is newer
<del> if (inCache != null && inCache != NodeDocument.NULL) {
<del> // check mod count
<del> Number cachedModCount = inCache.getModCount();
<del> if (cachedModCount == null) {
<del> throw new IllegalStateException("Missing " + Document.MOD_COUNT);
<del> }
<del> if (modCount.longValue() <= cachedModCount.longValue()) {
<del> // we can use the cached document
<del> inCache.markUpToDate(now);
<del> return (T) inCache;
<del> }
<add> // do not overwrite document in cache if the
<add> // existing one in the cache is newer
<add> if (inCache != null && inCache != NodeDocument.NULL) {
<add> // check mod count
<add> Number cachedModCount = inCache.getModCount();
<add> if (cachedModCount == null) {
<add> throw new IllegalStateException("Missing " + Document.MOD_COUNT);
<add> }
<add> if (modCount.longValue() <= cachedModCount.longValue()) {
<add> // we can use the cached document
<add> inCache.markUpToDate(now);
<add> return (T) inCache;
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 13d183ab4e5deef11ffc7148eb408278c0a97a5f | 0 | apache/kylin,apache/incubator-kylin,apache/kylin,apache/incubator-kylin,apache/kylin,apache/incubator-kylin,apache/kylin,apache/kylin,apache/kylin,apache/kylin,apache/incubator-kylin,apache/incubator-kylin,apache/incubator-kylin | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.rest.controller;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.kylin.common.util.JsonUtil;
import org.apache.kylin.common.util.RandomUtil;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.cuboid.CuboidScheduler;
import org.apache.kylin.cube.cuboid.TreeCuboidScheduler;
import org.apache.kylin.cube.model.CubeBuildTypeEnum;
import org.apache.kylin.cube.model.CubeDesc;
import org.apache.kylin.cube.model.CubeJoinedFlatTableDesc;
import org.apache.kylin.cube.model.HBaseColumnDesc;
import org.apache.kylin.cube.model.HBaseColumnFamilyDesc;
import org.apache.kylin.cube.model.RowKeyColDesc;
import org.apache.kylin.dimension.DimensionEncodingFactory;
import org.apache.kylin.engine.mr.common.CuboidStatsReaderUtil;
import org.apache.kylin.job.JobInstance;
import org.apache.kylin.job.JoinedFlatTable;
import org.apache.kylin.job.exception.JobException;
import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
import org.apache.kylin.metadata.model.ISourceAware;
import org.apache.kylin.metadata.model.MeasureDesc;
import org.apache.kylin.metadata.model.SegmentRange;
import org.apache.kylin.metadata.model.SegmentRange.TSRange;
import org.apache.kylin.metadata.project.ProjectInstance;
import org.apache.kylin.metadata.realization.RealizationStatusEnum;
import org.apache.kylin.metrics.MetricsManager;
import org.apache.kylin.metrics.property.QueryCubePropertyEnum;
import org.apache.kylin.rest.exception.BadRequestException;
import org.apache.kylin.rest.exception.ForbiddenException;
import org.apache.kylin.rest.exception.InternalErrorException;
import org.apache.kylin.rest.exception.NotFoundException;
import org.apache.kylin.rest.exception.TooManyRequestException;
import org.apache.kylin.rest.msg.Message;
import org.apache.kylin.rest.msg.MsgPicker;
import org.apache.kylin.rest.request.CubeRequest;
import org.apache.kylin.rest.request.JobBuildRequest;
import org.apache.kylin.rest.request.JobBuildRequest2;
import org.apache.kylin.rest.request.JobOptimizeRequest;
import org.apache.kylin.rest.request.LookupSnapshotBuildRequest;
import org.apache.kylin.rest.request.SQLRequest;
import org.apache.kylin.rest.response.CubeInstanceResponse;
import org.apache.kylin.rest.response.CuboidTreeResponse;
import org.apache.kylin.rest.response.EnvelopeResponse;
import org.apache.kylin.rest.response.GeneralResponse;
import org.apache.kylin.rest.response.HBaseResponse;
import org.apache.kylin.rest.response.ResponseCode;
import org.apache.kylin.rest.service.CubeService;
import org.apache.kylin.rest.service.JobService;
import org.apache.kylin.rest.service.ProjectService;
import org.apache.kylin.rest.service.QueryService;
import org.apache.kylin.rest.util.ValidateUtil;
import org.apache.kylin.source.kafka.util.KafkaClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* CubeController is defined as Restful API entrance for UI.
*/
@Controller
@RequestMapping(value = "/cubes")
public class CubeController extends BasicController {
private static final Logger logger = LoggerFactory.getLogger(CubeController.class);
@Autowired
@Qualifier("cubeMgmtService")
private CubeService cubeService;
@Autowired
@Qualifier("jobService")
private JobService jobService;
@Autowired
@Qualifier("projectService")
private ProjectService projectService;
@Autowired
@Qualifier("queryService")
private QueryService queryService;
/**
 * Checks whether the given cube name is valid/available for use.
 *
 * @param cubeName candidate cube name
 * @return envelope whose payload is {@code true} when the name passes validation
 */
@RequestMapping(value = "/validate/{cubeName}", method = RequestMethod.GET, produces = { "application/json" })
@ResponseBody
public EnvelopeResponse<Boolean> validateModelName(@PathVariable String cubeName) {
    boolean valid = cubeService.isCubeNameVaildate(cubeName);
    return new EnvelopeResponse<>(ResponseCode.CODE_SUCCESS, valid, "");
}
/**
 * Lists cubes, optionally filtered by cube/model/project name, paged by offset and limit.
 *
 * @param cubeName    optional cube-name filter
 * @param modelName   optional model-name filter
 * @param projectName optional project filter
 * @param limit       max number of entries to return; null means "all remaining"
 * @param offset      zero-based start index; null means 0
 * @return the requested page of cube responses (empty when offset is past the end)
 */
@RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public List<CubeInstanceResponse> getCubes(@RequestParam(value = "cubeName", required = false) String cubeName,
        @RequestParam(value = "modelName", required = false) String modelName,
        @RequestParam(value = "projectName", required = false) String projectName,
        @RequestParam(value = "limit", required = false) Integer limit,
        @RequestParam(value = "offset", required = false) Integer offset) {
    List<CubeInstance> cubes = cubeService.listAllCubes(cubeName, projectName, modelName, false);
    List<CubeInstanceResponse> responses = Lists.newArrayListWithExpectedSize(cubes.size());
    for (CubeInstance cube : cubes) {
        try {
            responses.add(cubeService.createCubeInstanceResponse(cube));
        } catch (Exception e) {
            // one broken cube must not hide the rest of the list
            logger.error("Error creating cube instance response, skipping.", e);
        }
    }
    int from = (offset == null) ? 0 : offset;
    int pageSize = (limit == null) ? responses.size() : limit;
    if (from >= responses.size()) {
        return Collections.emptyList();
    }
    // guard against integer overflow of from + pageSize for very large limits
    int to = (responses.size() - from < pageSize) ? responses.size() : from + pageSize;
    return responses.subList(from, to);
}
/**
 * Returns the dimension encodings supported by this server, keyed by encoding name
 * with their version numbers. Falls back to an empty map if the lookup fails.
 */
@RequestMapping(value = "validEncodings", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public Map<String, Integer> getValidEncodings() {
    try {
        return DimensionEncodingFactory.getValidEncodings();
    } catch (Exception e) {
        logger.error("Error when getting valid encodings", e);
        return Maps.newHashMap();
    }
}
/**
 * Fetches a single cube instance by name; 404s via {@code checkCubeExists} when absent.
 *
 * @param cubeName target cube
 * @return the cube instance
 */
@RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public CubeInstance getCube(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    return cubeService.getCubeManager().getCube(cubeName);
}
/**
 * Returns the flat-table SELECT statement that would feed a full build of this cube.
 *
 * @param cubeName cube name
 * @return response with a single "sql" property holding the generated statement
 */
@RequestMapping(value = "/{cubeName}/sql", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public GeneralResponse getSql(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    IJoinedFlatTableDesc flatTableDesc = new CubeJoinedFlatTableDesc(cube.getDescriptor(), true);
    GeneralResponse response = new GeneralResponse();
    response.setProperty("sql", JoinedFlatTable.generateSelectDataStatement(flatTableDesc));
    return response;
}
/**
 * Returns the flat-table SELECT statement scoped to one segment of the cube.
 *
 * @param cubeName    cube name
 * @param segmentName segment name within the cube
 * @return response with a single "sql" property holding the generated statement
 */
@RequestMapping(value = "/{cubeName}/segs/{segmentName}/sql", method = { RequestMethod.GET }, produces = {
        "application/json" })
@ResponseBody
public GeneralResponse getSql(@PathVariable String cubeName, @PathVariable String segmentName) {
    checkCubeExists(cubeName);
    CubeSegment segment = cubeService.getCubeManager().getCube(cubeName).getSegment(segmentName, null);
    if (segment == null) {
        throw new NotFoundException("Cannot find segment " + segmentName);
    }
    IJoinedFlatTableDesc flatTableDesc = new CubeJoinedFlatTableDesc(segment, true);
    GeneralResponse response = new GeneralResponse();
    response.setProperty("sql", JoinedFlatTable.generateSelectDataStatement(flatTableDesc));
    return response;
}
/**
 * Replaces the notification e-mail list of a cube.
 *
 * @param cubeName   cube to update
 * @param notifyList new list of notification addresses
 */
@RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = {
        "application/json" })
@ResponseBody
public void updateNotifyList(@PathVariable String cubeName, @RequestBody List<String> notifyList) {
    checkCubeExists(cubeName);
    try {
        CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
        cubeService.updateCubeNotifyList(cube, notifyList);
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
}
/**
 * Updates the cost (weight used by query routing) of a cube.
 *
 * @param cubeName cube to update
 * @param cost     new cost value
 * @return the updated cube instance
 */
@RequestMapping(value = "/{cubeName}/cost", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance updateCubeCost(@PathVariable String cubeName, @RequestParam(value = "cost") int cost) {
    checkCubeExists(cubeName);
    CubeInstance target = cubeService.getCubeManager().getCube(cubeName);
    try {
        return cubeService.updateCubeCost(target, cost);
    } catch (Exception e) {
        String msg = "Failed to update cube cost: " + cubeName + " : " + cost;
        logger.error(msg, e);
        throw new InternalErrorException(msg + " Caused by: " + e.getMessage(), e);
    }
}
/**
 * Force rebuild of one lookup table's snapshot for a single cube segment.
 *
 * @param cubeName    cube the segment belongs to
 * @param segmentName segment whose snapshot will be rebuilt
 * @param lookupTable the lookup table to refresh
 * @return the updated cube instance
 */
@RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = {
        RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance rebuildLookupSnapshot(@PathVariable String cubeName, @PathVariable String segmentName,
        @RequestParam(value = "lookupTable") String lookupTable) {
    // Fail fast with a not-found error for an unknown cube instead of passing a
    // null CubeInstance down into the service (consistent with other endpoints).
    checkCubeExists(cubeName);
    try {
        final CubeManager cubeMgr = cubeService.getCubeManager();
        final CubeInstance cube = cubeMgr.getCube(cubeName);
        return cubeService.rebuildLookupSnapshot(cube, segmentName, lookupTable);
    } catch (IOException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
}
/**
 * Submits an asynchronous job that rebuilds a lookup table's snapshot across
 * the given segments of a cube.
 *
 * @param cubeName cube to operate on
 * @param request  lookup table name plus target segment IDs
 * @return the submitted job instance
 */
@RequestMapping(value = "/{cubeName}/refresh_lookup", method = { RequestMethod.PUT }, produces = {
        "application/json" })
@ResponseBody
public JobInstance rebuildLookupSnapshot(@PathVariable String cubeName,
        @RequestBody LookupSnapshotBuildRequest request) {
    // Fail fast with a not-found error for an unknown cube instead of passing a
    // null CubeInstance into job submission (consistent with other endpoints).
    checkCubeExists(cubeName);
    try {
        final CubeManager cubeMgr = cubeService.getCubeManager();
        final CubeInstance cube = cubeMgr.getCube(cubeName);
        String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
        return jobService.submitLookupSnapshotJob(cube, request.getLookupTableName(), request.getSegmentIDs(),
                submitter);
    } catch (IOException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
}
/**
 * Deletes one segment of a cube.
 *
 * @param cubeName    cube the segment belongs to
 * @param segmentName segment to delete
 * @return the updated cube instance
 */
@RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = {
        "application/json" })
@ResponseBody
public CubeInstance deleteSegment(@PathVariable String cubeName, @PathVariable String segmentName) {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    if (cube.getSegment(segmentName, null) == null) {
        throw new NotFoundException("Cannot find segment '" + segmentName + "'");
    }
    try {
        return cubeService.deleteSegment(cube, segmentName);
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
}
/**
 * Build/Rebuild a cube segment.
 * <p>
 * Thin alias of {@link #rebuild(String, JobBuildRequest)} so both the
 * {@code /build} and {@code /rebuild} endpoints stay available.
 *
 * @param cubeName name of the cube to build
 * @param req      build type plus the time range of the new segment
 * @return the submitted job instance
 */
@RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance build(@PathVariable String cubeName, @RequestBody JobBuildRequest req) {
    return rebuild(cubeName, req);
}
/**
 * Build/Rebuild a cube segment over a time range.
 *
 * @param cubeName name of the cube to build
 * @param req      build type plus the time range of the new segment
 * @return the submitted job instance
 */
@RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest req) {
    // forceMergeEmptySegment is treated as an alias of force here
    return buildInternal(cubeName, new TSRange(req.getStartTime(), req.getEndTime()), null, null, null,
            req.getBuildType(), req.isForce() || req.isForceMergeEmptySegment());
}
/**
 * Build/Rebuild a cube segment by source offset (streaming / Kafka sources).
 * Verifies that the Kafka client is on the classpath before delegating, since
 * offset-based builds require it.
 *
 * @param cubeName name of the cube to build
 * @param req      build type plus source/partition offset ranges
 * @return the submitted job instance
 */
@RequestMapping(value = "/{cubeName}/build2", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance build2(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req) {
    try {
        // Class.forName either returns a class or throws — no null check needed.
        Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer");
    } catch (ClassNotFoundException e) {
        // keep the cause so the classpath problem is diagnosable from the log/response
        throw new InternalErrorException("Could not find Kafka dependency", e);
    }
    return rebuild2(cubeName, req);
}
/**
 * Build/Rebuild a cube segment by source offset (streaming sources).
 *
 * @param cubeName name of the cube to build
 * @param req      build type plus source/partition offset ranges
 * @return the submitted job instance
 */
@RequestMapping(value = "/{cubeName}/rebuild2", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance rebuild2(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req) {
    return buildInternal(cubeName, null, new SegmentRange(req.getSourceOffsetStart(), req.getSourceOffsetEnd()),
            req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(),
            req.isForce());
}
/**
 * Shared submission path for both time-range and offset-range builds.
 * Exactly one of {@code tsRange} / {@code segRange} is expected to be non-null
 * depending on the caller (rebuild vs rebuild2).
 *
 * @param cubeName                   cube to build
 * @param tsRange                    time range for batch builds (may be null)
 * @param segRange                   offset range for streaming builds (may be null)
 * @param sourcePartitionOffsetStart per-partition start offsets (streaming only)
 * @param sourcePartitionOffsetEnd   per-partition end offsets (streaming only)
 * @param buildType                  name of a {@link CubeBuildTypeEnum} constant
 * @param force                      whether to force the build
 * @return the submitted job instance
 */
private JobInstance buildInternal(String cubeName, TSRange tsRange, SegmentRange segRange, //
        Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd,
        String buildType, boolean force) {
    try {
        String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
        CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
        checkBuildingSegment(cube);
        return jobService.submitJob(cube, tsRange, segRange, sourcePartitionOffsetStart, sourcePartitionOffsetEnd,
                CubeBuildTypeEnum.valueOf(buildType), force, submitter);
    } catch (Throwable e) { // deliberately broad: converts any failure into an internal-error response
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
/**
 * Submits a cuboid-optimize job set for the cube.
 *
 * @param cubeName           cube to optimize
 * @param jobOptimizeRequest the recommended cuboid set to materialize
 * @return JobInstance of the CheckpointExecutable that tracks the optimize jobs
 */
@RequestMapping(value = "/{cubeName}/optimize", method = { RequestMethod.PUT })
@ResponseBody
public JobInstance optimize(@PathVariable String cubeName, @RequestBody JobOptimizeRequest jobOptimizeRequest) {
    try {
        String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
        // validate existence before dereferencing the cube instance
        checkCubeExists(cubeName);
        CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
        logger.info("cuboid recommend: {}", jobOptimizeRequest.getCuboidsRecommend());
        return jobService.submitOptimizeJob(cube, jobOptimizeRequest.getCuboidsRecommend(), submitter).getFirst();
    } catch (BadRequestException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    } catch (JobException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new BadRequestException(e.getLocalizedMessage());
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
}
/**
 * Resumes an interrupted optimize job for a single cube segment.
 *
 * @param cubeName  cube the segment belongs to
 * @param segmentID id of the segment whose optimize job should be recovered
 * @return the re-submitted job instance
 */
@RequestMapping(value = "/{cubeName}/recover_segment_optimize/{segmentID}", method = { RequestMethod.PUT })
@ResponseBody
public JobInstance recoverSegmentOptimize(@PathVariable String cubeName, @PathVariable String segmentID) {
    try {
        String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
        CubeSegment segment = jobService.getCubeManager().getCube(cubeName).getSegmentById(segmentID);
        if (segment == null) {
            throw new NotFoundException("Cannot find segment '" + segmentID + "'");
        }
        return jobService.submitRecoverSegmentOptimizeJob(segment, submitter);
    } catch (JobException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new BadRequestException(e.getLocalizedMessage());
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
}
/**
 * Disables a cube so it stops serving queries.
 *
 * @param cubeName cube to disable
 * @return the updated cube instance
 */
@RequestMapping(value = "/{cubeName}/disable", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance disableCube(@PathVariable String cubeName) {
    // Existence check kept outside the try so an unknown cube surfaces as a
    // not-found error rather than a wrapped internal error (matches getCube/deleteCube).
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    try {
        return cubeService.disableCube(cube);
    } catch (Exception e) {
        String message = "Failed to disable cube: " + cubeName;
        logger.error(message, e);
        throw new InternalErrorException(message + " Caused by: " + e.getMessage(), e);
    }
}
/**
 * Purges all segments of a cube (the cube definition itself is kept).
 *
 * @param cubeName cube to purge
 * @return the updated cube instance
 */
@RequestMapping(value = "/{cubeName}/purge", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance purgeCube(@PathVariable String cubeName) {
    // Existence check kept outside the try so an unknown cube surfaces as a
    // not-found error rather than a wrapped internal error (matches getCube/deleteCube).
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    try {
        return cubeService.purgeCube(cube);
    } catch (Exception e) {
        String message = "Failed to purge cube: " + cubeName;
        logger.error(message, e);
        throw new InternalErrorException(message + " Caused by: " + e.getMessage(), e);
    }
}
/**
 * Clones a cube (descriptor + instance) under a new name.
 * The clone must stay in the same project as the source cube (KYLIN-1925),
 * the source must not be desc-broken, and the new name must be alphanumeric/underscore.
 *
 * @param cubeName    existing cube to clone
 * @param cubeRequest carries the new cube name and the target project
 * @return the newly created cube instance
 */
@RequestMapping(value = "/{cubeName}/clone", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance cloneCube(@PathVariable String cubeName, @RequestBody CubeRequest cubeRequest) {
    String newCubeName = cubeRequest.getCubeName();
    String projectName = cubeRequest.getProject();
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
        throw new BadRequestException("Broken cube can't be cloned");
    }
    if (!ValidateUtil.isAlphanumericUnderscore(newCubeName)) {
        throw new BadRequestException("Invalid Cube name, only letters, numbers and underscore supported.");
    }
    ProjectInstance project = cubeService.getProjectManager().getProject(projectName);
    if (project == null) {
        throw new NotFoundException("Project " + projectName + " doesn't exist");
    }
    // KYLIN-1925, forbid cloning cross projects
    if (!project.getName().equals(cube.getProject())) {
        throw new BadRequestException("Cloning cubes across projects is not supported.");
    }
    CubeDesc cubeDesc = cube.getDescriptor();
    // deep-ish copy of the descriptor; renamed before persisting
    CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
    newCubeDesc.setName(newCubeName);
    CubeInstance newCube;
    try {
        newCube = cubeService.createCubeAndDesc(project, newCubeDesc);
        //reload to avoid shallow clone
        cubeService.getCubeDescManager().reloadCubeDescLocal(newCubeName);
    } catch (IOException e) {
        throw new InternalErrorException("Failed to clone cube ", e);
    }
    return newCube;
}
/**
 * Enables a cube so it starts serving queries, after the service-side
 * enable-condition check passes.
 *
 * @param cubeName cube to enable
 * @return the updated cube instance
 */
@RequestMapping(value = "/{cubeName}/enable", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance enableCube(@PathVariable String cubeName) {
    // Existence check kept outside the try so an unknown cube surfaces as a
    // not-found error rather than a wrapped internal error (matches getCube/deleteCube).
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    try {
        cubeService.checkEnableCubeCondition(cube);
        return cubeService.enableCube(cube);
    } catch (Exception e) {
        String message = "Failed to enable cube: " + cubeName;
        logger.error(message, e);
        throw new InternalErrorException(message + " Caused by: " + e.getMessage(), e);
    }
}
/**
 * Drops a cube entirely (definition and data).
 *
 * @param cubeName cube to delete
 */
@RequestMapping(value = "/{cubeName}", method = { RequestMethod.DELETE }, produces = { "application/json" })
@ResponseBody
public void deleteCube(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    CubeInstance doomed = cubeService.getCubeManager().getCube(cubeName);
    try {
        cubeService.deleteCube(doomed);
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException("Failed to delete cube. " + " Caused by: " + e.getMessage(), e);
    }
}
/**
 * Creates a new cube (descriptor + instance) from the posted JSON.
 * Validates the cube name format and the measure/column-family mapping
 * before persisting; falls back to the default project when none is given.
 *
 * @param cubeRequest request carrying the serialized CubeDesc and target project
 * @return the same request object, with uuid set and successful=true on success
 */
@RequestMapping(value = "", method = { RequestMethod.POST }, produces = { "application/json" })
@ResponseBody
public CubeRequest saveCubeDesc(@RequestBody CubeRequest cubeRequest) {
    CubeDesc desc = deserializeCubeDesc(cubeRequest);
    if (desc == null) {
        cubeRequest.setMessage("CubeDesc is null.");
        return cubeRequest;
    }
    String name = desc.getName();
    if (StringUtils.isEmpty(name)) {
        logger.info("Cube name should not be empty.");
        throw new BadRequestException("Cube name should not be empty.");
    }
    if (!ValidateUtil.isAlphanumericUnderscore(name)) {
        throw new BadRequestException("Invalid Cube name, only letters, numbers and underscore supported.");
    }
    validateColumnFamily(desc);
    try {
        desc.setUuid(RandomUtil.randomUUID().toString());
        // resolve target project, defaulting when the request omits it
        String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
                : cubeRequest.getProject();
        ProjectInstance project = cubeService.getProjectManager().getProject(projectName);
        if (project == null) {
            throw new NotFoundException("Project " + projectName + " doesn't exist");
        }
        cubeService.createCubeAndDesc(project, desc);
    } catch (Exception e) {
        logger.error("Failed to deal with the request.", e);
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
    cubeRequest.setUuid(desc.getUuid());
    cubeRequest.setSuccessful(true);
    return cubeRequest;
}
/**
 * Validates that the HBase column-family mapping covers exactly the measures
 * declared in the cube desc. Editing a cube via raw JSON can leave the mapping
 * out of sync with the measure list (see MTHDP-5091).
 *
 * @param cubeDesc descriptor to validate
 * @throws BadRequestException when a measure is unmapped or the counts disagree
 */
private void validateColumnFamily(CubeDesc cubeDesc) {
    Set<String> mappedMeasures = Sets.newHashSet();
    for (HBaseColumnFamilyDesc family : cubeDesc.getHbaseMapping().getColumnFamily()) {
        for (HBaseColumnDesc column : family.getColumns()) {
            Collections.addAll(mappedMeasures, column.getMeasureRefs());
        }
    }
    for (MeasureDesc measure : cubeDesc.getMeasures()) {
        if (!mappedMeasures.contains(measure.getName())) {
            throw new BadRequestException("column family lack measure:" + measure.getName());
        }
    }
    if (cubeDesc.getMeasures().size() != mappedMeasures.size()) {
        throw new BadRequestException(
                "the number of input measure and the number of measure defined in cubedesc are not consistent");
    }
}
/**
 * Updates an existing cube descriptor from the posted JSON.
 * Rejects renames, rejects incompatible changes while segments exist, and
 * re-validates the measure/column-family mapping. On failure the request
 * object is returned with successful=false and an explanatory message.
 *
 * @param cubeRequest request carrying the serialized CubeDesc and target project
 * @return the request object, with the updated desc JSON and successful=true on success
 * @throws JsonProcessingException when re-serializing the updated descriptor fails
 */
@RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeRequest updateCubeDesc(@RequestBody CubeRequest cubeRequest) throws JsonProcessingException {
    CubeDesc desc = deserializeCubeDesc(cubeRequest);
    if (desc == null) {
        return cubeRequest;
    }
    String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
            : cubeRequest.getProject();
    try {
        CubeInstance cube = cubeService.getCubeManager().getCube(cubeRequest.getCubeName());
        if (cube == null) {
            String error = "The cube named " + cubeRequest.getCubeName() + " does not exist ";
            updateRequest(cubeRequest, false, error);
            return cubeRequest;
        }
        validateColumnFamily(desc);
        //cube renaming is not allowed
        if (!cube.getDescriptor().getName().equalsIgnoreCase(desc.getName())) {
            String error = "Cube Desc renaming is not allowed: desc.getName(): " + desc.getName()
                    + ", cubeRequest.getCubeName(): " + cubeRequest.getCubeName();
            updateRequest(cubeRequest, false, error);
            return cubeRequest;
        }
        // once segments are built, only compatible desc changes are accepted
        if (cube.getSegments().size() != 0 && !cube.getDescriptor().consistentWith(desc)) {
            String error = "CubeDesc " + desc.getName()
                    + " is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.";
            updateRequest(cubeRequest, false, error);
            return cubeRequest;
        }
        desc = cubeService.updateCubeAndDesc(cube, desc, projectName, true);
    } catch (AccessDeniedException accessDeniedException) {
        throw new ForbiddenException("You don't have right to update this cube.");
    } catch (Exception e) {
        logger.error("Failed to deal with the request:" + e.getLocalizedMessage(), e);
        throw new InternalErrorException("Failed to deal with the request: " + e.getLocalizedMessage());
    }
    if (desc.isBroken()) {
        updateRequest(cubeRequest, false, desc.getErrorsAsString());
        return cubeRequest;
    }
    String descData = JsonUtil.writeValueAsIndentString(desc);
    cubeRequest.setCubeDescData(descData);
    cubeRequest.setSuccessful(true);
    return cubeRequest;
}
/**
 * Lists HBase storage info, one entry per segment of the cube, including
 * table name, date/offset range, status and input record count. Size lookup
 * failures degrade to an empty response entry rather than failing the request.
 *
 * @param cubeName target cube
 * @return per-segment HTable statistics
 */
@RequestMapping(value = "/{cubeName}/hbase", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public List<HBaseResponse> getHBaseInfo(@PathVariable String cubeName) {
    List<HBaseResponse> hbase = new ArrayList<HBaseResponse>();
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    if (null == cube) {
        throw new InternalErrorException("Cannot find cube " + cubeName);
    }
    for (CubeSegment segment : cube.getSegments()) {
        String tableName = segment.getStorageLocationIdentifier();
        HBaseResponse hr = null;
        // Get info of given table; tolerate lookup failure.
        try {
            hr = cubeService.getHTableInfo(cubeName, tableName);
        } catch (IOException e) {
            logger.error("Failed to calculate size of HTable \"{}\".", tableName, e);
        }
        if (null == hr) {
            logger.info("Failed to calculate size of HTable \"{}\".", tableName);
            hr = new HBaseResponse();
        }
        hr.setTableName(tableName);
        hr.setDateRangeStart(segment.getTSRange().start.v);
        hr.setDateRangeEnd(segment.getTSRange().end.v);
        hr.setSegmentName(segment.getName());
        hr.setSegmentStatus(segment.getStatus().toString());
        hr.setSourceCount(segment.getInputRecords());
        if (segment.isOffsetCube()) {
            hr.setSourceOffsetStart((Long) segment.getSegRange().start.v);
            hr.setSourceOffsetEnd((Long) segment.getSegRange().end.v);
        }
        hbase.add(hr);
    }
    return hbase;
}
/**
 * Get cube segment holes, i.e. gaps between existing segments.
 *
 * @param cubeName target cube
 * @return a list of CubeSegment, each representing a hole
 */
@RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public List<CubeSegment> getHoles(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    return cubeService.getCubeManager().calculateHoles(cubeName);
}
/**
 * Submits one build job per detected segment hole of the cube.
 * A failed submission (e.g. exceeding the max allowed job number) is logged
 * and skipped so the remaining holes are still attempted.
 *
 * @param cubeName target cube
 * @return the JobInstances that were successfully submitted
 */
@RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public List<JobInstance> fillHoles(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    List<JobInstance> jobs = Lists.newArrayList();
    List<CubeSegment> holes = cubeService.getCubeManager().calculateHoles(cubeName);
    if (holes.isEmpty()) {
        logger.info("No hole detected for cube '{}'", cubeName);
        return jobs;
    }
    for (CubeSegment hole : holes) {
        try {
            if (hole.isOffsetCube()) {
                // streaming hole: rebuild by source offsets
                JobBuildRequest2 request = new JobBuildRequest2();
                request.setBuildType(CubeBuildTypeEnum.BUILD.toString());
                request.setSourceOffsetStart((Long) hole.getSegRange().start.v);
                request.setSourceOffsetEnd((Long) hole.getSegRange().end.v);
                request.setSourcePartitionOffsetStart(hole.getSourcePartitionOffsetStart());
                request.setSourcePartitionOffsetEnd(hole.getSourcePartitionOffsetEnd());
                jobs.add(build2(cubeName, request));
            } else {
                // batch hole: rebuild by time range
                JobBuildRequest request = new JobBuildRequest();
                request.setBuildType(CubeBuildTypeEnum.BUILD.toString());
                request.setStartTime(hole.getTSRange().start.v);
                request.setEndTime(hole.getTSRange().end.v);
                jobs.add(build(cubeName, request));
            }
        } catch (Exception e) {
            // it may exceed the max allowed job number; skip this hole and keep going
            logger.info("Error to submit job for hole '{}', skip it now.", hole, e);
        }
    }
    return jobs;
}
/**
 * Exports the top-N recommended cuboids of a cube as a downloadable JSON file,
 * each cuboid rendered as the set of its dimension column names.
 *
 * @param cubeName target cube
 * @param top      how many recommended cuboids to export
 * @param response servlet response the JSON attachment is written to
 * @throws IOException when obtaining the response writer fails
 */
@RequestMapping(value = "/{cubeName}/cuboids/export", method = RequestMethod.GET)
@ResponseBody
public void cuboidsExport(@PathVariable String cubeName, @RequestParam(value = "top") Integer top,
        HttpServletResponse response) throws IOException {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    Map<Long, Long> cuboidList = getRecommendCuboidList(cube);
    List<Set<String>> dimensionSetList = Lists.newLinkedList();
    if (cuboidList == null || cuboidList.isEmpty()) {
        logger.info("Cannot get recommended cuboid list for cube {}", cubeName);
    } else {
        if (cuboidList.size() < top) {
            logger.info("Require {} recommended cuboids, but only {} is found.", top, cuboidList.size());
        }
        RowKeyColDesc[] rowKeyCols = cube.getDescriptor().getRowkey().getRowKeyColumns();
        Iterator<Long> cuboidIterator = cuboidList.keySet().iterator();
        int remaining = top; // work on a copy, don't mutate the request parameter
        while (remaining-- > 0 && cuboidIterator.hasNext()) {
            long cuboid = cuboidIterator.next();
            Set<String> dimensionSet = Sets.newHashSet();
            // a set bit in the cuboid id selects the row-key column at that bit index
            for (RowKeyColDesc col : rowKeyCols) {
                if ((cuboid & (1L << col.getBitIndex())) > 0) {
                    dimensionSet.add(col.getColumn());
                }
            }
            dimensionSetList.add(dimensionSet);
        }
    }
    response.setContentType("text/json;charset=utf-8");
    response.setHeader("Content-Disposition", "attachment; filename=\"" + cubeName + ".json\"");
    try (PrintWriter writer = response.getWriter()) {
        writer.write(JsonUtil.writeValueAsString(dimensionSetList));
    } catch (IOException e) {
        logger.error("Failed to write cuboid export for cube {}", cubeName, e);
        throw new InternalErrorException("Failed to write: " + e.getLocalizedMessage());
    }
}
/**
 * Builds the cuboid tree of the cube as currently materialized, decorated with
 * hit-frequency and query-match statistics from the system (metrics) cube.
 * The statistics are optional: failures reading the system cube are logged and
 * the tree is returned without them.
 *
 * @param cubeName target cube
 * @return the current cuboid tree response
 */
@RequestMapping(value = "/{cubeName}/cuboids/current", method = RequestMethod.GET)
@ResponseBody
public CuboidTreeResponse getCurrentCuboids(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    // The cuboid tree displayed should be consistent with the current one
    CuboidScheduler cuboidScheduler = cube.getCuboidScheduler();
    Map<Long, Long> cuboidStatsMap = cube.getCuboids();
    if (cuboidStatsMap == null) {
        cuboidStatsMap = CuboidStatsReaderUtil.readCuboidStatsFromCube(cuboidScheduler.getAllCuboidIds(), cube);
    }
    Map<Long, Long> hitFrequencyMap = null;
    Map<Long, Long> queryMatchMap = null;
    try {
        hitFrequencyMap = getTargetCuboidHitFrequency(cubeName);
        queryMatchMap = getCuboidQueryMatchCount(cubeName);
    } catch (Exception e) {
        // pass the throwable so the stack trace is retained, not just toString()
        logger.warn("Fail to query on system cube", e);
    }
    Set<Long> currentCuboidSet = cube.getCuboidScheduler().getAllCuboidIds();
    return cubeService.getCuboidTreeResponse(cuboidScheduler, cuboidStatsMap, hitFrequencyMap, queryMatchMap,
            currentCuboidSet);
}
/**
 * Builds the recommended cuboid tree for a cube, decorated with hit-frequency
 * and query-match statistics from the system (metrics) cube.
 *
 * @param cubeName target cube
 * @return recommended cuboid tree; an empty response when no recommendation exists
 * @throws IOException when reading cuboid statistics fails
 */
@RequestMapping(value = "/{cubeName}/cuboids/recommend", method = RequestMethod.GET)
@ResponseBody
public CuboidTreeResponse getRecommendCuboids(@PathVariable String cubeName) throws IOException {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    Map<Long, Long> recommendCuboidStatsMap = getRecommendCuboidList(cube);
    if (recommendCuboidStatsMap == null || recommendCuboidStatsMap.isEmpty()) {
        return new CuboidTreeResponse();
    }
    // schedule the recommended cuboids into a tree ordered by their cost
    CuboidScheduler cuboidScheduler = new TreeCuboidScheduler(cube.getDescriptor(),
            Lists.newArrayList(recommendCuboidStatsMap.keySet()),
            new TreeCuboidScheduler.CuboidCostComparator(recommendCuboidStatsMap));
    // Get cuboid target info for displaying heat map of cuboid hit
    Map<Long, Long> displayHitFrequencyMap = getTargetCuboidHitFrequency(cubeName);
    // Get exactly matched cuboid query count
    Map<Long, Long> queryMatchMap = getCuboidQueryMatchCount(cubeName);
    Set<Long> currentCuboidSet = cube.getCuboidScheduler().getAllCuboidIds();
    return cubeService.getCuboidTreeResponse(cuboidScheduler, recommendCuboidStatsMap, displayHitFrequencyMap,
            queryMatchMap, currentCuboidSet);
}
/**
 * Computes the recommended cuboid set (with row-count statistics) for a cube,
 * based on source-cuboid hit frequency and rollup counts from the system cube.
 *
 * @param cube target cube
 * @return recommended cuboid id -> statistic value
 * @throws IOException when reading statistics fails
 */
private Map<Long, Long> getRecommendCuboidList(CubeInstance cube) throws IOException {
    String cubeName = cube.getName();
    Map<Long, Long> hitFrequencyMap = getSourceCuboidHitFrequency(cubeName);
    Map<Long, Map<Long, Long>> rollingUpCountSourceMap = getCuboidRollingUpCount(cubeName);
    return cubeService.getRecommendCuboidStatistics(cube, hitFrequencyMap, rollingUpCountSourceMap);
}
/** Hit frequency grouped by the CUBOID_SOURCE metrics column. */
private Map<Long, Long> getSourceCuboidHitFrequency(String cubeName) {
return getCuboidHitFrequency(cubeName, true);
}
/** Hit frequency grouped by the CUBOID_TARGET metrics column. */
private Map<Long, Long> getTargetCuboidHitFrequency(String cubeName) {
return getCuboidHitFrequency(cubeName, false);
}
/**
 * Queries the system metrics cube for per-cuboid hit weight of the given cube.
 *
 * @param cubeName       cube whose query metrics are aggregated
 * @param isCuboidSource true groups by CUBOID_SOURCE, false by CUBOID_TARGET
 * @return cuboid id -> summed hit weight (formatted by cubeService)
 */
private Map<Long, Long> getCuboidHitFrequency(String cubeName, boolean isCuboidSource) {
SQLRequest sqlRequest = new SQLRequest();
// Metrics live in the reserved system project, not a user project.
sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
String cuboidColumn = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
if (!isCuboidSource) {
cuboidColumn = QueryCubePropertyEnum.CUBOID_TARGET.toString();
}
String hitMeasure = QueryCubePropertyEnum.WEIGHT_PER_HIT.toString();
String table = cubeService.getMetricsManager()
.getSystemTableFromSubject(cubeService.getConfig().getKylinMetricsSubjectQueryCube());
// NOTE(review): cubeName is concatenated into the SQL; safe only while cube
// names are restricted to alphanumerics/underscores — confirm upstream validation.
String sql = "select " + cuboidColumn + ", sum(" + hitMeasure + ") " //
+ "from " + table//
+ " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "' " //
+ "group by " + cuboidColumn;
sqlRequest.setSql(sql);
List<List<String>> orgHitFrequency = queryService.doQueryWithCache(sqlRequest).getResults();
return cubeService.formatQueryCount(orgHitFrequency);
}
/**
 * Queries the system metrics cube for the average rolling-up (aggregation) count
 * for each (source cuboid, target cuboid) pair of the given cube.
 *
 * @return source cuboid id -> (target cuboid id -> avg aggregated row count)
 */
private Map<Long, Map<Long, Long>> getCuboidRollingUpCount(String cubeName) {
SQLRequest sqlRequest = new SQLRequest();
sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
String cuboidSource = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
String cuboidTarget = QueryCubePropertyEnum.CUBOID_TARGET.toString();
String aggCount = QueryCubePropertyEnum.AGGR_COUNT.toString();
String table = cubeService.getMetricsManager()
.getSystemTableFromSubject(cubeService.getConfig().getKylinMetricsSubjectQueryCube());
// sum/count gives the average aggregation count per query for the pair.
String sql = "select " + cuboidSource + ", " + cuboidTarget + ", sum(" + aggCount + ")/count(*) " //
+ "from " + table //
+ " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "' " //
+ "group by " + cuboidSource + ", " + cuboidTarget;
sqlRequest.setSql(sql);
List<List<String>> orgRollingUpCount = queryService.doQueryWithCache(sqlRequest).getResults();
return cubeService.formatRollingUpCount(orgRollingUpCount);
}
/**
 * Queries the system metrics cube for the hit weight of queries whose cuboid
 * matched exactly (IF_MATCH = true), grouped by the source cuboid.
 */
private Map<Long, Long> getCuboidQueryMatchCount(String cubeName) {
    final String srcCol = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
    final String hitCol = QueryCubePropertyEnum.WEIGHT_PER_HIT.toString();
    final String table = cubeService.getMetricsManager()
            .getSystemTableFromSubject(cubeService.getConfig().getKylinMetricsSubjectQueryCube());

    // Builds exactly the same statement as before, just via a StringBuilder.
    StringBuilder sql = new StringBuilder();
    sql.append("select ").append(srcCol).append(", sum(").append(hitCol).append(") ");
    sql.append("from ").append(table);
    sql.append(" where ").append(QueryCubePropertyEnum.CUBE.toString()).append(" = '").append(cubeName)
            .append("' and ").append(QueryCubePropertyEnum.IF_MATCH.toString()).append(" = true ");
    sql.append("group by ").append(srcCol);

    SQLRequest request = new SQLRequest();
    request.setProject(MetricsManager.SYSTEM_PROJECT);
    request.setSql(sql.toString());
    List<List<String>> rows = queryService.doQueryWithCache(request).getResults();
    return cubeService.formatQueryCount(rows);
}
/**
 * Initiate the very beginning of a streaming cube. Seeks the latest offsets of each
 * partition from the streaming source (Kafka) and records them in the cube descriptor;
 * the first build job then uses these offsets as its start point.
 *
 * @param cubeName name of an existing streaming cube
 * @return a response carrying "result" and the recorded "offsets"
 * @throws IllegalArgumentException if the cube is not a streaming cube
 */
@RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = {
        "application/json" })
@ResponseBody
public GeneralResponse initStartOffsets(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName);
    if (cubeInstance.getSourceType() != ISourceAware.ID_STREAMING) {
        String msg = "Cube '" + cubeName + "' is not a Streaming Cube.";
        throw new IllegalArgumentException(msg);
    }
    final GeneralResponse response = new GeneralResponse();
    try {
        final Map<Integer, Long> startOffsets = KafkaClient.getLatestOffsets(cubeInstance);
        CubeDesc desc = cubeInstance.getDescriptor();
        desc.setPartitionOffsetStart(startOffsets);
        cubeService.getCubeDescManager().updateCubeDesc(desc);
        response.setProperty("result", "success");
        response.setProperty("offsets", startOffsets.toString());
    } catch (Exception e) {
        // Wrap checked/runtime exceptions with their cause; was `catch (Throwable)`,
        // which also swallowed Errors (OOM etc.) that should propagate.
        throw new RuntimeException(e);
    }
    return response;
}
/**
 * Deserializes the cube descriptor JSON carried by the request.
 * On malformed JSON the request is marked unsuccessful and {@code null} is returned;
 * on I/O failure an InternalErrorException is thrown.
 *
 * @return the parsed descriptor, or {@code null} when the JSON is invalid
 */
private CubeDesc deserializeCubeDesc(CubeRequest cubeRequest) {
    CubeDesc desc = null;
    try {
        logger.debug("Saving cube " + cubeRequest.getCubeDescData());
        desc = JsonUtil.readValue(cubeRequest.getCubeDescData(), CubeDesc.class);
    } catch (JsonParseException | JsonMappingException e) {
        // Both mean the submitted definition is malformed; report it back on the request.
        logger.error("The cube definition is not valid.", e);
        updateRequest(cubeRequest, false, e.getMessage());
    } catch (IOException e) {
        logger.error("Failed to deal with the request.", e);
        throw new InternalErrorException("Failed to deal with the request:" + e.getMessage(), e);
    }
    return desc;
}
/**
 * Records the outcome on the request object: clears the submitted descriptor data
 * and sets the success flag and message.
 */
private void updateRequest(CubeRequest request, boolean success, String message) {
    request.setMessage(message);
    request.setSuccessful(success);
    request.setCubeDescData("");
}
/** Throws NotFoundException (HTTP 404) when no cube with the given name exists. */
private void checkCubeExists(String cubeName) {
CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName);
if (cubeInstance == null) {
Message msg = MsgPicker.getMsg();
throw new NotFoundException(String.format(Locale.ROOT, msg.getCUBE_NOT_FOUND(), cubeName));
}
}
/** Enforces the cube's configured limit on concurrently building segments. */
private void checkBuildingSegment(CubeInstance cube) {
checkBuildingSegment(cube, cube.getConfig().getMaxBuildingSegments());
}
/**
 * Throws TooManyRequestException (HTTP 429) when the cube already has
 * {@code maxBuildingSeg} or more segments under construction.
 */
private void checkBuildingSegment(CubeInstance cube, int maxBuildingSeg) {
if (cube.getBuildingSegments().size() >= maxBuildingSeg) {
throw new TooManyRequestException(
"There is already " + cube.getBuildingSegments().size() + " building segment; ");
}
}
@RequestMapping(value = "/{cube}/{project}/migrate", method = { RequestMethod.POST })
@ResponseBody
public void migrateCube(@PathVariable String cube, @PathVariable String project) {
CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cube);
cubeService.migrateCube(cubeInstance, project);
}
/** Setter for tests / manual wiring (normally injected by Spring). */
public void setCubeService(CubeService cubeService) {
this.cubeService = cubeService;
}
/** Setter for tests / manual wiring (normally injected by Spring). */
public void setJobService(JobService jobService) {
this.jobService = jobService;
}
} | server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.rest.controller;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.kylin.common.util.JsonUtil;
import org.apache.kylin.common.util.RandomUtil;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.cuboid.CuboidScheduler;
import org.apache.kylin.cube.cuboid.TreeCuboidScheduler;
import org.apache.kylin.cube.model.CubeBuildTypeEnum;
import org.apache.kylin.cube.model.CubeDesc;
import org.apache.kylin.cube.model.CubeJoinedFlatTableDesc;
import org.apache.kylin.cube.model.RowKeyColDesc;
import org.apache.kylin.dimension.DimensionEncodingFactory;
import org.apache.kylin.engine.mr.common.CuboidStatsReaderUtil;
import org.apache.kylin.job.JobInstance;
import org.apache.kylin.job.JoinedFlatTable;
import org.apache.kylin.job.exception.JobException;
import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
import org.apache.kylin.metadata.model.ISourceAware;
import org.apache.kylin.metadata.model.SegmentRange;
import org.apache.kylin.metadata.model.SegmentRange.TSRange;
import org.apache.kylin.metadata.project.ProjectInstance;
import org.apache.kylin.metadata.realization.RealizationStatusEnum;
import org.apache.kylin.metrics.MetricsManager;
import org.apache.kylin.metrics.property.QueryCubePropertyEnum;
import org.apache.kylin.rest.exception.BadRequestException;
import org.apache.kylin.rest.exception.ForbiddenException;
import org.apache.kylin.rest.exception.InternalErrorException;
import org.apache.kylin.rest.exception.NotFoundException;
import org.apache.kylin.rest.exception.TooManyRequestException;
import org.apache.kylin.rest.msg.Message;
import org.apache.kylin.rest.msg.MsgPicker;
import org.apache.kylin.rest.request.CubeRequest;
import org.apache.kylin.rest.request.JobBuildRequest;
import org.apache.kylin.rest.request.JobBuildRequest2;
import org.apache.kylin.rest.request.JobOptimizeRequest;
import org.apache.kylin.rest.request.LookupSnapshotBuildRequest;
import org.apache.kylin.rest.request.SQLRequest;
import org.apache.kylin.rest.response.CubeInstanceResponse;
import org.apache.kylin.rest.response.CuboidTreeResponse;
import org.apache.kylin.rest.response.EnvelopeResponse;
import org.apache.kylin.rest.response.GeneralResponse;
import org.apache.kylin.rest.response.HBaseResponse;
import org.apache.kylin.rest.response.ResponseCode;
import org.apache.kylin.rest.service.CubeService;
import org.apache.kylin.rest.service.JobService;
import org.apache.kylin.rest.service.ProjectService;
import org.apache.kylin.rest.service.QueryService;
import org.apache.kylin.rest.util.ValidateUtil;
import org.apache.kylin.source.kafka.util.KafkaClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* CubeController is defined as Restful API entrance for UI.
*/
@Controller
@RequestMapping(value = "/cubes")
public class CubeController extends BasicController {
private static final Logger logger = LoggerFactory.getLogger(CubeController.class);
// Cube CRUD and metadata operations.
@Autowired
@Qualifier("cubeMgmtService")
private CubeService cubeService;
// Build/optimize job submission.
@Autowired
@Qualifier("jobService")
private JobService jobService;
// Project-level operations (injected; not referenced in this part of the file).
@Autowired
@Qualifier("projectService")
private ProjectService projectService;
// SQL access, used here to query the system metrics cube.
@Autowired
@Qualifier("queryService")
private QueryService queryService;
@RequestMapping(value = "/validate/{cubeName}", method = RequestMethod.GET, produces = { "application/json" })
@ResponseBody
public EnvelopeResponse<Boolean> validateModelName(@PathVariable String cubeName) {
return new EnvelopeResponse<>(ResponseCode.CODE_SUCCESS, cubeService.isCubeNameVaildate(cubeName), "");
}
@RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public List<CubeInstanceResponse> getCubes(@RequestParam(value = "cubeName", required = false) String cubeName,
@RequestParam(value = "modelName", required = false) String modelName,
@RequestParam(value = "projectName", required = false) String projectName,
@RequestParam(value = "limit", required = false) Integer limit,
@RequestParam(value = "offset", required = false) Integer offset) {
List<CubeInstance> cubes = cubeService.listAllCubes(cubeName, projectName, modelName, false);
List<CubeInstanceResponse> response = Lists.newArrayListWithExpectedSize(cubes.size());
for (CubeInstance cube : cubes) {
try {
response.add(cubeService.createCubeInstanceResponse(cube));
} catch (Exception e) {
logger.error("Error creating cube instance response, skipping.", e);
}
}
int climit = (null == limit) ? response.size() : limit;
int coffset = (null == offset) ? 0 : offset;
if (response.size() <= coffset) {
return Collections.emptyList();
}
if ((response.size() - coffset) < climit) {
return response.subList(coffset, response.size());
}
return response.subList(coffset, coffset + climit);
}
@RequestMapping(value = "validEncodings", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public Map<String, Integer> getValidEncodings() {
Map<String, Integer> encodings;
try {
encodings = DimensionEncodingFactory.getValidEncodings();
} catch (Exception e) {
logger.error("Error when getting valid encodings", e);
return Maps.newHashMap();
}
return encodings;
}
@RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public CubeInstance getCube(@PathVariable String cubeName) {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
return cube;
}
/**
 * Get the flat-table SELECT SQL of a Cube.
 *
 * @param cubeName Cube Name; 404 if it does not exist
 * @return response with a single "sql" property
 */
@RequestMapping(value = "/{cubeName}/sql", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public GeneralResponse getSql(@PathVariable String cubeName) {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
IJoinedFlatTableDesc flatTableDesc = new CubeJoinedFlatTableDesc(cube.getDescriptor(), true);
String sql = JoinedFlatTable.generateSelectDataStatement(flatTableDesc);
GeneralResponse response = new GeneralResponse();
response.setProperty("sql", sql);
return response;
}
/**
 * Get the flat-table SELECT SQL of one Cube segment.
 *
 * @param cubeName    Cube Name; 404 if it does not exist
 * @param segmentName Segment Name; 404 if it does not exist
 * @return response with a single "sql" property
 */
@RequestMapping(value = "/{cubeName}/segs/{segmentName}/sql", method = { RequestMethod.GET }, produces = {
"application/json" })
@ResponseBody
public GeneralResponse getSql(@PathVariable String cubeName, @PathVariable String segmentName) {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
CubeSegment segment = cube.getSegment(segmentName, null);
if (segment == null) {
throw new NotFoundException("Cannot find segment " + segmentName);
}
IJoinedFlatTableDesc flatTableDesc = new CubeJoinedFlatTableDesc(segment, true);
String sql = JoinedFlatTable.generateSelectDataStatement(flatTableDesc);
GeneralResponse response = new GeneralResponse();
response.setProperty("sql", sql);
return response;
}
/**
 * Update cube notify list.
 *
 * @param cubeName   name of an existing cube; 404 if it does not exist
 * @param notifyList addresses to notify on job events
 */
@RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = {
        "application/json" })
@ResponseBody
public void updateNotifyList(@PathVariable String cubeName, @RequestBody List<String> notifyList) {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    try {
        cubeService.updateCubeNotifyList(cube, notifyList);
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        // Preserve the cause so the original stack trace survives the wrapping.
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
@RequestMapping(value = "/{cubeName}/cost", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance updateCubeCost(@PathVariable String cubeName, @RequestParam(value = "cost") int cost) {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
try {
return cubeService.updateCubeCost(cube, cost);
} catch (Exception e) {
String message = "Failed to update cube cost: " + cubeName + " : " + cost;
logger.error(message, e);
throw new InternalErrorException(message + " Caused by: " + e.getMessage(), e);
}
}
/**
 * Force rebuild a cube's lookup table snapshot for one segment.
 *
 * @param cubeName    name of an existing cube; 404 if it does not exist
 * @param segmentName segment whose snapshot is rebuilt
 * @param lookupTable lookup table identifier
 */
@RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = {
        RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance rebuildLookupSnapshot(@PathVariable String cubeName, @PathVariable String segmentName,
        @RequestParam(value = "lookupTable") String lookupTable) {
    // Surface a missing cube as 404 instead of an NPE inside the service call.
    checkCubeExists(cubeName);
    try {
        final CubeManager cubeMgr = cubeService.getCubeManager();
        final CubeInstance cube = cubeMgr.getCube(cubeName);
        return cubeService.rebuildLookupSnapshot(cube, segmentName, lookupTable);
    } catch (IOException e) {
        logger.error(e.getLocalizedMessage(), e);
        // Preserve the cause so the original stack trace survives.
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
/**
 * Force rebuild a cube's lookup table snapshot for the given segments, as an async job.
 *
 * @param cubeName name of an existing cube; 404 if it does not exist
 * @param request  lookup table name plus the segment IDs to refresh
 * @return the submitted job instance
 */
@RequestMapping(value = "/{cubeName}/refresh_lookup", method = { RequestMethod.PUT }, produces = {
        "application/json" })
@ResponseBody
public JobInstance rebuildLookupSnapshot(@PathVariable String cubeName,
        @RequestBody LookupSnapshotBuildRequest request) {
    // Surface a missing cube as 404 instead of an NPE inside job submission.
    checkCubeExists(cubeName);
    try {
        final CubeManager cubeMgr = cubeService.getCubeManager();
        final CubeInstance cube = cubeMgr.getCube(cubeName);
        String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
        return jobService.submitLookupSnapshotJob(cube, request.getLookupTableName(), request.getSegmentIDs(),
                submitter);
    } catch (IOException e) {
        logger.error(e.getLocalizedMessage(), e);
        // Preserve the cause so the original stack trace survives.
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
/**
 * Delete a cube segment by name.
 *
 * @param cubeName    name of an existing cube; 404 if it does not exist
 * @param segmentName segment to delete; 404 if it does not exist
 * @return the cube after the segment is removed
 */
@RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = {
        "application/json" })
@ResponseBody
public CubeInstance deleteSegment(@PathVariable String cubeName, @PathVariable String segmentName) {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    CubeSegment segment = cube.getSegment(segmentName, null);
    if (segment == null) {
        throw new NotFoundException("Cannot find segment '" + segmentName + "'");
    }
    try {
        return cubeService.deleteSegment(cube, segmentName);
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        // Preserve the cause so the original stack trace survives the wrapping.
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
/**
 * Build/Rebuild a cube segment over a time range; alias that delegates to rebuild().
 */
@RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance build(@PathVariable String cubeName, @RequestBody JobBuildRequest req) {
return rebuild(cubeName, req);
}
/**
 * Build/Rebuild a cube segment over a time range.
 *
 * @param cubeName name of the cube to build
 * @param req      carries start/end time, build type and force flags
 */
@RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest req) {
return buildInternal(cubeName, new TSRange(req.getStartTime(), req.getEndTime()), null, null, null,
req.getBuildType(), req.isForce() || req.isForceMergeEmptySegment());
}
/**
 * Build/Rebuild a cube segment by source offset (streaming cubes).
 * Verifies the Kafka consumer is on the classpath before delegating to rebuild2().
 */
@RequestMapping(value = "/{cubeName}/build2", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance build2(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req) {
    try {
        // Class.forName either returns a class or throws ClassNotFoundException;
        // the former null check was dead code.
        Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer");
    } catch (ClassNotFoundException e) {
        throw new InternalErrorException("Could not find Kafka dependency", e);
    }
    return rebuild2(cubeName, req);
}
/**
 * Build/Rebuild a cube segment by source offset.
 *
 * @param cubeName name of the cube to build
 * @param req      carries source offsets, per-partition offsets, build type and force flag
 */
@RequestMapping(value = "/{cubeName}/rebuild2", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public JobInstance rebuild2(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req) {
return buildInternal(cubeName, null, new SegmentRange(req.getSourceOffsetStart(), req.getSourceOffsetEnd()),
req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(),
req.isForce());
}
/**
 * Common job-submission path for all build/rebuild endpoints. Callers pass either a
 * time range (tsRange) or an offset range (segRange) and null for the other.
 */
private JobInstance buildInternal(String cubeName, TSRange tsRange, SegmentRange segRange, //
Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd,
String buildType, boolean force) {
try {
String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
// Refuse when the concurrent-building-segments limit is reached (HTTP 429).
checkBuildingSegment(cube);
return jobService.submitJob(cube, tsRange, segRange, sourcePartitionOffsetStart, sourcePartitionOffsetEnd,
CubeBuildTypeEnum.valueOf(buildType), force, submitter);
} catch (Throwable e) {
logger.error(e.getLocalizedMessage(), e);
throw new InternalErrorException(e.getLocalizedMessage(), e);
}
}
/**
 * Send an optimize cube job.
 *
 * @param cubeName Cube ID; 404 if it does not exist
 * @return JobInstance of CheckpointExecutable
 */
@RequestMapping(value = "/{cubeName}/optimize", method = { RequestMethod.PUT })
@ResponseBody
public JobInstance optimize(@PathVariable String cubeName, @RequestBody JobOptimizeRequest jobOptimizeRequest) {
    // Check before fetching (was after), and outside the try so a missing cube
    // surfaces as 404 instead of being rewrapped as an internal error.
    checkCubeExists(cubeName);
    try {
        String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
        CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
        logger.info("cuboid recommend:" + jobOptimizeRequest.getCuboidsRecommend());
        return jobService.submitOptimizeJob(cube, jobOptimizeRequest.getCuboidsRecommend(), submitter).getFirst();
    } catch (BadRequestException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    } catch (JobException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new BadRequestException(e.getLocalizedMessage());
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        // Preserve the cause so the original stack trace survives the wrapping.
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
/**
 * Send an optimize job for one cube segment.
 *
 * @param cubeName  Cube ID; 404 if it does not exist
 * @param segmentID segment to be optimized
 */
@RequestMapping(value = "/{cubeName}/recover_segment_optimize/{segmentID}", method = { RequestMethod.PUT })
@ResponseBody
public JobInstance recoverSegmentOptimize(@PathVariable String cubeName, @PathVariable String segmentID) {
    // Outside the try: a missing cube is a 404, not an internal error or NPE.
    checkCubeExists(cubeName);
    try {
        String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
        CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
        CubeSegment segment = cube.getSegmentById(segmentID);
        if (segment == null) {
            throw new NotFoundException("Cannot find segment '" + segmentID + "'");
        }
        return jobService.submitRecoverSegmentOptimizeJob(segment, submitter);
    } catch (JobException e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new BadRequestException(e.getLocalizedMessage());
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        // Preserve the cause so the original stack trace survives the wrapping.
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
@RequestMapping(value = "/{cubeName}/disable", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance disableCube(@PathVariable String cubeName) {
try {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
return cubeService.disableCube(cube);
} catch (Exception e) {
String message = "Failed to disable cube: " + cubeName;
logger.error(message, e);
throw new InternalErrorException(message + " Caused by: " + e.getMessage(), e);
}
}
@RequestMapping(value = "/{cubeName}/purge", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance purgeCube(@PathVariable String cubeName) {
try {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
return cubeService.purgeCube(cube);
} catch (Exception e) {
String message = "Failed to purge cube: " + cubeName;
logger.error(message, e);
throw new InternalErrorException(message + " Caused by: " + e.getMessage(), e);
}
}
@RequestMapping(value = "/{cubeName}/clone", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance cloneCube(@PathVariable String cubeName, @RequestBody CubeRequest cubeRequest) {
String newCubeName = cubeRequest.getCubeName();
String projectName = cubeRequest.getProject();
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
throw new BadRequestException("Broken cube can't be cloned");
}
if (!ValidateUtil.isAlphanumericUnderscore(newCubeName)) {
throw new BadRequestException("Invalid Cube name, only letters, numbers and underscore supported.");
}
ProjectInstance project = cubeService.getProjectManager().getProject(projectName);
if (project == null) {
throw new NotFoundException("Project " + projectName + " doesn't exist");
}
// KYLIN-1925, forbid cloning cross projects
if (!project.getName().equals(cube.getProject())) {
throw new BadRequestException("Cloning cubes across projects is not supported.");
}
CubeDesc cubeDesc = cube.getDescriptor();
CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
newCubeDesc.setName(newCubeName);
CubeInstance newCube;
try {
newCube = cubeService.createCubeAndDesc(project, newCubeDesc);
//reload to avoid shallow clone
cubeService.getCubeDescManager().reloadCubeDescLocal(newCubeName);
} catch (IOException e) {
throw new InternalErrorException("Failed to clone cube ", e);
}
return newCube;
}
@RequestMapping(value = "/{cubeName}/enable", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeInstance enableCube(@PathVariable String cubeName) {
try {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
cubeService.checkEnableCubeCondition(cube);
return cubeService.enableCube(cube);
} catch (Exception e) {
String message = "Failed to enable cube: " + cubeName;
logger.error(message, e);
throw new InternalErrorException(message + " Caused by: " + e.getMessage(), e);
}
}
@RequestMapping(value = "/{cubeName}", method = { RequestMethod.DELETE }, produces = { "application/json" })
@ResponseBody
public void deleteCube(@PathVariable String cubeName) {
checkCubeExists(cubeName);
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
//drop Cube
try {
cubeService.deleteCube(cube);
} catch (Exception e) {
logger.error(e.getLocalizedMessage(), e);
throw new InternalErrorException("Failed to delete cube. " + " Caused by: " + e.getMessage(), e);
}
}
/**
 * save cubeDesc — creates a brand-new cube (and its descriptor) from the JSON in the request.
 *
 * @return the request, updated with the new UUID and success flag
 * @throws BadRequestException on an empty or non-alphanumeric cube name
 */
@RequestMapping(value = "", method = { RequestMethod.POST }, produces = { "application/json" })
@ResponseBody
public CubeRequest saveCubeDesc(@RequestBody CubeRequest cubeRequest) {
CubeDesc desc = deserializeCubeDesc(cubeRequest);
if (desc == null) {
cubeRequest.setMessage("CubeDesc is null.")
return cubeRequest;
}
String name = desc.getName();
if (StringUtils.isEmpty(name)) {
logger.info("Cube name should not be empty.");
throw new BadRequestException("Cube name should not be empty.");
}
if (!ValidateUtil.isAlphanumericUnderscore(name)) {
throw new BadRequestException("Invalid Cube name, only letters, numbers and underscore supported.");
}
try {
// Always assign a fresh UUID; project falls back to the default when absent.
desc.setUuid(RandomUtil.randomUUID().toString());
String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
: cubeRequest.getProject();
ProjectInstance project = cubeService.getProjectManager().getProject(projectName);
if (project == null) {
throw new NotFoundException("Project " + projectName + " doesn't exist");
}
cubeService.createCubeAndDesc(project, desc);
} catch (Exception e) {
logger.error("Failed to deal with the request.", e);
throw new InternalErrorException(e.getLocalizedMessage(), e);
}
cubeRequest.setUuid(desc.getUuid());
cubeRequest.setSuccessful(true);
return cubeRequest;
}
/**
 * update CubeDesc. Rejects renames and incompatible changes to a cube that already
 * has segments; on success returns the request with the saved desc JSON filled in.
 *
 * @return the request object, updated with outcome and (on success) the new desc data
 * @throws JsonProcessingException if the saved descriptor cannot be re-serialized
 */
@RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public CubeRequest updateCubeDesc(@RequestBody CubeRequest cubeRequest) throws JsonProcessingException {
    CubeDesc desc = deserializeCubeDesc(cubeRequest);
    if (desc == null) {
        return cubeRequest;
    }
    String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
            : cubeRequest.getProject();
    try {
        CubeInstance cube = cubeService.getCubeManager().getCube(cubeRequest.getCubeName());
        if (cube == null) {
            String error = "The cube named " + cubeRequest.getCubeName() + " does not exist ";
            updateRequest(cubeRequest, false, error);
            return cubeRequest;
        }
        //cube renaming is not allowed
        if (!cube.getDescriptor().getName().equalsIgnoreCase(desc.getName())) {
            String error = "Cube Desc renaming is not allowed: desc.getName(): " + desc.getName()
                    + ", cubeRequest.getCubeName(): " + cubeRequest.getCubeName();
            updateRequest(cubeRequest, false, error);
            return cubeRequest;
        }
        // Key fields of a cube that already holds data may not change; purge first.
        if (cube.getSegments().size() != 0 && !cube.getDescriptor().consistentWith(desc)) {
            String error = "CubeDesc " + desc.getName()
                    + " is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.";
            updateRequest(cubeRequest, false, error);
            return cubeRequest;
        }
        desc = cubeService.updateCubeAndDesc(cube, desc, projectName, true);
    } catch (AccessDeniedException accessDeniedException) {
        throw new ForbiddenException("You don't have right to update this cube.");
    } catch (Exception e) {
        logger.error("Failed to deal with the request:" + e.getLocalizedMessage(), e);
        // Preserve the cause so the original stack trace survives the wrapping.
        throw new InternalErrorException("Failed to deal with the request: " + e.getLocalizedMessage(), e);
    }
    if (desc.isBroken()) {
        updateRequest(cubeRequest, false, desc.getErrorsAsString());
        return cubeRequest;
    }
    String descData = JsonUtil.writeValueAsIndentString(desc);
    cubeRequest.setCubeDescData(descData);
    cubeRequest.setSuccessful(true);
    return cubeRequest;
}
/**
 * get Hbase Info — one HBaseResponse per segment of the cube, best-effort sized.
 *
 * @param cubeName name of the cube
 * @return per-segment HBase storage info
 */
@RequestMapping(value = "/{cubeName}/hbase", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public List<HBaseResponse> getHBaseInfo(@PathVariable String cubeName) {
List<HBaseResponse> hbase = new ArrayList<HBaseResponse>();
CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
if (null == cube) {
// NOTE(review): a missing cube yields 500 here, unlike the 404 from
// checkCubeExists used elsewhere — confirm whether this is intentional.
throw new InternalErrorException("Cannot find cube " + cubeName);
}
List<CubeSegment> segments = cube.getSegments();
for (CubeSegment segment : segments) {
String tableName = segment.getStorageLocationIdentifier();
HBaseResponse hr = null;
// Get info of given table.
try {
hr = cubeService.getHTableInfo(cubeName, tableName);
} catch (IOException e) {
logger.error("Failed to calcuate size of HTable \"" + tableName + "\".", e);
}
// Sizing is best effort; fall back to an empty response rather than failing.
if (null == hr) {
logger.info("Failed to calcuate size of HTable \"" + tableName + "\".");
hr = new HBaseResponse();
}
hr.setTableName(tableName);
hr.setDateRangeStart(segment.getTSRange().start.v);
hr.setDateRangeEnd(segment.getTSRange().end.v);
hr.setSegmentName(segment.getName());
hr.setSegmentStatus(segment.getStatus().toString());
hr.setSourceCount(segment.getInputRecords());
if (segment.isOffsetCube()) {
hr.setSourceOffsetStart((Long) segment.getSegRange().start.v);
hr.setSourceOffsetEnd((Long) segment.getSegRange().end.v);
}
hbase.add(hr);
}
return hbase;
}
/**
 * get cube segment holes
 *
 * @param cubeName name of an existing cube; 404 if it does not exist
 * @return a list of CubeSegment, each representing a hole
 */
@RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.GET }, produces = { "application/json" })
@ResponseBody
public List<CubeSegment> getHoles(@PathVariable String cubeName) {
checkCubeExists(cubeName);
return cubeService.getCubeManager().calculateHoles(cubeName);
}
/**
 * Submit one build job per segment hole of a cube.
 *
 * @param cubeName name of the cube to repair
 * @return a list of JobInstances submitted to fill the holes; a hole whose
 *         job submission fails (e.g. max job number exceeded) is skipped
 */
@RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.PUT }, produces = { "application/json" })
@ResponseBody
public List<JobInstance> fillHoles(@PathVariable String cubeName) {
    checkCubeExists(cubeName);

    List<JobInstance> jobs = Lists.newArrayList();
    List<CubeSegment> holes = cubeService.getCubeManager().calculateHoles(cubeName);

    if (holes.isEmpty()) {
        logger.info("No hole detected for cube '" + cubeName + "'");
        return jobs;
    }

    for (CubeSegment hole : holes) {
        if (hole.isOffsetCube()) {
            // Streaming (offset-based) segment: rebuild by source offsets
            JobBuildRequest2 request = new JobBuildRequest2();
            request.setBuildType(CubeBuildTypeEnum.BUILD.toString());
            request.setSourceOffsetStart((Long) hole.getSegRange().start.v);
            request.setSourceOffsetEnd((Long) hole.getSegRange().end.v);
            request.setSourcePartitionOffsetStart(hole.getSourcePartitionOffsetStart());
            request.setSourcePartitionOffsetEnd(hole.getSourcePartitionOffsetEnd());
            try {
                jobs.add(build2(cubeName, request));
            } catch (Exception e) {
                // it may exceed the max allowed job number; skip this hole
                logger.info("Error to submit job for hole '" + hole.toString() + "', skip it now.", e);
            }
        } else {
            // Batch segment: rebuild by time range
            JobBuildRequest request = new JobBuildRequest();
            request.setBuildType(CubeBuildTypeEnum.BUILD.toString());
            request.setStartTime(hole.getTSRange().start.v);
            request.setEndTime(hole.getTSRange().end.v);
            try {
                jobs.add(build(cubeName, request));
            } catch (Exception e) {
                // it may exceed the max allowed job number; skip this hole
                logger.info("Error to submit job for hole '" + hole.toString() + "', skip it now.", e);
            }
        }
    }
    return jobs;
}
/**
 * Export the top-N recommended cuboids of a cube as a JSON attachment. Each
 * entry of the exported list is the set of dimension names of one
 * recommended cuboid, decoded from the cuboid's row-key bitmask.
 *
 * @param cubeName name of the cube
 * @param top      maximum number of recommended cuboids to export
 * @param response HTTP response the JSON document is written to
 * @throws IOException if the recommendation statistics cannot be read
 */
@RequestMapping(value = "/{cubeName}/cuboids/export", method = RequestMethod.GET)
@ResponseBody
public void cuboidsExport(@PathVariable String cubeName, @RequestParam(value = "top") Integer top,
        HttpServletResponse response) throws IOException {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);

    Map<Long, Long> cuboidList = getRecommendCuboidList(cube);
    List<Set<String>> dimensionSetList = Lists.newLinkedList();
    if (cuboidList == null || cuboidList.isEmpty()) {
        logger.info("Cannot get recommended cuboid list for cube " + cubeName);
    } else {
        if (cuboidList.size() < top) {
            logger.info("Require " + top + " recommended cuboids, but only " + cuboidList.size() + " is found.");
        }
        Iterator<Long> cuboidIterator = cuboidList.keySet().iterator();
        RowKeyColDesc[] rowKeyColDescList = cube.getDescriptor().getRowkey().getRowKeyColumns();

        // Work on a local counter instead of mutating the request parameter.
        int remaining = top;
        while (remaining-- > 0 && cuboidIterator.hasNext()) {
            Set<String> dimensionSet = Sets.newHashSet();
            dimensionSetList.add(dimensionSet);
            long cuboid = cuboidIterator.next();
            // Each set bit of the cuboid id selects one row-key column.
            for (RowKeyColDesc rowKeyColDesc : rowKeyColDescList) {
                if ((cuboid & (1L << rowKeyColDesc.getBitIndex())) > 0) {
                    dimensionSet.add(rowKeyColDesc.getColumn());
                }
            }
        }
    }

    response.setContentType("text/json;charset=utf-8");
    response.setHeader("Content-Disposition", "attachment; filename=\"" + cubeName + ".json\"");
    try (PrintWriter writer = response.getWriter()) {
        writer.write(JsonUtil.writeValueAsString(dimensionSetList));
    } catch (IOException e) {
        logger.error("", e);
        throw new InternalErrorException("Failed to write: " + e.getLocalizedMessage());
    }
}
/**
 * Return the current cuboid tree of a cube, decorated with per-cuboid row
 * counts plus query hit frequency and exact-match query counts when the
 * system cube is available.
 *
 * @param cubeName name of the cube
 * @return the cuboid tree built from the cube's current cuboid scheduler
 */
@RequestMapping(value = "/{cubeName}/cuboids/current", method = RequestMethod.GET)
@ResponseBody
public CuboidTreeResponse getCurrentCuboids(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
    // The cuboid tree displayed should be consistent with the current one
    CuboidScheduler cuboidScheduler = cube.getCuboidScheduler();

    Map<Long, Long> cuboidStatsMap = cube.getCuboids();
    if (cuboidStatsMap == null) {
        cuboidStatsMap = CuboidStatsReaderUtil.readCuboidStatsFromCube(cuboidScheduler.getAllCuboidIds(), cube);
    }

    // Hit statistics come from the system cube; they are optional and must
    // not fail the request.
    Map<Long, Long> hitFrequencyMap = null;
    Map<Long, Long> queryMatchMap = null;
    try {
        hitFrequencyMap = getTargetCuboidHitFrequency(cubeName);
        queryMatchMap = getCuboidQueryMatchCount(cubeName);
    } catch (Exception e) {
        // Pass the throwable itself so the stack trace is preserved in the log
        logger.warn("Fail to query on system cube due to " + e, e);
    }

    Set<Long> currentCuboidSet = cube.getCuboidScheduler().getAllCuboidIds();
    return cubeService.getCuboidTreeResponse(cuboidScheduler, cuboidStatsMap, hitFrequencyMap, queryMatchMap,
            currentCuboidSet);
}
/**
 * Return the recommended cuboid tree of a cube, built from recommendation
 * statistics and decorated with hit frequency and exact-match query counts.
 *
 * @param cubeName name of the cube
 * @return the recommended cuboid tree; an empty response when no
 *         recommendation is available
 * @throws IOException if recommendation statistics cannot be read
 */
@RequestMapping(value = "/{cubeName}/cuboids/recommend", method = RequestMethod.GET)
@ResponseBody
public CuboidTreeResponse getRecommendCuboids(@PathVariable String cubeName) throws IOException {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);

    Map<Long, Long> recommendStats = getRecommendCuboidList(cube);
    if (recommendStats == null || recommendStats.isEmpty()) {
        return new CuboidTreeResponse();
    }

    // Build a scheduler over the recommended cuboids, ordered by cost.
    CuboidScheduler recommendScheduler = new TreeCuboidScheduler(cube.getDescriptor(),
            Lists.newArrayList(recommendStats.keySet()),
            new TreeCuboidScheduler.CuboidCostComparator(recommendStats));

    // Cuboid target info for displaying the heat map of cuboid hits.
    Map<Long, Long> displayHitFrequencyMap = getTargetCuboidHitFrequency(cubeName);
    // Exactly matched cuboid query count.
    Map<Long, Long> queryMatchMap = getCuboidQueryMatchCount(cubeName);

    Set<Long> currentCuboidSet = cube.getCuboidScheduler().getAllCuboidIds();
    return cubeService.getCuboidTreeResponse(recommendScheduler, recommendStats, displayHitFrequencyMap,
            queryMatchMap, currentCuboidSet);
}
/**
 * Collect recommendation inputs (source-cuboid hit frequency and rolling-up
 * counts from the system cube) and ask the service layer for the
 * recommended cuboid statistics.
 */
private Map<Long, Long> getRecommendCuboidList(CubeInstance cube) throws IOException {
    String cubeName = cube.getName();
    Map<Long, Long> hitFrequency = getSourceCuboidHitFrequency(cubeName);
    Map<Long, Map<Long, Long>> rollingUpCount = getCuboidRollingUpCount(cubeName);
    return cubeService.getRecommendCuboidStatistics(cube, hitFrequency, rollingUpCount);
}
/** Hit frequency keyed by the cuboid a query originally targeted (source). */
private Map<Long, Long> getSourceCuboidHitFrequency(String cubeName) {
    return getCuboidHitFrequency(cubeName, true);
}
/** Hit frequency keyed by the cuboid that actually answered a query (target). */
private Map<Long, Long> getTargetCuboidHitFrequency(String cubeName) {
    return getCuboidHitFrequency(cubeName, false);
}
/**
 * Query the system (metrics) cube for per-cuboid hit frequency of the given
 * cube.
 *
 * @param cubeName       name of the cube whose metrics are queried
 * @param isCuboidSource when true, group by the cuboid a query originally
 *                       targeted (source); otherwise by the cuboid that
 *                       answered it (target)
 * @return cuboid id mapped to accumulated hit weight, as formatted by the
 *         service layer
 */
private Map<Long, Long> getCuboidHitFrequency(String cubeName, boolean isCuboidSource) {
    SQLRequest sqlRequest = new SQLRequest();
    sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
    String cuboidColumn = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
    if (!isCuboidSource) {
        cuboidColumn = QueryCubePropertyEnum.CUBOID_TARGET.toString();
    }
    String hitMeasure = QueryCubePropertyEnum.WEIGHT_PER_HIT.toString();
    String table = cubeService.getMetricsManager()
            .getSystemTableFromSubject(cubeService.getConfig().getKylinMetricsSubjectQueryCube());
    // NOTE(review): cubeName is concatenated into the SQL text and originates
    // from a request path variable; confirm upstream validation or use bind
    // parameters if SQLRequest supports them.
    String sql = "select " + cuboidColumn + ", sum(" + hitMeasure + ") " //
            + "from " + table//
            + " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "' " //
            + "group by " + cuboidColumn;
    sqlRequest.setSql(sql);
    List<List<String>> orgHitFrequency = queryService.doQueryWithCache(sqlRequest).getResults();
    return cubeService.formatQueryCount(orgHitFrequency);
}
/**
 * Query the system (metrics) cube for the average rolling-up (aggregation)
 * count from each source cuboid to each target cuboid of the given cube.
 *
 * @param cubeName name of the cube whose metrics are queried
 * @return source cuboid id mapped to (target cuboid id mapped to average
 *         rolled-up row count), as formatted by the service layer
 */
private Map<Long, Map<Long, Long>> getCuboidRollingUpCount(String cubeName) {
    SQLRequest sqlRequest = new SQLRequest();
    sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
    String cuboidSource = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
    String cuboidTarget = QueryCubePropertyEnum.CUBOID_TARGET.toString();
    String aggCount = QueryCubePropertyEnum.AGGR_COUNT.toString();
    String table = cubeService.getMetricsManager()
            .getSystemTableFromSubject(cubeService.getConfig().getKylinMetricsSubjectQueryCube());
    // NOTE(review): cubeName is concatenated into the SQL text and originates
    // from a request path variable; confirm upstream validation or use bind
    // parameters if SQLRequest supports them.
    String sql = "select " + cuboidSource + ", " + cuboidTarget + ", sum(" + aggCount + ")/count(*) " //
            + "from " + table //
            + " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "' " //
            + "group by " + cuboidSource + ", " + cuboidTarget;
    sqlRequest.setSql(sql);
    List<List<String>> orgRollingUpCount = queryService.doQueryWithCache(sqlRequest).getResults();
    return cubeService.formatRollingUpCount(orgRollingUpCount);
}
/**
 * Query the system (metrics) cube for the hit weight of queries that
 * exactly matched a cuboid of the given cube (IF_MATCH = true).
 *
 * @param cubeName name of the cube whose metrics are queried
 * @return source cuboid id mapped to accumulated exact-match hit weight, as
 *         formatted by the service layer
 */
private Map<Long, Long> getCuboidQueryMatchCount(String cubeName) {
    SQLRequest sqlRequest = new SQLRequest();
    sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
    String cuboidSource = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
    String hitMeasure = QueryCubePropertyEnum.WEIGHT_PER_HIT.toString();
    String table = cubeService.getMetricsManager()
            .getSystemTableFromSubject(cubeService.getConfig().getKylinMetricsSubjectQueryCube());
    // NOTE(review): cubeName is concatenated into the SQL text and originates
    // from a request path variable; confirm upstream validation or use bind
    // parameters if SQLRequest supports them.
    String sql = "select " + cuboidSource + ", sum(" + hitMeasure + ") " //
            + "from " + table //
            + " where " + QueryCubePropertyEnum.CUBE.toString() + " = '" + cubeName + "' and "
            + QueryCubePropertyEnum.IF_MATCH.toString() + " = true " //
            + "group by " + cuboidSource;
    sqlRequest.setSql(sql);
    List<List<String>> orgMatchHitFrequency = queryService.doQueryWithCache(sqlRequest).getResults();
    return cubeService.formatQueryCount(orgMatchHitFrequency);
}
/**
 * Initiate the very beginning of a streaming cube. Will seek the latest offsets of each partition from streaming
 * source (kafka) and record in the cube descriptor; In the first build job, it will use these offsets as the start point.
 *
 * @param cubeName name of the streaming cube
 * @return a GeneralResponse carrying "result" = "success" and the recorded
 *         "offsets" map as a string
 * @throws IllegalArgumentException if the cube is not a streaming cube
 */
@RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = {
        "application/json" })
@ResponseBody
public GeneralResponse initStartOffsets(@PathVariable String cubeName) {
    checkCubeExists(cubeName);
    CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName);
    // Only streaming (kafka-sourced) cubes carry partition offsets.
    if (cubeInstance.getSourceType() != ISourceAware.ID_STREAMING) {
        String msg = "Cube '" + cubeName + "' is not a Streaming Cube.";
        throw new IllegalArgumentException(msg);
    }

    final GeneralResponse response = new GeneralResponse();
    try {
        // Seek the latest offset of every kafka partition and persist it in
        // the cube descriptor as the start point of the first build.
        final Map<Integer, Long> startOffsets = KafkaClient.getLatestOffsets(cubeInstance);
        CubeDesc desc = cubeInstance.getDescriptor();
        desc.setPartitionOffsetStart(startOffsets);
        cubeService.getCubeDescManager().updateCubeDesc(desc);

        response.setProperty("result", "success");
        response.setProperty("offsets", startOffsets.toString());
    } catch (Throwable e) {
        // NOTE(review): catching Throwable also converts Errors into
        // RuntimeException; consider narrowing to Exception unless that is
        // intentional here.
        throw new RuntimeException(e);
    }
    return response;
}
/**
 * Deserialize the cube descriptor JSON carried by a cube request.
 * <p>
 * Malformed or unmappable JSON is treated as a user error: the request is
 * marked unsuccessful and {@code null} is returned. Any other I/O failure
 * is a server-side error and is rethrown.
 *
 * @param cubeRequest request carrying the cube descriptor JSON
 * @return the parsed {@link CubeDesc}, or {@code null} if the JSON is invalid
 * @throws InternalErrorException on non-parse I/O failures
 */
private CubeDesc deserializeCubeDesc(CubeRequest cubeRequest) {
    CubeDesc desc = null;
    try {
        logger.debug("Saving cube " + cubeRequest.getCubeDescData());
        desc = JsonUtil.readValue(cubeRequest.getCubeDescData(), CubeDesc.class);
    } catch (JsonParseException | JsonMappingException e) {
        // Identical handling for both parse failures: multi-catch instead of
        // two duplicated catch blocks.
        logger.error("The cube definition is not valid.", e);
        updateRequest(cubeRequest, false, e.getMessage());
    } catch (IOException e) {
        logger.error("Failed to deal with the request.", e);
        throw new InternalErrorException("Failed to deal with the request:" + e.getMessage(), e);
    }
    return desc;
}
/**
 * Record the outcome of processing a cube request: the (possibly invalid)
 * descriptor data is cleared and the success flag and message are set.
 */
private void updateRequest(CubeRequest request, boolean success, String message) {
    request.setMessage(message);
    request.setSuccessful(success);
    request.setCubeDescData("");
}
/**
 * Ensure a cube exists, translating a missing cube into an HTTP 404.
 *
 * @param cubeName name of the cube to look up
 * @throws NotFoundException if no cube with the given name is known
 */
private void checkCubeExists(String cubeName) {
    if (cubeService.getCubeManager().getCube(cubeName) == null) {
        Message msg = MsgPicker.getMsg();
        throw new NotFoundException(String.format(Locale.ROOT, msg.getCUBE_NOT_FOUND(), cubeName));
    }
}
/** Check against the cube's own configured limit of concurrently building segments. */
private void checkBuildingSegment(CubeInstance cube) {
    int maxBuildingSeg = cube.getConfig().getMaxBuildingSegments();
    checkBuildingSegment(cube, maxBuildingSeg);
}
/**
 * Reject the request when the cube already has the maximum allowed number
 * of segments being built.
 *
 * @param cube           the cube to check
 * @param maxBuildingSeg maximum number of concurrently building segments
 * @throws TooManyRequestException if the limit has been reached
 */
private void checkBuildingSegment(CubeInstance cube, int maxBuildingSeg) {
    // Read the size once instead of twice.
    int building = cube.getBuildingSegments().size();
    if (building >= maxBuildingSeg) {
        throw new TooManyRequestException(
                "There are already " + building + " building segments; ");
    }
}
/**
 * Migrate a cube to another project.
 *
 * @param cube    name of the cube to migrate
 * @param project name of the destination project
 * @throws NotFoundException if the cube does not exist
 */
@RequestMapping(value = "/{cube}/{project}/migrate", method = { RequestMethod.POST })
@ResponseBody
public void migrateCube(@PathVariable String cube, @PathVariable String project) {
    // Fail fast with 404 (consistent with the other endpoints) instead of
    // handing a null CubeInstance to the service layer.
    checkCubeExists(cube);
    CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cube);
    cubeService.migrateCube(cubeInstance, project);
}
/** Setter-injection hook for the cube service (used by tests and wiring). */
public void setCubeService(CubeService cubeService) {
    this.cubeService = cubeService;
}
/** Setter-injection hook for the job service (used by tests and wiring). */
public void setJobService(JobService jobService) {
    this.jobService = jobService;
}
} | KYLIN-3720 add column family check when save/update cube desc
| server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java | KYLIN-3720 add column family check when save/update cube desc | <ide><path>erver-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
<ide> import org.apache.kylin.cube.model.CubeBuildTypeEnum;
<ide> import org.apache.kylin.cube.model.CubeDesc;
<ide> import org.apache.kylin.cube.model.CubeJoinedFlatTableDesc;
<add>import org.apache.kylin.cube.model.HBaseColumnDesc;
<add>import org.apache.kylin.cube.model.HBaseColumnFamilyDesc;
<ide> import org.apache.kylin.cube.model.RowKeyColDesc;
<ide> import org.apache.kylin.dimension.DimensionEncodingFactory;
<ide> import org.apache.kylin.engine.mr.common.CuboidStatsReaderUtil;
<ide> import org.apache.kylin.job.exception.JobException;
<ide> import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
<ide> import org.apache.kylin.metadata.model.ISourceAware;
<add>import org.apache.kylin.metadata.model.MeasureDesc;
<ide> import org.apache.kylin.metadata.model.SegmentRange;
<ide> import org.apache.kylin.metadata.model.SegmentRange.TSRange;
<ide> import org.apache.kylin.metadata.project.ProjectInstance;
<ide> throw new BadRequestException("Invalid Cube name, only letters, numbers and underscore supported.");
<ide> }
<ide>
<add> validateColumnFamily(desc);
<add>
<ide> try {
<ide> desc.setUuid(RandomUtil.randomUUID().toString());
<ide> String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
<ide> return cubeRequest;
<ide> }
<ide>
<add> //column family metrics may not match the real metrics when editing cube by json,see MTHDP-5091
<add> private void validateColumnFamily(CubeDesc cubeDesc) {
<add> Set<String> columnFamilyMetricsSet = Sets.newHashSet();
<add> for (HBaseColumnFamilyDesc hBaseColumnFamilyDesc : cubeDesc.getHbaseMapping().getColumnFamily()) {
<add> for (HBaseColumnDesc hBaseColumnDesc : hBaseColumnFamilyDesc.getColumns()) {
<add> for (String columnName : hBaseColumnDesc.getMeasureRefs()) {
<add> columnFamilyMetricsSet.add(columnName);
<add> }
<add> }
<add> }
<add> for (MeasureDesc measureDesc : cubeDesc.getMeasures()) {
<add> if (!columnFamilyMetricsSet.contains(measureDesc.getName())) {
<add> throw new BadRequestException("column family lack measure:" + measureDesc.getName());
<add> }
<add> }
<add> if (cubeDesc.getMeasures().size() != columnFamilyMetricsSet.size()) {
<add> throw new BadRequestException(
<add> "the number of input measure and the number of measure defined in cubedesc are not consistent");
<add> }
<add> }
<add>
<ide> /**
<ide> * update CubDesc
<ide> *
<ide> updateRequest(cubeRequest, false, error);
<ide> return cubeRequest;
<ide> }
<add>
<add> validateColumnFamily(desc);
<ide>
<ide> //cube renaming is not allowed
<ide> if (!cube.getDescriptor().getName().equalsIgnoreCase(desc.getName())) { |
|
Java | apache-2.0 | 8e44577aa7cd828d8c9f3f3e06425b28aeff0bdb | 0 | mohanaraosv/commons-pool,mohanaraosv/commons-pool,mohanaraosv/commons-pool | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.pool2.impl;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import org.apache.commons.pool2.BaseObjectPool;
import org.apache.commons.pool2.ObjectPool;
import org.apache.commons.pool2.PoolUtils;
import org.apache.commons.pool2.PoolableObjectFactory;
/**
* A configurable {@link ObjectPool} implementation.
* <p>
* When coupled with the appropriate {@link PoolableObjectFactory},
* <tt>GenericObjectPool</tt> provides robust pooling functionality for
* arbitrary objects.
* <p>
* A <tt>GenericObjectPool</tt> provides a number of configurable parameters:
* <ul>
* <li>
* {@link #setMaxTotal <i>maxTotal</i>} controls the maximum number of
* objects that can be allocated by the pool (checked out to clients, or idle
* awaiting checkout) at a given time. When non-positive, there is no limit to
* the number of objects that can be managed by the pool at one time. When
* {@link #setMaxTotal <i>maxTotal</i>} is reached, the pool is said to be
* exhausted. The default setting for this parameter is 8.</li>
* <li>
* {@link #setMaxIdle <i>maxIdle</i>} controls the maximum number of objects
* that can sit idle in the pool at any time. When negative, there is no limit
* to the number of objects that may be idle at one time. The default setting
* for this parameter is 8.</li>
* <li>
* {@link #getBlockWhenExhausted} specifies the
* behavior of the {@link #borrowObject} method when the pool is exhausted:
* <ul>
* <li>When {@link #getBlockWhenExhausted} is false,
* {@link #borrowObject} will throw a {@link NoSuchElementException}</li>
* <li>When {@link #getBlockWhenExhausted} is true,
* {@link #borrowObject} will block (invoke
* {@link Object#wait()}) until a new or idle object is available. If a
* non-negative {@link #setMaxWait <i>maxWait</i>} value is supplied, then
* {@link #borrowObject} will block for at most that many milliseconds, after
* which a {@link NoSuchElementException} will be thrown. If {@link #setMaxWait
* <i>maxWait</i>} is negative, the {@link #borrowObject} method will block
* indefinitely.</li>
* </ul>
* The default {@link #getBlockWhenExhausted} is true
* and the default <code>maxWait</code> setting is
* -1. By default, therefore, <code>borrowObject</code> will block indefinitely
* until an idle instance becomes available.</li>
* <li>When {@link #setTestOnBorrow <i>testOnBorrow</i>} is set, the pool will
* attempt to validate each object before it is returned from the
* {@link #borrowObject} method. (Using the provided factory's
* {@link PoolableObjectFactory#validateObject} method.) Objects that fail to
* validate will be dropped from the pool, and a different object will be
* borrowed. The default setting for this parameter is <code>false.</code></li>
* <li>When {@link #setTestOnReturn <i>testOnReturn</i>} is set, the pool will
* attempt to validate each object before it is returned to the pool in the
* {@link #returnObject} method. (Using the provided factory's
* {@link PoolableObjectFactory#validateObject} method.) Objects that fail to
* validate will be dropped from the pool. The default setting for this
* parameter is <code>false.</code></li>
* </ul>
* <p>
* Optionally, one may configure the pool to examine and possibly evict objects
* as they sit idle in the pool and to ensure that a minimum number of idle
* objects are available. This is performed by an "idle object eviction" thread,
* which runs asynchronously. Caution should be used when configuring this
* optional feature. Eviction runs contend with client threads for access to
* objects in the pool, so if they run too frequently performance issues may
* result. The idle object eviction thread may be configured using the following
* attributes:
* <ul>
* <li>
* {@link #setTimeBetweenEvictionRunsMillis
* <i>timeBetweenEvictionRunsMillis</i>} indicates how long the eviction thread
* should sleep before "runs" of examining idle objects. When non-positive, no
* eviction thread will be launched. The default setting for this parameter is
* -1 (i.e., idle object eviction is disabled by default).</li>
* <li>
* {@link #setMinEvictableIdleTimeMillis <i>minEvictableIdleTimeMillis</i>}
* specifies the minimum amount of time that an object may sit idle in the pool
* before it is eligible for eviction due to idle time. When non-positive, no
* object will be dropped from the pool due to idle time alone. This setting has
* no effect unless <code>timeBetweenEvictionRunsMillis > 0.</code> The default
* setting for this parameter is 30 minutes.</li>
* <li>
* {@link #setTestWhileIdle <i>testWhileIdle</i>} indicates whether or not
* idle objects should be validated using the factory's
* {@link PoolableObjectFactory#validateObject} method. Objects that fail to
* validate will be dropped from the pool. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting for this
* parameter is <code>false.</code></li>
* <li>
* {@link #setSoftMinEvictableIdleTimeMillis
* <i>softMinEvictableIdleTimeMillis</i>} specifies the minimum amount of time
* an object may sit idle in the pool before it is eligible for eviction by the
* idle object evictor (if any), with the extra condition that at least
* "minIdle" object instances remain in the pool. This setting has no
* effect unless <code>timeBetweenEvictionRunsMillis > 0.</code> and it is
* superseded by {@link #setMinEvictableIdleTimeMillis
* <i>minEvictableIdleTimeMillis</i>} (that is, if
* <code>minEvictableIdleTimeMillis</code> is positive, then
* <code>softMinEvictableIdleTimeMillis</code> is ignored). The default setting
* for this parameter is -1 (disabled).</li>
* <li>
* {@link #setNumTestsPerEvictionRun <i>numTestsPerEvictionRun</i>}
* determines the number of objects examined in each run of the idle object
* evictor. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting for this
* parameter is 3.</li>
* </ul>
* <p>
* <p>
* The pool can be configured to behave as a LIFO queue with respect to idle
* objects - always returning the most recently used object from the pool, or as
* a FIFO queue, where borrowObject always returns the oldest object in the idle
* object pool.
* <ul>
* <li>
* {@link #setLifo <i>lifo</i>} determines whether or not the pool returns
* idle objects in last-in-first-out order. The default setting for this
* parameter is <code>true.</code></li>
* </ul>
* <p>
* GenericObjectPool is not usable without a {@link PoolableObjectFactory}. A
* non-<code>null</code> factory must be provided as a constructor
* argument before the pool is used.
* <p>
* Implementation note: To prevent possible deadlocks, care has been taken to
* ensure that no call to a factory method will occur within a synchronization
* block. See POOL-125 and DBCP-44 for more information.
*
* @see GenericKeyedObjectPool
* @param <T>
* Type of element pooled in this pool.
* @author Rodney Waldhoff
* @author Dirk Verbeeck
* @author Sandy McArthur
* @version $Revision$ $Date: 2011-05-11 13:50:33 +0100 (Wed, 11 May
* 2011) $
* @since Pool 1.0
*/
public class GenericObjectPool<T> extends BaseObjectPool<T>
implements GenericObjectPoolMBean {
// --- constructors -----------------------------------------------
/**
 * Create a new <tt>GenericObjectPool</tt> with default properties.
 *
 * @param factory the factory used to create, validate and destroy the
 *                objects managed by this pool; must not be <code>null</code>
 */
public GenericObjectPool(PoolableObjectFactory<T> factory) {
    this(factory, new GenericObjectPoolConfig<T>());
}
/**
 * Create a new <tt>GenericObjectPool</tt> using the settings carried by the
 * given configuration object. The configuration is copied field by field,
 * the evictor is started (if configured), usage statistics are initialised
 * and, when JMX is enabled in the configuration, the pool registers itself
 * as an MBean.
 *
 * @param factory the factory used to create, validate and destroy the
 *                objects managed by this pool
 * @param config  the pool configuration to copy settings from
 */
public GenericObjectPool(PoolableObjectFactory<T> factory,
        GenericObjectPoolConfig<T> config) {
    this.factory = factory;
    this.lifo = config.getLifo();
    this.maxTotal = config.getMaxTotal();
    this.maxIdle = config.getMaxIdle();
    this.maxWait = config.getMaxWait();
    this.minEvictableIdleTimeMillis =
            config.getMinEvictableIdleTimeMillis();
    this.minIdle = config.getMinIdle();
    this.numTestsPerEvictionRun = config.getNumTestsPerEvictionRun();
    this.softMinEvictableIdleTimeMillis =
            config.getSoftMinEvictableIdleTimeMillis();
    this.testOnBorrow = config.getTestOnBorrow();
    this.testOnReturn = config.getTestOnReturn();
    this.testWhileIdle = config.getTestWhileIdle();
    this.timeBetweenEvictionRunsMillis =
            config.getTimeBetweenEvictionRunsMillis();
    this.blockWhenExhausted = config.getBlockWhenExhausted();

    // Starts the idle-object evictor only when the interval is positive.
    startEvictor(timeBetweenEvictionRunsMillis);

    initStats();

    // JMX Registration: try object names "<base><prefix><i>" with increasing
    // i until a free name is found; any non-recoverable registration problem
    // silently skips JMX registration rather than failing pool construction.
    if (config.isJmxEnabled()) {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        String jmxNamePrefix = config.getJmxNamePrefix();
        int i = 1;
        boolean registered = false;
        while (!registered) {
            try {
                ObjectName oname =
                    new ObjectName(ONAME_BASE + jmxNamePrefix + i);
                mbs.registerMBean(this, oname);
                this.oname = oname;
                registered = true;
            } catch (MalformedObjectNameException e) {
                if (GenericObjectPoolConfig.DEFAULT_JMX_NAME_PREFIX.equals(
                        jmxNamePrefix)) {
                    // Shouldn't happen. Skip registration if it does.
                    registered = true;
                } else {
                    // Must be an invalid name prefix. Use the default
                    // instead.
                    jmxNamePrefix =
                        GenericObjectPoolConfig.DEFAULT_JMX_NAME_PREFIX;
                }
            } catch (InstanceAlreadyExistsException e) {
                // Increment the index and try again
                i++;
            } catch (MBeanRegistrationException e) {
                // Shouldn't happen. Skip registration if it does.
                registered = true;
            } catch (NotCompliantMBeanException e) {
                // Shouldn't happen. Skip registration if it does.
                registered = true;
            }
        }
    }
}
// --- public methods ---------------------------------------------
// --- configuration methods --------------------------------------
/**
 * Returns the cap on the total number of instances managed by this pool,
 * counting both idle instances and instances checked out to clients. A
 * non-positive value means the pool size is unbounded.
 *
 * @return the maximum number of instances managed by the pool
 * @see #setMaxTotal
 */
public int getMaxTotal() {
    return this.maxTotal;
}
/**
 * Sets the cap on the total number of instances managed by this pool
 * (checked out to clients plus idle). Use a negative value to remove the
 * limit.
 *
 * @param maxTotal
 *            the new cap on the total number of managed instances;
 *            negative for no limit
 * @see #getMaxTotal
 */
public void setMaxTotal(int maxTotal) {
    this.maxTotal = maxTotal;
}
/**
 * Returns whether to block when the {@link #borrowObject} method is
 * invoked when the pool is exhausted (the maximum number of "active"
 * objects has been reached).
 *
 * @return true if should block when the pool is exhausted
 * @see #setBlockWhenExhausted
 */
public boolean getBlockWhenExhausted() {
    return blockWhenExhausted;
}
/**
 * Sets whether {@link #borrowObject} blocks when the pool is exhausted
 * (the maximum number of "active" objects has been reached).
 *
 * @param blockWhenExhausted true to block when the pool is exhausted
 * @see #getBlockWhenExhausted
 */
public void setBlockWhenExhausted(boolean blockWhenExhausted) {
    this.blockWhenExhausted = blockWhenExhausted;
}
/**
 * Returns how long (in milliseconds) {@link #borrowObject} may block
 * before failing when the pool is exhausted and
 * {@link #getBlockWhenExhausted} is true. A value below 0 means block
 * indefinitely.
 *
 * @return the maximum number of milliseconds to block while borrowing
 * @see #setMaxWait
 * @see #setBlockWhenExhausted
 */
public long getMaxWait() {
    return this.maxWait;
}
/**
 * Sets how long (in milliseconds) {@link #borrowObject} may block before
 * failing when the pool is exhausted and {@link #getBlockWhenExhausted} is
 * true. A value below 0 means block indefinitely.
 *
 * @param maxWait
 *            the maximum number of milliseconds to block while borrowing
 * @see #getMaxWait
 * @see #getBlockWhenExhausted
 */
public void setMaxWait(long maxWait) {
    this.maxWait = maxWait;
}
/**
 * Returns the cap on the number of idle instances held by the pool.
 *
 * @return the maximum number of idle instances
 * @see #setMaxIdle
 */
public int getMaxIdle() {
    return this.maxIdle;
}
/**
 * Sets the cap on the number of "idle" instances in the pool. If maxIdle is
 * set too low on heavily loaded systems it is possible you will see objects
 * being destroyed and almost immediately new objects being created. This is
 * a result of the active threads momentarily returning objects faster than
 * they are requesting them, causing the number of idle objects to rise
 * above maxIdle. The best value for maxIdle for heavily loaded system will
 * vary but the default is a good starting point.
 *
 * @param maxIdle
 *            The cap on the number of "idle" instances in the pool. Use a
 *            negative value to indicate an unlimited number of idle
 *            instances.
 * @see #getMaxIdle
 */
public void setMaxIdle(int maxIdle) {
    this.maxIdle = maxIdle;
}
/**
 * Sets the minimum number of idle objects the evictor thread (if running)
 * tries to maintain. Note that no objects are created once
 * <code>numActive + numIdle >= maxActive</code>, and the setting is
 * ignored while the idle object evictor is disabled
 * (<code>timeBetweenEvictionRunsMillis <= 0</code>).
 *
 * @param minIdle
 *            the minimum number of idle objects to maintain
 * @see #getMinIdle
 * @see #getTimeBetweenEvictionRunsMillis()
 */
public void setMinIdle(int minIdle) {
    this.minIdle = minIdle;
}
/**
 * Returns the minimum number of idle objects the evictor thread (if
 * running) tries to maintain. (Note no objects are created when:
 * numActive + numIdle >= maxActive.)
 *
 * @return the minimum number of idle objects
 * @see #setMinIdle
 */
public int getMinIdle() {
    return this.minIdle;
}
/**
 * Whether instances are {@link PoolableObjectFactory#validateObject
 * validated} before {@link #borrowObject} hands them out. An instance that
 * fails validation is dropped and another borrow attempt is made.
 *
 * @return <code>true</code> if objects are validated before being borrowed
 * @see #setTestOnBorrow
 */
public boolean getTestOnBorrow() {
    return this.testOnBorrow;
}
/**
 * Configures whether instances are
 * {@link PoolableObjectFactory#validateObject validated} before
 * {@link #borrowObject} hands them out. An instance that fails validation
 * is dropped and another borrow attempt is made.
 *
 * @param testOnBorrow
 *            <code>true</code> to validate objects before they are borrowed
 * @see #getTestOnBorrow
 */
public void setTestOnBorrow(boolean testOnBorrow) {
    this.testOnBorrow = testOnBorrow;
}
/**
 * Whether instances are {@link PoolableObjectFactory#validateObject
 * validated} inside {@link #returnObject} before re-entering the pool.
 *
 * @return <code>true</code> when objects are validated on return
 * @see #setTestOnReturn
 */
public boolean getTestOnReturn() {
    return this.testOnReturn;
}
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} before being
* returned to the pool within the {@link #returnObject}.
*
* @param testOnReturn
* <code>true</code> so objects will be validated after returned
* to {@link #returnObject}.
* @see #getTestOnReturn
*/
public void setTestOnReturn(boolean testOnReturn) {
    // Takes effect on the next return; in-flight returns use the old value.
    this.testOnReturn = testOnReturn;
}
/**
* Returns the number of milliseconds to sleep between runs of the idle
* object evictor thread. When non-positive, no idle object evictor thread
* will be run.
*
* @return number of milliseconds to sleep between evictor runs.
* @see #setTimeBetweenEvictionRunsMillis
*/
public long getTimeBetweenEvictionRunsMillis() {
    // Non-positive means the evictor thread is disabled.
    return timeBetweenEvictionRunsMillis;
}
/**
* Sets the number of milliseconds to sleep between runs of the idle object
* evictor thread. When non-positive, no idle object evictor thread will be
* run.
*
* @param timeBetweenEvictionRunsMillis
* number of milliseconds to sleep between evictor runs.
* @see #getTimeBetweenEvictionRunsMillis
*/
public void setTimeBetweenEvictionRunsMillis(
        long timeBetweenEvictionRunsMillis) {
    this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
    // Restart (or stop, when non-positive) the evictor with the new period.
    startEvictor(timeBetweenEvictionRunsMillis);
}
/**
* Returns the max number of objects to examine during each run of the idle
* object evictor thread (if any).
*
* @return max number of objects to examine during each evictor run.
* @see #setNumTestsPerEvictionRun
* @see #setTimeBetweenEvictionRunsMillis
*/
public int getNumTestsPerEvictionRun() {
    // Negative values mean "a fraction of the idle count" — see getNumTests().
    return numTestsPerEvictionRun;
}
/**
* Sets the max number of objects to examine during each run of the idle
* object evictor thread (if any).
* <p>
* When a negative value is supplied,
* <tt>ceil({@link #getNumIdle})/abs({@link #getNumTestsPerEvictionRun})</tt>
* tests will be run. That is, when the value is <i>-n</i>, roughly one
* <i>n</i>th of the idle objects will be tested per run. When the value is
* positive, the number of tests actually performed in each run will be the
* minimum of this value and the number of instances idle in the pool.
*
* @param numTestsPerEvictionRun
* max number of objects to examine during each evictor run.
* @see #getNumTestsPerEvictionRun
* @see #setTimeBetweenEvictionRunsMillis
*/
public void setNumTestsPerEvictionRun(int numTestsPerEvictionRun) {
    // Applied on the next evictor run.
    this.numTestsPerEvictionRun = numTestsPerEvictionRun;
}
/**
* Returns the minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction by the idle object evictor (if any).
*
* @return minimum amount of time an object may sit idle in the pool before
* it is eligible for eviction.
* @see #setMinEvictableIdleTimeMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public long getMinEvictableIdleTimeMillis() {
    // Non-positive disables idle-time-based eviction.
    return minEvictableIdleTimeMillis;
}
/**
* Sets the minimum amount of time an object may sit idle in the pool before
* it is eligible for eviction by the idle object evictor (if any). When
* non-positive, no objects will be evicted from the pool due to idle time
* alone.
*
* @param minEvictableIdleTimeMillis
* minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction.
* @see #getMinEvictableIdleTimeMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public void setMinEvictableIdleTimeMillis(long minEvictableIdleTimeMillis) {
    // Applied on the next evictor run.
    this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
}
/**
* Returns the minimum amount of time
* an object may sit idle in the pool before it is eligible for eviction by the
* idle object evictor (if any), with the extra condition that at least
* "minIdle" object instances remain in the pool. This setting has no
* effect unless {@code timeBetweenEvictionRunsMillis > 0.} and it is
* superseded by {@link #setMinEvictableIdleTimeMillis
* <i>minEvictableIdleTimeMillis</i>} (that is, if
* {@code minEvictableIdleTimeMillis} is positive, then
* {@code softMinEvictableIdleTimeMillis} is ignored). The default setting
* for this parameter is -1 (disabled).
*
* @return minimum amount of time an object may sit idle in the pool before
* it is eligible for eviction if minIdle instances are available
* @since Pool 1.3
*/
public long getSoftMinEvictableIdleTimeMillis() {
    // Only consulted when minEvictableIdleTimeMillis is non-positive.
    return softMinEvictableIdleTimeMillis;
}
/**
* Sets the minimum amount of time an object may sit idle in the pool before
* it is eligible for eviction by the idle object evictor (if any), with the
* extra condition that at least "minIdle" object instances remain in the
* pool. When non-positive, no objects will be evicted from the pool due to
* idle time alone.
*
* @param softMinEvictableIdleTimeMillis
* minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction.
* @since Pool 1.3
* @see #getSoftMinEvictableIdleTimeMillis
*/
public void setSoftMinEvictableIdleTimeMillis(
        long softMinEvictableIdleTimeMillis) {
    // Applied on the next evictor run.
    this.softMinEvictableIdleTimeMillis = softMinEvictableIdleTimeMillis;
}
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} by the idle object
* evictor (if any). If an object fails to validate, it will be dropped from
* the pool.
*
* @return <code>true</code> when objects will be validated by the evictor.
* @see #setTestWhileIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public boolean getTestWhileIdle() {
    // Consulted by evict() at the start of each run.
    return testWhileIdle;
}
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} by the idle object
* evictor (if any). If an object fails to validate, it will be dropped from
* the pool.
*
* @param testWhileIdle
* <code>true</code> so objects will be validated by the evictor.
* @see #getTestWhileIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public void setTestWhileIdle(boolean testWhileIdle) {
    // Only has an effect if the evictor thread is running.
    this.testWhileIdle = testWhileIdle;
}
/**
* Whether or not the idle object pool acts as a LIFO queue. True means that
* borrowObject returns the most recently used ("last in") idle object in
* the pool (if there are idle instances available). False means that the
* pool behaves as a FIFO queue - objects are taken from the idle object
* pool in the order that they are returned to the pool.
*
 * @return <code>true</code> if the pool is configured to act as a LIFO queue
* @since 1.4
*/
public boolean getLifo() {
    // true = stack semantics for idle objects, false = queue semantics.
    return lifo;
}
/**
* Sets the LIFO property of the pool. True means that borrowObject returns
* the most recently used ("last in") idle object in the pool (if there are
* idle instances available). False means that the pool behaves as a FIFO
* queue - objects are taken from the idle object pool in the order that
* they are returned to the pool.
*
* @param lifo
* the new value for the LIFO property
* @since 1.4
*/
public void setLifo(boolean lifo) {
    // Affects which end of idleObjects is used by borrow/return/evict.
    this.lifo = lifo;
}
/**
* Sets my configuration.
*
* @param conf
* configuration to use.
* @see GenericObjectPoolConfig
*/
public void setConfig(GenericObjectPoolConfig<T> conf) {
    // Bulk-apply every tunable from the config object. Note that
    // setTimeBetweenEvictionRunsMillis() also (re)starts the evictor, so
    // the eviction-related properties are applied before it.
    setMaxIdle(conf.getMaxIdle());
    setMinIdle(conf.getMinIdle());
    setMaxTotal(conf.getMaxTotal());
    setMaxWait(conf.getMaxWait());
    setBlockWhenExhausted(conf.getBlockWhenExhausted());
    setTestOnBorrow(conf.getTestOnBorrow());
    setTestOnReturn(conf.getTestOnReturn());
    setTestWhileIdle(conf.getTestWhileIdle());
    setNumTestsPerEvictionRun(conf.getNumTestsPerEvictionRun());
    setMinEvictableIdleTimeMillis(conf.getMinEvictableIdleTimeMillis());
    setTimeBetweenEvictionRunsMillis(
            conf.getTimeBetweenEvictionRunsMillis());
    setSoftMinEvictableIdleTimeMillis(
            conf.getSoftMinEvictableIdleTimeMillis());
    setLifo(conf.getLifo());
}
// -- ObjectPool methods ------------------------------------------
/**
* <p>
* Borrows an object from the pool.
* </p>
* <p>
* If there is an idle instance available in the pool, then either the
* most-recently returned (if {@link #getLifo() lifo} == true) or "oldest"
* (lifo == false) instance sitting idle in the pool will be activated and
* returned. If activation fails, or {@link #getTestOnBorrow() testOnBorrow}
* is set to true and validation fails, the instance is destroyed and the
* next available instance is examined. This continues until either a valid
* instance is returned or there are no more idle instances available.
* </p>
* <p>
* If there are no idle instances available in the pool, behavior depends on
* the {@link #getMaxTotal() maxTotal} and (if applicable)
* {@link #getBlockWhenExhausted()} and
* {@link #getMaxWait() maxWait} properties. If the number of instances
* checked out from the pool is less than <code>maxActive,</code> a new
* instance is created, activated and (if applicable) validated and returned
* to the caller.
* </p>
* <p>
* If the pool is exhausted (no available idle instances and no capacity to
* create new ones), this method will either block (
* {@link #getBlockWhenExhausted()} is true) or throw a
* <code>NoSuchElementException</code> ({@link #getBlockWhenExhausted()} is false). The
* length of time that this method will block when
* {@link #getBlockWhenExhausted()} is true is determined by
* the {@link #getMaxWait() maxWait} property.
* </p>
* <p>
* When the pool is exhausted, multiple calling threads may be
* simultaneously blocked waiting for instances to become available. As of
* pool 1.5, a "fairness" algorithm has been implemented to ensure that
* threads receive available instances in request arrival order.
* </p>
*
* @return object instance
* @throws NoSuchElementException
* if an instance cannot be returned
*/
@Override
public T borrowObject() throws Exception {
    // Delegates to the timed variant using the configured default maxWait.
    return borrowObject(maxWait);
}
/**
* Borrow an object from the pool using a user specific waiting time which
* only applies if {@link #getBlockWhenExhausted()} is true.
*
* @param borrowMaxWait The time to wait in milliseconds for an object to
* become available
* @return object instance
* @throws NoSuchElementException
* if an instance cannot be returned
*/
public T borrowObject(long borrowMaxWait) throws Exception {
    assertOpen();
    PooledObject<T> p = null;
    // Get local copy of current config so it is consistent for entire
    // method execution
    boolean blockWhenExhausted = this.blockWhenExhausted;
    boolean create;
    long waitTime = 0;
    while (p == null) {
        create = false;
        if (blockWhenExhausted) {
            // Try an idle object first, then try to create a new one;
            // only block when both fail.
            p = idleObjects.pollFirst();
            if (p == null) {
                create = true;
                p = create();
            }
            if (p == null) {
                if (borrowMaxWait < 0) {
                    // Negative wait blocks indefinitely.
                    p = idleObjects.takeFirst();
                } else {
                    waitTime = System.currentTimeMillis();
                    p = idleObjects.pollFirst(borrowMaxWait,
                            TimeUnit.MILLISECONDS);
                    waitTime = System.currentTimeMillis() - waitTime;
                }
            }
            if (p == null) {
                throw new NoSuchElementException(
                        "Timeout waiting for idle object");
            }
            if (!p.allocate()) {
                // Lost the race to another borrower; loop and retry.
                p = null;
            }
        } else {
            p = idleObjects.pollFirst();
            if (p == null) {
                create = true;
                p = create();
            }
            if (p == null) {
                throw new NoSuchElementException("Pool exhausted");
            }
            if (!p.allocate()) {
                p = null;
            }
        }
        if (p != null) {
            try {
                factory.activateObject(p.getObject());
            } catch (Exception e) {
                try {
                    destroy(p);
                } catch (Exception e1) {
                    // Ignore - activation failure is more important
                }
                p = null;
                if (create) {
                    // A freshly created object failed activation: give up
                    // rather than looping (the next attempt would fail too).
                    NoSuchElementException nsee = new NoSuchElementException(
                            "Unable to activate object");
                    nsee.initCause(e);
                    throw nsee;
                }
            }
            if (p != null && getTestOnBorrow()) {
                boolean validate = false;
                Throwable validationThrowable = null;
                try {
                    validate = factory.validateObject(p.getObject());
                } catch (Throwable t) {
                    PoolUtils.checkRethrow(t);
                    // BUG FIX: record the failure so it is reported as the
                    // cause of the NoSuchElementException below (previously
                    // validationThrowable was never assigned, so the
                    // exception always had a null cause).
                    validationThrowable = t;
                }
                if (!validate) {
                    try {
                        destroy(p);
                        destroyedByBorrowValidationCount.incrementAndGet();
                    } catch (Exception e) {
                        // Ignore - validation failure is more important
                    }
                    p = null;
                    if (create) {
                        NoSuchElementException nsee = new NoSuchElementException(
                                "Unable to validate object");
                        nsee.initCause(validationThrowable);
                        throw nsee;
                    }
                }
            }
        }
    }
    // Successful borrow: update the rolling statistics caches.
    borrowedCount.incrementAndGet();
    synchronized (idleTimes) {
        idleTimes.add(Long.valueOf(p.getIdleTimeMillis()));
        idleTimes.poll();
    }
    synchronized (waitTimes) {
        waitTimes.add(Long.valueOf(waitTime));
        waitTimes.poll();
    }
    synchronized (maxBorrowWaitTimeMillisLock) {
        if (waitTime > maxBorrowWaitTimeMillis) {
            maxBorrowWaitTimeMillis = waitTime;
        }
    }
    return p.getObject();
}
/**
* <p>
* Returns an object instance to the pool.
* </p>
* <p>
* If {@link #getMaxIdle() maxIdle} is set to a positive value and the
* number of idle instances has reached this value, the returning instance
* is destroyed.
* </p>
* <p>
* If {@link #getTestOnReturn() testOnReturn} == true, the returning
* instance is validated before being returned to the idle instance pool. In
* this case, if validation fails, the instance is destroyed.
* </p>
*
* @param obj
* instance to return to the pool
*/
@Override
public void returnObject(T obj) {
    PooledObject<T> p = allObjects.get(obj);
    if (p == null) {
        throw new IllegalStateException(
                "Returned object not currently part of this pool");
    }
    // Capture the checkout duration before any state transitions.
    long activeTime = p.getActiveTimeMillis();
    if (getTestOnReturn()) {
        if (!factory.validateObject(obj)) {
            try {
                destroy(p);
            } catch (Exception e) {
                // Swallowed: destruction failure must not mask the return.
            }
            updateStatsReturn(activeTime);
            return;
        }
    }
    try {
        factory.passivateObject(obj);
    } catch (Exception e1) {
        try {
            destroy(p);
        } catch (Exception e) {
            // Swallowed: destruction failure must not mask the return.
        }
        updateStatsReturn(activeTime);
        return;
    }
    if (!p.deallocate()) {
        // BUG FIX: message previously misspelled "retured".
        throw new IllegalStateException(
                "Object has already been returned to this pool");
    }
    int maxIdle = getMaxIdle();
    if (isClosed() || maxIdle > -1 && maxIdle <= idleObjects.size()) {
        // Pool closed or idle cap reached: destroy instead of queueing.
        try {
            destroy(p);
        } catch (Exception e) {
            // Swallowed: best-effort destruction.
        }
    } else {
        if (getLifo()) {
            idleObjects.addFirst(p);
        } else {
            idleObjects.addLast(p);
        }
    }
    updateStatsReturn(activeTime);
}
private void updateStatsReturn(long activeTime) {
    // Record a completed checkout in the rolling statistics cache.
    returnedCount.incrementAndGet();
    synchronized (activeTimes) {
        activeTimes.add(Long.valueOf(activeTime));
        // Drop the oldest entry to keep the cache at a fixed size.
        activeTimes.poll();
    }
}
/**
* {@inheritDoc}
* <p>
* Activation of this method decrements the active count and attempts to
* destroy the instance.
* </p>
*
* @throws Exception
* if the configured {@link PoolableObjectFactory} throws an
* exception destroying obj
*/
@Override
public void invalidateObject(T obj) throws Exception {
    PooledObject<T> p = allObjects.get(obj);
    if (p == null) {
        // Only objects currently managed by this pool may be invalidated.
        throw new IllegalStateException(
                "Object not currently part of this pool");
    }
    destroy(p);
}
/**
* Clears any objects sitting idle in the pool by removing them from the
* idle instance pool and then invoking the configured
* {@link PoolableObjectFactory#destroyObject(Object)} method on each idle
* instance.
* <p>
* Implementation notes:
* <ul>
* <li>This method does not destroy or effect in any way instances that are
* checked out of the pool when it is invoked.</li>
* <li>Invoking this method does not prevent objects being returned to the
* idle instance pool, even during its execution. It locks the pool only
* during instance removal. Additional instances may be returned while
* removed items are being destroyed.</li>
* <li>Exceptions encountered destroying idle instances are swallowed.</li>
* </ul>
* </p>
*/
@Override
public void clear() {
    // Drain the idle queue, destroying each instance as it is removed.
    // Returns may continue adding objects concurrently; only instances
    // present at poll time are affected.
    for (PooledObject<T> idle = idleObjects.poll(); idle != null;
            idle = idleObjects.poll()) {
        try {
            destroy(idle);
        } catch (Exception e) {
            // Best-effort cleanup: destruction failures are swallowed.
        }
    }
}
/**
* Return the number of instances currently borrowed from this pool.
*
* @return the number of instances currently borrowed from this pool
*/
@Override
public int getNumActive() {
    // Active = everything under management minus what is sitting idle.
    return allObjects.size() - idleObjects.size();
}
/**
* Return the number of instances currently idle in this pool.
*
* @return the number of instances currently idle in this pool
*/
@Override
public int getNumIdle() {
    // Snapshot; may change immediately under concurrent borrow/return.
    return idleObjects.size();
}
/**
* <p>
* Closes the pool. Once the pool is closed, {@link #borrowObject()} will
* fail with IllegalStateException, but {@link #returnObject(Object)} and
* {@link #invalidateObject(Object)} will continue to work, with returned
* objects destroyed on return.
* </p>
* <p>
* Destroys idle instances in the pool by invoking {@link #clear()}.
* </p>
*
* @throws Exception
*/
@Override
public void close() throws Exception {
    // Fast path: already closed.
    if (isClosed()) {
        return;
    }
    synchronized (closeLock) {
        // Re-check under the lock so shutdown runs exactly once.
        if (isClosed()) {
            return;
        }
        super.close();
        clear();
        // A non-positive delay stops the evictor.
        startEvictor(-1L);
        // Deregister from JMX if this pool was registered there.
        if (oname != null) {
            ManagementFactory.getPlatformMBeanServer().unregisterMBean(
                    oname);
        }
    }
}
/**
* <p>
* Perform <code>numTests</code> idle object eviction tests, evicting
* examined objects that meet the criteria for eviction. If
* <code>testWhileIdle</code> is true, examined objects are validated when
* visited (and removed if invalid); otherwise only objects that have been
 * idle for more than <code>minEvictableIdleTimeMillis</code> are removed.
* </p>
* <p>
* Successive activations of this method examine objects in in sequence,
* cycling through objects in oldest-to-youngest order.
* </p>
*
* @throws Exception
* if the pool is closed or eviction fails.
*/
public void evict() throws Exception {
    assertOpen();
    // Nothing to do when the idle queue is empty.
    if (idleObjects.size() == 0) {
        return;
    }
    PooledObject<T> underTest = null;
    // Snapshot the config for a consistent run.
    boolean testWhileIdle = getTestWhileIdle();
    // MAX_VALUE effectively disables the corresponding timeout when unset.
    long idleEvictTime = Long.MAX_VALUE;
    long idleSoftEvictTime = Long.MAX_VALUE;
    if (getMinEvictableIdleTimeMillis() > 0) {
        idleEvictTime = getMinEvictableIdleTimeMillis();
    }
    if (getSoftMinEvictableIdleTimeMillis() > 0) {
        idleSoftEvictTime = getSoftMinEvictableIdleTimeMillis();
    }
    for (int i = 0, m = getNumTests(); i < m; i++) {
        // (Re)start the cursor when exhausted; iteration direction follows
        // LIFO so the longest-idle objects are visited first.
        if (evictionIterator == null || !evictionIterator.hasNext()) {
            if (getLifo()) {
                evictionIterator = idleObjects.descendingIterator();
            } else {
                evictionIterator = idleObjects.iterator();
            }
        }
        if (!evictionIterator.hasNext()) {
            // Pool exhausted, nothing to do here
            return;
        }
        try {
            underTest = evictionIterator.next();
        } catch (NoSuchElementException nsee) {
            // Object was borrowed in another thread
            // Don't count this as an eviction test so reduce i;
            i--;
            evictionIterator = null;
            continue;
        }
        if (!underTest.startEvictionTest()) {
            // Object was borrowed in another thread
            // Don't count this as an eviction test so reduce i;
            i--;
            continue;
        }
        // Evict on hard timeout, or on soft timeout while more than
        // minIdle objects remain in the pool.
        if (idleEvictTime < underTest.getIdleTimeMillis() ||
                (idleSoftEvictTime < underTest.getIdleTimeMillis() &&
                getMinIdle() < idleObjects.size())) {
            destroy(underTest);
            destroyedByEvictorCount.incrementAndGet();
        } else {
            if (testWhileIdle) {
                // Activate, validate, then passivate; destroy on any failure.
                boolean active = false;
                try {
                    factory.activateObject(underTest.getObject());
                    active = true;
                } catch (Exception e) {
                    destroy(underTest);
                    destroyedByEvictorCount.incrementAndGet();
                }
                if (active) {
                    if (!factory.validateObject(underTest.getObject())) {
                        destroy(underTest);
                        destroyedByEvictorCount.incrementAndGet();
                    } else {
                        try {
                            factory.passivateObject(underTest.getObject());
                        } catch (Exception e) {
                            destroy(underTest);
                            destroyedByEvictorCount.incrementAndGet();
                        }
                    }
                }
            }
            if (!underTest.endEvictionTest(idleObjects)) {
                // TODO - May need to add code here once additional states
                // are used
            }
        }
    }
    return;
}
private PooledObject<T> create() throws Exception {
    int localMaxTotal = getMaxTotal();
    // Optimistically claim a creation slot; rolled back below if the pool
    // turns out to be at capacity.
    long newCreateCount = createCount.incrementAndGet();
    if (localMaxTotal > -1 && newCreateCount > localMaxTotal ||
            newCreateCount > Integer.MAX_VALUE) {
        createCount.decrementAndGet();
        // null signals "at capacity" to callers.
        return null;
    }
    T t = null;
    try {
        t = factory.makeObject();
    } catch (Exception e) {
        // Release the claimed slot before propagating.
        createCount.decrementAndGet();
        throw e;
    }
    PooledObject<T> p = new PooledObject<T>(t);
    createdCount.incrementAndGet();
    allObjects.put(t, p);
    return p;
}
// Removes the wrapper from all tracking structures and asks the factory to
// destroy the underlying object. Counters are updated even if the factory
// throws.
private void destroy(PooledObject<T> toDestroy) throws Exception {
    // Mark invalid first so concurrent borrowers cannot allocate it.
    toDestroy.invalidate();
    idleObjects.remove(toDestroy);
    allObjects.remove(toDestroy.getObject());
    try {
        factory.destroyObject(toDestroy.getObject());
    } finally {
        destroyedCount.incrementAndGet();
        createCount.decrementAndGet();
    }
}
/**
* Check to see if we are below our minimum number of objects if so enough
* to bring us back to our minimum.
*
* @throws Exception
* when {@link #addObject()} fails.
*/
// Tops the idle queue up to the configured minimum, stopping early if the
// pool is at capacity.
private void ensureMinIdle() throws Exception {
    final int floor = getMinIdle();
    if (floor < 1) {
        // Feature disabled.
        return;
    }
    while (idleObjects.size() < floor) {
        PooledObject<T> fresh = create();
        if (fresh == null) {
            // At capacity; another create() would fail the same way.
            break;
        }
        if (getLifo()) {
            idleObjects.addFirst(fresh);
        } else {
            idleObjects.addLast(fresh);
        }
    }
}
/**
* Create an object, and place it into the pool. addObject() is useful for
* "pre-loading" a pool with idle objects.
*/
@Override
public void addObject() throws Exception {
    assertOpen();
    if (factory == null) {
        throw new IllegalStateException(
                "Cannot add objects without a factory.");
    }
    // create() may return null when the pool is at capacity;
    // addIdleObject() tolerates that.
    PooledObject<T> p = create();
    addIdleObject(p);
}
// --- non-public methods ----------------------------------------
private void addIdleObject(PooledObject<T> p) throws Exception {
    // p is null when create() found the pool at capacity; silently skip.
    if (p != null) {
        factory.passivateObject(p.getObject());
        if (getLifo()) {
            idleObjects.addFirst(p);
        } else {
            idleObjects.addLast(p);
        }
    }
}
/**
* Start the eviction thread or service, or when <i>delay</i> is
* non-positive, stop it if it is already running.
*
* @param delay
* milliseconds between evictor runs.
*/
protected final synchronized void startEvictor(long delay) {
    // Always cancel any existing evictor before (re)scheduling so at most
    // one is ever active for this pool.
    if (null != evictor) {
        EvictionTimer.cancel(evictor);
        evictor = null;
    }
    if (delay > 0) {
        evictor = new Evictor();
        EvictionTimer.schedule(evictor, delay, delay);
    }
}
/**
* Returns pool info including {@link #getNumActive()},
* {@link #getNumIdle()} and a list of objects idle in the pool with their
* idle times.
*
* @return string containing debug information
*/
// Builds a human-readable snapshot of the pool: active/idle counts followed
// by one tab-indented entry per idle object.
String debugInfo() {
    StringBuilder sb = new StringBuilder();
    sb.append("Active: ").append(getNumActive()).append("\n");
    sb.append("Idle: ").append(getNumIdle()).append("\n");
    sb.append("Idle Objects:\n");
    for (PooledObject<T> idle : idleObjects) {
        sb.append("\t").append(idle.toString());
    }
    return sb.toString();
}
/**
* Returns the number of tests to be performed in an Evictor run, based on
* the current value of <code>numTestsPerEvictionRun</code> and the number
* of idle instances in the pool.
*
* @see #setNumTestsPerEvictionRun
* @return the number of tests for the Evictor to run
*/
// Resolves the configured numTestsPerEvictionRun against the current idle
// count: non-negative values cap at the idle count; a negative value -n
// means "roughly 1/n of the idle objects per run" (ceiling).
private int getNumTests() {
    final int configured = numTestsPerEvictionRun;
    final int idleCount = idleObjects.size();
    return configured >= 0
            ? Math.min(configured, idleCount)
            : (int) Math.ceil(idleCount / Math.abs((double) configured));
}
//--- JMX specific attributes ----------------------------------------------
// Pre-fills each rolling statistics cache with null placeholders so the
// fixed-size add/poll discipline in the hot paths never underflows.
private void initStats() {
    int remaining = AVERAGE_TIMING_STATS_CACHE_SIZE;
    while (remaining-- > 0) {
        activeTimes.add(null);
        idleTimes.add(null);
        waitTimes.add(null);
    }
}
private long getMeanFromStatsCache(LinkedList<Long> cache) {
    // Copy under the cache's lock, then compute outside it to keep the
    // critical section short.
    List<Long> times = new ArrayList<Long>(AVERAGE_TIMING_STATS_CACHE_SIZE);
    synchronized (cache) {
        times.addAll(cache);
    }
    double result = 0;
    int counter = 0;
    Iterator<Long> iter = times.iterator();
    while (iter.hasNext()) {
        Long time = iter.next();
        // nulls are unfilled placeholder slots from initStats(); skip them.
        if (time != null) {
            counter++;
            // Incremental mean: avoids accumulating a large sum.
            result = result * ((counter - 1) / (double) counter) +
                time.longValue()/(double) counter;
        }
    }
    return (long) result;
}
/** @return lifetime count of successful borrows from this pool. */
public long getBorrowedCount() {
    return borrowedCount.get();
}
/** @return lifetime count of objects returned to this pool. */
public long getReturnedCount() {
    return returnedCount.get();
}
/** @return lifetime count of objects created by the factory for this pool. */
public long getCreatedCount() {
    return createdCount.get();
}
/** @return lifetime count of objects destroyed by this pool. */
public long getDestroyedCount() {
    return destroyedCount.get();
}
/** @return count of objects destroyed by the idle-object evictor. */
public long getDestroyedByEvictorCount() {
    return destroyedByEvictorCount.get();
}
/** @return count of objects destroyed after failing borrow-time validation. */
public long getDestroyedByBorrowValidationCount() {
    return destroyedByBorrowValidationCount.get();
}
/** @return mean time (ms) recently returned objects were checked out. */
public long getMeanActiveTimeMillis() {
    return getMeanFromStatsCache(activeTimes);
}
/** @return mean time (ms) recently borrowed objects had sat idle. */
public long getMeanIdleTimeMillis() {
    return getMeanFromStatsCache(idleTimes);
}
/** @return mean time (ms) recent borrowers waited for an object. */
public long getMeanBorrowWaitTimeMillis() {
    return getMeanFromStatsCache(waitTimes);
}
/** @return maximum time (ms) any borrower has waited for an object. */
public long getMaxBorrowWaitTimeMillis() {
    return maxBorrowWaitTimeMillis;
}
// --- inner classes ----------------------------------------------
/**
* The idle object evictor {@link TimerTask}.
*
* @see GenericObjectPool#setTimeBetweenEvictionRunsMillis
*/
private class Evictor extends TimerTask {
    /**
     * Run pool maintenance. Evict objects qualifying for eviction and then
     * invoke {@link GenericObjectPool#ensureMinIdle()}.
     */
    @Override
    public void run() {
        try {
            evict();
        } catch (Exception e) {
            // ignored
        } catch (OutOfMemoryError oome) {
            // Log problem but give evictor thread a chance to continue in
            // case error is recoverable
            oome.printStackTrace(System.err);
        }
        // Replenish the idle queue even if eviction failed.
        try {
            ensureMinIdle();
        } catch (Exception e) {
            // ignored
        }
    }
}
// --- private attributes ---------------------------------------
/**
* The cap on the number of idle instances in the pool.
*
* @see #setMaxIdle
* @see #getMaxIdle
*/
private volatile int maxIdle = GenericObjectPoolConfig.DEFAULT_MAX_IDLE;
/**
* The cap on the minimum number of idle instances in the pool.
*
* @see #setMinIdle
* @see #getMinIdle
*/
private volatile int minIdle = GenericObjectPoolConfig.DEFAULT_MIN_IDLE;
/**
* The cap on the total number of active instances from the pool.
*
* @see #setMaxTotal
* @see #getMaxTotal
*/
private volatile int maxTotal =
GenericObjectPoolConfig.DEFAULT_MAX_TOTAL;
/**
* The maximum amount of time (in millis) the {@link #borrowObject} method
* should block before throwing an exception when the pool is exhausted and
* {@link #getBlockWhenExhausted()} is true.
* When less than 0, the
* {@link #borrowObject} method may block indefinitely.
*
* @see #setMaxWait
* @see #getMaxWait
* @see #setBlockWhenExhausted
* @see #getBlockWhenExhausted
*/
private volatile long maxWait = GenericObjectPoolConfig.DEFAULT_MAX_WAIT;
/**
* When the {@link #borrowObject} method is invoked when the pool is
* exhausted (the maximum number of "active" objects has been reached)
* should the {@link #borrowObject} method block or not?
*
* @see #setBlockWhenExhausted
* @see #getBlockWhenExhausted
*/
private volatile boolean blockWhenExhausted =
GenericObjectPoolConfig.DEFAULT_BLOCK_WHEN_EXHAUSTED;
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} before being
* returned by the {@link #borrowObject} method. If the object fails to
* validate, it will be dropped from the pool, and we will attempt to borrow
* another.
*
* @see #setTestOnBorrow
* @see #getTestOnBorrow
*/
private volatile boolean testOnBorrow =
GenericObjectPoolConfig.DEFAULT_TEST_ON_BORROW;
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} before being
* returned to the pool within the {@link #returnObject}.
*
* @see #getTestOnReturn
* @see #setTestOnReturn
*/
private volatile boolean testOnReturn =
GenericObjectPoolConfig.DEFAULT_TEST_ON_RETURN;
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} by the idle object
* evictor (if any). If an object fails to validate, it will be dropped from
* the pool.
*
* @see #setTestWhileIdle
* @see #getTestWhileIdle
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private volatile boolean testWhileIdle =
GenericObjectPoolConfig.DEFAULT_TEST_WHILE_IDLE;
/**
* The number of milliseconds to sleep between runs of the idle object
* evictor thread. When non-positive, no idle object evictor thread will be
* run.
*
* @see #setTimeBetweenEvictionRunsMillis
* @see #getTimeBetweenEvictionRunsMillis
*/
private volatile long timeBetweenEvictionRunsMillis =
GenericObjectPoolConfig.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS;
/**
* The max number of objects to examine during each run of the idle object
* evictor thread (if any).
* <p>
* When a negative value is supplied,
* <tt>ceil({@link #getNumIdle})/abs({@link #getNumTestsPerEvictionRun})</tt>
* tests will be run. I.e., when the value is <i>-n</i>, roughly one
* <i>n</i>th of the idle objects will be tested per run.
*
* @see #setNumTestsPerEvictionRun
* @see #getNumTestsPerEvictionRun
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private volatile int numTestsPerEvictionRun =
GenericObjectPoolConfig.DEFAULT_NUM_TESTS_PER_EVICTION_RUN;
/**
* The minimum amount of time an object may sit idle in the pool before it
* is eligible for eviction by the idle object evictor (if any). When
* non-positive, no objects will be evicted from the pool due to idle time
* alone.
*
* @see #setMinEvictableIdleTimeMillis
* @see #getMinEvictableIdleTimeMillis
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private volatile long minEvictableIdleTimeMillis =
GenericObjectPoolConfig.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/**
* The minimum amount of time an object may sit idle in the pool before it
* is eligible for eviction by the idle object evictor (if any), with the
* extra condition that at least "minIdle" amount of object remain in the
* pool. When non-positive, no objects will be evicted from the pool due to
* idle time alone.
*
* @see #setSoftMinEvictableIdleTimeMillis
* @see #getSoftMinEvictableIdleTimeMillis
*/
private volatile long softMinEvictableIdleTimeMillis =
GenericObjectPoolConfig.DEFAULT_SOFT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/** Whether or not the pool behaves as a LIFO queue (last in first out) */
private volatile boolean lifo = GenericObjectPoolConfig.DEFAULT_LIFO;
/** My {@link PoolableObjectFactory}. */
final private PoolableObjectFactory<T> factory;
/**
* My idle object eviction {@link TimerTask}, if any.
*/
private Evictor evictor = null;
/**
 * All of the objects currently associated with this pool in any state. It
 * excludes objects that have been destroyed. The size of
 * {@link #allObjects} will always be less than or equal to
 * {@link #maxTotal}.
 */
private final Map<T, PooledObject<T>> allObjects =
new ConcurrentHashMap<T, PooledObject<T>>();
/**
 * The combined count of the currently created objects and those in the
 * process of being created. Under load, it may exceed {@link #maxTotal}
 * if multiple threads try and create a new object at the same time but
 * {@link #create()} will ensure that there are never more than
 * {@link #maxTotal} objects created at any one time.
 */
private final AtomicLong createCount = new AtomicLong(0);
/** The queue of idle objects */
private final LinkedBlockingDeque<PooledObject<T>> idleObjects =
new LinkedBlockingDeque<PooledObject<T>>();
/** An iterator for {@link #idleObjects} that is used by the evictor. */
private Iterator<PooledObject<T>> evictionIterator = null;
/** Object used to ensure closed() is only called once. */
private final Object closeLock = new Object();
// JMX specific attributes
private static final int AVERAGE_TIMING_STATS_CACHE_SIZE = 100;
private AtomicLong borrowedCount = new AtomicLong(0);
private AtomicLong returnedCount = new AtomicLong(0);
private AtomicLong createdCount = new AtomicLong(0);
private AtomicLong destroyedCount = new AtomicLong(0);
private AtomicLong destroyedByEvictorCount = new AtomicLong(0);
private AtomicLong destroyedByBorrowValidationCount = new AtomicLong(0);
private final LinkedList<Long> activeTimes = new LinkedList<Long>();
private final LinkedList<Long> idleTimes = new LinkedList<Long>();
private final LinkedList<Long> waitTimes = new LinkedList<Long>();
private Object maxBorrowWaitTimeMillisLock = new Object();
private volatile long maxBorrowWaitTimeMillis = 0;
private ObjectName oname = null;
// BUG FIX: JMX domain was misspelled "commoms"; corrected to "commons".
private static final String ONAME_BASE =
    "org.apache.commons.pool2:type=GenericObjectPool,name=";
}
| src/java/org/apache/commons/pool2/impl/GenericObjectPool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.pool2.impl;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import org.apache.commons.pool2.BaseObjectPool;
import org.apache.commons.pool2.ObjectPool;
import org.apache.commons.pool2.PoolUtils;
import org.apache.commons.pool2.PoolableObjectFactory;
/**
* A configurable {@link ObjectPool} implementation.
* <p>
* When coupled with the appropriate {@link PoolableObjectFactory},
* <tt>GenericObjectPool</tt> provides robust pooling functionality for
* arbitrary objects.
* <p>
* A <tt>GenericObjectPool</tt> provides a number of configurable parameters:
* <ul>
* <li>
* {@link #setMaxTotal <i>maxTotal</i>} controls the maximum number of
* objects that can be allocated by the pool (checked out to clients, or idle
* awaiting checkout) at a given time. When non-positive, there is no limit to
* the number of objects that can be managed by the pool at one time. When
* {@link #setMaxTotal <i>maxTotal</i>} is reached, the pool is said to be
* exhausted. The default setting for this parameter is 8.</li>
* <li>
* {@link #setMaxIdle <i>maxIdle</i>} controls the maximum number of objects
* that can sit idle in the pool at any time. When negative, there is no limit
* to the number of objects that may be idle at one time. The default setting
* for this parameter is 8.</li>
* <li>
* {@link #getBlockWhenExhausted} specifies the
* behavior of the {@link #borrowObject} method when the pool is exhausted:
* <ul>
* <li>When {@link #getBlockWhenExhausted} is false,
* {@link #borrowObject} will throw a {@link NoSuchElementException}</li>
* <li>When {@link #getBlockWhenExhausted} is true,
* {@link #borrowObject} will block (invoke
* {@link Object#wait()}) until a new or idle object is available. If a
* non-negative {@link #setMaxWait <i>maxWait</i>} value is supplied, then
* {@link #borrowObject} will block for at most that many milliseconds, after
* which a {@link NoSuchElementException} will be thrown. If {@link #setMaxWait
* <i>maxWait</i>} is negative, the {@link #borrowObject} method will block
* indefinitely.</li>
* </ul>
* The default {@link #getBlockWhenExhausted} is true
* and the default <code>maxWait</code> setting is
* -1. By default, therefore, <code>borrowObject</code> will block indefinitely
* until an idle instance becomes available.</li>
* <li>When {@link #setTestOnBorrow <i>testOnBorrow</i>} is set, the pool will
* attempt to validate each object before it is returned from the
* {@link #borrowObject} method. (Using the provided factory's
* {@link PoolableObjectFactory#validateObject} method.) Objects that fail to
* validate will be dropped from the pool, and a different object will be
* borrowed. The default setting for this parameter is <code>false.</code></li>
* <li>When {@link #setTestOnReturn <i>testOnReturn</i>} is set, the pool will
* attempt to validate each object before it is returned to the pool in the
* {@link #returnObject} method. (Using the provided factory's
* {@link PoolableObjectFactory#validateObject} method.) Objects that fail to
* validate will be dropped from the pool. The default setting for this
* parameter is <code>false.</code></li>
* </ul>
* <p>
* Optionally, one may configure the pool to examine and possibly evict objects
* as they sit idle in the pool and to ensure that a minimum number of idle
* objects are available. This is performed by an "idle object eviction" thread,
* which runs asynchronously. Caution should be used when configuring this
* optional feature. Eviction runs contend with client threads for access to
* objects in the pool, so if they run too frequently performance issues may
* result. The idle object eviction thread may be configured using the following
* attributes:
* <ul>
* <li>
* {@link #setTimeBetweenEvictionRunsMillis
* <i>timeBetweenEvictionRunsMillis</i>} indicates how long the eviction thread
* should sleep before "runs" of examining idle objects. When non-positive, no
* eviction thread will be launched. The default setting for this parameter is
* -1 (i.e., idle object eviction is disabled by default).</li>
* <li>
* {@link #setMinEvictableIdleTimeMillis <i>minEvictableIdleTimeMillis</i>}
* specifies the minimum amount of time that an object may sit idle in the pool
* before it is eligible for eviction due to idle time. When non-positive, no
* object will be dropped from the pool due to idle time alone. This setting has
* no effect unless <code>timeBetweenEvictionRunsMillis > 0.</code> The default
* setting for this parameter is 30 minutes.</li>
* <li>
* {@link #setTestWhileIdle <i>testWhileIdle</i>} indicates whether or not
* idle objects should be validated using the factory's
* {@link PoolableObjectFactory#validateObject} method. Objects that fail to
* validate will be dropped from the pool. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting for this
* parameter is <code>false.</code></li>
* <li>
* {@link #setSoftMinEvictableIdleTimeMillis
* <i>softMinEvictableIdleTimeMillis</i>} specifies the minimum amount of time
* an object may sit idle in the pool before it is eligible for eviction by the
* idle object evictor (if any), with the extra condition that at least
* "minIdle" object instances remain in the pool. This setting has no
* effect unless <code>timeBetweenEvictionRunsMillis > 0.</code> and it is
* superseded by {@link #setMinEvictableIdleTimeMillis
* <i>minEvictableIdleTimeMillis</i>} (that is, if
* <code>minEvictableIdleTimeMillis</code> is positive, then
* <code>softMinEvictableIdleTimeMillis</code> is ignored). The default setting
* for this parameter is -1 (disabled).</li>
* <li>
* {@link #setNumTestsPerEvictionRun <i>numTestsPerEvictionRun</i>}
* determines the number of objects examined in each run of the idle object
* evictor. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting for this
* parameter is 3.</li>
* </ul>
* <p>
* <p>
* The pool can be configured to behave as a LIFO queue with respect to idle
* objects - always returning the most recently used object from the pool, or as
* a FIFO queue, where borrowObject always returns the oldest object in the idle
* object pool.
* <ul>
* <li>
* {@link #setLifo <i>lifo</i>} determines whether or not the pool returns
* idle objects in last-in-first-out order. The default setting for this
* parameter is <code>true.</code></li>
* </ul>
* <p>
* GenericObjectPool is not usable without a {@link PoolableObjectFactory}. A
* non-<code>null</code> factory must be provided as a constructor
* argument before the pool is used.
* <p>
* Implementation note: To prevent possible deadlocks, care has been taken to
* ensure that no call to a factory method will occur within a synchronization
* block. See POOL-125 and DBCP-44 for more information.
*
* @see GenericKeyedObjectPool
* @param <T>
* Type of element pooled in this pool.
* @author Rodney Waldhoff
* @author Dirk Verbeeck
* @author Sandy McArthur
* @version $Revision$ $Date: 2011-05-11 13:50:33 +0100 (Wed, 11 May
* 2011) $
* @since Pool 1.0
*/
public class GenericObjectPool<T> extends BaseObjectPool<T>
implements GenericObjectPoolMBean {
    // --- constructors -----------------------------------------------
    /**
     * Create a new <tt>GenericObjectPool</tt> with default properties.
     *
     * @param factory the (non-<code>null</code>) factory used to create,
     *                activate, validate, passivate and destroy instances
     */
    public GenericObjectPool(PoolableObjectFactory<T> factory) {
        this(factory, new GenericObjectPoolConfig<T>());
    }
    /**
     * Create a new <tt>GenericObjectPool</tt> using the configuration
     * supplied. Configuration values are copied from <code>config</code>,
     * so later changes to the config object do not affect this pool. If
     * JMX is enabled in the config, the pool attempts to register itself
     * as an MBean under {@code ONAME_BASE + jmxNamePrefix + i}, retrying
     * with an incremented index on name collisions.
     *
     * @param factory the factory used to manage object instances
     * @param config  the configuration to copy settings from
     */
    public GenericObjectPool(PoolableObjectFactory<T> factory,
            GenericObjectPoolConfig<T> config) {
        this.factory = factory;
        this.lifo = config.getLifo();
        this.maxTotal = config.getMaxTotal();
        this.maxIdle = config.getMaxIdle();
        this.maxWait = config.getMaxWait();
        this.minEvictableIdleTimeMillis =
                config.getMinEvictableIdleTimeMillis();
        this.minIdle = config.getMinIdle();
        this.numTestsPerEvictionRun = config.getNumTestsPerEvictionRun();
        this.softMinEvictableIdleTimeMillis =
                config.getSoftMinEvictableIdleTimeMillis();
        this.testOnBorrow = config.getTestOnBorrow();
        this.testOnReturn = config.getTestOnReturn();
        this.testWhileIdle = config.getTestWhileIdle();
        this.timeBetweenEvictionRunsMillis =
                config.getTimeBetweenEvictionRunsMillis();
        this.blockWhenExhausted = config.getBlockWhenExhausted();
        // May start the evictor timer task if the interval is positive.
        startEvictor(timeBetweenEvictionRunsMillis);
        initStats();
        // JMX Registration
        if (config.isJmxEnabled()) {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            String jmxNamePrefix = config.getJmxNamePrefix();
            int i = 1;
            boolean registered = false;
            while (!registered) {
                try {
                    ObjectName oname =
                        new ObjectName(ONAME_BASE + jmxNamePrefix + i);
                    mbs.registerMBean(this, oname);
                    this.oname = oname;
                    registered = true;
                } catch (MalformedObjectNameException e) {
                    if (GenericObjectPoolConfig.DEFAULT_JMX_NAME_PREFIX.equals(
                            jmxNamePrefix)) {
                        // Shouldn't happen. Skip registration if it does.
                        registered = true;
                    } else {
                        // Must be an invalid name prefix. Use the default
                        // instead.
                        jmxNamePrefix =
                            GenericObjectPoolConfig.DEFAULT_JMX_NAME_PREFIX;
                    }
                } catch (InstanceAlreadyExistsException e) {
                    // Increment the index and try again
                    i++;
                } catch (MBeanRegistrationException e) {
                    // Shouldn't happen. Skip registration if it does.
                    registered = true;
                } catch (NotCompliantMBeanException e) {
                    // Shouldn't happen. Skip registration if it does.
                    registered = true;
                }
            }
        }
    }
    // --- public methods ---------------------------------------------
    // --- configuration methods --------------------------------------
    /**
     * Returns the maximum number of objects that can be allocated by the pool
     * (checked out to clients, or idle awaiting checkout) at a given time. When
     * non-positive, there is no limit to the number of objects that can be
     * managed by the pool at one time.
     *
     * @return the cap on the total number of object instances managed by the
     *         pool.
     * @see #setMaxTotal
     */
    public int getMaxTotal() {
        return maxTotal;
    }
    /**
     * Sets the cap on the number of objects that can be allocated by the pool
     * (checked out to clients, or idle awaiting checkout) at a given time. Use
     * a negative value for no limit.
     *
     * @param maxTotal
     *            The cap on the total number of object instances managed by the
     *            pool. Negative values mean that there is no limit to the
     *            number of objects allocated by the pool.
     * @see #getMaxTotal
     */
    public void setMaxTotal(int maxTotal) {
        this.maxTotal = maxTotal;
    }
    /**
     * Returns whether to block when the {@link #borrowObject} method is
     * invoked when the pool is exhausted (the maximum number of "active"
     * objects has been reached).
     *
     * @return true if should block when the pool is exhausted
     * @see #setBlockWhenExhausted
     */
    public boolean getBlockWhenExhausted() {
        return blockWhenExhausted;
    }
    /**
     * Sets whether to block when the {@link #borrowObject} method is invoked
     * when the pool is exhausted (the maximum number of "active" objects has
     * been reached).
     *
     * @param blockWhenExhausted true if should block when the pool is exhausted
     * @see #getBlockWhenExhausted
     */
    public void setBlockWhenExhausted(boolean blockWhenExhausted) {
        this.blockWhenExhausted = blockWhenExhausted;
    }
    /**
     * Returns the maximum amount of time (in milliseconds) the
     * {@link #borrowObject} method should block before throwing an exception
     * when the pool is exhausted and the {@link #getBlockWhenExhausted} is true.
     * When less than 0, the {@link #borrowObject} method may block indefinitely.
     *
     * @return maximum number of milliseconds to block when borrowing an object.
     * @see #setMaxWait
     * @see #setBlockWhenExhausted
     */
    public long getMaxWait() {
        return maxWait;
    }
    /**
     * Sets the maximum amount of time (in milliseconds) the
     * {@link #borrowObject} method should block before throwing an exception
     * when the pool is exhausted and the {@link #getBlockWhenExhausted} is true.
     * When less than 0, the {@link #borrowObject} method may block indefinitely.
     *
     * @param maxWait
     *            maximum number of milliseconds to block when borrowing an
     *            object.
     * @see #getMaxWait
     * @see #getBlockWhenExhausted
     */
    public void setMaxWait(long maxWait) {
        this.maxWait = maxWait;
    }
    /**
     * Returns the cap on the number of "idle" instances in the pool.
     *
     * @return the cap on the number of "idle" instances in the pool.
     * @see #setMaxIdle
     */
    public int getMaxIdle() {
        return maxIdle;
    }
    /**
     * Sets the cap on the number of "idle" instances in the pool. If maxIdle is
     * set too low on heavily loaded systems it is possible you will see objects
     * being destroyed and almost immediately new objects being created. This is
     * a result of the active threads momentarily returning objects faster than
     * they are requesting them, causing the number of idle objects to rise
     * above maxIdle. The best value for maxIdle for heavily loaded system will
     * vary but the default is a good starting point.
     *
     * @param maxIdle
     *            The cap on the number of "idle" instances in the pool. Use a
     *            negative value to indicate an unlimited number of idle
     *            instances.
     * @see #getMaxIdle
     */
    public void setMaxIdle(int maxIdle) {
        this.maxIdle = maxIdle;
    }
    /**
     * Sets the minimum number of objects allowed in the pool before the evictor
     * thread (if active) spawns new objects. Note that no objects are created
     * when <code>numActive + numIdle >= maxActive.</code> This setting has no
     * effect if the idle object evictor is disabled (i.e. if
     * <code>timeBetweenEvictionRunsMillis <= 0</code>).
     *
     * @param minIdle
     *            The minimum number of objects.
     * @see #getMinIdle
     * @see #getTimeBetweenEvictionRunsMillis()
     */
    public void setMinIdle(int minIdle) {
        this.minIdle = minIdle;
    }
    /**
     * Returns the minimum number of objects allowed in the pool before the
     * evictor thread (if active) spawns new objects. (Note no objects are
     * created when: numActive + numIdle >= maxActive)
     *
     * @return The minimum number of objects.
     * @see #setMinIdle
     */
    public int getMinIdle() {
        return minIdle;
    }
    /**
     * When <tt>true</tt>, objects will be
     * {@link PoolableObjectFactory#validateObject validated} before being
     * returned by the {@link #borrowObject} method. If the object fails to
     * validate, it will be dropped from the pool, and we will attempt to borrow
     * another.
     *
     * @return <code>true</code> if objects are validated before being borrowed.
     * @see #setTestOnBorrow
     */
    public boolean getTestOnBorrow() {
        return testOnBorrow;
    }
    /**
     * When <tt>true</tt>, objects will be
     * {@link PoolableObjectFactory#validateObject validated} before being
     * returned by the {@link #borrowObject} method. If the object fails to
     * validate, it will be dropped from the pool, and we will attempt to borrow
     * another.
     *
     * @param testOnBorrow
     *            <code>true</code> if objects should be validated before being
     *            borrowed.
     * @see #getTestOnBorrow
     */
    public void setTestOnBorrow(boolean testOnBorrow) {
        this.testOnBorrow = testOnBorrow;
    }
    /**
     * When <tt>true</tt>, objects will be
     * {@link PoolableObjectFactory#validateObject validated} before being
     * returned to the pool within the {@link #returnObject}.
     *
     * @return <code>true</code> when objects will be validated after being
     *         returned to {@link #returnObject}.
     * @see #setTestOnReturn
     */
    public boolean getTestOnReturn() {
        return testOnReturn;
    }
    /**
     * When <tt>true</tt>, objects will be
     * {@link PoolableObjectFactory#validateObject validated} before being
     * returned to the pool within the {@link #returnObject}.
     *
     * @param testOnReturn
     *            <code>true</code> so objects will be validated after being
     *            returned to {@link #returnObject}.
     * @see #getTestOnReturn
     */
    public void setTestOnReturn(boolean testOnReturn) {
        this.testOnReturn = testOnReturn;
    }
    /**
     * Returns the number of milliseconds to sleep between runs of the idle
     * object evictor thread. When non-positive, no idle object evictor thread
     * will be run.
     *
     * @return number of milliseconds to sleep between evictor runs.
     * @see #setTimeBetweenEvictionRunsMillis
     */
    public long getTimeBetweenEvictionRunsMillis() {
        return timeBetweenEvictionRunsMillis;
    }
    /**
     * Sets the number of milliseconds to sleep between runs of the idle object
     * evictor thread. When non-positive, no idle object evictor thread will be
     * run. Note that calling this method (re)starts or stops the evictor via
     * {@code startEvictor}.
     *
     * @param timeBetweenEvictionRunsMillis
     *            number of milliseconds to sleep between evictor runs.
     * @see #getTimeBetweenEvictionRunsMillis
     */
    public void setTimeBetweenEvictionRunsMillis(
            long timeBetweenEvictionRunsMillis) {
        this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
        startEvictor(timeBetweenEvictionRunsMillis);
    }
    /**
     * Returns the max number of objects to examine during each run of the idle
     * object evictor thread (if any).
     *
     * @return max number of objects to examine during each evictor run.
     * @see #setNumTestsPerEvictionRun
     * @see #setTimeBetweenEvictionRunsMillis
     */
    public int getNumTestsPerEvictionRun() {
        return numTestsPerEvictionRun;
    }
    /**
     * Sets the max number of objects to examine during each run of the idle
     * object evictor thread (if any).
     * <p>
     * When a negative value is supplied,
     * <tt>ceil({@link #getNumIdle})/abs({@link #getNumTestsPerEvictionRun})</tt>
     * tests will be run. That is, when the value is <i>-n</i>, roughly one
     * <i>n</i>th of the idle objects will be tested per run. When the value is
     * positive, the number of tests actually performed in each run will be the
     * minimum of this value and the number of instances idle in the pool.
     *
     * @param numTestsPerEvictionRun
     *            max number of objects to examine during each evictor run.
     * @see #getNumTestsPerEvictionRun
     * @see #setTimeBetweenEvictionRunsMillis
     */
    public void setNumTestsPerEvictionRun(int numTestsPerEvictionRun) {
        this.numTestsPerEvictionRun = numTestsPerEvictionRun;
    }
    /**
     * Returns the minimum amount of time an object may sit idle in the pool
     * before it is eligible for eviction by the idle object evictor (if any).
     *
     * @return minimum amount of time an object may sit idle in the pool before
     *         it is eligible for eviction.
     * @see #setMinEvictableIdleTimeMillis
     * @see #setTimeBetweenEvictionRunsMillis
     */
    public long getMinEvictableIdleTimeMillis() {
        return minEvictableIdleTimeMillis;
    }
    /**
     * Sets the minimum amount of time an object may sit idle in the pool before
     * it is eligible for eviction by the idle object evictor (if any). When
     * non-positive, no objects will be evicted from the pool due to idle time
     * alone.
     *
     * @param minEvictableIdleTimeMillis
     *            minimum amount of time an object may sit idle in the pool
     *            before it is eligible for eviction.
     * @see #getMinEvictableIdleTimeMillis
     * @see #setTimeBetweenEvictionRunsMillis
     */
    public void setMinEvictableIdleTimeMillis(long minEvictableIdleTimeMillis) {
        this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
    }
    /**
     * Returns the minimum amount of time
     * an object may sit idle in the pool before it is eligible for eviction by the
     * idle object evictor (if any), with the extra condition that at least
     * "minIdle" object instances remain in the pool. This setting has no
     * effect unless {@code timeBetweenEvictionRunsMillis > 0.} and it is
     * superseded by {@link #setMinEvictableIdleTimeMillis
     * <i>minEvictableIdleTimeMillis</i>} (that is, if
     * {@code minEvictableIdleTimeMillis} is positive, then
     * {@code softMinEvictableIdleTimeMillis} is ignored). The default setting
     * for this parameter is -1 (disabled).
     *
     * @return minimum amount of time an object may sit idle in the pool before
     *         it is eligible for eviction if minIdle instances are available
     * @since Pool 1.3
     */
    public long getSoftMinEvictableIdleTimeMillis() {
        return softMinEvictableIdleTimeMillis;
    }
    /**
     * Sets the minimum amount of time an object may sit idle in the pool before
     * it is eligible for eviction by the idle object evictor (if any), with the
     * extra condition that at least "minIdle" object instances remain in the
     * pool. When non-positive, no objects will be evicted from the pool due to
     * idle time alone.
     *
     * @param softMinEvictableIdleTimeMillis
     *            minimum amount of time an object may sit idle in the pool
     *            before it is eligible for eviction.
     * @since Pool 1.3
     * @see #getSoftMinEvictableIdleTimeMillis
     */
    public void setSoftMinEvictableIdleTimeMillis(
            long softMinEvictableIdleTimeMillis) {
        this.softMinEvictableIdleTimeMillis = softMinEvictableIdleTimeMillis;
    }
    /**
     * When <tt>true</tt>, objects will be
     * {@link PoolableObjectFactory#validateObject validated} by the idle object
     * evictor (if any). If an object fails to validate, it will be dropped from
     * the pool.
     *
     * @return <code>true</code> when objects will be validated by the evictor.
     * @see #setTestWhileIdle
     * @see #setTimeBetweenEvictionRunsMillis
     */
    public boolean getTestWhileIdle() {
        return testWhileIdle;
    }
    /**
     * When <tt>true</tt>, objects will be
     * {@link PoolableObjectFactory#validateObject validated} by the idle object
     * evictor (if any). If an object fails to validate, it will be dropped from
     * the pool.
     *
     * @param testWhileIdle
     *            <code>true</code> so objects will be validated by the evictor.
     * @see #getTestWhileIdle
     * @see #setTimeBetweenEvictionRunsMillis
     */
    public void setTestWhileIdle(boolean testWhileIdle) {
        this.testWhileIdle = testWhileIdle;
    }
    /**
     * Whether or not the idle object pool acts as a LIFO queue. True means that
     * borrowObject returns the most recently used ("last in") idle object in
     * the pool (if there are idle instances available). False means that the
     * pool behaves as a FIFO queue - objects are taken from the idle object
     * pool in the order that they are returned to the pool.
     *
     * @return <code>true</code> if the pool is configured to act as a LIFO queue
     * @since 1.4
     */
    public boolean getLifo() {
        return lifo;
    }
    /**
     * Sets the LIFO property of the pool. True means that borrowObject returns
     * the most recently used ("last in") idle object in the pool (if there are
     * idle instances available). False means that the pool behaves as a FIFO
     * queue - objects are taken from the idle object pool in the order that
     * they are returned to the pool.
     *
     * @param lifo
     *            the new value for the LIFO property
     * @since 1.4
     */
    public void setLifo(boolean lifo) {
        this.lifo = lifo;
    }
    /**
     * Sets my configuration. Each setting is copied from <code>conf</code>
     * via the corresponding setter; note that
     * {@link #setTimeBetweenEvictionRunsMillis} may (re)start or stop the
     * idle-object evictor as a side effect.
     *
     * @param conf
     *            configuration to use.
     * @see GenericObjectPoolConfig
     */
    public void setConfig(GenericObjectPoolConfig<T> conf) {
        setMaxIdle(conf.getMaxIdle());
        setMinIdle(conf.getMinIdle());
        setMaxTotal(conf.getMaxTotal());
        setMaxWait(conf.getMaxWait());
        setBlockWhenExhausted(conf.getBlockWhenExhausted());
        setTestOnBorrow(conf.getTestOnBorrow());
        setTestOnReturn(conf.getTestOnReturn());
        setTestWhileIdle(conf.getTestWhileIdle());
        setNumTestsPerEvictionRun(conf.getNumTestsPerEvictionRun());
        setMinEvictableIdleTimeMillis(conf.getMinEvictableIdleTimeMillis());
        setTimeBetweenEvictionRunsMillis(
                conf.getTimeBetweenEvictionRunsMillis());
        setSoftMinEvictableIdleTimeMillis(
                conf.getSoftMinEvictableIdleTimeMillis());
        setLifo(conf.getLifo());
    }
    // -- ObjectPool methods ------------------------------------------
    /**
     * <p>
     * Borrows an object from the pool.
     * </p>
     * <p>
     * If there is an idle instance available in the pool, then either the
     * most-recently returned (if {@link #getLifo() lifo} == true) or "oldest"
     * (lifo == false) instance sitting idle in the pool will be activated and
     * returned. If activation fails, or {@link #getTestOnBorrow() testOnBorrow}
     * is set to true and validation fails, the instance is destroyed and the
     * next available instance is examined. This continues until either a valid
     * instance is returned or there are no more idle instances available.
     * </p>
     * <p>
     * If there are no idle instances available in the pool, behavior depends on
     * the {@link #getMaxTotal() maxTotal} and (if applicable)
     * {@link #getBlockWhenExhausted()} and
     * {@link #getMaxWait() maxWait} properties. If the number of instances
     * checked out from the pool is less than <code>maxActive,</code> a new
     * instance is created, activated and (if applicable) validated and returned
     * to the caller.
     * </p>
     * <p>
     * If the pool is exhausted (no available idle instances and no capacity to
     * create new ones), this method will either block (
     * {@link #getBlockWhenExhausted()} is true) or throw a
     * <code>NoSuchElementException</code> ({@link #getBlockWhenExhausted()} is false). The
     * length of time that this method will block when
     * {@link #getBlockWhenExhausted()} is true is determined by
     * the {@link #getMaxWait() maxWait} property.
     * </p>
     * <p>
     * When the pool is exhausted, multiple calling threads may be
     * simultaneously blocked waiting for instances to become available. As of
     * pool 1.5, a "fairness" algorithm has been implemented to ensure that
     * threads receive available instances in request arrival order.
     * </p>
     *
     * @return object instance
     * @throws NoSuchElementException
     *             if an instance cannot be returned
     */
    @Override
    public T borrowObject() throws Exception {
        // Delegates to the long variant using the pool-wide maxWait setting.
        return borrowObject(maxWait);
    }
/**
* Borrow an object from the pool using a user specific waiting time which
* only applies if {@link #getBlockWhenExhausted()} is true.
*
* @param borrowMaxWait The time to wait in milliseconds for an object to
* become available
* @return object instance
* @throws NoSuchElementException
* if an instance cannot be returned
*/
public T borrowObject(long borrowMaxWait) throws Exception {
assertOpen();
PooledObject<T> p = null;
// Get local copy of current config so it is consistent for entire
// method execution
boolean blockWhenExhausted = this.blockWhenExhausted;
boolean create;
long waitTime = 0;
while (p == null) {
create = false;
if (blockWhenExhausted) {
p = idleObjects.pollFirst();
if (p == null) {
create = true;
p = create();
}
if (p == null) {
if (borrowMaxWait < 0) {
p = idleObjects.takeFirst();
} else {
waitTime = System.currentTimeMillis();
p = idleObjects.pollFirst(borrowMaxWait,
TimeUnit.MILLISECONDS);
waitTime = System.currentTimeMillis() - waitTime;
}
}
if (p == null) {
throw new NoSuchElementException(
"Timeout waiting for idle object");
}
if (!p.allocate()) {
p = null;
}
} else {
p = idleObjects.pollFirst();
if (p == null) {
create = true;
p = create();
}
if (p == null) {
throw new NoSuchElementException("Pool exhausted");
}
if (!p.allocate()) {
p = null;
}
}
if (p != null) {
try {
factory.activateObject(p.getObject());
} catch (Exception e) {
try {
destroy(p);
} catch (Exception e1) {
// Ignore - activation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to activate object");
nsee.initCause(e);
throw nsee;
}
}
if (p != null && getTestOnBorrow()) {
boolean validate = false;
Throwable validationThrowable = null;
try {
validate = factory.validateObject(p.getObject());
} catch (Throwable t) {
PoolUtils.checkRethrow(t);
}
if (!validate) {
try {
destroy(p);
destroyedByBorrowValidationCount.incrementAndGet();
} catch (Exception e) {
// Ignore - validation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to validate object");
nsee.initCause(validationThrowable);
throw nsee;
}
}
}
}
}
borrowedCount.incrementAndGet();
synchronized (idleTimes) {
idleTimes.add(Long.valueOf(p.getIdleTimeMillis()));
idleTimes.poll();
}
synchronized (waitTimes) {
waitTimes.add(Long.valueOf(waitTime));
waitTimes.poll();
}
synchronized (maxBorrowWaitTimeMillisLock) {
if (waitTime > maxBorrowWaitTimeMillis) {
maxBorrowWaitTimeMillis = waitTime;
}
}
return p.getObject();
}
/**
* <p>
* Returns an object instance to the pool.
* </p>
* <p>
* If {@link #getMaxIdle() maxIdle} is set to a positive value and the
* number of idle instances has reached this value, the returning instance
* is destroyed.
* </p>
* <p>
* If {@link #getTestOnReturn() testOnReturn} == true, the returning
* instance is validated before being returned to the idle instance pool. In
* this case, if validation fails, the instance is destroyed.
* </p>
*
* @param obj
* instance to return to the pool
*/
@Override
public void returnObject(T obj) {
PooledObject<T> p = allObjects.get(obj);
if (p == null) {
throw new IllegalStateException(
"Returned object not currently part of this pool");
}
long activeTime = p.getActiveTimeMillis();
if (getTestOnReturn()) {
if (!factory.validateObject(obj)) {
try {
destroy(p);
} catch (Exception e) {
// TODO - Ignore?
}
updateStatsReturn(activeTime);
return;
}
}
try {
factory.passivateObject(obj);
} catch (Exception e1) {
try {
destroy(p);
} catch (Exception e) {
// TODO - Ignore?
}
updateStatsReturn(activeTime);
return;
}
if (!p.deallocate()) {
throw new IllegalStateException(
"Object has already been retured to this pool");
}
int maxIdle = getMaxIdle();
if (isClosed() || maxIdle > -1 && maxIdle <= idleObjects.size()) {
try {
destroy(p);
} catch (Exception e) {
// TODO - Ignore?
}
} else {
if (getLifo()) {
idleObjects.addFirst(p);
} else {
idleObjects.addLast(p);
}
}
updateStatsReturn(activeTime);
}
    /**
     * Records statistics for a returned instance: bumps the returned counter
     * and adds the instance's active time to the rolling sample window
     * (add the new sample, drop the oldest, under the list's own lock).
     *
     * @param activeTime how long (ms) the instance was checked out
     */
    private void updateStatsReturn(long activeTime) {
        returnedCount.incrementAndGet();
        synchronized (activeTimes) {
            activeTimes.add(Long.valueOf(activeTime));
            activeTimes.poll();
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * Activation of this method decrements the active count and attempts to
     * destroy the instance.
     * </p>
     *
     * @throws Exception
     *             if the configured {@link PoolableObjectFactory} throws an
     *             exception destroying obj
     * @throws IllegalStateException
     *             if obj is not tracked by this pool
     */
    @Override
    public void invalidateObject(T obj) throws Exception {
        PooledObject<T> p = allObjects.get(obj);
        if (p == null) {
            throw new IllegalStateException(
                    "Object not currently part of this pool");
        }
        destroy(p);
    }
/**
* Clears any objects sitting idle in the pool by removing them from the
* idle instance pool and then invoking the configured
* {@link PoolableObjectFactory#destroyObject(Object)} method on each idle
* instance.
* <p>
* Implementation notes:
* <ul>
* <li>This method does not destroy or effect in any way instances that are
* checked out of the pool when it is invoked.</li>
* <li>Invoking this method does not prevent objects being returned to the
* idle instance pool, even during its execution. It locks the pool only
* during instance removal. Additional instances may be returned while
* removed items are being destroyed.</li>
* <li>Exceptions encountered destroying idle instances are swallowed.</li>
* </ul>
* </p>
*/
@Override
public void clear() {
PooledObject<T> p = idleObjects.poll();
while (p != null) {
try {
destroy(p);
} catch (Exception e) {
// TODO - Ignore?
}
p = idleObjects.poll();
}
}
    /**
     * Return the number of instances currently borrowed from this pool,
     * computed as (all tracked instances) minus (idle instances).
     *
     * @return the number of instances currently borrowed from this pool
     */
    @Override
    public int getNumActive() {
        return allObjects.size() - idleObjects.size();
    }
    /**
     * Return the number of instances currently idle in this pool.
     *
     * @return the number of instances currently idle in this pool
     */
    @Override
    public int getNumIdle() {
        return idleObjects.size();
    }
    /**
     * <p>
     * Closes the pool. Once the pool is closed, {@link #borrowObject()} will
     * fail with IllegalStateException, but {@link #returnObject(Object)} and
     * {@link #invalidateObject(Object)} will continue to work, with returned
     * objects destroyed on return.
     * </p>
     * <p>
     * Destroys idle instances in the pool by invoking {@link #clear()},
     * stops the evictor, and unregisters the pool's MBean if one was
     * registered.
     * </p>
     *
     * @throws Exception
     */
    @Override
    public void close() throws Exception {
        // Cheap unsynchronized pre-check; re-checked under closeLock below.
        if (isClosed()) {
            return;
        }
        synchronized (closeLock) {
            if (isClosed()) {
                return;
            }
            super.close();
            clear();
            // A non-positive interval stops the evictor.
            startEvictor(-1L);
            if (oname != null) {
                ManagementFactory.getPlatformMBeanServer().unregisterMBean(
                        oname);
            }
        }
    }
/**
* <p>
* Perform <code>numTests</code> idle object eviction tests, evicting
* examined objects that meet the criteria for eviction. If
* <code>testWhileIdle</code> is true, examined objects are validated when
* visited (and removed if invalid); otherwise only objects that have been
* idle for more than <code>minEvicableIdletimeMillis</code> are removed.
* </p>
* <p>
* Successive activations of this method examine objects in in sequence,
* cycling through objects in oldest-to-youngest order.
* </p>
*
* @throws Exception
* if the pool is closed or eviction fails.
*/
    public void evict() throws Exception {
        assertOpen();
        // Nothing idle, nothing to examine.
        if (idleObjects.size() == 0) {
            return;
        }
        PooledObject<T> underTest = null;
        boolean testWhileIdle = getTestWhileIdle();
        // Long.MAX_VALUE disables a criterion whose configured value is
        // non-positive (i.e. "never evict on this basis").
        long idleEvictTime = Long.MAX_VALUE;
        long idleSoftEvictTime = Long.MAX_VALUE;
        if (getMinEvictableIdleTimeMillis() > 0) {
            idleEvictTime = getMinEvictableIdleTimeMillis();
        }
        if (getSoftMinEvictableIdleTimeMillis() > 0) {
            idleSoftEvictTime = getSoftMinEvictableIdleTimeMillis();
        }
        // evictionIterator persists across calls so successive runs cycle
        // through the idle objects rather than re-testing the same few.
        for (int i = 0, m = getNumTests(); i < m; i++) {
            if (evictionIterator == null || !evictionIterator.hasNext()) {
                // Restart the scan. LIFO pools are walked oldest-first via the
                // descending iterator.
                if (getLifo()) {
                    evictionIterator = idleObjects.descendingIterator();
                } else {
                    evictionIterator = idleObjects.iterator();
                }
            }
            if (!evictionIterator.hasNext()) {
                // Pool exhausted, nothing to do here
                return;
            }
            try {
                underTest = evictionIterator.next();
            } catch (NoSuchElementException nsee) {
                // Object was borrowed in another thread
                // Don't count this as an eviction test so reduce i;
                i--;
                evictionIterator = null;
                continue;
            }
            if (!underTest.startEvictionTest()) {
                // Object was borrowed in another thread
                // Don't count this as an eviction test so reduce i;
                i--;
                continue;
            }
            // Evict if the object exceeded the hard idle limit, or exceeded
            // the soft limit while more than minIdle instances remain.
            if (idleEvictTime < underTest.getIdleTimeMillis() ||
                    (idleSoftEvictTime < underTest.getIdleTimeMillis() &&
                            getMinIdle() < idleObjects.size())) {
                destroy(underTest);
                destroyedByEvictorCount.incrementAndGet();
            } else {
                // Optionally validate survivors: activate, validate, then
                // passivate again; any failure along the way destroys the
                // instance.
                if (testWhileIdle) {
                    boolean active = false;
                    try {
                        factory.activateObject(underTest.getObject());
                        active = true;
                    } catch (Exception e) {
                        destroy(underTest);
                        destroyedByEvictorCount.incrementAndGet();
                    }
                    if (active) {
                        if (!factory.validateObject(underTest.getObject())) {
                            destroy(underTest);
                            destroyedByEvictorCount.incrementAndGet();
                        } else {
                            try {
                                factory.passivateObject(underTest.getObject());
                            } catch (Exception e) {
                                destroy(underTest);
                                destroyedByEvictorCount.incrementAndGet();
                            }
                        }
                    }
                }
                // Return the object to the EVICTION->IDLE state.
                if (!underTest.endEvictionTest(idleObjects)) {
                    // TODO - May need to add code here once additional states
                    // are used
                }
            }
        }
        return;
    }
    /**
     * Attempts to create a new wrapped pooled instance.
     * <p>
     * Reserves a slot in {@code createCount} first, then checks capacity; on
     * any failure the reservation is released. Returns {@code null} (rather
     * than throwing) when the pool is already at {@code maxTotal} capacity.
     */
    private PooledObject<T> create() throws Exception {
        int localMaxTotal = getMaxTotal();
        // Reserve-then-check: increment first so concurrent creators cannot
        // collectively exceed the cap between check and increment.
        long newCreateCount = createCount.incrementAndGet();
        if (localMaxTotal > -1 && newCreateCount > localMaxTotal ||
                newCreateCount > Integer.MAX_VALUE) {
            createCount.decrementAndGet();
            return null;
        }
        T t = null;
        try {
            t = factory.makeObject();
        } catch (Exception e) {
            // Release the reserved slot before propagating the factory error.
            createCount.decrementAndGet();
            throw e;
        }
        PooledObject<T> p = new PooledObject<T>(t);
        createdCount.incrementAndGet();
        allObjects.put(t, p);
        return p;
    }
    /**
     * Destroys a wrapped pooled instance: marks it invalid, removes it from
     * both the idle queue and the all-objects map, then asks the factory to
     * destroy it. The destruction counters are updated even if the factory
     * throws.
     */
    private void destroy(PooledObject<T> toDestory) throws Exception {
        toDestory.invalidate();
        idleObjects.remove(toDestory);
        allObjects.remove(toDestory.getObject());
        try {
            factory.destroyObject(toDestory.getObject());
        } finally {
            // Always adjust counters, even on factory failure, so capacity
            // accounting stays consistent with the maps above.
            destroyedCount.incrementAndGet();
            createCount.decrementAndGet();
        }
    }
/**
* Check to see if we are below our minimum number of objects if so enough
* to bring us back to our minimum.
*
* @throws Exception
* when {@link #addObject()} fails.
*/
private void ensureMinIdle() throws Exception {
int minIdle = getMinIdle();
if (minIdle < 1) {
return;
}
while (idleObjects.size() < minIdle) {
PooledObject<T> p = create();
if (p == null) {
// Can't create objects, no reason to think another call to
// create will work. Give up.
break;
}
if (getLifo()) {
idleObjects.addFirst(p);
} else {
idleObjects.addLast(p);
}
}
}
/**
* Create an object, and place it into the pool. addObject() is useful for
* "pre-loading" a pool with idle objects.
*/
@Override
public void addObject() throws Exception {
assertOpen();
if (factory == null) {
throw new IllegalStateException(
"Cannot add objects without a factory.");
}
PooledObject<T> p = create();
addIdleObject(p);
}
// --- non-public methods ----------------------------------------
private void addIdleObject(PooledObject<T> p) throws Exception {
if (p != null) {
factory.passivateObject(p.getObject());
if (getLifo()) {
idleObjects.addFirst(p);
} else {
idleObjects.addLast(p);
}
}
}
/**
* Start the eviction thread or service, or when <i>delay</i> is
* non-positive, stop it if it is already running.
*
* @param delay
* milliseconds between evictor runs.
*/
protected final void startEvictor(long delay) {
if (null != evictor) {
EvictionTimer.cancel(evictor);
evictor = null;
}
if (delay > 0) {
evictor = new Evictor();
EvictionTimer.schedule(evictor, delay, delay);
}
}
/**
* Returns pool info including {@link #getNumActive()},
* {@link #getNumIdle()} and a list of objects idle in the pool with their
* idle times.
*
* @return string containing debug information
*/
String debugInfo() {
StringBuilder buf = new StringBuilder();
buf.append("Active: ").append(getNumActive()).append("\n");
buf.append("Idle: ").append(getNumIdle()).append("\n");
buf.append("Idle Objects:\n");
for (PooledObject<T> pair : idleObjects) {
buf.append("\t").append(pair.toString());
}
return buf.toString();
}
/**
* Returns the number of tests to be performed in an Evictor run, based on
* the current value of <code>numTestsPerEvictionRun</code> and the number
* of idle instances in the pool.
*
* @see #setNumTestsPerEvictionRun
* @return the number of tests for the Evictor to run
*/
private int getNumTests() {
if (numTestsPerEvictionRun >= 0) {
return Math.min(numTestsPerEvictionRun, idleObjects.size());
} else {
return (int) (Math.ceil(idleObjects.size() /
Math.abs((double) numTestsPerEvictionRun)));
}
}
//--- JMX specific attributes ----------------------------------------------
private void initStats() {
for (int i = 0; i < AVERAGE_TIMING_STATS_CACHE_SIZE; i++) {
activeTimes.add(null);
idleTimes.add(null);
waitTimes.add(null);
}
}
    /**
     * Computes the mean of the non-null entries in a timing cache.
     * <p>
     * Takes a snapshot of the cache under its own monitor (the caches are
     * plain LinkedLists shared across threads), then folds the values into an
     * incrementally-updated running mean, skipping null placeholder slots.
     */
    private long getMeanFromStatsCache(LinkedList<Long> cache) {
        List<Long> times = new ArrayList<Long>(AVERAGE_TIMING_STATS_CACHE_SIZE);
        synchronized (cache) {
            times.addAll(cache);
        }
        double result = 0;
        int counter = 0;
        Iterator<Long> iter = times.iterator();
        while (iter.hasNext()) {
            Long time = iter.next();
            if (time != null) {
                counter++;
                // Incremental mean: old mean scaled by (n-1)/n plus the new
                // sample's share, avoiding a potentially large running sum.
                result = result * ((counter - 1) / (double) counter) +
                        time.longValue()/(double) counter;
            }
        }
        return (long) result;
    }
    // --- JMX statistics accessors -------------------------------------
    /** @return total number of successful borrowObject() calls */
    public long getBorrowedCount() {
        return borrowedCount.get();
    }
    /** @return total number of returnObject() calls */
    public long getReturnedCount() {
        return returnedCount.get();
    }
    /** @return total number of objects created by the factory */
    public long getCreatedCount() {
        return createdCount.get();
    }
    /** @return total number of objects destroyed, for any reason */
    public long getDestroyedCount() {
        return destroyedCount.get();
    }
    /** @return number of objects destroyed by the idle-object evictor */
    public long getDestroyedByEvictorCount() {
        return destroyedByEvictorCount.get();
    }
    /** @return number of objects destroyed after failing borrow validation */
    public long getDestroyedByBorrowValidationCount() {
        return destroyedByBorrowValidationCount.get();
    }
    /** @return mean time (ms) objects spent checked out, over recent samples */
    public long getMeanActiveTimeMillis() {
        return getMeanFromStatsCache(activeTimes);
    }
    /** @return mean time (ms) objects spent idle, over recent samples */
    public long getMeanIdleTimeMillis() {
        return getMeanFromStatsCache(idleTimes);
    }
    /** @return mean time (ms) borrowers waited, over recent samples */
    public long getMeanBorrowWaitTimeMillis() {
        return getMeanFromStatsCache(waitTimes);
    }
    /** @return longest time (ms) any borrower has waited */
    public long getMaxBorrowWaitTimeMillis() {
        return maxBorrowWaitTimeMillis;
    }
// --- inner classes ----------------------------------------------
/**
* The idle object evictor {@link TimerTask}.
*
* @see GenericObjectPool#setTimeBetweenEvictionRunsMillis
*/
    private class Evictor extends TimerTask {
        /**
         * Run pool maintenance. Evict objects qualifying for eviction and then
         * invoke {@link GenericObjectPool#ensureMinIdle()}.
         */
        @Override
        public void run() {
            try {
                evict();
            } catch (Exception e) {
                // ignored - a failed eviction pass must not kill the timer
            } catch (OutOfMemoryError oome) {
                // Log problem but give evictor thread a chance to continue in
                // case error is recoverable
                oome.printStackTrace(System.err);
            }
            // Re-fill the idle queue up to minIdle after evicting.
            try {
                ensureMinIdle();
            } catch (Exception e) {
                // ignored
            }
        }
    }
// --- private attributes ---------------------------------------
/**
* The cap on the number of idle instances in the pool.
*
* @see #setMaxIdle
* @see #getMaxIdle
*/
private volatile int maxIdle = GenericObjectPoolConfig.DEFAULT_MAX_IDLE;
/**
* The cap on the minimum number of idle instances in the pool.
*
* @see #setMinIdle
* @see #getMinIdle
*/
private volatile int minIdle = GenericObjectPoolConfig.DEFAULT_MIN_IDLE;
/**
* The cap on the total number of active instances from the pool.
*
* @see #setMaxTotal
* @see #getMaxTotal
*/
private volatile int maxTotal =
GenericObjectPoolConfig.DEFAULT_MAX_TOTAL;
/**
* The maximum amount of time (in millis) the {@link #borrowObject} method
* should block before throwing an exception when the pool is exhausted and
* {@link #getBlockWhenExhausted()} is true.
* When less than 0, the
* {@link #borrowObject} method may block indefinitely.
*
* @see #setMaxWait
* @see #getMaxWait
* @see #setBlockWhenExhausted
* @see #getBlockWhenExhausted
*/
private volatile long maxWait = GenericObjectPoolConfig.DEFAULT_MAX_WAIT;
/**
* When the {@link #borrowObject} method is invoked when the pool is
* exhausted (the maximum number of "active" objects has been reached)
* should the {@link #borrowObject} method block or not?
*
* @see #setBlockWhenExhausted
* @see #getBlockWhenExhausted
*/
private volatile boolean blockWhenExhausted =
GenericObjectPoolConfig.DEFAULT_BLOCK_WHEN_EXHAUSTED;
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} before being
* returned by the {@link #borrowObject} method. If the object fails to
* validate, it will be dropped from the pool, and we will attempt to borrow
* another.
*
* @see #setTestOnBorrow
* @see #getTestOnBorrow
*/
private volatile boolean testOnBorrow =
GenericObjectPoolConfig.DEFAULT_TEST_ON_BORROW;
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} before being
* returned to the pool within the {@link #returnObject}.
*
* @see #getTestOnReturn
* @see #setTestOnReturn
*/
private volatile boolean testOnReturn =
GenericObjectPoolConfig.DEFAULT_TEST_ON_RETURN;
/**
* When <tt>true</tt>, objects will be
* {@link PoolableObjectFactory#validateObject validated} by the idle object
* evictor (if any). If an object fails to validate, it will be dropped from
* the pool.
*
* @see #setTestWhileIdle
* @see #getTestWhileIdle
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private volatile boolean testWhileIdle =
GenericObjectPoolConfig.DEFAULT_TEST_WHILE_IDLE;
/**
* The number of milliseconds to sleep between runs of the idle object
* evictor thread. When non-positive, no idle object evictor thread will be
* run.
*
* @see #setTimeBetweenEvictionRunsMillis
* @see #getTimeBetweenEvictionRunsMillis
*/
private volatile long timeBetweenEvictionRunsMillis =
GenericObjectPoolConfig.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS;
/**
* The max number of objects to examine during each run of the idle object
* evictor thread (if any).
* <p>
* When a negative value is supplied,
* <tt>ceil({@link #getNumIdle})/abs({@link #getNumTestsPerEvictionRun})</tt>
* tests will be run. I.e., when the value is <i>-n</i>, roughly one
* <i>n</i>th of the idle objects will be tested per run.
*
* @see #setNumTestsPerEvictionRun
* @see #getNumTestsPerEvictionRun
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private volatile int numTestsPerEvictionRun =
GenericObjectPoolConfig.DEFAULT_NUM_TESTS_PER_EVICTION_RUN;
/**
* The minimum amount of time an object may sit idle in the pool before it
* is eligible for eviction by the idle object evictor (if any). When
* non-positive, no objects will be evicted from the pool due to idle time
* alone.
*
* @see #setMinEvictableIdleTimeMillis
* @see #getMinEvictableIdleTimeMillis
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private volatile long minEvictableIdleTimeMillis =
GenericObjectPoolConfig.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/**
* The minimum amount of time an object may sit idle in the pool before it
* is eligible for eviction by the idle object evictor (if any), with the
* extra condition that at least "minIdle" amount of object remain in the
* pool. When non-positive, no objects will be evicted from the pool due to
* idle time alone.
*
* @see #setSoftMinEvictableIdleTimeMillis
* @see #getSoftMinEvictableIdleTimeMillis
*/
private volatile long softMinEvictableIdleTimeMillis =
GenericObjectPoolConfig.DEFAULT_SOFT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/** Whether or not the pool behaves as a LIFO queue (last in first out) */
private volatile boolean lifo = GenericObjectPoolConfig.DEFAULT_LIFO;
/** My {@link PoolableObjectFactory}. */
final private PoolableObjectFactory<T> factory;
/**
* My idle object eviction {@link TimerTask}, if any.
*/
private Evictor evictor = null;
/**
* All of the objects currently associated with this pool in any state. It
* excludes objects that have been destroyed. The size of
* {@link #allObjects} will always be less than or equal to {@link
* #_maxActive}.
*/
private final Map<T, PooledObject<T>> allObjects =
new ConcurrentHashMap<T, PooledObject<T>>();
/**
* The combined count of the currently created objects and those in the
* process of being created. Under load, it may exceed {@link #_maxActive}
* if multiple threads try and create a new object at the same time but
* {@link #create(boolean)} will ensure that there are never more than
* {@link #_maxActive} objects created at any one time.
*/
private final AtomicLong createCount = new AtomicLong(0);
/** The queue of idle objects */
private final LinkedBlockingDeque<PooledObject<T>> idleObjects =
new LinkedBlockingDeque<PooledObject<T>>();
/** An iterator for {@link #idleObjects} that is used by the evictor. */
private Iterator<PooledObject<T>> evictionIterator = null;
/** Object used to ensure closed() is only called once. */
private final Object closeLock = new Object();
// JMX specific attributes
private static final int AVERAGE_TIMING_STATS_CACHE_SIZE = 100;
private AtomicLong borrowedCount = new AtomicLong(0);
private AtomicLong returnedCount = new AtomicLong(0);
private AtomicLong createdCount = new AtomicLong(0);
private AtomicLong destroyedCount = new AtomicLong(0);
private AtomicLong destroyedByEvictorCount = new AtomicLong(0);
private AtomicLong destroyedByBorrowValidationCount = new AtomicLong(0);
private final LinkedList<Long> activeTimes = new LinkedList<Long>();
private final LinkedList<Long> idleTimes = new LinkedList<Long>();
private final LinkedList<Long> waitTimes = new LinkedList<Long>();
private Object maxBorrowWaitTimeMillisLock = new Object();
private volatile long maxBorrowWaitTimeMillis = 0;
private ObjectName oname = null;
private static final String ONAME_BASE =
"org.apache.commoms.pool2:type=GenericObjectPool,name=";
}
| Fix POOL-193. Make startEvictor(long) synchronized. Also aligns it with GKOP.
git-svn-id: a66ef3f0e6c00b14098e182847b4bd646263fa09@1213739 13f79535-47bb-0310-9956-ffa450edef68
| src/java/org/apache/commons/pool2/impl/GenericObjectPool.java | Fix POOL-193. Make startEvictor(long) synchronized. Also aligns it with GKOP. | <ide><path>rc/java/org/apache/commons/pool2/impl/GenericObjectPool.java
<ide> * @param delay
<ide> * milliseconds between evictor runs.
<ide> */
<del> protected final void startEvictor(long delay) {
<add> protected final synchronized void startEvictor(long delay) {
<ide> if (null != evictor) {
<ide> EvictionTimer.cancel(evictor);
<ide> evictor = null; |
|
Java | mit | 60beb8a192e81339fafd0bd8711cb201c0573703 | 0 | kevinli194/cellsociety | package cellsociety_team16;
import java.io.File;
import java.io.IOException;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.SAXException;
import parent.Cell;
import parent.CellManager;
import parent.FireCellManager;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.HPos;
import javafx.geometry.Insets;
import javafx.geometry.VPos;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Control;
import javafx.scene.layout.*;
import javafx.scene.text.Text;
import javafx.stage.FileChooser;
import javafx.stage.FileChooser.ExtensionFilter;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.util.Duration;
public class CellViewer {
private static final String VERY_FAST = "Faster";
private static final String FAST = "Fast";
private static final String NORMAL = "Normal";
private static final String SLOW = "Slow";
private static final String VERY_SLOW = "Slower";
private InitialGameParameters myGameParams;
private XMLParsing myXMLParser;
private boolean myGridSet = false;
private boolean myStepClicked = false;
private boolean myFileSelected = false;
private Button myRestart = new Button("Restart");
private Button myShowGrid = new Button("Show Grid");
private Button myStart= new Button("Start/Resume");
private Button myStop = new Button("Stop/Pause");
private Button myStep = new Button ("Step");
private Button myLastClicked = null;
private Cell[][] myGrid;
// Stores File object .XML
private File myFile;
// Stores the overall Grid (consisting of individual cells)
private GridPane myGridPane;
//Border Pane holds the scene graph
private BorderPane myBorderPane;
private CellManager myCellManager;
private Timeline myAnimation = new Timeline();
private static final String [] POSSIBLE_COLORS = {"white", "red", "blue"};
private final FileChooser fileChooser = new FileChooser();
private final Button openButton = new Button("...");
private final ComboBox<String> speedOptions = new ComboBox<String>();
private int myHeight;
public CellViewer(Timeline animation) {
myAnimation = animation;
}
private void setHeight(int height) {
myHeight = height;
}
public Scene init(Stage stage, int width, int height) {
setHeight(height);
myBorderPane = new BorderPane();
myGridPane = new GridPane();
myXMLParser = new XMLParsing();
myCellManager = new FireCellManager();
myShowGrid.setDisable(true);
//Border Pane holds the scene graph
Scene scene = new Scene(myBorderPane, width, height);
addFileSelector(stage);
addButtons();
setButtonsOnAction();
return scene;
}
private void addIndividualCells() {
myBorderPane.setCenter(myGridPane);
for (int row = 0; row < myGameParams.gridXSize ; row++) {
for (int col = 0; col < myGameParams.gridYSize; col ++) {
GridPane square = new GridPane();
Cell cell = myGrid[row][col];
square.setStyle("-fx-background-color: "+ POSSIBLE_COLORS[cell.getState()] +";");
myGridPane.add(square, col, row);
}
}
// Creates border for each cell
myGridPane.setStyle("-fx-background-color: black; -fx-padding: 2; -fx-hgap: 2; -fx-vgap: 2;");
}
private void addGridConstraints() {
for (int i = 0; i < myGameParams.gridXSize; i++) {
myGridPane.getRowConstraints().add(new RowConstraints(5, Control.USE_COMPUTED_SIZE, Double.POSITIVE_INFINITY, Priority.ALWAYS, VPos.CENTER, true));
}
for (int i = 0; i < myGameParams.gridYSize; i++) {
myGridPane.getColumnConstraints().add(new ColumnConstraints(5, Control.USE_COMPUTED_SIZE, Double.POSITIVE_INFINITY, Priority.ALWAYS, HPos.CENTER, true));
}
// Adding padding so there are borders between perimeter cells and window edges
myGridPane.setPadding(new Insets(0, 10, 10, 0));
}
private void addFileSelector(Stage stage) {
HBox hbox = new HBox();
Text text = new Text("Load an XML file to begin the simulation: ");
openButton.setScaleX(0.8);
openButton.setScaleY(0.8);
openButton.setOnAction(
new EventHandler<ActionEvent>() {
@Override
public void handle(final ActionEvent e) {
fileChooser.setTitle("Open XML File");
fileChooser.getExtensionFilters().addAll(
new ExtensionFilter("XML Files", "*.xml"));
myFile = fileChooser.showOpenDialog(stage);
if (myFile == null) {
System.out.println("No file selected");
Stage dialog = new Stage();
dialog.initModality(Modality.APPLICATION_MODAL);
dialog.initOwner(stage);
VBox textBox = new VBox();
textBox.getChildren().add(new Text("You haven't selected an XML file.\nPlease select one."));
Scene dialogScene = new Scene(textBox, 500, 100);
dialog.setScene(dialogScene);
dialog.show();
} else {
try {
myGameParams = myXMLParser.parseInitialCellsFromFile(myFile);
myFileSelected = true;
myShowGrid.setDisable(false);
} catch (ParserConfigurationException
| SAXException | IOException e1) {
e1.printStackTrace();
}
myGrid = myCellManager.initialize(myGameParams.simulationMode, myGameParams.gridXSize,
myGameParams.gridYSize, myGameParams.initialCells);
}
}
});
hbox.getChildren().addAll(text, openButton);
myBorderPane.setTop(hbox);
}
private Node getNodeFromGridPane(int col, int row) {
for (Node node : myGridPane.getChildren()) {
if (GridPane.getColumnIndex(node) == col && GridPane.getRowIndex(node) == row) {
return node;
}
}
return null;
}
private void addButtons() {
VBox vbox = new VBox();
vbox.setSpacing(myHeight/40);
vbox.setPadding(new Insets(myHeight/4, 0, 0, 2));
Text speed = new Text("Speed");
speedOptions.getItems().addAll(
VERY_SLOW, SLOW, NORMAL, FAST, VERY_FAST);
// By default set this to normal speed
speedOptions.setValue(NORMAL);
// Adding buttons to vertical box. This could have been cleaner with an array of Buttons but from
// a readability standpoint, this is probably better
vbox.getChildren().add(myShowGrid);
vbox.getChildren().add(myRestart);
vbox.getChildren().add(myStart);
vbox.getChildren().add(myStop);
vbox.getChildren().add(myStep);
vbox.getChildren().add(speed);
vbox.getChildren().add(speedOptions);
myBorderPane.setLeft(vbox);
}
private void setButtonsOnAction() {
myStart.setOnAction(
new EventHandler<ActionEvent>() {
@Override
public void handle(final ActionEvent e) {
myAnimation.play();
myLastClicked = myStart;
}
});
myStop.setOnAction(
new EventHandler<ActionEvent>() {
@Override
public void handle(final ActionEvent e) {
myAnimation.pause();
myLastClicked = myStop;
}
});
myRestart.setOnAction(
new EventHandler<ActionEvent>() {
@Override
public void handle(final ActionEvent e) {
}
});
myStep.setOnAction(
new EventHandler<ActionEvent>() {
@Override
public void handle(final ActionEvent e) {
myAnimation.play();
myStepClicked = true;
myLastClicked = myStep;
}
});
myShowGrid.setOnAction(
new EventHandler<ActionEvent>() {
@Override
public void handle(final ActionEvent e) {
addIndividualCells();
addGridConstraints();
myAnimation.play();
myAnimation.pause();
//myShowGrid.setDisable(true);
myLastClicked = myShowGrid;
}
});
}
private EventHandler<ActionEvent> oneFrame = new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent evt) {
checkFileSelectedAndSetFlags();
if ((myLastClicked.equals(myStep))) {
if (myStepClicked) {
updateGrid();
updateDisplay();
myStepClicked = false;
myAnimation.pause();
}
} else {
updateGrid();
updateDisplay();
}
checkSpeedSelection();
}
};
public KeyFrame start () {
return new KeyFrame(Duration.millis(1000), oneFrame);
}
private void checkFileSelectedAndSetFlags() {
if ((myFile != null) && (myFileSelected)) {
myGridSet = true;
myFileSelected = false;
}
}
private void updateGrid() {
if (myGridSet) {
myCellManager.updateGrid();
}
}
// Currently updating display by picking a random color;
private void updateDisplay () {
if (myGridSet) {
for (int i = 0; i < myGameParams.gridXSize; i++) {
for (int j = 0; j < myGameParams.gridYSize; j++) {
Node node = getNodeFromGridPane(i, j);
Cell cell = myGrid[i][j];
node.setStyle("-fx-background-color: "+ POSSIBLE_COLORS[cell.getState()] +";");
}
}
}
}
private void checkSpeedSelection() {
if (speedOptions.getValue().equals(VERY_SLOW)) {
myAnimation.setRate(0.25);
} else {
if ((speedOptions.getValue().equals(SLOW))) {
myAnimation.setRate(0.5);
} else {
if ((speedOptions.getValue().equals(NORMAL))) {
myAnimation.setRate(1.0);
} else {
if ((speedOptions.getValue().equals(FAST))) {
myAnimation.setRate(2.0);
} else {
myAnimation.setRate(4.0);
}
}
}
}
}
} | src/cellsociety_team16/CellViewer.java | package cellsociety_team16;
import java.io.File;
import java.io.IOException;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.SAXException;
import parent.Cell;
import parent.CellManager;
import parent.FireCellManager;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.HPos;
import javafx.geometry.Insets;
import javafx.geometry.VPos;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Control;
import javafx.scene.layout.*;
import javafx.scene.text.Text;
import javafx.stage.FileChooser;
import javafx.stage.FileChooser.ExtensionFilter;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.util.Duration;
/**
 * JavaFX viewer for a cell-society simulation: loads an XML configuration,
 * renders the cell grid in a GridPane, and drives simulation steps via a
 * Timeline with start/stop/step/speed controls.
 */
public class CellViewer {
	private static final String VERY_FAST = "Faster";
	private static final String FAST = "Fast";
	private static final String NORMAL = "Normal";
	private static final String SLOW = "Slow";
	private static final String VERY_SLOW = "Slower";
	private InitialGameParameters myGameParams;
	private XMLParsing myXMLParser;
	private boolean myGridSet = false;
	private boolean myStepClicked = false;
	private boolean myFileSelected = false;
	private Button myRestart = new Button("Restart");
	private Button myShowGrid = new Button("Show Grid");
	private Button myStart= new Button("Start/Resume");
	private Button myStop = new Button("Stop/Pause");
	private Button myStep = new Button ("Step");
	// Last control the user pressed; drives per-frame behavior below.
	private Button myLastClicked = null;
	private Cell[][] myGrid;
	// Stores File object .XML
	private File myFile;
	// Stores the overall Grid (consisting of individual cells)
	private GridPane myGridPane;
	//Border Pane holds the scene graph
	private BorderPane myBorderPane;
	private CellManager myCellManager;
	private Timeline myAnimation = new Timeline();
	// Index of this array is the cell state; value is its fill color.
	private static final String [] POSSIBLE_COLORS = {"white", "red", "blue"};
	private final FileChooser fileChooser = new FileChooser();
	private final Button openButton = new Button("...");
	private final ComboBox<String> speedOptions = new ComboBox<String>();
	private int myHeight;
	public CellViewer(Timeline animation) {
		myAnimation = animation;
	}
	private void setHeight(int height) {
		myHeight = height;
	}
	// Builds the scene graph (file selector, control buttons); the grid is
	// added later, when "Show Grid" is pressed.
	public Scene init(Stage stage, int width, int height) {
		setHeight(height);
		myBorderPane = new BorderPane();
		myGridPane = new GridPane();
		myXMLParser = new XMLParsing();
		myCellManager = new FireCellManager();
		myShowGrid.setDisable(true);
		//Border Pane holds the scene graph
		Scene scene = new Scene(myBorderPane, width, height);
		addFileSelector(stage);
		addButtons();
		setButtonsOnAction();
		return scene;
	}
	// Populates the grid pane: node at (column=col, row=row) shows
	// myGrid[row][col]. NOTE(review): repeated "Show Grid" clicks re-add
	// children without clearing - verify whether duplicates are intended.
	private void addIndividualCells() {
		myBorderPane.setCenter(myGridPane);
		for (int row = 0; row < myGameParams.gridXSize ; row++) {
			for (int col = 0; col < myGameParams.gridYSize; col ++) {
				GridPane square = new GridPane();
				Cell cell = myGrid[row][col];
				square.setStyle("-fx-background-color: "+ POSSIBLE_COLORS[cell.getState()] +";");
				myGridPane.add(square, col, row);
			}
		}
	}
	private void addGridConstraints() {
		for (int i = 0; i < myGameParams.gridXSize; i++) {
			myGridPane.getRowConstraints().add(new RowConstraints(5, Control.USE_COMPUTED_SIZE, Double.POSITIVE_INFINITY, Priority.ALWAYS, VPos.CENTER, true));
		}
		for (int i = 0; i < myGameParams.gridYSize; i++) {
			myGridPane.getColumnConstraints().add(new ColumnConstraints(5, Control.USE_COMPUTED_SIZE, Double.POSITIVE_INFINITY, Priority.ALWAYS, HPos.CENTER, true));
		}
		// Adding padding so there are borders between perimeter cells and window edges
		myGridPane.setPadding(new Insets(0, 10, 10, 0));
	}
	// "Load XML" row: prompt plus a browse button that opens a FileChooser
	// and parses the selected simulation file.
	private void addFileSelector(Stage stage) {
		HBox hbox = new HBox();
		Text text = new Text("Load an XML file to begin the simulation: ");
		openButton.setScaleX(0.8);
		openButton.setScaleY(0.8);
		openButton.setOnAction(
				new EventHandler<ActionEvent>() {
					@Override
					public void handle(final ActionEvent e) {
						fileChooser.setTitle("Open XML File");
						fileChooser.getExtensionFilters().addAll(
								new ExtensionFilter("XML Files", "*.xml"));
						myFile = fileChooser.showOpenDialog(stage);
						if (myFile == null) {
							// Modal reminder dialog when the chooser was cancelled.
							System.out.println("No file selected");
							Stage dialog = new Stage();
							dialog.initModality(Modality.APPLICATION_MODAL);
							dialog.initOwner(stage);
							VBox textBox = new VBox();
							textBox.getChildren().add(new Text("You haven't selected an XML file.\nPlease select one."));
							Scene dialogScene = new Scene(textBox, 500, 100);
							dialog.setScene(dialogScene);
							dialog.show();
						} else {
							try {
								myGameParams = myXMLParser.parseInitialCellsFromFile(myFile);
								myFileSelected = true;
								myShowGrid.setDisable(false);
							} catch (ParserConfigurationException
									| SAXException | IOException e1) {
								e1.printStackTrace();
							}
							myGrid = myCellManager.initialize(myGameParams.simulationMode, myGameParams.gridXSize,
									myGameParams.gridYSize, myGameParams.initialCells);
						}
					}
				});
		hbox.getChildren().addAll(text, openButton);
		myBorderPane.setTop(hbox);
	}
	// Finds the child node at the given (column, row) grid position, or null.
	private Node getNodeFromGridPane(int col, int row) {
		for (Node node : myGridPane.getChildren()) {
			if (GridPane.getColumnIndex(node) == col && GridPane.getRowIndex(node) == row) {
				return node;
			}
		}
		return null;
	}
	// Control column: grid/restart/start/stop/step buttons and speed picker.
	private void addButtons() {
		VBox vbox = new VBox();
		vbox.setSpacing(myHeight/40);
		vbox.setPadding(new Insets(myHeight/4, 0, 0, 2));
		Text speed = new Text("Speed");
		speedOptions.getItems().addAll(
				VERY_SLOW, SLOW, NORMAL, FAST, VERY_FAST);
		// By default set this to normal speed
		speedOptions.setValue(NORMAL);
		// Adding buttons to vertical box. This could have been cleaner with an array of Buttons but from
		// a readability standpoint, this is probably better
		vbox.getChildren().add(myShowGrid);
		vbox.getChildren().add(myRestart);
		vbox.getChildren().add(myStart);
		vbox.getChildren().add(myStop);
		vbox.getChildren().add(myStep);
		vbox.getChildren().add(speed);
		vbox.getChildren().add(speedOptions);
		myBorderPane.setLeft(vbox);
	}
	// Wires up the button handlers that drive the animation.
	private void setButtonsOnAction() {
		myStart.setOnAction(
				new EventHandler<ActionEvent>() {
					@Override
					public void handle(final ActionEvent e) {
						myAnimation.play();
						myLastClicked = myStart;
					}
				});
		myStop.setOnAction(
				new EventHandler<ActionEvent>() {
					@Override
					public void handle(final ActionEvent e) {
						myAnimation.pause();
						myLastClicked = myStop;
					}
				});
		myRestart.setOnAction(
				new EventHandler<ActionEvent>() {
					@Override
					public void handle(final ActionEvent e) {
						// Restart behavior not yet implemented.
					}
				});
		myStep.setOnAction(
				new EventHandler<ActionEvent>() {
					@Override
					public void handle(final ActionEvent e) {
						// Play one frame; oneFrame pauses again after a step.
						myAnimation.play();
						myStepClicked = true;
						myLastClicked = myStep;
					}
				});
		myShowGrid.setOnAction(
				new EventHandler<ActionEvent>() {
					@Override
					public void handle(final ActionEvent e) {
						addIndividualCells();
						addGridConstraints();
						// Prime the timeline but hold at the initial state.
						myAnimation.play();
						myAnimation.pause();
						//myShowGrid.setDisable(true);
						myLastClicked = myShowGrid;
					}
				});
	}
	// Per-frame handler: advance the model and redraw, honoring step mode.
	private EventHandler<ActionEvent> oneFrame = new EventHandler<ActionEvent>() {
		@Override
		public void handle(ActionEvent evt) {
			checkFileSelectedAndSetFlags();
			if ((myLastClicked.equals(myStep))) {
				if (myStepClicked) {
					updateGrid();
					updateDisplay();
					myStepClicked = false;
					myAnimation.pause();
				}
			} else {
				updateGrid();
				updateDisplay();
			}
			checkSpeedSelection();
		}
	};
	public KeyFrame start () {
		return new KeyFrame(Duration.millis(1000), oneFrame);
	}
	private void checkFileSelectedAndSetFlags() {
		if ((myFile != null) && (myFileSelected)) {
			myGridSet = true;
			myFileSelected = false;
		}
	}
	private void updateGrid() {
		if (myGridSet) {
			myCellManager.updateGrid();
		}
	}
	// Currently updating display by picking a random color;
	// NOTE(review): cells were added at (column=col, row=row) but this looks
	// them up as getNodeFromGridPane(i, j) with i iterating rows - appears
	// transposed for non-square grids; verify against addIndividualCells.
	private void updateDisplay () {
		if (myGridSet) {
			for (int i = 0; i < myGameParams.gridXSize; i++) {
				for (int j = 0; j < myGameParams.gridYSize; j++) {
					Node node = getNodeFromGridPane(i, j);
					Cell cell = myGrid[i][j];
					node.setStyle("-fx-background-color: "+ POSSIBLE_COLORS[cell.getState()] +";");
				}
			}
		}
	}
	// Applies the speed combo-box selection to the timeline's playback rate.
	private void checkSpeedSelection() {
		if (speedOptions.getValue().equals(VERY_SLOW)) {
			myAnimation.setRate(0.25);
		} else {
			if ((speedOptions.getValue().equals(SLOW))) {
				myAnimation.setRate(0.5);
			} else {
				if ((speedOptions.getValue().equals(NORMAL))) {
					myAnimation.setRate(1.0);
				} else {
					if ((speedOptions.getValue().equals(FAST))) {
						myAnimation.setRate(2.0);
					} else {
						myAnimation.setRate(4.0);
					}
				}
			}
		}
	}
}
| src/cellsociety_team16/CellViewer.java | Added border for grid cells | <ide><path>rc/cellsociety_team16/CellViewer.java
<ide>
<ide> import java.io.File;
<ide> import java.io.IOException;
<add>
<ide> import javax.xml.parsers.ParserConfigurationException;
<add>
<ide> import org.xml.sax.SAXException;
<add>
<ide> import parent.Cell;
<ide> import parent.CellManager;
<ide> import parent.FireCellManager;
<ide> square.setStyle("-fx-background-color: "+ POSSIBLE_COLORS[cell.getState()] +";");
<ide> myGridPane.add(square, col, row);
<ide> }
<del> }
<add> }
<add> // Creates border for each cell
<add> myGridPane.setStyle("-fx-background-color: black; -fx-padding: 2; -fx-hgap: 2; -fx-vgap: 2;");
<ide> }
<ide>
<ide> private void addGridConstraints() { |
|
Java | mit | dcb9fa24cd2a09639e32c095c77d2b0032cc0e44 | 0 | DruidGreeneyes/rivet-core.java,DruidGreeneyes/rivet-core.java | package rivet.core.labels;
import java.util.Arrays;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import rivet.core.exceptions.SizeMismatchException;
import rivet.core.util.Pair;
import rivet.core.vectorpermutations.Permutations;
public class RIVs {

    /**
     * Returns the sum of two RIVs as a new RIV.
     *
     * @param rivA first operand
     * @param rivB second operand
     * @return {@code rivA.add(rivB)}
     * @throws SizeMismatchException if the two RIVs are of incompatible sizes
     */
    public static RIV addRIVs(final RIV rivA, final RIV rivB)
            throws SizeMismatchException {
        return rivA.add(rivB);
    }

    /**
     * Dot product of two RIVs: the sum of products of the values stored at
     * every key present in both vectors.
     */
    public static double dotProduct(final RIV rivA, final RIV rivB) {
        return getMatchingValStream(rivA, rivB)
                .mapToDouble((valPair) -> valPair.apply((a, b) -> a * b)).sum();
    }

    /**
     * Streams the keys present in both {@code rivA} and {@code rivB}.
     *
     * BUG FIX: the original filtered rivA's key stream with
     * {@code rivA::contains}, which is trivially true for rivA's own keys and
     * left the {@code rivB} parameter entirely unused — so dotProduct (and
     * hence similarity) summed over ALL of rivA's keys instead of only the
     * shared ones. Filter against rivB so only genuinely matching keys
     * survive.
     */
    private static IntStream getMatchingKeyStream(final RIV rivA,
            final RIV rivB) {
        return rivA.keyStream().filter(rivB::contains);
    }

    /**
     * Streams (rivA value, rivB value) pairs for every key the two RIVs
     * share.
     */
    private static Stream<Pair<Double, Double>> getMatchingValStream(
            final RIV rivA, final RIV rivB) {
        return getMatchingKeyStream(rivA, rivB)
                .mapToObj((i) -> Pair.make(rivA.get(i), rivB.get(i)));
    }

    /**
     * Applies the given permutations to the RIV the given number of times.
     */
    public static RIV permuteRIV(final RIV riv, final Permutations permutations,
            final int times) {
        return riv.permute(permutations, times);
    }

    /**
     * Similarity of two RIVs: the dot product normalised by the product of
     * the two magnitudes (cosine-style). Returns 0 when either magnitude is
     * zero, avoiding division by zero.
     */
    public static double similarity(final RIV rivA, final RIV rivB) {
        final double mag = rivA.magnitude() * rivB.magnitude();
        return mag == 0 ? 0 : dotProduct(rivA, rivB) / mag;
    }

    /**
     * Sums the given RIVs onto {@code zeroValue}.
     */
    public static RIV sumRIVs(final RIV zeroValue, final RIV... rivs)
            throws SizeMismatchException {
        return sumRIVs(zeroValue, Arrays.stream(rivs));
    }

    /**
     * Reduces the stream onto {@code zeroValue} via destructiveAdd.
     * NOTE(review): the name destructiveAdd suggests the accumulator is
     * mutated in place, i.e. zeroValue itself may be modified — confirm
     * against the RIV implementation before sharing a zeroValue instance.
     */
    public static RIV sumRIVs(final RIV zeroValue, final Stream<RIV> rivs)
            throws SizeMismatchException {
        return rivs.reduce(zeroValue, RIV::destructiveAdd);
    }

    // Utility class; not instantiable.
    private RIVs() {
    }

}
| src/rivet/core/labels/RIVs.java | package rivet.core.labels;
import java.util.Arrays;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import rivet.core.exceptions.SizeMismatchException;
import rivet.core.util.Pair;
import rivet.core.vectorpermutations.Permutations;
public class RIVs {
public static RIV addRIVs(final RIV rivA, final RIV rivB)
throws SizeMismatchException {
return rivA.add(rivB);
}
public static double dotProduct(final RIV rivA, final RIV rivB) {
return getMatchingValStream(rivA, rivB)
.mapToDouble((valPair) -> valPair.apply((a, b) -> a * b)).sum();
}
private static IntStream getMatchingKeyStream(final RIV rivA,
final RIV rivB) {
return rivA.keyStream().filter(rivA::contains);
}
private static Stream<Pair<Double, Double>> getMatchingValStream(
final RIV rivA, final RIV rivB) {
return getMatchingKeyStream(rivA, rivB)
.mapToObj((i) -> Pair.make(rivA.get(i), rivB.get(i)));
}
public static RIV permuteRIV(final RIV riv, final Permutations permutations,
final int times) {
return riv.permute(permutations, times);
}
public static double similarity(final RIV rivA, final RIV rivB) {
final double mag = rivA.magnitude() * rivB.magnitude();
return mag == 0 ? 0 : dotProduct(rivA, rivB) / mag;
}
public static RIV sumRIVs(final RIV zeroValue, final RIV... rivs)
throws SizeMismatchException {
return sumRIVs(zeroValue, Arrays.stream(rivs));
}
public static RIV sumRIVs(final RIV zeroValue, final Stream<RIV> rivs)
throws SizeMismatchException {
return rivs.reduce(zeroValue, RIV::destructiveAdd);
}
}
| add private constructor to RIVs | src/rivet/core/labels/RIVs.java | add private constructor to RIVs | <ide><path>rc/rivet/core/labels/RIVs.java
<ide> return rivs.reduce(zeroValue, RIV::destructiveAdd);
<ide> }
<ide>
<add> private RIVs() {
<add> }
<add>
<ide> } |
|
Java | artistic-2.0 | 7c63d2776da85a08cdd09f4a3d81eca77b768c53 | 0 | metamolecular/opsin | package uk.ac.cam.ch.wwmm.opsin;
import nu.xom.Attribute;
import nu.xom.Element;
import static uk.ac.cam.ch.wwmm.opsin.XmlDeclarations.*;
/**A token in a chemical name. hex, yl, ane, chloro etc.
* Stores information about the XML element that will be produced for the token.
*
* @author ptc24
* @author dl387
*
*/
class Token {

    /** A reference copy of the XML element to produce for this token. */
    private final Element elem;

    /**
     * Should this token actually be used when writing XML. Set to true for
     * meaningless tokens e.g. e, o, endOfSubstituent etc., whose makeElement
     * then returns null.
     */
    private boolean ignoreWhenWritingXML =false;

    /**
     * Makes a new token using a regexToken Element.
     * The produced element's tag name comes from the "tagname" attribute;
     * "value", "type" and "subType" attributes are copied across under the
     * corresponding *_ATR names, and ignoreWhenWritingXML="yes" flags the
     * token as suppressed. Any other attribute is ignored.
     *
     * @param regexTokenElement the regexToken element from the resource file
     */
    Token(Element regexTokenElement) {
        elem = new Element(regexTokenElement.getAttributeValue("tagname"));
        // Single pass over the attributes rather than repeated lookups.
        for (int i = 0, l = regexTokenElement.getAttributeCount(); i < l; i++) {
            Attribute att = regexTokenElement.getAttribute(i);
            String attName = att.getLocalName();
            if (attName.equals("value")){
                elem.addAttribute(new Attribute(VALUE_ATR, att.getValue()));
            }
            else if (attName.equals("type")){
                elem.addAttribute(new Attribute(TYPE_ATR, att.getValue()));
            }
            else if (attName.equals("subType")){
                elem.addAttribute(new Attribute(SUBTYPE_ATR, att.getValue()));
            }
            else if (attName.equals("ignoreWhenWritingXML") && "yes".equals(att.getValue())){
                ignoreWhenWritingXML = true;
            }
        }
    }

    /**
     * Makes a new Token based on reference elements from an XML file.
     * The token element is shallow-copied and renamed to the tokenList's
     * "tagname"; the tokenList's "type"/"subType" attributes are copied onto
     * it, and ignoreWhenWritingXML="yes" on the tokenList suppresses output.
     *
     * @param tokenElement The token element in the XML tokens file.
     * @param tokenList The tokenList element the token was taken from.
     */
    Token(Element tokenElement, Element tokenList) {
        elem = OpsinTools.shallowCopy(tokenElement);
        elem.setLocalName(tokenList.getAttributeValue("tagname"));
        // Attributes are inherited from the enclosing tokenList, not the
        // individual token element.
        for (int i = 0, l = tokenList.getAttributeCount(); i < l; i++) {
            Attribute att = tokenList.getAttribute(i);
            String attName = att.getLocalName();
            if (attName.equals("type")){
                elem.addAttribute(new Attribute(TYPE_ATR, att.getValue()));
            }
            else if (attName.equals("subType")){
                elem.addAttribute(new Attribute(SUBTYPE_ATR, att.getValue()));
            }
            else if (attName.equals("ignoreWhenWritingXML") && "yes".equals(att.getValue())){
                ignoreWhenWritingXML = true;
            }
        }
    }

    /**
     * Makes an XML element of the token, or null if the token is flagged
     * ignoreWhenWritingXML.
     *
     * @param text The string to go in the Text node contained within the Element.
     * @return The element produced, or null for suppressed tokens.
     */
    Element makeElement(String text) {
        if (!ignoreWhenWritingXML){
            Element tokenElement = OpsinTools.shallowCopy(elem);
            tokenElement.appendChild(text);
            return tokenElement;
        }
        else{
            return null;
        }
    }
}
| opsin-core/src/main/java/uk/ac/cam/ch/wwmm/opsin/Token.java | package uk.ac.cam.ch.wwmm.opsin;
import nu.xom.Attribute;
import nu.xom.Element;
import static uk.ac.cam.ch.wwmm.opsin.XmlDeclarations.*;
/**A token in a chemical name. hex, yl, ane, chloro etc.
* Stores information about the XML element that will be produced for the token.
*
* @author ptc24
* @author dl387
*
*/
class Token {
/**A reference copy of the XML element to produce*/
private final Element elem;
/**Should this token actually be used. Set to true for meaningless tokens e.g. e, o, endOfSubstituent etc.*/
private boolean ignoreWhenWritingXML =false;
/**
* Makes a new token using a regexToken Element
* @param regexTokenElement
*/
Token(Element regexTokenElement) {
elem = new Element(regexTokenElement.getAttributeValue("tagname"));
if (regexTokenElement.getAttribute("value")!=null){
elem.addAttribute(new Attribute(VALUE_ATR, regexTokenElement.getAttributeValue("value")));
}
if (regexTokenElement.getAttribute("type")!=null){
elem.addAttribute(new Attribute(TYPE_ATR, regexTokenElement.getAttributeValue("type")));
}
if (regexTokenElement.getAttribute("subType")!=null){
elem.addAttribute(new Attribute(SUBTYPE_ATR, regexTokenElement.getAttributeValue("subType")));
}
if ("yes".equals(regexTokenElement.getAttributeValue("ignoreWhenWritingXML"))){
ignoreWhenWritingXML=true;
}
}
/**Makes a new Token based on reference elements from an XML file.
*
* @param tokenElement The token element in the XML tokens file.
* @param tokenList The tokenList element the token was taken from.
*/
Token(Element tokenElement, Element tokenList) {
elem = OpsinTools.shallowCopy(tokenElement);
elem.setLocalName(tokenList.getAttributeValue("tagname"));
if(tokenList.getAttribute("type") != null) {
elem.addAttribute(new Attribute(TYPE_ATR, tokenList.getAttributeValue("type")));
}
if(tokenList.getAttribute("subType") != null) {
elem.addAttribute(new Attribute(SUBTYPE_ATR, tokenList.getAttributeValue("subType")));
}
if ("yes".equals(tokenList.getAttributeValue("ignoreWhenWritingXML"))){
ignoreWhenWritingXML=true;
}
}
/**Makes an XML element of the token.
*
* @param text The string to go in the Text node contained within the Element.
* @return The element produced.
*/
Element makeElement(String text) {
if (!ignoreWhenWritingXML){
Element tokenElement = OpsinTools.shallowCopy(elem);
tokenElement.appendChild(text);
return tokenElement;
}
else{
return null;
}
}
}
| Minor efficiency tweak
| opsin-core/src/main/java/uk/ac/cam/ch/wwmm/opsin/Token.java | Minor efficiency tweak | <ide><path>psin-core/src/main/java/uk/ac/cam/ch/wwmm/opsin/Token.java
<ide> */
<ide> Token(Element regexTokenElement) {
<ide> elem = new Element(regexTokenElement.getAttributeValue("tagname"));
<del> if (regexTokenElement.getAttribute("value")!=null){
<del> elem.addAttribute(new Attribute(VALUE_ATR, regexTokenElement.getAttributeValue("value")));
<del> }
<del> if (regexTokenElement.getAttribute("type")!=null){
<del> elem.addAttribute(new Attribute(TYPE_ATR, regexTokenElement.getAttributeValue("type")));
<del> }
<del> if (regexTokenElement.getAttribute("subType")!=null){
<del> elem.addAttribute(new Attribute(SUBTYPE_ATR, regexTokenElement.getAttributeValue("subType")));
<del> }
<del> if ("yes".equals(regexTokenElement.getAttributeValue("ignoreWhenWritingXML"))){
<del> ignoreWhenWritingXML=true;
<add> for (int i = 0, l = regexTokenElement.getAttributeCount(); i < l; i++) {
<add> Attribute att = regexTokenElement.getAttribute(i);
<add> String attName = att.getLocalName();
<add> if (attName.equals("value")){
<add> elem.addAttribute(new Attribute(VALUE_ATR, att.getValue()));
<add> }
<add> else if (attName.equals("type")){
<add> elem.addAttribute(new Attribute(TYPE_ATR, att.getValue()));
<add> }
<add> else if (attName.equals("subType")){
<add> elem.addAttribute(new Attribute(SUBTYPE_ATR, att.getValue()));
<add> }
<add> else if (attName.equals("ignoreWhenWritingXML") && "yes".equals(att.getValue())){
<add> ignoreWhenWritingXML = true;
<add> }
<ide> }
<ide> }
<ide>
<ide> Token(Element tokenElement, Element tokenList) {
<ide> elem = OpsinTools.shallowCopy(tokenElement);
<ide> elem.setLocalName(tokenList.getAttributeValue("tagname"));
<del> if(tokenList.getAttribute("type") != null) {
<del> elem.addAttribute(new Attribute(TYPE_ATR, tokenList.getAttributeValue("type")));
<del> }
<del> if(tokenList.getAttribute("subType") != null) {
<del> elem.addAttribute(new Attribute(SUBTYPE_ATR, tokenList.getAttributeValue("subType")));
<del> }
<del> if ("yes".equals(tokenList.getAttributeValue("ignoreWhenWritingXML"))){
<del> ignoreWhenWritingXML=true;
<add> for (int i = 0, l = tokenList.getAttributeCount(); i < l; i++) {
<add> Attribute att = tokenList.getAttribute(i);
<add> String attName = att.getLocalName();
<add> if (attName.equals("type")){
<add> elem.addAttribute(new Attribute(TYPE_ATR, att.getValue()));
<add> }
<add> else if (attName.equals("subType")){
<add> elem.addAttribute(new Attribute(SUBTYPE_ATR, att.getValue()));
<add> }
<add> else if (attName.equals("ignoreWhenWritingXML") && "yes".equals(att.getValue())){
<add> ignoreWhenWritingXML = true;
<add> }
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 0964adba83aea602258b95b274780cde6f56e008 | 0 | SeleniumHQ/fluent-selenium | package org.seleniumhq.selenium.fluent;
import org.junit.Test;
import org.openqa.selenium.*;
import org.openqa.selenium.firefox.FirefoxDriver;
import java.util.List;
import static com.thoughtworks.selenium.SeleneseTestBase.assertEquals;
import static junit.framework.TestCase.fail;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
public class StaleRecoveringTest {

    // Demonstrates the baseline WebDriver behaviour: a
    // StaleElementReferenceException must be caught by the caller, who then
    // re-finds the element and retries the operation by hand.
    @Test(timeout = 60000)
    public void vanillaWebDriverShouldCatchStaleExceptionAndBeAbleToCarryOnAnyway() {

        // We have to simulate a stale element exception as
        // we can't find a real-world example
        FirstGetAttributeIsStaleDriver driver = new FirstGetAttributeIsStaleDriver();

        driver.get("http://seleniumhq.github.io/fluent-selenium/7753/index.html");
        WebElement elem;
        try {
            elem = driver.findElement(By.id("cheddarCheeseLoginPassword"));
            elem.sendKeys("bar");
            // First getAttribute throws the simulated staleness, so this
            // must not complete normally.
            getAndVerify(elem);
            fail("should have barfed");
        } catch(StaleElementReferenceException sere) {
            // Manual recovery: re-locate the element and retry.
            elem = driver.findElement(By.id("cheddarCheeseLoginPassword"));
            getAndVerify(elem);
        } finally {
            driver.quit();
        }
        // getAttribute was invoked twice: the staleness-throwing call plus
        // the successful retry.
        assertEquals(2, driver.countOfGetAttribute[0]);

    }

    // Fetches the "value" attribute and checks the keys sent above arrived.
    private void getAndVerify(WebElement elem) {
        String val = elem.getAttribute("value");
        assertEquals("bar", val);
    }

    // Same scenario through FluentWebDriver, which is expected to recover
    // from the staleness internally — no try/catch needed in the test body.
    @Test
    public void knownStaleCaseCanBeOvercomeInFluentWebDriverToo() {

        // We have to simulate a stale element exception as
        // we can't find a real-world example
        FirstGetAttributeIsStaleDriver driver = new FirstGetAttributeIsStaleDriver();

        driver.get("http://seleniumhq.github.io/fluent-selenium/7753/index.html");
        assertEquals(false, driver.staleElementThrown[0]);

        FluentWebDriver fwd = new FluentWebDriver(driver);

        try {
            String val = fwd.input(By.id("cheddarCheeseLoginPassword"))
                    .sendKeys(Keys.chord(Keys.CONTROL, "a") + "bar")
                    .getAttribute("value").toString();
            try {
                assertThat(val, equalTo("bar"));
            } catch (AssertionError e) {
                // unrelated to the staleness, sometimes the field isn't getting overwritten in the sendKeys()
                assertThat(val, equalTo("barI can Haz Password"));
            }
        } finally {
            driver.quit();
        }

        // Two getAttribute calls prove the retry happened, and the flag
        // proves the simulated staleness was actually thrown (and recovered
        // from) rather than never triggered.
        assertEquals(2, driver.countOfGetAttribute[0]);
        assertEquals(true, driver.staleElementThrown[0]);

    }

    // A FirefoxDriver whose elements throw StaleElementReferenceException on
    // the FIRST getAttribute call only, recording how often getAttribute was
    // invoked and whether the staleness was ever thrown.
    private static class FirstGetAttributeIsStaleDriver extends FirefoxDriver {
        public final int[] countOfGetAttribute = {0};
        public final boolean[] staleElementThrown = {false};

        @Override
        public WebElement findElementById(String using) {
            final WebElement we = super.findElementById(using);
            // Wrap the real element; every method delegates except
            // getAttribute, which simulates staleness once.
            return new WebElement() {
                public void click() {
                    we.click();
                }

                public void submit() {
                    we.submit();
                }

                public void sendKeys(CharSequence... keysToSend) {
                    we.sendKeys(keysToSend);
                }

                public void clear() {
                    we.clear();
                }

                public String getTagName() {
                    return we.getTagName();
                }

                public String getAttribute(String name) {
                    // Throw only on the very first call; afterwards behave
                    // like the real element.
                    if (countOfGetAttribute[0] == 0) {
                        countOfGetAttribute[0]++;
                        staleElementThrown[0] = true;
                        throw new StaleElementReferenceException("boop");
                    } else {
                        countOfGetAttribute[0]++;
                        return we.getAttribute(name);
                    }
                }

                public boolean isSelected() {
                    return we.isSelected();
                }

                public boolean isEnabled() {
                    return we.isEnabled();
                }

                public String getText() {
                    return we.getText();
                }

                public List<WebElement> findElements(By by) {
                    return we.findElements(by);
                }

                public WebElement findElement(By by) {
                    return we.findElement(by);
                }

                public boolean isDisplayed() {
                    return we.isDisplayed();
                }

                public Point getLocation() {
                    return we.getLocation();
                }

                public Dimension getSize() {
                    return we.getSize();
                }

                public String getCssValue(String propertyName) {
                    return we.getCssValue(propertyName);
                }

                public <X> X getScreenshotAs(OutputType<X> outputType) throws WebDriverException { return we.getScreenshotAs(outputType); }
            };
        }
    }
}
| java/src/test/java/org/seleniumhq/selenium/fluent/StaleRecoveringTest.java | package org.seleniumhq.selenium.fluent;
import org.junit.Test;
import org.openqa.selenium.*;
import org.openqa.selenium.firefox.FirefoxDriver;
import java.util.List;
import static com.thoughtworks.selenium.SeleneseTestBase.assertEquals;
import static junit.framework.TestCase.fail;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
public class StaleRecoveringTest {
@Test(timeout = 60000)
public void vanillaWebDriverShouldCatchStaleExceptionAndBeAbleToCarryOnAnyway() {
// We have to simulate a stale element exception as
// we can't find a real-world example
FirstGetAttributeIsStaleDriver driver = new FirstGetAttributeIsStaleDriver();
driver.get("http://seleniumhq.github.io/fluent-selenium/7753/index.html");
WebElement elem;
try {
elem = driver.findElement(By.id("cheddarCheeseLoginPassword"));
elem.sendKeys("bar");
getAndVerify(elem);
fail("should have barfed");
} catch(StaleElementReferenceException sere) {
elem = driver.findElement(By.id("cheddarCheeseLoginPassword"));
getAndVerify(elem);
} finally {
driver.quit();
}
assertEquals(2, driver.countOfGetAttribute[0]);
}
private void getAndVerify(WebElement elem) {
String val = elem.getAttribute("value");
assertEquals("bar", val);
}
@Test
public void knownStaleCaseCanBeOvercomeInFluentWebDriverToo() {
// We have to simulate a stale element exception as
// we can't find a real-world example
FirstGetAttributeIsStaleDriver driver = new FirstGetAttributeIsStaleDriver();
driver.get("http://seleniumhq.github.io/fluent-selenium/7753/index.html");
FluentWebDriver fwd = new FluentWebDriver(driver);
try {
String val = fwd.input(By.id("cheddarCheeseLoginPassword"))
.sendKeys(Keys.chord(Keys.CONTROL, "a") + "bar")
.getAttribute("value").toString();
try {
assertThat(val, equalTo("bar"));
} catch (AssertionError e) {
// unrelated to the staleness, sometimes the field isn't getting overwritten in the sendKeys()
assertThat(val, equalTo("barI can Haz Password"));
}
} finally {
driver.quit();
}
assertEquals(2, driver.countOfGetAttribute[0]);
}
private static class FirstGetAttributeIsStaleDriver extends FirefoxDriver {
public final int[] countOfGetAttribute = {0};
@Override
public WebElement findElementById(String using) {
final WebElement we = super.findElementById(using);
return new WebElement() {
public void click() {
we.click();
}
public void submit() {
we.submit();
}
public void sendKeys(CharSequence... keysToSend) {
we.sendKeys(keysToSend);
}
public void clear() {
we.clear();
}
public String getTagName() {
return we.getTagName();
}
public String getAttribute(String name) {
if (countOfGetAttribute[0] == 0) {
countOfGetAttribute[0]++;
throw new StaleElementReferenceException("boop");
}
countOfGetAttribute[0]++;
return we.getAttribute(name);
}
public boolean isSelected() {
return we.isSelected();
}
public boolean isEnabled() {
return we.isEnabled();
}
public String getText() {
return we.getText();
}
public List<WebElement> findElements(By by) {
return we.findElements(by);
}
public WebElement findElement(By by) {
return we.findElement(by);
}
public boolean isDisplayed() {
return we.isDisplayed();
}
public Point getLocation() {
return we.getLocation();
}
public Dimension getSize() {
return we.getSize();
}
public String getCssValue(String propertyName) {
return we.getCssValue(propertyName);
}
public <X> X getScreenshotAs(OutputType<X> outputType) throws WebDriverException { return we.getScreenshotAs(outputType); }
};
}
}
}
| make test clearer
| java/src/test/java/org/seleniumhq/selenium/fluent/StaleRecoveringTest.java | make test clearer | <ide><path>ava/src/test/java/org/seleniumhq/selenium/fluent/StaleRecoveringTest.java
<ide> FirstGetAttributeIsStaleDriver driver = new FirstGetAttributeIsStaleDriver();
<ide>
<ide> driver.get("http://seleniumhq.github.io/fluent-selenium/7753/index.html");
<add> assertEquals(false, driver.staleElementThrown[0]);
<ide>
<ide> FluentWebDriver fwd = new FluentWebDriver(driver);
<ide>
<ide> }
<ide>
<ide> assertEquals(2, driver.countOfGetAttribute[0]);
<add> assertEquals(true, driver.staleElementThrown[0]);
<ide>
<ide> }
<ide>
<ide> private static class FirstGetAttributeIsStaleDriver extends FirefoxDriver {
<ide> public final int[] countOfGetAttribute = {0};
<add> public final boolean[] staleElementThrown = {false};
<ide>
<ide> @Override
<ide> public WebElement findElementById(String using) {
<ide> public String getAttribute(String name) {
<ide> if (countOfGetAttribute[0] == 0) {
<ide> countOfGetAttribute[0]++;
<add> staleElementThrown[0] = true;
<ide> throw new StaleElementReferenceException("boop");
<add> } else {
<add> countOfGetAttribute[0]++;
<add> return we.getAttribute(name);
<ide> }
<del> countOfGetAttribute[0]++;
<del> return we.getAttribute(name);
<ide> }
<ide>
<ide> public boolean isSelected() { |
|
Java | apache-2.0 | d25e45eecf6513694038e4a713d5db997aeab116 | 0 | mveitas/metrics,AltitudeDigital/metrics,gorzell/metrics,bentatham/metrics,fcrepo4-archive/metrics,gorzell/metrics,rexren/metrics,randomstatistic/metrics,thelastpickle/metrics,slachiewicz/metrics,chenxianghua2014/metrics,scullxbones/metrics-scala,maciej/metrics-scala,dropwizard/metrics,slovdahl/metrics,cirrus-dev/metrics,signalfx/metrics,gburton1/metrics,timezra/metrics,ohr/metrics,jplock/metrics,jasw/metrics,Banno/metrics,ChetnaChaudhari/metrics,gburton1/metrics,dropwizard/metrics,mtakaki/metrics,gorzell/metrics,mspiegel/metrics,fcrepo4-archive/metrics,infusionsoft/yammer-metrics,mt0803/metrics,wickedshimmy/metrics-scala,ind9/metrics,mveitas/metrics,mattnelson/metrics,unitsofmeasurement/metrics,infusionsoft/yammer-metrics,egymgmbh/metrics,wfxiang08/metrics,erikvanoosten/metrics-scala,kevintvh/metrics,tempredirect/metrics,valery1707/dropwizard-metrics,mnuessler/metrics,dropwizard/metrics | package com.yammer.metrics.core.tests;
import com.yammer.metrics.core.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.*;
public class MetricsRegistryTest {
    private MetricsRegistry registry;

    @Before
    public void setUp() throws Exception {
        this.registry = new MetricsRegistry();
    }

    @After
    public void tearDown() throws Exception {
        // Shuts down the registry (and any executors it created) after each
        // test.
        registry.shutdown();
    }

    // groupedMetrics() should return metrics grouped by owning class name,
    // each group sorted by MetricName.
    @Test
    public void sortingMetricNamesSortsThemByClassThenScopeThenName() throws Exception {
        final MetricName one = new MetricName(Object.class, "one");
        final MetricName two = new MetricName(Object.class, "two");
        final MetricName three = new MetricName(String.class, "three");

        final Counter mOne = registry.newCounter(Object.class, "one");
        final Counter mTwo = registry.newCounter(Object.class, "two");
        final Counter mThree = registry.newCounter(String.class, "three");

        // Build the expected class-name -> (metric-name -> metric) structure
        // by hand and compare wholesale.
        final SortedMap<String, SortedMap<MetricName, Metric>> sortedMetrics = new TreeMap<String, SortedMap<MetricName, Metric>>();
        final TreeMap<MetricName, Metric> objectMetrics = new TreeMap<MetricName, Metric>();
        objectMetrics.put(one, mOne);
        objectMetrics.put(two, mTwo);
        sortedMetrics.put(Object.class.getCanonicalName(), objectMetrics);

        final TreeMap<MetricName, Metric> stringMetrics = new TreeMap<MetricName, Metric>();
        stringMetrics.put(three, mThree);
        sortedMetrics.put(String.class.getCanonicalName(), stringMetrics);

        assertThat(registry.groupedMetrics(),
                   is(sortedMetrics));
    }

    // A registered listener is notified once for every metric type created.
    @Test
    public void listenersRegisterNewMetrics() throws Exception {
        final MetricsRegistryListener listener = mock(MetricsRegistryListener.class);
        registry.addListener(listener);

        final Gauge<?> gauge = mock(Gauge.class);
        registry.newGauge(MetricsRegistryTest.class, "gauge", gauge);
        final Counter counter = registry.newCounter(MetricsRegistryTest.class, "counter");
        final Histogram histogram = registry.newHistogram(MetricsRegistryTest.class, "histogram");
        final Meter meter = registry.newMeter(MetricsRegistryTest.class,
                                              "meter",
                                              "things",
                                              TimeUnit.SECONDS);
        final Timer timer = registry.newTimer(MetricsRegistryTest.class, "timer");

        verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "gauge"), gauge);

        verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "counter"), counter);

        verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "histogram"), histogram);

        verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "meter"), meter);

        verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "timer"), timer);
    }

    // After removeListener, no further notifications are delivered.
    @Test
    public void removedListenersDoNotReceiveEvents() throws Exception {
        final MetricsRegistryListener listener = mock(MetricsRegistryListener.class);
        registry.addListener(listener);
        final Counter counter1 = registry.newCounter(MetricsRegistryTest.class, "counter1");
        registry.removeListener(listener);
        final Counter counter2 = registry.newCounter(MetricsRegistryTest.class, "counter2");

        verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "counter1"), counter1);

        verify(listener, never()).onMetricAdded(new MetricName(MetricsRegistryTest.class, "counter2"), counter2);
    }

    // Removing a metric fires onMetricRemoved, strictly after the
    // corresponding onMetricAdded.
    @Test
    public void metricsCanBeRemoved() throws Exception {
        final MetricsRegistryListener listener = mock(MetricsRegistryListener.class);
        registry.addListener(listener);

        final MetricName name = new MetricName(MetricsRegistryTest.class, "counter1");

        final Counter counter1 = registry.newCounter(MetricsRegistryTest.class, "counter1");
        registry.removeMetric(MetricsRegistryTest.class, "counter1");

        final InOrder inOrder = inOrder(listener);
        inOrder.verify(listener).onMetricAdded(name, counter1);
        inOrder.verify(listener).onMetricRemoved(name);
    }

    // Executors handed out by the registry must be shut down when the
    // registry itself shuts down.
    @Test
    public void createdExecutorsAreShutDownOnShutdown() throws Exception {
        final ScheduledExecutorService service = registry.newScheduledThreadPool(1, "test");

        registry.shutdown();

        assertThat(service.isShutdown(),
                   is(true));
    }
}
| metrics-core/src/test/java/com/yammer/metrics/core/tests/MetricsRegistryTest.java | package com.yammer.metrics.core.tests;
import com.yammer.metrics.core.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.*;
public class MetricsRegistryTest {
private MetricsRegistry registry;
@Before
public void setUp() throws Exception {
this.registry = new MetricsRegistry();
}
@After
public void tearDown() throws Exception {
registry.shutdown();
}
@Test
public void sortingMetricNamesSortsThemByClassThenScopeThenName() throws Exception {
final MetricName one = new MetricName(Object.class, "one");
final MetricName two = new MetricName(Object.class, "two");
final MetricName three = new MetricName(String.class, "three");
final Counter mOne = registry.newCounter(Object.class, "one");
final Counter mTwo = registry.newCounter(Object.class, "two");
final Counter mThree = registry.newCounter(String.class, "three");
final SortedMap<String, SortedMap<MetricName, Metric>> sortedMetrics = new TreeMap<String, SortedMap<MetricName, Metric>>();
final TreeMap<MetricName, Metric> objectMetrics = new TreeMap<MetricName, Metric>();
objectMetrics.put(one, mOne);
objectMetrics.put(two, mTwo);
sortedMetrics.put(Object.class.getCanonicalName(), objectMetrics);
final TreeMap<MetricName, Metric> stringMetrics = new TreeMap<MetricName, Metric>();
stringMetrics.put(three, mThree);
sortedMetrics.put(String.class.getCanonicalName(), stringMetrics);
assertThat(registry.groupedMetrics(),
is(sortedMetrics));
}
@Test
public void listenersRegisterNewMetrics() throws Exception {
final MetricsRegistryListener listener = mock(MetricsRegistryListener.class);
registry.addListener(listener);
final Gauge<?> gauge = mock(Gauge.class);
registry.newGauge(MetricsRegistryTest.class, "gauge", gauge);
final Counter counter = registry.newCounter(MetricsRegistryTest.class, "counter");
final Histogram histogram = registry.newHistogram(MetricsRegistryTest.class, "histogram");
final Meter meter = registry.newMeter(MetricsRegistryTest.class,
"meter",
"things",
TimeUnit.SECONDS);
final Timer timer = registry.newTimer(MetricsRegistryTest.class, "timer");
verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "gauge"), gauge);
verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "counter"), counter);
verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "histogram"), histogram);
verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "meter"), meter);
verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "timer"), timer);
}
@Test
public void removedListenersDoNotReceiveEvents() throws Exception {
final MetricsRegistryListener listener = mock(MetricsRegistryListener.class);
registry.addListener(listener);
final Counter counter1 = registry.newCounter(MetricsRegistryTest.class, "counter1");
registry.removeListener(listener);
final Counter counter2 = registry.newCounter(MetricsRegistryTest.class, "counter2");
verify(listener).onMetricAdded(new MetricName(MetricsRegistryTest.class, "counter1"), counter1);
verify(listener, never()).onMetricAdded(new MetricName(MetricsRegistryTest.class, "counter2"), counter2);
}
@Test
public void metricsCanBeRemoved() throws Exception {
final MetricsRegistryListener listener = mock(MetricsRegistryListener.class);
registry.addListener(listener);
final MetricName name = new MetricName(MetricsRegistryTest.class, "counter1");
final Counter counter1 = registry.newCounter(MetricsRegistryTest.class, "counter1");
registry.removeMetric(MetricsRegistryTest.class, "counter1");
final InOrder inOrder = inOrder(listener);
inOrder.verify(listener).onMetricAdded(name, counter1);
inOrder.verify(listener).onMetricRemoved(name);
}
}
| Added a test for executor shutdown.
| metrics-core/src/test/java/com/yammer/metrics/core/tests/MetricsRegistryTest.java | Added a test for executor shutdown. | <ide><path>etrics-core/src/test/java/com/yammer/metrics/core/tests/MetricsRegistryTest.java
<ide>
<ide> import java.util.SortedMap;
<ide> import java.util.TreeMap;
<add>import java.util.concurrent.ScheduledExecutorService;
<ide> import java.util.concurrent.TimeUnit;
<ide>
<ide> import static org.hamcrest.Matchers.is;
<ide> inOrder.verify(listener).onMetricAdded(name, counter1);
<ide> inOrder.verify(listener).onMetricRemoved(name);
<ide> }
<add>
<add> @Test
<add> public void createdExecutorsAreShutDownOnShutdown() throws Exception {
<add> final ScheduledExecutorService service = registry.newScheduledThreadPool(1, "test");
<add>
<add> registry.shutdown();
<add>
<add> assertThat(service.isShutdown(),
<add> is(true));
<add> }
<ide> } |
|
JavaScript | mit | 54c6f8f19412de1818ceeb5b414a89c0a7be4328 | 0 | panates/sqb-serializer-oracle | /* sqb-serializer-oracle
------------------------
(c) 2017-present Panates
SQB may be freely distributed under the MIT license.
For details and documentation:
https://panates.github.io/sqb-serializer-oracle/
*/
const reservedWords = ['comment', 'dual'];
/**
 * Serializer extension that adapts generic SQB SQL generation to the Oracle
 * dialect: reserved words, rownum/OFFSET-FETCH pagination, `dual`, null
 * comparisons, to_date() literals and RETURNING ... INTO bind handling.
 */
class OracleSerializer {

  constructor() {
  }

  // noinspection JSMethodCanBeStatic,JSUnusedGlobalSymbols
  /**
   * Returns true if `s` is an Oracle reserved word that needs quoting
   * (case-insensitive check against the module-level list).
   */
  isReserved(s) {
    return reservedWords.includes(String(s).toLowerCase());
  }

  //noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
  /**
   * @override
   * Appends pagination to the generic SELECT output. Oracle 12c+ gets native
   * OFFSET ... FETCH; older servers get the classic nested-rownum emulation.
   */
  serializeSelect(instance, obj, inf) {
    let out = instance.serializeSelect(obj, inf);
    const limit = instance.query._limit || 0;
    // Negative offsets are clamped to 0.
    const offset = Math.max((obj._offset || 0), 0);

    if (limit || offset) {
      if (instance.config.serverVersion >= 12) {
        // Native 12c syntax; OFFSET only emitted when non-zero.
        if (offset)
          out += (offset ? '\nOFFSET ' + offset + ' ROWS' : '') +
              (limit ? ' FETCH NEXT ' + limit + ' ROWS ONLY' : '');
        else out += (limit ? '\nFETCH FIRST ' + limit + ' ROWS ONLY' : '');
      } else {
        // Pre-12c emulation. NOTE(review): '\n\t' / '\n\b' appear to be
        // indent/outdent markers consumed by the surrounding SQB
        // pretty-printer — confirm against sqb core.
        const a = obj._alias;
        const order = instance.query._orderby;
        if (offset || (order && order.length)) {
          // Double wrap: inner query numbers rows (rownum must be captured
          // before the outer filter), outer query applies the offset.
          out = 'select ' + (a ? a + '.' : '') + '* from (\n\t' +
              'select /*+ first_rows(' + (limit || 100) +
              ') */ t.*, rownum row$number from (\n\t' +
              out + '\n\b' +
              ') t' +
              (limit ? ' where rownum <= ' + (limit + offset) : '') +
              '\n\b)' + (a ? ' ' + a : '');
          if (offset)
            out += ' where row$number >= ' + (offset + 1);
        } else {
          // No offset and no order-by: a single rownum wrapper suffices.
          out = 'select ' + (a ? a + '.' : '') + '* from (\n\t' +
              out + '\n\b' +
              ') where rownum <= ' + limit;
        }
      }
    }
    return out;
  }

  //noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
  /**
   * @override
   * Oracle requires a FROM clause; fall back to 'from dual' when the query
   * has no tables.
   */
  serializeFrom(instance, tables, inf) {
    return instance.serializeFrom(tables, inf) || 'from dual';
  }

  //noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
  /**
   * @override
   * Rewrites null comparisons ('= null', '!= null', '<> null') into Oracle's
   * 'is null' / 'is not null' forms. Raw conditions are passed through
   * untouched. NOTE(review): the replace operates on the fully serialized
   * condition string, so a string literal containing e.g. '= null' would
   * also be rewritten — confirm this cannot occur for non-raw conditions.
   */
  serializeCondition(instance, item, inf) {
    let s = instance.serializeCondition(item, inf);
    if (!item.isRaw) {
      s = s.replace(/!= ?null/g, 'is not null')
          .replace(/= ?null/g, 'is null')
          .replace(/<> ?null/g, 'is not null');
    }
    return s;
  }

  //noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
  /**
   * @override
   * Wraps a serialized date literal in to_date() with the matching format
   * mask. Length <= 12 is presumably the quoted 'yyyy-mm-dd' form (date
   * only); anything longer carries a time component — TODO confirm the
   * base serializer's literal format.
   */
  serializeDateValue(instance, date, inf) {
    const s = instance.serializeDateValue(date, inf);
    return s.length <= 12 ?
        'to_date(' + s + ', \'yyyy-mm-dd\')' :
        'to_date(' + s + ', \'yyyy-mm-dd hh24:mi:ss\')';
  }

  //noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
  /**
   * @override
   * Turns a generic RETURNING clause into Oracle's
   * 'returning a, b into :returning$a, :returning$b' form and records the
   * out-bind definitions on instance.returningParams for the driver.
   * NOTE(review): substring(10) presumably strips the leading 'returning '
   * prefix from the base serializer's output — confirm.
   */
  serializeReturning(instance, bindings, inf) {
    let s = instance.serializeReturning(bindings, inf);
    if (s) {
      instance.returningParams = {};
      const a = s.substring(10, s.length).split(/\s*,\s*/);
      s += ' into ';
      a.forEach((n, i) => {
        s += (i ? ', ' : '') + ':returning$' + n;
        instance.returningParams['returning$' + n] = bindings[n];
      });
      return s;
    } else
      // No RETURNING clause: clear any params left from a previous call.
      instance.returningParams = undefined;
  }

}

module.exports = OracleSerializer;
| lib/serializer.js | /* sqb-serializer-oracle
------------------------
(c) 2017-present Panates
SQB may be freely distributed under the MIT license.
For details and documentation:
https://panates.github.io/sqb-serializer-oracle/
*/
const reservedWords = ['comment', 'dual'];
class OracleSerializer {
constructor() {
}
// noinspection JSMethodCanBeStatic,JSUnusedGlobalSymbols
isReserved(s) {
return reservedWords.includes(String(s).toLowerCase());
}
//noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
/**
* @override
*/
serializeSelect(instance, obj, inf) {
let out = instance.serializeSelect(obj, inf);
const limit = instance.query._limit || 0;
const offset = Math.max((obj._offset || 0), 0);
if (limit || offset) {
if (instance.config.serverVersion >= 12) {
if (offset)
out += (offset ? '\nOFFSET ' + offset + ' ROWS' : '') +
(limit ? ' FETCH NEXT ' + limit + ' ROWS ONLY' : '');
else out += (limit ? '\nFETCH FIRST ' + limit + ' ROWS ONLY' : '');
} else {
const a = obj._alias;
const order = instance.query._orderby;
if (offset || (order && order.length)) {
out = 'select ' + (a ? a + '.' : '') + '* from (\n\t' +
'select /*+ first_rows(' + (limit || 100) +
') */ t.*, rownum row$number from (\n\t' +
out + '\n\b' +
') t' +
(limit ? ' where rownum <= ' + (limit + offset) : '') +
'\n\b)' + (a ? ' ' + a : '');
if (offset)
out += ' where row$number >= ' + (offset + 1);
} else {
out = 'select ' + (a ? a + '.' : '') + '* from (\n\t' +
out + '\n\b' +
') where rownum <= ' + limit;
}
}
}
return out;
}
//noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
/**
* @override
*/
serializeFrom(instance, tables, inf) {
return instance.serializeFrom(tables, inf) || 'from dual';
}
//noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
/**
* @override
*/
serializeCondition(instance, item, inf) {
let s = instance.serializeCondition(item, inf);
if (!item.isRaw) {
s = s.replace(/!= ?null/g, 'is not null')
.replace(/= ?null/g, 'is null')
.replace(/<> ?null/g, 'is not null');
}
return s;
}
//noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
/**
* @override
*/
serializeDateValue(instance, date, inf) {
const s = instance.serializeDateValue(date, inf);
return s.length <= 12 ?
'to_date(' + s + ', \'yyyy-mm-dd\')' :
'to_date(' + s + ', \'yyyy-mm-dd hh24:mi:ss\')';
}
}
module.exports = OracleSerializer;
| Implement "returning" for insert queries
| lib/serializer.js | Implement "returning" for insert queries | <ide><path>ib/serializer.js
<ide> 'to_date(' + s + ', \'yyyy-mm-dd hh24:mi:ss\')';
<ide> }
<ide>
<add> //noinspection JSUnusedGlobalSymbols,JSMethodCanBeStatic
<add> /**
<add> * @override
<add> */
<add> serializeReturning(instance, bindings, inf) {
<add> let s = instance.serializeReturning(bindings, inf);
<add> if (s) {
<add> instance.returningParams = {};
<add> const a = s.substring(10, s.length).split(/\s*,\s*/);
<add> s += ' into ';
<add> a.forEach((n, i) => {
<add> s += (i ? ', ' : '') + ':returning$' + n;
<add> instance.returningParams['returning$' + n] = bindings[n];
<add> });
<add> return s;
<add> } else
<add> instance.returningParams = undefined;
<add> }
<add>
<ide> }
<ide>
<ide> module.exports = OracleSerializer; |
|
Java | apache-2.0 | 42df2055de7e0bdccfa251e38227d296334344aa | 0 | hortonworks/cloudbreak,sequenceiq/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak | package com.sequenceiq.periscope.domain;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Transient;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@Entity
@DiscriminatorValue("METRIC")
public class MetricAlarm extends BaseAlarm {
@Enumerated(EnumType.STRING)
private Metric metric;
@Enumerated(EnumType.STRING)
private ComparisonOperator comparisonOperator;
private double threshold;
private int period;
@Transient
private long alarmHitsSince;
public Metric getMetric() {
return metric;
}
public void setMetric(Metric metric) {
this.metric = metric;
}
public double getThreshold() {
return threshold;
}
public void setThreshold(double threshold) {
this.threshold = threshold;
}
public int getPeriod() {
return period;
}
public void setPeriod(int period) {
this.period = period;
}
public long getAlarmHitsSince() {
return alarmHitsSince;
}
public void setAlarmHitsSince(long alarmHitsSince) {
this.alarmHitsSince = alarmHitsSince;
}
public ComparisonOperator getComparisonOperator() {
return comparisonOperator;
}
public void setComparisonOperator(ComparisonOperator comparisonOperator) {
this.comparisonOperator = comparisonOperator;
}
@Override
public void reset() {
setAlarmHitsSince(0);
setNotificationSent(false);
}
@Override
public boolean equals(Object o) {
return EqualsBuilder.reflectionEquals(this, o, "notificationSent", "notifications", "scalingPolicy", "alarmHitsSince");
}
@Override
public int hashCode() {
return HashCodeBuilder.reflectionHashCode(this, "notificationSent", "notifications", "scalingPolicy", "alarmHitsSince");
}
} | src/main/java/com/sequenceiq/periscope/domain/MetricAlarm.java | package com.sequenceiq.periscope.domain;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Transient;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@Entity
@DiscriminatorValue("METRIC")
public class MetricAlarm extends BaseAlarm {
@Enumerated(EnumType.STRING)
private Metric metric;
private ComparisonOperator comparisonOperator;
private double threshold;
private int period;
@Transient
private long alarmHitsSince;
public Metric getMetric() {
return metric;
}
public void setMetric(Metric metric) {
this.metric = metric;
}
public double getThreshold() {
return threshold;
}
public void setThreshold(double threshold) {
this.threshold = threshold;
}
public int getPeriod() {
return period;
}
public void setPeriod(int period) {
this.period = period;
}
public long getAlarmHitsSince() {
return alarmHitsSince;
}
public void setAlarmHitsSince(long alarmHitsSince) {
this.alarmHitsSince = alarmHitsSince;
}
public ComparisonOperator getComparisonOperator() {
return comparisonOperator;
}
public void setComparisonOperator(ComparisonOperator comparisonOperator) {
this.comparisonOperator = comparisonOperator;
}
@Override
public void reset() {
setAlarmHitsSince(0);
setNotificationSent(false);
}
@Override
public boolean equals(Object o) {
return EqualsBuilder.reflectionEquals(this, o, "notificationSent", "notifications", "scalingPolicy", "alarmHitsSince");
}
@Override
public int hashCode() {
return HashCodeBuilder.reflectionHashCode(this, "notificationSent", "notifications", "scalingPolicy", "alarmHitsSince");
}
} | PERI-27 enumtype string added for entity
| src/main/java/com/sequenceiq/periscope/domain/MetricAlarm.java | PERI-27 enumtype string added for entity | <ide><path>rc/main/java/com/sequenceiq/periscope/domain/MetricAlarm.java
<ide>
<ide> @Enumerated(EnumType.STRING)
<ide> private Metric metric;
<add> @Enumerated(EnumType.STRING)
<ide> private ComparisonOperator comparisonOperator;
<ide> private double threshold;
<ide> private int period; |
|
Java | agpl-3.0 | 580758ded45fc929d0b73d3a0d493d79211b8cc5 | 0 | battlecode/battlecode-server,battlecode/battlecode-server | package battlecode.world;
import java.util.ArrayList;
import java.util.Set;
import java.util.TreeSet;
import battlecode.common.GameActionException;
import battlecode.common.GameActionExceptionType;
import battlecode.common.GameConstants;
import battlecode.common.MapLocation;
import battlecode.common.MovementType;
import battlecode.common.RobotType;
import battlecode.common.Team;
import battlecode.common.TerrainTile;
import battlecode.world.signal.AttackSignal;
import battlecode.world.InternalObject;
import battlecode.world.InternalRobot;
import battlecode.world.signal.MovementSignal;
/**
* Represents a map (scalar field) of a neutral AI.
*/
public class NeutralsMap {
/**
* The width and height of the map.
*/
private final int mapWidth, mapHeight;
/**
* The integer scalar field showing the distribution of the neutral AI.
*/
private boolean[][] passable;
private double[][] currentAmount;
private double[][] growthFactor;
private double[][] dX, dY;
private boolean[][] hasNoise;
private int[][] pastrID;
private int[][] nPastrs;
private ArrayList<MapLocation> attacks;
private Set[][] ids;
public NeutralsMap() {
attacks = new ArrayList<MapLocation>();
this.mapWidth = 0;
this.mapHeight = 0;
this.growthFactor = new double[0][0];
this.currentAmount = new double[0][0];
dX = new double[0][0];
dY = new double[0][0];
hasNoise = new boolean[0][0];
passable = new boolean[0][0];
ids = new Set[0][0];
}
public NeutralsMap(double[][] growthFactor, TerrainTile[][] mapTiles) {
attacks = new ArrayList<MapLocation>();
this.mapWidth = growthFactor.length;
int tempMapHeight = 0;
for (int i = 0; i < this.mapWidth; i++) {
tempMapHeight = Math.max(this.mapHeight, growthFactor[i].length);
}
this.mapHeight = tempMapHeight;
this.growthFactor = new double[this.mapWidth][this.mapHeight];
this.currentAmount = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(growthFactor[i], 0, this.growthFactor[i], 0,
this.mapHeight);
}
dX = new double[this.mapWidth][this.mapHeight];
dY = new double[this.mapWidth][this.mapHeight];
hasNoise = new boolean[this.mapWidth][this.mapHeight];
pastrID = new int[this.mapWidth][this.mapHeight];
nPastrs = new int[this.mapWidth][this.mapHeight];
passable = new boolean[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
passable[i][j] = mapTiles[i][j] != TerrainTile.VOID;
pastrID[i][j] = Integer.MAX_VALUE;
if (!passable[i][j]) {
this.growthFactor[i][j] = 0;
}
}
}
ids = new Set[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
ids[i][j] = new TreeSet<Integer>();
}
}
}
public NeutralsMap(NeutralsMap nm) {
this.mapWidth = nm.mapWidth;
this.mapHeight = nm.mapHeight;
this.currentAmount = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(nm.currentAmount[i], 0, this.currentAmount[i], 0,
this.mapHeight);
}
this.growthFactor = new double[mapWidth][mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(nm.growthFactor[i], 0, this.growthFactor[i], 0,
this.mapHeight);
}
this.dX = new double[this.mapWidth][this.mapHeight];
this.dY = new double[this.mapWidth][this.mapHeight];
this.hasNoise = new boolean[this.mapWidth][this.mapHeight];
this.pastrID = new int[this.mapWidth][this.mapHeight];
this.nPastrs = new int[this.mapWidth][this.mapHeight];
this.passable = new boolean[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(nm.passable[i], 0, this.passable[i], 0, this.mapHeight);
}
}
public void createVoid(MapLocation center, int distance) {
for (int x = center.x - distance; x <= center.x + distance; x++) {
for (int y = center.y - distance; y <= center.y + distance; y++) {
if (isValid(x, y)) {
passable[x][y] = false;
}
}
}
}
public double get(MapLocation m) {
if (isValid(m.x, m.y)) {
return currentAmount[m.x][m.y];
} else {
return 0;
}
}
public double[][] copyOfData() {
double[][] cowsCopy = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(this.growthFactor[i], 0, cowsCopy[i], 0,
this.mapHeight);
}
return cowsCopy;
}
public double[][] copyOfCurrentAmounts() {
double[][] cowsCopy = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(this.currentAmount[i], 0, cowsCopy[i], 0,
this.mapHeight);
}
return cowsCopy;
}
public int[][] copyOfCurrentAmountsAsInt() {
int[][] cowsCopy = new int[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
cowsCopy[i][j] = (int) Math.round(this.currentAmount[i][j]);
}
}
return cowsCopy;
}
public boolean isValid(int x, int y) {
return x >= 0 && x < this.mapWidth && y >= 0 && y < this.mapHeight && passable[x][y];
}
@SuppressWarnings("unchecked")
public boolean isValid(int x, int y, int fromX, int fromY) {
// valid if ids of from is a subset of ids of to
if (!isValid(x, y)) return false;
return ids[x][y].containsAll(ids[fromX][fromY]);
}
public void print() {
System.out.println("Neutrals Map!");
for (int j = 0; j < this.mapHeight; j++) {
for (int i = 0; i < this.mapWidth; i++) {
System.out.print(Double.toString(this.currentAmount[i][j]).substring(0, 3) + " (" + this.growthFactor[i][j] + ")\t");
}
System.out.println();
}
System.out.println("END Neutrals Map");
}
public double getScoreChange(Team t, InternalObject[] objs) {
double delta = 0.0;
for (InternalObject obj : objs) {
InternalRobot ir = (InternalRobot) obj;
if (ir.getTeam() != t) {
continue;
}
if (ir.type != RobotType.PASTR && ir.type != RobotType.SOLDIER) continue;
if (ir.type == RobotType.SOLDIER && pastrID[ir.getLocation().x][ir.getLocation().y] < Integer.MAX_VALUE) continue; // soldiers do not milk when in pastr range
int captureRange = 0;
double capturePercentage = GameConstants.ROBOT_MILK_PERCENTAGE;
if (ir.type == RobotType.PASTR) {
captureRange = GameConstants.PASTR_RANGE;
capturePercentage = 1.0;
}
MapLocation[] affected = MapLocation.getAllMapLocationsWithinRadiusSq(ir.getLocation(), captureRange);
double milkGained = 0.0;
for (MapLocation ml : affected) {
if (isValid(ml.x, ml.y)) {
if (ir.type == RobotType.PASTR && ir.getID() == pastrID[ml.x][ml.y] || ir.type == RobotType.SOLDIER) {
milkGained += this.currentAmount[ml.x][ml.y] * capturePercentage;
}
}
}
if (milkGained > GameConstants.MAX_EFFICIENT_COWS && ir.type == RobotType.PASTR) {
milkGained = GameConstants.MAX_EFFICIENT_COWS + Math.pow(milkGained - GameConstants.MAX_EFFICIENT_COWS, GameConstants.MILKING_INEFFICIENCY);
}
delta += milkGained;
}
return delta;
}
@SuppressWarnings("unchecked")
public void updateIds(InternalObject obj) {
InternalRobot ir = (InternalRobot) obj;
if (ir.type != RobotType.PASTR && ir.type != RobotType.SOLDIER) return;
int captureRange = 0;
if (ir.type == RobotType.PASTR) captureRange = GameConstants.PASTR_RANGE;
MapLocation[] affected = MapLocation.getAllMapLocationsWithinRadiusSq(ir.getLocation(), captureRange);
for (MapLocation ml : affected) {
if (isValid(ml.x, ml.y)) {
this.ids[ml.x][ml.y].add(ir.getID());
if (ir.type == RobotType.PASTR) {
pastrID[ml.x][ml.y] = Math.min(pastrID[ml.x][ml.y], ir.getID());
}
}
}
}
final double PI4 = Math.PI / 4;
final double EPSILON = 1.0e-6;
final int[][] dirs = {{-1, 0}, {-1, -1}, {0, -1}, {1, -1},
{1, 0}, {1, 1}, {0, 1}, {-1, 1}};
public void next(InternalObject[] objs) {
for (InternalObject obj : objs) {
updateIds(obj);
}
// Current order:
// 1) cows are destroyed due to attack
// 2) cows move
// 3) cow growth and decay happens
for (int i = 0; i < attacks.size(); i++) {
MapLocation target = attacks.get(i);
if (target.x >= 0 && target.x < this.mapWidth &&
target.y >= 0 && target.y < this.mapHeight) {
this.currentAmount[target.x][target.y] = 0;
}
}
double[][] temp = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
if (this.hasNoise[i][j]) {
if (this.dX[i][j] != 0 || this.dY[i][j] != 0) {
double theta = Math.atan2(this.dY[i][j], this.dX[i][j]); // angle the cows want to move in
double closestThetaDiff = Math.PI / 2;
int movesCount = 0;
for (int k = -4; k < 4; k++) {
double theta0 = k * PI4;
int x = i + dirs[k + 4][0];
int y = j + dirs[k + 4][1];
boolean valid = isValid(x, y, i, j);
if (valid) {
double diff = Math.min(Math.abs(theta - theta0), 2 * Math.PI - Math.abs(theta - theta0));
if (diff < closestThetaDiff - EPSILON) {
closestThetaDiff = diff;
movesCount = 1;
} else if (diff <= closestThetaDiff + EPSILON) {
movesCount++;
}
}
}
if (movesCount > 0) {
for (int k = -4; k < 4; k++) {
double theta0 = k * PI4;
int x = i + dirs[k + 4][0];
int y = j + dirs[k + 4][1];
boolean valid = isValid(x, y, i, j);
if (valid) {
double diff = Math.min(Math.abs(theta - theta0), 2 * Math.PI - Math.abs(theta - theta0));
if (diff <= closestThetaDiff + EPSILON) {
temp[x][y] += this.currentAmount[i][j] / movesCount;
}
}
}
} else {
temp[i][j] += this.currentAmount[i][j];
}
} else {
// scatter
int nScatter = 0;
for (int di = -1; di <= 1; di++) {
for (int dj = -1; dj <= 1; dj++) {
if (!(di == 0 && dj == 0) && isValid(i + di, j + dj, i, j)) {
nScatter++;
}
}
}
if (nScatter == 0) {
temp[i][j] += this.currentAmount[i][j];
} else {
for (int di = -1; di <= 1; di++) {
for (int dj = -1; dj <= 1; dj++) {
if (!(di == 0 && dj == 0) && isValid(i + di, j + dj, i, j)) {
temp[i + di][j + dj] += this.currentAmount[i][j] / nScatter;
}
}
}
}
}
} else {
temp[i][j] += this.currentAmount[i][j];
}
}
}
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
this.currentAmount[i][j] = temp[i][j];
}
}
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
if (this.isValid(i, j)) {
this.currentAmount[i][j] = GameConstants.NEUTRALS_TURN_DECAY * this.currentAmount[i][j] + this.growthFactor[i][j];
}
}
}
//this.print();
}
// for when an attack doesn't generate noise
public void updateWithQuietAttack(MapLocation source) {
attacks.add(source);
}
public void updateWithNoiseSource(MapLocation source, int radiusSquared) {
MapLocation[] affected = MapLocation.getAllMapLocationsWithinRadiusSq(source, radiusSquared);
for (int i = 0; i < affected.length; i++) {
if (isValid(affected[i].x, affected[i].y)) {
hasNoise[affected[i].x][affected[i].y] = true;
int curdX = affected[i].x - source.x;
int curdY = affected[i].y - source.y;
dX[affected[i].x][affected[i].y] += curdX;
dY[affected[i].x][affected[i].y] += curdY;
}
}
}
public void updateWithMovement(MovementSignal movement) {
if (movement.getMovementType() != MovementType.SNEAK) {
MapLocation source = movement.getNewLoc();
updateWithNoiseSource(source, GameConstants.MOVEMENT_SCARE_RANGE);
}
}
public void updateWithAttack(AttackSignal attack) {
attacks.add(attack.getTargetLoc());
updateWithNoiseSource(attack.getTargetLoc(), GameConstants.ATTACK_SCARE_RANGE);
}
public void resetAfterTurn() {
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
this.dX[i][j] = 0;
this.dY[i][j] = 0;
this.ids[i][j].clear();
this.hasNoise[i][j] = false;
this.pastrID[i][j] = Integer.MAX_VALUE;
}
}
attacks.clear();
}
}
| src/main/battlecode/world/NeutralsMap.java | package battlecode.world;
import java.util.ArrayList;
import java.util.Set;
import java.util.TreeSet;
import battlecode.common.GameActionException;
import battlecode.common.GameActionExceptionType;
import battlecode.common.GameConstants;
import battlecode.common.MapLocation;
import battlecode.common.MovementType;
import battlecode.common.RobotType;
import battlecode.common.Team;
import battlecode.common.TerrainTile;
import battlecode.world.signal.AttackSignal;
import battlecode.world.InternalObject;
import battlecode.world.InternalRobot;
import battlecode.world.signal.MovementSignal;
/**
* Represents a map (scalar field) of a neutral AI.
*/
public class NeutralsMap {
/**
* The width and height of the map.
*/
private final int mapWidth, mapHeight;
/**
* The integer scalar field showing the distribution of the neutral AI.
*/
private boolean[][] passable;
private double[][] currentAmount;
private double[][] growthFactor;
private double[][] dX, dY;
private boolean[][] hasNoise;
private int[][] pastrID;
private int[][] nPastrs;
private ArrayList<MapLocation> attacks;
private Set[][] ids;
public NeutralsMap() {
attacks = new ArrayList<MapLocation>();
this.mapWidth = 0;
this.mapHeight = 0;
this.growthFactor = new double[0][0];
this.currentAmount = new double[0][0];
dX = new double[0][0];
dY = new double[0][0];
hasNoise = new boolean[0][0];
passable = new boolean[0][0];
ids = new Set[0][0];
}
public NeutralsMap(double[][] growthFactor, TerrainTile[][] mapTiles) {
attacks = new ArrayList<MapLocation>();
this.mapWidth = growthFactor.length;
int tempMapHeight = 0;
for (int i = 0; i < this.mapWidth; i++) {
tempMapHeight = Math.max(this.mapHeight, growthFactor[i].length);
}
this.mapHeight = tempMapHeight;
this.growthFactor = new double[this.mapWidth][this.mapHeight];
this.currentAmount = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(growthFactor[i], 0, this.growthFactor[i], 0,
this.mapHeight);
}
dX = new double[this.mapWidth][this.mapHeight];
dY = new double[this.mapWidth][this.mapHeight];
hasNoise = new boolean[this.mapWidth][this.mapHeight];
pastrID = new int[this.mapWidth][this.mapHeight];
nPastrs = new int[this.mapWidth][this.mapHeight];
passable = new boolean[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
passable[i][j] = mapTiles[i][j] != TerrainTile.VOID;
pastrID[i][j] = Integer.MAX_VALUE;
}
}
ids = new Set[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
ids[i][j] = new TreeSet<Integer>();
}
}
}
public NeutralsMap(NeutralsMap nm) {
this.mapWidth = nm.mapWidth;
this.mapHeight = nm.mapHeight;
this.currentAmount = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(nm.currentAmount[i], 0, this.currentAmount[i], 0,
this.mapHeight);
}
this.growthFactor = new double[mapWidth][mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(nm.growthFactor[i], 0, this.growthFactor[i], 0,
this.mapHeight);
}
this.dX = new double[this.mapWidth][this.mapHeight];
this.dY = new double[this.mapWidth][this.mapHeight];
this.hasNoise = new boolean[this.mapWidth][this.mapHeight];
this.pastrID = new int[this.mapWidth][this.mapHeight];
this.nPastrs = new int[this.mapWidth][this.mapHeight];
this.passable = new boolean[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(nm.passable[i], 0, this.passable[i], 0, this.mapHeight);
}
}
public void createVoid(MapLocation center, int distance) {
for (int x = center.x - distance; x <= center.x + distance; x++) {
for (int y = center.y - distance; y <= center.y + distance; y++) {
if (isValid(x, y)) {
passable[x][y] = false;
}
}
}
}
public double get(MapLocation m) {
if (isValid(m.x, m.y)) {
return currentAmount[m.x][m.y];
} else {
return 0;
}
}
public double[][] copyOfData() {
double[][] cowsCopy = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(this.growthFactor[i], 0, cowsCopy[i], 0,
this.mapHeight);
}
return cowsCopy;
}
public double[][] copyOfCurrentAmounts() {
double[][] cowsCopy = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
System.arraycopy(this.currentAmount[i], 0, cowsCopy[i], 0,
this.mapHeight);
}
return cowsCopy;
}
public int[][] copyOfCurrentAmountsAsInt() {
int[][] cowsCopy = new int[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
cowsCopy[i][j] = (int) Math.round(this.currentAmount[i][j]);
}
}
return cowsCopy;
}
public boolean isValid(int x, int y) {
return x >= 0 && x < this.mapWidth && y >= 0 && y < this.mapHeight && passable[x][y];
}
@SuppressWarnings("unchecked")
public boolean isValid(int x, int y, int fromX, int fromY) {
// valid if ids of from is a subset of ids of to
if (!isValid(x, y)) return false;
return ids[x][y].containsAll(ids[fromX][fromY]);
}
public void print() {
System.out.println("Neutrals Map!");
for (int j = 0; j < this.mapHeight; j++) {
for (int i = 0; i < this.mapWidth; i++) {
System.out.print(Double.toString(this.currentAmount[i][j]).substring(0, 3) + " (" + this.growthFactor[i][j] + ")\t");
}
System.out.println();
}
System.out.println("END Neutrals Map");
}
public double getScoreChange(Team t, InternalObject[] objs) {
double delta = 0.0;
for (InternalObject obj : objs) {
InternalRobot ir = (InternalRobot) obj;
if (ir.getTeam() != t) {
continue;
}
if (ir.type != RobotType.PASTR && ir.type != RobotType.SOLDIER) continue;
if (ir.type == RobotType.SOLDIER && pastrID[ir.getLocation().x][ir.getLocation().y] < Integer.MAX_VALUE) continue; // soldiers do not milk when in pastr range
int captureRange = 0;
double capturePercentage = GameConstants.ROBOT_MILK_PERCENTAGE;
if (ir.type == RobotType.PASTR) {
captureRange = GameConstants.PASTR_RANGE;
capturePercentage = 1.0;
}
MapLocation[] affected = MapLocation.getAllMapLocationsWithinRadiusSq(ir.getLocation(), captureRange);
double milkGained = 0.0;
for (MapLocation ml : affected) {
if (isValid(ml.x, ml.y)) {
if (ir.type == RobotType.PASTR && ir.getID() == pastrID[ml.x][ml.y] || ir.type == RobotType.SOLDIER) {
milkGained += this.currentAmount[ml.x][ml.y] * capturePercentage;
}
}
}
if (milkGained > GameConstants.MAX_EFFICIENT_COWS && ir.type == RobotType.PASTR) {
milkGained = GameConstants.MAX_EFFICIENT_COWS + Math.pow(milkGained - GameConstants.MAX_EFFICIENT_COWS, GameConstants.MILKING_INEFFICIENCY);
}
delta += milkGained;
}
return delta;
}
@SuppressWarnings("unchecked")
public void updateIds(InternalObject obj) {
InternalRobot ir = (InternalRobot) obj;
if (ir.type != RobotType.PASTR && ir.type != RobotType.SOLDIER) return;
int captureRange = 0;
if (ir.type == RobotType.PASTR) captureRange = GameConstants.PASTR_RANGE;
MapLocation[] affected = MapLocation.getAllMapLocationsWithinRadiusSq(ir.getLocation(), captureRange);
for (MapLocation ml : affected) {
if (isValid(ml.x, ml.y)) {
this.ids[ml.x][ml.y].add(ir.getID());
if (ir.type == RobotType.PASTR) {
pastrID[ml.x][ml.y] = Math.min(pastrID[ml.x][ml.y], ir.getID());
}
}
}
}
final double PI4 = Math.PI / 4;
final double EPSILON = 1.0e-6;
final int[][] dirs = {{-1, 0}, {-1, -1}, {0, -1}, {1, -1},
{1, 0}, {1, 1}, {0, 1}, {-1, 1}};
public void next(InternalObject[] objs) {
for (InternalObject obj : objs) {
updateIds(obj);
}
// Current order:
// 1) cows are destroyed due to attack
// 2) cows move
// 3) cow growth and decay happens
for (int i = 0; i < attacks.size(); i++) {
MapLocation target = attacks.get(i);
if (target.x >= 0 && target.x < this.mapWidth &&
target.y >= 0 && target.y < this.mapHeight) {
this.currentAmount[target.x][target.y] = 0;
}
}
double[][] temp = new double[this.mapWidth][this.mapHeight];
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
if (this.hasNoise[i][j]) {
if (this.dX[i][j] != 0 || this.dY[i][j] != 0) {
double theta = Math.atan2(this.dY[i][j], this.dX[i][j]); // angle the cows want to move in
double closestThetaDiff = Math.PI / 2;
int movesCount = 0;
for (int k = -4; k < 4; k++) {
double theta0 = k * PI4;
int x = i + dirs[k + 4][0];
int y = j + dirs[k + 4][1];
boolean valid = isValid(x, y, i, j);
if (valid) {
double diff = Math.min(Math.abs(theta - theta0), 2 * Math.PI - Math.abs(theta - theta0));
if (diff < closestThetaDiff - EPSILON) {
closestThetaDiff = diff;
movesCount = 1;
} else if (diff <= closestThetaDiff + EPSILON) {
movesCount++;
}
}
}
if (movesCount > 0) {
for (int k = -4; k < 4; k++) {
double theta0 = k * PI4;
int x = i + dirs[k + 4][0];
int y = j + dirs[k + 4][1];
boolean valid = isValid(x, y, i, j);
if (valid) {
double diff = Math.min(Math.abs(theta - theta0), 2 * Math.PI - Math.abs(theta - theta0));
if (diff <= closestThetaDiff + EPSILON) {
temp[x][y] += this.currentAmount[i][j] / movesCount;
}
}
}
} else {
temp[i][j] += this.currentAmount[i][j];
}
} else {
// scatter
int nScatter = 0;
for (int di = -1; di <= 1; di++) {
for (int dj = -1; dj <= 1; dj++) {
if (!(di == 0 && dj == 0) && isValid(i + di, j + dj, i, j)) {
nScatter++;
}
}
}
if (nScatter == 0) {
temp[i][j] += this.currentAmount[i][j];
} else {
for (int di = -1; di <= 1; di++) {
for (int dj = -1; dj <= 1; dj++) {
if (!(di == 0 && dj == 0) && isValid(i + di, j + dj, i, j)) {
temp[i + di][j + dj] += this.currentAmount[i][j] / nScatter;
}
}
}
}
}
} else {
temp[i][j] += this.currentAmount[i][j];
}
}
}
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
this.currentAmount[i][j] = temp[i][j];
}
}
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
if (this.isValid(i, j)) {
this.currentAmount[i][j] = GameConstants.NEUTRALS_TURN_DECAY * this.currentAmount[i][j] + this.growthFactor[i][j];
}
}
}
//this.print();
}
// for when an attack doesn't generate noise
public void updateWithQuietAttack(MapLocation source) {
attacks.add(source);
}
public void updateWithNoiseSource(MapLocation source, int radiusSquared) {
MapLocation[] affected = MapLocation.getAllMapLocationsWithinRadiusSq(source, radiusSquared);
for (int i = 0; i < affected.length; i++) {
if (isValid(affected[i].x, affected[i].y)) {
hasNoise[affected[i].x][affected[i].y] = true;
int curdX = affected[i].x - source.x;
int curdY = affected[i].y - source.y;
dX[affected[i].x][affected[i].y] += curdX;
dY[affected[i].x][affected[i].y] += curdY;
}
}
}
public void updateWithMovement(MovementSignal movement) {
if (movement.getMovementType() != MovementType.SNEAK) {
MapLocation source = movement.getNewLoc();
updateWithNoiseSource(source, GameConstants.MOVEMENT_SCARE_RANGE);
}
}
public void updateWithAttack(AttackSignal attack) {
attacks.add(attack.getTargetLoc());
updateWithNoiseSource(attack.getTargetLoc(), GameConstants.ATTACK_SCARE_RANGE);
}
public void resetAfterTurn() {
for (int i = 0; i < this.mapWidth; i++) {
for (int j = 0; j < this.mapHeight; j++) {
this.dX[i][j] = 0;
this.dY[i][j] = 0;
this.ids[i][j].clear();
this.hasNoise[i][j] = false;
this.pastrID[i][j] = Integer.MAX_VALUE;
}
}
attacks.clear();
}
}
| Set cow growth to zero on VOID squares
Right now, there are some maps that contain VOID squares with non-zero
cow growth, such as siege.xml, even though cows do not grow on VOID
squares. These maps cause RobotController.senseCowGrowth() to return
an array that contains nonzero entries in those squares, even though
the actual cow growth in those squares is zero.
This commit fixes the bug.
| src/main/battlecode/world/NeutralsMap.java | Set cow growth to zero on VOID squares | <ide><path>rc/main/battlecode/world/NeutralsMap.java
<ide> for (int j = 0; j < this.mapHeight; j++) {
<ide> passable[i][j] = mapTiles[i][j] != TerrainTile.VOID;
<ide> pastrID[i][j] = Integer.MAX_VALUE;
<add> if (!passable[i][j]) {
<add> this.growthFactor[i][j] = 0;
<add> }
<ide> }
<ide> }
<ide> ids = new Set[this.mapWidth][this.mapHeight]; |
|
Java | mit | error: pathspec 'core/src/test/java/jenkins/ResilientJsonObjectTest.java' did not match any file(s) known to git
| 1b3eb0b9e7a3ee35d6c40d5e4460f4bb1f17e0e9 | 1 | maikeffi/hudson,jcsirot/jenkins,tangkun75/jenkins,FTG-003/jenkins,yonglehou/jenkins,liupugong/jenkins,jcsirot/jenkins,vijayto/jenkins,everyonce/jenkins,shahharsh/jenkins,mrooney/jenkins,Jochen-A-Fuerbacher/jenkins,Krasnyanskiy/jenkins,duzifang/my-jenkins,github-api-test-org/jenkins,vjuranek/jenkins,dennisjlee/jenkins,Vlatombe/jenkins,FarmGeek4Life/jenkins,my7seven/jenkins,ns163/jenkins,ydubreuil/jenkins,tangkun75/jenkins,Krasnyanskiy/jenkins,csimons/jenkins,wangyikai/jenkins,yonglehou/jenkins,deadmoose/jenkins,CodeShane/jenkins,github-api-test-org/jenkins,recena/jenkins,ndeloof/jenkins,bkmeneguello/jenkins,jzjzjzj/jenkins,mcanthony/jenkins,gitaccountforprashant/gittest,ChrisA89/jenkins,gorcz/jenkins,noikiy/jenkins,intelchen/jenkins,rashmikanta-1984/jenkins,protazy/jenkins,bpzhang/jenkins,jzjzjzj/jenkins,lilyJi/jenkins,gorcz/jenkins,aheritier/jenkins,Wilfred/jenkins,FTG-003/jenkins,NehemiahMi/jenkins,batmat/jenkins,ndeloof/jenkins,sathiya-mit/jenkins,gitaccountforprashant/gittest,goldchang/jenkins,Krasnyanskiy/jenkins,yonglehou/jenkins,aquarellian/jenkins,mattclark/jenkins,gusreiber/jenkins,recena/jenkins,patbos/jenkins,hemantojhaa/jenkins,pjanouse/jenkins,ajshastri/jenkins,jpbriend/jenkins,jenkinsci/jenkins,DanielWeber/jenkins,elkingtonmcb/jenkins,yonglehou/jenkins,patbos/jenkins,aldaris/jenkins,FarmGeek4Life/jenkins,ErikVerheul/jenkins,andresrc/jenkins,hplatou/jenkins,ChrisA89/jenkins,dariver/jenkins,khmarbaise/jenkins,shahharsh/jenkins,MadsNielsen/jtemp,paulmillar/jenkins,synopsys-arc-oss/jenkins,github-api-test-org/jenkins,goldchang/jenkins,hashar/jenkins,aduprat/jenkins,varmenise/jenkins,kohsuke/hudson,ydubreuil/jenkins,nandan4/Jenkins,hemantojhaa/jenkins,vlajos/jenkins,DoctorQ/jenkins,mcanthony/jenkins,Jimilian/jenkins,MarkEWaite/jenkins,iqstack/jenkins,oleg-nenashev/jenkins,msrb/jenkins,Vlatombe/jenkins,gorcz/jenkins,vjuranek/jenkins,AustinKwang/jenkins,vlajos/jenkins,lindzh/jenkins,dennisjlee/jenkins,mpeltonen/
jenkins,jk47/jenkins,sathiya-mit/jenkins,sathiya-mit/jenkins,andresrc/jenkins,albers/jenkins,nandan4/Jenkins,bkmeneguello/jenkins,github-api-test-org/jenkins,NehemiahMi/jenkins,gusreiber/jenkins,dbroady1/jenkins,mrooney/jenkins,shahharsh/jenkins,h4ck3rm1k3/jenkins,patbos/jenkins,rsandell/jenkins,pselle/jenkins,godfath3r/jenkins,jglick/jenkins,soenter/jenkins,intelchen/jenkins,rsandell/jenkins,protazy/jenkins,aduprat/jenkins,godfath3r/jenkins,mcanthony/jenkins,mdonohue/jenkins,recena/jenkins,amuniz/jenkins,FarmGeek4Life/jenkins,msrb/jenkins,jcarrothers-sap/jenkins,jpbriend/jenkins,keyurpatankar/hudson,aquarellian/jenkins,huybrechts/hudson,stephenc/jenkins,KostyaSha/jenkins,thomassuckow/jenkins,wuwen5/jenkins,yonglehou/jenkins,chbiel/jenkins,samatdav/jenkins,olivergondza/jenkins,vijayto/jenkins,elkingtonmcb/jenkins,jcarrothers-sap/jenkins,dbroady1/jenkins,ydubreuil/jenkins,varmenise/jenkins,h4ck3rm1k3/jenkins,bpzhang/jenkins,aduprat/jenkins,christ66/jenkins,everyonce/jenkins,amuniz/jenkins,verbitan/jenkins,sathiya-mit/jenkins,guoxu0514/jenkins,mdonohue/jenkins,v1v/jenkins,daniel-beck/jenkins,morficus/jenkins,paulmillar/jenkins,msrb/jenkins,luoqii/jenkins,daniel-beck/jenkins,olivergondza/jenkins,vvv444/jenkins,amruthsoft9/Jenkis,kohsuke/hudson,NehemiahMi/jenkins,github-api-test-org/jenkins,goldchang/jenkins,jenkinsci/jenkins,singh88/jenkins,jcarrothers-sap/jenkins,brunocvcunha/jenkins,hashar/jenkins,vjuranek/jenkins,liupugong/jenkins,samatdav/jenkins,lordofthejars/jenkins,aheritier/jenkins,ikedam/jenkins,escoem/jenkins,daniel-beck/jenkins,lilyJi/jenkins,ns163/jenkins,svanoort/jenkins,Jimilian/jenkins,vijayto/jenkins,DanielWeber/jenkins,Vlatombe/jenkins,hashar/jenkins,ChrisA89/jenkins,duzifang/my-jenkins,amruthsoft9/Jenkis,daspilker/jenkins,SebastienGllmt/jenkins,wangyikai/jenkins,kohsuke/hudson,MichaelPranovich/jenkins_sc,csimons/jenkins,jk47/jenkins,noikiy/jenkins,noikiy/jenkins,singh88/jenkins,vvv444/jenkins,gorcz/jenkins,deadmoose/jenkins,vijayto/jenkins,amruthsoft9
/Jenkis,kzantow/jenkins,mattclark/jenkins,sathiya-mit/jenkins,jhoblitt/jenkins,ajshastri/jenkins,csimons/jenkins,soenter/jenkins,aldaris/jenkins,CodeShane/jenkins,hplatou/jenkins,nandan4/Jenkins,alvarolobato/jenkins,tfennelly/jenkins,DoctorQ/jenkins,liupugong/jenkins,MichaelPranovich/jenkins_sc,jenkinsci/jenkins,Ykus/jenkins,varmenise/jenkins,Wilfred/jenkins,vlajos/jenkins,tastatur/jenkins,synopsys-arc-oss/jenkins,dennisjlee/jenkins,wangyikai/jenkins,thomassuckow/jenkins,daniel-beck/jenkins,maikeffi/hudson,bkmeneguello/jenkins,tfennelly/jenkins,seanlin816/jenkins,aldaris/jenkins,amuniz/jenkins,ChrisA89/jenkins,paulwellnerbou/jenkins,DoctorQ/jenkins,rashmikanta-1984/jenkins,MadsNielsen/jtemp,thomassuckow/jenkins,1and1/jenkins,elkingtonmcb/jenkins,SebastienGllmt/jenkins,shahharsh/jenkins,vjuranek/jenkins,1and1/jenkins,azweb76/jenkins,everyonce/jenkins,bkmeneguello/jenkins,fbelzunc/jenkins,jcsirot/jenkins,recena/jenkins,everyonce/jenkins,mrooney/jenkins,akshayabd/jenkins,varmenise/jenkins,tangkun75/jenkins,petermarcoen/jenkins,jpbriend/jenkins,petermarcoen/jenkins,AustinKwang/jenkins,viqueen/jenkins,amruthsoft9/Jenkis,jglick/jenkins,rsandell/jenkins,arunsingh/jenkins,mcanthony/jenkins,MarkEWaite/jenkins,pselle/jenkins,MarkEWaite/jenkins,KostyaSha/jenkins,scoheb/jenkins,alvarolobato/jenkins,jhoblitt/jenkins,MichaelPranovich/jenkins_sc,FTG-003/jenkins,gitaccountforprashant/gittest,scoheb/jenkins,olivergondza/jenkins,arcivanov/jenkins,petermarcoen/jenkins,pjanouse/jenkins,maikeffi/hudson,SebastienGllmt/jenkins,ndeloof/jenkins,jpederzolli/jenkins-1,ikedam/jenkins,lordofthejars/jenkins,Krasnyanskiy/jenkins,svanoort/jenkins,arcivanov/jenkins,pjanouse/jenkins,arunsingh/jenkins,DoctorQ/jenkins,jcarrothers-sap/jenkins,bpzhang/jenkins,DanielWeber/jenkins,kohsuke/hudson,gusreiber/jenkins,v1v/jenkins,gorcz/jenkins,shahharsh/jenkins,SenolOzer/jenkins,svanoort/jenkins,arunsingh/jenkins,viqueen/jenkins,AustinKwang/jenkins,luoqii/jenkins,jk47/jenkins,aheritier/jenkins,jcarrothers-sap/
jenkins,brunocvcunha/jenkins,jglick/jenkins,jcarrothers-sap/jenkins,DanielWeber/jenkins,vjuranek/jenkins,luoqii/jenkins,noikiy/jenkins,damianszczepanik/jenkins,khmarbaise/jenkins,github-api-test-org/jenkins,amuniz/jenkins,aquarellian/jenkins,andresrc/jenkins,SenolOzer/jenkins,pjanouse/jenkins,MichaelPranovich/jenkins_sc,ikedam/jenkins,evernat/jenkins,guoxu0514/jenkins,brunocvcunha/jenkins,jenkinsci/jenkins,duzifang/my-jenkins,rsandell/jenkins,synopsys-arc-oss/jenkins,singh88/jenkins,KostyaSha/jenkins,Ykus/jenkins,ns163/jenkins,oleg-nenashev/jenkins,mdonohue/jenkins,samatdav/jenkins,292388900/jenkins,protazy/jenkins,6WIND/jenkins,huybrechts/hudson,AustinKwang/jenkins,daspilker/jenkins,paulmillar/jenkins,brunocvcunha/jenkins,Vlatombe/jenkins,tfennelly/jenkins,SebastienGllmt/jenkins,csimons/jenkins,jzjzjzj/jenkins,guoxu0514/jenkins,brunocvcunha/jenkins,FTG-003/jenkins,aheritier/jenkins,1and1/jenkins,arcivanov/jenkins,gusreiber/jenkins,iqstack/jenkins,svanoort/jenkins,patbos/jenkins,seanlin816/jenkins,DoctorQ/jenkins,duzifang/my-jenkins,protazy/jenkins,Ykus/jenkins,andresrc/jenkins,bpzhang/jenkins,deadmoose/jenkins,batmat/jenkins,dennisjlee/jenkins,lindzh/jenkins,rsandell/jenkins,my7seven/jenkins,morficus/jenkins,singh88/jenkins,FarmGeek4Life/jenkins,verbitan/jenkins,rsandell/jenkins,everyonce/jenkins,dennisjlee/jenkins,albers/jenkins,scoheb/jenkins,huybrechts/hudson,wangyikai/jenkins,aldaris/jenkins,hemantojhaa/jenkins,jpbriend/jenkins,daniel-beck/jenkins,vvv444/jenkins,olivergondza/jenkins,NehemiahMi/jenkins,singh88/jenkins,ajshastri/jenkins,intelchen/jenkins,daspilker/jenkins,my7seven/jenkins,MichaelPranovich/jenkins_sc,liorhson/jenkins,h4ck3rm1k3/jenkins,pselle/jenkins,chbiel/jenkins,ErikVerheul/jenkins,morficus/jenkins,Ykus/jenkins,gusreiber/jenkins,dennisjlee/jenkins,bkmeneguello/jenkins,ydubreuil/jenkins,svanoort/jenkins,ns163/jenkins,lilyJi/jenkins,ikedam/jenkins,1and1/jenkins,KostyaSha/jenkins,hemantojhaa/jenkins,aheritier/jenkins,rlugojr/jenkins,h4ck3rm1k3/jen
kins,stephenc/jenkins,rlugojr/jenkins,MadsNielsen/jtemp,aheritier/jenkins,batmat/jenkins,khmarbaise/jenkins,synopsys-arc-oss/jenkins,ChrisA89/jenkins,aheritier/jenkins,AustinKwang/jenkins,daniel-beck/jenkins,chbiel/jenkins,arcivanov/jenkins,Jochen-A-Fuerbacher/jenkins,shahharsh/jenkins,Wilfred/jenkins,vvv444/jenkins,sathiya-mit/jenkins,jglick/jenkins,paulmillar/jenkins,elkingtonmcb/jenkins,keyurpatankar/hudson,Jochen-A-Fuerbacher/jenkins,oleg-nenashev/jenkins,alvarolobato/jenkins,petermarcoen/jenkins,aduprat/jenkins,batmat/jenkins,yonglehou/jenkins,arcivanov/jenkins,rashmikanta-1984/jenkins,akshayabd/jenkins,lilyJi/jenkins,lilyJi/jenkins,thomassuckow/jenkins,hemantojhaa/jenkins,jcsirot/jenkins,dbroady1/jenkins,soenter/jenkins,6WIND/jenkins,mrooney/jenkins,verbitan/jenkins,dariver/jenkins,Wilfred/jenkins,khmarbaise/jenkins,jenkinsci/jenkins,deadmoose/jenkins,patbos/jenkins,h4ck3rm1k3/jenkins,fbelzunc/jenkins,jcarrothers-sap/jenkins,hplatou/jenkins,rlugojr/jenkins,mdonohue/jenkins,jpederzolli/jenkins-1,mrooney/jenkins,Jimilian/jenkins,kzantow/jenkins,damianszczepanik/jenkins,Vlatombe/jenkins,batmat/jenkins,elkingtonmcb/jenkins,pjanouse/jenkins,tastatur/jenkins,andresrc/jenkins,lindzh/jenkins,kzantow/jenkins,aduprat/jenkins,azweb76/jenkins,paulwellnerbou/jenkins,fbelzunc/jenkins,goldchang/jenkins,FarmGeek4Life/jenkins,pselle/jenkins,varmenise/jenkins,kzantow/jenkins,arunsingh/jenkins,my7seven/jenkins,goldchang/jenkins,vlajos/jenkins,keyurpatankar/hudson,v1v/jenkins,ikedam/jenkins,csimons/jenkins,godfath3r/jenkins,christ66/jenkins,gitaccountforprashant/gittest,ydubreuil/jenkins,seanlin816/jenkins,shahharsh/jenkins,verbitan/jenkins,iqstack/jenkins,seanlin816/jenkins,SenolOzer/jenkins,godfath3r/jenkins,thomassuckow/jenkins,khmarbaise/jenkins,keyurpatankar/hudson,olivergondza/jenkins,1and1/jenkins,mattclark/jenkins,jk47/jenkins,DoctorQ/jenkins,FTG-003/jenkins,ndeloof/jenkins,varmenise/jenkins,ikedam/jenkins,patbos/jenkins,morficus/jenkins,ydubreuil/jenkins,mdonohue/jenkins
,lindzh/jenkins,mattclark/jenkins,Vlatombe/jenkins,liupugong/jenkins,dariver/jenkins,DanielWeber/jenkins,vlajos/jenkins,CodeShane/jenkins,ns163/jenkins,tangkun75/jenkins,oleg-nenashev/jenkins,tangkun75/jenkins,aldaris/jenkins,mattclark/jenkins,pjanouse/jenkins,maikeffi/hudson,yonglehou/jenkins,vijayto/jenkins,lilyJi/jenkins,jpederzolli/jenkins-1,elkingtonmcb/jenkins,gitaccountforprashant/gittest,ErikVerheul/jenkins,andresrc/jenkins,maikeffi/hudson,stephenc/jenkins,ydubreuil/jenkins,my7seven/jenkins,mcanthony/jenkins,KostyaSha/jenkins,jglick/jenkins,morficus/jenkins,olivergondza/jenkins,Wilfred/jenkins,daspilker/jenkins,evernat/jenkins,soenter/jenkins,damianszczepanik/jenkins,292388900/jenkins,stephenc/jenkins,NehemiahMi/jenkins,liupugong/jenkins,amuniz/jenkins,soenter/jenkins,mdonohue/jenkins,lordofthejars/jenkins,amruthsoft9/Jenkis,akshayabd/jenkins,hplatou/jenkins,lordofthejars/jenkins,amuniz/jenkins,verbitan/jenkins,soenter/jenkins,msrb/jenkins,jpbriend/jenkins,ns163/jenkins,Vlatombe/jenkins,vvv444/jenkins,christ66/jenkins,fbelzunc/jenkins,jpbriend/jenkins,KostyaSha/jenkins,ikedam/jenkins,daniel-beck/jenkins,kohsuke/hudson,mpeltonen/jenkins,CodeShane/jenkins,evernat/jenkins,viqueen/jenkins,mcanthony/jenkins,6WIND/jenkins,csimons/jenkins,MadsNielsen/jtemp,Ykus/jenkins,daniel-beck/jenkins,oleg-nenashev/jenkins,huybrechts/hudson,huybrechts/hudson,damianszczepanik/jenkins,rashmikanta-1984/jenkins,vijayto/jenkins,6WIND/jenkins,Krasnyanskiy/jenkins,jcarrothers-sap/jenkins,292388900/jenkins,morficus/jenkins,vvv444/jenkins,duzifang/my-jenkins,MarkEWaite/jenkins,arunsingh/jenkins,soenter/jenkins,scoheb/jenkins,nandan4/Jenkins,luoqii/jenkins,pselle/jenkins,keyurpatankar/hudson,jzjzjzj/jenkins,stephenc/jenkins,wuwen5/jenkins,intelchen/jenkins,albers/jenkins,mpeltonen/jenkins,amuniz/jenkins,mpeltonen/jenkins,jpederzolli/jenkins-1,jhoblitt/jenkins,thomassuckow/jenkins,jpederzolli/jenkins-1,DoctorQ/jenkins,synopsys-arc-oss/jenkins,Jochen-A-Fuerbacher/jenkins,escoem/jenkins,Mar
kEWaite/jenkins,hashar/jenkins,jcsirot/jenkins,daspilker/jenkins,viqueen/jenkins,aduprat/jenkins,Ykus/jenkins,dbroady1/jenkins,dbroady1/jenkins,luoqii/jenkins,sathiya-mit/jenkins,tastatur/jenkins,292388900/jenkins,daspilker/jenkins,ajshastri/jenkins,olivergondza/jenkins,wuwen5/jenkins,svanoort/jenkins,SebastienGllmt/jenkins,guoxu0514/jenkins,MichaelPranovich/jenkins_sc,morficus/jenkins,dariver/jenkins,azweb76/jenkins,deadmoose/jenkins,tfennelly/jenkins,alvarolobato/jenkins,azweb76/jenkins,noikiy/jenkins,azweb76/jenkins,pselle/jenkins,hplatou/jenkins,DanielWeber/jenkins,rlugojr/jenkins,v1v/jenkins,tangkun75/jenkins,liupugong/jenkins,escoem/jenkins,damianszczepanik/jenkins,hemantojhaa/jenkins,everyonce/jenkins,oleg-nenashev/jenkins,h4ck3rm1k3/jenkins,github-api-test-org/jenkins,akshayabd/jenkins,dariver/jenkins,tastatur/jenkins,gusreiber/jenkins,gorcz/jenkins,ndeloof/jenkins,aquarellian/jenkins,jglick/jenkins,noikiy/jenkins,NehemiahMi/jenkins,csimons/jenkins,ikedam/jenkins,ajshastri/jenkins,keyurpatankar/hudson,brunocvcunha/jenkins,kohsuke/hudson,khmarbaise/jenkins,wangyikai/jenkins,keyurpatankar/hudson,CodeShane/jenkins,arunsingh/jenkins,azweb76/jenkins,maikeffi/hudson,jhoblitt/jenkins,wuwen5/jenkins,tangkun75/jenkins,gusreiber/jenkins,1and1/jenkins,deadmoose/jenkins,christ66/jenkins,hemantojhaa/jenkins,mrooney/jenkins,gorcz/jenkins,bpzhang/jenkins,batmat/jenkins,viqueen/jenkins,hashar/jenkins,Jochen-A-Fuerbacher/jenkins,FarmGeek4Life/jenkins,MarkEWaite/jenkins,Jochen-A-Fuerbacher/jenkins,jpederzolli/jenkins-1,Krasnyanskiy/jenkins,pjanouse/jenkins,dariver/jenkins,Wilfred/jenkins,SebastienGllmt/jenkins,SebastienGllmt/jenkins,6WIND/jenkins,deadmoose/jenkins,evernat/jenkins,jenkinsci/jenkins,rlugojr/jenkins,recena/jenkins,jzjzjzj/jenkins,wangyikai/jenkins,gitaccountforprashant/gittest,bkmeneguello/jenkins,wuwen5/jenkins,paulwellnerbou/jenkins,alvarolobato/jenkins,luoqii/jenkins,jk47/jenkins,gorcz/jenkins,wangyikai/jenkins,FTG-003/jenkins,singh88/jenkins,jk47/jenkins,pet
ermarcoen/jenkins,oleg-nenashev/jenkins,akshayabd/jenkins,christ66/jenkins,FTG-003/jenkins,lindzh/jenkins,petermarcoen/jenkins,stephenc/jenkins,wuwen5/jenkins,292388900/jenkins,aquarellian/jenkins,chbiel/jenkins,rashmikanta-1984/jenkins,msrb/jenkins,arunsingh/jenkins,viqueen/jenkins,paulmillar/jenkins,gitaccountforprashant/gittest,SenolOzer/jenkins,paulmillar/jenkins,AustinKwang/jenkins,protazy/jenkins,escoem/jenkins,protazy/jenkins,scoheb/jenkins,escoem/jenkins,amruthsoft9/Jenkis,chbiel/jenkins,jhoblitt/jenkins,samatdav/jenkins,vjuranek/jenkins,amruthsoft9/Jenkis,tfennelly/jenkins,seanlin816/jenkins,liorhson/jenkins,albers/jenkins,huybrechts/hudson,albers/jenkins,brunocvcunha/jenkins,lindzh/jenkins,goldchang/jenkins,SenolOzer/jenkins,vjuranek/jenkins,goldchang/jenkins,aquarellian/jenkins,dariver/jenkins,jhoblitt/jenkins,ndeloof/jenkins,ChrisA89/jenkins,elkingtonmcb/jenkins,jk47/jenkins,vvv444/jenkins,stephenc/jenkins,lindzh/jenkins,rsandell/jenkins,bpzhang/jenkins,chbiel/jenkins,daspilker/jenkins,v1v/jenkins,intelchen/jenkins,kzantow/jenkins,duzifang/my-jenkins,jpbriend/jenkins,scoheb/jenkins,viqueen/jenkins,msrb/jenkins,tastatur/jenkins,iqstack/jenkins,jzjzjzj/jenkins,patbos/jenkins,rsandell/jenkins,kohsuke/hudson,alvarolobato/jenkins,CodeShane/jenkins,jenkinsci/jenkins,seanlin816/jenkins,mpeltonen/jenkins,aldaris/jenkins,dbroady1/jenkins,protazy/jenkins,DoctorQ/jenkins,mattclark/jenkins,Jochen-A-Fuerbacher/jenkins,liupugong/jenkins,kzantow/jenkins,ajshastri/jenkins,huybrechts/hudson,NehemiahMi/jenkins,aldaris/jenkins,godfath3r/jenkins,nandan4/Jenkins,paulwellnerbou/jenkins,rashmikanta-1984/jenkins,ChrisA89/jenkins,paulwellnerbou/jenkins,iqstack/jenkins,evernat/jenkins,liorhson/jenkins,recena/jenkins,fbelzunc/jenkins,FarmGeek4Life/jenkins,alvarolobato/jenkins,my7seven/jenkins,evernat/jenkins,shahharsh/jenkins,godfath3r/jenkins,lordofthejars/jenkins,fbelzunc/jenkins,escoem/jenkins,mdonohue/jenkins,batmat/jenkins,khmarbaise/jenkins,tfennelly/jenkins,synopsys-arc-oss
/jenkins,ErikVerheul/jenkins,jzjzjzj/jenkins,tfennelly/jenkins,ErikVerheul/jenkins,mpeltonen/jenkins,evernat/jenkins,Jimilian/jenkins,hplatou/jenkins,albers/jenkins,chbiel/jenkins,github-api-test-org/jenkins,arcivanov/jenkins,keyurpatankar/hudson,v1v/jenkins,iqstack/jenkins,jglick/jenkins,KostyaSha/jenkins,aquarellian/jenkins,rlugojr/jenkins,lordofthejars/jenkins,seanlin816/jenkins,vijayto/jenkins,MarkEWaite/jenkins,aduprat/jenkins,liorhson/jenkins,fbelzunc/jenkins,Wilfred/jenkins,lilyJi/jenkins,noikiy/jenkins,scoheb/jenkins,ns163/jenkins,bpzhang/jenkins,intelchen/jenkins,akshayabd/jenkins,recena/jenkins,thomassuckow/jenkins,intelchen/jenkins,mattclark/jenkins,maikeffi/hudson,guoxu0514/jenkins,dennisjlee/jenkins,andresrc/jenkins,liorhson/jenkins,nandan4/Jenkins,MadsNielsen/jtemp,msrb/jenkins,paulwellnerbou/jenkins,vlajos/jenkins,lordofthejars/jenkins,MichaelPranovich/jenkins_sc,mrooney/jenkins,jhoblitt/jenkins,292388900/jenkins,292388900/jenkins,DanielWeber/jenkins,vlajos/jenkins,kohsuke/hudson,samatdav/jenkins,MadsNielsen/jtemp,tastatur/jenkins,hashar/jenkins,jcsirot/jenkins,ErikVerheul/jenkins,KostyaSha/jenkins,iqstack/jenkins,guoxu0514/jenkins,petermarcoen/jenkins,v1v/jenkins,MadsNielsen/jtemp,godfath3r/jenkins,azweb76/jenkins,jcsirot/jenkins,synopsys-arc-oss/jenkins,Jimilian/jenkins,hplatou/jenkins,damianszczepanik/jenkins,paulwellnerbou/jenkins,verbitan/jenkins,my7seven/jenkins,verbitan/jenkins,rlugojr/jenkins,akshayabd/jenkins,damianszczepanik/jenkins,dbroady1/jenkins,MarkEWaite/jenkins,Krasnyanskiy/jenkins,tastatur/jenkins,jzjzjzj/jenkins,arcivanov/jenkins,Jimilian/jenkins,pselle/jenkins,escoem/jenkins,jenkinsci/jenkins,bkmeneguello/jenkins,SenolOzer/jenkins,varmenise/jenkins,singh88/jenkins,christ66/jenkins,paulmillar/jenkins,SenolOzer/jenkins,luoqii/jenkins,Ykus/jenkins,guoxu0514/jenkins,damianszczepanik/jenkins,kzantow/jenkins,liorhson/jenkins,6WIND/jenkins,Jimilian/jenkins,ajshastri/jenkins,CodeShane/jenkins,ndeloof/jenkins,liorhson/jenkins,duzifang/my-je
nkins,christ66/jenkins,ErikVerheul/jenkins,samatdav/jenkins,mpeltonen/jenkins,rashmikanta-1984/jenkins,h4ck3rm1k3/jenkins,jpederzolli/jenkins-1,nandan4/Jenkins,maikeffi/hudson,hashar/jenkins,6WIND/jenkins,wuwen5/jenkins,1and1/jenkins,samatdav/jenkins,svanoort/jenkins,goldchang/jenkins,AustinKwang/jenkins,albers/jenkins,everyonce/jenkins,mcanthony/jenkins | package jenkins;
import static org.junit.Assert.assertEquals;

import net.sf.json.JSONObject;
import org.junit.Test;
import org.jvnet.hudson.test.Bug;
/**
* @author Kohsuke Kawaguchi
*/
public class ResilientJsonObjectTest {
    public static class Foo { int a; }

    /**
     * {@link JSONObject} databinding should be able to ignore non-existent fields.
     *
     * Fix: the original used a bare Java {@code assert}, which is a no-op unless
     * the JVM runs with -ea, so the test could silently pass without checking
     * anything; use JUnit's assertEquals so the check always runs.
     */
    @Test
    @Bug(15105)
    public void databindingShouldIgnoreUnrecognizedJsonProperty() {
        JSONObject o = JSONObject.fromObject("{a:1,b:2}");
        Foo f = (Foo)JSONObject.toBean(o,Foo.class);
        assertEquals(1, f.a);
    }
}
| core/src/test/java/jenkins/ResilientJsonObjectTest.java | added a test to check for regressions in JENKINS-15105.
| core/src/test/java/jenkins/ResilientJsonObjectTest.java | added a test to check for regressions in JENKINS-15105. | <ide><path>ore/src/test/java/jenkins/ResilientJsonObjectTest.java
<add>package jenkins;
<add>
<add>import net.sf.json.JSONObject;
<add>import org.junit.Test;
<add>import org.jvnet.hudson.test.Bug;
<add>
<add>/**
<add> * @author Kohsuke Kawaguchi
<add> */
<add>public class ResilientJsonObjectTest {
<add> public static class Foo { int a; }
<add>
<add> /**
<add> * {@link JSONObject} databinding should be able to ignore non-existent fields.
<add> */
<add> @Test
<add> @Bug(15105)
<add> public void databindingShouldIgnoreUnrecognizedJsonProperty() {
<add> JSONObject o = JSONObject.fromObject("{a:1,b:2}");
<add> Foo f = (Foo)JSONObject.toBean(o,Foo.class);
<add> assert f.a == 1;
<add> }
<add>} |
|
Java | agpl-3.0 | cdcd03241e30d37af41e0990949696fd3f020bd7 | 0 | cinquin/mutinack,cinquin/mutinack,cinquin/mutinack,cinquin/mutinack,cinquin/mutinack | /**
* Mutinack mutation detection program.
* Copyright (C) 2014-2016 Olivier Cinquin
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, version 3.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.org.cinquin.mutinack;
import static uk.org.cinquin.mutinack.misc_util.Util.nonNullify;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;
import org.eclipse.collections.api.list.MutableList;
import org.eclipse.collections.impl.factory.Lists;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import contrib.edu.stanford.nlp.util.HasInterval;
import contrib.edu.stanford.nlp.util.Interval;
import contrib.net.sf.samtools.Cigar;
import contrib.net.sf.samtools.CigarElement;
import contrib.net.sf.samtools.CigarOperator;
import contrib.net.sf.samtools.SAMFileReader;
import contrib.net.sf.samtools.SAMFileReader.QueryInterval;
import contrib.net.sf.samtools.SAMRecord;
import contrib.net.sf.samtools.SAMRecordIterator;
import contrib.net.sf.samtools.SamPairUtil.PairOrientation;
import contrib.nf.fr.eraasoft.pool.PoolException;
import contrib.uk.org.lidalia.slf4jext.Logger;
import contrib.uk.org.lidalia.slf4jext.LoggerFactory;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.hash.TObjectByteHashMap;
import uk.org.cinquin.mutinack.candidate_sequences.ExtendedAlignmentBlock;
import uk.org.cinquin.mutinack.misc_util.Assert;
import uk.org.cinquin.mutinack.misc_util.SettableInteger;
import uk.org.cinquin.mutinack.misc_util.Util;
import uk.org.cinquin.mutinack.misc_util.exceptions.ParseRTException;
/**
* Hashcode and equality based on read name + first or second of pair.
* @author olivier
*
*/
public final class ExtendedSAMRecord implements HasInterval<Integer> {
static final Logger logger = LoggerFactory.getLogger(ExtendedSAMRecord.class);
public boolean discarded = false;
private final @Nullable Map<String, ExtendedSAMRecord> extSAMCache;
public final @NonNull SAMRecord record;
private final @NonNull String name;
private @Nullable ExtendedSAMRecord mate;
private boolean triedRetrievingMateFromFile = false;
private final @NonNull String mateName;
private final int hashCode;
public @Nullable DuplexRead duplexRead;
private byte @Nullable[] mateVariableBarcode;
public final byte @NonNull[] variableBarcode;
public final byte @Nullable[] constantBarcode;
public final @NonNull SequenceLocation location;
final int medianPhred;
final float averagePhred;
private final Cigar cigar;
/**
* Length of read ignoring trailing Ns.
*/
public final int effectiveLength;
int nReferenceDisagreements = 0;
public static final byte PHRED_NO_ENTRY = -1;
public final @NonNull TObjectByteHashMap<SequenceLocation> basePhredScores =
new TObjectByteHashMap<>(150, 0.5f, PHRED_NO_ENTRY);
private int nClipped = -1;
private Boolean formsWrongPair;
public boolean processed = false;
public boolean duplexAlreadyVisitedForStats = false;
public final int xLoc, yLoc;
public final String runAndTile;
public boolean opticalDuplicate = false;
public boolean hasOpticalDuplicates = false;
public boolean visitedForOptDups = false;
public int tempIndex0 = -1, tempIndex1 = -1;
private final @NonNull MutinackGroup groupSettings;
private final @NonNull Mutinack analyzer;
public static @NonNull String getReadFullName(SAMRecord rec, boolean getMate) {
return (rec.getReadName() + "--" + ((getMate ^ rec.getFirstOfPairFlag())? "1" : "2") + "--" +
(getMate ? rec.getMateAlignmentStart() : rec.getAlignmentStart())) +
(!getMate && rec.getSupplementaryAlignmentFlag() ? "--suppl" : "")/*.intern()*/;
}
public @NonNull String getFullName() {
return name;
}
@Override
public final int hashCode() {
return hashCode;
}
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
return name.equals(((ExtendedSAMRecord) obj).name);
}
	/**
	 * Computes the total number of soft-clipped bases (left + right) and caches
	 * it in {@link #nClipped}. Clipping attributable to adapter run-through
	 * (insert shorter than read length, detected by comparing this read's
	 * alignment bounds against the mate's) is deliberately excluded, as is the
	 * {@code readLength - effectiveLength} trailing-N adapter trim.
	 * Called lazily from {@link #getnClipped}.
	 */
	private void computeNClipped() {
		final int readLength = record.getReadLength();
		//Bases removed by the pre-analysis N-trimming step
		final int adapterClipped = readLength - effectiveLength;
		int nClippedLeft = (!getReadNegativeStrandFlag() ?
				/* positive strand */
				getAlignmentStart() - getUnclippedStart() :
				/* negative strand */
				/* Note: getMateAlignmentEnd will return Integer.MAX_INT if mate not loaded*/
				(getAlignmentStart() <= getMateAlignmentStart() ?
						/* adapter run through, causes clipping we should ignore */
						0 :
						getAlignmentStart() - getUnclippedStart() - adapterClipped));
		//Clamp: the adapterClipped subtraction above can drive the estimate negative
		nClippedLeft = Math.max(0, nClippedLeft);
		int nClippedRight = getReadNegativeStrandFlag() ?
				/* negative strand */
				getUnclippedEnd() - getAlignmentEnd() :
				/* positive strand */
				(getAlignmentEnd() >= getMateAlignmentEnd() ?
						/* adapter run through, causes clipping we should ignore */
						0 :
						getUnclippedEnd() - getAlignmentEnd() - adapterClipped);
		nClippedRight = Math.max(0, nClippedRight);
		nClipped = nClippedLeft + nClippedRight;
	}
public int getnClipped() {
if (nClipped == -1) {
computeNClipped();
}
return nClipped;
}
public void resetnClipped() {
nClipped = -1;
}
@SuppressWarnings("static-access")
public ExtendedSAMRecord(@NonNull SAMRecord rec, @NonNull String fullName,
@NonNull List<@NonNull AnalysisStats> stats,
@NonNull Mutinack analyzer, @NonNull SequenceLocation location,
@Nullable Map<String, ExtendedSAMRecord> extSAMCache) {
this.groupSettings = Objects.requireNonNull(analyzer.groupSettings);
this.analyzer = Objects.requireNonNull(analyzer);
this.extSAMCache = extSAMCache;
this.name = Objects.requireNonNull(fullName);
this.record = Objects.requireNonNull(rec);
this.cigar = rec.getCigar();
this.location = location;
hashCode = fullName.hashCode();
mateName = getReadFullName(rec, true);
final int readLength = rec.getReadLength();
//Find effective end of read, i.e. first position that is not an 'N' (the trimming
//step run prior to mutation detection might shorten reads that ran into the
//adapter because the insert was shorter than read length, by transforming all
//bases that should be ignored to an N)
@SuppressWarnings("hiding")
int effectiveLength = readLength;
final byte[] read = record.getReadBases();
final byte[] baseQualities = record.getBaseQualities();
if (getReadNegativeStrandFlag()) {
int i = 0;
while (read[i] == 'N' &&
i < readLength - 1) {
i++;
}
effectiveLength = readLength - i;
} else {
while (read[effectiveLength - 1] == 'N' &&
effectiveLength > 0) {
effectiveLength--;
}
}
Assert.isFalse(effectiveLength < 0);
this.effectiveLength = effectiveLength;
int sumBaseQualities0 = 0;
int nConsidered0 = 0;
TIntList qualities = new TIntArrayList(effectiveLength);
int n = Math.min(effectiveLength, readLength / 2);
for (int index1 = 0; index1 < n; index1++) {
nConsidered0++;
final byte b = baseQualities[index1];
sumBaseQualities0 += b;
stats.forEach(s -> s.nProcessedBases.add(location, 1));
stats.forEach(s -> s.phredSumProcessedbases.add(b));
qualities.add(b);
}
int avQuality = sumBaseQualities0 / nConsidered0;
stats.forEach(s-> s.averageReadPhredQuality0.insert(avQuality));
int sumBaseQualities1 = 0;
int nConsidered1 = 0;
for (int index1 = readLength / 2; index1 < effectiveLength; index1++) {
nConsidered1++;
final byte b = baseQualities[index1];
sumBaseQualities1 += b;
stats.forEach(s -> s.nProcessedBases.add(location, 1));
stats.forEach(s -> s.phredSumProcessedbases.add(b));
qualities.add(b);
}
if (nConsidered1 > 0) {
int avQuality1 = sumBaseQualities1 / nConsidered1;
stats.forEach(s -> s.averageReadPhredQuality1.insert(avQuality1));
}
qualities.sort();
medianPhred = qualities.get(qualities.size() / 2);
averagePhred = (sumBaseQualities0 + sumBaseQualities1) / ((float) (nConsidered0 + nConsidered1));
stats.forEach(s -> s.medianReadPhredQuality.insert(medianPhred));
Assert.isTrue(rec.getUnclippedEnd() - 1 >= getAlignmentEnd(),
(Supplier<Object>) () -> "" + (rec.getUnclippedEnd() - 1),
(Supplier<Object>) this::toString,
"Unclipped end is %s for read %s");
Assert.isTrue(rec.getAlignmentStart() - 1 >= getUnclippedStart());
final @NonNull String fullBarcodeString;
String bcAttr = (String) record.getAttribute("BC");
if (groupSettings.getVariableBarcodeEnd() > 0) {
final int firstBarcodeInNameIndex = name.indexOf("BC:Z:");
if (bcAttr == null) {
if (firstBarcodeInNameIndex == -1) {
throw new ParseRTException("Missing first barcode for read " + name +
' ' + record.toString());
}
final int index;
if (record.getFirstOfPairFlag()) {
index = firstBarcodeInNameIndex;
} else {
index = name.indexOf("BC:Z:", firstBarcodeInNameIndex + 1);
if (index == -1) {
throw new ParseRTException("Missing second barcode for read " + name +
' ' + record.toString());
}
}
fullBarcodeString = nonNullify(name.substring(index + 5, name.indexOf('_', index)));
} else {
fullBarcodeString = bcAttr;
}
variableBarcode = Util.getInternedVB(fullBarcodeString.substring(
groupSettings.getVariableBarcodeStart(), groupSettings.getVariableBarcodeEnd() + 1).getBytes());
constantBarcode = Util.getInternedCB(fullBarcodeString.substring(
groupSettings.getConstantBarcodeStart(), groupSettings.getConstantBarcodeEnd() + 1).getBytes());
if (firstBarcodeInNameIndex > -1) {
mateVariableBarcode = getMateBarcode(name, firstBarcodeInNameIndex);
}
} else {
variableBarcode = EMPTY_BARCODE;
constantBarcode = DUMMY_BARCODE;
}
String readName = record.getReadName();
int endFirstChunk = nthIndexOf(readName, ':', 5);
//Interning below required for equality checks performed in optical duplicate detection
runAndTile = record.getReadName().substring(0, endFirstChunk).intern();
byte[] readNameBytes = readName.getBytes();
xLoc = parseInt(readNameBytes, endFirstChunk + 1);
int endXLoc = readName.indexOf(':', endFirstChunk + 1);
yLoc = parseInt(readNameBytes, endXLoc + 1);
//interval = Interval.toInterval(rec.getAlignmentStart(), rec.getAlignmentEnd());
}
private byte[] getBarcode(String s) {
if (record.getFirstOfPairFlag()) {
return getBarcodeFromString(s, true, 0);
} else {
return getBarcodeFromString(s, false, 0);
}
}
private byte[] getMateBarcode(String s, int firstBarcodeInNameIndex) {
if (record.getFirstOfPairFlag()) {
return getBarcodeFromString(s, false, firstBarcodeInNameIndex);
} else {
return getBarcodeFromString(s, true, firstBarcodeInNameIndex);
}
}
	/**
	 * Extracts an interned variable barcode from the "BC:Z:&lt;barcode&gt;_"
	 * marker found at or after {@code startIndex}. When {@code firstOccurrence}
	 * is false, recurses once to skip to the NEXT "BC:Z:" occurrence — i.e.
	 * false means "take the second occurrence from startIndex", not "take none".
	 * The barcode substring is then narrowed to the configured variable-barcode
	 * window before interning.
	 */
	@SuppressWarnings("static-access")
	private byte[] getBarcodeFromString(String s, boolean firstOccurrence, int startIndex) {
		int index = s.indexOf("BC:Z:", startIndex);
		if (!firstOccurrence) {
			//Skip past this occurrence and take the following one
			return getBarcodeFromString(s, true, index + 1);
		}
		return Util.getInternedVB(s.substring(index + 5, s.indexOf('_', index)).substring(
			groupSettings.getVariableBarcodeStart(), groupSettings.getVariableBarcodeEnd() + 1).getBytes());
	}
private static boolean LENIENT_COORDINATE_PARSING = true;
private static int parseInt(final byte[] b, final int fromIndex) {
final int end = b.length - 1;
int i = fromIndex;
int result = 0;
while (i <= end) {
if (b[i] == ':') {
return result;
}
byte character = b[i];
if (character < 48 || character > 57) {
if (LENIENT_COORDINATE_PARSING) {
return result;
}
throw new ParseRTException("Character " + character + " is not a digit when parsing " + new String(b)
+ " from " + fromIndex);
}
result = 10 * result + b[i] - 48;
i++;
}
return result;
}
private static int nthIndexOf(final String s, final char c, final int n) {
int i = -1;
int found = 0;
while (found < n) {
i = s.indexOf(c, i + 1);
found++;
}
return i;
}
	//Used when variable barcodes are disabled (variableBarcodeEnd <= 0):
	//zero-length variable barcode and an all-N placeholder constant barcode
	private static final byte @NonNull[] EMPTY_BARCODE = new byte [0];
	private static final byte @NonNull[] DUMMY_BARCODE = {'N', 'N', 'N'};
	/**
	 * Convenience constructor: derives the full name from the record itself
	 * (see {@link #getReadFullName}) and uses the analyzer's stats list, then
	 * delegates to the main constructor.
	 */
	public ExtendedSAMRecord(@NonNull SAMRecord rec,
			@NonNull Mutinack analyzer, @NonNull SequenceLocation location,
			@NonNull Map<String, ExtendedSAMRecord> extSAMCache) {
		this(rec, getReadFullName(rec, false),
			analyzer.stats, analyzer, location, extSAMCache);
	}
	/**
	 * Returns the mate's variable barcode, resolving the mate lazily.
	 * {@code groupSettings.getNs()} is used as a "mate not available" sentinel,
	 * compared by identity (==): while the cached value is still the sentinel,
	 * every call retries mate resolution via {@code checkMate()} in case the
	 * mate has become available since the previous attempt.
	 */
	public byte @NonNull[] getMateVariableBarcode() {
		if (mateVariableBarcode == null ||
				mateVariableBarcode == groupSettings.getNs()) {
			checkMate();
			if (mate == null) {
				//Mate still unavailable: cache the sentinel (retried on next call)
				mateVariableBarcode = groupSettings.getNs();
			} else {
				mateVariableBarcode = nonNullify(mate).variableBarcode;
			}
		}
		return Objects.requireNonNull(mateVariableBarcode);
	}
@Override
public String toString() {
return (discarded ? "DISCARDED" : "" ) + name + ": " + "startNoBC: " + getAlignmentStart() +
"; endNoBC: " + getAlignmentEnd() +
"; alignmentStart: " + (getReadNegativeStrandFlag() ? "-" : "+") + getAlignmentStart() +
"; alignmentEnd: " + getAlignmentEnd() +
"; cigar: " + record.getCigarString() +
"; length: " + record.getReadLength() +
"; effectiveLength: " + effectiveLength +
"; nClipped: " + (nClipped == -1 ? "Uncomputed" : getnClipped()) +
"; insertSize: " + getInsertSize() +
"; bases: " + new String(record.getReadBases());
}
@Override
public Interval<Integer> getInterval() {
throw new RuntimeException("Unimplemented");
//return interval;
}
	/**
	 * Maps a reference coordinate to the corresponding 0-based position within
	 * this read by walking the CIGAR. Positions at or before the alignment
	 * start are mapped linearly from the unclipped start (i.e. through the
	 * soft-clipped prefix). If the reference position extends past the aligned
	 * portion, the remainder is extrapolated linearly.
	 */
	public int referencePositionToReadPosition(int refPosition) {
		if (refPosition <= getAlignmentStart()) {
			//Within (or before) the clipped prefix: direct offset from unclipped start
			return refPosition - getUnclippedStart();
		}
		List<CigarElement> cElmnts = getCigar().getCigarElements();
		final int nElmnts = cElmnts.size();
		int ceIndex = 0;
		//Start past the soft-clipped prefix
		int nReadBasesProcessed = getAlignmentStart() - getUnclippedStart();
		final int nBasesToAlign = refPosition - getAlignmentStart();
		int nBasesAligned = 0;
		while (ceIndex < nElmnts && nBasesAligned < nBasesToAlign) {
			final CigarElement c = cElmnts.get(ceIndex);
			final int blockLength = c.getLength();
			switch(c.getOperator()) {
				case M:
					//Consumes both read and reference bases
					int nTakenBases = Math.min(blockLength, nBasesToAlign - nBasesAligned);
					nBasesAligned += nTakenBases;
					nReadBasesProcessed += nTakenBases;
					break;
				case I:
					//Insertion: consumes read bases only
					nReadBasesProcessed += blockLength;
					break;
				case D:
				case N:
					//Deletion / intron skip: consumes reference bases only
					nBasesAligned += blockLength;
					break;
				default://Nothing to do
			}
			//Ignoring clipping at end of read
			ceIndex++;
		}
		if (nBasesAligned == nBasesToAlign) {
			return nReadBasesProcessed;
		} else {
			//Ran out of CIGAR: extrapolate the remaining distance linearly
			return nReadBasesProcessed + (nBasesToAlign - nBasesAligned);
		}
	}
public Cigar getCigar() {
return cigar;
}
private int intronAdjustment(final int readPosition, boolean reverse) {
if (!groupSettings.isRnaSeq()) {
return 0;
}
SettableInteger nReadBases = new SettableInteger(0);
SettableInteger intronBases = new SettableInteger(0);
MutableList<CigarElement> cigarElements = Lists.mutable.withAll(getCigar().getCigarElements());
if (reverse) {
cigarElements.reverseThis();
}
cigarElements.detect(e -> {
CigarOperator operator = e.getOperator();
int truncatedLength = operator.consumesReadBases() ?
Math.min(e.getLength(), readPosition - nReadBases.get())
:
e.getLength();
if (operator.consumesReadBases()) {
nReadBases.addAndGet(truncatedLength);
}
if (operator == CigarOperator.N) {
intronBases.addAndGet(e.getLength());
}
if (nReadBases.get() == readPosition) {
return true;
}
return false;
});
Assert.isTrue(nReadBases.get() == readPosition);
return intronBases.get();
}
public static final int NO_MATE_POSITION = Integer.MAX_VALUE - 1000;
public int tooCloseToBarcode(int readPosition, int ignoreFirstNBases) {
final boolean readOnNegativeStrand = getReadNegativeStrandFlag();
final int distance0;
if (readOnNegativeStrand) {
distance0 = readPosition - ((record.getReadLength() - 1) - ignoreFirstNBases);
} else {
distance0 = ignoreFirstNBases - readPosition;
}
//Now check if position is too close to other adapter barcode ligation site,
//or on the wrong side of it
final int refPositionOfMateLigationSite = getRefPositionOfMateLigationSite();
final int distance1;
if (!formsWrongPair() && refPositionOfMateLigationSite != NO_MATE_POSITION) {
final int readPositionOfLigSiteA = referencePositionToReadPosition(refPositionOfMateLigationSite - 1) + 1;
final int readPositionOfLigSiteB = referencePositionToReadPosition(refPositionOfMateLigationSite + 1) - 1;
if (getReadNegativeStrandFlag()) {
distance1 = Math.max(readPositionOfLigSiteA, readPositionOfLigSiteB) + ignoreFirstNBases - readPosition;
} else {
distance1 = readPosition - (Math.min(readPositionOfLigSiteA, readPositionOfLigSiteB ) - ignoreFirstNBases);
}
} else {
//Mate info not available, or pair is "wrong" pair
//Just go by effectiveLength to infer presence of adapter, although
//it should not happen in practice that reads form a wrong pair
//when there is adapter read-through
final int readLength = record.getReadLength();
final int adapterClipped = readLength - effectiveLength;
if (readOnNegativeStrand) {
distance1 = (adapterClipped == 0) ?
Integer.MIN_VALUE :
ignoreFirstNBases + adapterClipped - readPosition;
} else {
distance1 = (adapterClipped == 0) ?
Integer.MIN_VALUE :
readPosition - (effectiveLength - ignoreFirstNBases - 1);
}
}
return Math.max(distance0, distance1);
}
public int getRefPositionOfMateLigationSite() {
return getReadNegativeStrandFlag() ?
getMateUnclippedStart() :
getMateUnclippedEnd();
}
public int getRefAlignmentStart() {
int referenceStart = getAlignmentStart();
Assert.isFalse(referenceStart < 0);
return referenceStart;
}
public int getRefAlignmentEnd() {
int referenceEnd = getAlignmentEnd();
Assert.isFalse(referenceEnd < 0, () -> "Negative alignment end in read " + this);
return referenceEnd;
}
public int getMateRefAlignmentStart() {
checkMate();
return mate == null ? NO_MATE_POSITION : nonNullify(mate).getRefAlignmentStart();
}
public int getMateRefAlignmentEnd() {
checkMate();
return mate == null ? NO_MATE_POSITION : nonNullify(mate).getRefAlignmentEnd();
}
public int getInsertSize() {
return record.getInferredInsertSize();
}
public ExtendedSAMRecord getMate() {
checkMate();
return mate;
}
//Adapted from SamPairUtil
public PairOrientation getPairOrientation() {
final boolean readIsOnReverseStrand = record.getReadNegativeStrandFlag();
if (record.getReadUnmappedFlag() || !record.getReadPairedFlag() || record.getMateUnmappedFlag()) {
throw new IllegalArgumentException("Invalid SAMRecord: " + record.getReadName() + ". This method only works for SAMRecords " +
"that are paired reads with both reads aligned.");
}
if (readIsOnReverseStrand == record.getMateNegativeStrandFlag()) {
return PairOrientation.TANDEM;
}
final int positiveStrandFivePrimePos =
readIsOnReverseStrand ?
getMateOffsetUnclippedStart()
:
getOffsetUnclippedStart();
final int negativeStrandFivePrimePos =
readIsOnReverseStrand ?
getOffsetUnclippedEnd()
:
getMateOffsetUnclippedEnd();
return
positiveStrandFivePrimePos < negativeStrandFivePrimePos ?
PairOrientation.FR
:
PairOrientation.RF;
}
@SuppressWarnings("null")
public boolean formsWrongPair() {
PairOrientation po;
if (formsWrongPair == null) {
formsWrongPair = record.getReadPairedFlag() && (
record.getReadUnmappedFlag() ||
record.getMateUnmappedFlag() ||
(((mate = checkMate()) != null) && !record.getReferenceIndex().equals(mate.record.getReferenceIndex())) ||
(po = getPairOrientation()) == PairOrientation.TANDEM ||
po == PairOrientation.RF
);
}
return formsWrongPair;
}
public boolean getReadNegativeStrandFlag() {
return record.getReadNegativeStrandFlag();
}
public boolean getReadPositiveStrand() {
return !record.getReadNegativeStrandFlag();
}
	/**
	 * Attempts to locate this read's mate, caching the result in {@code mate}.
	 * Lookup order: previously cached value, then the in-memory record cache
	 * (when one was supplied), then a direct BAM query via {@link #getRead}
	 * (attempted at most once, tracked by {@code triedRetrievingMateFromFile}).
	 *
	 * @return the mate record, or null if the mate is unmapped or cannot be found
	 */
	public ExtendedSAMRecord checkMate() {
		if (mate != null) {
			return mate;
		}
		if (record.getMateUnmappedFlag()) {
			//Mate is unmapped: no lookup can succeed
			return null;
		}
		if (extSAMCache != null) {
			mate = extSAMCache.get(mateName);
		}
		if (mate == null && !triedRetrievingMateFromFile) {
			synchronized (this) {
				//Double-checked: another thread may have completed the file
				//lookup while we waited for the lock
				if (mate == null) {
					mate = getRead(analyzer, record.getReadName(), !record.getFirstOfPairFlag(),
						new SequenceLocation(record.getMateReferenceName(), groupSettings.indexContigNameReverseMap,
							record.getMateAlignmentStart() - 1, false) , -1, 1);
					triedRetrievingMateFromFile = true;
				}
			}
		}
		return mate;
	}
/** Indexing starts at 0
*/
public int getAlignmentStart() {
return record.getAlignmentStart() - 1;
}
/** Indexing starts at 0
*/
public int getUnclippedStart() {
return record.getUnclippedStart() - 1;
}
/** Indexing starts at 0
*/
public int getMateAlignmentStart() {
return record.getMateAlignmentStart() - 1;
}
/** Indexing starts at 0
*/
public int getAlignmentEnd() {
return record.getAlignmentEnd() - 1;
}
/** Indexing starts at 0
*
* @return
*/
public int getMateAlignmentEnd() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getAlignmentEnd();
}
public int getMateUnclippedEnd() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getUnclippedEnd();
}
private SequenceLocation unclippedEndHelper(boolean noMatePosition) {
return new SequenceLocation(getLocation().contigIndex, groupSettings.getContigNames(),
noMatePosition ? NO_MATE_POSITION : record.getUnclippedEnd() - 1 - intronAdjustment(16, true));
}
public SequenceLocation getOffsetUnclippedEndLoc() {
return unclippedEndHelper(false);
}
public int getOffsetUnclippedEnd() {
return getOffsetUnclippedEndLoc().position;
}
private SequenceLocation unclippedStartHelper(boolean noMatePosition) {
return new SequenceLocation(getLocation().contigIndex, groupSettings.getContigNames(),
noMatePosition ? NO_MATE_POSITION : record.getUnclippedStart() - 1 + intronAdjustment(16, false));
}
public SequenceLocation getOffsetUnclippedStartLoc() {
return unclippedStartHelper(false);
}
public int getOffsetUnclippedStart() {
return getOffsetUnclippedStartLoc().position;
}
public int getMateOffsetUnclippedEnd() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getOffsetUnclippedEnd();
}
public SequenceLocation getMateOffsetUnclippedEndLoc() {
checkMate();
if (mate == null) {
return unclippedEndHelper(true);
}
return nonNullify(mate).getOffsetUnclippedEndLoc();
}
public int getMateOffsetUnclippedStart() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getOffsetUnclippedStart();
}
public SequenceLocation getMateOffsetUnclippedStartLoc() {
checkMate();
if (mate == null) {
return unclippedStartHelper(true);
}
return nonNullify(mate).getOffsetUnclippedStartLoc();
}
public int getUnclippedEnd() {
return record.getUnclippedEnd() - 1;
}
public int getMateUnclippedStart() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getUnclippedStart();
}
public int getMappingQuality() {
return record.getMappingQuality();
}
public boolean overlapsWith(SequenceLocation otherLocation) {
if (getRefAlignmentStart() > otherLocation.position ||
getRefAlignmentEnd() < otherLocation.position ||
otherLocation.contigIndex != getReferenceIndex()) {
return false;
}
return true;
}
boolean duplexLeft() {
return formsWrongPair() ?
getOffsetUnclippedStart() <= getMateOffsetUnclippedStart()
: getReadPositiveStrand();
}
public @NonNull SequenceLocation getLocation() {
return location;
}
/**
* Not necessarily the same as that of SAMRecord
* @return
*/
public int getReferenceIndex() {
return location.contigIndex;
}
public @NonNull String getReferenceName() {
return location.getContigName();
}
public int getxLoc() {
return xLoc;
}
public int getyLoc() {
return yLoc;
}
public String getRunAndTile() {
return runAndTile;
}
public boolean isOpticalDuplicate() {
return opticalDuplicate;
}
public float getAveragePhred() {
return averagePhred;
}
public List<ExtendedAlignmentBlock> getAlignmentBlocks() {
return ExtendedAlignmentBlock.getAlignmentBlocks(getCigar(), record.getAlignmentStart(), "read cigar");
}
	/**
	 * Retrieves a single read from the BAM file by name and pair membership,
	 * searching a window of +/- {@code windowHalfWidth} around {@code location}.
	 * A reader is borrowed from the analyzer's pool and always returned in the
	 * finally block below.
	 *
	 * @param analyzer provides the reader pool and group settings
	 * @param name SAM read name to match exactly
	 * @param firstOfPair whether the wanted record must be first of pair
	 * @param location center of the query window (0-based position)
	 * @param avoidAlignmentStart0Based records whose 0-based alignment start
	 *        equals this value are skipped (e.g. to avoid returning the
	 *        querying record itself); pass -1 to skip nothing
	 * @param windowHalfWidth half-width of the query window, in bases
	 * @return the first matching record, or null if none found in the window
	 */
	public static @Nullable ExtendedSAMRecord getRead(Mutinack analyzer, String name, boolean firstOfPair,
			SequenceLocation location, int avoidAlignmentStart0Based, int windowHalfWidth) {
		SAMFileReader bamReader;
		try {
			bamReader = analyzer.readerPool.getObj();
		} catch (PoolException e) {
			throw new RuntimeException(e);
		}
		try {
			//SAM query coordinates are 1-based; clamp the window start at 1
			final QueryInterval[] bamContig = {
				bamReader.makeQueryInterval(location.contigName, Math.max(location.position + 1 - windowHalfWidth, 1),
					location.position + 1 + windowHalfWidth)};
			try (SAMRecordIterator it = bamReader.queryOverlapping(bamContig)) {
				while (it.hasNext()) {
					SAMRecord record = it.next();
					if (record.getReadName().equals(name) && record.getFirstOfPairFlag() == firstOfPair &&
							record.getAlignmentStart() - 1 != avoidAlignmentStart0Based) {
						return SubAnalyzer.getExtendedNoCaching(record,
							new SequenceLocation(location.contigName, analyzer.groupSettings.indexContigNameReverseMap,
								record.getAlignmentStart() - 1, false), analyzer);
					}
				}
				return null;
			}
		} finally {
			//Always return the pooled reader, even on exception
			analyzer.readerPool.returnObj(bamReader);
		}
	}
}
/**
* Mutinack mutation detection program.
* Copyright (C) 2014-2016 Olivier Cinquin
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, version 3.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.org.cinquin.mutinack;
import static uk.org.cinquin.mutinack.misc_util.Util.nonNullify;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;
import org.eclipse.collections.api.list.MutableList;
import org.eclipse.collections.impl.factory.Lists;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import contrib.edu.stanford.nlp.util.HasInterval;
import contrib.edu.stanford.nlp.util.Interval;
import contrib.net.sf.samtools.Cigar;
import contrib.net.sf.samtools.CigarElement;
import contrib.net.sf.samtools.CigarOperator;
import contrib.net.sf.samtools.SAMFileReader;
import contrib.net.sf.samtools.SAMFileReader.QueryInterval;
import contrib.net.sf.samtools.SAMRecord;
import contrib.net.sf.samtools.SAMRecordIterator;
import contrib.net.sf.samtools.SamPairUtil.PairOrientation;
import contrib.nf.fr.eraasoft.pool.PoolException;
import contrib.uk.org.lidalia.slf4jext.Logger;
import contrib.uk.org.lidalia.slf4jext.LoggerFactory;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.hash.TObjectByteHashMap;
import uk.org.cinquin.mutinack.candidate_sequences.ExtendedAlignmentBlock;
import uk.org.cinquin.mutinack.misc_util.Assert;
import uk.org.cinquin.mutinack.misc_util.SettableInteger;
import uk.org.cinquin.mutinack.misc_util.Util;
import uk.org.cinquin.mutinack.misc_util.exceptions.ParseRTException;
/**
* Hashcode and equality based on read name + first or second of pair.
* @author olivier
*
*/
public final class ExtendedSAMRecord implements HasInterval<Integer> {
static final Logger logger = LoggerFactory.getLogger(ExtendedSAMRecord.class);
public boolean discarded = false;
private final @Nullable Map<String, ExtendedSAMRecord> extSAMCache;
public final @NonNull SAMRecord record;
private final @NonNull String name;
private @Nullable ExtendedSAMRecord mate;
private boolean triedRetrievingMateFromFile = false;
private final @NonNull String mateName;
private final int hashCode;
public @Nullable DuplexRead duplexRead;
private byte @Nullable[] mateVariableBarcode;
public final byte @NonNull[] variableBarcode;
public final byte @Nullable[] constantBarcode;
public final @NonNull SequenceLocation location;
final int medianPhred;
final float averagePhred;
private final Cigar cigar;
/**
* Length of read ignoring trailing Ns.
*/
public final int effectiveLength;
int nReferenceDisagreements = 0;
public static final byte PHRED_NO_ENTRY = -1;
public final @NonNull TObjectByteHashMap<SequenceLocation> basePhredScores =
new TObjectByteHashMap<>(150, 0.5f, PHRED_NO_ENTRY);
private int nClipped = -1;
private Boolean formsWrongPair;
public boolean processed = false;
public boolean duplexAlreadyVisitedForStats = false;
public final int xLoc, yLoc;
public final String runAndTile;
public boolean opticalDuplicate = false;
public boolean hasOpticalDuplicates = false;
public boolean visitedForOptDups = false;
public int tempIndex0 = -1, tempIndex1 = -1;
private final @NonNull MutinackGroup groupSettings;
private final @NonNull Mutinack analyzer;
	/**
	 * Builds the unique full name used to key reads:
	 * {@code readName--{1|2}--alignmentStart}, with a "--suppl" suffix for
	 * supplementary alignments.
	 *
	 * @param rec underlying SAM record
	 * @param getMate if true, derive the name the MATE would get: the pair
	 *        member digit is flipped (via XOR with the first-of-pair flag), the
	 *        mate's alignment start is used, and no "--suppl" suffix is added
	 * @return the full name; not interned (see the commented-out intern() call)
	 */
	public static @NonNull String getReadFullName(SAMRecord rec, boolean getMate) {
		return (rec.getReadName() + "--" + ((getMate ^ rec.getFirstOfPairFlag())? "1" : "2") + "--" +
			(getMate ? rec.getMateAlignmentStart() : rec.getAlignmentStart())) +
			(!getMate && rec.getSupplementaryAlignmentFlag() ? "--suppl" : "")/*.intern()*/;
	}
public @NonNull String getFullName() {
return name;
}
@Override
public final int hashCode() {
return hashCode;
}
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
return name.equals(((ExtendedSAMRecord) obj).name);
}
	/**
	 * Computes the number of soft/hard-clipped bases at both ends of the read,
	 * excluding clipping attributable to adapter read-through (inferred from
	 * {@code adapterClipped = readLength - effectiveLength}), and stores the
	 * sum in {@code nClipped}. Called lazily from getnClipped().
	 */
	private void computeNClipped() {
		final int readLength = record.getReadLength();
		final int adapterClipped = readLength - effectiveLength;
		//Left-end clipping: on the positive strand it is genuine; on the
		//negative strand, discount it when it looks like adapter run-through
		int nClippedLeft = (!getReadNegativeStrandFlag() ?
			/* positive strand */
			getAlignmentStart() - getUnclippedStart() :
			/* negative strand */
			/* Note: getMateAlignmentEnd will return Integer.MAX_INT if mate not loaded*/
			(getAlignmentStart() <= getMateAlignmentStart() ?
				/* adapter run through, causes clipping we should ignore */
				0 :
				getAlignmentStart() - getUnclippedStart() - adapterClipped));
		nClippedLeft = Math.max(0, nClippedLeft);
		//Right-end clipping: mirror image of the logic above
		int nClippedRight = getReadNegativeStrandFlag() ?
			/* negative strand */
			getUnclippedEnd() - getAlignmentEnd() :
			/* positive strand */
			(getAlignmentEnd() >= getMateAlignmentEnd() ?
				/* adapter run through, causes clipping we should ignore */
				0 :
				getUnclippedEnd() - getAlignmentEnd() - adapterClipped);
		nClippedRight = Math.max(0, nClippedRight);
		nClipped = nClippedLeft + nClippedRight;
	}
public int getnClipped() {
if (nClipped == -1) {
computeNClipped();
}
return nClipped;
}
public void resetnClipped() {
nClipped = -1;
}
@SuppressWarnings("static-access")
public ExtendedSAMRecord(@NonNull SAMRecord rec, @NonNull String fullName,
@NonNull List<@NonNull AnalysisStats> stats,
@NonNull Mutinack analyzer, @NonNull SequenceLocation location,
@Nullable Map<String, ExtendedSAMRecord> extSAMCache) {
this.groupSettings = Objects.requireNonNull(analyzer.groupSettings);
this.analyzer = Objects.requireNonNull(analyzer);
this.extSAMCache = extSAMCache;
this.name = Objects.requireNonNull(fullName);
this.record = Objects.requireNonNull(rec);
this.cigar = rec.getCigar();
this.location = location;
hashCode = fullName.hashCode();
mateName = getReadFullName(rec, true);
final int readLength = rec.getReadLength();
//Find effective end of read, i.e. first position that is not an 'N' (the trimming
//step run prior to mutation detection might shorten reads that ran into the
//adapter because the insert was shorter than read length, by transforming all
//bases that should be ignored to an N)
@SuppressWarnings("hiding")
int effectiveLength = readLength;
final byte[] read = record.getReadBases();
final byte[] baseQualities = record.getBaseQualities();
if (getReadNegativeStrandFlag()) {
int i = 0;
while (read[i] == 'N' &&
i < readLength - 1) {
i++;
}
effectiveLength = readLength - i;
} else {
while (read[effectiveLength - 1] == 'N' &&
effectiveLength > 0) {
effectiveLength--;
}
}
Assert.isFalse(effectiveLength < 0);
this.effectiveLength = effectiveLength;
int sumBaseQualities0 = 0;
int nConsidered0 = 0;
TIntList qualities = new TIntArrayList(effectiveLength);
int n = Math.min(effectiveLength, readLength / 2);
for (int index1 = 0; index1 < n; index1++) {
nConsidered0++;
final byte b = baseQualities[index1];
sumBaseQualities0 += b;
stats.forEach(s -> s.nProcessedBases.add(location, 1));
stats.forEach(s -> s.phredSumProcessedbases.add(b));
qualities.add(b);
}
int avQuality = sumBaseQualities0 / nConsidered0;
stats.forEach(s-> s.averageReadPhredQuality0.insert(avQuality));
int sumBaseQualities1 = 0;
int nConsidered1 = 0;
for (int index1 = readLength / 2; index1 < effectiveLength; index1++) {
nConsidered1++;
final byte b = baseQualities[index1];
sumBaseQualities1 += b;
stats.forEach(s -> s.nProcessedBases.add(location, 1));
stats.forEach(s -> s.phredSumProcessedbases.add(b));
qualities.add(b);
}
if (nConsidered1 > 0) {
int avQuality1 = sumBaseQualities1 / nConsidered1;
stats.forEach(s -> s.averageReadPhredQuality1.insert(avQuality1));
}
qualities.sort();
medianPhred = qualities.get(qualities.size() / 2);
averagePhred = (sumBaseQualities0 + sumBaseQualities1) / ((float) (nConsidered0 + nConsidered1));
stats.forEach(s -> s.medianReadPhredQuality.insert(medianPhred));
Assert.isTrue(rec.getUnclippedEnd() - 1 >= getAlignmentEnd(),
(Supplier<Object>) () -> "" + (rec.getUnclippedEnd() - 1),
(Supplier<Object>) this::toString,
"Unclipped end is %s for read %s");
Assert.isTrue(rec.getAlignmentStart() - 1 >= getUnclippedStart());
final @NonNull String fullBarcodeString;
String bcAttr = (String) record.getAttribute("BC");
if (groupSettings.getVariableBarcodeEnd() > 0) {
final int firstBarcodeInNameIndex = name.indexOf("BC:Z:");
if (bcAttr == null) {
if (firstBarcodeInNameIndex == -1) {
throw new ParseRTException("Missing first barcode for read " + name +
' ' + record.toString());
}
final int index;
if (record.getFirstOfPairFlag()) {
index = firstBarcodeInNameIndex;
} else {
index = name.indexOf("BC:Z:", firstBarcodeInNameIndex + 1);
if (index == -1) {
throw new ParseRTException("Missing second barcode for read " + name +
' ' + record.toString());
}
}
fullBarcodeString = nonNullify(name.substring(index + 5, name.indexOf('_', index)));
} else {
fullBarcodeString = bcAttr;
}
variableBarcode = Util.getInternedVB(fullBarcodeString.substring(
groupSettings.getVariableBarcodeStart(), groupSettings.getVariableBarcodeEnd() + 1).getBytes());
constantBarcode = Util.getInternedCB(fullBarcodeString.substring(
groupSettings.getConstantBarcodeStart(), groupSettings.getConstantBarcodeEnd() + 1).getBytes());
if (firstBarcodeInNameIndex > -1) {
mateVariableBarcode = getMateBarcode(name, firstBarcodeInNameIndex);
}
} else {
variableBarcode = EMPTY_BARCODE;
constantBarcode = DUMMY_BARCODE;
}
String readName = record.getReadName();
int endFirstChunk = nthIndexOf(readName, ':', 5);
//Interning below required for equality checks performed in optical duplicate detection
runAndTile = record.getReadName().substring(0, endFirstChunk).intern();
byte[] readNameBytes = readName.getBytes();
xLoc = parseInt(readNameBytes, endFirstChunk + 1);
int endXLoc = readName.indexOf(':', endFirstChunk + 1);
yLoc = parseInt(readNameBytes, endXLoc + 1);
//interval = Interval.toInterval(rec.getAlignmentStart(), rec.getAlignmentEnd());
}
private byte[] getBarcode(String s) {
if (record.getFirstOfPairFlag()) {
return getBarcodeFromString(s, true, 0);
} else {
return getBarcodeFromString(s, false, 0);
}
}
private byte[] getMateBarcode(String s, int firstBarcodeInNameIndex) {
if (record.getFirstOfPairFlag()) {
return getBarcodeFromString(s, false, firstBarcodeInNameIndex);
} else {
return getBarcodeFromString(s, true, firstBarcodeInNameIndex);
}
}
	/**
	 * Extracts the variable barcode from a "BC:Z:<barcode>_" token in {@code s}.
	 * With firstOccurrence == false, the first token at/after startIndex is
	 * skipped and the NEXT one is used (via a single recursion).
	 *
	 * NOTE(review): assumes a matching token exists — if indexOf returns -1
	 * the substring below would misbehave; confirm callers guarantee presence.
	 */
	@SuppressWarnings("static-access")
	private byte[] getBarcodeFromString(String s, boolean firstOccurrence, int startIndex) {
		int index = s.indexOf("BC:Z:", startIndex);
		if (!firstOccurrence) {
			return getBarcodeFromString(s, true, index + 1);
		}
		//index + 5 skips the "BC:Z:" prefix; the token ends at the next '_'
		return Util.getInternedVB(s.substring(index + 5, s.indexOf('_', index)).substring(
			groupSettings.getVariableBarcodeStart(), groupSettings.getVariableBarcodeEnd() + 1).getBytes());
	}
private static boolean LENIENT_COORDINATE_PARSING = true;
private static int parseInt(final byte[] b, final int fromIndex) {
final int end = b.length - 1;
int i = fromIndex;
int result = 0;
while (i <= end) {
if (b[i] == ':') {
return result;
}
byte character = b[i];
if (character < 48 || character > 57) {
if (LENIENT_COORDINATE_PARSING) {
return result;
}
throw new ParseRTException("Character " + character + " is not a digit when parsing " + new String(b)
+ " from " + fromIndex);
}
result = 10 * result + b[i] - 48;
i++;
}
return result;
}
private static int nthIndexOf(final String s, final char c, final int n) {
int i = -1;
int found = 0;
while (found < n) {
i = s.indexOf(c, i + 1);
found++;
}
return i;
}
private static final byte @NonNull[] EMPTY_BARCODE = new byte [0];
private static final byte @NonNull[] DUMMY_BARCODE = {'N', 'N', 'N'};
public ExtendedSAMRecord(@NonNull SAMRecord rec,
@NonNull Mutinack analyzer, @NonNull SequenceLocation location,
@NonNull Map<String, ExtendedSAMRecord> extSAMCache) {
this(rec, getReadFullName(rec, false),
analyzer.stats, analyzer, location, extSAMCache);
}
public byte @NonNull[] getMateVariableBarcode() {
if (mateVariableBarcode == null ||
mateVariableBarcode == groupSettings.getNs()) {
checkMate();
if (mate == null) {
mateVariableBarcode = groupSettings.getNs();
} else {
mateVariableBarcode = nonNullify(mate).variableBarcode;
}
}
return Objects.requireNonNull(mateVariableBarcode);
}
@Override
public String toString() {
return (discarded ? "DISCARDED" : "" ) + name + ": " + "startNoBC: " + getAlignmentStart() +
"; endNoBC: " + getAlignmentEnd() +
"; alignmentStart: " + (getReadNegativeStrandFlag() ? "-" : "+") + getAlignmentStart() +
"; alignmentEnd: " + getAlignmentEnd() +
"; cigar: " + record.getCigarString() +
"; length: " + record.getReadLength() +
"; effectiveLength: " + effectiveLength +
"; nClipped: " + (nClipped == -1 ? "Uncomputed" : getnClipped()) +
"; insertSize: " + getInsertSize() +
"; bases: " + new String(record.getReadBases());
}
@Override
public Interval<Integer> getInterval() {
throw new RuntimeException("Unimplemented");
//return interval;
}
	/**
	 * Maps a 0-based reference position to the corresponding 0-based position
	 * within the read, by walking the CIGAR from the alignment start.
	 * Positions at or upstream of the alignment start are measured relative to
	 * the unclipped start. If the reference position extends past the aligned
	 * blocks, the remainder is added linearly (see final else branch).
	 *
	 * @param refPosition 0-based reference coordinate
	 * @return 0-based position within the read
	 */
	public int referencePositionToReadPosition(int refPosition) {
		if (refPosition <= getAlignmentStart()) {
			//Position lies in (or before) the leading clipped region
			return refPosition - getUnclippedStart();
		}
		List<CigarElement> cElmnts = getCigar().getCigarElements();
		final int nElmnts = cElmnts.size();
		int ceIndex = 0;
		//Start past the leading clip: read bases consumed before alignment start
		int nReadBasesProcessed = getAlignmentStart() - getUnclippedStart();
		final int nBasesToAlign = refPosition - getAlignmentStart();
		int nBasesAligned = 0;
		while (ceIndex < nElmnts && nBasesAligned < nBasesToAlign) {
			final CigarElement c = cElmnts.get(ceIndex);
			final int blockLength = c.getLength();
			switch(c.getOperator()) {
				case M:
					//Match/mismatch consumes both reference and read bases
					int nTakenBases = Math.min(blockLength, nBasesToAlign - nBasesAligned);
					nBasesAligned += nTakenBases;
					nReadBasesProcessed += nTakenBases;
					break;
				case I:
					//Insertion consumes read bases only
					nReadBasesProcessed += blockLength;
					break;
				case D:
				case N:
					//Deletion/skip consumes reference bases only
					nBasesAligned += blockLength;
					break;
				default://Nothing to do
			}
			//Ignoring clipping at end of read
			ceIndex++;
		}
		if (nBasesAligned == nBasesToAlign) {
			return nReadBasesProcessed;
		} else {
			//Ran out of CIGAR blocks: extrapolate the remaining distance
			return nReadBasesProcessed + (nBasesToAlign - nBasesAligned);
		}
	}
public Cigar getCigar() {
return cigar;
}
	/**
	 * For RNA-seq runs only: returns the number of intron bases (CIGAR 'N'
	 * operators) encountered while consuming {@code readPosition} read bases,
	 * walking the CIGAR from the start (or from the end when {@code reverse}).
	 * Returns 0 for non-RNA-seq runs.
	 *
	 * NOTE(review): N elements contribute their FULL length even on the
	 * element where the walk stops — presumably intended since N consumes no
	 * read bases; confirm.
	 *
	 * @param readPosition number of read-consuming bases to walk past
	 * @param reverse walk the CIGAR from the 3' end instead of the 5' end
	 * @return total intron ('N') length encountered during the walk
	 */
	private int intronAdjustment(final int readPosition, boolean reverse) {
		if (!groupSettings.isRnaSeq()) {
			return 0;
		}
		SettableInteger nReadBases = new SettableInteger(0);
		SettableInteger intronBases = new SettableInteger(0);
		MutableList<CigarElement> cigarElements = Lists.mutable.withAll(getCigar().getCigarElements());
		if (reverse) {
			cigarElements.reverseThis();
		}
		//detect(...) stops at the first element for which the lambda returns true
		cigarElements.detect(e -> {
			CigarOperator operator = e.getOperator();
			//Do not count read bases past the requested position
			int truncatedLength = operator.consumesReadBases() ?
				Math.min(e.getLength(), readPosition - nReadBases.get())
				:
				e.getLength();
			if (operator.consumesReadBases()) {
				nReadBases.addAndGet(truncatedLength);
			}
			if (operator == CigarOperator.N) {
				intronBases.addAndGet(e.getLength());
			}
			if (nReadBases.get() == readPosition) {
				return true;
			}
			return false;
		});
		//The walk must have consumed exactly readPosition read bases
		Assert.isTrue(nReadBases.get() == readPosition);
		return intronBases.get();
	}
	//Sentinel returned by mate-position getters when the mate is unavailable;
	//kept 1000 below Integer.MAX_VALUE, presumably to leave headroom for
	//small offset arithmetic without overflow — TODO confirm
	public static final int NO_MATE_POSITION = Integer.MAX_VALUE - 1000;

	/**
	 * Measures how close {@code readPosition} lies to a barcode ligation site,
	 * taking the maximum of two distances: (0) distance into the first
	 * {@code ignoreFirstNBases} at this read's own ligation end, and (1)
	 * distance past the MATE's ligation site (or, when mate info is missing or
	 * the pair is "wrong", an adapter-inferred boundary from effectiveLength).
	 * A positive return value indicates the position falls within the excluded
	 * zone — confirm exact threshold against callers, which are not visible here.
	 *
	 * @param readPosition 0-based position within the read
	 * @param ignoreFirstNBases number of bases to exclude next to each ligation site
	 * @return signed distance; larger values mean closer to / past a barcode
	 */
	public int tooCloseToBarcode(int readPosition, int ignoreFirstNBases) {
		final boolean readOnNegativeStrand = getReadNegativeStrandFlag();
		//Distance 0: proximity to this read's own ligation end
		final int distance0;
		if (readOnNegativeStrand) {
			distance0 = readPosition - ((record.getReadLength() - 1) - ignoreFirstNBases);
		} else {
			distance0 = ignoreFirstNBases - readPosition;
		}
		//Now check if position is too close to other adapter barcode ligation site,
		//or on the wrong side of it
		final int refPositionOfMateLigationSite = getRefPositionOfMateLigationSite();
		final int distance1;
		if (!formsWrongPair() && refPositionOfMateLigationSite != NO_MATE_POSITION) {
			//Map the mate ligation site (+/- 1) into this read's coordinates
			final int readPositionOfLigSiteA = referencePositionToReadPosition(refPositionOfMateLigationSite - 1) + 1;
			final int readPositionOfLigSiteB = referencePositionToReadPosition(refPositionOfMateLigationSite + 1) - 1;
			if (getReadNegativeStrandFlag()) {
				distance1 = Math.max(readPositionOfLigSiteA, readPositionOfLigSiteB) + ignoreFirstNBases - readPosition;
			} else {
				distance1 = readPosition - (Math.min(readPositionOfLigSiteA, readPositionOfLigSiteB ) - ignoreFirstNBases);
			}
		} else {
			//Mate info not available, or pair is "wrong" pair
			//Just go by effectiveLength to infer presence of adapter, although
			//it should not happen in practice that reads form a wrong pair
			//when there is adapter read-through
			final int readLength = record.getReadLength();
			final int adapterClipped = readLength - effectiveLength;
			if (readOnNegativeStrand) {
				distance1 = (adapterClipped == 0) ?
					Integer.MIN_VALUE :
					ignoreFirstNBases + adapterClipped - readPosition;
			} else {
				distance1 = (adapterClipped == 0) ?
					Integer.MIN_VALUE :
					readPosition - (effectiveLength - ignoreFirstNBases - 1);
			}
		}
		return Math.max(distance0, distance1);
	}
public int getRefPositionOfMateLigationSite() {
return getReadNegativeStrandFlag() ?
getMateUnclippedStart() :
getMateUnclippedEnd();
}
public int getRefAlignmentStart() {
int referenceStart = getAlignmentStart();
Assert.isFalse(referenceStart < 0);
return referenceStart;
}
public int getRefAlignmentEnd() {
int referenceEnd = getAlignmentEnd();
Assert.isFalse(referenceEnd < 0, () -> "Negative alignment end in read " + this);
return referenceEnd;
}
public int getMateRefAlignmentStart() {
checkMate();
return mate == null ? NO_MATE_POSITION : nonNullify(mate).getRefAlignmentStart();
}
public int getMateRefAlignmentEnd() {
checkMate();
return mate == null ? NO_MATE_POSITION : nonNullify(mate).getRefAlignmentEnd();
}
public int getInsertSize() {
return record.getInferredInsertSize();
}
public ExtendedSAMRecord getMate() {
checkMate();
return mate;
}
//Adapted from SamPairUtil
public PairOrientation getPairOrientation() {
final boolean readIsOnReverseStrand = record.getReadNegativeStrandFlag();
if (record.getReadUnmappedFlag() || !record.getReadPairedFlag() || record.getMateUnmappedFlag()) {
throw new IllegalArgumentException("Invalid SAMRecord: " + record.getReadName() + ". This method only works for SAMRecords " +
"that are paired reads with both reads aligned.");
}
if (readIsOnReverseStrand == record.getMateNegativeStrandFlag()) {
return PairOrientation.TANDEM;
}
final int positiveStrandFivePrimePos =
readIsOnReverseStrand ?
getMateOffsetUnclippedStart()
:
getOffsetUnclippedStart();
final int negativeStrandFivePrimePos =
readIsOnReverseStrand ?
getOffsetUnclippedEnd()
:
getMateOffsetUnclippedEnd();
return
positiveStrandFivePrimePos < negativeStrandFivePrimePos ?
PairOrientation.FR
:
PairOrientation.RF;
}
	/**
	 * Lazily computes and caches whether this read forms a "wrong" pair:
	 * paired AND (either end unmapped, mates on different contigs, or a
	 * TANDEM/RF pair orientation).
	 *
	 * Side effects: may assign {@code mate} (via checkMate()) and caches the
	 * result in the {@code formsWrongPair} field. The local {@code po} is only
	 * read after being assigned by the preceding || clause (hence the
	 * SuppressWarnings).
	 */
	@SuppressWarnings("null")
	public boolean formsWrongPair() {
		PairOrientation po;
		if (formsWrongPair == null) {
			formsWrongPair = record.getReadPairedFlag() && (
				record.getReadUnmappedFlag() ||
				record.getMateUnmappedFlag() ||
				(((mate = checkMate()) != null) && !record.getReferenceIndex().equals(mate.record.getReferenceIndex())) ||
				(po = getPairOrientation()) == PairOrientation.TANDEM ||
				po == PairOrientation.RF
			);
		}
		return formsWrongPair;
	}
public boolean getReadNegativeStrandFlag() {
return record.getReadNegativeStrandFlag();
}
public boolean getReadPositiveStrand() {
return !record.getReadNegativeStrandFlag();
}
	/**
	 * Attempts to locate this read's mate, caching the result in {@code mate}.
	 * Tries the in-memory record cache first (when one was supplied), then a
	 * direct BAM query via {@link #getRead} — attempted at most once (tracked
	 * by {@code triedRetrievingMateFromFile}) and skipped when the mate is
	 * flagged unmapped.
	 *
	 * NOTE(review): unsynchronized lazy initialization — confirm each record
	 * is only accessed from a single thread.
	 *
	 * @return the mate record, or null if it cannot be found
	 */
	private ExtendedSAMRecord checkMate() {
		if (mate == null) {
			if (extSAMCache != null)
				mate = extSAMCache.get(mateName);
			if (mate == null && !triedRetrievingMateFromFile && !record.getMateUnmappedFlag()) {
				mate = getRead(analyzer, record.getReadName(), !record.getFirstOfPairFlag(),
					new SequenceLocation(record.getMateReferenceName(), groupSettings.indexContigNameReverseMap,
						record.getMateAlignmentStart() - 1, false) , -1, 1);
				triedRetrievingMateFromFile = true;
			}
		}
		return mate;
	}
/** Indexing starts at 0
*/
public int getAlignmentStart() {
return record.getAlignmentStart() - 1;
}
/** Indexing starts at 0
*/
public int getUnclippedStart() {
return record.getUnclippedStart() - 1;
}
/** Indexing starts at 0
*/
public int getMateAlignmentStart() {
return record.getMateAlignmentStart() - 1;
}
/** Indexing starts at 0
*/
public int getAlignmentEnd() {
return record.getAlignmentEnd() - 1;
}
/** Indexing starts at 0
*
* @return
*/
public int getMateAlignmentEnd() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getAlignmentEnd();
}
public int getMateUnclippedEnd() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getUnclippedEnd();
}
private SequenceLocation unclippedEndHelper(boolean noMatePosition) {
return new SequenceLocation(getLocation().contigIndex, groupSettings.getContigNames(),
noMatePosition ? NO_MATE_POSITION : record.getUnclippedEnd() - 1 - intronAdjustment(16, true));
}
public SequenceLocation getOffsetUnclippedEndLoc() {
return unclippedEndHelper(false);
}
public int getOffsetUnclippedEnd() {
return getOffsetUnclippedEndLoc().position;
}
private SequenceLocation unclippedStartHelper(boolean noMatePosition) {
return new SequenceLocation(getLocation().contigIndex, groupSettings.getContigNames(),
noMatePosition ? NO_MATE_POSITION : record.getUnclippedStart() - 1 + intronAdjustment(16, false));
}
public SequenceLocation getOffsetUnclippedStartLoc() {
return unclippedStartHelper(false);
}
public int getOffsetUnclippedStart() {
return getOffsetUnclippedStartLoc().position;
}
public int getMateOffsetUnclippedEnd() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getOffsetUnclippedEnd();
}
public SequenceLocation getMateOffsetUnclippedEndLoc() {
checkMate();
if (mate == null) {
return unclippedEndHelper(true);
}
return nonNullify(mate).getOffsetUnclippedEndLoc();
}
public int getMateOffsetUnclippedStart() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getOffsetUnclippedStart();
}
public SequenceLocation getMateOffsetUnclippedStartLoc() {
checkMate();
if (mate == null) {
return unclippedStartHelper(true);
}
return nonNullify(mate).getOffsetUnclippedStartLoc();
}
public int getUnclippedEnd() {
return record.getUnclippedEnd() - 1;
}
public int getMateUnclippedStart() {
checkMate();
if (mate == null) {
return NO_MATE_POSITION;
}
return nonNullify(mate).getUnclippedStart();
}
public int getMappingQuality() {
return record.getMappingQuality();
}
public boolean overlapsWith(SequenceLocation otherLocation) {
if (getRefAlignmentStart() > otherLocation.position ||
getRefAlignmentEnd() < otherLocation.position ||
otherLocation.contigIndex != getReferenceIndex()) {
return false;
}
return true;
}
boolean duplexLeft() {
return formsWrongPair() ?
getOffsetUnclippedStart() <= getMateOffsetUnclippedStart()
: getReadPositiveStrand();
}
public @NonNull SequenceLocation getLocation() {
return location;
}
/**
* Not necessarily the same as that of SAMRecord
* @return
*/
public int getReferenceIndex() {
return location.contigIndex;
}
public @NonNull String getReferenceName() {
return location.getContigName();
}
public int getxLoc() {
return xLoc;
}
public int getyLoc() {
return yLoc;
}
public String getRunAndTile() {
return runAndTile;
}
public boolean isOpticalDuplicate() {
return opticalDuplicate;
}
public float getAveragePhred() {
return averagePhred;
}
public List<ExtendedAlignmentBlock> getAlignmentBlocks() {
return ExtendedAlignmentBlock.getAlignmentBlocks(getCigar(), record.getAlignmentStart(), "read cigar");
}
public static @Nullable ExtendedSAMRecord getRead(Mutinack analyzer, String name, boolean firstOfPair,
SequenceLocation location, int avoidAlignmentStart0Based, int windowHalfWidth) {
SAMFileReader bamReader;
try {
bamReader = analyzer.readerPool.getObj();
} catch (PoolException e) {
throw new RuntimeException(e);
}
try {
final QueryInterval[] bamContig = {
bamReader.makeQueryInterval(location.contigName, Math.max(location.position + 1 - windowHalfWidth, 1),
location.position + 1 + windowHalfWidth)};
try (SAMRecordIterator it = bamReader.queryOverlapping(bamContig)) {
while (it.hasNext()) {
SAMRecord record = it.next();
if (record.getReadName().equals(name) && record.getFirstOfPairFlag() == firstOfPair &&
record.getAlignmentStart() - 1 != avoidAlignmentStart0Based) {
return SubAnalyzer.getExtendedNoCaching(record,
new SequenceLocation(location.contigName, analyzer.groupSettings.indexContigNameReverseMap,
record.getAlignmentStart() - 1, false), analyzer);
}
}
return null;
}
} finally {
analyzer.readerPool.returnObj(bamReader);
}
}
}
| Improve checkMate (add synchronization to avoid multiple simultaneous retrievals from file).
| src/uk/org/cinquin/mutinack/ExtendedSAMRecord.java | Improve checkMate (add synchronization to avoid multiple simultaneous retrievals from file). | <ide><path>rc/uk/org/cinquin/mutinack/ExtendedSAMRecord.java
<ide> return !record.getReadNegativeStrandFlag();
<ide> }
<ide>
<del> private ExtendedSAMRecord checkMate() {
<del> if (mate == null) {
<del> if (extSAMCache != null)
<del> mate = extSAMCache.get(mateName);
<del> if (mate == null && !triedRetrievingMateFromFile && !record.getMateUnmappedFlag()) {
<del> mate = getRead(analyzer, record.getReadName(), !record.getFirstOfPairFlag(),
<del> new SequenceLocation(record.getMateReferenceName(), groupSettings.indexContigNameReverseMap,
<del> record.getMateAlignmentStart() - 1, false) , -1, 1);
<del> triedRetrievingMateFromFile = true;
<add> public ExtendedSAMRecord checkMate() {
<add> if (mate != null) {
<add> return mate;
<add> }
<add> if (record.getMateUnmappedFlag()) {
<add> return null;
<add> }
<add> if (extSAMCache != null) {
<add> mate = extSAMCache.get(mateName);
<add> }
<add> if (mate == null && !triedRetrievingMateFromFile) {
<add> synchronized (this) {
<add> if (mate == null) {
<add> mate = getRead(analyzer, record.getReadName(), !record.getFirstOfPairFlag(),
<add> new SequenceLocation(record.getMateReferenceName(), groupSettings.indexContigNameReverseMap,
<add> record.getMateAlignmentStart() - 1, false) , -1, 1);
<add> triedRetrievingMateFromFile = true;
<add> }
<ide> }
<ide> }
<ide> return mate; |
|
JavaScript | bsd-2-clause | c392544553a8994eebc6b2853a54321d9481211b | 0 | bobbybee/libastron-unity,bobbybee/libastron-unity | var fs = require('fs');
var DCFile = [];
var classLookup = {};
var structLookup = {};
var fieldLookup = [];
var reverseFieldLookup = {};
var classFields = {};
var tempDC = [];
var index = 0;
var line = "";
var lindex = -1;
var typedefs = {};
var outside = false;
function searchDC(dc, name){
var i = 0;
while(i < dc[2].length){
if(name == dc[2][i][1])
return i;
++i;
}
return -1;
}
//reads up to delimeter
function readUpTo(del){
if(!del) del = ' ';
var temp = "";
while(line[index] != del) temp += line[index++];
index++; // skip del
return temp;
}
function readUpToEither(dels){
var temp = "";
for(;;) {
if(dels.indexOf(line[index]) > -1) break;
temp += line[index++];
}
var del = line[index++];
return [temp, del];
}
function readLine(){
lindex++;
index = 0;
line = lines[lindex];
if(line.length == 0){
return;
} else if(line[0] == '}'){
outside = false;
DCFile.push(tempDC);
return;
}
if(!outside){
var type = readUpTo(" ");
switch(type){
case 'from': // something pythony.. do I care?
break;
case 'typedef':
var oldT = readUpTo(" ");
var newT = readUpTo(";");
if(newT[newT.length-1] == ']') {
// array clip
newT = newT.slice(0,-1);
newT = newT.split('[');
oldT += '['+newT[1]+']';
newT = newT[0];
}
typedefs[newT] = oldT;
break;
case 'struct':
var structName = readUpTo(" ");
outside = true;
tempDC = ["struct", structName, []];
structLookup[structName] = DCFile.length;
break;
case 'dclass':
var className = readUpTo(" ");
var inherited = [];
if(line[index] == ':'){
// inheritance
index += 2;
loop_cont: for(;;){
var tmp = readUpToEither([",", " "]);
var t_class = DCFile[classLookup[tmp[0]]];
if(!t_class){
console.log("NULL TClass "+(JSON.stringify(tmp)));
console.log(line);
continue loop_cont; // skip for now
}
var j = 0;
while(j < t_class[2].length){
inherited.push(t_class[2][j]);
reverseFieldLookup[className+"::"+t_class[2][j][1]] = reverseFieldLookup[tmp[0]+"::"+t_class[2][j][1]]
++j;
}
index++;
if(tmp[1] == ' ' || line[index] == '{') break;
}
}
outside = true;
tempDC = ["dclass", className, inherited];
classLookup[className] = DCFile.length;
break;
}
} else {
index += 2; // two whitespace.. idk why
tempDC[2].push(readType());
}
}
function readType(){
var res = readUpToEither([" ", "("]);
switch(res[1]){
case ' ': // variable of some sort
var type_v = res[0];
var name_v = readUpToEither([" ", ";"]);
if(name_v[0] == ':'){ // morph
var name_m = res[0];
var components = [];
for(;;){
var temp = readUpToEither([",",";"]);
index += 1;
components.push(temp[0]);
if(temp[1] == ';') break;
}
var modifiers_m = [];
var params_m = [];
var i = 0;
while(i < components.length){
var j = searchDC(tempDC, components[i++]);
if(j == -1){
console.log("ERROR: nonexistant component "+components[i-1]);
}
modifiers_m = tempDC[2][j][2];
params_m = params_m.concat(tempDC[2][j][3])
}
modifiers_m.push["morph"];
reverseFieldLookup[tempDC[1]+"::"+name_m] = fieldLookup.length;
fieldLookup.push([tempDC[1], "function", name_m, modifiers_m, params_m, components]);
return ["function", name_m, modifiers_m, params_m, components];
break;
}
var modifiers_v = [];
if(name_v[1] == ' '){
// modifiers
for(;;){
var tmp_v = readUpToEither([" ", ";"]);
modifiers_v.push(tmp_v[0]);
if(tmp_v[1] == ';') break;
}
}
name_v = name_v[0];
// avoid clobbering array brackets with property name
if(name_v[name_v.length-1] == ']'){
name_v = name_v.slice(0, -2);
type_v += "[]";
}
reverseFieldLookup[tempDC[1]+"::"+name_v] = fieldLookup.length;
fieldLookup.push([tempDC[1], type_v, name_v, modifiers_v]);
return [type_v, name_v, modifiers_v];
case '(': // function
var name_f = res[0];
var params_f = [];
for(;;){
var param_f = readUpToEither([",","(", ")"]);
while(param_f[0] == ' '){
param_f = param_f.slice(1);
}
if(param_f[1] == '('){
readUpTo(")");
if(line[index+1] == '['){
index += 2;
var ind = readUpTo("]");
param_f[0] += " ["+ind+"]";
}
params_f.push(param_f[0]);
if(line[index++] == ')') break;
} else {
params_f.push(param_f[0]);
if(param_f[1] == ')') break;
index++;
}
}
var modifiers_f = [];
if(line[index++] == ' '){
// modifiers
for(;;){
var tmp_f = readUpToEither([" ", ";"]);
modifiers_f.push(tmp_f[0]);
if(tmp_f[1] == ';') break;
}
}
reverseFieldLookup[tempDC[1]+"::"+name_f] = fieldLookup.length;
fieldLookup.push([tempDC[1], "function", name_f, modifiers_f, params_f]);
return ["function", name_f, modifiers_f, params_f];
}
}
module.exports = function(fname) {
contents = fs.readFileSync(fname).toString();
lines = contents.split('\n');
var i = lines.length;
while(i--){ readLine();}
// dump
(function(){
//fs.writeFileSync("./DCFile.js", "module.exports.DCFile="+JSON.stringify(DCFile)+";module.exports.fieldLookup="+JSON.stringify(fieldLookup)+";module.exports.reverseFieldLookup="+JSON.stringify(reverseFieldLookup)+";module.exports.classLookup="+JSON.stringify(classLookup)+";module.exports.structLookup="+JSON.stringify(structLookup)+";module.exports.typedefs="+JSON.stringify(typedefs)+";");
var csrootLevel = "public static string[] DCRoot = new string[] { ";
var csreverseRootLevel = "public static var reverseDCRoot = new Dictionary<string, UInt16>{";
var csfieldLookup = "public static string[][] fieldLookup = new string[][]{";
var csfieldModifierLookup = "public static string[][] fieldModifierLookup = new string[][]{"
var csfieldNameLookup = "public static string[] fieldNameLookup = new string[]{";
var csreverseFieldLookup = "public static var reverseFieldLookup = new Dictionary<string, UInt16> {";
var csclassLookup = "public static var classLookup = new Dictionary<string, UInt16[]> {";
for(f = 0; f < DCFile.length; ++f) {
csrootLevel += "\""+DCFile[f][1]+"\",";
csreverseRootLevel += "{\""+DCFile[f][1]+"\", "+f+"},";
var fieldVals = [];
for(var n = 0; n < DCFile[f][2].length; ++n) {
fieldVals.push(reverseFieldLookup[DCFile[f][1]+"::"+DCFile[f][2][n][1]]);
}
csclassLookup += "{\""+DCFile[f][1]+"\",new UInt16[]{"+(JSON.stringify(fieldVals).slice(1,-1))+"}},"
}
for(f = 0; f < fieldLookup.length; ++f) {
var fieldArgs = fieldLookup[f][4];
for(fs = 0; fs < fieldArgs.length; ++fs) {
fieldArgs[fs] = fieldArgs[fs].split(" ").slice(0,-1).join("");
}
csfieldLookup += "new string [] {"+(JSON.stringify(fieldArgs).slice(1,-1))+"},";
csfieldModifierLookup += "new string [] {"+(JSON.stringify(fieldLookup[f][3]).slice(1,-1))+"},";
csfieldNameLookup += "\""+fieldLookup[f][2]+"\",";
}
var rfKeys = Object.keys(reverseFieldLookup);
for(f = 0; f < rfKeys.length; ++f) {
csreverseFieldLookup += "{\""+rfKeys[f]+"\", "+reverseFieldLookup[rfKeys[f]]+"},";
}
csrootLevel = csrootLevel.slice(0,-1) + "};\n";
csreverseRootLevel = csreverseRootLevel.slice(0,-1) + "};\n";
csfieldLookup = csfieldLookup.slice(0,-1) + "};\n";
csfieldModiferLookup = csfieldModifierLookup.slice(0,-1) + "};\n";
csfieldNameLookup = csfieldNameLookup.slice(0,-1) + "};\n";
csreverseFieldLookup = csreverseFieldLookup.slice(0,-1) + "};\n";
csclassLookup = csclassLookup.slice(0,-1) + "};\n";
console.log("using System;\nusing System.Collections.Generic;\npublic static class DCFile {\n"+csrootLevel+csreverseRootLevel+csfieldLookup+csfieldModiferLookup+csfieldNameLookup+csreverseFieldLookup+csclassLookup+"};");
})();
};
if(process.argv[2]) module.exports(process.argv[2]) | CSharpDCParser.js | var fs = require('fs');
var DCFile = [];
var classLookup = {};
var structLookup = {};
var fieldLookup = [];
var reverseFieldLookup = {};
var classFields = {};
var tempDC = [];
var index = 0;
var line = "";
var lindex = -1;
var typedefs = {};
var outside = false;
function searchDC(dc, name){
var i = 0;
while(i < dc[2].length){
if(name == dc[2][i][1])
return i;
++i;
}
return -1;
}
//reads up to delimeter
function readUpTo(del){
if(!del) del = ' ';
var temp = "";
while(line[index] != del) temp += line[index++];
index++; // skip del
return temp;
}
function readUpToEither(dels){
var temp = "";
for(;;) {
if(dels.indexOf(line[index]) > -1) break;
temp += line[index++];
}
var del = line[index++];
return [temp, del];
}
function readLine(){
lindex++;
index = 0;
line = lines[lindex];
if(line.length == 0){
return;
} else if(line[0] == '}'){
outside = false;
DCFile.push(tempDC);
return;
}
if(!outside){
var type = readUpTo(" ");
switch(type){
case 'from': // something pythony.. do I care?
break;
case 'typedef':
var oldT = readUpTo(" ");
var newT = readUpTo(";");
if(newT[newT.length-1] == ']') {
// array clip
newT = newT.slice(0,-1);
newT = newT.split('[');
oldT += '['+newT[1]+']';
newT = newT[0];
}
typedefs[newT] = oldT;
break;
case 'struct':
var structName = readUpTo(" ");
outside = true;
tempDC = ["struct", structName, []];
structLookup[structName] = DCFile.length;
break;
case 'dclass':
var className = readUpTo(" ");
var inherited = [];
if(line[index] == ':'){
// inheritance
index += 2;
loop_cont: for(;;){
var tmp = readUpToEither([",", " "]);
var t_class = DCFile[classLookup[tmp[0]]];
if(!t_class){
console.log("NULL TClass "+(JSON.stringify(tmp)));
console.log(line);
continue loop_cont; // skip for now
}
var j = 0;
while(j < t_class[2].length){
inherited.push(t_class[2][j]);
reverseFieldLookup[className+"::"+t_class[2][j][1]] = reverseFieldLookup[tmp[0]+"::"+t_class[2][j][1]]
++j;
}
index++;
if(tmp[1] == ' ' || line[index] == '{') break;
}
}
outside = true;
tempDC = ["dclass", className, inherited];
classLookup[className] = DCFile.length;
break;
}
} else {
index += 2; // two whitespace.. idk why
tempDC[2].push(readType());
}
}
function readType(){
var res = readUpToEither([" ", "("]);
switch(res[1]){
case ' ': // variable of some sort
var type_v = res[0];
var name_v = readUpToEither([" ", ";"]);
if(name_v[0] == ':'){ // morph
var name_m = res[0];
var components = [];
for(;;){
var temp = readUpToEither([",",";"]);
index += 1;
components.push(temp[0]);
if(temp[1] == ';') break;
}
var modifiers_m = [];
var params_m = [];
var i = 0;
while(i < components.length){
var j = searchDC(tempDC, components[i++]);
if(j == -1){
console.log("ERROR: nonexistant component "+components[i-1]);
}
modifiers_m = tempDC[2][j][2];
params_m = params_m.concat(tempDC[2][j][3])
}
modifiers_m.push["morph"];
reverseFieldLookup[tempDC[1]+"::"+name_m] = fieldLookup.length;
fieldLookup.push([tempDC[1], "function", name_m, modifiers_m, params_m, components]);
return ["function", name_m, modifiers_m, params_m, components];
break;
}
var modifiers_v = [];
if(name_v[1] == ' '){
// modifiers
for(;;){
var tmp_v = readUpToEither([" ", ";"]);
modifiers_v.push(tmp_v[0]);
if(tmp_v[1] == ';') break;
}
}
name_v = name_v[0];
// avoid clobbering array brackets with property name
if(name_v[name_v.length-1] == ']'){
name_v = name_v.slice(0, -2);
type_v += "[]";
}
reverseFieldLookup[tempDC[1]+"::"+name_v] = fieldLookup.length;
fieldLookup.push([tempDC[1], type_v, name_v, modifiers_v]);
return [type_v, name_v, modifiers_v];
case '(': // function
var name_f = res[0];
var params_f = [];
for(;;){
var param_f = readUpToEither([",","(", ")"]);
while(param_f[0] == ' '){
param_f = param_f.slice(1);
}
if(param_f[1] == '('){
readUpTo(")");
if(line[index+1] == '['){
index += 2;
var ind = readUpTo("]");
param_f[0] += " ["+ind+"]";
}
params_f.push(param_f[0]);
if(line[index++] == ')') break;
} else {
params_f.push(param_f[0]);
if(param_f[1] == ')') break;
index++;
}
}
var modifiers_f = [];
if(line[index++] == ' '){
// modifiers
for(;;){
var tmp_f = readUpToEither([" ", ";"]);
modifiers_f.push(tmp_f[0]);
if(tmp_f[1] == ';') break;
}
}
reverseFieldLookup[tempDC[1]+"::"+name_f] = fieldLookup.length;
fieldLookup.push([tempDC[1], "function", name_f, modifiers_f, params_f]);
return ["function", name_f, modifiers_f, params_f];
}
}
module.exports = function(fname) {
contents = fs.readFileSync(fname).toString();
lines = contents.split('\n');
var i = lines.length;
while(i--){ readLine();}
// dump
(function(){
//fs.writeFileSync("./DCFile.js", "module.exports.DCFile="+JSON.stringify(DCFile)+";module.exports.fieldLookup="+JSON.stringify(fieldLookup)+";module.exports.reverseFieldLookup="+JSON.stringify(reverseFieldLookup)+";module.exports.classLookup="+JSON.stringify(classLookup)+";module.exports.structLookup="+JSON.stringify(structLookup)+";module.exports.typedefs="+JSON.stringify(typedefs)+";");
var csrootLevel = "public static string[] DCRoot = new string[] { ";
var csreverseRootLevel = "public static var reverseDCRoot = new Dictionary<string, UInt16>{";
var csfieldLookup = "public static string[][] fieldLookup = new string[][]{";
var csfieldModifierLookup = "public static string[][] fieldModifierLookup = new string[][]{"
var csreverseFieldLookup = "public static var reverseFieldLookup = new Dictionary<string, UInt16> {";
var csclassLookup = "public static var classLookup = new Dictionary<string, UInt16[]> {";
for(f = 0; f < DCFile.length; ++f) {
csrootLevel += "\""+DCFile[f][1]+"\",";
csreverseRootLevel += "{\""+DCFile[f][1]+"\", "+f+"},";
var fieldVals = [];
for(var n = 0; n < DCFile[f][2].length; ++n) {
fieldVals.push(reverseFieldLookup[DCFile[f][1]+"::"+DCFile[f][2][n][1]]);
}
csclassLookup += "{\""+DCFile[f][1]+"\",new UInt16[]{"+(JSON.stringify(fieldVals).slice(1,-1))+"}},"
}
for(f = 0; f < fieldLookup.length; ++f) {
var fieldArgs = fieldLookup[f][4];
for(fs = 0; fs < fieldArgs.length; ++fs) {
fieldArgs[fs] = fieldArgs[fs].split(" ").slice(0,-1).join("");
}
csfieldLookup += "new string [] {"+(JSON.stringify(fieldArgs).slice(1,-1))+"},";
csfieldModifierLookup += "new string [] {"+(JSON.stringify(fieldLookup[f][3]).slice(1,-1))+"},";
}
var rfKeys = Object.keys(reverseFieldLookup);
for(f = 0; f < rfKeys.length; ++f) {
csreverseFieldLookup += "{\""+rfKeys[f]+"\", "+reverseFieldLookup[rfKeys[f]]+"},";
}
csrootLevel = csrootLevel.slice(0,-1) + "};\n";
csreverseRootLevel = csreverseRootLevel.slice(0,-1) + "};\n";
csfieldLookup = csfieldLookup.slice(0,-1) + "};\n";
csfieldModiferLookup = csfieldModifierLookup.slice(0,-1) + "};\n";
csreverseFieldLookup = csreverseFieldLookup.slice(0,-1) + "};\n";
csclassLookup = csclassLookup.slice(0,-1) + "};\n";
console.log("using System;\nusing System.Collections.Generic;\npublic static class DCFile {\n"+csrootLevel+csreverseRootLevel+csfieldLookup+csfieldModiferLookup+csreverseFieldLookup+csclassLookup+"};");
})();
};
if(process.argv[2]) module.exports(process.argv[2]) | Adds bare names to DC lookup table
| CSharpDCParser.js | Adds bare names to DC lookup table | <ide><path>SharpDCParser.js
<ide>
<ide> var csfieldLookup = "public static string[][] fieldLookup = new string[][]{";
<ide> var csfieldModifierLookup = "public static string[][] fieldModifierLookup = new string[][]{"
<add> var csfieldNameLookup = "public static string[] fieldNameLookup = new string[]{";
<ide> var csreverseFieldLookup = "public static var reverseFieldLookup = new Dictionary<string, UInt16> {";
<ide>
<ide> var csclassLookup = "public static var classLookup = new Dictionary<string, UInt16[]> {";
<ide>
<ide> csfieldLookup += "new string [] {"+(JSON.stringify(fieldArgs).slice(1,-1))+"},";
<ide> csfieldModifierLookup += "new string [] {"+(JSON.stringify(fieldLookup[f][3]).slice(1,-1))+"},";
<add> csfieldNameLookup += "\""+fieldLookup[f][2]+"\",";
<ide> }
<ide>
<ide> var rfKeys = Object.keys(reverseFieldLookup);
<ide> csreverseRootLevel = csreverseRootLevel.slice(0,-1) + "};\n";
<ide> csfieldLookup = csfieldLookup.slice(0,-1) + "};\n";
<ide> csfieldModiferLookup = csfieldModifierLookup.slice(0,-1) + "};\n";
<add> csfieldNameLookup = csfieldNameLookup.slice(0,-1) + "};\n";
<ide> csreverseFieldLookup = csreverseFieldLookup.slice(0,-1) + "};\n";
<ide> csclassLookup = csclassLookup.slice(0,-1) + "};\n";
<ide>
<del> console.log("using System;\nusing System.Collections.Generic;\npublic static class DCFile {\n"+csrootLevel+csreverseRootLevel+csfieldLookup+csfieldModiferLookup+csreverseFieldLookup+csclassLookup+"};");
<add> console.log("using System;\nusing System.Collections.Generic;\npublic static class DCFile {\n"+csrootLevel+csreverseRootLevel+csfieldLookup+csfieldModiferLookup+csfieldNameLookup+csreverseFieldLookup+csclassLookup+"};");
<ide> })();
<ide> };
<ide> |
|
Java | apache-2.0 | af1332531c86e4a407c85b5ab42ca570ab466288 | 0 | opensingular/singular-core,opensingular/singular-core,opensingular/singular-core,opensingular/singular-core | package br.net.mirante.singular.flow.core;
import java.io.Serializable;
import java.util.function.Supplier;
/**
* Reresenta uma referência a ProcessDefinition que pode ser serializada de modo
* que não provoque um serialização de toda a definição. Posteriormente a ser
* restaurada, recarrar a instância de ProcessDefinition sob demanda.
*
* @author Daniel C. Bordin
*/
public abstract class RefProcessDefinition implements Serializable, Supplier<ProcessDefinition<?>> {
private transient ProcessDefinition<?> processDefinition;
protected abstract ProcessDefinition<?> reload();
@Override
public final ProcessDefinition<?> get() {
if (processDefinition == null) {
processDefinition = reload();
}
return processDefinition;
}
public static RefProcessDefinition of(Class<? extends ProcessDefinition<?>> processDefinitionClass) {
return ProcessDefinitionCache.getDefinition(processDefinitionClass).getSerializableReference();
}
public static RefProcessDefinition of(ProcessDefinition<?> definition) {
return definition.getSerializableReference();
}
}
| flow/core/src/main/java/br/net/mirante/singular/flow/core/RefProcessDefinition.java | package br.net.mirante.singular.flow.core;
import java.io.Serializable;
/**
* Reresenta uma referência a ProcessDefinition que pode ser serializada de modo
* que não provoque um serialização de toda a definição. Posteriormente a ser
* restaurada, recarrar a instância de ProcessDefinition sob demanda.
*
* @author Daniel C. Bordin
*/
public abstract class RefProcessDefinition implements Serializable {
private transient ProcessDefinition<?> processDefinition;
protected abstract ProcessDefinition<?> reload();
public final ProcessDefinition<?> get() {
if (processDefinition == null) {
processDefinition = reload();
}
return processDefinition;
}
public void detach() {
processDefinition = null;
}
public static RefProcessDefinition of(Class<? extends ProcessDefinition<?>> processDefinitionClass) {
return ProcessDefinitionCache.getDefinition(processDefinitionClass).getSerializableReference();
}
public static RefProcessDefinition of(ProcessDefinition<?> definition) {
return definition.getSerializableReference();
}
}
| [FLOW-CORE] Removido método não utilizado
| flow/core/src/main/java/br/net/mirante/singular/flow/core/RefProcessDefinition.java | [FLOW-CORE] Removido método não utilizado | <ide><path>low/core/src/main/java/br/net/mirante/singular/flow/core/RefProcessDefinition.java
<ide> package br.net.mirante.singular.flow.core;
<ide>
<ide> import java.io.Serializable;
<add>import java.util.function.Supplier;
<ide>
<ide> /**
<ide> * Reresenta uma referência a ProcessDefinition que pode ser serializada de modo
<ide> *
<ide> * @author Daniel C. Bordin
<ide> */
<del>public abstract class RefProcessDefinition implements Serializable {
<add>public abstract class RefProcessDefinition implements Serializable, Supplier<ProcessDefinition<?>> {
<ide>
<ide> private transient ProcessDefinition<?> processDefinition;
<ide>
<ide> protected abstract ProcessDefinition<?> reload();
<ide>
<add> @Override
<ide> public final ProcessDefinition<?> get() {
<ide> if (processDefinition == null) {
<ide> processDefinition = reload();
<ide> }
<ide> return processDefinition;
<del> }
<del>
<del> public void detach() {
<del> processDefinition = null;
<ide> }
<ide>
<ide> public static RefProcessDefinition of(Class<? extends ProcessDefinition<?>> processDefinitionClass) { |
|
Java | apache-2.0 | bc483ac2e257b7b955efa4a74b429502306bc6a9 | 0 | noemus/kotlin-eclipse,noemus/kotlin-eclipse | /*******************************************************************************
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package org.jetbrains.kotlin.ui.editors.outline;
import java.util.List;
import org.eclipse.jdt.ui.ISharedImages;
import org.eclipse.jdt.ui.JavaUI;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.swt.graphics.Image;
import org.jetbrains.kotlin.core.model.KotlinAnalysisFileCache;
import org.jetbrains.kotlin.descriptors.CallableDescriptor;
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor;
import org.jetbrains.kotlin.psi.KtClass;
import org.jetbrains.kotlin.psi.KtClassInitializer;
import org.jetbrains.kotlin.psi.KtDeclaration;
import org.jetbrains.kotlin.psi.KtElement;
import org.jetbrains.kotlin.psi.KtFile;
import org.jetbrains.kotlin.psi.KtFunction;
import org.jetbrains.kotlin.psi.KtPackageDirective;
import org.jetbrains.kotlin.psi.KtParameter;
import org.jetbrains.kotlin.psi.KtProperty;
import org.jetbrains.kotlin.psi.KtTypeReference;
import org.jetbrains.kotlin.renderer.DescriptorRenderer;
import org.jetbrains.kotlin.resolve.BindingContext;
import org.jetbrains.kotlin.types.KotlinType;
public class PsiLabelProvider extends LabelProvider {
public static final String CLASS_INITIALIZER = "<class initializer>";
@Override
public String getText(Object element) {
if (element instanceof KtElement) {
return getPresentableElement((KtElement) element);
}
return "";
}
@Override
public Image getImage(Object element) {
String imageName = null;
if (element instanceof KtClass) {
if (((KtClass) element).isInterface()) {
imageName = ISharedImages.IMG_OBJS_INTERFACE;
} else {
imageName = ISharedImages.IMG_OBJS_CLASS;
}
} else if (element instanceof KtPackageDirective) {
imageName = ISharedImages.IMG_OBJS_PACKAGE;
} else if (element instanceof KtFunction) {
imageName = ISharedImages.IMG_OBJS_PUBLIC;
} else if (element instanceof KtProperty) {
imageName = ISharedImages.IMG_FIELD_PUBLIC;
}
if (imageName != null) {
return JavaUI.getSharedImages().getImage(imageName);
}
return null;
}
// Source code is taken from org.jetbrains.kotlin.idea.projectView.JetDeclarationTreeNode, updateImple()
private String getPresentableElement(KtElement declaration) {
String text = "";
if (declaration != null) {
text = declaration.getName();
if (text == null) return "";
if (declaration instanceof KtClassInitializer) {
text = CLASS_INITIALIZER;
} else if (declaration instanceof KtProperty) {
KtProperty property = (KtProperty) declaration;
KtTypeReference ref = property.getTypeReference();
if (ref != null) {
text += " ";
text += ":";
text += " ";
text += ref.getText();
} else {
text += computeReturnType(property);
}
} else if (declaration instanceof KtFunction) {
KtFunction function = (KtFunction) declaration;
KtTypeReference receiverTypeRef = function.getReceiverTypeReference();
if (receiverTypeRef != null) {
text = receiverTypeRef.getText() + "." + text;
}
text += "(";
List<KtParameter> parameters = function.getValueParameters();
for (KtParameter parameter : parameters) {
if (parameter.getName() != null) {
text += parameter.getName();
text += " ";
text += ":";
text += " ";
}
KtTypeReference typeReference = parameter.getTypeReference();
if (typeReference != null) {
text += typeReference.getText();
}
text += ", ";
}
if (parameters.size() > 0) text = text.substring(0, text.length() - 2);
text += ")";
KtTypeReference typeReference = function.getTypeReference();
if (typeReference != null) {
text += " ";
text += ":";
text += " ";
text += typeReference.getText();
} else {
text += computeReturnType(function);
}
}
}
return text;
}
private String computeReturnType(KtDeclaration ktDeclaration) {
KtFile ktFile = ktDeclaration.getContainingKtFile();
BindingContext bindingContext = KotlinAnalysisFileCache.INSTANCE.getAnalysisResult(ktFile).getAnalysisResult().getBindingContext();
DeclarationDescriptor declarationDescriptor = bindingContext.get(BindingContext.DECLARATION_TO_DESCRIPTOR, ktDeclaration);
if (declarationDescriptor instanceof CallableDescriptor) {
CallableDescriptor callableDescriptor = (CallableDescriptor) declarationDescriptor;
KotlinType returnType = callableDescriptor.getReturnType();
if (returnType != null) {
return " : " + DescriptorRenderer.ONLY_NAMES_WITH_SHORT_TYPES.renderType(returnType);
}
}
return "";
}
}
| kotlin-eclipse-ui/src/org/jetbrains/kotlin/ui/editors/outline/PsiLabelProvider.java | /*******************************************************************************
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package org.jetbrains.kotlin.ui.editors.outline;
import java.util.List;
import org.eclipse.jdt.ui.ISharedImages;
import org.eclipse.jdt.ui.JavaUI;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.swt.graphics.Image;
import org.jetbrains.kotlin.psi.KtClass;
import org.jetbrains.kotlin.psi.KtClassInitializer;
import org.jetbrains.kotlin.psi.KtElement;
import org.jetbrains.kotlin.psi.KtFunction;
import org.jetbrains.kotlin.psi.KtPackageDirective;
import org.jetbrains.kotlin.psi.KtParameter;
import org.jetbrains.kotlin.psi.KtProperty;
import org.jetbrains.kotlin.psi.KtTypeReference;
public class PsiLabelProvider extends LabelProvider {
public static final String CLASS_INITIALIZER = "<class initializer>";
@Override
public String getText(Object element) {
if (element instanceof KtElement) {
return getPresentableElement((KtElement) element);
}
return "";
}
@Override
public Image getImage(Object element) {
String imageName = null;
if (element instanceof KtClass) {
if (((KtClass) element).isInterface()) {
imageName = ISharedImages.IMG_OBJS_INTERFACE;
} else {
imageName = ISharedImages.IMG_OBJS_CLASS;
}
} else if (element instanceof KtPackageDirective) {
imageName = ISharedImages.IMG_OBJS_PACKAGE;
} else if (element instanceof KtFunction) {
imageName = ISharedImages.IMG_OBJS_PUBLIC;
} else if (element instanceof KtProperty) {
imageName = ISharedImages.IMG_FIELD_PUBLIC;
}
if (imageName != null) {
return JavaUI.getSharedImages().getImage(imageName);
}
return null;
}
// Source code is taken from org.jetbrains.kotlin.idea.projectView.JetDeclarationTreeNode, updateImple()
private String getPresentableElement(KtElement declaration) {
String text = "";
if (declaration != null) {
text = declaration.getName();
if (text == null) return "";
if (declaration instanceof KtClassInitializer) {
text = CLASS_INITIALIZER;
} else if (declaration instanceof KtProperty) {
KtProperty property = (KtProperty) declaration;
KtTypeReference ref = property.getTypeReference();
if (ref != null) {
text += " ";
text += ":";
text += " ";
text += ref.getText();
}
} else if (declaration instanceof KtFunction) {
KtFunction function = (KtFunction) declaration;
KtTypeReference receiverTypeRef = function.getReceiverTypeReference();
if (receiverTypeRef != null) {
text = receiverTypeRef.getText() + "." + text;
}
text += "(";
List<KtParameter> parameters = function.getValueParameters();
for (KtParameter parameter : parameters) {
if (parameter.getName() != null) {
text += parameter.getName();
text += " ";
text += ":";
text += " ";
}
KtTypeReference typeReference = parameter.getTypeReference();
if (typeReference != null) {
text += typeReference.getText();
}
text += ", ";
}
if (parameters.size() > 0) text = text.substring(0, text.length() - 2);
text += ")";
KtTypeReference typeReference = function.getTypeReference();
if (typeReference != null) {
text += " ";
text += ":";
text += " ";
text += typeReference.getText();
}
}
}
return text;
}
}
| KT-14095 Eclipse: Outline view could show implicit types too
Change-Id: I158d7d44b8f1724f7fbffd4effcd1873747e2404
Signed-off-by: Simon Scholz <[email protected]>
| kotlin-eclipse-ui/src/org/jetbrains/kotlin/ui/editors/outline/PsiLabelProvider.java | KT-14095 Eclipse: Outline view could show implicit types too | <ide><path>otlin-eclipse-ui/src/org/jetbrains/kotlin/ui/editors/outline/PsiLabelProvider.java
<ide> import org.eclipse.jdt.ui.JavaUI;
<ide> import org.eclipse.jface.viewers.LabelProvider;
<ide> import org.eclipse.swt.graphics.Image;
<add>import org.jetbrains.kotlin.core.model.KotlinAnalysisFileCache;
<add>import org.jetbrains.kotlin.descriptors.CallableDescriptor;
<add>import org.jetbrains.kotlin.descriptors.DeclarationDescriptor;
<ide> import org.jetbrains.kotlin.psi.KtClass;
<ide> import org.jetbrains.kotlin.psi.KtClassInitializer;
<add>import org.jetbrains.kotlin.psi.KtDeclaration;
<ide> import org.jetbrains.kotlin.psi.KtElement;
<add>import org.jetbrains.kotlin.psi.KtFile;
<ide> import org.jetbrains.kotlin.psi.KtFunction;
<ide> import org.jetbrains.kotlin.psi.KtPackageDirective;
<ide> import org.jetbrains.kotlin.psi.KtParameter;
<ide> import org.jetbrains.kotlin.psi.KtProperty;
<ide> import org.jetbrains.kotlin.psi.KtTypeReference;
<add>import org.jetbrains.kotlin.renderer.DescriptorRenderer;
<add>import org.jetbrains.kotlin.resolve.BindingContext;
<add>import org.jetbrains.kotlin.types.KotlinType;
<ide>
<ide> public class PsiLabelProvider extends LabelProvider {
<ide>
<ide> text += ":";
<ide> text += " ";
<ide> text += ref.getText();
<add> } else {
<add> text += computeReturnType(property);
<ide> }
<ide> } else if (declaration instanceof KtFunction) {
<ide> KtFunction function = (KtFunction) declaration;
<ide> text += ":";
<ide> text += " ";
<ide> text += typeReference.getText();
<add> } else {
<add> text += computeReturnType(function);
<ide> }
<ide> }
<ide> }
<ide>
<ide> return text;
<ide> }
<add>
<add> private String computeReturnType(KtDeclaration ktDeclaration) {
<add> KtFile ktFile = ktDeclaration.getContainingKtFile();
<add> BindingContext bindingContext = KotlinAnalysisFileCache.INSTANCE.getAnalysisResult(ktFile).getAnalysisResult().getBindingContext();
<add> DeclarationDescriptor declarationDescriptor = bindingContext.get(BindingContext.DECLARATION_TO_DESCRIPTOR, ktDeclaration);
<add> if (declarationDescriptor instanceof CallableDescriptor) {
<add> CallableDescriptor callableDescriptor = (CallableDescriptor) declarationDescriptor;
<add> KotlinType returnType = callableDescriptor.getReturnType();
<add> if (returnType != null) {
<add> return " : " + DescriptorRenderer.ONLY_NAMES_WITH_SHORT_TYPES.renderType(returnType);
<add> }
<add> }
<add>
<add> return "";
<add> }
<ide> } |
|
Java | epl-1.0 | 223b56fa9d0e11d82e3b6d5cd6e291c792843e0b | 0 | jcryptool/crypto,ChristophSonnberger/crypto,tassadarius/crypto,kevinott/crypto,kevinott/crypto,jcryptool/crypto,ChristophSonnberger/crypto,tassadarius/crypto | package org.jcryptool.analysis.substitution.ui.modules;
import org.eclipse.osgi.util.NLS;
public class Messages extends NLS {
private static final String BUNDLE_NAME = "org.jcryptool.analysis.substitution.ui.modules.messages"; //$NON-NLS-1$
public static String StatisticsSelector_0;
public static String StatisticsSelector_1;
public static String StatisticsSelector_2;
public static String StatisticsSelector_4;
public static String StatisticsSelector_5;
public static String StatisticsSelector_6;
public static String SubstitutionAnalysisConfigPanel_0;
public static String SubstitutionAnalysisConfigPanel_10;
public static String SubstitutionAnalysisConfigPanel_11;
public static String SubstitutionAnalysisConfigPanel_12;
public static String SubstitutionAnalysisConfigPanel_13;
public static String SubstitutionAnalysisConfigPanel_14;
public static String SubstitutionAnalysisConfigPanel_15;
public static String SubstitutionAnalysisConfigPanel_16;
public static String SubstitutionAnalysisConfigPanel_18;
public static String SubstitutionAnalysisConfigPanel_19;
public static String SubstitutionAnalysisConfigPanel_20;
public static String SubstitutionAnalysisConfigPanel_22;
public static String SubstitutionAnalysisConfigPanel_23;
public static String SubstitutionAnalysisConfigPanel_8;
public static String SubstitutionAnalysisConfigPanel_9;
public static String SubstitutionAnalysisPanel_0;
public static String SubstitutionAnalysisPanel_1;
public static String SubstitutionAnalysisPanel_10;
public static String SubstitutionAnalysisPanel_12;
public static String SubstitutionAnalysisPanel_13;
public static String SubstitutionAnalysisPanel_2;
public static String SubstitutionAnalysisPanel_3;
public static String SubstitutionAnalysisPanel_4;
public static String SubstitutionAnalysisPanel_5;
public static String SubstitutionAnalysisPanel_6;
public static String SubstitutionAnalysisPanel_7;
public static String SubstitutionAnalysisPanel_8;
public static String SubstitutionAnalysisPanel_9;
public static String SubstitutionKeyEditor_8;
public static String SubstitutionKeyEditor_9;
public static String SubstKeyViewer_0;
public static String TextLoadController_0;
public static String TextLoadController_1;
public static String TextLoadController_3;
static {
// initialize resource bundle
NLS.initializeMessages(BUNDLE_NAME, Messages.class);
}
private Messages() {
}
}
| org.jcryptool.analysis.substitution/src/org/jcryptool/analysis/substitution/ui/modules/Messages.java | package org.jcryptool.analysis.substitution.ui.modules;
import org.eclipse.osgi.util.NLS;
public class Messages extends NLS {
private static final String BUNDLE_NAME = "org.jcryptool.analysis.substitution.ui.modules.messages"; //$NON-NLS-1$
public static String StatisticsSelector_0;
public static String StatisticsSelector_1;
public static String StatisticsSelector_2;
public static String StatisticsSelector_4;
public static String StatisticsSelector_5;
public static String StatisticsSelector_6;
public static String SubstitutionAnalysisConfigPanel_0;
public static String SubstitutionAnalysisConfigPanel_10;
public static String SubstitutionAnalysisConfigPanel_11;
public static String SubstitutionAnalysisConfigPanel_12;
public static String SubstitutionAnalysisConfigPanel_13;
public static String SubstitutionAnalysisConfigPanel_14;
public static String SubstitutionAnalysisConfigPanel_15;
public static String SubstitutionAnalysisConfigPanel_16;
public static String SubstitutionAnalysisConfigPanel_18;
public static String SubstitutionAnalysisConfigPanel_19;
public static String SubstitutionAnalysisConfigPanel_20;
public static String SubstitutionAnalysisConfigPanel_22;
public static String SubstitutionAnalysisConfigPanel_23;
public static String SubstitutionAnalysisConfigPanel_8;
public static String SubstitutionAnalysisConfigPanel_9;
public static String SubstitutionAnalysisPanel_0;
public static String SubstitutionAnalysisPanel_1;
public static String SubstitutionAnalysisPanel_10;
public static String SubstitutionAnalysisPanel_12;
public static String SubstitutionAnalysisPanel_13;
public static String SubstitutionAnalysisPanel_2;
public static String SubstitutionAnalysisPanel_3;
public static String SubstitutionAnalysisPanel_4;
public static String SubstitutionAnalysisPanel_5;
public static String SubstitutionAnalysisPanel_6;
public static String SubstitutionAnalysisPanel_7;
public static String SubstitutionAnalysisPanel_8;
public static String SubstitutionAnalysisPanel_9;
public static String SubstitutionKeyEditor_8;
public static String SubstitutionKeyEditor_9;
public static String SubstKeyViewer_0;
public static String TextLoadController_0;
public static String TextLoadController_1;
public static String TextLoadController_3;
static {
// initialize resource bundle
NLS.initializeMessages(BUNDLE_NAME, Messages.class);
}
private Messages() {
}
}
| again, Git found some changes that I can't see -- thank goodness only in
a single file this time
| org.jcryptool.analysis.substitution/src/org/jcryptool/analysis/substitution/ui/modules/Messages.java | again, Git found some changes that I can't see -- thank goodness only in a single file this time | ||
JavaScript | unlicense | 94cc1000992b675e7852918f483255935399dae1 | 0 | twolfson/esformatter-var-each | // Load in dependencies
var rocambole = require('rocambole');
var rocamboleToken = require('rocambole-token');
// Define a helper for creating a generic token
exports.createToken = function (options) {
return {
type: options.type, // e.g. Keyword, Whitespace
value: options.value, // e.g. 'var', ' '
root: options.root || null, // e.g. Program node
next: null,
prev: null
};
};
// Define helper for cloning a token chain
// DEV: The first and last nodes will lack a previous and next node respectively
exports.cloneTokenChain = function (tokens, options) {
// For each of the tokens
var newTokens = [];
tokens.forEach(function copyToken (token, index) {
// Clone our token
var newToken = exports.createToken({
type: token.type, // e.g. Keyword, Whitespace
value: token.value, // e.g. 'var', ' '
root: options.root // e.g. Program node
});
// If there is a previous token, attach to it
if (index > 0) {
var lastToken = newTokens[index - 1];
lastToken.next = newToken;
newToken.prev = lastToken;
}
// Save our tokens
newTokens.push(newToken);
});
// Return our new tokens
return newTokens;
};
// Handle setting of options
var options;
exports.setOptions = function (_options) {
options = _options;
};
// Define our transform function
exports._transformNode = function (node) {
// If the token is not a variable declaration (e.g. `var`, `let`), exit early
// https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/Parser_API
// interface VariableDeclaration <: Declaration {
// type: "VariableDeclaration";
// declarations: [ VariableDeclarator ];
// kind: "var" | "let" | "const";
// }
// interface VariableDeclarator <: Node {
// type: "VariableDeclarator";
// id: Pattern;
// init: Expression | null;
// }
if (node.type !== 'VariableDeclaration') {
return node;
}
// If we are inside of a loop, do nothing (e.g. `for`, `while`, `do ... while`)
// DEV: Technically, a while/dowhile can't have a `var` but this is for good measure
var parentType = node.parent ? node.parent.type : '';
if (parentType.match(/WhileStatement|DoWhileStatement|ForStatement|ForInStatement/)) {
return node;
}
// Determine the terminating character
// Example: `var foo = bar;`
// varDeclaration = {type: VariableDeclaration, declarations: [...], kind: 'var'}
// declarators[*] = {type: VariableDeclarator, id: {type: Identifier, name: 'foo'}, init: {type: Literal, value: 'bar'}}
var varDeclaration = node;
var declarators = varDeclaration.declarations;
// Find the head and tail of the var declaration for reuse among its declaration clones
// e.g. `var hello = 'world', goodbye = 'moon';` -> ['var', ' '] = starting tokens; ['hello = world'] = declaration; ...; [';'] = endToken
var startingTokens = [];
rocamboleToken.eachInBetween(varDeclaration.startToken, declarators[0].startToken.prev, function saveToken (token) {
startingTokens.push(token);
});
// Determine whether we use automatic semicolon insertion or not
var endingSemicolonToken = rocamboleToken.findNext(varDeclaration.endToken, function findStatementTerminator (token) {
return rocamboleToken.isSemiColon(token) || rocamboleToken.isBr(token);
});
if (rocamboleToken.isBr(endingSemicolonToken)) {
endingSemicolonToken = null;
}
// Additionally, find the whitespace tokens before our `var` started (e.g. all indents/whitespace)
var preStartingTokens = [];
var token = varDeclaration.startToken.prev;
while (token) {
// If the token is whitespace or an indent, save it
// https://github.com/millermedeiros/rocambole-token/blob/fc03674b38f288dc545db0a5b2bdfd2d96cab170/is.js#L19-L25
if (token.type === 'WhiteSpace' || token.type === 'Indent') {
preStartingTokens.unshift(token);
token = token.prev;
// Otherwise, stop
// DEV: We ignore line breaks because this could be the start of a program
// Also, line breaks can lead to weird edge cases so we keep it consistent/predictable with a single one
} else {
break;
}
}
// Generate a `var` for each of the declarators
// e.g. `var hello = 'world', goodbye = 'moon';` -> `var hello = 'world'; var goodbye = 'moon';`
var declarations = declarators.map(function generateDeclaration (declarator, index) {
// DEV: A brief refresher on nodes and tokens
// Nodes are the AST representation of parts of a program (e.g. Identifier, VariableDeclaration)
// Tokens are the actual chunks of code these represent (e.g. Keyword, WhiteSpace)
// Tokens can be present without there being a node related to them
// Nodes have a prev (previous node on the same level), next (next node on the same level),
// parent (node containing our node), and sometimes something like a `body` key where they declare child nodes
// `body` varies from node type to node type
// Tokens don't have levels but are one giant chain
// Tokens have next (next token to render), prev (previous token to render),
// root (root node of the entire token chain -- i.e. a Program node)
// Nodes also have startToken and endToken which are the tokens that a node will start/end on
// (e.g. `var` is the start token for a VariableDeclaration)
// The only attachment from tokens to nodes is via `range` but this is brittle in rocambole so avoid it
// Generate a new declaration similar to the original
// Example: `var hello = 'world', goodbye = 'moon';` should use `var` and have a trailing semicolon `;`
// https://github.com/millermedeiros/rocambole/blob/a3d0d63d58b769d13bad288aca32c6e2f7766542/rocambole.js#L69-L74
var declaration = {
type: varDeclaration.type, // should always be `VariableDeclaration`
declarations: [declarator],
kind: varDeclaration.kind, // (e.g. `var`, `let`)
toString: varDeclaration.toString
// prev: bound later
// next: bound later
// startToken: bound later
// endToken: bound later
};
});
// Set up linkages for nodes
// DEV: None of these changes will affect the token chain
// However, each `node.toString()` is more/less impractical as there are no tokens bound to declarations
declarations.forEach(function connectNodes (declaration, index) {
// Attach declaration as the declarator's parent node
var declarator = declaration.declarations[0];
declarator.parent = declaration;
// If this is the first node, connect to var declaration's previous node
if (index === 0) {
var varDeclarationPrevNode = varDeclaration.prev;
if (varDeclarationPrevNode) {
declaration.prev = varDeclarationPrevNode;
varDeclarationPrevNode.next = declaration;
}
// Otherwise, connect to the last declaration
} else {
var lastDeclarationNode = declarations[index - 1];
declaration.prev = lastDeclarationNode;
lastDeclarationNode.next = declaration;
}
// If this is the last node, connect it to var declaration's next node
if (index === declarations.length - 1) {
var varDeclarationNextNode = varDeclaration.next;
if (varDeclarationNextNode) {
declaration.next = varDeclarationNextNode;
varDeclarationNextNode.prev = declaration;
}
// Otherwise, do nothing as we will connect to the next node via the previous if/else
} else {
// Do nothing
}
// In all cases, save this var declaration's parent node as this declaration node's parent
declaration.parent = varDeclaration.parent;
});
// Swap the declarations in the `body` of the parent block statement
// e.g. `BlockStatement.body = [{orig VariableDeclaration}, some other expressions]`
// -> `BlockStatement.body = [{new VariableDeclaration}, {another new VariableDeclaration}, some other expressions]`
var varDeclarationParentNode = varDeclaration.parent;
var varDeclarationParentBodyIndex = varDeclarationParentNode.body.indexOf(varDeclaration);
varDeclarationParentNode.body.splice(varDeclarationParentBodyIndex, 1, declarations);
// Handle token bindings (aka the annoying/hard part)
declarations.forEach(function defineAndAttachTokens (declaration, index) {
// DEV: We have a few linkages to perform:
// Example: HEAD; var a = 1, b = 2; TAIL
// VariableDeclaration tokens = ['var', ' ', 'a', ' ', '=', ..., ';']
// VariableDeclarator tokens = ['a', ' ', '=', ..., '1']
// We need to: Link FIRST VariableDeclaration to HEAD
// We need to: Link each VariableDeclaration start to VariableDeclarator start
// We need to: Link each VariableDeclaration end to VariableDeclarator end
// We need to Insert terminating content for each VariableDeclaration between VariableDeclaration's (e.g. `;\n `)
// We need to: Link LAST VariableDeclaration to TAIL
// Define our starting tokens (e.g. `['var', ' ']`)
var declarator = declaration.declarations[0];
var newStartingTokens = exports.cloneTokenChain(startingTokens, {
root: varDeclaration.startToken.root /* Always Program node */
});
// DEV: This is always defined as we always need a `var` keyword
declaration.startToken = newStartingTokens[0];
// FIRST STEP: Link FIRST VariableDeclaration to HEAD
// If there is a previous token, overwrite previous/next bindings
var varDeclarationPrevToken = varDeclaration.startToken.prev;
if (varDeclarationPrevToken) {
varDeclarationPrevToken.next = declaration.startToken;
declaration.startToken.prev = varDeclarationPrevToken;
// Otherwise, we are at the start of the program so update the Program node to consider this token to be the start
} else {
declaration.root.startToken = declaration.startToken;
}
// SECOND STEP: Link each VariableDeclaration start to VariableDeclarator start
// Connect the declaration's ending start token to our declarator's tokens
});
// var newEndingTokens = exports.cloneTokenChain(endingTokens, {
// root: varDeclaration.endToken.root /* Always Program node */
// });
// // Attach declarator's starts/ends to our declaration
// // Handle node
// declarator.parent = declaration;
// // Handle tokens
// // DEV: There is always a starting token since we need a `var`
// declarator.startToken.prev = newStartingTokens[newStartingTokens.length - 1];
// newStartingTokens[newStartingTokens.length - 1].next = declarator.startToken;
// // If there are ending tokens (e.g. `;`), then use them
// if (newEndingTokens.length) {
// declarator.endToken.next = newEndingTokens[0];
// newEndingTokens[0].prev = declarator.endToken;
// }
// // Attach declaration tokens
// declaration.startToken = newStartingTokens[0];
// // If there are ending tokens (e.g. `;`), then use them
// if (newEndingTokens.length) {
// declaration.endToken = newEndingTokens[newEndingTokens.length - 1];
// // Otherwise, use the same end token as the declarator
// } else {
// declaration.endToken = declarator.endToken;
// }
// // If this is the first declaration, replace the previous node/token of the original declaration
// if (index === 0) {
// // Replace nodes
// var varDeclarationPrevNode = varDeclaration.prev;
// if (varDeclarationPrevNode) {
// varDeclarationPrevNode.next = declaration;
// declaration.prev = varDeclarationPrevNode;
// }
// // Replace tokens
// // https://github.com/millermedeiros/rocambole-token/blob/fc03674b38f288dc545db0a5b2bdfd2d96cab170/remove.js#L10-L23
// var varDeclarationPrevToken = varDeclaration.startToken.prev;
// if (varDeclarationPrevToken) {
// varDeclarationPrevToken.next = declaration.startToken;
// declaration.startToken.prev = varDeclarationPrevToken;
// } else if (varDeclaration.startToken.root) {
// var varDeclarationRootNode = varDeclaration.startToken.root;
// varDeclarationRootNode.startToken = declaration.startToken;
// }
// // Otherwise, connect this to the previous declaration
// } else {
// // Attach nodes
// var lastDeclaration = declarations[index - 1];
// lastDeclaration.next = declaration;
// declaration.prev = lastDeclaration;
// // If there are prestarting tokens (e.g. whitespace before a `var`), then build/attach them now
// var linkingStartToken = declaration.startToken;
// var linkingEndToken = declaration.startToken;
// if (preStartingTokens.length) {
// var newPreStartingTokens = exports.cloneTokenChain(preStartingTokens, {
// root: varDeclaration.startToken.prev.root /* Always Program node*/
// });
// linkingStartToken = newPreStartingTokens[0];
// linkingEndToken = newPreStartingTokens[newPreStartingTokens.length - 1];
// linkingEndToken.next = declaration.startToken;
// declaration.startToken.prev = linkingEndToken;
// }
// // If there's a newline between declarators, then inject a newline between indentation and the last declaration
// if (lineBreakTokenBetweenDeclarators) {
// var lineBreakToken = exports.createToken({
// type: lineBreakTokenBetweenDeclarators.type, // e.g. 'LineBreak'
// value: lineBreakTokenBetweenDeclarators.value, // e.g. '\n', '\r\n'
// root: lineBreakTokenBetweenDeclarators.root // e.g. Program node
// });
// lineBreakToken.next = linkingStartToken;
// linkingStartToken.prev = lineBreakToken;
// linkingStartToken = lineBreakToken;
// }
// // Attach tokens
// lastDeclaration.endToken.next = linkingStartToken;
// linkingEndToken.prev = lastDeclaration.endToken;
// }
// // If this is the last declaration, replace the next node/token of the original declaration
// // DEV: There is no otherwise case as we take care of that when linking to "previous" declarations
// if (index === declarators.length - 1) {
// // Replace nodes
// var varDeclarationNextNode = varDeclaration.next;
// if (varDeclarationNextNode) {
// varDeclarationNextNode.prev = declaration;
// declaration.next = varDeclarationNextNode;
// }
// // Replace tokens
// // https://github.com/millermedeiros/rocambole-token/blob/fc03674b38f288dc545db0a5b2bdfd2d96cab170/remove.js#L10-L23
// var varDeclarationNextToken = varDeclaration.endToken.next;
// if (varDeclarationNextToken) {
// varDeclarationNextToken.prev = declaration.endToken;
// declaration.endToken.next = varDeclarationNextToken;
// } else if (varDeclaration.endToken.root) {
// var varDeclarationRootNode = varDeclaration.endToken.root;
// varDeclarationRootNode.endToken = declaration.endToken;
// }
// }
console.log('FINAL', node.parent.toString());
// Return the updated node
return node;
};
// Export our transformation
// https://github.com/millermedeiros/esformatter/tree/v0.4.3#transformbeforeast
exports.transform = function (ast) {
rocambole.moonwalk(ast, exports._transformNode);
};
| lib/esformatter-var-each.js | // Load in dependencies
var rocambole = require('rocambole');
var rocamboleToken = require('rocambole-token');
// Define a helper for creating a generic token
exports.createToken = function (options) {
return {
type: options.type, // e.g. Keyword, Whitespace
value: options.value, // e.g. 'var', ' '
root: options.root || null, // e.g. Program node
next: null,
prev: null
};
};
// Define helper for cloning a token chain
// DEV: The first and last nodes will lack a previous and next node respectively
exports.cloneTokenChain = function (tokens, options) {
// For each of the tokens
var newTokens = [];
tokens.forEach(function copyToken (token, index) {
// Clone our token
var newToken = exports.createToken({
type: token.type, // e.g. Keyword, Whitespace
value: token.value, // e.g. 'var', ' '
root: options.root // e.g. Program node
});
// If there is a previous token, attach to it
if (index > 0) {
var lastToken = newTokens[index - 1];
lastToken.next = newToken;
newToken.prev = lastToken;
}
// Save our tokens
newTokens.push(newToken);
});
// Return our new tokens
return newTokens;
};
// Handle setting of options
var options;
exports.setOptions = function (_options) {
options = _options;
};
// Define our transform function
exports._transformNode = function (node) {
// If the token is not a variable declaration (e.g. `var`, `let`), exit early
// https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/Parser_API
// interface VariableDeclaration <: Declaration {
// type: "VariableDeclaration";
// declarations: [ VariableDeclarator ];
// kind: "var" | "let" | "const";
// }
// interface VariableDeclarator <: Node {
// type: "VariableDeclarator";
// id: Pattern;
// init: Expression | null;
// }
if (node.type !== 'VariableDeclaration') {
return node;
}
// If we are inside of a loop, do nothing (e.g. `for`, `while`, `do ... while`)
// DEV: Technically, a while/dowhile can't have a `var` but this is for good measure
var parentType = node.parent ? node.parent.type : '';
if (parentType.match(/WhileStatement|DoWhileStatement|ForStatement|ForInStatement/)) {
return node;
}
// Determine the terminating character
// Example: `var foo = bar;`
// varDeclaration = {type: VariableDeclaration, declarations: [...], kind: 'var'}
// declarators[*] = {type: VariableDeclarator, id: {type: Identifier, name: 'foo'}, init: {type: Literal, value: 'bar'}}
var varDeclaration = node;
var declarators = varDeclaration.declarations;
// Find the head and tail of the var declaration for reuse among its declaration clones
// e.g. `var hello = 'world', goodbye = 'moon';` -> ['var', ' '] = starting tokens; ['hello = world'] = declaration; ...; [';'] = endToken
var startingTokens = [];
rocamboleToken.eachInBetween(varDeclaration.startToken, declarators[0].startToken.prev, function saveToken (token) {
startingTokens.push(token);
});
// Determine whether we use automatic semicolon insertion or not
var endingSemicolonToken = rocamboleToken.findNext(varDeclaration.endToken, function findStatementTerminator (token) {
return rocamboleToken.isSemiColon(token) || rocamboleToken.isBr(token);
});
if (rocamboleToken.isBr(endingSemicolonToken)) {
endingSemicolonToken = null;
}
// Additionally, find the whitespace tokens before our `var` started (e.g. all indents/whitespace)
var preStartingTokens = [];
var token = varDeclaration.startToken.prev;
while (token) {
// If the token is whitespace or an indent, save it
// https://github.com/millermedeiros/rocambole-token/blob/fc03674b38f288dc545db0a5b2bdfd2d96cab170/is.js#L19-L25
if (token.type === 'WhiteSpace' || token.type === 'Indent') {
preStartingTokens.unshift(token);
token = token.prev;
// Otherwise, stop
// DEV: We ignore line breaks because this could be the start of a program
// Also, line breaks can lead to weird edge cases so we keep it consistent/predictable with a single one
} else {
break;
}
}
// Generate a `var` for each of the declarators
// e.g. `var hello = 'world', goodbye = 'moon';` -> `var hello = 'world'; var goodbye = 'moon';`
var declarations = declarators.map(function generateDeclaration (declarator, index) {
// DEV: A brief refresher on nodes and tokens
// Nodes are the AST representation of parts of a program (e.g. Identifier, VariableDeclaration)
// Tokens are the actual chunks of code these represent (e.g. Keyword, WhiteSpace)
// Tokens can be present without there being a node related to them
// Nodes have a prev (previous node on the same level), next (next node on the same level),
// parent (node containing our node), and sometimes something like a `body` key where they declare child nodes
// `body` varies from node type to node type
// Tokens don't have levels but are one giant chain
// Tokens have next (next token to render), prev (previous token to render),
// root (root node of the entire token chain -- i.e. a Program node)
// Nodes also have startToken and endToken which are the tokens that a node will start/end on
// (e.g. `var` is the start token for a VariableDeclaration)
// The only attachment from tokens to nodes is via `range` but this is brittle in rocambole so avoid it
// Generate a new declaration similar to the original
// Example: `var hello = 'world', goodbye = 'moon';` should use `var` and have a trailing semicolon `;`
// https://github.com/millermedeiros/rocambole/blob/a3d0d63d58b769d13bad288aca32c6e2f7766542/rocambole.js#L69-L74
var declaration = {
type: varDeclaration.type, // should always be `VariableDeclaration`
declarations: [declarator],
kind: varDeclaration.kind, // (e.g. `var`, `let`)
toString: varDeclaration.toString
// prev: bound later
// next: bound later
// startToken: bound later
// endToken: bound later
};
});
// Set up linkages for nodes
// DEV: None of these changes will affect the token chain
// However, each `node.toString()` is more/less impractical as there are no tokens bound to declarations
declarations.forEach(function connectNodes (declaration, index) {
// Attach declaration as the declarator's parent node
var declarator = declaration.declarations[0];
declarator.parent = declaration;
// If this is the first node, connect to var declaration's previous node
if (index === 0) {
var varDeclarationPrevNode = varDeclaration.prev;
if (varDeclarationPrevNode) {
declaration.prev = varDeclarationPrevNode;
varDeclarationPrevNode.next = declaration;
}
// Otherwise, connect to the last declaration
} else {
var lastDeclarationNode = declarations[index - 1];
declaration.prev = lastDeclarationNode;
lastDeclarationNode.next = declaration;
}
// If this is the last node, connect it to var declaration's next node
if (index === declarations.length - 1) {
var varDeclarationNextNode = varDeclaration.next;
if (varDeclarationNextNode) {
declaration.next = varDeclarationNextNode;
varDeclarationNextNode.prev = declaration;
}
// Otherwise, do nothing as we will connect to the next node via the previous if/else
} else {
// Do nothing
}
// In all cases, save this var declaration's parent node as this declaration node's parent
declaration.parent = varDeclaration.parent;
});
// Swap the declarations in the `body` of the parent block statement
// e.g. `BlockStatement.body = [{orig VariableDeclaration}, some other expressions]`
// -> `BlockStatement.body = [{new VariableDeclaration}, {another new VariableDeclaration}, some other expressions]`
var varDeclarationParentNode = varDeclaration.parent;
var varDeclarationParentBodyIndex = varDeclarationParentNode.body.indexOf(varDeclaration);
varDeclarationParentNode.body.splice(varDeclarationParentBodyIndex, 1, declarations);
// // Copy the token chains for our varDeclaration onto the current declaration
// var newStartingTokens = exports.cloneTokenChain(startingTokens, {
// root: varDeclaration.startToken.root /* Always Program node */
// });
// var newEndingTokens = exports.cloneTokenChain(endingTokens, {
// root: varDeclaration.endToken.root /* Always Program node */
// });
// // Attach declarator's starts/ends to our declaration
// // Handle node
// declarator.parent = declaration;
// // Handle tokens
// // DEV: There is always a starting token since we need a `var`
// declarator.startToken.prev = newStartingTokens[newStartingTokens.length - 1];
// newStartingTokens[newStartingTokens.length - 1].next = declarator.startToken;
// // If there are ending tokens (e.g. `;`), then use them
// if (newEndingTokens.length) {
// declarator.endToken.next = newEndingTokens[0];
// newEndingTokens[0].prev = declarator.endToken;
// }
// // Attach declaration tokens
// declaration.startToken = newStartingTokens[0];
// // If there are ending tokens (e.g. `;`), then use them
// if (newEndingTokens.length) {
// declaration.endToken = newEndingTokens[newEndingTokens.length - 1];
// // Otherwise, use the same end token as the declarator
// } else {
// declaration.endToken = declarator.endToken;
// }
// // If this is the first declaration, replace the previous node/token of the original declaration
// if (index === 0) {
// // Replace nodes
// var varDeclarationPrevNode = varDeclaration.prev;
// if (varDeclarationPrevNode) {
// varDeclarationPrevNode.next = declaration;
// declaration.prev = varDeclarationPrevNode;
// }
// // Replace tokens
// // https://github.com/millermedeiros/rocambole-token/blob/fc03674b38f288dc545db0a5b2bdfd2d96cab170/remove.js#L10-L23
// var varDeclarationPrevToken = varDeclaration.startToken.prev;
// if (varDeclarationPrevToken) {
// varDeclarationPrevToken.next = declaration.startToken;
// declaration.startToken.prev = varDeclarationPrevToken;
// } else if (varDeclaration.startToken.root) {
// var varDeclarationRootNode = varDeclaration.startToken.root;
// varDeclarationRootNode.startToken = declaration.startToken;
// }
// // Otherwise, connect this to the previous declaration
// } else {
// // Attach nodes
// var lastDeclaration = declarations[index - 1];
// lastDeclaration.next = declaration;
// declaration.prev = lastDeclaration;
// // If there are prestarting tokens (e.g. whitespace before a `var`), then build/attach them now
// var linkingStartToken = declaration.startToken;
// var linkingEndToken = declaration.startToken;
// if (preStartingTokens.length) {
// var newPreStartingTokens = exports.cloneTokenChain(preStartingTokens, {
// root: varDeclaration.startToken.prev.root /* Always Program node*/
// });
// linkingStartToken = newPreStartingTokens[0];
// linkingEndToken = newPreStartingTokens[newPreStartingTokens.length - 1];
// linkingEndToken.next = declaration.startToken;
// declaration.startToken.prev = linkingEndToken;
// }
// // If there's a newline between declarators, then inject a newline between indentation and the last declaration
// if (lineBreakTokenBetweenDeclarators) {
// var lineBreakToken = exports.createToken({
// type: lineBreakTokenBetweenDeclarators.type, // e.g. 'LineBreak'
// value: lineBreakTokenBetweenDeclarators.value, // e.g. '\n', '\r\n'
// root: lineBreakTokenBetweenDeclarators.root // e.g. Program node
// });
// lineBreakToken.next = linkingStartToken;
// linkingStartToken.prev = lineBreakToken;
// linkingStartToken = lineBreakToken;
// }
// // Attach tokens
// lastDeclaration.endToken.next = linkingStartToken;
// linkingEndToken.prev = lastDeclaration.endToken;
// }
// // If this is the last declaration, replace the next node/token of the original declaration
// // DEV: There is no otherwise case as we take care of that when linking to "previous" declarations
// if (index === declarators.length - 1) {
// // Replace nodes
// var varDeclarationNextNode = varDeclaration.next;
// if (varDeclarationNextNode) {
// varDeclarationNextNode.prev = declaration;
// declaration.next = varDeclarationNextNode;
// }
// // Replace tokens
// // https://github.com/millermedeiros/rocambole-token/blob/fc03674b38f288dc545db0a5b2bdfd2d96cab170/remove.js#L10-L23
// var varDeclarationNextToken = varDeclaration.endToken.next;
// if (varDeclarationNextToken) {
// varDeclarationNextToken.prev = declaration.endToken;
// declaration.endToken.next = varDeclarationNextToken;
// } else if (varDeclaration.endToken.root) {
// var varDeclarationRootNode = varDeclaration.endToken.root;
// varDeclarationRootNode.endToken = declaration.endToken;
// }
// }
console.log('FINAL', node.parent.toString());
// Return the updated node
return node;
};
// Export our transformation
// https://github.com/millermedeiros/esformatter/tree/v0.4.3#transformbeforeast
exports.transform = function (ast) {
rocambole.moonwalk(ast, exports._transformNode);
};
| Making progress on script
| lib/esformatter-var-each.js | Making progress on script | <ide><path>ib/esformatter-var-each.js
<ide> var varDeclarationParentBodyIndex = varDeclarationParentNode.body.indexOf(varDeclaration);
<ide> varDeclarationParentNode.body.splice(varDeclarationParentBodyIndex, 1, declarations);
<ide>
<del> // // Copy the token chains for our varDeclaration onto the current declaration
<del> // var newStartingTokens = exports.cloneTokenChain(startingTokens, {
<del> // root: varDeclaration.startToken.root /* Always Program node */
<del> // });
<add> // Handle token bindings (aka the annoying/hard part)
<add> declarations.forEach(function defineAndAttachTokens (declaration, index) {
<add> // DEV: We have a few linkages to perform:
<add> // Example: HEAD; var a = 1, b = 2; TAIL
<add> // VariableDeclaration tokens = ['var', ' ', 'a', ' ', '=', ..., ';']
<add> // VariableDeclarator tokens = ['a', ' ', '=', ..., '1']
<add> // We need to: Link FIRST VariableDeclaration to HEAD
<add> // We need to: Link each VariableDeclaration start to VariableDeclarator start
<add> // We need to: Link each VariableDeclaration end to VariableDeclarator end
<add> // We need to Insert terminating content for each VariableDeclaration between VariableDeclaration's (e.g. `;\n `)
<add> // We need to: Link LAST VariableDeclaration to TAIL
<add> // Define our starting tokens (e.g. `['var', ' ']`)
<add> var declarator = declaration.declarations[0];
<add> var newStartingTokens = exports.cloneTokenChain(startingTokens, {
<add> root: varDeclaration.startToken.root /* Always Program node */
<add> });
<add> // DEV: This is always defined as we always need a `var` keyword
<add> declaration.startToken = newStartingTokens[0];
<add>
<add> // FIRST STEP: Link FIRST VariableDeclaration to HEAD
<add> // If there is a previous token, overwrite previous/next bindings
<add> var varDeclarationPrevToken = varDeclaration.startToken.prev;
<add> if (varDeclarationPrevToken) {
<add> varDeclarationPrevToken.next = declaration.startToken;
<add> declaration.startToken.prev = varDeclarationPrevToken;
<add> // Otherwise, we are at the start of the program so update the Program node to consider this token to be the start
<add> } else {
<add> declaration.root.startToken = declaration.startToken;
<add> }
<add>
<add> // SECOND STEP: Link each VariableDeclaration start to VariableDeclarator start
<add> // Connect the declaration's ending start token to our declarator's tokens
<add> });
<ide> // var newEndingTokens = exports.cloneTokenChain(endingTokens, {
<ide> // root: varDeclaration.endToken.root /* Always Program node */
<ide> // }); |
|
Java | apache-2.0 | 77e41923dfe0b76bf6b2a82dc30642748a333c19 | 0 | naver/yobi,brainagenet/yobi,ChangsungKim/TestRepository01,ihoneymon/yobi,bloodybear/yona,brainagenet/yobi,yona-projects/yona,Limseunghwan/oss,doortts/fork-yobi,doortts/forked-for-history,ahb0327/yobi,ahb0327/yobi,oolso/yobi,bloodybear/yona,Limseunghwan/oss,doortts/forked-for-history,violetag/demo,ahb0327/yobi,yona-projects/yona,yona-projects/yona,yona-projects/yona,naver/yobi,Limseunghwan/oss,ihoneymon/yobi,doortts/fork-yobi,ChangsungKim/TestRepository01,bloodybear/yona,naver/yobi,doortts/fork-yobi,oolso/yobi,doortts/fork-yobi,violetag/demo,doortts/forked-for-history,oolso/yobi,brainagenet/yobi,ihoneymon/yobi,bloodybear/yona | package utils;
import java.util.Date;
import org.joda.time.*;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
public class JodaDateUtil {
public static Date today() {
return LocalDate.now().toDate();
}
public static Date now() {
return DateTime.now().toDate();
}
public static Duration ago(DateTime time) {
return new Duration(time, DateTime.now());
}
public static Duration ago(Date time) {
return new Duration(new DateTime(time), DateTime.now());
}
}
| app/utils/JodaDateUtil.java | package utils;
import java.util.Date;
import org.joda.time.*;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
public class JodaDateUtil {
public static Date today() {
return LocalDate.now().toDate();
}
public static Date now() {
return DateTime.now().toDate();
}
public static Duration ago(DateTime time) {
return new Duration(DateTime.now(), time);
}
public static Duration ago(Date time) {
return new Duration(DateTime.now(), new DateTime(time));
}
}
| fix always show 'Just Now' in board
| app/utils/JodaDateUtil.java | fix always show 'Just Now' in board | <ide><path>pp/utils/JodaDateUtil.java
<ide> }
<ide>
<ide> public static Duration ago(DateTime time) {
<del> return new Duration(DateTime.now(), time);
<add> return new Duration(time, DateTime.now());
<ide> }
<ide>
<ide> public static Duration ago(Date time) {
<del> return new Duration(DateTime.now(), new DateTime(time));
<add> return new Duration(new DateTime(time), DateTime.now());
<ide> }
<ide>
<ide> } |
|
Java | apache-2.0 | c6c85b3e1561c58d4f6fc8d89b5d49c88f259acc | 0 | cefolger/needsmoredojo,cefolger/needsmoredojo | package com.chrisfolger.needsmoredojo.core.amd.importing;
import com.chrisfolger.needsmoredojo.core.amd.CompletionCallback;
import com.chrisfolger.needsmoredojo.core.amd.define.DefineResolver;
import com.chrisfolger.needsmoredojo.core.amd.define.DefineStatement;
import com.chrisfolger.needsmoredojo.core.amd.naming.NameResolver;
import com.chrisfolger.needsmoredojo.core.settings.DojoSettings;
import com.chrisfolger.needsmoredojo.core.util.JSUtil;
import com.intellij.lang.javascript.psi.*;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.Nullable;
public class ImportCreator
{
protected void createImport(String module, JSArrayLiteralExpression imports, JSParameterList parameters)
{
String parameter = NameResolver.defineToParameter(module, ServiceManager.getService(parameters.getProject(), DojoSettings.class).getExceptionsMap());
for(JSParameter element : parameters.getParameters())
{
if(element.getName().equals(parameter))
{
// already defined, so just exit
new Notification("needsmoredojo", "Add new AMD import", parameter + " is already defined ", NotificationType.INFORMATION).notify(parameters.getProject());
return;
}
}
if(imports.getChildren().length == 0)
{
// how to insert
/*
a few cases to consider:
define([
])
In my opinion, this is the most readable and the one ImportCreator will account for best:
define([
])
define([])
*/
String defineText = imports.getText();
if(defineText.contains("\n\n"))
{
JSUtil.addStatementBeforeElement(imports, imports.getLastChild(), String.format("'%s'", module), "\n");
}
else if(defineText.contains("\n"))
{
JSUtil.addStatementBeforeElement(imports, imports.getLastChild(), String.format("'%s'", module), "\n");
}
else
{
JSUtil.addStatementBeforeElement(imports, imports.getLastChild(), String.format("'%s'", module), "");
}
if(parameters.getChildren().length == 0)
{
JSUtil.addStatementBeforeElement(parameters, parameters.getLastChild(), parameter, "");
}
else
{
JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], parameter + ",", " ");
}
}
else
{
JSUtil.addStatementBeforeElement(imports, imports.getChildren()[0], String.format("'%s',", module), "\n");
if(parameters.getChildren().length > 0)
{
JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], parameter + ",", " ");
}
else
{
parameters.addAfter(JSUtil.createStatement(parameters, parameter), parameters.getFirstChild());
}
}
}
/**
* entry point for adding an AMD import to an existing define statement
*
* @param file the file that the import will be added to
* @param module the name of the module the user wants to add
* @return true if the module was added, false otherwise
*/
public boolean addImport(final PsiFile file, final String module)
{
DefineStatement items = new DefineResolver().getDefineStatementItems(file);
if(items == null)
{
return false;
}
return addImport(file, module, items);
}
public boolean addImport(final PsiFile file, final String module, DefineStatement statementToAddTo)
{
createImport(module, statementToAddTo.getArguments(), statementToAddTo.getFunction().getParameterList());
return true;
}
/**
* when the user adds a new import, this code searches for the nearest possible element
* to the cursor that they may have wanted to import and returns a suggested choice.
*
* I know this method is crude/hard to read and could be way more elegant, however it's good enough for now
* and produces quite a lot of benefit for low effort
*
* TODO this is a good candidate for unit testing...
*/
public String getSuggestedImport(@Nullable PsiElement element)
{
if(element == null)
{
return "";
}
String initialChoice = "";
PsiElement parent = element.getParent();
PsiElement previousSibling = element.getPrevSibling();
// (underscore represents cursor)
// we're just over a reference. Example: Site_Util
if (element.getParent() != null && element.getParent() instanceof JSReferenceExpression)
{
initialChoice = element.getText();
}
// we're inside a constructor. Example: new Button({_});
if(element.getParent() instanceof JSObjectLiteralExpression)
{
JSObjectLiteralExpression literal = (JSObjectLiteralExpression) element.getParent();
if(literal.getParent() != null && literal.getParent().getParent() != null && literal.getParent().getParent() instanceof JSNewExpression)
{
initialChoice = ((JSNewExpression)literal.getParent().getParent()).getMethodExpression().getText();
}
}
// we're inside a new expression Example: new Button_
if(parent != null && element.getParent().getParent() != null && parent.getParent() instanceof JSNewExpression)
{
initialChoice = ((JSNewExpression)parent.getParent()).getMethodExpression().getText();
}
// we're right after a new expression. Example: new Button({}) _
else if (previousSibling != null && previousSibling.getChildren().length > 0 && previousSibling.getChildren()[0] instanceof JSNewExpression)
{
initialChoice = ((JSNewExpression)previousSibling.getChildren()[0]).getMethodExpression().getText();
}
// right after a reference. Example: SiteUtil_
else if (previousSibling != null && previousSibling.getChildren().length > 0 && previousSibling.getChildren()[0] instanceof JSReferenceExpression)
{
initialChoice = previousSibling.getChildren()[0].getText();
}
// after a variable declaration. Example: var x = new Button({})_
else if (previousSibling != null && element.getPrevSibling() instanceof JSVarStatement)
{
JSVarStatement statement = (JSVarStatement) element.getPrevSibling();
for(JSVariable variable : statement.getVariables())
{
if(variable.getInitializer() instanceof JSNewExpression)
{
JSNewExpression expression = (JSNewExpression) variable.getInitializer();
// if these conditions are false, it just means the new expression is not complete
if(expression != null && expression.getMethodExpression() != null)
{
initialChoice = expression.getMethodExpression().getText();
}
}
}
}
return initialChoice;
}
}
| src/com/chrisfolger/needsmoredojo/core/amd/importing/ImportCreator.java | package com.chrisfolger.needsmoredojo.core.amd.importing;
import com.chrisfolger.needsmoredojo.core.amd.CompletionCallback;
import com.chrisfolger.needsmoredojo.core.amd.define.DefineResolver;
import com.chrisfolger.needsmoredojo.core.amd.define.DefineStatement;
import com.chrisfolger.needsmoredojo.core.amd.naming.NameResolver;
import com.chrisfolger.needsmoredojo.core.settings.DojoSettings;
import com.chrisfolger.needsmoredojo.core.util.JSUtil;
import com.intellij.lang.javascript.psi.*;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.Nullable;
public class ImportCreator
{
protected void createImport(String module, JSArrayLiteralExpression imports, JSParameterList parameters)
{
String parameter = NameResolver.defineToParameter(module, ServiceManager.getService(parameters.getProject(), DojoSettings.class).getExceptionsMap());
for(JSParameter element : parameters.getParameters())
{
if(element.getName().equals(parameter))
{
// already defined, so just exit
new Notification("needsmoredojo", "Add new AMD import", parameter + " is already defined ", NotificationType.INFORMATION).notify(parameters.getProject());
return;
}
}
if(imports.getChildren().length == 0)
{
// how to insert
/*
a few cases to consider:
define([
])
In my opinion, this is the most readable and the one ImportCreator will account for best:
define([
])
define([])
*/
String defineText = imports.getText();
if(defineText.contains("\n\n"))
{
JSUtil.addStatementBeforeElement(imports, imports.getLastChild(), String.format("'%s'", module), "\n");
}
else if(defineText.contains("\n"))
{
JSUtil.addStatementBeforeElement(imports, imports.getLastChild(), String.format("'%s'", module), "\n");
}
else
{
JSUtil.addStatementBeforeElement(imports, imports.getLastChild(), String.format("'%s'", module), "");
}
JSUtil.addStatementBeforeElement(parameters, parameters.getLastChild(), parameter, "");
}
else
{
JSUtil.addStatementBeforeElement(imports, imports.getChildren()[0], String.format("'%s',", module), "\n");
JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], parameter + ",", " ");
}
}
/**
* entry point for adding an AMD import to an existing define statement
*
* @param file the file that the import will be added to
* @param module the name of the module the user wants to add
* @return true if the module was added, false otherwise
*/
public boolean addImport(final PsiFile file, final String module)
{
DefineStatement items = new DefineResolver().getDefineStatementItems(file);
if(items == null)
{
return false;
}
return addImport(file, module, items);
}
public boolean addImport(final PsiFile file, final String module, DefineStatement statementToAddTo)
{
createImport(module, statementToAddTo.getArguments(), statementToAddTo.getFunction().getParameterList());
return true;
}
/**
* when the user adds a new import, this code searches for the nearest possible element
* to the cursor that they may have wanted to import and returns a suggested choice.
*
* I know this method is crude/hard to read and could be way more elegant, however it's good enough for now
* and produces quite a lot of benefit for low effort
*
* TODO this is a good candidate for unit testing...
*/
public String getSuggestedImport(@Nullable PsiElement element)
{
if(element == null)
{
return "";
}
String initialChoice = "";
PsiElement parent = element.getParent();
PsiElement previousSibling = element.getPrevSibling();
// (underscore represents cursor)
// we're just over a reference. Example: Site_Util
if (element.getParent() != null && element.getParent() instanceof JSReferenceExpression)
{
initialChoice = element.getText();
}
// we're inside a constructor. Example: new Button({_});
if(element.getParent() instanceof JSObjectLiteralExpression)
{
JSObjectLiteralExpression literal = (JSObjectLiteralExpression) element.getParent();
if(literal.getParent() != null && literal.getParent().getParent() != null && literal.getParent().getParent() instanceof JSNewExpression)
{
initialChoice = ((JSNewExpression)literal.getParent().getParent()).getMethodExpression().getText();
}
}
// we're inside a new expression Example: new Button_
if(parent != null && element.getParent().getParent() != null && parent.getParent() instanceof JSNewExpression)
{
initialChoice = ((JSNewExpression)parent.getParent()).getMethodExpression().getText();
}
// we're right after a new expression. Example: new Button({}) _
else if (previousSibling != null && previousSibling.getChildren().length > 0 && previousSibling.getChildren()[0] instanceof JSNewExpression)
{
initialChoice = ((JSNewExpression)previousSibling.getChildren()[0]).getMethodExpression().getText();
}
// right after a reference. Example: SiteUtil_
else if (previousSibling != null && previousSibling.getChildren().length > 0 && previousSibling.getChildren()[0] instanceof JSReferenceExpression)
{
initialChoice = previousSibling.getChildren()[0].getText();
}
// after a variable declaration. Example: var x = new Button({})_
else if (previousSibling != null && element.getPrevSibling() instanceof JSVarStatement)
{
JSVarStatement statement = (JSVarStatement) element.getPrevSibling();
for(JSVariable variable : statement.getVariables())
{
if(variable.getInitializer() instanceof JSNewExpression)
{
JSNewExpression expression = (JSNewExpression) variable.getInitializer();
// if these conditions are false, it just means the new expression is not complete
if(expression != null && expression.getMethodExpression() != null)
{
initialChoice = expression.getMethodExpression().getText();
}
}
}
}
return initialChoice;
}
}
| fix npe when importing a define with mismatched number of literals and parameters. fixes #142
| src/com/chrisfolger/needsmoredojo/core/amd/importing/ImportCreator.java | fix npe when importing a define with mismatched number of literals and parameters. fixes #142 | <ide><path>rc/com/chrisfolger/needsmoredojo/core/amd/importing/ImportCreator.java
<ide> JSUtil.addStatementBeforeElement(imports, imports.getLastChild(), String.format("'%s'", module), "");
<ide> }
<ide>
<del> JSUtil.addStatementBeforeElement(parameters, parameters.getLastChild(), parameter, "");
<add> if(parameters.getChildren().length == 0)
<add> {
<add> JSUtil.addStatementBeforeElement(parameters, parameters.getLastChild(), parameter, "");
<add> }
<add> else
<add> {
<add> JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], parameter + ",", " ");
<add> }
<ide> }
<ide> else
<ide> {
<ide> JSUtil.addStatementBeforeElement(imports, imports.getChildren()[0], String.format("'%s',", module), "\n");
<del> JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], parameter + ",", " ");
<add> if(parameters.getChildren().length > 0)
<add> {
<add> JSUtil.addStatementBeforeElement(parameters, parameters.getChildren()[0], parameter + ",", " ");
<add> }
<add> else
<add> {
<add> parameters.addAfter(JSUtil.createStatement(parameters, parameter), parameters.getFirstChild());
<add> }
<ide> }
<ide> }
<ide> |
|
Java | mit | 2c32e86ccd758f52cf4d762b8e04e0d36b5f6b9c | 0 | maxalthoff/intro-to-java-exercises | /*
Write a program that displays a clock and sets the time with the input from
three text fields.
*/
import javafx.application.Application;
import javafx.stage.Stage;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.HBox;
import javafx.scene.layout.BorderPane;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.event.EventHandler;
import javafx.geometry.Pos;
import javafx.geometry.Insets;
public class E16_07 extends Application {
@Override
public void start(Stage primaryStage) {
ClockPane clockPane = new ClockPane();
clockPane.setMaxWidth(clockPane.getW());
clockPane.setMaxHeight(clockPane.getH());
Label lbHour = new Label("Hour");
Label lbMinute = new Label("Minute");
Label lbSecond = new Label("Second");
TextField tfHour = new TextField();
TextField tfMinute = new TextField();
TextField tfSecond = new TextField();
tfHour.setPrefWidth(50);
tfMinute.setPrefWidth(50);
tfSecond.setPrefWidth(50);
HBox controlPane = new HBox(10);
controlPane.getChildren().addAll(lbHour, tfHour, lbMinute, tfMinute,
lbSecond, tfSecond);
controlPane.setAlignment(Pos.CENTER);
BorderPane pane = new BorderPane();
pane.setCenter(clockPane);
pane.setBottom(controlPane);
pane.setMargin(clockPane, new Insets(20));
pane.setMargin(controlPane, new Insets(0, 20, 10, 20));
EventHandler<KeyEvent> setTime = e -> {
if (e.getCode().equals(KeyCode.ENTER)) {
int hour = Integer.parseInt(tfHour.getText());
int minute = Integer.parseInt(tfMinute.getText());
int second = Integer.parseInt(tfSecond.getText());
clockPane.setHour(hour);
clockPane.setMinute(minute);
clockPane.setSecond(second);
}
};
tfHour.setOnKeyPressed(setTime);
tfMinute.setOnKeyPressed(setTime);
tfSecond.setOnKeyPressed(setTime);
Scene scene = new Scene(pane);
primaryStage.setTitle("E16_07");
primaryStage.setScene(scene);
primaryStage.setResizable(false);
primaryStage.show();
}
public static void main(String[] args) {
launch(args);
}
}
| 16/E16_07/E16_07.java | /*
Write a program that displays a clock and sets the time with the input from
three text fields.
*/
import javafx.application.Application;
import javafx.stage.Stage;
import javafx.scene.Scene;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.HBox;
import javafx.scene.layout.BorderPane;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.event.EventHandler;
import javafx.geometry.Pos;
import javafx.geometry.Insets;
public class E16_07 extends Application {
@Override
public void start(Stage primaryStage) {
ClockPane clockPane = new ClockPane();
Label lbHour = new Label("Hour");
Label lbMinute = new Label("Minute");
Label lbSecond = new Label("Second");
TextField tfHour = new TextField();
TextField tfMinute = new TextField();
TextField tfSecond = new TextField();
tfHour.setPrefWidth(50);
tfMinute.setPrefWidth(50);
tfSecond.setPrefWidth(50);
HBox controlPane = new HBox(10);
controlPane.getChildren().addAll(lbHour, tfHour, lbMinute, tfMinute,
lbSecond, tfSecond);
BorderPane pane = new BorderPane();
pane.setCenter(clockPane);
pane.setBottom(controlPane);
pane.setAlignment(clockPane, Pos.CENTER);
pane.setAlignment(controlPane, Pos.CENTER);
pane.setMargin(clockPane, new Insets(20));
pane.setMargin(controlPane, new Insets(0, 20, 10, 20));
EventHandler<KeyEvent> setTime = e -> {
if (e.getCode().equals(KeyCode.ENTER)) {
int hour = Integer.parseInt(tfHour.getText());
int minute = Integer.parseInt(tfMinute.getText());
int second = Integer.parseInt(tfSecond.getText());
clockPane.setHour(hour);
clockPane.setMinute(minute);
clockPane.setSecond(second);
}
};
tfHour.setOnKeyPressed(setTime);
tfMinute.setOnKeyPressed(setTime);
tfSecond.setOnKeyPressed(setTime);
Scene scene = new Scene(pane);
primaryStage.setTitle("E16_07");
primaryStage.setScene(scene);
primaryStage.show();
}
public static void main(String[] args) {
launch(args);
}
}
| Center clockPane and controlPane in window and set the stage to not resizable
| 16/E16_07/E16_07.java | Center clockPane and controlPane in window and set the stage to not resizable | <ide><path>6/E16_07/E16_07.java
<ide> @Override
<ide> public void start(Stage primaryStage) {
<ide> ClockPane clockPane = new ClockPane();
<add> clockPane.setMaxWidth(clockPane.getW());
<add> clockPane.setMaxHeight(clockPane.getH());
<ide>
<ide> Label lbHour = new Label("Hour");
<ide> Label lbMinute = new Label("Minute");
<ide> HBox controlPane = new HBox(10);
<ide> controlPane.getChildren().addAll(lbHour, tfHour, lbMinute, tfMinute,
<ide> lbSecond, tfSecond);
<add> controlPane.setAlignment(Pos.CENTER);
<ide>
<ide> BorderPane pane = new BorderPane();
<ide> pane.setCenter(clockPane);
<ide> pane.setBottom(controlPane);
<del> pane.setAlignment(clockPane, Pos.CENTER);
<del> pane.setAlignment(controlPane, Pos.CENTER);
<ide> pane.setMargin(clockPane, new Insets(20));
<ide> pane.setMargin(controlPane, new Insets(0, 20, 10, 20));
<ide>
<ide> Scene scene = new Scene(pane);
<ide> primaryStage.setTitle("E16_07");
<ide> primaryStage.setScene(scene);
<add> primaryStage.setResizable(false);
<ide> primaryStage.show();
<ide> }
<ide> |
|
Java | mit | a854ed54b4675d2ad75f66f829fb38793183cc47 | 0 | ancho85/edX-CTec001x-AndroidChat | package edu.galileo.android.androidchat;
import com.firebase.client.AuthData;
import com.firebase.client.Firebase;
import java.util.Map;
/**
* Created by carlos.gomez on 07/06/2016.
* login with google account [email protected] at console.firebase.google.com
* project is edx-ctec001x-androidchat
*/
public class FirebaseHelper {
private Firebase dataReference;
private final static String SEPARATOR = "___";
private final static String CHATS_PATH = "chats";
private final static String USERS_PATH = "users";
private final static String CONTACTS_PATH = "contacts";
private final static String FIREBASE_URL = "https://edx-ctec001x-androidchat.firebaseio.com/";
private static class SingletonHolder{
private static final FirebaseHelper INSTANCE = new FirebaseHelper();
}
public static FirebaseHelper getInstance(){
// Una sola instancia en toda la app
return SingletonHolder.INSTANCE;
}
public FirebaseHelper() {
this.dataReference = new Firebase(FIREBASE_URL);
}
public Firebase getDataReference() {
return dataReference;
}
public String getAuthUserEmail(){
AuthData authData = dataReference.getAuth();
String email = null;
if (authData != null){
Map<String, Object> providerData = authData.getProviderData();
email = providerData.get("email").toString();
}
return email;
}
public Firebase getUserReference(String email){
Firebase userReference = null;
if (email != null){
String emailKey = email.replace(".", "_"); //firebase no permite varios caracteres en la ruta, el punto por ej.
userReference = dataReference.getRoot().child(USERS_PATH).child(emailKey);
}
return userReference;
}
public Firebase getMyUserReference(){ //obtener referencia a MI usuario
return getUserReference(getAuthUserEmail());
}
public Firebase getContactsReference(String email){
return getUserReference(email).child(CONTACTS_PATH);
}
public Firebase getMyContactsReference(){
return getContactsReference(getAuthUserEmail());
}
public Firebase getOneContactReference(String mainEmail, String childEmail){
String childKey = childEmail.replace(".","_");
return getUserReference(mainEmail).child(CONTACTS_PATH).child(childKey);
}
}
| app/src/main/java/edu/galileo/android/androidchat/FirebaseHelper.java | package edu.galileo.android.androidchat;
import com.firebase.client.AuthData;
import com.firebase.client.Firebase;
import java.util.Map;
/**
* Created by carlos.gomez on 07/06/2016.
* login with google account [email protected] at console.firebase.google.com
* project is edx-ctec001x-androidchat
*/
public class FirebaseHelper {
private Firebase dataReference;
private final static String SEPARATOR = "___";
private final static String CHATS_PATH = "chats";
private final static String USERS_PATH = "users";
private final static String CONTACTS_PATH = "contacts";
private final static String FIREBASE_URL = "https://edx-ctec001x-androidchat.firebaseio.com/";
private static class SingletonHolder{
private static final FirebaseHelper INSTANCE = new FirebaseHelper();
}
public static FirebaseHelper getInstance(){
// Una sola instancia en toda la app
return SingletonHolder.INSTANCE;
}
public FirebaseHelper() {
this.dataReference = new Firebase(FIREBASE_URL);
}
public Firebase getDataReference() {
return dataReference;
}
public String getAuthUserEmail(){
AuthData authData = dataReference.getAuth();
String email = null;
if (authData != null){
Map<String, Object> providerData = authData.getProviderData();
email = providerData.get("email").toString();
}
return email;
}
}
| métodos para obtener referencia de usuario y en base a eso los contactos asociados
| app/src/main/java/edu/galileo/android/androidchat/FirebaseHelper.java | métodos para obtener referencia de usuario y en base a eso los contactos asociados | <ide><path>pp/src/main/java/edu/galileo/android/androidchat/FirebaseHelper.java
<ide> }
<ide> return email;
<ide> }
<add>
<add> public Firebase getUserReference(String email){
<add> Firebase userReference = null;
<add> if (email != null){
<add> String emailKey = email.replace(".", "_"); //firebase no permite varios caracteres en la ruta, el punto por ej.
<add> userReference = dataReference.getRoot().child(USERS_PATH).child(emailKey);
<add> }
<add> return userReference;
<add> }
<add>
<add> public Firebase getMyUserReference(){ //obtener referencia a MI usuario
<add> return getUserReference(getAuthUserEmail());
<add> }
<add>
<add> public Firebase getContactsReference(String email){
<add> return getUserReference(email).child(CONTACTS_PATH);
<add> }
<add>
<add> public Firebase getMyContactsReference(){
<add> return getContactsReference(getAuthUserEmail());
<add> }
<add>
<add> public Firebase getOneContactReference(String mainEmail, String childEmail){
<add> String childKey = childEmail.replace(".","_");
<add> return getUserReference(mainEmail).child(CONTACTS_PATH).child(childKey);
<add> }
<ide> } |
|
Java | apache-2.0 | e162d059b6adb0a05098c491ab0a0e88d65611bf | 0 | google/truth,cgruber/truth,google/truth,google/truth,cgruber/truth | /*
* Copyright (c) 2014 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.truth;
import com.google.common.base.Optional;
import javax.annotation.Nullable;
/**
* Propositions for Guava {@link Optional} subjects.
*
* <p>If you are looking a {@code java.util.Optional} subject, please read
* <a href="http://google.github.io/truth/faq#java8">faq#java8</a>
*
* @author Christian Gruber
*/
public final class GuavaOptionalSubject extends Subject<GuavaOptionalSubject, Optional<?>> {
GuavaOptionalSubject(FailureMetadata metadata, @Nullable Optional<?> actual) {
super(metadata, actual);
}
/** Fails if the {@link Optional}{@code <T>} is absent or the subject is null. */
public void isPresent() {
if (actual() == null || !actual().isPresent()) {
failWithoutActual("is present");
}
}
/** Fails if the {@link Optional}{@code <T>} is present or the subject is null.. */
public void isAbsent() {
if (actual() == null || actual().isPresent()) {
fail("is absent");
}
}
/**
* Fails if the {@link Optional}{@code <T>} does not have the given value or the subject is null.
*
* <p>To make more complex assertions on the optional's value split your assertion in two:
*
* <pre>{@code
* assertThat(myOptional).isPresent();
* assertThat(myOptional.get()).contains("foo");
* }</pre>
*/
public void hasValue(Object expected) {
if (expected == null) {
throw new NullPointerException("Optional cannot have a null value.");
}
if (actual() == null || !actual().isPresent()) {
fail("has value", expected);
} else {
Object actual = actual().get();
if (!actual.equals(expected)) {
if (actual.toString().equals(expected.toString())) {
failWithRawMessage(
"Not true that %s (%s) has value <%s> (%s)",
actualAsString(), actual.getClass(), expected, expected.getClass());
} else {
fail("has value", expected);
}
}
}
}
}
| core/src/main/java/com/google/common/truth/GuavaOptionalSubject.java | /*
* Copyright (c) 2014 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.truth;
import com.google.common.base.Optional;
import javax.annotation.Nullable;
/**
* Propositions for Guava {@link Optional} subjects.
*
* @author Christian Gruber
*/
public final class GuavaOptionalSubject extends Subject<GuavaOptionalSubject, Optional<?>> {
GuavaOptionalSubject(FailureMetadata metadata, @Nullable Optional<?> actual) {
super(metadata, actual);
}
/** Fails if the {@link Optional}{@code <T>} is absent or the subject is null. */
public void isPresent() {
if (actual() == null || !actual().isPresent()) {
failWithoutActual("is present");
}
}
/** Fails if the {@link Optional}{@code <T>} is present or the subject is null.. */
public void isAbsent() {
if (actual() == null || actual().isPresent()) {
fail("is absent");
}
}
/**
* Fails if the {@link Optional}{@code <T>} does not have the given value or the subject is null.
*
* <p>To make more complex assertions on the optional's value split your assertion in two:
*
* <pre>{@code
* assertThat(myOptional).isPresent();
* assertThat(myOptional.get()).contains("foo");
* }</pre>
*/
public void hasValue(Object expected) {
if (expected == null) {
throw new NullPointerException("Optional cannot have a null value.");
}
if (actual() == null || !actual().isPresent()) {
fail("has value", expected);
} else {
Object actual = actual().get();
if (!actual.equals(expected)) {
if (actual.toString().equals(expected.toString())) {
failWithRawMessage(
"Not true that %s (%s) has value <%s> (%s)",
actualAsString(), actual.getClass(), expected, expected.getClass());
} else {
fail("has value", expected);
}
}
}
}
}
| Add a note about Truth8 to GuavaOptionalSubject.
RELNOTES=n/a
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=182789881
| core/src/main/java/com/google/common/truth/GuavaOptionalSubject.java | Add a note about Truth8 to GuavaOptionalSubject. | <ide><path>ore/src/main/java/com/google/common/truth/GuavaOptionalSubject.java
<ide>
<ide> /**
<ide> * Propositions for Guava {@link Optional} subjects.
<add> *
<add> * <p>If you are looking a {@code java.util.Optional} subject, please read
<add> * <a href="http://google.github.io/truth/faq#java8">faq#java8</a>
<ide> *
<ide> * @author Christian Gruber
<ide> */ |
|
JavaScript | mit | 01b620d040d9a73d4f0c021b5994ec4ad5abf50e | 0 | jourdanrodrigues/controk-frontend-web,jourdanrodrigues/controk-frontend-web | describe("The user", function() {
it("should be able to open the list of clients", function() {
browser.get("");
element(by.css(".navigation-link")).click(); // Open the modal
element(by.css("a[ui-sref=clients]")).click(); // Click on the "clients" option
expect(element.all(by.css("table")).count()).toBe(1); // Check if the table exists
// Must exist five fields per client
expect(element.all(by.css("table tbody tr td.ng-binding")).then(function (elements) {
return elements.length % 5;
})).toBe(0);
});
it("should be able to open the details of a client", function() {
browser.get("#/clients");
element.all(by.css("table tbody tr td.ng-binding")).first().click();
expect(element.all(by.css("table")).count()).toBe(0); // Check if the table is gone
expect(element.all(by.css(".ui-view [ng-controller]")).count()).toBe(1); // Check if the view was injected
// 14 fields must exist
expect(element.all(by.css(".column")).count()).toBe(17); // + 3 field divisions
expect(element.all(by.css("label")).count()).toBe(14);
expect(element.all(by.css("input")).count()).toBe(13);
expect(element.all(by.css("select")).count()).toBe(1);
});
it("should be able to see the view for client creation", function() {
browser.get("#/clients");
expect(element.all(by.css("button-plus button")).count()).toBe(1);
element(by.css("button-plus button")).click();
expect(element.all(by.css("button-plus button")).count()).toBe(0);
expect(element.all(by.css("button-v button")).count()).toBe(1);
expect(element.all(by.css(".ui-view [ng-controller]")).count()).toBe(1); // Check if the view was injected
// 14 fields must exist
expect(element.all(by.css(".column")).count()).toBe(17); // + 3 field divisions
expect(element.all(by.css("label")).count()).toBe(14);
expect(element.all(by.css("input")).count()).toBe(13);
expect(element.all(by.css("select")).count()).toBe(1);
expect(element.all(by.css("select option")).count()).toBe(3);
});
}); | tests/clients-spec.js | describe("The user", function() {
it("should be able to open the list of clients", function() {
browser.get("");
element(by.css(".navigation-link")).click(); // Open the modal
element(by.css("a[ui-sref=clients]")).click(); // Click on the "clients" option
expect(element.all(by.css("table")).count()).toBe(1); // Check if the table exists
// Must exist five fields per client
expect(element.all(by.css("table tbody tr td.ng-binding")).then(function (elements) {
return elements.length % 5;
})).toBe(0);
});
it("should be able to open the details of a client", function() {
browser.get("");
element(by.css(".navigation-link")).click();
element(by.css("a[ui-sref=clients]")).click();
element.all(by.css("table tbody tr td.ng-binding")).first().click();
expect(element.all(by.css("table")).count()).toBe(0); // Check if the table is gone
expect(element.all(by.css(".ui-view [ng-controller]")).count()).toBe(1); // Check if the view was injected
// 14 fields must exist
expect(element.all(by.css(".column")).count()).toBe(17); // + 3 field divisions
expect(element.all(by.css("label")).count()).toBe(14);
expect(element.all(by.css("input")).count()).toBe(13);
expect(element.all(by.css("select")).count()).toBe(1);
});
}); | Added test for create view.
| tests/clients-spec.js | Added test for create view. | <ide><path>ests/clients-spec.js
<ide> })).toBe(0);
<ide> });
<ide> it("should be able to open the details of a client", function() {
<del> browser.get("");
<add> browser.get("#/clients");
<ide>
<del> element(by.css(".navigation-link")).click();
<del> element(by.css("a[ui-sref=clients]")).click();
<ide> element.all(by.css("table tbody tr td.ng-binding")).first().click();
<ide> expect(element.all(by.css("table")).count()).toBe(0); // Check if the table is gone
<ide> expect(element.all(by.css(".ui-view [ng-controller]")).count()).toBe(1); // Check if the view was injected
<ide> expect(element.all(by.css("input")).count()).toBe(13);
<ide> expect(element.all(by.css("select")).count()).toBe(1);
<ide> });
<add> it("should be able to see the view for client creation", function() {
<add> browser.get("#/clients");
<add>
<add> expect(element.all(by.css("button-plus button")).count()).toBe(1);
<add> element(by.css("button-plus button")).click();
<add>
<add> expect(element.all(by.css("button-plus button")).count()).toBe(0);
<add> expect(element.all(by.css("button-v button")).count()).toBe(1);
<add>
<add> expect(element.all(by.css(".ui-view [ng-controller]")).count()).toBe(1); // Check if the view was injected
<add> // 14 fields must exist
<add> expect(element.all(by.css(".column")).count()).toBe(17); // + 3 field divisions
<add> expect(element.all(by.css("label")).count()).toBe(14);
<add> expect(element.all(by.css("input")).count()).toBe(13);
<add> expect(element.all(by.css("select")).count()).toBe(1);
<add> expect(element.all(by.css("select option")).count()).toBe(3);
<add> });
<ide> }); |
|
Java | apache-2.0 | 962d23c00c1306691244b776e466a6fc1f906b4f | 0 | MovingBlocks/Terasology,Malanius/Terasology,Nanoware/Terasology,Malanius/Terasology,Nanoware/Terasology,kartikey0303/Terasology,MovingBlocks/Terasology,MovingBlocks/Terasology,kartikey0303/Terasology,Nanoware/Terasology | /*
* Copyright 2017 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.world.chunks.localChunkProvider;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.hash.TShortObjectHashMap;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.terasology.entitySystem.Component;
import org.terasology.entitySystem.entity.EntityManager;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.entity.EntityStore;
import org.terasology.entitySystem.event.Event;
import org.terasology.entitySystem.prefab.Prefab;
import org.terasology.math.geom.Vector3i;
import org.terasology.persistence.ChunkStore;
import org.terasology.world.BlockEntityRegistry;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.block.OnActivatedBlocks;
import org.terasology.world.block.OnAddedBlocks;
import org.terasology.world.chunks.Chunk;
import org.terasology.world.chunks.event.OnChunkGenerated;
import org.terasology.world.chunks.event.OnChunkLoaded;
import org.terasology.world.chunks.internal.ReadyChunkInfo;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class LocalChunkProviderTest {
private LocalChunkProvider chunkProvider;
private ChunkFinalizer chunkFinalizer;
private EntityManager entityManager;
private BlockManager blockManager;
private BlockEntityRegistry blockEntityRegistry;
private EntityRef worldEntity;
@Before
public void setUp() throws Exception {
entityManager = mock(EntityManager.class);
chunkFinalizer = mock(ChunkFinalizer.class);
blockManager = mock(BlockManager.class);
blockEntityRegistry = mock(BlockEntityRegistry.class);
worldEntity = mock(EntityRef.class);
chunkProvider = new LocalChunkProvider(null,
entityManager, null, blockManager, null, chunkFinalizer, null);
chunkProvider.setBlockEntityRegistry(blockEntityRegistry);
chunkProvider.setWorldEntity(worldEntity);
}
@Test
public void testCompleteUpdateHandlesFinalizedChunkIfReady() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
final InOrder inOrder = inOrder(worldEntity);
inOrder.verify(worldEntity).send(any(OnChunkGenerated.class));
inOrder.verify(worldEntity).send(any(OnChunkLoaded.class));
}
@Test
public void testCompleteUpdateGeneratesStoredEntities() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final ChunkProviderTestComponent testComponent = new ChunkProviderTestComponent();
final EntityStore entityStore = createEntityStoreWithComponents(testComponent);
final List<EntityStore> entityStores = Collections.singletonList(entityStore);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), entityStores);
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
final EntityRef mockEntity = mock(EntityRef.class);
when(entityManager.create()).thenReturn(mockEntity);
chunkProvider.completeUpdate();
verify(mockEntity).addComponent(eq(testComponent));
}
@Test
public void testCompleteUpdateGeneratesStoredEntitiesFromPrefab() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final Prefab prefab = mock(Prefab.class);
final ChunkProviderTestComponent testComponent = new ChunkProviderTestComponent();
final EntityStore entityStore = new EntityStore(prefab);
entityStore.addComponent(testComponent);
final List<EntityStore> entityStores = Collections.singletonList(entityStore);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), entityStores);
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
final EntityRef mockEntity = mock(EntityRef.class);
when(entityManager.create(any(Prefab.class))).thenReturn(mockEntity);
chunkProvider.completeUpdate();
verify(entityManager).create(eq(prefab));
verify(mockEntity).addComponent(eq(testComponent));
}
@Test
public void testCompleteUpdateRestoresEntitiesForRestoredChunks() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final ChunkStore chunkStore = mock(ChunkStore.class);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, new TShortObjectHashMap<>(), chunkStore, Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
verify(chunkStore).restoreEntities();
}
@Test
public void testCompleteUpdateSendsBlockAddedEvents() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final TShortObjectHashMap<TIntList> blockPositionMappings = new TShortObjectHashMap<>();
final short blockId = 42;
final EntityRef blockEntity = mock(EntityRef.class);
final Block block = new Block();
block.setEntity(blockEntity);
when(blockManager.getBlock(eq(blockId))).thenReturn(block);
final TIntArrayList positions = new TIntArrayList();
final Vector3i position = new Vector3i(1, 2, 3);
positions.add(position.x);
positions.add(position.y);
positions.add(position.z);
blockPositionMappings.put(blockId, positions);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMappings, mock(ChunkStore.class), Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
final ArgumentCaptor<Event> eventArgumentCaptor = ArgumentCaptor.forClass(Event.class);
verify(blockEntity, atLeastOnce()).send(eventArgumentCaptor.capture());
final Event event = eventArgumentCaptor.getAllValues().get(0);
assertThat(event, instanceOf(OnAddedBlocks.class));
assertThat(((OnAddedBlocks) event).getBlockPositions(), hasItem(position));
}
@Test
public void testCompleteUpdateSendsBlockActivatedEvents() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final TShortObjectHashMap<TIntList> blockPositionMappings = new TShortObjectHashMap<>();
final short blockId = 42;
final EntityRef blockEntity = mock(EntityRef.class);
final Block block = new Block();
block.setEntity(blockEntity);
when(blockManager.getBlock(eq(blockId))).thenReturn(block);
final TIntArrayList positions = new TIntArrayList();
final Vector3i position = new Vector3i(1, 2, 3);
positions.add(position.x);
positions.add(position.y);
positions.add(position.z);
blockPositionMappings.put(blockId, positions);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMappings, mock(ChunkStore.class), Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
final ArgumentCaptor<Event> eventArgumentCaptor = ArgumentCaptor.forClass(Event.class);
verify(blockEntity, atLeastOnce()).send(eventArgumentCaptor.capture());
final Event event = eventArgumentCaptor.getAllValues().get(1);
assertThat(event, instanceOf(OnActivatedBlocks.class));
assertThat(((OnActivatedBlocks) event).getBlockPositions(), hasItem(position));
}
private static EntityStore createEntityStoreWithComponents(Component... components) {
return createEntityStoreWithPrefabAndComponents(null, components);
}
private static EntityStore createEntityStoreWithPrefabAndComponents(Prefab prefab, Component... components) {
final EntityStore entityStore = new EntityStore(prefab);
for (Component component : components) {
entityStore.addComponent(component);
}
return entityStore;
}
private static Chunk mockChunkAt(final int x, final int y, final int z) {
final Chunk chunk = mock(Chunk.class);
when(chunk.getPosition()).thenReturn(new Vector3i(x, y, z));
return chunk;
}
private static class ChunkProviderTestComponent implements Component {
}
}
| engine-tests/src/test/java/org/terasology/world/chunks/localChunkProvider/LocalChunkProviderTest.java | /*
* Copyright 2017 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.world.chunks.localChunkProvider;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.hash.TShortObjectHashMap;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.terasology.entitySystem.Component;
import org.terasology.entitySystem.entity.EntityManager;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.entity.EntityStore;
import org.terasology.entitySystem.event.Event;
import org.terasology.entitySystem.prefab.Prefab;
import org.terasology.math.geom.Vector3i;
import org.terasology.persistence.ChunkStore;
import org.terasology.world.BlockEntityRegistry;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.block.OnActivatedBlocks;
import org.terasology.world.block.OnAddedBlocks;
import org.terasology.world.chunks.Chunk;
import org.terasology.world.chunks.event.OnChunkGenerated;
import org.terasology.world.chunks.event.OnChunkLoaded;
import org.terasology.world.chunks.internal.ReadyChunkInfo;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class LocalChunkProviderTest {
private LocalChunkProvider chunkProvider;
private ChunkFinalizer chunkFinalizer;
private EntityManager entityManager;
private BlockManager blockManager;
private BlockEntityRegistry blockEntityRegistry;
private EntityRef worldEntity;
@Before
public void setUp() throws Exception {
entityManager = mock(EntityManager.class);
chunkFinalizer = mock(ChunkFinalizer.class);
blockManager = mock(BlockManager.class);
blockEntityRegistry = mock(BlockEntityRegistry.class);
worldEntity = mock(EntityRef.class);
chunkProvider = new LocalChunkProvider(null,
entityManager, null, blockManager, null, chunkFinalizer, null);
chunkProvider.setBlockEntityRegistry(blockEntityRegistry);
chunkProvider.setWorldEntity(worldEntity);
}
@Test
public void testCompleteUpdateHandlesFinalizedChunkIfReady() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
final InOrder inOrder = inOrder(worldEntity);
inOrder.verify(worldEntity).send(any(OnChunkGenerated.class));
inOrder.verify(worldEntity).send(any(OnChunkLoaded.class));
}
@Test
public void testCompleteUpdateGeneratesStoredEntities() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final EntityStore entityStore = new EntityStore();
final ChunkProviderTestComponent testComponent = new ChunkProviderTestComponent();
entityStore.addComponent(testComponent);
final List<EntityStore> entityStores = Collections.singletonList(entityStore);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), entityStores);
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
final EntityRef mockEntity = mock(EntityRef.class);
when(entityManager.create()).thenReturn(mockEntity);
chunkProvider.completeUpdate();
verify(entityManager).create();
verify(mockEntity).addComponent(eq(testComponent));
}
@Test
public void testCompleteUpdateGeneratesStoredEntitiesFromPrefab() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final Prefab prefab = mock(Prefab.class);
final EntityStore entityStore = new EntityStore(prefab);
final ChunkProviderTestComponent testComponent = new ChunkProviderTestComponent();
entityStore.addComponent(testComponent);
final List<EntityStore> entityStores = Collections.singletonList(entityStore);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), entityStores);
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
final EntityRef mockEntity = mock(EntityRef.class);
when(entityManager.create(any(Prefab.class))).thenReturn(mockEntity);
chunkProvider.completeUpdate();
verify(entityManager).create(eq(prefab));
verify(mockEntity).addComponent(eq(testComponent));
}
@Test
public void testCompleteUpdateRestoresEntitiesForRestoredChunks() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final ChunkStore chunkStore = mock(ChunkStore.class);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, new TShortObjectHashMap<>(), chunkStore, Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
verify(chunkStore).restoreEntities();
}
@Test
public void testCompleteUpdateSendsBlockAddedEvents() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final TShortObjectHashMap<TIntList> blockPositionMapppings = new TShortObjectHashMap<>();
final short blockId = 42;
final EntityRef blockEntity = mock(EntityRef.class);
final Block block = new Block();
block.setEntity(blockEntity);
when(blockManager.getBlock(eq(blockId))).thenReturn(block);
final TIntArrayList positions = new TIntArrayList();
final Vector3i position = new Vector3i(1, 2, 3);
positions.add(position.x);
positions.add(position.y);
positions.add(position.z);
blockPositionMapppings.put(blockId, positions);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMapppings, mock(ChunkStore.class), Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
final ArgumentCaptor<Event> eventArgumentCaptor = ArgumentCaptor.forClass(Event.class);
verify(blockEntity, atLeastOnce()).send(eventArgumentCaptor.capture());
final Event event = eventArgumentCaptor.getAllValues().get(0);
assertThat(event, instanceOf(OnAddedBlocks.class));
assertThat(((OnAddedBlocks) event).getBlockPositions(), hasItem(position));
}
@Test
public void testCompleteUpdateSendsBlockActivatedEvents() throws Exception {
final Chunk chunk = mockChunkAt(0, 0, 0);
final TShortObjectHashMap<TIntList> blockPositionMapppings = new TShortObjectHashMap<>();
final short blockId = 42;
final EntityRef blockEntity = mock(EntityRef.class);
final Block block = new Block();
block.setEntity(blockEntity);
when(blockManager.getBlock(eq(blockId))).thenReturn(block);
final TIntArrayList positions = new TIntArrayList();
final Vector3i position = new Vector3i(1, 2, 3);
positions.add(position.x);
positions.add(position.y);
positions.add(position.z);
blockPositionMapppings.put(blockId, positions);
final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMapppings, mock(ChunkStore.class), Collections.emptyList());
when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
chunkProvider.completeUpdate();
final ArgumentCaptor<Event> eventArgumentCaptor = ArgumentCaptor.forClass(Event.class);
verify(blockEntity, atLeastOnce()).send(eventArgumentCaptor.capture());
final Event event = eventArgumentCaptor.getAllValues().get(1);
assertThat(event, instanceOf(OnActivatedBlocks.class));
assertThat(((OnActivatedBlocks) event).getBlockPositions(), hasItem(position));
}
private static Chunk mockChunkAt(final int x, final int y, final int z) {
final Chunk chunk = mock(Chunk.class);
when(chunk.getPosition()).thenReturn(new Vector3i(x, y, z));
return chunk;
}
/**
 * Empty marker {@link Component} implementation used as simple test fixture
 * data. NOTE(review): no usage is visible in this portion of the file —
 * presumably attached to stored entities by sibling tests; confirm against
 * the rest of the class.
 */
private static class ChunkProviderTestComponent implements Component {
}
}
| Refactor tests
| engine-tests/src/test/java/org/terasology/world/chunks/localChunkProvider/LocalChunkProviderTest.java | Refactor tests | <ide><path>ngine-tests/src/test/java/org/terasology/world/chunks/localChunkProvider/LocalChunkProviderTest.java
<ide> @Test
<ide> public void testCompleteUpdateGeneratesStoredEntities() throws Exception {
<ide> final Chunk chunk = mockChunkAt(0, 0, 0);
<del> final EntityStore entityStore = new EntityStore();
<ide> final ChunkProviderTestComponent testComponent = new ChunkProviderTestComponent();
<add> final EntityStore entityStore = createEntityStoreWithComponents(testComponent);
<add> final List<EntityStore> entityStores = Collections.singletonList(entityStore);
<add> final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), entityStores);
<add> when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
<add> final EntityRef mockEntity = mock(EntityRef.class);
<add> when(entityManager.create()).thenReturn(mockEntity);
<add>
<add> chunkProvider.completeUpdate();
<add>
<add> verify(mockEntity).addComponent(eq(testComponent));
<add> }
<add>
<add> @Test
<add> public void testCompleteUpdateGeneratesStoredEntitiesFromPrefab() throws Exception {
<add> final Chunk chunk = mockChunkAt(0, 0, 0);
<add> final Prefab prefab = mock(Prefab.class);
<add> final ChunkProviderTestComponent testComponent = new ChunkProviderTestComponent();
<add> final EntityStore entityStore = new EntityStore(prefab);
<ide> entityStore.addComponent(testComponent);
<ide> final List<EntityStore> entityStores = Collections.singletonList(entityStore);
<ide> final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), entityStores);
<ide> when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
<ide> final EntityRef mockEntity = mock(EntityRef.class);
<del> when(entityManager.create()).thenReturn(mockEntity);
<del>
<del> chunkProvider.completeUpdate();
<del>
<del> verify(entityManager).create();
<del> verify(mockEntity).addComponent(eq(testComponent));
<del> }
<del>
<del> @Test
<del> public void testCompleteUpdateGeneratesStoredEntitiesFromPrefab() throws Exception {
<del> final Chunk chunk = mockChunkAt(0, 0, 0);
<del> final Prefab prefab = mock(Prefab.class);
<del> final EntityStore entityStore = new EntityStore(prefab);
<del> final ChunkProviderTestComponent testComponent = new ChunkProviderTestComponent();
<del> entityStore.addComponent(testComponent);
<del> final List<EntityStore> entityStores = Collections.singletonList(entityStore);
<del> final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForNewChunk(chunk, new TShortObjectHashMap<>(), entityStores);
<del> when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
<del> final EntityRef mockEntity = mock(EntityRef.class);
<ide> when(entityManager.create(any(Prefab.class))).thenReturn(mockEntity);
<ide>
<ide> chunkProvider.completeUpdate();
<ide> @Test
<ide> public void testCompleteUpdateSendsBlockAddedEvents() throws Exception {
<ide> final Chunk chunk = mockChunkAt(0, 0, 0);
<del> final TShortObjectHashMap<TIntList> blockPositionMapppings = new TShortObjectHashMap<>();
<add> final TShortObjectHashMap<TIntList> blockPositionMappings = new TShortObjectHashMap<>();
<ide> final short blockId = 42;
<ide> final EntityRef blockEntity = mock(EntityRef.class);
<ide> final Block block = new Block();
<ide> positions.add(position.x);
<ide> positions.add(position.y);
<ide> positions.add(position.z);
<del> blockPositionMapppings.put(blockId, positions);
<del> final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMapppings, mock(ChunkStore.class), Collections.emptyList());
<add> blockPositionMappings.put(blockId, positions);
<add> final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMappings, mock(ChunkStore.class), Collections.emptyList());
<ide> when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
<ide>
<ide> chunkProvider.completeUpdate();
<ide> @Test
<ide> public void testCompleteUpdateSendsBlockActivatedEvents() throws Exception {
<ide> final Chunk chunk = mockChunkAt(0, 0, 0);
<del> final TShortObjectHashMap<TIntList> blockPositionMapppings = new TShortObjectHashMap<>();
<add> final TShortObjectHashMap<TIntList> blockPositionMappings = new TShortObjectHashMap<>();
<ide> final short blockId = 42;
<ide> final EntityRef blockEntity = mock(EntityRef.class);
<ide> final Block block = new Block();
<ide> positions.add(position.x);
<ide> positions.add(position.y);
<ide> positions.add(position.z);
<del> blockPositionMapppings.put(blockId, positions);
<del> final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMapppings, mock(ChunkStore.class), Collections.emptyList());
<add> blockPositionMappings.put(blockId, positions);
<add> final ReadyChunkInfo readyChunkInfo = ReadyChunkInfo.createForRestoredChunk(chunk, blockPositionMappings, mock(ChunkStore.class), Collections.emptyList());
<ide> when(chunkFinalizer.completeFinalization()).thenReturn(readyChunkInfo);
<ide>
<ide> chunkProvider.completeUpdate();
<ide> assertThat(((OnActivatedBlocks) event).getBlockPositions(), hasItem(position));
<ide> }
<ide>
<add> private static EntityStore createEntityStoreWithComponents(Component... components) {
<add> return createEntityStoreWithPrefabAndComponents(null, components);
<add> }
<add>
<add> private static EntityStore createEntityStoreWithPrefabAndComponents(Prefab prefab, Component... components) {
<add> final EntityStore entityStore = new EntityStore(prefab);
<add> for (Component component : components) {
<add> entityStore.addComponent(component);
<add> }
<add> return entityStore;
<add> }
<add>
<ide> private static Chunk mockChunkAt(final int x, final int y, final int z) {
<ide> final Chunk chunk = mock(Chunk.class);
<ide> when(chunk.getPosition()).thenReturn(new Vector3i(x, y, z)); |
|
Java | mit | ba0ffd4c3682eb4f50a87a74440ea29253e28bb4 | 0 | magx2/jSupla | package pl.grzeslowski.jsupla.server.netty;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.grzeslowski.jsupla.proto.structs.TSuplaDataPacket;
import pl.grzeslowski.jsupla.server.dispatchers.SuplaDataPacketDispatcher;
import static java.util.Objects.requireNonNull;
@ChannelHandler.Sharable
class SuplaHandler extends SimpleChannelInboundHandler<TSuplaDataPacket> {
private final Logger logger = LoggerFactory.getLogger(SuplaHandler.class);
private final SuplaDataPacketDispatcher dispatcher;
SuplaHandler(SuplaDataPacketDispatcher dispatcher) {
this.dispatcher = requireNonNull(dispatcher);
}
@Override
public void channelRead0(ChannelHandlerContext ctx, TSuplaDataPacket msg) throws Exception {
logger.trace("Got {}", msg);
dispatcher.dispatch(msg).ifPresent(dataPacket -> sendTSuplaDataPacket(ctx, dataPacket));
}
@SuppressWarnings("WeakerAccess")
protected void sendTSuplaDataPacket(ChannelHandlerContext ctx, TSuplaDataPacket dataPacket) {
logger.trace("Sending {}", dataPacket);
ctx.writeAndFlush(dataPacket);
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
logger.trace("channelInactive {}", ctx.name());
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error("ExceptionCaught in SuplaHandler", cause); // TODO better exception handling
ctx.close();
}
}
| src/main/java/pl/grzeslowski/jsupla/server/netty/SuplaHandler.java | package pl.grzeslowski.jsupla.server.netty;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.grzeslowski.jsupla.proto.structs.TSuplaDataPacket;
import pl.grzeslowski.jsupla.server.dispatchers.SuplaDataPacketDispatcher;
import static java.util.Objects.requireNonNull;
@ChannelHandler.Sharable
class SuplaHandler extends SimpleChannelInboundHandler<TSuplaDataPacket> {
private final Logger logger = LoggerFactory.getLogger(SuplaHandler.class);
private final SuplaDataPacketDispatcher dispatcher;
SuplaHandler(SuplaDataPacketDispatcher dispatcher) {
this.dispatcher = requireNonNull(dispatcher);
}
@Override
public void channelRead0(ChannelHandlerContext ctx, TSuplaDataPacket msg) throws Exception {
logger.trace("Got {}", msg);
dispatcher.dispatch(msg).ifPresent(ctx::writeAndFlush);
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
logger.trace("channelInactive {}", ctx.name());
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error("ExceptionCaught in SuplaHandler", cause); // TODO better exception handling
ctx.close();
}
}
| Additional method for sending data packet
| src/main/java/pl/grzeslowski/jsupla/server/netty/SuplaHandler.java | Additional method for sending data packet | <ide><path>rc/main/java/pl/grzeslowski/jsupla/server/netty/SuplaHandler.java
<ide> @Override
<ide> public void channelRead0(ChannelHandlerContext ctx, TSuplaDataPacket msg) throws Exception {
<ide> logger.trace("Got {}", msg);
<del> dispatcher.dispatch(msg).ifPresent(ctx::writeAndFlush);
<add> dispatcher.dispatch(msg).ifPresent(dataPacket -> sendTSuplaDataPacket(ctx, dataPacket));
<add> }
<add>
<add> @SuppressWarnings("WeakerAccess")
<add> protected void sendTSuplaDataPacket(ChannelHandlerContext ctx, TSuplaDataPacket dataPacket) {
<add> logger.trace("Sending {}", dataPacket);
<add> ctx.writeAndFlush(dataPacket);
<ide> }
<ide>
<ide> @Override |
|
Java | apache-2.0 | 7a99532fe8b1cece9c308f1cb5fa28963837e577 | 0 | mduerig/jackrabbit-oak,mduerig/jackrabbit-oak,mduerig/jackrabbit-oak,mduerig/jackrabbit-oak,mduerig/jackrabbit-oak | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment.file;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Lists.newLinkedList;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Maps.newLinkedHashMap;
import static com.google.common.collect.Sets.newHashSet;
import static java.lang.String.format;
import static java.lang.Thread.currentThread;
import static java.nio.ByteBuffer.wrap;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.segment.SegmentId.isDataSegmentId;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileLock;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import com.google.common.base.Predicates;
import com.google.common.base.Stopwatch;
import com.google.common.base.Supplier;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBlob;
import org.apache.jackrabbit.oak.plugins.blob.ReferenceCollector;
import org.apache.jackrabbit.oak.segment.RecordCache;
import org.apache.jackrabbit.oak.segment.RecordCache.DeduplicationCache;
import org.apache.jackrabbit.oak.segment.RecordId;
import org.apache.jackrabbit.oak.segment.Segment;
import org.apache.jackrabbit.oak.segment.SegmentBufferWriter;
import org.apache.jackrabbit.oak.segment.SegmentGraph.SegmentGraphVisitor;
import org.apache.jackrabbit.oak.segment.SegmentId;
import org.apache.jackrabbit.oak.segment.SegmentNodeState;
import org.apache.jackrabbit.oak.segment.SegmentNodeStore;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
import org.apache.jackrabbit.oak.segment.SegmentStore;
import org.apache.jackrabbit.oak.segment.SegmentTracker;
import org.apache.jackrabbit.oak.segment.SegmentVersion;
import org.apache.jackrabbit.oak.segment.SegmentWriter;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The storage implementation for tar files.
*/
public class FileStore implements SegmentStore {
/** Logger instance */
private static final Logger log = LoggerFactory.getLogger(FileStore.class);
private static final int MB = 1024 * 1024;
private static final Pattern FILE_NAME_PATTERN =
Pattern.compile("(data|bulk)((0|[1-9][0-9]*)[0-9]{4})([a-z])?.tar");
private static final String FILE_NAME_FORMAT = "data%05d%s.tar";
private static final String JOURNAL_FILE_NAME = "journal.log";
private static final String LOCK_FILE_NAME = "repo.lock";
/**
* GC counter for logging purposes
*/
private static final AtomicLong GC_COUNT = new AtomicLong(0);
static final boolean MEMORY_MAPPING_DEFAULT =
"64".equals(System.getProperty("sun.arch.data.model", "32"));
private final SegmentTracker tracker;
private final File directory;
private final BlobStore blobStore;
private final int maxFileSize;
private final boolean memoryMapping;
private volatile List<TarReader> readers;
private int writeNumber;
private File writeFile;
private TarWriter writer;
private final RandomAccessFile journalFile;
private final RandomAccessFile lockFile;
private final FileLock lock;
/**
* The latest head state.
*/
private final AtomicReference<RecordId> head;
/**
* The persisted head of the root journal, used to determine whether the
* latest {@link #head} value should be written to the disk.
*/
private final AtomicReference<RecordId> persistedHead;
/**
* The background flush thread. Automatically flushes the TarMK state
* once every five seconds.
*/
private final BackgroundThread flushThread;
/**
* The background compaction thread. Compacts the TarMK contents whenever
* triggered by the {@link #gc()} method.
*/
private final BackgroundThread compactionThread;
/**
* This background thread periodically asks the {@code SegmentGCOptions}
* to compare the approximate size of the repository with the available disk
* space. The result of this comparison is stored in the state of this
* {@code FileStore}.
*/
private final BackgroundThread diskSpaceThread;
private final SegmentGCOptions gcOptions;
/**
* Flag to request revision cleanup during the next flush.
*/
private final AtomicBoolean cleanupNeeded = new AtomicBoolean(false);
/**
* List of old tar file generations that are waiting to be removed. They can
* not be removed immediately, because they first need to be closed, and the
* JVM needs to release the memory mapped file references.
*/
private final List<File> pendingRemove = newLinkedList();
/**
* Version of the segment storage format.
*/
private final SegmentVersion version;
/**
* {@code GCMonitor} monitoring this instance's gc progress
*/
private final GCMonitor gcMonitor;
/**
* Represents the approximate size on disk of the repository.
*/
private final AtomicLong approximateSize;
/**
* This flag is periodically updated by calling the {@code SegmentGCOptions}
* at regular intervals.
*/
private final AtomicBoolean sufficientDiskSpace;
/**
* Flag signalling shutdown of the file store
*/
private volatile boolean shutdown;
private final ReadWriteLock fileStoreLock = new ReentrantReadWriteLock();
private final FileStoreStats stats;
/**
* Create a new instance of a {@link Builder} for a file store.
* @param directory directory where the tar files are stored
* @return a new {@link Builder} instance.
*/
@Nonnull
public static Builder builder(@Nonnull File directory) {
return new Builder(checkNotNull(directory));
}
/**
* Builder for creating {@link FileStore} instances.
*/
public static class Builder {
private final File directory;
private BlobStore blobStore; // null -> store blobs inline
private NodeState root = EMPTY_NODE;
private int maxFileSize = 256;
private int cacheSize; // 0 -> DEFAULT_MEMORY_CACHE_SIZE
private boolean memoryMapping;
private final LoggingGCMonitor gcMonitor = new LoggingGCMonitor();
private StatisticsProvider statsProvider = StatisticsProvider.NOOP;
private SegmentVersion version = SegmentVersion.LATEST_VERSION;
private SegmentGCOptions gcOptions = SegmentGCOptions.DEFAULT;
private Builder(File directory) {
this.directory = directory;
}
/**
* Specify the {@link BlobStore}.
* @param blobStore
* @return this instance
*/
@Nonnull
public Builder withBlobStore(@Nonnull BlobStore blobStore) {
this.blobStore = checkNotNull(blobStore);
return this;
}
/**
* Specify the initial root node state for the file store
* @param root
* @return this instance
*/
@Nonnull
public Builder withRoot(@Nonnull NodeState root) {
this.root = checkNotNull(root);
return this;
}
/**
* Maximal size of the generated tar files in MB.
* @param maxFileSize
* @return this instance
*/
@Nonnull
public Builder withMaxFileSize(int maxFileSize) {
this.maxFileSize = maxFileSize;
return this;
}
/**
* Size of the cache in MB.
* @param cacheSize
* @return this instance
*/
@Nonnull
public Builder withCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
return this;
}
/**
* Turn caching off
* @return this instance
*/
@Nonnull
public Builder withNoCache() {
this.cacheSize = -1;
return this;
}
/**
* Turn memory mapping on or off
* @param memoryMapping
* @return this instance
*/
@Nonnull
public Builder withMemoryMapping(boolean memoryMapping) {
this.memoryMapping = memoryMapping;
return this;
}
/**
* Set memory mapping to the default value based on OS properties
* @return this instance
*/
@Nonnull
public Builder withDefaultMemoryMapping() {
this.memoryMapping = MEMORY_MAPPING_DEFAULT;
return this;
}
/**
* {@link GCMonitor} for monitoring this files store's gc process.
* @param gcMonitor
* @return this instance
*/
@Nonnull
public Builder withGCMonitor(@Nonnull GCMonitor gcMonitor) {
this.gcMonitor.delegatee = checkNotNull(gcMonitor);
return this;
}
/**
* {@link StatisticsProvider} for collecting statistics related to FileStore
* @param statisticsProvider
* @return this instance
*/
@Nonnull
public Builder withStatisticsProvider(@Nonnull StatisticsProvider statisticsProvider) {
this.statsProvider = checkNotNull(statisticsProvider);
return this;
}
/**
* {@link SegmentVersion} the segment version of the store
* @param version
* @return this instance
*/
@Nonnull
public Builder withSegmentVersion(SegmentVersion version) {
this.version = checkNotNull(version);
return this;
}
@Nonnull
public Builder withGCOptions(SegmentGCOptions gcOptions) {
this.gcOptions = gcOptions;
return this;
}
/**
* Create a new {@link FileStore} instance with the settings specified in this
* builder. If none of the {@code with} methods have been called before calling
* this method, a file store with the following default settings is returned:
* <ul>
* <li>blob store: inline</li>
* <li>root: empty node</li>
* <li>max file size: 256MB</li>
* <li>cache size: 256MB</li>
* <li>memory mapping: on for 64 bit JVMs off otherwise</li>
* <li>whiteboard: none. No {@link GCMonitor} tracking</li>
* <li>statsProvider: StatisticsProvider.NOOP</li>
* </ul>
*
* @return a new file store instance
* @throws IOException
*/
@Nonnull
public FileStore build() throws IOException {
return new FileStore(this, false);
}
public ReadOnlyStore buildReadOnly() throws IOException {
return new ReadOnlyStore(this);
}
}
private FileStore(Builder builder, boolean readOnly) throws IOException {
this.version = builder.version;
if (readOnly) {
checkNotNull(builder.directory);
checkState(builder.directory.exists() && builder.directory.isDirectory());
} else {
checkNotNull(builder.directory).mkdirs();
}
// FIXME OAK-4102: Break cyclic dependency of FileStore and SegmentTracker
// SegmentTracker and FileStore have a cyclic dependency, which we should
// try to break. Here we pass along a not fully initialised instances of the
// FileStore to the SegmentTracker, which in turn is in later invoked to write
// the initial node state. Notably before this instance is fully initialised!
// Once consequence of this is that we cannot reliably determine the current
// GC generation while writing the initial head state. See further below.
if (builder.cacheSize < 0) {
this.tracker = new SegmentTracker(this, 0, version);
} else if (builder.cacheSize > 0) {
this.tracker = new SegmentTracker(this, builder.cacheSize, version);
} else {
this.tracker = new SegmentTracker(this, version);
}
this.blobStore = builder.blobStore;
this.directory = builder.directory;
this.maxFileSize = builder.maxFileSize * MB;
this.memoryMapping = builder.memoryMapping;
this.gcMonitor = builder.gcMonitor;
this.gcOptions = builder.gcOptions;
if (readOnly) {
journalFile = new RandomAccessFile(new File(directory,
JOURNAL_FILE_NAME), "r");
} else {
journalFile = new RandomAccessFile(new File(directory,
JOURNAL_FILE_NAME), "rw");
}
Map<Integer, Map<Character, File>> map = collectFiles(directory);
this.readers = newArrayListWithCapacity(map.size());
Integer[] indices = map.keySet().toArray(new Integer[map.size()]);
Arrays.sort(indices);
for (int i = indices.length - 1; i >= 0; i--) {
if (!readOnly) {
readers.add(TarReader.open(map.get(indices[i]), memoryMapping));
} else {
// only try to read-only recover the latest file as that might
// be the *only* one still being accessed by a writer
boolean recover = i == indices.length - 1;
readers.add(TarReader.openRO(map.get(indices[i]),
memoryMapping, recover));
}
}
long initialSize = size();
this.approximateSize = new AtomicLong(initialSize);
this.stats = new FileStoreStats(builder.statsProvider, this, initialSize);
if (!readOnly) {
if (indices.length > 0) {
this.writeNumber = indices[indices.length - 1] + 1;
} else {
this.writeNumber = 0;
}
this.writeFile = new File(directory, String.format(
FILE_NAME_FORMAT, writeNumber, "a"));
this.writer = new TarWriter(writeFile, stats);
}
RecordId id = null;
JournalReader journalReader = new JournalReader(new File(directory, JOURNAL_FILE_NAME));
try {
Iterator<String> heads = journalReader.iterator();
while (id == null && heads.hasNext()) {
String head = heads.next();
try {
RecordId last = RecordId.fromString(tracker, head);
SegmentId segmentId = last.getSegmentId();
if (containsSegment(
segmentId.getMostSignificantBits(),
segmentId.getLeastSignificantBits())) {
id = last;
} else {
log.warn("Unable to access revision {}, rewinding...", last);
}
} catch (IllegalArgumentException ignore) {
log.warn("Skipping invalid record id {}", head);
}
}
} finally {
journalReader.close();
}
journalFile.seek(journalFile.length());
if (!readOnly) {
lockFile = new RandomAccessFile(
new File(directory, LOCK_FILE_NAME), "rw");
lock = lockFile.getChannel().lock();
} else {
lockFile = null;
lock = null;
}
if (id != null) {
head = new AtomicReference<RecordId>(id);
persistedHead = new AtomicReference<RecordId>(id);
} else {
NodeBuilder nodeBuilder = EMPTY_NODE.builder();
nodeBuilder.setChildNode("root", builder.root);
head = new AtomicReference<RecordId>(tracker.getWriter().writeNode(
nodeBuilder.getNodeState()).getRecordId());
persistedHead = new AtomicReference<RecordId>(null);
}
if (!readOnly) {
flushThread = BackgroundThread.run(
"TarMK flush thread [" + directory + "]", 5000, // 5s interval
new Runnable() {
@Override
public void run() {
try {
flush();
} catch (IOException e) {
log.warn("Failed to flush the TarMK at {}", directory, e);
}
}
});
compactionThread = BackgroundThread.run(
"TarMK compaction thread [" + directory + "]", -1,
new Runnable() {
@Override
public void run() {
try {
maybeCompact(true);
} catch (IOException e) {
log.error("Error running compaction", e);
}
}
});
diskSpaceThread = BackgroundThread.run(
"TarMK disk space check [" + directory + "]", MINUTES.toMillis(1), new Runnable() {
@Override
public void run() {
checkDiskSpace();
}
});
} else {
flushThread = null;
compactionThread = null;
diskSpaceThread = null;
}
sufficientDiskSpace = new AtomicBoolean(true);
if (readOnly) {
log.info("TarMK ReadOnly opened: {} (mmap={})", directory,
memoryMapping);
} else {
log.info("TarMK opened: {} (mmap={})", directory, memoryMapping);
}
log.debug("TarMK readers {}", this.readers);
}
// FIXME OAK-4102: Break cyclic dependency of FileStore and SegmentTracker
// We cannot determine the current GC generation before the FileStore is fully
// initialised so just return 0 for now.
public int getGcGen() {
if (head == null) {
return 0; // not fully initialised
}
RecordId headId = head.get();
if (headId == null) {
return 0; // not fully initialised
}
return headId.getSegment().getGcGen();
}
public boolean maybeCompact(boolean cleanup) throws IOException {
gcMonitor.info("TarMK GC #{}: started", GC_COUNT.incrementAndGet());
Runtime runtime = Runtime.getRuntime();
long avail = runtime.totalMemory() - runtime.freeMemory();
// FIXME OAK-4281: Rework memory estimation for compaction
// What value should we use for delta?
long delta = 0;
long needed = delta * gcOptions.getMemoryThreshold();
if (needed >= avail) {
gcMonitor.skipped(
"TarMK GC #{}: not enough available memory {} ({} bytes), needed {} ({} bytes)," +
" last merge delta {} ({} bytes), so skipping compaction for now",
GC_COUNT,
humanReadableByteCount(avail), avail,
humanReadableByteCount(needed), needed,
humanReadableByteCount(delta), delta);
if (cleanup) {
cleanupNeeded.set(!gcOptions.isPaused());
}
return false;
}
Stopwatch watch = Stopwatch.createStarted();
boolean compacted = false;
int gainThreshold = gcOptions.getGainThreshold();
boolean runCompaction = true;
if (gainThreshold <= 0) {
gcMonitor.info("TarMK GC #{}: estimation skipped because gain threshold value ({} <= 0)", GC_COUNT,
gainThreshold);
} else if (gcOptions.isPaused()) {
gcMonitor.info("TarMK GC #{}: estimation skipped because compaction is paused", GC_COUNT);
} else {
gcMonitor.info("TarMK GC #{}: estimation started", GC_COUNT);
Supplier<Boolean> shutdown = newShutdownSignal();
CompactionGainEstimate estimate = estimateCompactionGain(shutdown);
if (shutdown.get()) {
gcMonitor.info("TarMK GC #{}: estimation interrupted. Skipping compaction.", GC_COUNT);
return false;
}
long gain = estimate.estimateCompactionGain();
runCompaction = gain >= gainThreshold;
if (runCompaction) {
gcMonitor.info(
"TarMK GC #{}: estimation completed in {} ({} ms). " +
"Gain is {}% or {}/{} ({}/{} bytes), so running compaction",
GC_COUNT, watch, watch.elapsed(MILLISECONDS), gain,
humanReadableByteCount(estimate.getReachableSize()), humanReadableByteCount(estimate.getTotalSize()),
estimate.getReachableSize(), estimate.getTotalSize());
} else {
if (estimate.getTotalSize() == 0) {
gcMonitor.skipped(
"TarMK GC #{}: estimation completed in {} ({} ms). " +
"Skipping compaction for now as repository consists of a single tar file only",
GC_COUNT, watch, watch.elapsed(MILLISECONDS));
} else {
gcMonitor.skipped(
"TarMK GC #{}: estimation completed in {} ({} ms). " +
"Gain is {}% or {}/{} ({}/{} bytes), so skipping compaction for now",
GC_COUNT, watch, watch.elapsed(MILLISECONDS), gain,
humanReadableByteCount(estimate.getReachableSize()), humanReadableByteCount(estimate.getTotalSize()),
estimate.getReachableSize(), estimate.getTotalSize());
}
}
}
if (runCompaction) {
if (!gcOptions.isPaused()) {
compact();
compacted = true;
} else {
gcMonitor.skipped("TarMK GC #{}: compaction paused", GC_COUNT);
}
}
if (cleanup) {
cleanupNeeded.set(!gcOptions.isPaused());
}
return compacted;
}
static Map<Integer, Map<Character, File>> collectFiles(File directory) {
Map<Integer, Map<Character, File>> dataFiles = newHashMap();
Map<Integer, File> bulkFiles = newHashMap();
for (File file : directory.listFiles()) {
Matcher matcher = FILE_NAME_PATTERN.matcher(file.getName());
if (matcher.matches()) {
Integer index = Integer.parseInt(matcher.group(2));
if ("data".equals(matcher.group(1))) {
Map<Character, File> files = dataFiles.get(index);
if (files == null) {
files = newHashMap();
dataFiles.put(index, files);
}
Character generation = 'a';
if (matcher.group(4) != null) {
generation = matcher.group(4).charAt(0);
}
checkState(files.put(generation, file) == null);
} else {
checkState(bulkFiles.put(index, file) == null);
}
}
}
if (!bulkFiles.isEmpty()) {
log.info("Upgrading TarMK file names in {}", directory);
if (!dataFiles.isEmpty()) {
// first put all the data segments at the end of the list
Integer[] indices =
dataFiles.keySet().toArray(new Integer[dataFiles.size()]);
Arrays.sort(indices);
int position = Math.max(
indices[indices.length - 1] + 1,
bulkFiles.size());
for (Integer index : indices) {
Map<Character, File> files = dataFiles.remove(index);
Integer newIndex = position++;
for (Character generation : newHashSet(files.keySet())) {
File file = files.get(generation);
File newFile = new File(
directory,
format(FILE_NAME_FORMAT, newIndex, generation));
log.info("Renaming {} to {}", file, newFile);
file.renameTo(newFile);
files.put(generation, newFile);
}
dataFiles.put(newIndex, files);
}
}
// then add all the bulk segments at the beginning of the list
Integer[] indices =
bulkFiles.keySet().toArray(new Integer[bulkFiles.size()]);
Arrays.sort(indices);
int position = 0;
for (Integer index : indices) {
File file = bulkFiles.remove(index);
Integer newIndex = position++;
File newFile = new File(
directory, format(FILE_NAME_FORMAT, newIndex, "a"));
log.info("Renaming {} to {}", file, newFile);
file.renameTo(newFile);
dataFiles.put(newIndex, singletonMap('a', newFile));
}
}
return dataFiles;
}
public long size() {
fileStoreLock.readLock().lock();
try {
long size = writeFile != null ? writeFile.length() : 0;
for (TarReader reader : readers) {
size += reader.size();
}
return size;
} finally {
fileStoreLock.readLock().unlock();
}
}
public int readerCount(){
fileStoreLock.readLock().lock();
try {
return readers.size();
} finally {
fileStoreLock.readLock().unlock();
}
}
/**
* Returns the number of segments in this TarMK instance.
*
* @return number of segments
*/
private int count() {
fileStoreLock.readLock().lock();
try {
int count = 0;
if (writer != null) {
count += writer.count();
}
for (TarReader reader : readers) {
count += reader.count();
}
return count;
} finally {
fileStoreLock.readLock().unlock();
}
}
    /**
     * Estimated compaction gain. The result will be undefined if stopped through
     * the passed {@code stop} signal.
     * @param stop signal for stopping the estimation process.
     * @return compaction gain estimate
     */
    CompactionGainEstimate estimateCompactionGain(Supplier<Boolean> stop) {
        CompactionGainEstimate estimate = new CompactionGainEstimate(getHead(), count(), stop);
        fileStoreLock.readLock().lock();
        try {
            // Feed every reader into the estimate; bail out early when the
            // caller signals cancellation (result is then undefined, see above).
            for (TarReader reader : readers) {
                reader.accept(estimate);
                if (stop.get()) {
                    break;
                }
            }
        } finally {
            fileStoreLock.readLock().unlock();
        }
        return estimate;
    }
    /**
     * @return runtime statistics collected for this store
     */
    public FileStoreStats getStats() {
        return stats;
    }
    /**
     * Flushes the current state to disk. Additionally runs revision cleanup
     * if it was requested since the last flush; the {@link #cleanupNeeded}
     * flag is reset atomically so a pending request is honoured exactly once.
     */
    public void flush() throws IOException {
        flush(cleanupNeeded.getAndSet(false));
    }
    /**
     * Flushes the segment writers and persists the current head to the
     * journal, if the head changed since the last flush or if {@code cleanup}
     * is requested. Afterwards attempts to delete tar files of obsolete
     * generations that were scheduled for removal earlier.
     *
     * @param cleanup whether to run revision cleanup after persisting the head
     * @throws IOException if flushing the writers or the journal fails
     */
    public void flush(boolean cleanup) throws IOException {
        synchronized (persistedHead) {
            RecordId before = persistedHead.get();
            RecordId after = head.get();
            if (cleanup || !after.equals(before)) {
                // Flush the segment writers *before* the journal so the
                // persisted head never references unwritten data (modulo the
                // known race below).
                tracker.getWriter().flush();
                // FIXME OAK-4291: FileStore.flush prone to races leading to corruption
                // There is a small windows that could lead to a corrupted store:
                // if we crash right after setting the persisted head but before any delay-flushed
                // SegmentBufferWriter instance flushes (see SegmentBufferWriterPool.returnWriter())
                // then that data is lost although it might be referenced from the persisted head already.
                // Need a test case. Possible fix: return a future from flush() and set the persisted head
                // in the completion handler.
                writer.flush();
                fileStoreLock.writeLock().lock();
                try {
                    log.debug("TarMK journal update {} -> {}", before, after);
                    journalFile.writeBytes(after.toString10() + " root " + System.currentTimeMillis()+"\n");
                    journalFile.getChannel().force(false);
                    persistedHead.set(after);
                } finally {
                    fileStoreLock.writeLock().unlock();
                }
                if (cleanup) {
                    // Explicitly give up reference to the previous root state
                    // otherwise they could block cleanup. See OAK-3347
                    before = null;
                    after = null;
                    pendingRemove.addAll(cleanup());
                }
            }
            // remove all obsolete tar generations; files that are still
            // memory-mapped may refuse deletion and are retried next flush
            Iterator<File> iterator = pendingRemove.iterator();
            while (iterator.hasNext()) {
                File file = iterator.next();
                log.debug("TarMK GC: Attempting to remove old file {}",
                        file);
                if (!file.exists() || file.delete()) {
                    log.debug("TarMK GC: Removed old file {}", file);
                    iterator.remove();
                } else {
                    log.warn("TarMK GC: Failed to remove old file {}. Will retry later.", file);
                }
            }
        }
    }
    /**
     * Run garbage collection on the segment level: reclaim those data segments
     * that are from an old segment generation and those bulk segments that are not
     * reachable anymore.
     * Those tar files that shrink by at least 25% are rewritten to a new tar generation
     * skipping the reclaimed segments.
     *
     * @return tar files that became obsolete; they must be closed and
     *         unmapped before actual deletion (see {@link #pendingRemove})
     * @throws IOException if rotating the writer or sweeping tar files fails
     */
    public List<File> cleanup() throws IOException {
        Stopwatch watch = Stopwatch.createStarted();
        long initialSize = size();
        Set<UUID> bulkRefs = newHashSet();
        Map<TarReader, TarReader> cleaned = newLinkedHashMap();
        fileStoreLock.writeLock().lock();
        try {
            gcMonitor.info("TarMK GC #{}: cleanup started. Current repository size is {} ({} bytes)",
                    GC_COUNT, humanReadableByteCount(initialSize), initialSize);
            newWriter();
            tracker.clearCache();
            // Suggest to the JVM that now would be a good time
            // to clear stale weak references in the SegmentTracker
            System.gc();
            for (SegmentId id : tracker.getReferencedSegmentIds()) {
                if (!isDataSegmentId(id.getLeastSignificantBits())) {
                    bulkRefs.add(id.asUUID());
                }
            }
            // Seed the map with identity entries; sweep replaces values below.
            for (TarReader reader : readers) {
                cleaned.put(reader, reader);
            }
        } finally {
            fileStoreLock.writeLock().unlock();
        }
        // FIXME OAK-4282: Make the number of retained gc generation configurable
        int generation = getGcGen() - 1;
        Set<UUID> reclaim = newHashSet();
        // First pass: mark segments eligible for reclamation in every reader.
        for (TarReader reader : cleaned.keySet()) {
            reader.mark(bulkRefs, reclaim, generation);
            // FIXME OAK-4165: Too verbose logging during revision gc
            log.info("Size of bulk references/reclaim set {}/{}", bulkRefs.size(), reclaim.size());
            if (shutdown) {
                gcMonitor.info("TarMK GC #{}: cleanup interrupted", GC_COUNT);
                break;
            }
        }
        // Second pass: sweep each reader, mapping it to its replacement.
        // Replacing the value of an existing key is not a structural
        // modification, so iterating the key set while putting is safe.
        for (TarReader reader : cleaned.keySet()) {
            cleaned.put(reader, reader.sweep(reclaim));
            if (shutdown) {
                gcMonitor.info("TarMK GC #{}: cleanup interrupted", GC_COUNT);
                break;
            }
        }
        List<TarReader> oldReaders = newArrayList();
        fileStoreLock.writeLock().lock();
        try {
            // Replace current list of reader with the cleaned readers taking care not to lose
            // any new reader that might have come in through concurrent calls to newWriter()
            List<TarReader> newReaders = newArrayList();
            for (TarReader reader : readers) {
                if (cleaned.containsKey(reader)) {
                    TarReader newReader = cleaned.get(reader);
                    if (newReader != null) {
                        newReaders.add(newReader);
                    }
                    if (newReader != reader) {
                        oldReaders.add(reader);
                    }
                } else {
                    newReaders.add(reader);
                }
            }
            readers = newReaders;
        } finally {
            fileStoreLock.writeLock().unlock();
        }
        // Close old readers *after* setting readers to the new readers to avoid accessing
        // a closed reader from readSegment()
        LinkedList<File> toRemove = newLinkedList();
        for (TarReader oldReader : oldReaders) {
            closeAndLogOnFail(oldReader);
            File file = oldReader.getFile();
            gcMonitor.info("TarMK GC #{}: cleanup marking file for deletion: {}", GC_COUNT, file.getName());
            toRemove.addLast(file);
        }
        long finalSize = size();
        approximateSize.set(finalSize);
        stats.reclaimed(initialSize - finalSize);
        gcMonitor.cleaned(initialSize - finalSize, finalSize);
        gcMonitor.info("TarMK GC #{}: cleanup completed in {} ({} ms). Post cleanup size is {} ({} bytes)" +
                " and space reclaimed {} ({} bytes).",
                GC_COUNT, watch, watch.elapsed(MILLISECONDS),
                humanReadableByteCount(finalSize), finalSize,
                humanReadableByteCount(initialSize - finalSize), initialSize - finalSize);
        return toRemove;
    }
    /**
     * Finds all external blob references that are currently accessible
     * in this repository and adds them to the given collector. Useful
     * for collecting garbage in an external data store.
     * <p>
     * Note that this method only collects blob references that are already
     * stored in the repository (at the time when this method is called), so
     * the garbage collector will need some other mechanism for tracking
     * in-memory references and references stored while this method is
     * running.
     * @param collector reference collector called back for each blob reference found
     * @throws IOException if flushing the writer or reading the tar files fails
     */
    public void collectBlobReferences(ReferenceCollector collector) throws IOException {
        tracker.getWriter().flush();
        List<TarReader> tarReaders = newArrayList();
        fileStoreLock.writeLock().lock();
        try {
            // Rotate the current writer so its content becomes visible via a reader.
            newWriter();
            tarReaders.addAll(this.readers);
        } finally {
            fileStoreLock.writeLock().unlock();
        }
        // FIXME OAK-4282: Make the number of retained gc generation configurable
        int generation = getGcGen() - 1;
        for (TarReader tarReader : tarReaders) {
            tarReader.collectBlobReferences(tracker, collector, generation);
        }
    }
    /**
     * Returns the cancellation policy for the compaction phase. If the disk
     * space was considered insufficient at least once during compaction (or if
     * the space was never sufficient to begin with), compaction is considered
     * canceled.
     * Furthermore when the file store is shutting down, compaction is considered
     * canceled.
     *
     * @return a flag indicating if compaction should be canceled.
     */
    private Supplier<Boolean> newCancelCompactionCondition() {
        return new Supplier<Boolean>() {
            // Latching flags: once set they stay set for the lifetime of
            // this supplier (see comment in get()).
            private boolean outOfDiskSpace;
            private boolean shutdown;
            @Override
            public Boolean get() {
                // The outOfDiskSpace and shutdown flags can only transition from false (their initial
                // values), to true. Once true, there should be no way to go back.
                if (!sufficientDiskSpace.get()) {
                    outOfDiskSpace = true;
                }
                if (FileStore.this.shutdown) {
                    this.shutdown = true;
                }
                return shutdown || outOfDiskSpace;
            }
            @Override
            public String toString() {
                // Human-readable cancellation reason, suitable for logging.
                if (outOfDiskSpace) {
                    return "Not enough disk space available";
                } else if (shutdown) {
                    return "FileStore shutdown request received";
                } else {
                    return "";
                }
            }
        };
    }
/**
* Returns a signal indication the file store shutting down.
* @return a shutdown signal
*/
private Supplier<Boolean> newShutdownSignal() {
return new Supplier<Boolean>() {
@Override
public Boolean get() {
return shutdown;
}
};
}
    /**
     * Copy every referenced record in data (non-bulk) segments. Bulk segments
     * are fully kept (they are only removed in cleanup, if there is no
     * reference to them).
     * <p>
     * After compacting the initial head this method retries up to
     * {@code gcOptions.getRetryCount()} times to catch up with concurrent
     * commits, and may finally force-compact the remaining commits under an
     * exclusive lock if {@code gcOptions.getForceAfterFail()} is set.
     */
    public void compact() throws IOException {
        gcMonitor.info("TarMK GC #{}: compaction started, gc options={}", GC_COUNT, gcOptions);
        Stopwatch watch = Stopwatch.createStarted();
        // FIXME OAK-4277: Finalise de-duplication caches
        // Make the capacity and initial depth of the deduplication cache configurable
        final DeduplicationCache<String> nodeCache = new DeduplicationCache<String>(1000000, 20);
        // FIXME OAK-4280: Compaction cannot be cancelled
        // FIXME OAK-4279: Rework offline compaction
        // This way of compacting has not progress logging and cannot be cancelled
        int gcGeneration = tracker.getGcGen() + 1;
        SegmentWriter writer = new SegmentWriter(this, tracker.getSegmentVersion(),
                new SegmentBufferWriter(this, tracker.getSegmentVersion(), "c", gcGeneration),
                new RecordCache<String>() {
                    @Override
                    protected Cache<String> getCache(int generation) {
                        return nodeCache;
                    }
                });
        SegmentNodeState before = getHead();
        long existing = before.getChildNode(SegmentNodeStore.CHECKPOINTS)
                .getChildNodeCount(Long.MAX_VALUE);
        if (existing > 1) {
            // Checkpoints pin old states and reduce the gain of compaction.
            gcMonitor.warn(
                    "TarMK GC #{}: compaction found {} checkpoints, you might need to run checkpoint cleanup",
                    GC_COUNT, existing);
        }
        SegmentNodeState after = compact(writer, before);
        gcMonitor.info("TarMK GC #{}: compacted {} to {}",
                GC_COUNT, before.getRecordId(), after.getRecordId());
        try {
            int cycles = 0;
            boolean success = false;
            while (cycles++ < gcOptions.getRetryCount()
                    && !(success = setHead(before, after))) {
                // Some other concurrent changes have been made.
                // Rebase (and compact) those changes on top of the
                // compacted state before retrying to set the head.
                gcMonitor.info("TarMK GC #{}: compaction detected concurrent commits while compacting. " +
                        "Compacting these commits. Cycle {}", GC_COUNT, cycles);
                SegmentNodeState head = getHead();
                after = compact(writer, head);
                gcMonitor.info("TarMK GC #{}: compacted {} against {} to {}",
                        GC_COUNT, head.getRecordId(), before.getRecordId(), after.getRecordId());
                before = head;
            }
            if (success) {
                tracker.getWriter().addCachedNodes(gcGeneration, nodeCache);
                // FIXME OAK-4285: Align cleanup of segment id tables with the new cleanup strategy
                // ith clean brutal we need to remove those ids that have been cleaned
                // i.e. those whose segment was from an old generation
                tracker.clearSegmentIdTables(Predicates.<SegmentId>alwaysFalse());
                // FIXME OAK-4283: Align GCMonitor API with implementation
                // Refactor GCMonitor: there is no more compaction map stats
                gcMonitor.compacted(new long[]{}, new long[]{}, new long[]{});
            } else {
                gcMonitor.info("TarMK GC #{}: compaction gave up compacting concurrent commits after {} cycles.",
                        GC_COUNT, cycles - 1);
                if (gcOptions.getForceAfterFail()) {
                    gcMonitor.info("TarMK GC #{}: compaction force compacting remaining commits", GC_COUNT);
                    if (!forceCompact(writer)) {
                        gcMonitor.warn("TarMK GC #{}: compaction failed to force compact remaining commits. " +
                                "Most likely compaction didn't get exclusive access to the store.", GC_COUNT);
                    }
                }
                // FIXME OAK-4284: Garbage left behind when compaction does not succeed
                // Giving up leaves garbage that will only be cleaned up 2 generations later!
            }
            gcMonitor.info("TarMK GC #{}: compaction completed in {} ({} ms), after {} cycles",
                    GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles - 1);
        } catch (InterruptedException e) {
            gcMonitor.error("TarMK GC #" + GC_COUNT + ": compaction interrupted", e);
            currentThread().interrupt();
        } catch (Exception e) {
            gcMonitor.error("TarMK GC #" + GC_COUNT + ": compaction encountered an error", e);
        }
    }
    /**
     * Writes a compacted copy of the given node state with the given writer
     * and flushes the writer so the returned state is fully persisted.
     *
     * @param writer segment writer used for the compacted copy
     * @param node   node state to compact
     * @return the compacted node state
     * @throws IOException if writing or flushing fails
     */
    private static SegmentNodeState compact(SegmentWriter writer, NodeState node) throws IOException {
        SegmentNodeState compacted = writer.writeNode(node);
        writer.flush();
        return compacted;
    }
    /**
     * Compacts the current head while holding the write lock of
     * {@link #rwLock}, thereby excluding concurrent commits (which take the
     * read lock in {@link #setHead}). Gives up when the lock cannot be
     * acquired within {@code gcOptions.getLockWaitTime()} seconds.
     *
     * @return {@code true} iff the compacted head was successfully set
     * @throws InterruptedException if interrupted while waiting for the lock
     * @throws IOException if writing the compacted state fails
     */
    private boolean forceCompact(SegmentWriter writer) throws InterruptedException, IOException {
        if (rwLock.writeLock().tryLock(gcOptions.getLockWaitTime(), TimeUnit.SECONDS)) {
            try {
                SegmentNodeState head = getHead();
                return setHead(head, compact(writer, head));
            } finally {
                rwLock.writeLock().unlock();
            }
        } else {
            return false;
        }
    }
public Iterable<SegmentId> getSegmentIds() {
fileStoreLock.readLock().lock();
try {
List<SegmentId> ids = newArrayList();
if (writer != null) {
for (UUID uuid : writer.getUUIDs()) {
ids.add(tracker.getSegmentId(
uuid.getMostSignificantBits(),
uuid.getLeastSignificantBits()));
}
}
for (TarReader reader : readers) {
for (UUID uuid : reader.getUUIDs()) {
ids.add(tracker.getSegmentId(
uuid.getMostSignificantBits(),
uuid.getLeastSignificantBits()));
}
}
return ids;
} finally {
fileStoreLock.readLock().unlock();
}
}
    /**
     * @return the segment tracker of this store
     */
    @Override
    public SegmentTracker getTracker() {
        return tracker;
    }
    /**
     * @return the current head revision wrapped in a fresh {@link SegmentNodeState}
     */
    @Override
    public SegmentNodeState getHead() {
        return new SegmentNodeState(head.get());
    }
    // FIXME OAK-4015: Expedite commits from the compactor
    // use a lock that can expedite important commits like compaction and checkpoints.
    // Regular commits take the read lock (setHead); forceCompact takes the
    // write lock to gain exclusive access to the head.
    private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
    /**
     * Atomically moves the head from {@code base} to {@code head}
     * (compare-and-set). Takes the read lock of {@link #rwLock} so that
     * {@link #forceCompact(SegmentWriter)} can exclude concurrent commits by
     * acquiring the write lock.
     *
     * @return {@code true} iff the current head equalled {@code base} and was
     *         replaced
     */
    @Override
    public boolean setHead(SegmentNodeState base, SegmentNodeState head) {
        rwLock.readLock().lock();
        try {
            RecordId id = this.head.get();
            return id.equals(base.getRecordId())
                    && this.head.compareAndSet(id, head.getRecordId());
        } finally {
            rwLock.readLock().unlock();
        }
    }
    /**
     * Closes this store: stops the background threads, flushes pending
     * changes, closes the writer, all readers, the journal and lock files,
     * and releases the repository lock.
     * <p>
     * Ordering matters: the background threads are closed and joined
     * *before* taking {@link #fileStoreLock} to avoid deadlocks with
     * background tasks that acquire it.
     */
    @Override
    public void close() {
        // Flag the store as shutting / shut down
        shutdown = true;
        // avoid deadlocks by closing (and joining) the background
        // threads before acquiring the synchronization lock
        closeAndLogOnFail(compactionThread);
        closeAndLogOnFail(flushThread);
        closeAndLogOnFail(diskSpaceThread);
        try {
            flush();
            // FIXME OAK-4291: FileStore.flush prone to races leading to corruption
            // Replace this with a way to "close" the underlying SegmentBufferWriter(s)
            // tracker.getWriter().dropCache();
            fileStoreLock.writeLock().lock();
            try {
                closeAndLogOnFail(writer);
                List<TarReader> list = readers;
                readers = newArrayList();
                for (TarReader reader : list) {
                    closeAndLogOnFail(reader);
                }
                if (lock != null) {
                    lock.release();
                }
                closeAndLogOnFail(lockFile);
                closeAndLogOnFail(journalFile);
            } finally {
                fileStoreLock.writeLock().unlock();
            }
        } catch (IOException e) {
            throw new RuntimeException(
                    "Failed to close the TarMK at " + directory, e);
        }
        System.gc(); // for any memory-mappings that are no longer used
        log.info("TarMK closed: {}", directory);
    }
@Override
public boolean containsSegment(SegmentId id) {
long msb = id.getMostSignificantBits();
long lsb = id.getLeastSignificantBits();
return containsSegment(msb, lsb);
}
    /**
     * Checks whether a segment with the given id bits is present in any tar
     * reader or in the current writer.
     * <p>
     * The readers are scanned twice: the writer may have been rotated into a
     * new reader concurrently (see {@link #newWriter()}), so a miss on the
     * writer requires re-checking the readers.
     */
    private boolean containsSegment(long msb, long lsb) {
        for (TarReader reader : readers) {
            if (reader.containsEntry(msb, lsb)) {
                return true;
            }
        }
        if (writer != null) {
            fileStoreLock.readLock().lock();
            try {
                if (writer.containsEntry(msb, lsb)) {
                    return true;
                }
            } finally {
                fileStoreLock.readLock().unlock();
            }
        }
        // the writer might have switched to a new file,
        // so we need to re-check the readers
        for (TarReader reader : readers) {
            if (reader.containsEntry(msb, lsb)) {
                return true;
            }
        }
        return false;
    }
@Override
public Segment readSegment(SegmentId id) {
long msb = id.getMostSignificantBits();
long lsb = id.getLeastSignificantBits();
for (TarReader reader : readers) {
try {
if (reader.isClosed()) {
// Cleanup might already have closed the file.
// The segment should be available from another file.
log.debug("Skipping closed tar file {}", reader);
continue;
}
ByteBuffer buffer = reader.readEntry(msb, lsb);
if (buffer != null) {
return new Segment(tracker, id, buffer);
}
} catch (IOException e) {
log.warn("Failed to read from tar file {}", reader, e);
}
}
if (writer != null) {
fileStoreLock.readLock().lock();
try {
try {
ByteBuffer buffer = writer.readEntry(msb, lsb);
if (buffer != null) {
return new Segment(tracker, id, buffer);
}
} catch (IOException e) {
log.warn("Failed to read from tar file {}", writer, e);
}
} finally {
fileStoreLock.readLock().unlock();
}
}
// the writer might have switched to a new file,
// so we need to re-check the readers
for (TarReader reader : readers) {
try {
if (reader.isClosed()) {
// Cleanup might already have closed the file.
// The segment should be available from another file.
log.info("Skipping closed tar file {}", reader);
continue;
}
ByteBuffer buffer = reader.readEntry(msb, lsb);
if (buffer != null) {
return new Segment(tracker, id, buffer);
}
} catch (IOException e) {
log.warn("Failed to read from tar file {}", reader, e);
}
}
throw new SegmentNotFoundException(id);
}
    /**
     * Writes the given segment to the current tar writer, rotating to a new
     * writer once {@link #maxFileSize} is reached, and updates the
     * approximate on-disk size of the repository.
     */
    @Override
    public void writeSegment(SegmentId id, byte[] data, int offset, int length) throws IOException {
        fileStoreLock.writeLock().lock();
        try {
            int generation = Segment.getGcGen(wrap(data, offset, length));
            long size = writer.writeEntry(
                    id.getMostSignificantBits(),
                    id.getLeastSignificantBits(),
                    data, offset, length, generation);
            if (size >= maxFileSize) {
                newWriter();
            }
            approximateSize.addAndGet(TarWriter.BLOCK_SIZE + length + TarWriter.getPaddingSize(length));
        } finally {
            fileStoreLock.writeLock().unlock();
        }
    }
    /**
     * Switch to a new tar writer.
     * This method may only be called when holding the write lock of {@link #fileStoreLock}
     * @throws IOException
     */
    private void newWriter() throws IOException {
        // Only rotate if the current writer actually holds data; an empty
        // writer is simply kept.
        if (writer.isDirty()) {
            writer.close();
            // Re-open the just-closed file as a reader and prepend it so the
            // most recent data is searched first.
            List<TarReader> list =
                    newArrayListWithCapacity(1 + readers.size());
            list.add(TarReader.open(writeFile, memoryMapping));
            list.addAll(readers);
            readers = list;
            writeNumber++;
            writeFile = new File(
                    directory,
                    String.format(FILE_NAME_FORMAT, writeNumber, "a"));
            writer = new TarWriter(writeFile, stats);
        }
    }
@Override
public Blob readBlob(String blobId) {
if (blobStore != null) {
return new BlobStoreBlob(blobStore, blobId);
}
throw new IllegalStateException("Attempt to read external blob with blobId [" + blobId + "] " +
"without specifying BlobStore");
}
    /**
     * @return the configured {@link BlobStore}, or {@code null} when blobs
     *         are stored inline
     */
    @Override
    public BlobStore getBlobStore() {
        return blobStore;
    }
    /**
     * Triggers segment garbage collection asynchronously on the background
     * compaction thread.
     */
    @Override
    public void gc() {
        compactionThread.trigger();
    }
public Map<String, Set<UUID>> getTarReaderIndex() {
Map<String, Set<UUID>> index = new HashMap<String, Set<UUID>>();
for (TarReader reader : readers) {
index.put(reader.getFile().getAbsolutePath(), reader.getUUIDs());
}
return index;
}
    /**
     * Returns the segment graph of the tar file with the given name: every
     * segment UUID in the file mapped to the UUIDs it references, or to
     * {@code null} when the file stores no graph entry for it. Returns an
     * empty map when no reader matches the file name.
     */
    public Map<UUID, List<UUID>> getTarGraph(String fileName) throws IOException {
        for (TarReader reader : readers) {
            if (fileName.equals(reader.getFile().getName())) {
                Map<UUID, List<UUID>> graph = newHashMap();
                // Seed with all UUIDs so segments without graph entries are present.
                for (UUID uuid : reader.getUUIDs()) {
                    graph.put(uuid, null);
                }
                Map<UUID, List<UUID>> g = reader.getGraph(false);
                if (g != null) {
                    graph.putAll(g);
                }
                return graph;
            }
        }
        return emptyMap();
    }
    /**
     * Resets both the in-memory and the persisted head to the record id
     * parsed from the given revision string. Exposed to callers through
     * {@link ReadOnlyStore#setRevision(String)}.
     */
    private void setRevision(String rootRevision) {
        fileStoreLock.writeLock().lock();
        try {
            RecordId id = RecordId.fromString(tracker, rootRevision);
            head.set(id);
            persistedHead.set(id);
        } finally {
            fileStoreLock.writeLock().unlock();
        }
    }
private void checkDiskSpace() {
long repositoryDiskSpace = approximateSize.get();
long availableDiskSpace = directory.getFreeSpace();
boolean updated = gcOptions.isDiskSpaceSufficient(repositoryDiskSpace, availableDiskSpace);
boolean previous = sufficientDiskSpace.getAndSet(updated);
if (previous && !updated) {
log.warn("Available disk space ({}) is too low, current repository size is approx. {}",
humanReadableByteCount(availableDiskSpace),
humanReadableByteCount(repositoryDiskSpace));
}
if (updated && !previous) {
log.info("Available disk space ({}) is sufficient again for repository operations, current repository size is approx. {}",
humanReadableByteCount(availableDiskSpace),
humanReadableByteCount(repositoryDiskSpace));
}
}
    /**
     * A read only {@link FileStore} implementation that supports
     * going back to old revisions.
     * <p>
     * All write methods are no-ops.
     */
    public static class ReadOnlyStore extends FileStore {
        // Delegates to the read-only variant of the FileStore constructor.
        private ReadOnlyStore(Builder builder) throws IOException {
            super(builder, true);
        }
        /**
         * Go to the specified {@code revision}
         *
         * @param revision record id string of the revision to move the head to
         */
        public void setRevision(String revision) {
            super.setRevision(revision);
        }
        /**
         * Include the ids of all segments transitively reachable through forward references from
         * {@code referencedIds}. See OAK-3864.
         */
        private static void includeForwardReferences(Iterable<TarReader> readers, Set<UUID> referencedIds)
            throws IOException {
            Set<UUID> fRefs = newHashSet(referencedIds);
            do {
                // Add direct forward references
                for (TarReader reader : readers) {
                    reader.calculateForwardReferences(fRefs);
                    if (fRefs.isEmpty()) {
                        break;  // Optimisation: bail out if no references left
                    }
                }
                // ... as long as new forward references are found.
            } while (referencedIds.addAll(fRefs));
        }
        /**
         * Build the graph of segments reachable from an initial set of segments
         * @param roots the initial set of segments
         * @param visitor visitor receiving call back while following the segment graph
         * @throws IOException
         */
        public void traverseSegmentGraph(
            @Nonnull Set<UUID> roots,
            @Nonnull SegmentGraphVisitor visitor) throws IOException {
            List<TarReader> readers = super.readers;
            includeForwardReferences(readers, roots);
            for (TarReader reader : readers) {
                reader.traverseSegmentGraph(checkNotNull(roots), checkNotNull(visitor));
            }
        }
        @Override
        public boolean setHead(SegmentNodeState base, SegmentNodeState head) {
            throw new UnsupportedOperationException("Read Only Store");
        }
        @Override
        public void writeSegment(SegmentId id, byte[] data,
                int offset, int length) {
            throw new UnsupportedOperationException("Read Only Store");
        }
        /**
         * no-op
         */
        @Override
        public void flush() { /* nop */ }
        @Override
        public LinkedList<File> cleanup() {
            throw new UnsupportedOperationException("Read Only Store");
        }
        @Override
        public void gc() {
            throw new UnsupportedOperationException("Read Only Store");
        }
        @Override
        public void compact() {
            throw new UnsupportedOperationException("Read Only Store");
        }
        @Override
        public boolean maybeCompact(boolean cleanup) {
            throw new UnsupportedOperationException("Read Only Store");
        }
    }
    /**
     * @return the segment storage format version of this store
     */
    public SegmentVersion getVersion() {
        return version;
    }
private static void closeAndLogOnFail(Closeable closeable) {
if (closeable != null) {
try {
closeable.close();
} catch (IOException ioe) {
// ignore and log
log.error(ioe.getMessage(), ioe);
}
}
}
private static class LoggingGCMonitor implements GCMonitor {
public GCMonitor delegatee = GCMonitor.EMPTY;
@Override
public void info(String message, Object... arguments) {
log.info(message, arguments);
delegatee.info(message, arguments);
}
@Override
public void warn(String message, Object... arguments) {
log.warn(message, arguments);
delegatee.warn(message, arguments);
}
@Override
public void error(String message, Exception exception) {
delegatee.error(message, exception);
}
@Override
public void skipped(String reason, Object... arguments) {
log.info(reason, arguments);
delegatee.skipped(reason, arguments);
}
@Override
public void compacted(long[] segmentCounts, long[] recordCounts, long[] compactionMapWeights) {
delegatee.compacted(segmentCounts, recordCounts, compactionMapWeights);
}
@Override
public void cleaned(long reclaimedSize, long currentSize) {
delegatee.cleaned(reclaimedSize, currentSize);
}
}
}
| oak-segment-next/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment.file;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Lists.newLinkedList;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Maps.newLinkedHashMap;
import static com.google.common.collect.Sets.newHashSet;
import static java.lang.String.format;
import static java.lang.Thread.currentThread;
import static java.nio.ByteBuffer.wrap;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.segment.SegmentId.isDataSegmentId;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileLock;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import com.google.common.base.Predicates;
import com.google.common.base.Stopwatch;
import com.google.common.base.Supplier;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBlob;
import org.apache.jackrabbit.oak.plugins.blob.ReferenceCollector;
import org.apache.jackrabbit.oak.segment.RecordCache;
import org.apache.jackrabbit.oak.segment.RecordCache.DeduplicationCache;
import org.apache.jackrabbit.oak.segment.RecordId;
import org.apache.jackrabbit.oak.segment.Segment;
import org.apache.jackrabbit.oak.segment.SegmentBufferWriter;
import org.apache.jackrabbit.oak.segment.SegmentGraph.SegmentGraphVisitor;
import org.apache.jackrabbit.oak.segment.SegmentId;
import org.apache.jackrabbit.oak.segment.SegmentNodeState;
import org.apache.jackrabbit.oak.segment.SegmentNodeStore;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
import org.apache.jackrabbit.oak.segment.SegmentStore;
import org.apache.jackrabbit.oak.segment.SegmentTracker;
import org.apache.jackrabbit.oak.segment.SegmentVersion;
import org.apache.jackrabbit.oak.segment.SegmentWriter;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The storage implementation for tar files.
*/
public class FileStore implements SegmentStore {
    /** Logger instance */
    private static final Logger log = LoggerFactory.getLogger(FileStore.class);
    /** One megabyte in bytes. */
    private static final int MB = 1024 * 1024;
    /** Matches tar file names: type prefix, numeric index, optional generation letter. */
    private static final Pattern FILE_NAME_PATTERN =
            Pattern.compile("(data|bulk)((0|[1-9][0-9]*)[0-9]{4})([a-z])?.tar");
    private static final String FILE_NAME_FORMAT = "data%05d%s.tar";
    private static final String JOURNAL_FILE_NAME = "journal.log";
    private static final String LOCK_FILE_NAME = "repo.lock";
    /**
     * GC counter for logging purposes
     */
    private static final AtomicLong GC_COUNT = new AtomicLong(0);
    /** Memory mapping defaults to on only for 64 bit JVMs. */
    static final boolean MEMORY_MAPPING_DEFAULT =
            "64".equals(System.getProperty("sun.arch.data.model", "32"));
    private final SegmentTracker tracker;
    /** Directory containing the tar files, journal and lock file. */
    private final File directory;
    /** Optional store for external binaries; {@code null} -> blobs stored inline. */
    private final BlobStore blobStore;
    /** Size threshold at which the current tar writer is rotated (see writeSegment). */
    private final int maxFileSize;
    private final boolean memoryMapping;
    /** Readers of closed tar files; the list is replaced wholesale under the lock. */
    private volatile List<TarReader> readers;
    /** Index of the tar file currently being written. */
    private int writeNumber;
    private File writeFile;
    /** Writer for the currently open tar file. */
    private TarWriter writer;
    private final RandomAccessFile journalFile;
    private final RandomAccessFile lockFile;
    // File lock guarding this repository; presumably acquired in the
    // constructor (not shown here) and released in close().
    private final FileLock lock;
    /**
     * The latest head state.
     */
    private final AtomicReference<RecordId> head;
    /**
     * The persisted head of the root journal, used to determine whether the
     * latest {@link #head} value should be written to the disk.
     */
    private final AtomicReference<RecordId> persistedHead;
    /**
     * The background flush thread. Automatically flushes the TarMK state
     * once every five seconds.
     */
    private final BackgroundThread flushThread;
    /**
     * The background compaction thread. Compacts the TarMK contents whenever
     * triggered by the {@link #gc()} method.
     */
    private final BackgroundThread compactionThread;
    /**
     * This background thread periodically asks the {@code SegmentGCOptions}
     * to compare the approximate size of the repository with the available disk
     * space. The result of this comparison is stored in the state of this
     * {@code FileStore}.
     */
    private final BackgroundThread diskSpaceThread;
    private final SegmentGCOptions gcOptions;
    /**
     * Flag to request revision cleanup during the next flush.
     */
    private final AtomicBoolean cleanupNeeded = new AtomicBoolean(false);
    /**
     * List of old tar file generations that are waiting to be removed. They can
     * not be removed immediately, because they first need to be closed, and the
     * JVM needs to release the memory mapped file references.
     */
    private final List<File> pendingRemove = newLinkedList();
    /**
     * Version of the segment storage format.
     */
    private final SegmentVersion version;
    /**
     * {@code GCMonitor} monitoring this instance's gc progress
     */
    private final GCMonitor gcMonitor;
    /**
     * Represents the approximate size on disk of the repository.
     */
    private final AtomicLong approximateSize;
    /**
     * This flag is periodically updated by calling the {@code SegmentGCOptions}
     * at regular intervals.
     */
    private final AtomicBoolean sufficientDiskSpace;
    /**
     * Flag signalling shutdown of the file store
     */
    private volatile boolean shutdown;
    /** Guards the reader list, the writer and the journal file. */
    private final ReadWriteLock fileStoreLock = new ReentrantReadWriteLock();
    private final FileStoreStats stats;
    /**
     * Create a new instance of a {@link Builder} for a file store.
     * @param directory directory where the tar files are stored
     * @return a new {@link Builder} instance.
     * @throws NullPointerException if {@code directory} is {@code null}
     */
    @Nonnull
    public static Builder builder(@Nonnull File directory) {
        return new Builder(checkNotNull(directory));
    }
/**
* Builder for creating {@link FileStore} instances.
*/
public static class Builder {
        private final File directory;
        private BlobStore blobStore; // null -> store blobs inline
        private NodeState root = EMPTY_NODE;
        private int maxFileSize = 256; // maximal tar file size in MB
        private int cacheSize; // 0 -> DEFAULT_MEMORY_CACHE_SIZE
        private boolean memoryMapping;
        private final LoggingGCMonitor gcMonitor = new LoggingGCMonitor();
        private StatisticsProvider statsProvider = StatisticsProvider.NOOP;
        private SegmentVersion version = SegmentVersion.LATEST_VERSION;
        private SegmentGCOptions gcOptions = SegmentGCOptions.DEFAULT;
        /**
         * @param directory directory where the tar files are stored
         */
        private Builder(File directory) {
            this.directory = directory;
        }
        /**
         * Specify the {@link BlobStore}.
         * @param blobStore blob store for external binaries; must not be {@code null}
         * @return this instance
         */
        @Nonnull
        public Builder withBlobStore(@Nonnull BlobStore blobStore) {
            this.blobStore = checkNotNull(blobStore);
            return this;
        }
        /**
         * Specify the initial root node state for the file store
         * @param root initial root node state; must not be {@code null}
         * @return this instance
         */
        @Nonnull
        public Builder withRoot(@Nonnull NodeState root) {
            this.root = checkNotNull(root);
            return this;
        }
        /**
         * Maximal size of the generated tar files in MB.
         * @param maxFileSize maximal tar file size in MB
         * @return this instance
         */
        @Nonnull
        public Builder withMaxFileSize(int maxFileSize) {
            this.maxFileSize = maxFileSize;
            return this;
        }
/**
* Size of the cache in MB.
* @param cacheSize
* @return this instance
*/
@Nonnull
public Builder withCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
return this;
}
/**
* Turn caching off
* @return this instance
*/
@Nonnull
public Builder withNoCache() {
this.cacheSize = -1;
return this;
}
/**
* Turn memory mapping on or off
* @param memoryMapping
* @return this instance
*/
@Nonnull
public Builder withMemoryMapping(boolean memoryMapping) {
this.memoryMapping = memoryMapping;
return this;
}
/**
* Set memory mapping to the default value based on OS properties
* @return this instance
*/
@Nonnull
public Builder withDefaultMemoryMapping() {
this.memoryMapping = MEMORY_MAPPING_DEFAULT;
return this;
}
/**
* {@link GCMonitor} for monitoring this files store's gc process.
* @param gcMonitor
* @return this instance
*/
@Nonnull
public Builder withGCMonitor(@Nonnull GCMonitor gcMonitor) {
this.gcMonitor.delegatee = checkNotNull(gcMonitor);
return this;
}
/**
* {@link StatisticsProvider} for collecting statistics related to FileStore
* @param statisticsProvider
* @return this instance
*/
@Nonnull
public Builder withStatisticsProvider(@Nonnull StatisticsProvider statisticsProvider) {
this.statsProvider = checkNotNull(statisticsProvider);
return this;
}
/**
* {@link SegmentVersion} the segment version of the store
* @param version
* @return this instance
*/
@Nonnull
public Builder withSegmentVersion(SegmentVersion version) {
this.version = checkNotNull(version);
return this;
}
@Nonnull
public Builder withGCOptions(SegmentGCOptions gcOptions) {
this.gcOptions = gcOptions;
return this;
}
/**
* Create a new {@link FileStore} instance with the settings specified in this
* builder. If none of the {@code with} methods have been called before calling
* this method, a file store with the following default settings is returned:
* <ul>
* <li>blob store: inline</li>
* <li>root: empty node</li>
* <li>max file size: 256MB</li>
* <li>cache size: 256MB</li>
* <li>memory mapping: on for 64 bit JVMs off otherwise</li>
* <li>whiteboard: none. No {@link GCMonitor} tracking</li>
* <li>statsProvider: StatisticsProvider.NOOP</li>
* </ul>
*
* @return a new file store instance
* @throws IOException
*/
@Nonnull
public FileStore build() throws IOException {
return new FileStore(this, false);
}
public ReadOnlyStore buildReadOnly() throws IOException {
return new ReadOnlyStore(this);
}
}
/**
 * Create a new file store over the directory configured in {@code builder}.
 * <p>
 * Opens the journal and any existing tar files, recovers the most recent
 * valid head record id from the journal and, unless {@code readOnly} is
 * set, acquires the store lock and starts the background flush, compaction
 * and disk space check threads.
 *
 * @param builder  settings for this store
 * @param readOnly if {@code true} no tar writer, lock file or background
 *                 threads are created
 * @throws IOException on failure to open the store files
 */
private FileStore(Builder builder, boolean readOnly) throws IOException {
    this.version = builder.version;
    if (readOnly) {
        checkNotNull(builder.directory);
        checkState(builder.directory.exists() && builder.directory.isDirectory());
    } else {
        // NOTE(review): the result of mkdirs() is ignored; a failure only
        // surfaces later when the journal file is opened.
        checkNotNull(builder.directory).mkdirs();
    }
    // FIXME OAK-4102: Break cyclic dependency of FileStore and SegmentTracker
    // SegmentTracker and FileStore have a cyclic dependency, which we should
    // try to break. Here we pass along a not fully initialised instances of the
    // FileStore to the SegmentTracker, which in turn is in later invoked to write
    // the initial node state. Notably before this instance is fully initialised!
    // Once consequence of this is that we cannot reliably determine the current
    // GC generation while writing the initial head state. See further below.
    if (builder.cacheSize < 0) {
        this.tracker = new SegmentTracker(this, 0, version);
    } else if (builder.cacheSize > 0) {
        this.tracker = new SegmentTracker(this, builder.cacheSize, version);
    } else {
        this.tracker = new SegmentTracker(this, version);
    }
    this.blobStore = builder.blobStore;
    this.directory = builder.directory;
    this.maxFileSize = builder.maxFileSize * MB;
    this.memoryMapping = builder.memoryMapping;
    this.gcMonitor = builder.gcMonitor;
    this.gcOptions = builder.gcOptions;
    if (readOnly) {
        journalFile = new RandomAccessFile(new File(directory,
                JOURNAL_FILE_NAME), "r");
    } else {
        journalFile = new RandomAccessFile(new File(directory,
                JOURNAL_FILE_NAME), "rw");
    }
    // Open all existing tar files, newest (highest index) first.
    Map<Integer, Map<Character, File>> map = collectFiles(directory);
    this.readers = newArrayListWithCapacity(map.size());
    Integer[] indices = map.keySet().toArray(new Integer[map.size()]);
    Arrays.sort(indices);
    for (int i = indices.length - 1; i >= 0; i--) {
        if (!readOnly) {
            readers.add(TarReader.open(map.get(indices[i]), memoryMapping));
        } else {
            // only try to read-only recover the latest file as that might
            // be the *only* one still being accessed by a writer
            boolean recover = i == indices.length - 1;
            readers.add(TarReader.openRO(map.get(indices[i]),
                    memoryMapping, recover));
        }
    }
    long initialSize = size();
    this.approximateSize = new AtomicLong(initialSize);
    this.stats = new FileStoreStats(builder.statsProvider, this, initialSize);
    if (!readOnly) {
        // Continue numbering new tar files after the highest existing index.
        if (indices.length > 0) {
            this.writeNumber = indices[indices.length - 1] + 1;
        } else {
            this.writeNumber = 0;
        }
        this.writeFile = new File(directory, String.format(
                FILE_NAME_FORMAT, writeNumber, "a"));
        this.writer = new TarWriter(writeFile, stats);
    }
    // Scan the journal for the most recent record id whose segment is
    // still present, rewinding past unreadable or invalid revisions.
    RecordId id = null;
    JournalReader journalReader = new JournalReader(new File(directory, JOURNAL_FILE_NAME));
    try {
        Iterator<String> heads = journalReader.iterator();
        while (id == null && heads.hasNext()) {
            String head = heads.next();
            try {
                RecordId last = RecordId.fromString(tracker, head);
                SegmentId segmentId = last.getSegmentId();
                if (containsSegment(
                        segmentId.getMostSignificantBits(),
                        segmentId.getLeastSignificantBits())) {
                    id = last;
                } else {
                    log.warn("Unable to access revision {}, rewinding...", last);
                }
            } catch (IllegalArgumentException ignore) {
                log.warn("Skipping invalid record id {}", head);
            }
        }
    } finally {
        journalReader.close();
    }
    // Position the journal for appending new revisions.
    journalFile.seek(journalFile.length());
    if (!readOnly) {
        lockFile = new RandomAccessFile(
                new File(directory, LOCK_FILE_NAME), "rw");
        lock = lockFile.getChannel().lock();
    } else {
        lockFile = null;
        lock = null;
    }
    if (id != null) {
        head = new AtomicReference<RecordId>(id);
        persistedHead = new AtomicReference<RecordId>(id);
    } else {
        // Empty or unusable journal: write an initial head based on the
        // root node state supplied by the builder.
        NodeBuilder nodeBuilder = EMPTY_NODE.builder();
        nodeBuilder.setChildNode("root", builder.root);
        head = new AtomicReference<RecordId>(tracker.getWriter().writeNode(
                nodeBuilder.getNodeState()).getRecordId());
        persistedHead = new AtomicReference<RecordId>(null);
    }
    if (!readOnly) {
        flushThread = BackgroundThread.run(
                "TarMK flush thread [" + directory + "]", 5000, // 5s interval
                new Runnable() {
                    @Override
                    public void run() {
                        try {
                            flush();
                        } catch (IOException e) {
                            log.warn("Failed to flush the TarMK at {}", directory, e);
                        }
                    }
                });
        compactionThread = BackgroundThread.run(
                "TarMK compaction thread [" + directory + "]", -1,
                new Runnable() {
                    @Override
                    public void run() {
                        try {
                            maybeCompact(true);
                        } catch (IOException e) {
                            log.error("Error running compaction", e);
                        }
                    }
                });
        diskSpaceThread = BackgroundThread.run(
                "TarMK disk space check [" + directory + "]", MINUTES.toMillis(1), new Runnable() {
                    @Override
                    public void run() {
                        checkDiskSpace();
                    }
                });
    } else {
        flushThread = null;
        compactionThread = null;
        diskSpaceThread = null;
    }
    sufficientDiskSpace = new AtomicBoolean(true);
    if (readOnly) {
        log.info("TarMK ReadOnly opened: {} (mmap={})", directory,
                memoryMapping);
    } else {
        log.info("TarMK opened: {} (mmap={})", directory, memoryMapping);
    }
    log.debug("TarMK readers {}", this.readers);
}
// FIXME OAK-4102: Break cyclic dependency of FileStore and SegmentTracker
// We cannot determine the current GC generation before the FileStore is fully
// initialised so just return 0 for now.
public int getGcGen() {
    AtomicReference<RecordId> headReference = head;
    if (headReference == null) {
        // Called while the constructor is still running
        return 0;
    }
    RecordId headId = headReference.get();
    if (headId == null) {
        // Head reference exists but holds no record yet
        return 0;
    }
    return headId.getSegment().getGcGen();
}
/**
 * Run the estimation phase and, when worthwhile and compaction is not
 * paused, a compaction cycle.
 *
 * @param cleanup whether a cleanup run should be scheduled afterwards
 * @return {@code true} if compaction was actually executed
 * @throws IOException
 */
public boolean maybeCompact(boolean cleanup) throws IOException {
    gcMonitor.info("TarMK GC #{}: started", GC_COUNT.incrementAndGet());
    Runtime runtime = Runtime.getRuntime();
    // NOTE(review): 'avail' is computed as used memory (total - free),
    // not available memory — looks inverted; covered by FIXME OAK-4281.
    long avail = runtime.totalMemory() - runtime.freeMemory();
    // FIXME OAK-4281: Rework memory estimation for compaction
    // What value should we use for delta?
    long delta = 0;
    long needed = delta * gcOptions.getMemoryThreshold();
    if (needed >= avail) {
        gcMonitor.skipped(
                "TarMK GC #{}: not enough available memory {} ({} bytes), needed {} ({} bytes)," +
                " last merge delta {} ({} bytes), so skipping compaction for now",
                GC_COUNT,
                humanReadableByteCount(avail), avail,
                humanReadableByteCount(needed), needed,
                humanReadableByteCount(delta), delta);
        if (cleanup) {
            cleanupNeeded.set(!gcOptions.isPaused());
        }
        return false;
    }
    Stopwatch watch = Stopwatch.createStarted();
    boolean compacted = false;
    int gainThreshold = gcOptions.getGainThreshold();
    boolean runCompaction = true;
    if (gainThreshold <= 0) {
        // A non-positive threshold disables the estimation phase entirely
        gcMonitor.info("TarMK GC #{}: estimation skipped because gain threshold value ({} <= 0)", GC_COUNT,
                gainThreshold);
    } else if (gcOptions.isPaused()) {
        gcMonitor.info("TarMK GC #{}: estimation skipped because compaction is paused", GC_COUNT);
    } else {
        gcMonitor.info("TarMK GC #{}: estimation started", GC_COUNT);
        Supplier<Boolean> shutdown = newShutdownSignal();
        CompactionGainEstimate estimate = estimateCompactionGain(shutdown);
        if (shutdown.get()) {
            gcMonitor.info("TarMK GC #{}: estimation interrupted. Skipping compaction.", GC_COUNT);
            return false;
        }
        long gain = estimate.estimateCompactionGain();
        runCompaction = gain >= gainThreshold;
        if (runCompaction) {
            gcMonitor.info(
                "TarMK GC #{}: estimation completed in {} ({} ms). " +
                "Gain is {}% or {}/{} ({}/{} bytes), so running compaction",
                    GC_COUNT, watch, watch.elapsed(MILLISECONDS), gain,
                    humanReadableByteCount(estimate.getReachableSize()), humanReadableByteCount(estimate.getTotalSize()),
                    estimate.getReachableSize(), estimate.getTotalSize());
        } else {
            if (estimate.getTotalSize() == 0) {
                gcMonitor.skipped(
                        "TarMK GC #{}: estimation completed in {} ({} ms). " +
                        "Skipping compaction for now as repository consists of a single tar file only",
                        GC_COUNT, watch, watch.elapsed(MILLISECONDS));
            } else {
                gcMonitor.skipped(
                    "TarMK GC #{}: estimation completed in {} ({} ms). " +
                    "Gain is {}% or {}/{} ({}/{} bytes), so skipping compaction for now",
                        GC_COUNT, watch, watch.elapsed(MILLISECONDS), gain,
                        humanReadableByteCount(estimate.getReachableSize()), humanReadableByteCount(estimate.getTotalSize()),
                        estimate.getReachableSize(), estimate.getTotalSize());
            }
        }
    }
    if (runCompaction) {
        if (!gcOptions.isPaused()) {
            compact();
            compacted = true;
        } else {
            gcMonitor.skipped("TarMK GC #{}: compaction paused", GC_COUNT);
        }
    }
    if (cleanup) {
        cleanupNeeded.set(!gcOptions.isPaused());
    }
    return compacted;
}
/**
 * Scan {@code directory} for tar files, upgrading legacy "bulk" file names
 * to the current "data" naming scheme as a side effect.
 *
 * @param directory directory containing the tar files
 * @return map from file index to the files of each generation for that
 *         index; empty when the directory cannot be listed
 */
static Map<Integer, Map<Character, File>> collectFiles(File directory) {
    Map<Integer, Map<Character, File>> dataFiles = newHashMap();
    Map<Integer, File> bulkFiles = newHashMap();
    // listFiles() returns null if the directory does not exist or an I/O
    // error occurs; treat both as "no tar files present" instead of
    // failing with a NullPointerException.
    File[] files = directory.listFiles();
    if (files == null) {
        return dataFiles;
    }
    for (File file : files) {
        Matcher matcher = FILE_NAME_PATTERN.matcher(file.getName());
        if (matcher.matches()) {
            Integer index = Integer.parseInt(matcher.group(2));
            if ("data".equals(matcher.group(1))) {
                Map<Character, File> files2 = dataFiles.get(index);
                if (files2 == null) {
                    files2 = newHashMap();
                    dataFiles.put(index, files2);
                }
                // Generation suffix defaults to 'a' when absent
                Character generation = 'a';
                if (matcher.group(4) != null) {
                    generation = matcher.group(4).charAt(0);
                }
                checkState(files2.put(generation, file) == null);
            } else {
                checkState(bulkFiles.put(index, file) == null);
            }
        }
    }
    if (!bulkFiles.isEmpty()) {
        log.info("Upgrading TarMK file names in {}", directory);
        if (!dataFiles.isEmpty()) {
            // first put all the data segments at the end of the list
            Integer[] indices =
                    dataFiles.keySet().toArray(new Integer[dataFiles.size()]);
            Arrays.sort(indices);
            int position = Math.max(
                    indices[indices.length - 1] + 1,
                    bulkFiles.size());
            for (Integer index : indices) {
                Map<Character, File> files2 = dataFiles.remove(index);
                Integer newIndex = position++;
                for (Character generation : newHashSet(files2.keySet())) {
                    File file = files2.get(generation);
                    File newFile = new File(
                            directory,
                            format(FILE_NAME_FORMAT, newIndex, generation));
                    log.info("Renaming {} to {}", file, newFile);
                    if (!file.renameTo(newFile)) {
                        // Surface a failed rename instead of silently
                        // continuing with a stale mapping.
                        log.warn("Failed to rename {} to {}", file, newFile);
                    }
                    files2.put(generation, newFile);
                }
                dataFiles.put(newIndex, files2);
            }
        }
        // then add all the bulk segments at the beginning of the list
        Integer[] indices =
                bulkFiles.keySet().toArray(new Integer[bulkFiles.size()]);
        Arrays.sort(indices);
        int position = 0;
        for (Integer index : indices) {
            File file = bulkFiles.remove(index);
            Integer newIndex = position++;
            File newFile = new File(
                    directory, format(FILE_NAME_FORMAT, newIndex, "a"));
            log.info("Renaming {} to {}", file, newFile);
            if (!file.renameTo(newFile)) {
                log.warn("Failed to rename {} to {}", file, newFile);
            }
            dataFiles.put(newIndex, singletonMap('a', newFile));
        }
    }
    return dataFiles;
}
/**
 * Approximate size on disk of this store: the current write file plus
 * all tar readers.
 *
 * @return size in bytes
 */
public long size() {
    fileStoreLock.readLock().lock();
    try {
        long total = (writeFile == null) ? 0 : writeFile.length();
        for (TarReader reader : readers) {
            total += reader.size();
        }
        return total;
    } finally {
        fileStoreLock.readLock().unlock();
    }
}
/**
 * Number of tar readers currently backing this store.
 *
 * @return the reader count
 */
public int readerCount(){
    fileStoreLock.readLock().lock();
    try {
        return readers.size();
    } finally {
        fileStoreLock.readLock().unlock();
    }
}
/**
 * Returns the number of segments in this TarMK instance.
 *
 * @return number of segments
 */
private int count() {
    fileStoreLock.readLock().lock();
    try {
        int total = (writer == null) ? 0 : writer.count();
        for (TarReader reader : readers) {
            total += reader.count();
        }
        return total;
    } finally {
        fileStoreLock.readLock().unlock();
    }
}
/**
 * Estimated compaction gain. The result will be undefined if stopped through
 * the passed {@code stop} signal.
 *
 * @param stop signal for stopping the estimation process.
 * @return compaction gain estimate
 */
CompactionGainEstimate estimateCompactionGain(Supplier<Boolean> stop) {
    CompactionGainEstimate gainEstimate =
            new CompactionGainEstimate(getHead(), count(), stop);
    fileStoreLock.readLock().lock();
    try {
        for (TarReader reader : readers) {
            // Feed each reader into the estimate, then honour the stop
            // signal before moving on to the next one.
            reader.accept(gainEstimate);
            if (stop.get()) {
                break;
            }
        }
    } finally {
        fileStoreLock.readLock().unlock();
    }
    return gainEstimate;
}
/**
 * Statistics (e.g. repository size) collected by this store.
 *
 * @return the statistics instance
 */
public FileStoreStats getStats() {
    return stats;
}
/**
 * Flush pending changes, running a cleanup if one was requested since the
 * last flush (the pending-cleanup flag is consumed atomically).
 *
 * @throws IOException
 */
public void flush() throws IOException {
    flush(cleanupNeeded.getAndSet(false));
}
/**
 * Flush the segment writer and persist the current head to the journal.
 * Also removes tar files made obsolete by earlier cleanup runs.
 *
 * @param cleanup whether to run a cleanup pass even if the head is
 *                unchanged
 * @throws IOException
 */
public void flush(boolean cleanup) throws IOException {
    synchronized (persistedHead) {
        RecordId before = persistedHead.get();
        RecordId after = head.get();
        if (cleanup || !after.equals(before)) {
            tracker.getWriter().flush();
            // FIXME OAK-4291: FileStore.flush prone to races leading to corruption
            // There is a small windows that could lead to a corrupted store:
            // if we crash right after setting the persisted head but before any delay-flushed
            // SegmentBufferWriter instance flushes (see SegmentBufferWriterPool.returnWriter())
            // then that data is lost although it might be referenced from the persisted head already.
            // Need a test case. Possible fix: return a future from flush() and set the persisted head
            // in the completion handler.
            writer.flush();
            fileStoreLock.writeLock().lock();
            try {
                log.debug("TarMK journal update {} -> {}", before, after);
                journalFile.writeBytes(after.toString10() + " root " + System.currentTimeMillis()+"\n");
                journalFile.getChannel().force(false);
                persistedHead.set(after);
            } finally {
                fileStoreLock.writeLock().unlock();
            }
            if (cleanup) {
                // Explicitly give up reference to the previous root state
                // otherwise they could block cleanup. See OAK-3347
                before = null;
                after = null;
                pendingRemove.addAll(cleanup());
            }
        }
        // remove all obsolete tar generations
        Iterator<File> iterator = pendingRemove.iterator();
        while (iterator.hasNext()) {
            File file = iterator.next();
            log.debug("TarMK GC: Attempting to remove old file {}",
                    file);
            if (!file.exists() || file.delete()) {
                log.debug("TarMK GC: Removed old file {}", file);
                iterator.remove();
            } else {
                // Keep the file queued; a memory-mapped reference may
                // still pin it. Retried on the next flush.
                log.warn("TarMK GC: Failed to remove old file {}. Will retry later.", file);
            }
        }
    }
}
/**
 * Runs garbage collection on the segment level, which could write new
 * generations of tar files. It checks which segments are still reachable,
 * and throws away those that are not.
 * <p>
 * A new generation of a tar file is created (and segments are only
 * discarded) if doing so releases more than 25% of the space in a tar file.
 *
 * @return tar files that became obsolete and should be removed once the
 *         JVM has released their memory-mapped references
 * @throws IOException
 */
public List<File> cleanup() throws IOException {
    Stopwatch watch = Stopwatch.createStarted();
    long initialSize = size();
    Set<UUID> bulkRefs = newHashSet();
    Map<TarReader, TarReader> cleaned = newLinkedHashMap();
    fileStoreLock.writeLock().lock();
    try {
        gcMonitor.info("TarMK GC #{}: cleanup started. Current repository size is {} ({} bytes)",
                GC_COUNT, humanReadableByteCount(initialSize), initialSize);
        newWriter();
        tracker.clearCache();
        // Suggest to the JVM that now would be a good time
        // to clear stale weak references in the SegmentTracker
        System.gc();
        for (SegmentId id : tracker.getReferencedSegmentIds()) {
            if (!isDataSegmentId(id.getLeastSignificantBits())) {
                bulkRefs.add(id.asUUID());
            }
        }
        // Snapshot the current readers; each maps to itself until swept
        for (TarReader reader : readers) {
            cleaned.put(reader, reader);
        }
    } finally {
        fileStoreLock.writeLock().unlock();
    }
    // FIXME OAK-4282: Make the number of retained gc generation configurable
    int generation = getGcGen() - 1;
    Set<UUID> reclaim = newHashSet();
    // Mark phase: collect the set of segments eligible for reclamation
    for (TarReader reader : cleaned.keySet()) {
        reader.mark(bulkRefs, reclaim, generation);
        // FIXME OAK-4165: Too verbose logging during revision gc
        log.info("Size of bulk references/reclaim set {}/{}", bulkRefs.size(), reclaim.size());
        if (shutdown) {
            gcMonitor.info("TarMK GC #{}: cleanup interrupted", GC_COUNT);
            break;
        }
    }
    // Sweep phase: rewrite each reader without the reclaimed segments
    for (TarReader reader : cleaned.keySet()) {
        cleaned.put(reader, reader.sweep(reclaim));
        if (shutdown) {
            gcMonitor.info("TarMK GC #{}: cleanup interrupted", GC_COUNT);
            break;
        }
    }
    List<TarReader> oldReaders = newArrayList();
    fileStoreLock.writeLock().lock();
    try {
        // Replace current list of reader with the cleaned readers taking care not to lose
        // any new reader that might have come in through concurrent calls to newWriter()
        List<TarReader> newReaders = newArrayList();
        for (TarReader reader : readers) {
            if (cleaned.containsKey(reader)) {
                TarReader newReader = cleaned.get(reader);
                if (newReader != null) {
                    newReaders.add(newReader);
                }
                if (newReader != reader) {
                    oldReaders.add(reader);
                }
            } else {
                newReaders.add(reader);
            }
        }
        readers = newReaders;
    } finally {
        fileStoreLock.writeLock().unlock();
    }
    // Close old readers *after* setting readers to the new readers to avoid accessing
    // a closed reader from readSegment()
    LinkedList<File> toRemove = newLinkedList();
    for (TarReader oldReader : oldReaders) {
        closeAndLogOnFail(oldReader);
        File file = oldReader.getFile();
        gcMonitor.info("TarMK GC #{}: cleanup marking file for deletion: {}", GC_COUNT, file.getName());
        toRemove.addLast(file);
    }
    long finalSize = size();
    approximateSize.set(finalSize);
    stats.reclaimed(initialSize - finalSize);
    gcMonitor.cleaned(initialSize - finalSize, finalSize);
    gcMonitor.info("TarMK GC #{}: cleanup completed in {} ({} ms). Post cleanup size is {} ({} bytes)" +
            " and space reclaimed {} ({} bytes).",
            GC_COUNT, watch, watch.elapsed(MILLISECONDS),
            humanReadableByteCount(finalSize), finalSize,
            humanReadableByteCount(initialSize - finalSize), initialSize - finalSize);
    return toRemove;
}
/**
 * Finds all external blob references that are currently accessible
 * in this repository and adds them to the given collector. Useful
 * for collecting garbage in an external data store.
 * <p>
 * Note that this method only collects blob references that are already
 * stored in the repository (at the time when this method is called), so
 * the garbage collector will need some other mechanism for tracking
 * in-memory references and references stored while this method is
 * running.
 *
 * @param collector reference collector called back for each blob reference found
 */
public void collectBlobReferences(ReferenceCollector collector) throws IOException {
    tracker.getWriter().flush();
    List<TarReader> snapshot = newArrayList();
    fileStoreLock.writeLock().lock();
    try {
        // Roll over the current writer so its segments become readable,
        // then take a stable snapshot of the reader list.
        newWriter();
        snapshot.addAll(readers);
    } finally {
        fileStoreLock.writeLock().unlock();
    }
    // FIXME OAK-4282: Make the number of retained gc generation configurable
    int minGeneration = getGcGen() - 1;
    for (TarReader reader : snapshot) {
        reader.collectBlobReferences(tracker, collector, minGeneration);
    }
}
/**
 * Returns the cancellation policy for the compaction phase. If the disk
 * space was considered insufficient at least once during compaction (or if
 * the space was never sufficient to begin with), compaction is considered
 * canceled.
 * Furthermore when the file store is shutting down, compaction is considered
 * canceled.
 *
 * @return a flag indicating if compaction should be canceled.
 */
private Supplier<Boolean> newCancelCompactionCondition() {
    return new Supplier<Boolean>() {
        // Both flags latch to true on first observation; see get().
        private boolean outOfDiskSpace;
        private boolean shutdown;
        @Override
        public Boolean get() {
            // The outOfDiskSpace and shutdown flags can only transition from false (their initial
            // values), to true. Once true, there should be no way to go back.
            if (!sufficientDiskSpace.get()) {
                outOfDiskSpace = true;
            }
            if (FileStore.this.shutdown) {
                this.shutdown = true;
            }
            return shutdown || outOfDiskSpace;
        }
        @Override
        public String toString() {
            // Human readable reason for the cancellation, used in logs
            if (outOfDiskSpace) {
                return "Not enough disk space available";
            } else if (shutdown) {
                return "FileStore shutdown request received";
            } else {
                return "";
            }
        }
    };
}
/**
 * Returns a signal indicating that this file store is shutting down.
 *
 * @return a shutdown signal
 */
private Supplier<Boolean> newShutdownSignal() {
    return new Supplier<Boolean>() {
        @Override
        public Boolean get() {
            // Reads the volatile shutdown flag of the enclosing store
            return FileStore.this.shutdown;
        }
    };
}
/**
 * Copy every referenced record in data (non-bulk) segments. Bulk segments
 * are fully kept (they are only removed in cleanup, if there is no
 * reference to them).
 * <p>
 * Retries rebasing concurrent commits onto the compacted state up to the
 * configured retry count, optionally force-compacting under an exclusive
 * lock as a last resort.
 */
public void compact() throws IOException {
    gcMonitor.info("TarMK GC #{}: compaction started, gc options={}", GC_COUNT, gcOptions);
    Stopwatch watch = Stopwatch.createStarted();
    // FIXME OAK-4277: Finalise de-duplication caches
    // Make the capacity and initial depth of the deduplication cache configurable
    final DeduplicationCache<String> nodeCache = new DeduplicationCache<String>(1000000, 20);
    // FIXME OAK-4280: Compaction cannot be cancelled
    // FIXME OAK-4279: Rework offline compaction
    // This way of compacting has not progress logging and cannot be cancelled
    int gcGeneration = tracker.getGcGen() + 1;
    // Dedicated writer tagged "c" producing segments of the next gc generation
    SegmentWriter writer = new SegmentWriter(this, tracker.getSegmentVersion(),
            new SegmentBufferWriter(this, tracker.getSegmentVersion(), "c", gcGeneration),
            new RecordCache<String>() {
                @Override
                protected Cache<String> getCache(int generation) {
                    return nodeCache;
                }
            });
    SegmentNodeState before = getHead();
    long existing = before.getChildNode(SegmentNodeStore.CHECKPOINTS)
            .getChildNodeCount(Long.MAX_VALUE);
    if (existing > 1) {
        gcMonitor.warn(
                "TarMK GC #{}: compaction found {} checkpoints, you might need to run checkpoint cleanup",
                GC_COUNT, existing);
    }
    SegmentNodeState after = compact(writer, before);
    gcMonitor.info("TarMK GC #{}: compacted {} to {}",
            GC_COUNT, before.getRecordId(), after.getRecordId());
    try {
        int cycles = 0;
        boolean success = false;
        while (cycles++ < gcOptions.getRetryCount()
                && !(success = setHead(before, after))) {
            // Some other concurrent changes have been made.
            // Rebase (and compact) those changes on top of the
            // compacted state before retrying to set the head.
            gcMonitor.info("TarMK GC #{}: compaction detected concurrent commits while compacting. " +
                    "Compacting these commits. Cycle {}", GC_COUNT, cycles);
            SegmentNodeState head = getHead();
            after = compact(writer, head);
            gcMonitor.info("TarMK GC #{}: compacted {} against {} to {}",
                    GC_COUNT, head.getRecordId(), before.getRecordId(), after.getRecordId());
            before = head;
        }
        if (success) {
            tracker.getWriter().addCachedNodes(gcGeneration, nodeCache);
            // FIXME OAK-4285: Align cleanup of segment id tables with the new cleanup strategy
            // ith clean brutal we need to remove those ids that have been cleaned
            // i.e. those whose segment was from an old generation
            tracker.clearSegmentIdTables(Predicates.<SegmentId>alwaysFalse());
            // FIXME OAK-4283: Align GCMonitor API with implementation
            // Refactor GCMonitor: there is no more compaction map stats
            gcMonitor.compacted(new long[]{}, new long[]{}, new long[]{});
        } else {
            gcMonitor.info("TarMK GC #{}: compaction gave up compacting concurrent commits after {} cycles.",
                    GC_COUNT, cycles - 1);
            if (gcOptions.getForceAfterFail()) {
                gcMonitor.info("TarMK GC #{}: compaction force compacting remaining commits", GC_COUNT);
                if (!forceCompact(writer)) {
                    gcMonitor.warn("TarMK GC #{}: compaction failed to force compact remaining commits. " +
                            "Most likely compaction didn't get exclusive access to the store.", GC_COUNT);
                }
            }
            // FIXME OAK-4284: Garbage left behind when compaction does not succeed
            // Giving up leaves garbage that will only be cleaned up 2 generations later!
        }
        gcMonitor.info("TarMK GC #{}: compaction completed in {} ({} ms), after {} cycles",
                GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles - 1);
    } catch (InterruptedException e) {
        // Preserve the interrupt status for callers higher up the stack
        gcMonitor.error("TarMK GC #" + GC_COUNT + ": compaction interrupted", e);
        currentThread().interrupt();
    } catch (Exception e) {
        gcMonitor.error("TarMK GC #" + GC_COUNT + ": compaction encountered an error", e);
    }
}
/**
 * Write {@code node} with the given writer, flush, and return the
 * resulting compacted node state.
 */
private static SegmentNodeState compact(SegmentWriter writer, NodeState node) throws IOException {
    SegmentNodeState result = writer.writeNode(node);
    writer.flush();
    return result;
}
/**
 * Compact the current head under the exclusive commit lock, giving up if
 * the lock cannot be acquired within the configured wait time.
 *
 * @return {@code true} if the head was compacted and set successfully
 */
private boolean forceCompact(SegmentWriter writer) throws InterruptedException, IOException {
    if (!rwLock.writeLock().tryLock(gcOptions.getLockWaitTime(), TimeUnit.SECONDS)) {
        // Could not obtain exclusive access to the store
        return false;
    }
    try {
        SegmentNodeState currentHead = getHead();
        return setHead(currentHead, compact(writer, currentHead));
    } finally {
        rwLock.writeLock().unlock();
    }
}
/**
 * All segment ids known to this store: those in the current writer plus
 * those in every tar reader.
 */
public Iterable<SegmentId> getSegmentIds() {
    fileStoreLock.readLock().lock();
    try {
        List<SegmentId> ids = newArrayList();
        if (writer != null) {
            appendSegmentIds(writer.getUUIDs(), ids);
        }
        for (TarReader reader : readers) {
            appendSegmentIds(reader.getUUIDs(), ids);
        }
        return ids;
    } finally {
        fileStoreLock.readLock().unlock();
    }
}

/** Resolves each UUID against the tracker and appends it to {@code ids}. */
private void appendSegmentIds(Iterable<UUID> uuids, List<SegmentId> ids) {
    for (UUID uuid : uuids) {
        ids.add(tracker.getSegmentId(
                uuid.getMostSignificantBits(),
                uuid.getLeastSignificantBits()));
    }
}
/** The {@code SegmentTracker} backing this store. */
@Override
public SegmentTracker getTracker() {
    return tracker;
}
/** Current head node state, wrapping the head record id at call time. */
@Override
public SegmentNodeState getHead() {
    return new SegmentNodeState(head.get());
}
// FIXME OAK-4015: Expedite commits from the compactor
// use a lock that can expedite important commits like compaction and checkpoints.
// Regular head updates take the read lock (see setHead); forceCompact takes
// the write lock to gain exclusive access to the store.
private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
/**
 * Atomically advance the head from {@code base} to {@code head}. Fails if
 * the current head no longer matches {@code base}.
 *
 * @return {@code true} if the head was updated
 */
@Override
public boolean setHead(SegmentNodeState base, SegmentNodeState head) {
    rwLock.readLock().lock();
    try {
        RecordId current = this.head.get();
        if (!current.equals(base.getRecordId())) {
            // Concurrent commit moved the head away from base
            return false;
        }
        return this.head.compareAndSet(current, head.getRecordId());
    } finally {
        rwLock.readLock().unlock();
    }
}
/**
 * Shut the store down: stop background threads, flush pending changes and
 * close all files, releasing the store lock.
 */
@Override
public void close() {
    // Flag the store as shutting / shut down
    shutdown = true;
    // avoid deadlocks by closing (and joining) the background
    // threads before acquiring the synchronization lock
    closeAndLogOnFail(compactionThread);
    closeAndLogOnFail(flushThread);
    closeAndLogOnFail(diskSpaceThread);
    try {
        flush();
        // FIXME OAK-4291: FileStore.flush prone to races leading to corruption
        // Replace this with a way to "close" the underlying SegmentBufferWriter(s)
        // tracker.getWriter().dropCache();
        fileStoreLock.writeLock().lock();
        try {
            closeAndLogOnFail(writer);
            // Swap in an empty reader list before closing the old readers
            List<TarReader> list = readers;
            readers = newArrayList();
            for (TarReader reader : list) {
                closeAndLogOnFail(reader);
            }
            if (lock != null) {
                lock.release();
            }
            closeAndLogOnFail(lockFile);
            closeAndLogOnFail(journalFile);
        } finally {
            fileStoreLock.writeLock().unlock();
        }
    } catch (IOException e) {
        throw new RuntimeException(
                "Failed to close the TarMK at " + directory, e);
    }
    System.gc(); // for any memory-mappings that are no longer used
    log.info("TarMK closed: {}", directory);
}
/** Whether the segment with the given id is present in this store. */
@Override
public boolean containsSegment(SegmentId id) {
    return containsSegment(
            id.getMostSignificantBits(), id.getLeastSignificantBits());
}
/**
 * Whether the segment identified by the given UUID bits is present in any
 * reader or the current writer. Scans the readers, then the writer (under
 * the read lock), then the readers again to cover a concurrent writer
 * rollover.
 */
private boolean containsSegment(long msb, long lsb) {
    for (TarReader reader : readers) {
        if (reader.containsEntry(msb, lsb)) {
            return true;
        }
    }
    if (writer != null) {
        fileStoreLock.readLock().lock();
        try {
            if (writer.containsEntry(msb, lsb)) {
                return true;
            }
        } finally {
            fileStoreLock.readLock().unlock();
        }
    }
    // the writer might have switched to a new file,
    // so we need to re-check the readers
    for (TarReader reader : readers) {
        if (reader.containsEntry(msb, lsb)) {
            return true;
        }
    }
    return false;
}
/**
 * Read the segment with the given id. Scans the readers, then the current
 * writer (under the read lock), then the readers again to cover a
 * concurrent writer rollover.
 *
 * @throws SegmentNotFoundException if the segment cannot be found anywhere
 */
@Override
public Segment readSegment(SegmentId id) {
    long msb = id.getMostSignificantBits();
    long lsb = id.getLeastSignificantBits();
    Segment segment = readFromReaders(id, msb, lsb);
    if (segment != null) {
        return segment;
    }
    if (writer != null) {
        fileStoreLock.readLock().lock();
        try {
            try {
                ByteBuffer buffer = writer.readEntry(msb, lsb);
                if (buffer != null) {
                    return new Segment(tracker, id, buffer);
                }
            } catch (IOException e) {
                log.warn("Failed to read from tar file {}", writer, e);
            }
        } finally {
            fileStoreLock.readLock().unlock();
        }
    }
    // the writer might have switched to a new file,
    // so we need to re-check the readers
    segment = readFromReaders(id, msb, lsb);
    if (segment != null) {
        return segment;
    }
    throw new SegmentNotFoundException(id);
}

/**
 * Scan the current tar readers for the given segment, skipping readers
 * already closed by a concurrent cleanup. Returns {@code null} if not
 * found. Note: reads the {@code readers} field at call time, so each call
 * sees the latest reader list.
 */
private Segment readFromReaders(SegmentId id, long msb, long lsb) {
    for (TarReader reader : readers) {
        try {
            if (reader.isClosed()) {
                // Cleanup might already have closed the file.
                // The segment should be available from another file.
                // (Log level unified to debug; the second pass previously
                // logged the same condition at info.)
                log.debug("Skipping closed tar file {}", reader);
                continue;
            }
            ByteBuffer buffer = reader.readEntry(msb, lsb);
            if (buffer != null) {
                return new Segment(tracker, id, buffer);
            }
        } catch (IOException e) {
            log.warn("Failed to read from tar file {}", reader, e);
        }
    }
    return null;
}
/**
 * Write a segment to the current tar file, rolling over to a new file once
 * the configured maximum size is reached, and update the approximate
 * repository size.
 */
@Override
public void writeSegment(SegmentId id, byte[] data, int offset, int length) throws IOException {
    fileStoreLock.writeLock().lock();
    try {
        int gcGeneration = Segment.getGcGen(wrap(data, offset, length));
        long currentFileSize = writer.writeEntry(
                id.getMostSignificantBits(),
                id.getLeastSignificantBits(),
                data, offset, length, gcGeneration);
        if (currentFileSize >= maxFileSize) {
            newWriter();
        }
        approximateSize.addAndGet(
                TarWriter.BLOCK_SIZE + length + TarWriter.getPaddingSize(length));
    } finally {
        fileStoreLock.writeLock().unlock();
    }
}
/**
 * Switch to a new tar writer.
 * This method may only be called when holding the write lock of {@link #fileStoreLock}
 * @throws IOException
 */
private void newWriter() throws IOException {
    if (!writer.isDirty()) {
        // Nothing written yet; keep using the current file
        return;
    }
    writer.close();
    List<TarReader> updated = newArrayListWithCapacity(1 + readers.size());
    // The just-closed write file becomes the newest reader
    updated.add(TarReader.open(writeFile, memoryMapping));
    updated.addAll(readers);
    readers = updated;
    writeNumber++;
    writeFile = new File(
            directory,
            String.format(FILE_NAME_FORMAT, writeNumber, "a"));
    writer = new TarWriter(writeFile, stats);
}
/**
 * Resolve an external blob by id.
 *
 * @throws IllegalStateException if no {@code BlobStore} was configured
 */
@Override
public Blob readBlob(String blobId) {
    if (blobStore == null) {
        throw new IllegalStateException("Attempt to read external blob with blobId [" + blobId + "] " +
                "without specifying BlobStore");
    }
    return new BlobStoreBlob(blobStore, blobId);
}
/** The configured {@code BlobStore}, or {@code null} for inline blobs. */
@Override
public BlobStore getBlobStore() {
    return blobStore;
}
/** Trigger an asynchronous gc run on the background compaction thread. */
@Override
public void gc() {
    compactionThread.trigger();
}
/**
 * Index of the segment UUIDs contained in each tar reader, keyed by the
 * absolute path of the tar file.
 */
public Map<String, Set<UUID>> getTarReaderIndex() {
    Map<String, Set<UUID>> fileToSegments = new HashMap<String, Set<UUID>>();
    for (TarReader reader : readers) {
        fileToSegments.put(reader.getFile().getAbsolutePath(), reader.getUUIDs());
    }
    return fileToSegments;
}
/**
 * Returns the segment reference graph of the tar file with the given name.
 *
 * @param fileName simple name of the tar file to inspect
 * @return map from each segment id in the file to the list of segment ids it
 *         references ({@code null} where no references are recorded); empty
 *         map if no open reader matches {@code fileName}
 * @throws IOException if reading the graph from the tar file fails
 */
public Map<UUID, List<UUID>> getTarGraph(String fileName) throws IOException {
    for (TarReader tarReader : readers) {
        if (!fileName.equals(tarReader.getFile().getName())) {
            continue;
        }
        // Seed every segment of the file with a null adjacency list...
        Map<UUID, List<UUID>> graph = newHashMap();
        for (UUID segmentId : tarReader.getUUIDs()) {
            graph.put(segmentId, null);
        }
        // ...then overlay the persisted reference graph, where present.
        Map<UUID, List<UUID>> references = tarReader.getGraph(false);
        if (references != null) {
            graph.putAll(references);
        }
        return graph;
    }
    return emptyMap();
}
/**
 * Forcefully moves the store's head state to the given revision. Both the
 * in-memory head and the last persisted head record are updated under the
 * write lock of {@link #fileStoreLock}.
 *
 * @param rootRevision string representation of the record id to move to
 */
private void setRevision(String rootRevision) {
    fileStoreLock.writeLock().lock();
    try {
        RecordId id = RecordId.fromString(tracker, rootRevision);
        head.set(id);
        persistedHead.set(id);
    } finally {
        fileStoreLock.writeLock().unlock();
    }
}
/**
 * Re-evaluates whether the available disk space is sufficient for
 * repository operations and logs a message whenever that verdict changes.
 */
private void checkDiskSpace() {
    long repositoryDiskSpace = approximateSize.get();
    long availableDiskSpace = directory.getFreeSpace();
    boolean sufficientNow = gcOptions.isDiskSpaceSufficient(repositoryDiskSpace, availableDiskSpace);
    boolean sufficientBefore = sufficientDiskSpace.getAndSet(sufficientNow);
    if (sufficientBefore && !sufficientNow) {
        // Transition: sufficient -> insufficient
        log.warn("Available disk space ({}) is too low, current repository size is approx. {}",
                humanReadableByteCount(availableDiskSpace),
                humanReadableByteCount(repositoryDiskSpace));
    } else if (!sufficientBefore && sufficientNow) {
        // Transition: insufficient -> sufficient
        log.info("Available disk space ({}) is sufficient again for repository operations, current repository size is approx. {}",
                humanReadableByteCount(availableDiskSpace),
                humanReadableByteCount(repositoryDiskSpace));
    }
}
/**
 * A read only {@link FileStore} implementation that supports
 * going back to old revisions.
 * <p>
 * All write methods are no-ops.
 */
public static class ReadOnlyStore extends FileStore {
    private ReadOnlyStore(Builder builder) throws IOException {
        // The 'true' flag selects the read-only code path of the FileStore constructor.
        super(builder, true);
    }
    /**
     * Go to the specified {@code revision}
     *
     * @param revision string representation of the record id of the revision
     */
    public void setRevision(String revision) {
        super.setRevision(revision);
    }
    /**
     * Include the ids of all segments transitively reachable through forward references from
     * {@code referencedIds}. See OAK-3864.
     */
    private static void includeForwardReferences(Iterable<TarReader> readers, Set<UUID> referencedIds)
    throws IOException {
        // Work set of references discovered so far; grows on each pass.
        Set<UUID> fRefs = newHashSet(referencedIds);
        do {
            // Add direct forward references
            for (TarReader reader : readers) {
                reader.calculateForwardReferences(fRefs);
                if (fRefs.isEmpty()) {
                    break; // Optimisation: bail out if no references left
                }
            }
            // ... as long as new forward references are found.
        } while (referencedIds.addAll(fRefs));
    }
    /**
     * Build the graph of segments reachable from an initial set of segments
     * @param roots the initial set of segments
     * @param visitor visitor receiving call back while following the segment graph
     * @throws IOException if reading from the tar files fails
     */
    public void traverseSegmentGraph(
        @Nonnull Set<UUID> roots,
        @Nonnull SegmentGraphVisitor visitor) throws IOException {
        List<TarReader> readers = super.readers;
        // First close the root set over forward references, then let every
        // reader report the edges it knows about.
        includeForwardReferences(readers, roots);
        for (TarReader reader : readers) {
            reader.traverseSegmentGraph(checkNotNull(roots), checkNotNull(visitor));
        }
    }
    // All mutating operations below are rejected: this store is read only.
    @Override
    public boolean setHead(SegmentNodeState base, SegmentNodeState head) {
        throw new UnsupportedOperationException("Read Only Store");
    }
    @Override
    public void writeSegment(SegmentId id, byte[] data,
            int offset, int length) {
        throw new UnsupportedOperationException("Read Only Store");
    }
    /**
     * no-op
     */
    @Override
    public void flush() { /* nop */ }
    @Override
    public LinkedList<File> cleanup() {
        throw new UnsupportedOperationException("Read Only Store");
    }
    @Override
    public void gc() {
        throw new UnsupportedOperationException("Read Only Store");
    }
    @Override
    public void compact() {
        throw new UnsupportedOperationException("Read Only Store");
    }
    @Override
    public boolean maybeCompact(boolean cleanup) {
        throw new UnsupportedOperationException("Read Only Store");
    }
}
/**
 * @return the segment version in use by this store
 */
public SegmentVersion getVersion() {
    return version;
}
/**
 * Quietly closes the given resource: any {@link IOException} thrown by
 * {@code close()} is logged as an error instead of being propagated.
 *
 * @param closeable resource to close; may be {@code null}, in which case
 *                  nothing happens
 */
private static void closeAndLogOnFail(Closeable closeable) {
    if (closeable == null) {
        return;
    }
    try {
        closeable.close();
    } catch (IOException ioe) {
        // Deliberately swallowed after logging so a failing close cannot
        // abort the surrounding operation.
        log.error(ioe.getMessage(), ioe);
    }
}
/**
 * {@link GCMonitor} that forwards every callback to a configurable delegate
 * and additionally writes the textual callbacks (info/warn/skipped) to this
 * class's logger.
 */
private static class LoggingGCMonitor implements GCMonitor {
    // Delegate receiving every callback; defaults to a no-op monitor.
    public GCMonitor delegatee = GCMonitor.EMPTY;
    @Override
    public void info(String message, Object... arguments) {
        log.info(message, arguments);
        delegatee.info(message, arguments);
    }
    @Override
    public void warn(String message, Object... arguments) {
        log.warn(message, arguments);
        delegatee.warn(message, arguments);
    }
    @Override
    public void error(String message, Exception exception) {
        // NOTE(review): unlike info/warn/skipped, errors are only delegated
        // and not written to the local log — confirm this is intentional.
        delegatee.error(message, exception);
    }
    @Override
    public void skipped(String reason, Object... arguments) {
        // Skipped collections are reported at info level locally.
        log.info(reason, arguments);
        delegatee.skipped(reason, arguments);
    }
    @Override
    public void compacted(long[] segmentCounts, long[] recordCounts, long[] compactionMapWeights) {
        // Statistics-only callback: delegated without local logging.
        delegatee.compacted(segmentCounts, recordCounts, compactionMapWeights);
    }
    @Override
    public void cleaned(long reclaimedSize, long currentSize) {
        delegatee.cleaned(reclaimedSize, currentSize);
    }
}
}
| OAK-3348: Cross gc sessions might introduce references to pre-compacted segments
Updated Javadoc
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1741252 13f79535-47bb-0310-9956-ffa450edef68
| oak-segment-next/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java | OAK-3348: Cross gc sessions might introduce references to pre-compacted segments Updated Javadoc | <ide><path>ak-segment-next/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java
<ide> }
<ide>
<ide> /**
<del> * Runs garbage collection on the segment level, which could write new
<del> * generations of tar files. It checks which segments are still reachable,
<del> * and throws away those that are not.
<del> * <p>
<del> * A new generation of a tar file is created (and segments are only
<del> * discarded) if doing so releases more than 25% of the space in a tar file.
<add> * Run garbage collection on the segment level: reclaim those data segments
<add> * that are from an old segment generation and those bulk segments that are not
<add> * reachable anymore.
<add> * Those tar files that shrink by at least 25% are rewritten to a new tar generation
<add> * skipping the reclaimed segments.
<ide> */
<ide> public List<File> cleanup() throws IOException {
<ide> Stopwatch watch = Stopwatch.createStarted(); |
|
Java | apache-2.0 | e2ed79ac9381896c28e33e96110dbbfaa80af60e | 0 | surya-janani/sakai,Fudan-University/sakai,buckett/sakai-gitflow,Fudan-University/sakai,clhedrick/sakai,whumph/sakai,rodriguezdevera/sakai,surya-janani/sakai,tl-its-umich-edu/sakai,frasese/sakai,pushyamig/sakai,liubo404/sakai,whumph/sakai,rodriguezdevera/sakai,Fudan-University/sakai,introp-software/sakai,introp-software/sakai,ouit0408/sakai,bzhouduke123/sakai,OpenCollabZA/sakai,tl-its-umich-edu/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai,lorenamgUMU/sakai,joserabal/sakai,udayg/sakai,rodriguezdevera/sakai,pushyamig/sakai,bkirschn/sakai,OpenCollabZA/sakai,OpenCollabZA/sakai,udayg/sakai,noondaysun/sakai,introp-software/sakai,duke-compsci290-spring2016/sakai,hackbuteer59/sakai,kingmook/sakai,lorenamgUMU/sakai,ouit0408/sakai,conder/sakai,joserabal/sakai,ktakacs/sakai,conder/sakai,udayg/sakai,wfuedu/sakai,noondaysun/sakai,conder/sakai,Fudan-University/sakai,introp-software/sakai,joserabal/sakai,colczr/sakai,zqian/sakai,rodriguezdevera/sakai,frasese/sakai,hackbuteer59/sakai,buckett/sakai-gitflow,kingmook/sakai,pushyamig/sakai,zqian/sakai,willkara/sakai,clhedrick/sakai,ktakacs/sakai,bkirschn/sakai,puramshetty/sakai,frasese/sakai,kwedoff1/sakai,kwedoff1/sakai,ouit0408/sakai,willkara/sakai,kingmook/sakai,ktakacs/sakai,joserabal/sakai,puramshetty/sakai,willkara/sakai,puramshetty/sakai,bkirschn/sakai,udayg/sakai,introp-software/sakai,lorenamgUMU/sakai,whumph/sakai,bkirschn/sakai,OpenCollabZA/sakai,whumph/sakai,bkirschn/sakai,clhedrick/sakai,noondaysun/sakai,wfuedu/sakai,frasese/sakai,buckett/sakai-gitflow,kingmook/sakai,bzhouduke123/sakai,willkara/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,lorenamgUMU/sakai,ktakacs/sakai,pushyamig/sakai,buckett/sakai-gitflow,willkara/sakai,liubo404/sakai,lorenamgUMU/sakai,frasese/sakai,zqian/sakai,Fudan-University/sakai,bzhouduke123/sakai,zqian/sakai,duke-compsci290-spring2016/sakai,kwedoff1/sakai,kwedoff1/sakai,clhedrick/sakai,wfuedu/sakai,colczr/sakai,lorenam
gUMU/sakai,clhedrick/sakai,bzhouduke123/sakai,bzhouduke123/sakai,wfuedu/sakai,ktakacs/sakai,kwedoff1/sakai,ktakacs/sakai,colczr/sakai,pushyamig/sakai,willkara/sakai,bkirschn/sakai,ktakacs/sakai,bkirschn/sakai,whumph/sakai,noondaysun/sakai,clhedrick/sakai,whumph/sakai,bkirschn/sakai,wfuedu/sakai,bzhouduke123/sakai,tl-its-umich-edu/sakai,colczr/sakai,introp-software/sakai,hackbuteer59/sakai,ouit0408/sakai,conder/sakai,introp-software/sakai,rodriguezdevera/sakai,puramshetty/sakai,noondaysun/sakai,hackbuteer59/sakai,buckett/sakai-gitflow,zqian/sakai,noondaysun/sakai,Fudan-University/sakai,tl-its-umich-edu/sakai,wfuedu/sakai,buckett/sakai-gitflow,surya-janani/sakai,zqian/sakai,pushyamig/sakai,liubo404/sakai,tl-its-umich-edu/sakai,hackbuteer59/sakai,liubo404/sakai,Fudan-University/sakai,frasese/sakai,kingmook/sakai,surya-janani/sakai,surya-janani/sakai,conder/sakai,tl-its-umich-edu/sakai,conder/sakai,hackbuteer59/sakai,kwedoff1/sakai,joserabal/sakai,surya-janani/sakai,kingmook/sakai,puramshetty/sakai,wfuedu/sakai,whumph/sakai,noondaysun/sakai,kwedoff1/sakai,duke-compsci290-spring2016/sakai,conder/sakai,frasese/sakai,liubo404/sakai,surya-janani/sakai,tl-its-umich-edu/sakai,wfuedu/sakai,colczr/sakai,udayg/sakai,kingmook/sakai,zqian/sakai,clhedrick/sakai,hackbuteer59/sakai,OpenCollabZA/sakai,OpenCollabZA/sakai,tl-its-umich-edu/sakai,willkara/sakai,clhedrick/sakai,willkara/sakai,whumph/sakai,joserabal/sakai,surya-janani/sakai,ouit0408/sakai,duke-compsci290-spring2016/sakai,colczr/sakai,puramshetty/sakai,rodriguezdevera/sakai,udayg/sakai,rodriguezdevera/sakai,puramshetty/sakai,conder/sakai,kingmook/sakai,joserabal/sakai,bzhouduke123/sakai,colczr/sakai,duke-compsci290-spring2016/sakai,noondaysun/sakai,buckett/sakai-gitflow,ouit0408/sakai,OpenCollabZA/sakai,udayg/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,introp-software/sakai,duke-compsci290-spring2016/sakai,zqian/sakai,liubo404/sakai,pushyamig/sakai,liubo404/sakai,joserabal/sakai,colczr/sakai,hackbuteer59/sakai,puramshetty/sak
ai,frasese/sakai,ouit0408/sakai,buckett/sakai-gitflow,liubo404/sakai,ktakacs/sakai,rodriguezdevera/sakai,kwedoff1/sakai,lorenamgUMU/sakai,ouit0408/sakai,Fudan-University/sakai,OpenCollabZA/sakai | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/search/trunk/search-impl/impl/src/java/org/sakaiproject/search/component/Messages.java $
* $Id: Messages.java 59685 2009-04-03 23:36:24Z [email protected] $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Aaron Zeckoski (azeckoski @ gmail.com) (aaronz @ vt.edu) (azeckoski @ unicon.net)
**********************************************************************************/
package org.sakaiproject.search.entitybroker;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.entitybroker.EntityView;
import org.sakaiproject.entitybroker.entityprovider.annotations.EntityCustomAction;
import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutable;
import org.sakaiproject.entitybroker.entityprovider.capabilities.Describeable;
import org.sakaiproject.entitybroker.entityprovider.capabilities.Outputable;
import org.sakaiproject.entitybroker.entityprovider.extension.Formats;
import org.sakaiproject.entitybroker.entityprovider.search.Restriction;
import org.sakaiproject.entitybroker.entityprovider.search.Search;
import org.sakaiproject.entitybroker.util.AbstractEntityProvider;
import org.sakaiproject.search.api.InvalidSearchQueryException;
import org.sakaiproject.search.api.SearchList;
import org.sakaiproject.search.api.SearchResult;
import org.sakaiproject.search.api.SearchService;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.user.api.UserDirectoryService;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Entity provider for EntityBroker giving access to the search service over an
 * HTTP method.
 *
 * @author Adrian Fish ([email protected])
 * @author Colin Hebert
 */
public class SearchEntityProvider extends AbstractEntityProvider implements ActionsExecutable, Outputable, Describeable {
    /** Number of results returned when the client does not supply a limit. */
    private static final int DEFAULT_RESULT_COUNT = 10;
    private UserDirectoryService userDirectoryService;
    private SearchService searchService;
    private SiteService siteService;

    /**
     * Name of the service, here "search"
     *
     * @return the constant name of this service
     */
    @Override
    public String getEntityPrefix() {
        return "search";
    }

    /**
     * Handled formats, such as JSON and XML
     *
     * @return formats supported
     */
    @Override
    public String[] getHandledOutputFormats() {
        return new String[]{Formats.JSON, Formats.XML};
    }

    /**
     * Simple search method.
     *
     * @param ref    entity reference (unused)
     * @param search carries the mandatory "searchTerms" restriction, the
     *               optional "contexts" restriction and paging information
     * @return a list of SearchResults
     * @throws IllegalArgumentException if no search terms were supplied or the
     *                                  query expression is invalid
     */
    @EntityCustomAction(action = "search", viewKey = EntityView.VIEW_LIST)
    public List<SearchResultEntity> search(EntityReference ref, Search search) {
        try {
            // Get the query sent by the client
            String query = extractQuery(search.getRestrictionByProperty("searchTerms"));
            // Get the list of contexts (sites) used for this search, or every
            // accessible site if the user hasn't provided a context list
            List<String> contexts = extractContexts(search.getRestrictionByProperty("contexts"));

            // Set the limit if it hasn't been set already
            if (search.getLimit() < 0)
                search.setLimit(DEFAULT_RESULT_COUNT);

            // Actual search
            SearchList searchResults = searchService.search(query, contexts, (int) search.getStart(), (int) search.getLimit());

            // Wrap each SearchResult in a SearchResultEntity to avoid conflicts
            // with the getId() method (see SRCH-85)
            List<SearchResultEntity> results = new ArrayList<SearchResultEntity>(searchResults.size());
            for (SearchResult result : searchResults) {
                results.add(new SearchResultEntity(result));
            }
            return results;
        } catch (InvalidSearchQueryException e) {
            // Preserve the cause so the reason the query was rejected is visible
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Extract the query from users parameters
     *
     * @param searchTermsRestriction parameter given to EntityBroker
     * @return A search String
     * @throws IllegalArgumentException If no query has been provided
     */
    private String extractQuery(Restriction searchTermsRestriction) {
        if (searchTermsRestriction == null)
            throw new IllegalArgumentException("No searchTerms supplied");

        StringBuilder searchQuery = new StringBuilder();
        for (String term : (String[]) searchTermsRestriction.getArrayValue()) {
            // Concatenate with spaces, if the user wants a coma separated value
            // he can easily enter comas in his query.
            searchQuery.append(term).append(' ');
        }
        // Trim the spurious trailing separator before handing the query on.
        return searchQuery.toString().trim();
    }

    /**
     * Extract contexts from users parameters
     *
     * @param contextsRestriction parameter given to EntityBroker, may be null
     * @return A list of contexts (sites) where the search will be done
     */
    private List<String> extractContexts(Restriction contextsRestriction) {
        if (contextsRestriction != null)
            return Arrays.asList((String[]) contextsRestriction.getArrayValue());
        // No contexts supplied. Get all the sites the current user is a member of
        return getAllSites();
    }

    /**
     * Get all sites available for the current user
     *
     * @return a list of contexts (sites IDs) available for the current user
     */
    private List<String> getAllSites() {
        List<Site> sites = siteService.getSites(SiteService.SelectionType.ACCESS, null, null, null, null, null);
        // +1 leaves room for the user's own workspace site added below.
        List<String> siteIds = new ArrayList<String>(sites.size() + 1);
        for (Site site : sites) {
            if (site != null && site.getId() != null)
                siteIds.add(site.getId());
        }
        // Manually add the user's site
        siteIds.add(siteService.getUserSiteId(userDirectoryService.getCurrentUser().getId()));
        return siteIds;
    }

    //--------------------------
    //Spring injected components
    //--------------------------
    public void setSearchService(SearchService searchService) {
        this.searchService = searchService;
    }

    public void setSiteService(SiteService siteService) {
        this.siteService = siteService;
    }

    public void setUserDirectoryService(UserDirectoryService userDirectoryService) {
        this.userDirectoryService = userDirectoryService;
    }

    /**
     * A wrapper to customise the result sent through EntityBroker
     * <p>
     * Wraps a {@link SearchResult} to avoid issues with the {@link org.sakaiproject.search.api.SearchResult#getId()}
     * method and {@link EntityReference#checkPrefixId(String, String)}.<br />
     * Can also filter which parts of the query are accessible to a remote user.
     * </p>
     * Declared static so instances don't retain a hidden reference to the
     * enclosing provider.
     */
    public static class SearchResultEntity {
        private final SearchResult searchResult;

        private SearchResultEntity(SearchResult searchResult) {
            this.searchResult = searchResult;
        }

        public String getReference() {
            return searchResult.getReference();
        }

        public String getContentId() {
            return searchResult.getId();
        }

        public float getScore() {
            return searchResult.getScore();
        }

        public String getSearchResult() {
            return searchResult.getSearchResult();
        }

        public String getTitle() {
            return searchResult.getTitle();
        }

        public String getTool() {
            return searchResult.getTool();
        }

        public String getUrl() {
            return searchResult.getUrl();
        }
    }
}
| search/search-impl/impl/src/java/org/sakaiproject/search/entitybroker/SearchEntityProvider.java | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/search/trunk/search-impl/impl/src/java/org/sakaiproject/search/component/Messages.java $
* $Id: Messages.java 59685 2009-04-03 23:36:24Z [email protected] $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Aaron Zeckoski (azeckoski @ gmail.com) (aaronz @ vt.edu) (azeckoski @ unicon.net)
**********************************************************************************/
package org.sakaiproject.search.entitybroker;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.entitybroker.entityprovider.capabilities.CollectionResolvable;
import org.sakaiproject.entitybroker.entityprovider.capabilities.Outputable;
import org.sakaiproject.entitybroker.entityprovider.capabilities.Describeable;
import org.sakaiproject.entitybroker.entityprovider.extension.Formats;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.entitybroker.entityprovider.search.Search;
import org.sakaiproject.entitybroker.entityprovider.search.Restriction;
import org.sakaiproject.entitybroker.util.AbstractEntityProvider;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.search.api.InvalidSearchQueryException;
import org.sakaiproject.search.api.SearchService;
import org.sakaiproject.search.api.SearchList;
import org.sakaiproject.search.api.SearchResult;
import org.sakaiproject.user.api.UserDirectoryService;
/**
 * Provides basic search functionality via EB.
 *
 * @author Adrian Fish ([email protected])
 */
public class SearchEntityProvider extends AbstractEntityProvider implements CollectionResolvable,Outputable,Describeable {

    private static Log log = LogFactory.getLog(SearchEntityProvider.class);

    private SearchService searchService = null;

    public void setSearchService(SearchService searchService) {
        this.searchService = searchService;
    }

    private SiteService siteService = null;

    public void setSiteService(SiteService siteService) {
        this.siteService = siteService;
    }

    public UserDirectoryService userDirectoryService;

    public void setUserDirectoryService(UserDirectoryService userDirectoryService) {
        this.userDirectoryService = userDirectoryService;
    }

    /**
     * @return the EntityBroker prefix for this provider, "search"
     */
    public String getEntityPrefix() {
        return "search";
    }

    /**
     * @return the output formats this provider can render, JSON and XML
     */
    public String[] getHandledOutputFormats() {
        return new String[] { Formats.JSON, Formats.XML };
    }

    /**
     * Runs a search and returns the matching results.
     *
     * @param ref    entity reference (unused)
     * @param search must carry a "searchTerms" restriction; may carry a comma
     *               separated "contexts" restriction and a "tool" restriction
     * @return a list of {@link SearchResultEntity} wrappers
     * @throws IllegalArgumentException if no search terms were supplied or the
     *                                  query expression is invalid
     */
    public List<?> getEntities(EntityReference ref, Search search) {
        Restriction searchTermsRestriction = search.getRestrictionByProperty("searchTerms");

        if(searchTermsRestriction == null)
            throw new IllegalArgumentException("No searchTerms supplied");

        String searchTerms = searchTermsRestriction.getStringValue();

        // fix up the search limits: cap pages at 50 results
        if (search.getLimit() > 50 || search.getLimit() == 0) {
            search.setLimit(50);
        }
        if (search.getStart() > 49) {
            search.setStart(0);
        }

        List<String> contexts = null;

        Restriction contextsRestriction = search.getRestrictionByProperty("contexts");

        if(contextsRestriction != null) {
            String[] contextsArray = contextsRestriction.getStringValue().split(",");
            contexts = Arrays.asList(contextsArray);
        }
        else {
            // No contexts supplied. Get all the sites the current user is a member of
            contexts = getAllUsersSites();
        }

        SearchList searchResults;
        try {
            searchResults = searchService.search(searchTerms,contexts,(int) search.getStart(),(int) search.getLimit(),"normal","normal");
        } catch (InvalidSearchQueryException e) {
            // Chain the cause so the underlying parse failure is not lost
            throw new IllegalArgumentException(searchTerms + " is not a valid query expression", e);
        }

        // Optional filter on the tool that produced each result
        Restriction toolRestriction = search.getRestrictionByProperty("tool");

        String tool = null;

        if(toolRestriction != null) {
            tool = toolRestriction.getStringValue();
        }

        List<SearchResultEntity> results = new ArrayList<SearchResultEntity>();

        for(SearchResult result : searchResults) {
            if(tool == null || result.getTool().equalsIgnoreCase(tool))
                results.add(new SearchResultEntity(result));
        }

        return results;
    }

    /**
     * Single-entity resolution is not supported by this provider.
     *
     * @return always {@code null}
     */
    public Object getEntity(EntityReference ref) {
        // Use the class logger instead of writing debug output to stdout
        if (log.isDebugEnabled()) {
            log.debug("getEntity(" + ref.getReference() + ")");
        }
        return null;
    }

    /**
     * @return the ids of all sites the current user can access, plus the
     *         user's own workspace site
     */
    private List<String> getAllUsersSites() {
        List<Site> sites = siteService.getSites(
                SiteService.SelectionType.ACCESS,null, null, null, null, null);
        // +1 leaves room for the user's own workspace site added below.
        List<String> siteIds = new ArrayList<String>(sites.size() + 1);
        for (Site site: sites) {
            if (site != null && site.getId() != null) {
                siteIds.add(site.getId());
            }
        }
        siteIds.add(siteService.getUserSiteId(
                userDirectoryService.getCurrentUser().getId()));
        return siteIds;
    }

    /**
     * Wraps a {@link SearchResult} for serialisation through EntityBroker.
     * Declared static so instances don't retain a hidden reference to the
     * enclosing provider.
     */
    public static class SearchResultEntity {

        private SearchResult result = null;

        private SearchResultEntity(SearchResult result) {
            this.result = result;
        }

        public String getId() {
            return result.getId();
        }

        public float getScore() {
            return result.getScore();
        }

        public String getSearchResult() {
            return result.getSearchResult();
        }

        public String getTitle() {
            return result.getTitle();
        }

        public String getTool() {
            return result.getTool();
        }

        public String getUrl() {
            return result.getUrl();
        }
    }
}
| SRCH-117 Split the code in SearchEntityProvider and add documentation, patch from Colin Hebert
git-svn-id: b4f6c67d3bb67018ac9204555d8d121f55b59527@124128 66ffb92e-73f9-0310-93c1-f5514f145a0a
| search/search-impl/impl/src/java/org/sakaiproject/search/entitybroker/SearchEntityProvider.java | SRCH-117 Split the code in SearchEntityProvider and add documentation, patch from Colin Hebert | <ide><path>earch/search-impl/impl/src/java/org/sakaiproject/search/entitybroker/SearchEntityProvider.java
<ide>
<ide> package org.sakaiproject.search.entitybroker;
<ide>
<del>import java.util.List;
<add>import org.sakaiproject.entitybroker.EntityReference;
<add>import org.sakaiproject.entitybroker.EntityView;
<add>import org.sakaiproject.entitybroker.entityprovider.annotations.EntityCustomAction;
<add>import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutable;
<add>import org.sakaiproject.entitybroker.entityprovider.capabilities.Describeable;
<add>import org.sakaiproject.entitybroker.entityprovider.capabilities.Outputable;
<add>import org.sakaiproject.entitybroker.entityprovider.extension.Formats;
<add>import org.sakaiproject.entitybroker.entityprovider.search.Restriction;
<add>import org.sakaiproject.entitybroker.entityprovider.search.Search;
<add>import org.sakaiproject.entitybroker.util.AbstractEntityProvider;
<add>import org.sakaiproject.search.api.InvalidSearchQueryException;
<add>import org.sakaiproject.search.api.SearchList;
<add>import org.sakaiproject.search.api.SearchResult;
<add>import org.sakaiproject.search.api.SearchService;
<add>import org.sakaiproject.site.api.Site;
<add>import org.sakaiproject.site.api.SiteService;
<add>import org.sakaiproject.user.api.UserDirectoryService;
<add>
<ide> import java.util.ArrayList;
<ide> import java.util.Arrays;
<del>
<del>import org.apache.commons.logging.Log;
<del>import org.apache.commons.logging.LogFactory;
<del>
<del>import org.sakaiproject.entitybroker.entityprovider.capabilities.CollectionResolvable;
<del>import org.sakaiproject.entitybroker.entityprovider.capabilities.Outputable;
<del>import org.sakaiproject.entitybroker.entityprovider.capabilities.Describeable;
<del>import org.sakaiproject.entitybroker.entityprovider.extension.Formats;
<del>import org.sakaiproject.entitybroker.EntityReference;
<del>import org.sakaiproject.entitybroker.entityprovider.search.Search;
<del>import org.sakaiproject.entitybroker.entityprovider.search.Restriction;
<del>import org.sakaiproject.entitybroker.util.AbstractEntityProvider;
<del>import org.sakaiproject.site.api.Site;
<del>import org.sakaiproject.site.api.SiteService;
<del>import org.sakaiproject.search.api.InvalidSearchQueryException;
<del>import org.sakaiproject.search.api.SearchService;
<del>import org.sakaiproject.search.api.SearchList;
<del>import org.sakaiproject.search.api.SearchResult;
<del>import org.sakaiproject.user.api.UserDirectoryService;
<add>import java.util.List;
<ide>
<ide> /**
<del> * Provides basic search functionality via EB.
<add> * Entity provider for Entity broker giving access to search services through a an HTTP method
<ide> *
<ide> * @author Adrian Fish ([email protected])
<add> * @author Colin Hebert
<ide> */
<del>public class SearchEntityProvider extends AbstractEntityProvider implements CollectionResolvable,Outputable,Describeable {
<del>
<del> private static Log log = LogFactory.getLog(SearchEntityProvider.class);
<del>
<del> private SearchService searchService = null;
<del>
<del> public void setSearchService(SearchService searchService) {
<del> this.searchService = searchService;
<del> }
<del>
<del> private SiteService siteService = null;
<del>
<del> public void setSiteService(SiteService siteService) {
<del> this.siteService = siteService;
<del> }
<del>
<del> public UserDirectoryService userDirectoryService;
<del>
<del> public void setUserDirectoryService(UserDirectoryService userDirectoryService) {
<del> this.userDirectoryService = userDirectoryService;
<del> }
<del>
<del> public String getEntityPrefix() {
<del> return "search";
<del> }
<del>
<add>public class SearchEntityProvider extends AbstractEntityProvider implements ActionsExecutable, Outputable, Describeable {
<add> private static final int DEFAULT_RESULT_COUNT = 10;
<add> private UserDirectoryService userDirectoryService;
<add> private SearchService searchService;
<add> private SiteService siteService;
<add>
<add> /**
<add> * Name of the service, here "search"
<add> *
<add> * @return the constant name of this service
<add> */
<add> @Override
<add> public String getEntityPrefix() {
<add> return "search";
<add> }
<add>
<add> /**
<add> * Handled formats, such as JSon and XML
<add> *
<add> * @return formats supported
<add> */
<add> @Override
<ide> public String[] getHandledOutputFormats() {
<del> return new String[] { Formats.JSON, Formats.XML };
<del> }
<del>
<del> public List<?> getEntities(EntityReference ref, Search search) {
<del> Restriction searchTermsRestriction = search.getRestrictionByProperty("searchTerms");
<del>
<del> if(searchTermsRestriction == null)
<del> throw new IllegalArgumentException("No searchTerms supplied");
<del>
<del> String searchTerms = searchTermsRestriction.getStringValue();
<del>
<del> // fix up the search limits
<del> if (search.getLimit() > 50 || search.getLimit() == 0) {
<del> search.setLimit(50);
<del> }
<del> if (search.getStart() > 49) {
<del> search.setStart(0);
<del> }
<del>
<del> List<String> contexts = null;
<del>
<del> Restriction contextsRestriction = search.getRestrictionByProperty("contexts");
<del>
<del> if(contextsRestriction != null) {
<del> String[] contextsArray = contextsRestriction.getStringValue().split(",");
<del> contexts = Arrays.asList(contextsArray);
<del> }
<del> else {
<del> // No contexts supplied. Get all the sites the current user is a member of
<del> contexts = getAllUsersSites();
<del> }
<del>
<del> SearchList searchResults;
<del> try {
<del> searchResults = searchService.search(searchTerms,contexts,(int) search.getStart(),(int) search.getLimit(),"normal","normal");
<del> } catch (InvalidSearchQueryException e) {
<del> throw new IllegalArgumentException(searchTerms + " is not a valid query expression");
<del> }
<del>
<del> Restriction toolRestriction = search.getRestrictionByProperty("tool");
<del>
<del> String tool = null;
<del>
<del> if(toolRestriction != null) {
<del> tool = toolRestriction.getStringValue();
<del> }
<del>
<del> List<SearchResultEntity> results = new ArrayList<SearchResultEntity>();
<del>
<del> for(SearchResult result : searchResults) {
<del> if(tool == null || result.getTool().equalsIgnoreCase(tool))
<del> results.add(new SearchResultEntity(result));
<del> }
<del>
<del> return results;
<del> }
<del>
<del> public Object getEntity(EntityReference ref) {
<del> System.out.println("getEntity(" + ref.getReference() + ")");
<del> return null;
<del> }
<del>
<del> private List<String> getAllUsersSites() {
<del> List<Site> sites = siteService.getSites(
<del> SiteService.SelectionType.ACCESS,null, null, null, null, null);
<add> return new String[]{Formats.JSON, Formats.XML};
<add> }
<add>
<add> /**
<add> * Simple search method
<add> *
<add> * @param ref
<add> * @param search
<add> * @return a list of SearchResults
<add> */
<add> @EntityCustomAction(action = "search", viewKey = EntityView.VIEW_LIST)
<add> public List<SearchResultEntity> search(EntityReference ref, Search search) {
<add> try {
<add> //Get the query sent by the client
<add> String query = extractQuery(search.getRestrictionByProperty("searchTerms"));
<add> //Get the list of contexts (sites) used for this search, or every accessible site if the user hasn't provided a context list
<add> List<String> contexts = extractContexts(search.getRestrictionByProperty("contexts"));
<add>
<add> //Set the limit if it hasn't been set already
<add> if (search.getLimit() < 0)
<add> search.setLimit(DEFAULT_RESULT_COUNT);
<add>
<add> //Actual search
<add> SearchList searchResults = searchService.search(query, contexts, (int) search.getStart(), (int) search.getLimit());
<add>
<add> //Transforms SearchResult in a SearchResultEntity to avoid conflicts with the getId() method (see SRCH-85)
<add> List<SearchResultEntity> results = new ArrayList<SearchResultEntity>(searchResults.size());
<add> for (SearchResult result : searchResults) {
<add> results.add(new SearchResultEntity(result));
<add> }
<add>
<add> return results;
<add> } catch (InvalidSearchQueryException e) {
<add> throw new IllegalArgumentException(e);
<add> }
<add> }
<add>
<add> /**
<add> * Extract the query from users parameters
<add> *
<add> * @param searchTermsRestriction parameter given to EntityBroker
<add> * @return A search String
<add> * @throws IllegalArgumentException If no query has been provided
<add> */
<add> private String extractQuery(Restriction searchTermsRestriction) {
<add> if (searchTermsRestriction == null)
<add> throw new IllegalArgumentException("No searchTerms supplied");
<add>
<add> StringBuilder searchQuery = new StringBuilder();
<add> for (String term : (String[]) searchTermsRestriction.getArrayValue()) {
<add> //Concatenate with spaces, if the user wants a coma separated value he can easily enter comas in his query.
<add> searchQuery.append(term).append(' ');
<add> }
<add> return searchQuery.toString();
<add> }
<add>
<add> /**
<add> * Extract contexts from users parameters
<add> *
<add> * @param contextsRestriction parameter given to EntityBroker
<add> * @return A list of contexts (sites) where the search will be done
<add> */
<add> private List<String> extractContexts(Restriction contextsRestriction) {
<add> List<String> contexts;
<add> if (contextsRestriction != null)
<add> contexts = Arrays.asList((String[]) contextsRestriction.getArrayValue());
<add> else
<add> // No contexts supplied. Get all the sites the current user is a member of
<add> contexts = getAllSites();
<add> return contexts;
<add> }
<add>
<add> /**
<add> * Get all sites available for the current user
<add> *
<add> * @return a list of contexts (sites IDs) available for the current user
<add> */
<add> private List<String> getAllSites() {
<add> List<Site> sites = siteService.getSites(SiteService.SelectionType.ACCESS, null, null, null, null, null);
<ide> List<String> siteIds = new ArrayList<String>(sites.size());
<del> for (Site site: sites) {
<del> if (site != null && site.getId() != null) {
<add> for (Site site : sites) {
<add> if (site != null && site.getId() != null)
<ide> siteIds.add(site.getId());
<del> }
<del> }
<del> siteIds.add(siteService.getUserSiteId(
<del> userDirectoryService.getCurrentUser().getId()));
<add> }
<add>
<add> //Manually add the user's site
<add> siteIds.add(siteService.getUserSiteId(userDirectoryService.getCurrentUser().getId()));
<ide> return siteIds;
<ide> }
<ide>
<del> public class SearchResultEntity {
<del>
<del> private SearchResult result = null;
<del>
<del> private SearchResultEntity(SearchResult result) {
<del> this.result = result;
<del> }
<del>
<del> public String getId() {
<del> return result.getId();
<del> }
<del>
<del> public float getScore() {
<del> return result.getScore();
<del> }
<del>
<del> public String getSearchResult() {
<del> return result.getSearchResult();
<del> }
<del>
<del> public String getTitle() {
<del> return result.getTitle();
<del> }
<del>
<del> public String getTool() {
<del> return result.getTool();
<del> }
<del>
<del> public String getUrl() {
<del> return result.getUrl();
<del> }
<del> }
<add> //--------------------------
<add> //Spring injected components
<add> //--------------------------
<add> public void setSearchService(SearchService searchService) {
<add> this.searchService = searchService;
<add> }
<add>
<add> public void setSiteService(SiteService siteService) {
<add> this.siteService = siteService;
<add> }
<add>
<add> public void setUserDirectoryService(UserDirectoryService userDirectoryService) {
<add> this.userDirectoryService = userDirectoryService;
<add> }
<add>
<add> /**
<add> * A wrapper to customise the result sent through EntityBroker
<add> * <p>
<add> * Wraps a {@link SearchResult} to avoid issues with the {@link org.sakaiproject.search.api.SearchResult#getId()}
<add> * method and {@link EntityReference#checkPrefixId(String, String)}.<br />
<add> * Can also filter which parts of the query are accessible to a remote user.
<add> * </p>
<add> */
<add> public class SearchResultEntity {
<add> private final SearchResult searchResult;
<add>
<add> private SearchResultEntity(SearchResult searchResult) {
<add> this.searchResult = searchResult;
<add> }
<add>
<add> public String getReference() {
<add> return searchResult.getReference();
<add> }
<add>
<add> public String getContentId() {
<add> return searchResult.getId();
<add> }
<add>
<add> public float getScore() {
<add> return searchResult.getScore();
<add> }
<add>
<add> public String getSearchResult() {
<add> return searchResult.getSearchResult();
<add> }
<add>
<add> public String getTitle() {
<add> return searchResult.getTitle();
<add> }
<add>
<add> public String getTool() {
<add> return searchResult.getTool();
<add> }
<add>
<add> public String getUrl() {
<add> return searchResult.getUrl();
<add> }
<add> }
<ide> } |
|
Java | apache-2.0 | b7388d78071794dd3d6af26ae2d4e64813d13209 | 0 | davidsusu/command-history-manager | package hu.webarticum.chm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* The most simple history implementation which can store only a single command.
*/
public class SingleCommandHistory implements History {
private Command command = null;
private final List<Listener> listeners = new ArrayList<Listener>(1);
@Override
public Iterator<Command> iterator() {
if (command == null) {
return Collections.<Command>emptyIterator();
} else {
return Collections.singletonList(command).iterator();
}
}
@Override
public boolean isEmpty() {
return (command == null);
}
@Override
public boolean contains(Command command) {
return (command != null && command == this.command);
}
@Override
public boolean addAndExecute(Command command) {
if (!command.execute()) {
return false;
}
this.command = command;
return true;
}
@Override
public boolean hasNext() {
return (command != null && !command.isExecuted());
}
@Override
public Command getNext() {
return hasNext() ? command : null;
}
@Override
public boolean executeNext() {
return hasNext() ? command.execute() : false;
}
@Override
public boolean hasPrevious() {
return (command != null && command.isExecuted());
}
@Override
public Command getPrevious() {
return hasPrevious() ? command : null;
}
@Override
public boolean rollBackPrevious() {
return hasPrevious() ? command.rollBack() : false;
}
@Override
public boolean moveBefore(Command command) {
if (command != this.command) {
return false;
}
if (!this.command.isExecuted()) {
return true;
}
return this.command.rollBack();
}
@Override
public boolean moveAfter(Command command) {
if (command != this.command) {
return false;
}
if (this.command.isExecuted()) {
return true;
}
return this.command.execute();
}
@Override
public void addListener(Listener listener) {
listeners.add(listener);
}
@Override
public boolean removeListener(Listener listener) {
return listeners.remove(listener);
}
}
| command-history-manager/src/hu/webarticum/chm/SingleCommandHistory.java | package hu.webarticum.chm;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* The most simple history implementation which can store only a single command.
*/
public class SingleCommandHistory implements History {
private Command command = null;
private final List<Listener> listeners = new ArrayList<Listener>(1);
@Override
public Iterator<Command> iterator() {
if (command == null) {
return Collections.<Command>emptyIterator();
} else {
return Collections.singletonList(command).iterator();
}
}
@Override
public boolean isEmpty() {
return (command == null);
}
@Override
public boolean contains(Command command) {
return (command != null && command == this.command);
}
@Override
public boolean addAndExecute(Command command) {
if (!command.execute()) {
return false;
}
this.command = command;
return true;
}
@Override
public boolean hasNext() {
return (command != null && !command.isExecuted());
}
@Override
public Command getNext() {
return hasNext() ? command : null;
}
@Override
public boolean executeNext() {
return hasNext() ? command.execute() : false;
}
@Override
public boolean hasPrevious() {
return (command != null && command.isExecuted());
}
@Override
public Command getPrevious() {
return hasPrevious() ? command : null;
}
@Override
public boolean rollBackPrevious() {
return hasPrevious() ? command.rollBack() : false;
}
@Override
public boolean moveBefore(Command command) {
if (command != this.command) {
return false;
}
if (!this.command.isExecuted()) {
return true;
}
return this.command.rollBack();
}
@Override
public boolean moveAfter(Command command) {
if (command != this.command) {
return false;
}
if (this.command.isExecuted()) {
return true;
}
return this.command.execute();
}
@Override
public void addListener(Listener listener) {
listeners.add(listener);
}
@Override
public boolean removeListener(Listener listener) {
return listeners.remove(listener);
}
}
| Fix typo | command-history-manager/src/hu/webarticum/chm/SingleCommandHistory.java | Fix typo | <ide><path>ommand-history-manager/src/hu/webarticum/chm/SingleCommandHistory.java
<ide> import java.util.List;
<ide>
<ide> /**
<del> * The most simple history implementation which can store only a single command.
<add> * The most simple history implementation which can store only a single command.
<ide> */
<ide>
<ide> public class SingleCommandHistory implements History { |
|
Java | apache-2.0 | 8042cc870db31e33593b489b907bd50dbc18e823 | 0 | qtproject/qtqa-gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit | // Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.pgm.init;
import static com.google.inject.Scopes.SINGLETON;
import static com.google.inject.Stage.PRODUCTION;
import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.Die;
import com.google.gerrit.common.IoUtil;
import com.google.gerrit.metrics.DisabledMetricMaker;
import com.google.gerrit.metrics.MetricMaker;
import com.google.gerrit.pgm.init.api.ConsoleUI;
import com.google.gerrit.pgm.init.api.InitFlags;
import com.google.gerrit.pgm.init.api.InstallAllPlugins;
import com.google.gerrit.pgm.init.api.InstallPlugins;
import com.google.gerrit.pgm.init.api.LibraryDownload;
import com.google.gerrit.pgm.init.index.IndexManagerOnInit;
import com.google.gerrit.pgm.init.index.elasticsearch.ElasticIndexModuleOnInit;
import com.google.gerrit.pgm.init.index.lucene.LuceneIndexModuleOnInit;
import com.google.gerrit.pgm.util.SiteProgram;
import com.google.gerrit.server.config.GerritServerConfigModule;
import com.google.gerrit.server.config.SitePath;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.index.IndexModule;
import com.google.gerrit.server.plugins.JarScanner;
import com.google.gerrit.server.schema.NoteDbSchemaUpdater;
import com.google.gerrit.server.schema.UpdateUI;
import com.google.gerrit.server.securestore.SecureStore;
import com.google.gerrit.server.securestore.SecureStoreClassName;
import com.google.gerrit.server.securestore.SecureStoreProvider;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.StatementExecutor;
import com.google.inject.AbstractModule;
import com.google.inject.CreationException;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.TypeLiteral;
import com.google.inject.spi.Message;
import com.google.inject.util.Providers;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/** Initialize a new Gerrit installation. */
public class BaseInit extends SiteProgram {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final boolean standalone;
protected final PluginsDistribution pluginsDistribution;
private final List<String> pluginsToInstall;
private Injector sysInjector;
protected BaseInit(PluginsDistribution pluginsDistribution, List<String> pluginsToInstall) {
this.standalone = true;
this.pluginsDistribution = pluginsDistribution;
this.pluginsToInstall = pluginsToInstall;
}
public BaseInit(
Path sitePath,
boolean standalone,
PluginsDistribution pluginsDistribution,
List<String> pluginsToInstall) {
super(sitePath);
this.standalone = standalone;
this.pluginsDistribution = pluginsDistribution;
this.pluginsToInstall = pluginsToInstall;
}
@Override
public int run() throws Exception {
final SiteInit init = createSiteInit();
if (beforeInit(init)) {
return 0;
}
init.flags.autoStart = getAutoStart() && init.site.isNew;
init.flags.dev = isDev() && init.site.isNew;
init.flags.skipPlugins = skipPlugins();
init.flags.deleteCaches = getDeleteCaches();
init.flags.isNew = init.site.isNew;
final SiteRun run;
try {
init.initializer.run();
init.flags.deleteOnFailure = false;
Injector sysInjector = createSysInjector(init);
IndexManagerOnInit indexManager = sysInjector.getInstance(IndexManagerOnInit.class);
try {
indexManager.start();
run = createSiteRun(init);
run.upgradeSchema();
init.initializer.postRun(sysInjector);
} finally {
indexManager.stop();
}
} catch (Exception | Error failure) {
if (init.flags.deleteOnFailure) {
recursiveDelete(getSitePath());
}
throw failure;
}
System.err.println("Initialized " + getSitePath().toRealPath().normalize());
afterInit(run);
return 0;
}
protected boolean skipPlugins() {
return false;
}
protected String getSecureStoreLib() {
return null;
}
protected boolean skipAllDownloads() {
return false;
}
protected List<String> getSkippedDownloads() {
return Collections.emptyList();
}
/**
* Invoked before site init is called.
*
* @param init initializer instance.
* @throws Exception
*/
protected boolean beforeInit(SiteInit init) throws Exception {
return false;
}
/**
* Invoked after site init is called.
*
* @param run completed run instance.
* @throws Exception
*/
protected void afterInit(SiteRun run) throws Exception {}
protected List<String> getInstallPlugins() {
try {
if (pluginsToInstall != null && pluginsToInstall.isEmpty()) {
return Collections.emptyList();
}
List<String> names = pluginsDistribution.listPluginNames();
if (pluginsToInstall != null) {
names.removeIf(n -> !pluginsToInstall.contains(n));
}
return names;
} catch (FileNotFoundException e) {
logger.atWarning().log(
"Couldn't find distribution archive location. No plugin will be installed");
return null;
}
}
protected boolean installAllPlugins() {
return false;
}
protected boolean getAutoStart() {
return false;
}
public static class SiteInit {
public final SitePaths site;
final InitFlags flags;
final ConsoleUI ui;
final SitePathInitializer initializer;
@Inject
SiteInit(
final SitePaths site,
final InitFlags flags,
final ConsoleUI ui,
final SitePathInitializer initializer) {
this.site = site;
this.flags = flags;
this.ui = ui;
this.initializer = initializer;
}
}
private SiteInit createSiteInit() {
final ConsoleUI ui = getConsoleUI();
final Path sitePath = getSitePath();
final List<Module> m = new ArrayList<>();
final SecureStoreInitData secureStoreInitData = discoverSecureStoreClass();
final String currentSecureStoreClassName = getConfiguredSecureStoreClass();
if (secureStoreInitData != null
&& currentSecureStoreClassName != null
&& !currentSecureStoreClassName.equals(secureStoreInitData.className)) {
String err =
String.format(
"Different secure store was previously configured: %s. "
+ "Use SwitchSecureStore program to switch between implementations.",
currentSecureStoreClassName);
throw die(err);
}
m.add(new GerritServerConfigModule());
m.add(new InitModule(standalone));
m.add(
new AbstractModule() {
@Override
protected void configure() {
bind(ConsoleUI.class).toInstance(ui);
bind(Path.class).annotatedWith(SitePath.class).toInstance(sitePath);
List<String> plugins =
MoreObjects.firstNonNull(getInstallPlugins(), new ArrayList<String>());
bind(new TypeLiteral<List<String>>() {})
.annotatedWith(InstallPlugins.class)
.toInstance(plugins);
bind(new TypeLiteral<Boolean>() {})
.annotatedWith(InstallAllPlugins.class)
.toInstance(installAllPlugins());
bind(PluginsDistribution.class).toInstance(pluginsDistribution);
String secureStoreClassName;
if (secureStoreInitData != null) {
secureStoreClassName = secureStoreInitData.className;
} else {
secureStoreClassName = currentSecureStoreClassName;
}
if (secureStoreClassName != null) {
ui.message("Using secure store: %s\n", secureStoreClassName);
}
bind(SecureStoreInitData.class).toProvider(Providers.of(secureStoreInitData));
bind(String.class)
.annotatedWith(SecureStoreClassName.class)
.toProvider(Providers.of(secureStoreClassName));
bind(SecureStore.class).toProvider(SecureStoreProvider.class).in(SINGLETON);
bind(new TypeLiteral<List<String>>() {})
.annotatedWith(LibraryDownload.class)
.toInstance(getSkippedDownloads());
bind(Boolean.class).annotatedWith(LibraryDownload.class).toInstance(skipAllDownloads());
bind(MetricMaker.class).to(DisabledMetricMaker.class);
}
});
try {
return Guice.createInjector(PRODUCTION, m).getInstance(SiteInit.class);
} catch (CreationException ce) {
final Message first = ce.getErrorMessages().iterator().next();
Throwable why = first.getCause();
if (why instanceof Die) {
throw (Die) why;
}
final StringBuilder buf = new StringBuilder(ce.getMessage());
while (why != null) {
buf.append("\n");
buf.append(why.getMessage());
why = why.getCause();
if (why != null) {
buf.append("\n caused by ");
}
}
throw die(buf.toString(), new RuntimeException("InitInjector failed", ce));
}
}
protected ConsoleUI getConsoleUI() {
return ConsoleUI.getInstance(false);
}
private SecureStoreInitData discoverSecureStoreClass() {
String secureStore = getSecureStoreLib();
if (Strings.isNullOrEmpty(secureStore)) {
return null;
}
Path secureStoreLib = Paths.get(secureStore);
if (!Files.exists(secureStoreLib)) {
throw new InvalidSecureStoreException(String.format("File %s doesn't exist", secureStore));
}
try (JarScanner scanner = new JarScanner(secureStoreLib)) {
List<String> secureStores = scanner.findSubClassesOf(SecureStore.class);
if (secureStores.isEmpty()) {
throw new InvalidSecureStoreException(
String.format(
"Cannot find class implementing %s interface in %s",
SecureStore.class.getName(), secureStore));
}
if (secureStores.size() > 1) {
throw new InvalidSecureStoreException(
String.format(
"%s has more that one implementation of %s interface",
secureStore, SecureStore.class.getName()));
}
IoUtil.loadJARs(secureStoreLib);
return new SecureStoreInitData(secureStoreLib, secureStores.get(0));
} catch (IOException e) {
throw new InvalidSecureStoreException(String.format("%s is not a valid jar", secureStore));
}
}
public static class SiteRun {
public final ConsoleUI ui;
public final SitePaths site;
public final InitFlags flags;
final NoteDbSchemaUpdater noteDbSchemaUpdater;
final GitRepositoryManager repositoryManager;
@Inject
SiteRun(
ConsoleUI ui,
SitePaths site,
InitFlags flags,
NoteDbSchemaUpdater noteDbSchemaUpdater,
GitRepositoryManager repositoryManager) {
this.ui = ui;
this.site = site;
this.flags = flags;
this.noteDbSchemaUpdater = noteDbSchemaUpdater;
this.repositoryManager = repositoryManager;
}
void upgradeSchema() throws OrmException {
noteDbSchemaUpdater.update(new UpdateUIImpl(ui));
}
private static class UpdateUIImpl implements UpdateUI {
private final ConsoleUI consoleUi;
UpdateUIImpl(ConsoleUI consoleUi) {
this.consoleUi = consoleUi;
}
@Override
public void message(String message) {
System.err.println(message);
System.err.flush();
}
@Override
public boolean yesno(boolean defaultValue, String message) {
return consoleUi.yesno(defaultValue, message);
}
@Override
public void waitForUser() {
consoleUi.waitForUser();
}
@Override
public String readString(String defaultValue, Set<String> allowedValues, String message) {
return consoleUi.readString(defaultValue, allowedValues, message);
}
@Override
public boolean isBatch() {
return consoleUi.isBatch();
}
@Override
public void pruneSchema(StatementExecutor e, List<String> prune) {
// Do nothing in NoteDb.
// TODO(dborowitz): Remove this method in the base class.
}
}
}
private SiteRun createSiteRun(SiteInit init) {
return createSysInjector(init).getInstance(SiteRun.class);
}
private Injector createSysInjector(SiteInit init) {
if (sysInjector == null) {
final List<Module> modules = new ArrayList<>();
modules.add(
new AbstractModule() {
@Override
protected void configure() {
bind(ConsoleUI.class).toInstance(init.ui);
bind(InitFlags.class).toInstance(init.flags);
}
});
Injector dbInjector = createDbInjector();
switch (IndexModule.getIndexType(dbInjector)) {
case LUCENE:
modules.add(new LuceneIndexModuleOnInit());
break;
case ELASTICSEARCH:
modules.add(new ElasticIndexModuleOnInit());
break;
default:
throw new IllegalStateException("unsupported index.type");
}
sysInjector = dbInjector.createChildInjector(modules);
}
return sysInjector;
}
private static void recursiveDelete(Path path) {
final String msg = "warn: Cannot remove ";
try {
Files.walkFileTree(
path,
new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path f, BasicFileAttributes attrs) throws IOException {
try {
Files.delete(f);
} catch (IOException e) {
System.err.println(msg + f);
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException err) {
try {
// Previously warned if err was not null; if dir is not empty as a
// result, will cause an error that will be logged below.
Files.delete(dir);
} catch (IOException e) {
System.err.println(msg + dir);
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFileFailed(Path f, IOException e) {
System.err.println(msg + f);
return FileVisitResult.CONTINUE;
}
});
} catch (IOException e) {
System.err.println(msg + path);
}
}
protected boolean isDev() {
return false;
}
protected boolean getDeleteCaches() {
return false;
}
}
| java/com/google/gerrit/pgm/init/BaseInit.java | // Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.pgm.init;
import static com.google.inject.Scopes.SINGLETON;
import static com.google.inject.Stage.PRODUCTION;
import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.Die;
import com.google.gerrit.common.IoUtil;
import com.google.gerrit.metrics.DisabledMetricMaker;
import com.google.gerrit.metrics.MetricMaker;
import com.google.gerrit.pgm.init.api.ConsoleUI;
import com.google.gerrit.pgm.init.api.InitFlags;
import com.google.gerrit.pgm.init.api.InstallAllPlugins;
import com.google.gerrit.pgm.init.api.InstallPlugins;
import com.google.gerrit.pgm.init.api.LibraryDownload;
import com.google.gerrit.pgm.init.index.IndexManagerOnInit;
import com.google.gerrit.pgm.init.index.elasticsearch.ElasticIndexModuleOnInit;
import com.google.gerrit.pgm.init.index.lucene.LuceneIndexModuleOnInit;
import com.google.gerrit.pgm.util.SiteProgram;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.config.GerritServerConfigModule;
import com.google.gerrit.server.config.SitePath;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.index.IndexModule;
import com.google.gerrit.server.plugins.JarScanner;
import com.google.gerrit.server.schema.NoteDbSchemaUpdater;
import com.google.gerrit.server.schema.ReviewDbFactory;
import com.google.gerrit.server.schema.UpdateUI;
import com.google.gerrit.server.securestore.SecureStore;
import com.google.gerrit.server.securestore.SecureStoreClassName;
import com.google.gerrit.server.securestore.SecureStoreProvider;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
import com.google.gwtorm.server.StatementExecutor;
import com.google.inject.AbstractModule;
import com.google.inject.CreationException;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.TypeLiteral;
import com.google.inject.spi.Message;
import com.google.inject.util.Providers;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/** Initialize a new Gerrit installation. */
public class BaseInit extends SiteProgram {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final boolean standalone;
protected final PluginsDistribution pluginsDistribution;
private final List<String> pluginsToInstall;
private Injector sysInjector;
protected BaseInit(PluginsDistribution pluginsDistribution, List<String> pluginsToInstall) {
this.standalone = true;
this.pluginsDistribution = pluginsDistribution;
this.pluginsToInstall = pluginsToInstall;
}
public BaseInit(
Path sitePath,
boolean standalone,
PluginsDistribution pluginsDistribution,
List<String> pluginsToInstall) {
super(sitePath);
this.standalone = standalone;
this.pluginsDistribution = pluginsDistribution;
this.pluginsToInstall = pluginsToInstall;
}
@Override
public int run() throws Exception {
final SiteInit init = createSiteInit();
if (beforeInit(init)) {
return 0;
}
init.flags.autoStart = getAutoStart() && init.site.isNew;
init.flags.dev = isDev() && init.site.isNew;
init.flags.skipPlugins = skipPlugins();
init.flags.deleteCaches = getDeleteCaches();
init.flags.isNew = init.site.isNew;
final SiteRun run;
try {
init.initializer.run();
init.flags.deleteOnFailure = false;
Injector sysInjector = createSysInjector(init);
IndexManagerOnInit indexManager = sysInjector.getInstance(IndexManagerOnInit.class);
try {
indexManager.start();
run = createSiteRun(init);
run.upgradeSchema();
init.initializer.postRun(sysInjector);
} finally {
indexManager.stop();
}
} catch (Exception | Error failure) {
if (init.flags.deleteOnFailure) {
recursiveDelete(getSitePath());
}
throw failure;
}
System.err.println("Initialized " + getSitePath().toRealPath().normalize());
afterInit(run);
return 0;
}
protected boolean skipPlugins() {
return false;
}
protected String getSecureStoreLib() {
return null;
}
protected boolean skipAllDownloads() {
return false;
}
protected List<String> getSkippedDownloads() {
return Collections.emptyList();
}
/**
* Invoked before site init is called.
*
* @param init initializer instance.
* @throws Exception
*/
protected boolean beforeInit(SiteInit init) throws Exception {
return false;
}
/**
* Invoked after site init is called.
*
* @param run completed run instance.
* @throws Exception
*/
protected void afterInit(SiteRun run) throws Exception {}
protected List<String> getInstallPlugins() {
try {
if (pluginsToInstall != null && pluginsToInstall.isEmpty()) {
return Collections.emptyList();
}
List<String> names = pluginsDistribution.listPluginNames();
if (pluginsToInstall != null) {
names.removeIf(n -> !pluginsToInstall.contains(n));
}
return names;
} catch (FileNotFoundException e) {
logger.atWarning().log(
"Couldn't find distribution archive location. No plugin will be installed");
return null;
}
}
protected boolean installAllPlugins() {
return false;
}
protected boolean getAutoStart() {
return false;
}
public static class SiteInit {
public final SitePaths site;
final InitFlags flags;
final ConsoleUI ui;
final SitePathInitializer initializer;
@Inject
SiteInit(
final SitePaths site,
final InitFlags flags,
final ConsoleUI ui,
final SitePathInitializer initializer) {
this.site = site;
this.flags = flags;
this.ui = ui;
this.initializer = initializer;
}
}
private SiteInit createSiteInit() {
final ConsoleUI ui = getConsoleUI();
final Path sitePath = getSitePath();
final List<Module> m = new ArrayList<>();
final SecureStoreInitData secureStoreInitData = discoverSecureStoreClass();
final String currentSecureStoreClassName = getConfiguredSecureStoreClass();
if (secureStoreInitData != null
&& currentSecureStoreClassName != null
&& !currentSecureStoreClassName.equals(secureStoreInitData.className)) {
String err =
String.format(
"Different secure store was previously configured: %s. "
+ "Use SwitchSecureStore program to switch between implementations.",
currentSecureStoreClassName);
throw die(err);
}
m.add(new GerritServerConfigModule());
m.add(new InitModule(standalone));
m.add(
new AbstractModule() {
@Override
protected void configure() {
bind(ConsoleUI.class).toInstance(ui);
bind(Path.class).annotatedWith(SitePath.class).toInstance(sitePath);
List<String> plugins =
MoreObjects.firstNonNull(getInstallPlugins(), new ArrayList<String>());
bind(new TypeLiteral<List<String>>() {})
.annotatedWith(InstallPlugins.class)
.toInstance(plugins);
bind(new TypeLiteral<Boolean>() {})
.annotatedWith(InstallAllPlugins.class)
.toInstance(installAllPlugins());
bind(PluginsDistribution.class).toInstance(pluginsDistribution);
String secureStoreClassName;
if (secureStoreInitData != null) {
secureStoreClassName = secureStoreInitData.className;
} else {
secureStoreClassName = currentSecureStoreClassName;
}
if (secureStoreClassName != null) {
ui.message("Using secure store: %s\n", secureStoreClassName);
}
bind(SecureStoreInitData.class).toProvider(Providers.of(secureStoreInitData));
bind(String.class)
.annotatedWith(SecureStoreClassName.class)
.toProvider(Providers.of(secureStoreClassName));
bind(SecureStore.class).toProvider(SecureStoreProvider.class).in(SINGLETON);
bind(new TypeLiteral<List<String>>() {})
.annotatedWith(LibraryDownload.class)
.toInstance(getSkippedDownloads());
bind(Boolean.class).annotatedWith(LibraryDownload.class).toInstance(skipAllDownloads());
bind(MetricMaker.class).to(DisabledMetricMaker.class);
}
});
try {
return Guice.createInjector(PRODUCTION, m).getInstance(SiteInit.class);
} catch (CreationException ce) {
final Message first = ce.getErrorMessages().iterator().next();
Throwable why = first.getCause();
if (why instanceof Die) {
throw (Die) why;
}
final StringBuilder buf = new StringBuilder(ce.getMessage());
while (why != null) {
buf.append("\n");
buf.append(why.getMessage());
why = why.getCause();
if (why != null) {
buf.append("\n caused by ");
}
}
throw die(buf.toString(), new RuntimeException("InitInjector failed", ce));
}
}
protected ConsoleUI getConsoleUI() {
return ConsoleUI.getInstance(false);
}
private SecureStoreInitData discoverSecureStoreClass() {
String secureStore = getSecureStoreLib();
if (Strings.isNullOrEmpty(secureStore)) {
return null;
}
Path secureStoreLib = Paths.get(secureStore);
if (!Files.exists(secureStoreLib)) {
throw new InvalidSecureStoreException(String.format("File %s doesn't exist", secureStore));
}
try (JarScanner scanner = new JarScanner(secureStoreLib)) {
List<String> secureStores = scanner.findSubClassesOf(SecureStore.class);
if (secureStores.isEmpty()) {
throw new InvalidSecureStoreException(
String.format(
"Cannot find class implementing %s interface in %s",
SecureStore.class.getName(), secureStore));
}
if (secureStores.size() > 1) {
throw new InvalidSecureStoreException(
String.format(
"%s has more that one implementation of %s interface",
secureStore, SecureStore.class.getName()));
}
IoUtil.loadJARs(secureStoreLib);
return new SecureStoreInitData(secureStoreLib, secureStores.get(0));
} catch (IOException e) {
throw new InvalidSecureStoreException(String.format("%s is not a valid jar", secureStore));
}
}
  /**
   * Value holder for the objects needed after init completes; instances are created by Guice
   * (see createSiteRun/createSysInjector below), not constructed directly.
   */
  public static class SiteRun {
    public final ConsoleUI ui;
    public final SitePaths site;
    public final InitFlags flags;
    final NoteDbSchemaUpdater noteDbSchemaUpdater;
    // NOTE(review): 'schema' is injected but never referenced in this class body —
    // candidate for removal; confirm no reflective/external use first.
    final SchemaFactory<ReviewDb> schema;
    final GitRepositoryManager repositoryManager;

    @Inject
    SiteRun(
        ConsoleUI ui,
        SitePaths site,
        InitFlags flags,
        NoteDbSchemaUpdater noteDbSchemaUpdater,
        @ReviewDbFactory SchemaFactory<ReviewDb> schema,
        GitRepositoryManager repositoryManager) {
      this.ui = ui;
      this.site = site;
      this.flags = flags;
      this.noteDbSchemaUpdater = noteDbSchemaUpdater;
      this.schema = schema;
      this.repositoryManager = repositoryManager;
    }

    /** Runs the NoteDb schema update, reporting progress through the console UI. */
    void upgradeSchema() throws OrmException {
      noteDbSchemaUpdater.update(new UpdateUIImpl(ui));
    }

    /** Adapts the interactive ConsoleUI to the schema updater's UpdateUI callback interface. */
    private static class UpdateUIImpl implements UpdateUI {
      private final ConsoleUI consoleUi;

      UpdateUIImpl(ConsoleUI consoleUi) {
        this.consoleUi = consoleUi;
      }

      @Override
      public void message(String message) {
        // Progress goes to stderr so it is visible even when stdout is redirected.
        System.err.println(message);
        System.err.flush();
      }

      @Override
      public boolean yesno(boolean defaultValue, String message) {
        return consoleUi.yesno(defaultValue, message);
      }

      @Override
      public void waitForUser() {
        consoleUi.waitForUser();
      }

      @Override
      public String readString(String defaultValue, Set<String> allowedValues, String message) {
        return consoleUi.readString(defaultValue, allowedValues, message);
      }

      @Override
      public boolean isBatch() {
        return consoleUi.isBatch();
      }

      @Override
      public void pruneSchema(StatementExecutor e, List<String> prune) {
        // Do nothing in NoteDb.
        // TODO(dborowitz): Remove this method in the base class.
      }
    }
  }
private SiteRun createSiteRun(SiteInit init) {
return createSysInjector(init).getInstance(SiteRun.class);
}
  /**
   * Lazily builds (and caches in {@code sysInjector}) the injector used for the post-init run.
   *
   * <p>The result is a child of the database injector, extended with the console UI, the init
   * flags, and the index module matching the configured index type (Lucene or Elasticsearch).
   */
  private Injector createSysInjector(SiteInit init) {
    if (sysInjector == null) {
      final List<Module> modules = new ArrayList<>();
      modules.add(
          new AbstractModule() {
            @Override
            protected void configure() {
              bind(ConsoleUI.class).toInstance(init.ui);
              bind(InitFlags.class).toInstance(init.flags);
            }
          });
      Injector dbInjector = createDbInjector();
      // Select the index implementation from the site configuration.
      switch (IndexModule.getIndexType(dbInjector)) {
        case LUCENE:
          modules.add(new LuceneIndexModuleOnInit());
          break;
        case ELASTICSEARCH:
          modules.add(new ElasticIndexModuleOnInit());
          break;
        default:
          throw new IllegalStateException("unsupported index.type");
      }
      sysInjector = dbInjector.createChildInjector(modules);
    }
    return sysInjector;
  }
private static void recursiveDelete(Path path) {
final String msg = "warn: Cannot remove ";
try {
Files.walkFileTree(
path,
new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path f, BasicFileAttributes attrs) throws IOException {
try {
Files.delete(f);
} catch (IOException e) {
System.err.println(msg + f);
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException err) {
try {
// Previously warned if err was not null; if dir is not empty as a
// result, will cause an error that will be logged below.
Files.delete(dir);
} catch (IOException e) {
System.err.println(msg + dir);
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFileFailed(Path f, IOException e) {
System.err.println(msg + f);
return FileVisitResult.CONTINUE;
}
});
} catch (IOException e) {
System.err.println(msg + path);
}
}
  /** Whether this init run targets a developer setup; presumably overridden by subclasses. */
  protected boolean isDev() {
    return false;
  }

  /** Whether cached data should be deleted during init; presumably overridden by subclasses. */
  protected boolean getDeleteCaches() {
    return false;
  }
}
| BaseInit: Remove SchemaFactory
Change-Id: Ie6303b85cf0d3286293f48b6489781bfd1ccf6aa
| java/com/google/gerrit/pgm/init/BaseInit.java | BaseInit: Remove SchemaFactory | <ide><path>ava/com/google/gerrit/pgm/init/BaseInit.java
<ide> import com.google.gerrit.pgm.init.index.elasticsearch.ElasticIndexModuleOnInit;
<ide> import com.google.gerrit.pgm.init.index.lucene.LuceneIndexModuleOnInit;
<ide> import com.google.gerrit.pgm.util.SiteProgram;
<del>import com.google.gerrit.reviewdb.server.ReviewDb;
<ide> import com.google.gerrit.server.config.GerritServerConfigModule;
<ide> import com.google.gerrit.server.config.SitePath;
<ide> import com.google.gerrit.server.config.SitePaths;
<ide> import com.google.gerrit.server.index.IndexModule;
<ide> import com.google.gerrit.server.plugins.JarScanner;
<ide> import com.google.gerrit.server.schema.NoteDbSchemaUpdater;
<del>import com.google.gerrit.server.schema.ReviewDbFactory;
<ide> import com.google.gerrit.server.schema.UpdateUI;
<ide> import com.google.gerrit.server.securestore.SecureStore;
<ide> import com.google.gerrit.server.securestore.SecureStoreClassName;
<ide> import com.google.gerrit.server.securestore.SecureStoreProvider;
<ide> import com.google.gwtorm.server.OrmException;
<del>import com.google.gwtorm.server.SchemaFactory;
<ide> import com.google.gwtorm.server.StatementExecutor;
<ide> import com.google.inject.AbstractModule;
<ide> import com.google.inject.CreationException;
<ide> public final SitePaths site;
<ide> public final InitFlags flags;
<ide> final NoteDbSchemaUpdater noteDbSchemaUpdater;
<del> final SchemaFactory<ReviewDb> schema;
<ide> final GitRepositoryManager repositoryManager;
<ide>
<ide> @Inject
<ide> SitePaths site,
<ide> InitFlags flags,
<ide> NoteDbSchemaUpdater noteDbSchemaUpdater,
<del> @ReviewDbFactory SchemaFactory<ReviewDb> schema,
<ide> GitRepositoryManager repositoryManager) {
<ide> this.ui = ui;
<ide> this.site = site;
<ide> this.flags = flags;
<ide> this.noteDbSchemaUpdater = noteDbSchemaUpdater;
<del> this.schema = schema;
<ide> this.repositoryManager = repositoryManager;
<ide> }
<ide> |
|
Java | mit | d14f6277138edc60584547091dfbda413de16ca8 | 0 | EasyPost/easypost-java | /**
* EasyPostResource.java
* This file is a part of EasyPost API SDK.
* (c) 2022 EasyPost
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
package com.easypost.net;
import com.easypost.EasyPost;
import com.easypost.exception.EasyPostException;
import com.easypost.model.Event;
import com.easypost.model.EventDeserializer;
import com.easypost.model.Fee;
import com.easypost.model.Rate;
import com.easypost.model.RateDeserializer;
import com.easypost.model.Shipment;
import com.easypost.model.SmartrateCollection;
import com.easypost.model.SmartrateCollectionDeserializer;
import com.easypost.model.TrackingDetail;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.net.URLStreamHandler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
public abstract class EasyPostResource {
    // Contact address surfaced in user-facing error messages.
    public static final String EASYPOST_SUPPORT_EMAIL = "[email protected]";
    // Network timeouts (milliseconds) and the App Engine fetch deadline (seconds).
    private static final int DEFAULT_CONNECT_TIMEOUT_MILLISECONDS = 30000;
    private static final int DEFAULT_READ_TIMEOUT_MILLISECONDS = 60000;
    private static final double APP_ENGINE_DEFAULT_TIMEOUT_SECONDS = 20.0;

    // Shared Gson for API payloads: snake_case field mapping plus custom deserializers
    // for Event, Rate and SmartrateCollection.
    public static final Gson GSON =
            new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
                    .registerTypeAdapter(Event.class, new EventDeserializer())
                    .registerTypeAdapter(Rate.class, new RateDeserializer())
                    .registerTypeAdapter(SmartrateCollection.class, new SmartrateCollectionDeserializer()).create();

    // Variant used by prettyPrint(): indented output with nulls serialized.
    public static final Gson PRETTY_PRINT_GSON = new GsonBuilder().setPrettyPrinting().serializeNulls()
            .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
            .registerTypeAdapter(Event.class, new EventDeserializer()).create();
/**
* Returns a string representation of the object.
*/
@Override
public String toString() {
return (String) this.getIdString();
}
/**
* Pretty print the JSON representation of the object.
*
* @return the JSON representation of the object.
*/
public String prettyPrint() {
return String.format("<%s@%s id=%s> JSON: %s", this.getClass().getName(), System.identityHashCode(this),
this.getIdString(), PRETTY_PRINT_GSON.toJson(this));
}
private Object getIdString() {
try {
Field idField = this.getClass().getDeclaredField("id");
return idField.get(this);
} catch (SecurityException e) {
return "";
} catch (NoSuchFieldException e) {
return "";
} catch (IllegalArgumentException e) {
return "";
} catch (IllegalAccessException e) {
return "";
}
}
private static String className(final Class<?> clazz) {
return clazz.getSimpleName().replaceAll("([a-z])([A-Z])", "$1_$2").toLowerCase().replace("$", "");
}
protected static String singleClassURL(final Class<?> clazz) {
return String.format("%s/%s", EasyPost.API_BASE, className(clazz));
}
protected static String classURL(final Class<?> clazz) {
String singleURL = singleClassURL(clazz);
if (singleURL.charAt(singleURL.length() - 1) == 's' || singleURL.charAt(singleURL.length() - 1) == 'h') {
return String.format("%ses", singleClassURL(clazz));
} else {
return String.format("%ss", singleClassURL(clazz));
}
}
protected static String instanceURL(final Class<?> clazz, final String id) {
return String.format("%s/%s", classURL(clazz), id);
}
    /**
     * Merge two EasyPostResource objects.
     *
     * <p>Copies every non-null value returned by {@code update}'s getters onto {@code obj} via
     * the correspondingly named setters. Only getters declared directly on {@code obj}'s class
     * are considered, plus the shared accessors listed in {@link #GLOBAL_FIELD_ACCESSORS}.
     *
     * @param obj the base object
     * @param update the object to merge
     */
    public void merge(final EasyPostResource obj, final EasyPostResource update) {
        // Only merge when update is the same type as (or a subtype of) obj.
        if (!obj.getClass().isAssignableFrom(update.getClass())) {
            return;
        }
        Method[] methods = obj.getClass().getMethods();
        for (Method fromMethod : methods) {
            if ((fromMethod.getDeclaringClass().equals(obj.getClass()) && fromMethod.getName().startsWith("get")) ||
                    GLOBAL_FIELD_ACCESSORS.contains(fromMethod.getName())) {
                String fromName = fromMethod.getName();
                String toName = fromName.replace("get", "set");
                try {
                    Object value = fromMethod.invoke(update, (Object[]) null);
                    // Null results are skipped so absent fields never clobber existing data.
                    if (value != null) {
                        Method toMethod = obj.getClass().getMethod(toName, fromMethod.getReturnType());
                        toMethod.invoke(obj, value);
                    }
                } catch (Exception e) {
                    // NOTE(review): reflection failures are only printed, never propagated —
                    // confirm this best-effort behavior is intended.
                    e.printStackTrace();
                }
            }
        }
    }
    // Character set used for all request/response encoding.
    public static final String CHARSET = "UTF-8";

    // JVM security property controlling DNS cache TTL; temporarily zeroed during requests.
    private static final String DNS_CACHE_TTL_PROPERTY_NAME = "networkaddress.cache.ttl";
    // Set this property to override your environment's default URLStreamHandler.
    private static final String CUSTOM_URL_STREAM_HANDLER_PROPERTY_NAME = "com.easypost.net.customURLStreamHandler";

    // HTTP verbs supported by the request helpers below.
    protected enum RequestMethod {
        GET, POST, DELETE, PUT
    }
private static String urlEncodePair(final String key, final String value) throws UnsupportedEncodingException {
return String.format("%s=%s", URLEncoder.encode(key, CHARSET), URLEncoder.encode(value, CHARSET));
}
static Map<String, String> getHeaders(String apiKey) {
Map<String, String> headers = new HashMap<String, String>();
headers.put("Accept-Charset", CHARSET);
headers.put("User-Agent", String.format("EasyPost/v2 JavaClient/%s Java/%s"
, EasyPost.VERSION, System.getProperty("java.version")));
if (apiKey == null) {
apiKey = EasyPost.apiKey;
}
headers.put("Authorization", String.format("Bearer %s", apiKey));
// debug headers
String[] propertyNames = { "os.name", "os.version", "os.arch", "java.version", "java.vendor", "java.vm.version",
"java.vm.vendor" };
Map<String, String> propertyMap = new HashMap<String, String>();
for (String propertyName : propertyNames) {
propertyMap.put(propertyName, System.getProperty(propertyName));
}
propertyMap.put("lang", "Java");
propertyMap.put("publisher", "EasyPost");
headers.put("X-Client-User-Agent", GSON.toJson(propertyMap));
return headers;
}
private static javax.net.ssl.HttpsURLConnection createEasyPostConnection(final String url, final String apiKey)
throws IOException {
URL easypostURL = null;
String customURLStreamHandlerClassName = System.getProperty(CUSTOM_URL_STREAM_HANDLER_PROPERTY_NAME, null);
if (customURLStreamHandlerClassName != null) {
// instantiate the custom handler provided
try {
Class<URLStreamHandler> clazz =
(Class<URLStreamHandler>) Class.forName(customURLStreamHandlerClassName);
Constructor<URLStreamHandler> constructor = clazz.getConstructor();
URLStreamHandler customHandler = constructor.newInstance();
easypostURL = new URL(null, url, customHandler);
} catch (ClassNotFoundException e) {
throw new IOException(e);
} catch (SecurityException e) {
throw new IOException(e);
} catch (NoSuchMethodException e) {
throw new IOException(e);
} catch (IllegalArgumentException e) {
throw new IOException(e);
} catch (InstantiationException e) {
throw new IOException(e);
} catch (IllegalAccessException e) {
throw new IOException(e);
} catch (InvocationTargetException e) {
throw new IOException(e);
}
} else {
easypostURL = new URL(url);
}
javax.net.ssl.HttpsURLConnection conn = (javax.net.ssl.HttpsURLConnection) easypostURL.openConnection();
conn.setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_MILLISECONDS);
int readTimeout;
if (EasyPost.readTimeout != 0) {
readTimeout = EasyPost.readTimeout;
} else {
readTimeout = DEFAULT_READ_TIMEOUT_MILLISECONDS;
}
conn.setReadTimeout(readTimeout);
conn.setUseCaches(false);
for (Map.Entry<String, String> header : getHeaders(apiKey).entrySet()) {
conn.setRequestProperty(header.getKey(), header.getValue());
}
return conn;
}
private static javax.net.ssl.HttpsURLConnection writeBody(final javax.net.ssl.HttpsURLConnection conn,
final JsonObject body) throws IOException {
if (body != null) {
conn.setDoOutput(true);
conn.setRequestProperty("Content-Type", "application/json");
OutputStream output = null;
try {
output = conn.getOutputStream();
String jsonString = body.toString();
output.write(jsonString.getBytes(CHARSET));
} finally {
if (output != null) {
output.close();
}
}
}
return conn;
}
private static javax.net.ssl.HttpsURLConnection createGetConnection(final String url, final String query,
final String apiKey) throws IOException {
String getURL = String.format("%s?%s", url, query);
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(getURL, apiKey);
conn.setRequestMethod("GET");
return conn;
}
private static javax.net.ssl.HttpsURLConnection createPostConnection(final String url, final JsonObject body,
final String apiKey) throws IOException {
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(url, apiKey);
conn.setRequestMethod("POST");
conn = writeBody(conn, body);
return conn;
}
private static javax.net.ssl.HttpsURLConnection createDeleteConnection(final String url, final String query,
final String apiKey) throws IOException {
String deleteUrl = String.format("%s?%s", url, query);
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(deleteUrl, apiKey);
conn.setRequestMethod("DELETE");
return conn;
}
private static javax.net.ssl.HttpsURLConnection createPutConnection(final String url, final JsonObject body,
final String apiKey) throws IOException {
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(url, apiKey);
conn.setRequestMethod("PUT");
writeBody(conn, body);
return conn;
}
private static JsonObject createBody(final Map<String, Object> params) {
Gson gson = new Gson();
return gson.toJsonTree(params).getAsJsonObject();
}
private static String createQuery(final Map<String, Object> params) throws UnsupportedEncodingException {
Map<String, String> flatParams = flattenParams(params);
StringBuilder queryStringBuilder = new StringBuilder();
for (Map.Entry<String, String> entry : flatParams.entrySet()) {
queryStringBuilder.append("&");
queryStringBuilder.append(urlEncodePair(entry.getKey(), entry.getValue()));
}
if (queryStringBuilder.length() > 0) {
queryStringBuilder.deleteCharAt(0);
}
return queryStringBuilder.toString();
}
private static Map<String, String> flattenParams(final Map<String, Object> params) {
if (params == null) {
return new HashMap<String, String>();
}
Map<String, String> flatParams = new HashMap<String, String>();
for (Map.Entry<String, Object> entry : params.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (value instanceof Map<?, ?>) {
Map<String, Object> flatNestedMap = new HashMap<String, Object>();
Map<?, ?> nestedMap = (Map<?, ?>) value;
for (Map.Entry<?, ?> nestedEntry : nestedMap.entrySet()) {
flatNestedMap.put(String.format("%s[%s]", key, nestedEntry.getKey()), nestedEntry.getValue());
}
flatParams.putAll(flattenParams(flatNestedMap));
} else if (value instanceof List) {
Map<String, Object> flatNestedMap = new HashMap<String, Object>();
List<?> nestedList = (List<?>) value;
for (int i = 0; i < nestedList.size(); i++) {
flatNestedMap.put(String.format("%s[%s]", key, i), nestedList.get(i));
flatParams.putAll(flattenParams(flatNestedMap));
}
} else if (value instanceof EasyPostResource) {
flatParams.put(String.format("%s[%s]", key, "id"), value.toString());
} else if (value != null) {
flatParams.put(key, value.toString());
}
}
return flatParams;
}
    // represents Errors returned as JSON
    // NOTE(review): ErrorContainer appears unreferenced within this file — confirm no
    // Gson-reflective use elsewhere before removing.
    private static class ErrorContainer {
        private EasyPostResource.Error error;
    }
    /**
     * JSON shape of an API error response; fields are populated reflectively by Gson from the
     * "type", "message", "code", "param" and "error" response keys.
     */
    private static class Error {
        @SuppressWarnings ("unused")
        private String type;
        private String message;
        private String code;
        private String param;
        private String error;

        public String getType() {
            return type;
        }

        public String getMessage() {
            return message;
        }

        public String getCode() {
            return code;
        }

        public String getParam() {
            return param;
        }

        public String getError() {
            return error;
        }
    }
private static String getResponseBody(final InputStream responseStream) throws IOException {
String rBody = new Scanner(responseStream, CHARSET).useDelimiter("\\A").next();
responseStream.close();
return rBody;
}
    /**
     * Executes one HTTP request over HttpsURLConnection and captures the status code and body.
     *
     * <p>2xx responses read the normal input stream; everything else reads the error stream.
     * 204 No Content yields an empty body. Connection failures are wrapped in an
     * {@link EasyPostException} with a user-facing message; the connection is always
     * disconnected before returning.
     */
    private static EasyPostResponse makeURLConnectionRequest(final EasyPostResource.RequestMethod method,
                                                             final String url, final String query,
                                                             final JsonObject body, final String apiKey)
            throws EasyPostException {
        javax.net.ssl.HttpsURLConnection conn = null;
        try {
            switch (method) {
                case GET:
                    conn = createGetConnection(url, query, apiKey);
                    break;
                case POST:
                    conn = createPostConnection(url, body, apiKey);
                    break;
                case PUT:
                    conn = createPutConnection(url, body, apiKey);
                    break;
                case DELETE:
                    conn = createDeleteConnection(url, query, apiKey);
                    break;
                default:
                    throw new EasyPostException(
                            String.format("Unrecognized HTTP method %s. Please contact EasyPost at %s.", method,
                                    EasyPostResource.EASYPOST_SUPPORT_EMAIL));
            }
            int rCode = conn.getResponseCode(); // sends the request
            String rBody = null;
            if (rCode == HttpURLConnection.HTTP_NO_CONTENT) {
                rBody = "";
            } else if (rCode >= HttpURLConnection.HTTP_OK && rCode < HttpURLConnection.HTTP_MULT_CHOICE) {
                rBody = getResponseBody(conn.getInputStream());
            } else {
                // Non-2xx: the payload (if any) arrives on the error stream.
                rBody = getResponseBody(conn.getErrorStream());
            }
            return new EasyPostResponse(rCode, rBody);
        } catch (IOException e) {
            throw new EasyPostException(String.format("Could not connect to EasyPost (%s). " +
                    "Please check your internet connection and try again. If this problem persists," +
                    "please contact us at %s.", EasyPost.API_BASE, EasyPostResource.EASYPOST_SUPPORT_EMAIL), e);
        } finally {
            if (conn != null) {
                conn.disconnect();
            }
        }
    }
    /**
     * Convenience overload that requires an API key; delegates to the six-argument
     * {@code request} with {@code apiKeyRequired = true}.
     */
    protected static <T> T request(final EasyPostResource.RequestMethod method, final String url,
                                   final Map<String, Object> params, final Class<T> clazz, final String apiKey)
            throws EasyPostException {
        return request(method, url, params, clazz, apiKey, true);
    }
    /**
     * Performs an API request with JVM DNS caching disabled for its duration.
     *
     * <p>The {@code networkaddress.cache.ttl} security property is set to 0 before the call and
     * restored afterwards — to its previous value, or to -1 ("cache forever") when it was unset.
     * If the security manager forbids changing the property, the request proceeds with the
     * environment's DNS caching untouched.
     */
    protected static <T> T request(final EasyPostResource.RequestMethod method, final String url,
                                   final Map<String, Object> params, final Class<T> clazz, final String apiKey,
                                   final boolean apiKeyRequired) throws EasyPostException {
        String originalDNSCacheTTL = null;
        boolean allowedToSetTTL = true;
        try {
            originalDNSCacheTTL = java.security.Security.getProperty(DNS_CACHE_TTL_PROPERTY_NAME);
            // disable DNS cache
            java.security.Security.setProperty(DNS_CACHE_TTL_PROPERTY_NAME, "0");
        } catch (SecurityException se) {
            allowedToSetTTL = false;
        }
        try {
            return _request(method, url, params, clazz, apiKey, apiKeyRequired);
        } finally {
            if (allowedToSetTTL) {
                if (originalDNSCacheTTL == null) {
                    // value unspecified by implementation
                    java.security.Security.setProperty(DNS_CACHE_TTL_PROPERTY_NAME, "-1"); // cache forever
                } else {
                    java.security.Security.setProperty(DNS_CACHE_TTL_PROPERTY_NAME, originalDNSCacheTTL);
                }
            }
        }
    }
    /**
     * Core request pipeline: validates the API key, encodes parameters (query string for
     * GET/DELETE, JSON body for POST/PUT), executes the request, checks the status code, and
     * deserializes the response body into {@code clazz} via the shared GSON.
     *
     * @throws EasyPostException if no key is available (when required), encoding fails, the
     *         connection fails, or the API returns a non-2xx status
     */
    @SuppressWarnings("checkstyle:methodname")
    protected static <T> T _request(final EasyPostResource.RequestMethod method, final String url,
                                    final Map<String, Object> params, final Class<T> clazz, String apiKey,
                                    final boolean apiKeyRequired) throws EasyPostException {
        if ((EasyPost.apiKey == null || EasyPost.apiKey.length() == 0) && (apiKey == null || apiKey.length() == 0)) {
            if (apiKeyRequired) {
                throw new EasyPostException(String.format(
                        "No API key provided. (set your API key using 'EasyPost.apiKey = {KEY}'. " +
                                "Your API key can be found in your EasyPost dashboard, " +
                                "or you can email us at %s for assistance.", EasyPostResource.EASYPOST_SUPPORT_EMAIL));
            }
        }

        // Per-call key overrides the global one.
        if (apiKey == null) {
            apiKey = EasyPost.apiKey;
        }

        // GET/DELETE carry parameters in the query string; POST/PUT carry a JSON body.
        String query = null;
        JsonObject body = null;
        if (params != null) {
            switch (method) {
                case GET:
                case DELETE:
                    try {
                        query = createQuery(params);
                    } catch (UnsupportedEncodingException e) {
                        throw new EasyPostException(
                                String.format("Unable to encode parameters to %s. Please email %s for assistance.",
                                        CHARSET, EasyPostResource.EASYPOST_SUPPORT_EMAIL), e);
                    }
                    break;
                case POST:
                case PUT:
                    try {
                        body = createBody(params);
                    } catch (Exception e) {
                        throw new EasyPostException(String.format(
                                "Unable to create JSON body from parameters. Please email %s for assistance.",
                                EasyPostResource.EASYPOST_SUPPORT_EMAIL), e);
                    }
                    break;
                default:
                    break;
            }
        }

        EasyPostResponse response;
        try {
            // HTTPSURLConnection verifies SSL cert by default
            response = makeURLConnectionRequest(method, url, query, body, apiKey);
        } catch (ClassCastException ce) {
            // appengine: its sandbox returns a non-HttpsURLConnection, detected here.
            String appEngineEnv = System.getProperty("com.google.appengine.runtime.environment", null);
            if (appEngineEnv != null) {
                response = makeAppEngineRequest(method, url, query, body, apiKey);
            } else {
                throw ce;
            }
        }

        int rCode = response.getResponseCode();
        String rBody = response.getResponseBody();
        if (rCode < HttpURLConnection.HTTP_OK || rCode >= HttpURLConnection.HTTP_MULT_CHOICE) {
            handleAPIError(rBody, rCode);
        }
        return GSON.fromJson(rBody, clazz);
    }
private static void handleAPIError(final String rBody, final int rCode) throws EasyPostException {
try {
EasyPostResource.Error error = GSON.fromJson(rBody, EasyPostResource.Error.class);
if (error.getError().length() > 0) {
throw new EasyPostException(error.getError());
}
throw new EasyPostException(error.getMessage(), error.getParam(), null);
} catch (Exception e) {
throw new EasyPostException(
String.format("An error occurred. Response code: %s Response body: %s", rCode, rBody));
}
}
private static EasyPostResponse makeAppEngineRequest(final RequestMethod method, String url, final String query,
final JsonObject body, final String apiKey)
throws EasyPostException {
String unknownErrorMessage = String.format(
"Sorry, an unknown error occurred while trying to use the Google App Engine runtime." +
"Please email %s for assistance.", EasyPostResource.EASYPOST_SUPPORT_EMAIL);
try {
if ((method == RequestMethod.GET || method == RequestMethod.DELETE) && query != null) {
url = String.format("%s?%s", url, query);
}
URL fetchURL = new URL(url);
Class<?> requestMethodClass = Class.forName("com.google.appengine.api.urlfetch.HTTPMethod");
Object httpMethod = requestMethodClass.getDeclaredField(method.name()).get(null);
Class<?> fetchOptionsBuilderClass = Class.forName("com.google.appengine.api.urlfetch.FetchOptions$Builder");
Object fetchOptions = null;
try {
fetchOptions = fetchOptionsBuilderClass.getDeclaredMethod("validateCertificate").invoke(null);
} catch (NoSuchMethodException e) {
System.err.printf(
"Warning: this App Engine SDK version does not allow verification of SSL certificates;" +
"this exposes you to a MITM attack. Please upgrade your App Engine SDK to >=1.5.0. " +
"If you have questions, email %s.%n", EasyPostResource.EASYPOST_SUPPORT_EMAIL);
fetchOptions = fetchOptionsBuilderClass.getDeclaredMethod("withDefaults").invoke(null);
}
Class<?> fetchOptionsClass = Class.forName("com.google.appengine.api.urlfetch.FetchOptions");
// Heroku times out after 30s, so leave some time for the API to return a response
fetchOptionsClass.getDeclaredMethod("setDeadline", java.lang.Double.class)
.invoke(fetchOptions, APP_ENGINE_DEFAULT_TIMEOUT_SECONDS);
Class<?> requestClass = Class.forName("com.google.appengine.api.urlfetch.HTTPRequest");
Object request = requestClass.getDeclaredConstructor(URL.class, requestMethodClass, fetchOptionsClass)
.newInstance(fetchURL, httpMethod, fetchOptions);
if ((method == RequestMethod.POST || method == RequestMethod.PUT) && body != null) {
String bodyString = body.toString();
requestClass.getDeclaredMethod("setPayload", byte[].class)
.invoke(request, (Object) bodyString.getBytes());
}
for (Map.Entry<String, String> header : getHeaders(apiKey).entrySet()) {
Class<?> httpHeaderClass = Class.forName("com.google.appengine.api.urlfetch.HTTPHeader");
Object reqHeader = httpHeaderClass.getDeclaredConstructor(String.class, String.class)
.newInstance(header.getKey(), header.getValue());
requestClass.getDeclaredMethod("setHeader", httpHeaderClass).invoke(request, reqHeader);
}
Class<?> urlFetchFactoryClass = Class.forName("com.google.appengine.api.urlfetch.URLFetchServiceFactory");
Object urlFetchService = urlFetchFactoryClass.getDeclaredMethod("getURLFetchService").invoke(null);
Method fetchMethod = urlFetchService.getClass().getDeclaredMethod("fetch", requestClass);
fetchMethod.setAccessible(true);
Object response = fetchMethod.invoke(urlFetchService, request);
int responseCode = (Integer) response.getClass().getDeclaredMethod("getResponseCode").invoke(response);
String responseBody =
new String((byte[]) response.getClass().getDeclaredMethod("getContent").invoke(response), CHARSET);
return new EasyPostResponse(responseCode, responseBody);
} catch (InvocationTargetException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (MalformedURLException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (NoSuchFieldException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (SecurityException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (NoSuchMethodException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (ClassNotFoundException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (IllegalArgumentException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (IllegalAccessException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (InstantiationException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (UnsupportedEncodingException e) {
throw new EasyPostException(unknownErrorMessage, e);
}
}
    // Accessor names merge() copies for every resource in addition to the getters declared
    // directly on the concrete class.
    public static final ArrayList<String> GLOBAL_FIELD_ACCESSORS =
            new ArrayList<>(Arrays.asList("getCreatedAt", "getUpdatedAt", "getFees"));

    // Timestamps and fees shared by all API resources.
    private Date createdAt;
    private Date updatedAt;
    private ArrayList<Fee> fees;
    /**
     * @return the Date this object was created
     */
    public Date getCreatedAt() {
        return createdAt;
    }

    /**
     * Set the Date this object was created.
     * (Invoked reflectively by {@link #merge}; see GLOBAL_FIELD_ACCESSORS.)
     *
     * @param createdAt the Date this object was created
     */
    public void setCreatedAt(final Date createdAt) {
        this.createdAt = createdAt;
    }

    /**
     * @return the Date this object was last updated
     */
    public Date getUpdatedAt() {
        return updatedAt;
    }

    /**
     * Set the Date this object was last updated.
     *
     * @param updatedAt the Date this object was last updated
     */
    public void setUpdatedAt(final Date updatedAt) {
        this.updatedAt = updatedAt;
    }

    /**
     * @return the Fees associated with this object
     */
    public ArrayList<Fee> getFees() {
        return fees;
    }

    /**
     * Set the Fees associated with this object.
     *
     * @param fees the Fees associated with this object
     */
    public void setFees(final ArrayList<Fee> fees) {
        this.fees = fees;
    }

    /**
     * @return the ID of this object (empty here; concrete resources supply real values)
     */
    public String getId() {
        return "";
    }
    /**
     * @return the API mode used to create this object (empty default; presumably overridden
     *         by concrete resources — confirm)
     */
    public String getMode() {
        return "";
    }

    // Batch
    // The following return empty defaults; they appear to exist so merge() and callers can
    // treat all resources uniformly, with concrete subclasses providing real values — confirm.

    /**
     * @return the list of shipments in this batch
     */
    public List<Shipment> getShipments() {
        return new ArrayList<Shipment>();
    }

    /**
     * @return the URL of the label for this object
     */
    public String getLabelUrl() {
        return "";
    }

    // Tracker

    /**
     * @return the ID of this shipment
     */
    public String getShipmentId() {
        return "";
    }

    /**
     * @return the tracking code of this shipment
     */
    public String getTrackingCode() {
        return "";
    }

    /**
     * @return the status of this object
     */
    public String getStatus() {
        return "";
    }

    /**
     * @return the tracking details of this shipment
     */
    public List<TrackingDetail> getTrackingDetails() {
        return new ArrayList<TrackingDetail>();
    }
}
| src/main/java/com/easypost/net/EasyPostResource.java | /**
* EasyPostResource.java
* This file is a part of EasyPost API SDK.
* (c) 2022 EasyPost
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
package com.easypost.net;
import com.easypost.EasyPost;
import com.easypost.exception.EasyPostException;
import com.easypost.model.Event;
import com.easypost.model.EventDeserializer;
import com.easypost.model.Fee;
import com.easypost.model.Rate;
import com.easypost.model.RateDeserializer;
import com.easypost.model.Shipment;
import com.easypost.model.SmartrateCollection;
import com.easypost.model.SmartrateCollectionDeserializer;
import com.easypost.model.TrackingDetail;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.net.URLStreamHandler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
public abstract class EasyPostResource {
    // Contact address surfaced in user-facing error messages.
    public static final String EASYPOST_SUPPORT_EMAIL = "[email protected]";
    // Network timeouts (milliseconds) and the App Engine fetch deadline (seconds).
    private static final int DEFAULT_CONNECT_TIMEOUT_MILLISECONDS = 30000;
    private static final int DEFAULT_READ_TIMEOUT_MILLISECONDS = 60000;
    private static final double APP_ENGINE_DEFAULT_TIMEOUT_SECONDS = 20.0;

    // Shared Gson for API payloads: snake_case field mapping plus custom deserializers
    // for Event, Rate and SmartrateCollection.
    public static final Gson GSON =
            new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
                    .registerTypeAdapter(Event.class, new EventDeserializer())
                    .registerTypeAdapter(Rate.class, new RateDeserializer())
                    .registerTypeAdapter(SmartrateCollection.class, new SmartrateCollectionDeserializer()).create();

    // Variant used by prettyPrint(): indented output with nulls serialized.
    public static final Gson PRETTY_PRINT_GSON = new GsonBuilder().setPrettyPrinting().serializeNulls()
            .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
            .registerTypeAdapter(Event.class, new EventDeserializer()).create();
/**
* Returns a string representation of the object.
*/
@Override
public String toString() {
return (String) this.getIdString();
}
/**
* Pretty print the JSON representation of the object.
*
* @return the JSON representation of the object.
*/
public String prettyPrint() {
return String.format("<%s@%s id=%s> JSON: %s", this.getClass().getName(), System.identityHashCode(this),
this.getIdString(), PRETTY_PRINT_GSON.toJson(this));
}
private Object getIdString() {
try {
Field idField = this.getClass().getDeclaredField("id");
return idField.get(this);
} catch (SecurityException e) {
return "";
} catch (NoSuchFieldException e) {
return "";
} catch (IllegalArgumentException e) {
return "";
} catch (IllegalAccessException e) {
return "";
}
}
private static String className(final Class<?> clazz) {
return clazz.getSimpleName().replaceAll("([a-z])([A-Z])", "$1_$2").toLowerCase().replace("$", "");
}
protected static String singleClassURL(final Class<?> clazz) {
return String.format("%s/%s", EasyPost.API_BASE, className(clazz));
}
protected static String classURL(final Class<?> clazz) {
String singleURL = singleClassURL(clazz);
if (singleURL.charAt(singleURL.length() - 1) == 's' || singleURL.charAt(singleURL.length() - 1) == 'h') {
return String.format("%ses", singleClassURL(clazz));
} else {
return String.format("%ss", singleClassURL(clazz));
}
}
protected static String instanceURL(final Class<?> clazz, final String id) {
return String.format("%s/%s", classURL(clazz), id);
}
/**
* Merge two EasyPostResource objects.
*
* @param obj the base object
* @param update the object to merge
*/
public void merge(final EasyPostResource obj, final EasyPostResource update) {
if (!obj.getClass().isAssignableFrom(update.getClass())) {
return;
}
Method[] methods = obj.getClass().getMethods();
for (Method fromMethod : methods) {
if ((fromMethod.getDeclaringClass().equals(obj.getClass()) && fromMethod.getName().startsWith("get")) ||
GLOBAL_FIELD_ACCESSORS.contains(fromMethod.getName())) {
String fromName = fromMethod.getName();
String toName = fromName.replace("get", "set");
try {
Object value = fromMethod.invoke(update, (Object[]) null);
if (value != null) {
Method toMethod = obj.getClass().getMethod(toName, fromMethod.getReturnType());
toMethod.invoke(obj, value);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
public static final String CHARSET = "UTF-8";
private static final String DNS_CACHE_TTL_PROPERTY_NAME = "networkaddress.cache.ttl";
// Set this property to override your environment's default URLStreamHandler.
private static final String CUSTOM_URL_STREAM_HANDLER_PROPERTY_NAME = "com.easypost.net.customURLStreamHandler";
protected enum RequestMethod {
GET, POST, DELETE, PUT
}
private static String urlEncodePair(final String key, final String value) throws UnsupportedEncodingException {
return String.format("%s=%s", URLEncoder.encode(key, CHARSET), URLEncoder.encode(value, CHARSET));
}
static Map<String, String> getHeaders(String apiKey) {
Map<String, String> headers = new HashMap<String, String>();
headers.put("Accept-Charset", CHARSET);
headers.put("User-Agent", String.format("EasyPost/v2 JavaClient/%s", EasyPost.VERSION));
if (apiKey == null) {
apiKey = EasyPost.apiKey;
}
headers.put("Authorization", String.format("Bearer %s", apiKey));
// debug headers
String[] propertyNames = { "os.name", "os.version", "os.arch", "java.version", "java.vendor", "java.vm.version",
"java.vm.vendor" };
Map<String, String> propertyMap = new HashMap<String, String>();
for (String propertyName : propertyNames) {
propertyMap.put(propertyName, System.getProperty(propertyName));
}
propertyMap.put("lang", "Java");
propertyMap.put("publisher", "EasyPost");
headers.put("X-Client-User-Agent", GSON.toJson(propertyMap));
return headers;
}
private static javax.net.ssl.HttpsURLConnection createEasyPostConnection(final String url, final String apiKey)
throws IOException {
URL easypostURL = null;
String customURLStreamHandlerClassName = System.getProperty(CUSTOM_URL_STREAM_HANDLER_PROPERTY_NAME, null);
if (customURLStreamHandlerClassName != null) {
// instantiate the custom handler provided
try {
Class<URLStreamHandler> clazz =
(Class<URLStreamHandler>) Class.forName(customURLStreamHandlerClassName);
Constructor<URLStreamHandler> constructor = clazz.getConstructor();
URLStreamHandler customHandler = constructor.newInstance();
easypostURL = new URL(null, url, customHandler);
} catch (ClassNotFoundException e) {
throw new IOException(e);
} catch (SecurityException e) {
throw new IOException(e);
} catch (NoSuchMethodException e) {
throw new IOException(e);
} catch (IllegalArgumentException e) {
throw new IOException(e);
} catch (InstantiationException e) {
throw new IOException(e);
} catch (IllegalAccessException e) {
throw new IOException(e);
} catch (InvocationTargetException e) {
throw new IOException(e);
}
} else {
easypostURL = new URL(url);
}
javax.net.ssl.HttpsURLConnection conn = (javax.net.ssl.HttpsURLConnection) easypostURL.openConnection();
conn.setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_MILLISECONDS);
int readTimeout;
if (EasyPost.readTimeout != 0) {
readTimeout = EasyPost.readTimeout;
} else {
readTimeout = DEFAULT_READ_TIMEOUT_MILLISECONDS;
}
conn.setReadTimeout(readTimeout);
conn.setUseCaches(false);
for (Map.Entry<String, String> header : getHeaders(apiKey).entrySet()) {
conn.setRequestProperty(header.getKey(), header.getValue());
}
return conn;
}
private static javax.net.ssl.HttpsURLConnection writeBody(final javax.net.ssl.HttpsURLConnection conn,
final JsonObject body) throws IOException {
if (body != null) {
conn.setDoOutput(true);
conn.setRequestProperty("Content-Type", "application/json");
OutputStream output = null;
try {
output = conn.getOutputStream();
String jsonString = body.toString();
output.write(jsonString.getBytes(CHARSET));
} finally {
if (output != null) {
output.close();
}
}
}
return conn;
}
private static javax.net.ssl.HttpsURLConnection createGetConnection(final String url, final String query,
final String apiKey) throws IOException {
String getURL = String.format("%s?%s", url, query);
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(getURL, apiKey);
conn.setRequestMethod("GET");
return conn;
}
private static javax.net.ssl.HttpsURLConnection createPostConnection(final String url, final JsonObject body,
final String apiKey) throws IOException {
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(url, apiKey);
conn.setRequestMethod("POST");
conn = writeBody(conn, body);
return conn;
}
private static javax.net.ssl.HttpsURLConnection createDeleteConnection(final String url, final String query,
final String apiKey) throws IOException {
String deleteUrl = String.format("%s?%s", url, query);
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(deleteUrl, apiKey);
conn.setRequestMethod("DELETE");
return conn;
}
private static javax.net.ssl.HttpsURLConnection createPutConnection(final String url, final JsonObject body,
final String apiKey) throws IOException {
javax.net.ssl.HttpsURLConnection conn = createEasyPostConnection(url, apiKey);
conn.setRequestMethod("PUT");
writeBody(conn, body);
return conn;
}
private static JsonObject createBody(final Map<String, Object> params) {
Gson gson = new Gson();
return gson.toJsonTree(params).getAsJsonObject();
}
private static String createQuery(final Map<String, Object> params) throws UnsupportedEncodingException {
Map<String, String> flatParams = flattenParams(params);
StringBuilder queryStringBuilder = new StringBuilder();
for (Map.Entry<String, String> entry : flatParams.entrySet()) {
queryStringBuilder.append("&");
queryStringBuilder.append(urlEncodePair(entry.getKey(), entry.getValue()));
}
if (queryStringBuilder.length() > 0) {
queryStringBuilder.deleteCharAt(0);
}
return queryStringBuilder.toString();
}
private static Map<String, String> flattenParams(final Map<String, Object> params) {
if (params == null) {
return new HashMap<String, String>();
}
Map<String, String> flatParams = new HashMap<String, String>();
for (Map.Entry<String, Object> entry : params.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (value instanceof Map<?, ?>) {
Map<String, Object> flatNestedMap = new HashMap<String, Object>();
Map<?, ?> nestedMap = (Map<?, ?>) value;
for (Map.Entry<?, ?> nestedEntry : nestedMap.entrySet()) {
flatNestedMap.put(String.format("%s[%s]", key, nestedEntry.getKey()), nestedEntry.getValue());
}
flatParams.putAll(flattenParams(flatNestedMap));
} else if (value instanceof List) {
Map<String, Object> flatNestedMap = new HashMap<String, Object>();
List<?> nestedList = (List<?>) value;
for (int i = 0; i < nestedList.size(); i++) {
flatNestedMap.put(String.format("%s[%s]", key, i), nestedList.get(i));
flatParams.putAll(flattenParams(flatNestedMap));
}
} else if (value instanceof EasyPostResource) {
flatParams.put(String.format("%s[%s]", key, "id"), value.toString());
} else if (value != null) {
flatParams.put(key, value.toString());
}
}
return flatParams;
}
// represents Errors returned as JSON
private static class ErrorContainer {
private EasyPostResource.Error error;
}
private static class Error {
@SuppressWarnings ("unused")
private String type;
private String message;
private String code;
private String param;
private String error;
public String getType() {
return type;
}
public String getMessage() {
return message;
}
public String getCode() {
return code;
}
public String getParam() {
return param;
}
public String getError() {
return error;
}
}
private static String getResponseBody(final InputStream responseStream) throws IOException {
String rBody = new Scanner(responseStream, CHARSET).useDelimiter("\\A").next();
responseStream.close();
return rBody;
}
private static EasyPostResponse makeURLConnectionRequest(final EasyPostResource.RequestMethod method,
final String url, final String query,
final JsonObject body, final String apiKey)
throws EasyPostException {
javax.net.ssl.HttpsURLConnection conn = null;
try {
switch (method) {
case GET:
conn = createGetConnection(url, query, apiKey);
break;
case POST:
conn = createPostConnection(url, body, apiKey);
break;
case PUT:
conn = createPutConnection(url, body, apiKey);
break;
case DELETE:
conn = createDeleteConnection(url, query, apiKey);
break;
default:
throw new EasyPostException(
String.format("Unrecognized HTTP method %s. Please contact EasyPost at %s.", method,
EasyPostResource.EASYPOST_SUPPORT_EMAIL));
}
int rCode = conn.getResponseCode(); // sends the request
String rBody = null;
if (rCode == HttpURLConnection.HTTP_NO_CONTENT) {
rBody = "";
} else if (rCode >= HttpURLConnection.HTTP_OK && rCode < HttpURLConnection.HTTP_MULT_CHOICE) {
rBody = getResponseBody(conn.getInputStream());
} else {
rBody = getResponseBody(conn.getErrorStream());
}
return new EasyPostResponse(rCode, rBody);
} catch (IOException e) {
throw new EasyPostException(String.format("Could not connect to EasyPost (%s). " +
"Please check your internet connection and try again. If this problem persists," +
"please contact us at %s.", EasyPost.API_BASE, EasyPostResource.EASYPOST_SUPPORT_EMAIL), e);
} finally {
if (conn != null) {
conn.disconnect();
}
}
}
protected static <T> T request(final EasyPostResource.RequestMethod method, final String url,
final Map<String, Object> params, final Class<T> clazz, final String apiKey)
throws EasyPostException {
return request(method, url, params, clazz, apiKey, true);
}
protected static <T> T request(final EasyPostResource.RequestMethod method, final String url,
final Map<String, Object> params, final Class<T> clazz, final String apiKey,
final boolean apiKeyRequired) throws EasyPostException {
String originalDNSCacheTTL = null;
boolean allowedToSetTTL = true;
try {
originalDNSCacheTTL = java.security.Security.getProperty(DNS_CACHE_TTL_PROPERTY_NAME);
// disable DNS cache
java.security.Security.setProperty(DNS_CACHE_TTL_PROPERTY_NAME, "0");
} catch (SecurityException se) {
allowedToSetTTL = false;
}
try {
return _request(method, url, params, clazz, apiKey, apiKeyRequired);
} finally {
if (allowedToSetTTL) {
if (originalDNSCacheTTL == null) {
// value unspecified by implementation
java.security.Security.setProperty(DNS_CACHE_TTL_PROPERTY_NAME, "-1"); // cache forever
} else {
java.security.Security.setProperty(DNS_CACHE_TTL_PROPERTY_NAME, originalDNSCacheTTL);
}
}
}
}
@SuppressWarnings("checkstyle:methodname")
protected static <T> T _request(final EasyPostResource.RequestMethod method, final String url,
final Map<String, Object> params, final Class<T> clazz, String apiKey,
final boolean apiKeyRequired) throws EasyPostException {
if ((EasyPost.apiKey == null || EasyPost.apiKey.length() == 0) && (apiKey == null || apiKey.length() == 0)) {
if (apiKeyRequired) {
throw new EasyPostException(String.format(
"No API key provided. (set your API key using 'EasyPost.apiKey = {KEY}'. " +
"Your API key can be found in your EasyPost dashboard, " +
"or you can email us at %s for assistance.", EasyPostResource.EASYPOST_SUPPORT_EMAIL));
}
}
if (apiKey == null) {
apiKey = EasyPost.apiKey;
}
String query = null;
JsonObject body = null;
if (params != null) {
switch (method) {
case GET:
case DELETE:
try {
query = createQuery(params);
} catch (UnsupportedEncodingException e) {
throw new EasyPostException(
String.format("Unable to encode parameters to %s. Please email %s for assistance.",
CHARSET, EasyPostResource.EASYPOST_SUPPORT_EMAIL), e);
}
break;
case POST:
case PUT:
try {
body = createBody(params);
} catch (Exception e) {
throw new EasyPostException(String.format(
"Unable to create JSON body from parameters. Please email %s for assistance.",
EasyPostResource.EASYPOST_SUPPORT_EMAIL), e);
}
break;
default:
break;
}
}
EasyPostResponse response;
try {
// HTTPSURLConnection verifies SSL cert by default
response = makeURLConnectionRequest(method, url, query, body, apiKey);
} catch (ClassCastException ce) {
// appengine
String appEngineEnv = System.getProperty("com.google.appengine.runtime.environment", null);
if (appEngineEnv != null) {
response = makeAppEngineRequest(method, url, query, body, apiKey);
} else {
throw ce;
}
}
int rCode = response.getResponseCode();
String rBody = response.getResponseBody();
if (rCode < HttpURLConnection.HTTP_OK || rCode >= HttpURLConnection.HTTP_MULT_CHOICE) {
handleAPIError(rBody, rCode);
}
return GSON.fromJson(rBody, clazz);
}
private static void handleAPIError(final String rBody, final int rCode) throws EasyPostException {
try {
EasyPostResource.Error error = GSON.fromJson(rBody, EasyPostResource.Error.class);
if (error.getError().length() > 0) {
throw new EasyPostException(error.getError());
}
throw new EasyPostException(error.getMessage(), error.getParam(), null);
} catch (Exception e) {
throw new EasyPostException(
String.format("An error occurred. Response code: %s Response body: %s", rCode, rBody));
}
}
private static EasyPostResponse makeAppEngineRequest(final RequestMethod method, String url, final String query,
final JsonObject body, final String apiKey)
throws EasyPostException {
String unknownErrorMessage = String.format(
"Sorry, an unknown error occurred while trying to use the Google App Engine runtime." +
"Please email %s for assistance.", EasyPostResource.EASYPOST_SUPPORT_EMAIL);
try {
if ((method == RequestMethod.GET || method == RequestMethod.DELETE) && query != null) {
url = String.format("%s?%s", url, query);
}
URL fetchURL = new URL(url);
Class<?> requestMethodClass = Class.forName("com.google.appengine.api.urlfetch.HTTPMethod");
Object httpMethod = requestMethodClass.getDeclaredField(method.name()).get(null);
Class<?> fetchOptionsBuilderClass = Class.forName("com.google.appengine.api.urlfetch.FetchOptions$Builder");
Object fetchOptions = null;
try {
fetchOptions = fetchOptionsBuilderClass.getDeclaredMethod("validateCertificate").invoke(null);
} catch (NoSuchMethodException e) {
System.err.printf(
"Warning: this App Engine SDK version does not allow verification of SSL certificates;" +
"this exposes you to a MITM attack. Please upgrade your App Engine SDK to >=1.5.0. " +
"If you have questions, email %s.%n", EasyPostResource.EASYPOST_SUPPORT_EMAIL);
fetchOptions = fetchOptionsBuilderClass.getDeclaredMethod("withDefaults").invoke(null);
}
Class<?> fetchOptionsClass = Class.forName("com.google.appengine.api.urlfetch.FetchOptions");
// Heroku times out after 30s, so leave some time for the API to return a response
fetchOptionsClass.getDeclaredMethod("setDeadline", java.lang.Double.class)
.invoke(fetchOptions, APP_ENGINE_DEFAULT_TIMEOUT_SECONDS);
Class<?> requestClass = Class.forName("com.google.appengine.api.urlfetch.HTTPRequest");
Object request = requestClass.getDeclaredConstructor(URL.class, requestMethodClass, fetchOptionsClass)
.newInstance(fetchURL, httpMethod, fetchOptions);
if ((method == RequestMethod.POST || method == RequestMethod.PUT) && body != null) {
String bodyString = body.toString();
requestClass.getDeclaredMethod("setPayload", byte[].class)
.invoke(request, (Object) bodyString.getBytes());
}
for (Map.Entry<String, String> header : getHeaders(apiKey).entrySet()) {
Class<?> httpHeaderClass = Class.forName("com.google.appengine.api.urlfetch.HTTPHeader");
Object reqHeader = httpHeaderClass.getDeclaredConstructor(String.class, String.class)
.newInstance(header.getKey(), header.getValue());
requestClass.getDeclaredMethod("setHeader", httpHeaderClass).invoke(request, reqHeader);
}
Class<?> urlFetchFactoryClass = Class.forName("com.google.appengine.api.urlfetch.URLFetchServiceFactory");
Object urlFetchService = urlFetchFactoryClass.getDeclaredMethod("getURLFetchService").invoke(null);
Method fetchMethod = urlFetchService.getClass().getDeclaredMethod("fetch", requestClass);
fetchMethod.setAccessible(true);
Object response = fetchMethod.invoke(urlFetchService, request);
int responseCode = (Integer) response.getClass().getDeclaredMethod("getResponseCode").invoke(response);
String responseBody =
new String((byte[]) response.getClass().getDeclaredMethod("getContent").invoke(response), CHARSET);
return new EasyPostResponse(responseCode, responseBody);
} catch (InvocationTargetException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (MalformedURLException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (NoSuchFieldException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (SecurityException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (NoSuchMethodException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (ClassNotFoundException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (IllegalArgumentException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (IllegalAccessException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (InstantiationException e) {
throw new EasyPostException(unknownErrorMessage, e);
} catch (UnsupportedEncodingException e) {
throw new EasyPostException(unknownErrorMessage, e);
}
}
public static final ArrayList<String> GLOBAL_FIELD_ACCESSORS =
new ArrayList<>(Arrays.asList("getCreatedAt", "getUpdatedAt", "getFees"));
private Date createdAt;
private Date updatedAt;
private ArrayList<Fee> fees;
/**
* @return the Date this object was created
*/
public Date getCreatedAt() {
return createdAt;
}
/**
* Set the Date this object was created.
*
* @param createdAt the Date this object was created
*/
public void setCreatedAt(final Date createdAt) {
this.createdAt = createdAt;
}
/**
* @return the Date this object was last updated
*/
public Date getUpdatedAt() {
return updatedAt;
}
/**
* Set the Date this object was last updated.
*
* @param updatedAt the Date this object was last updated
*/
public void setUpdatedAt(final Date updatedAt) {
this.updatedAt = updatedAt;
}
/**
* @return the Fees associated with this object
*/
public ArrayList<Fee> getFees() {
return fees;
}
/**
* Set the Fees associated with this object.
*
* @param fees the Fees associated with this object
*/
public void setFees(final ArrayList<Fee> fees) {
this.fees = fees;
}
/**
* @return the ID of this object
*/
public String getId() {
return "";
}
/**
* @return the API mode used to create this object
*/
public String getMode() {
return "";
}
// Batch
/**
* @return the list of shipments in this batch
*/
public List<Shipment> getShipments() {
return new ArrayList<Shipment>();
}
/**
* @return the URL of the label for this object
*/
public String getLabelUrl() {
return "";
}
// Tracker
/**
* @return the ID of this shipment
*/
public String getShipmentId() {
return "";
}
/**
* @return the tracking code of this shipment
*/
public String getTrackingCode() {
return "";
}
/**
* @return the status of this object
*/
public String getStatus() {
return "";
}
/**
* @return the tracking details of this shipment
*/
public List<TrackingDetail> getTrackingDetails() {
return new ArrayList<TrackingDetail>();
}
}
| add Java version to the User-Agent in requests (#83)
| src/main/java/com/easypost/net/EasyPostResource.java | add Java version to the User-Agent in requests (#83) | <ide><path>rc/main/java/com/easypost/net/EasyPostResource.java
<ide> static Map<String, String> getHeaders(String apiKey) {
<ide> Map<String, String> headers = new HashMap<String, String>();
<ide> headers.put("Accept-Charset", CHARSET);
<del> headers.put("User-Agent", String.format("EasyPost/v2 JavaClient/%s", EasyPost.VERSION));
<add> headers.put("User-Agent", String.format("EasyPost/v2 JavaClient/%s Java/%s"
<add> , EasyPost.VERSION, System.getProperty("java.version")));
<ide>
<ide> if (apiKey == null) {
<ide> apiKey = EasyPost.apiKey; |
|
JavaScript | agpl-3.0 | be4fb56732f000c07958a467fe9d4570b31a593e | 0 | ansuz/dnssb,ansuz/dnssb | var Pull = require("pull-stream");
var KVSet = require("kvset");
var Pad = require("pad-ipv6");
var Query = module.exports = {};
function compareRecordsBySerial(a, b) {
return b.data.serial - a.data.serial;
}
function compareRecords(a, b) {
return a.name > b.name ? 1 : a.name < b.name ? -1 :
a.type > b.type ? 1 : a.type < b.type ? -1 :
a.class > b.class ? 1 : a.class < b.class ? -1 :
0
}
function isRecordEqual(a, b) {
return (a === b) || (a && b
&& a.name === b.name
&& a.type === b.type
&& a.class === b.class);
}
// Append records from `from` into `into`, skipping any that match an entry
// already in `into`. Deduplication is checked against the state of `into`
// *before* any pushes (matching the original filter-then-push behavior).
function mergeRecords(into, from) {
    if (!from) return;
    var fresh = from.filter(function (candidate) {
        return !into.some(function (existing) {
            return isRecordEqual(candidate, existing);
        });
    });
    fresh.forEach(function (rec) {
        into.push(rec);
    });
}
// Fold one query result into an accumulator result, deduplicating each
// record section and tightening the expiry to the earliest of the two.
function mergeResults(into, from) {
    ["additionals", "answers", "authorities", "questions"].forEach(function (key) {
        mergeRecords(into[key], from[key]);
    });
    if (from.expires < into.expires) into.expires = from.expires;
    // Bitwise ops kept deliberately: they coerce the flags to 0/1 exactly
    // as the original did (|= is OR-accumulate, &= is AND-accumulate).
    into.domainExists |= from.domainExists;
    into.authoritative |= from.authoritative;
    into.cache &= from.cache;
}
// Collect the message ids of the current (head) records matching
// name/type/class, e.g. for use as `branch` links when publishing an update.
// Calls back with (err, arrayOfIds).
Query.branches = function (sbot, name, type, _class, cb) {
    var klass = _class || "IN";
    var ids = [];
    // Loose (==) comparison retained from the original implementation.
    function matches(record) {
        return record.name == name
            && record.type == type
            && record.class == klass;
    }
    Pull(Query.all(sbot),
        Pull.filter(matches),
        Query.drainSet(function (record) {
            ids.push(record.id);
        }, function (err) {
            cb(err, ids);
        }));
};
function msgToRecord(msg) {
var c = msg.value.content;
var r = c && c.record;
if (!r) return;
r.id = msg.key;
r.author = msg.value.author;
r.timestamp = msg.value.timestamp;
r.branch = c.branch;
if (!r.ttl) r.ttl = 500;
if (!r.class) r.class = "IN";
if (r.value) r.data = r.value, delete r.value
if (r.type === 'AAAA') r.data = Pad(r.data);
return r;
}
// Source of every DNS record in the log: stream all 'ssb-dns' messages,
// mapped to record objects, dropping messages without a record.
Query.all = function (sbot) {
    var messages = sbot.messagesByType({ type: 'ssb-dns' });
    return Pull(
        messages,
        Pull.map(msgToRecord),
        Pull.filter()
    );
};
// Stream the raw 'ssb-dns' messages whose record lies at or below `name`.
// Matching is done on the reversed label path (tld first), using the
// message's `path` property when present, else the record name itself.
function recordsInDomain(sbot, name) {
    var path = name.split(/\./g).reverse();
    /* enable this when records without a path property are to be deprecated:
    // use ssb-query if it is supported
    if (sbot.query) return sbot.query.read({
        query: [{$filter: {value: {content: {
            type: 'ssb-dns',
            path: {$prefix: path}
        }}}}]
    });
    */
    // fallback: scan the full type log
    function inDomain(msg) {
        var c = msg.value.content;
        var p = c.path;
        if (!p) {
            var recordName = c.record && c.record.name;
            if (typeof recordName !== 'string') return false;
            p = recordName.split(/\./).reverse();
        }
        // `path` must be a prefix of the message's reversed label path
        for (var i = 0; i < path.length; i++) {
            if (p[i] !== path[i]) return false;
        }
        return true;
    }
    return Pull(sbot.messagesByType({
        type: 'ssb-dns',
    }), Pull.filter(inDomain));
}
// Stream record objects at or below `name`; with no name, stream everything.
Query.inDomain = function (sbot, name) {
    if (!name) return Query.all(sbot);
    return Pull(
        recordsInDomain(sbot, name),
        Pull.map(msgToRecord),
        Pull.filter()
    );
};
function expandName(name, wildcard) {
var names = {'': true};
names[name] = true;
for (var labels = name.split(/\./); labels.length; labels.shift()) {
if (wildcard) labels[0] = wildcard;
names[labels.join('.')] = true;
}
return names;
}
function Records() {
this.recs = [];
}
Records.prototype.addRecord = function (record) {
this.recs.push(record);
};
Records.prototype.getRecords = function () {
return this.recs;
};
function Wildcards() {
this.lengths = [];
this.recordsByLength = {};
}
function RecordsMap() {
this.recs = {};
}
RecordsMap.prototype.addRecord = function (r) {
if (r.name in this.recs) {
this.recs[r.name].push(r);
} else {
this.recs[r.name] = [r];
}
};
RecordsMap.prototype.popRecords = function (name) {
name = name.replace(/\.$/, '');
var recs = this.recs[name];
delete this.recs[name];
return recs || [];
};
// Bucket a record by the length of its owner name, tracking each new
// length the first time it appears.
Wildcards.prototype.addRecord = function (record) {
    var len = record.name.length;
    var bucket = this.recordsByLength[len];
    if (bucket) {
        bucket.push(record);
    } else {
        this.recordsByLength[len] = [record];
        this.lengths.push(len);
    }
};
// Return the records for the longest (most specific) name length seen,
// or [] when nothing was collected.
Wildcards.prototype.getRecords = function () {
    if (!this.lengths.length) return [];
    var longest = Math.max.apply(Math, this.lengths);
    return this.recordsByLength[longest] || [];
};
// Return the records for the shortest (least specific) name length seen,
// or [] when nothing was collected.
Wildcards.prototype.getTopRecords = function () {
    if (!this.lengths.length) return [];
    var shortest = Math.min.apply(Math, this.lengths);
    return this.recordsByLength[shortest] || [];
};
function ZoneSerials() {
this.serials = {};
}
ZoneSerials.prototype.addRecord = function (zones) {
for (var zone in zones) {
this.serials[zone] = (+this.serials[zone] || 0) + 1;
}
};
ZoneSerials.prototype.getSerial = function (zone) {
return this.serials[zone] % 0x100000000;
};
// Sink that reduces the record stream to its current heads: a record
// supersedes the records named in its `branch` link(s), and records with
// no data act as tombstones. Each surviving head is passed to `each`;
// `onEnd` receives the stream error, an error thrown by `each`, or null.
Query.drainSet = function (each, onEnd) {
    var set = new KVSet();
    function accumulate(record) {
        if (record.branch) set.remove(record.branch);
        if (record.data) set.add(record.id, record);
    }
    function finish(err) {
        if (err) return onEnd(err);
        for (var key in set.heads) {
            try {
                each(set.heads[key]);
            } catch (e) {
                return onEnd(e);
            }
        }
        onEnd(null);
    }
    return Pull.drain(accumulate, finish);
};
// Like drainSet, but gathers the surviving head records into an array
// and calls back with (err, records).
Query.collectSet = function (cb) {
    var collected = [];
    return Query.drainSet(
        function (record) { collected.push(record); },
        function (err) { cb(err, collected); }
    );
};
// Question types that must not trigger CNAME-chasing recursion.
var nonRecurseTypes = {
    CNAME: true,
    AXFR: true,
    IXFR: true,
};
// Entry point for answering a question: recursive resolution for ordinary
// lookups, single-pass resolution for CNAME queries and zone transfers.
Query.query = function (sbot, question, cb) {
    var recurse = !nonRecurseTypes[question.type];
    if (recurse) {
        Query.queryRecursive(sbot, question, [], cb);
    } else {
        Query.querySingle(sbot, question, cb);
    }
};
// recursive query: answer `question`, then chase any CNAME answers by
// re-querying the canonical name with the same type/class, merging all
// results into one. `stack` carries the questions already being resolved
// so CNAME loops terminate. Calls back with (err, result).
Query.queryRecursive = function (sbot, question, stack, cb) {
    // Accumulator merged into by every sub-query (see mergeResults).
    var result = {
        cache: true,
        answers: [],
        authorities: [],
        additionals: [],
        questions: [],
        expires: Date.now() + 60*60e3,
    };
    // avoid infinite recursion: if this question is already being resolved
    // higher up the chain, return an empty result instead of looping.
    if (stack.some(function (q) {
        return isRecordEqual(q, question);
    })) {
        return cb(null, result)
    }
    // Fan-in counter: one pending call for the initial query, plus one per
    // CNAME follow-up; cb fires when it reaches zero.
    var waiting = 1;
    Query.querySingle(sbot, question, next);
    function next(err, res) {
        // NOTE(review): on error `waiting` is zeroed and cb(err) fires, but
        // still-pending sub-queries will call next again and decrement
        // `waiting` below zero — relies on `!--waiting` staying false;
        // confirm cb cannot be invoked twice when multiple branches error.
        if (err) return waiting = 0, cb(err);
        mergeResults(result, res)
        // recurse on CNAMEs: re-ask the original type/class at each
        // canonical name (trailing dot stripped).
        var stack2 = stack.concat(question)
        res.answers.filter(function (answer) {
            return answer.type === 'CNAME';
        }).map(function (record) {
            return {
                class: question.class,
                type: question.type,
                name: record.data.replace(/\.$/, '')
            }
        }).forEach(function (q) {
            waiting++
            Query.queryRecursive(sbot, q, stack2, next)
        });
        if (!--waiting) cb(null, result);
    }
}
// Answer one question without CNAME recursion. Scans all records and
// assembles a DNS-style result: answers (exact or wildcard owner match),
// authorities (NS/SOA for the closest enclosing zone), and additionals
// (A/AAAA glue for the referenced name servers). AXFR/IXFR questions
// instead collect the whole zone, bracketed by its SOA (RFC 5936 style).
// Calls back with (err, result).
Query.querySingle = function (sbot, question, cb) {
    // look up records that match a question, including wildcard records
    // and zone authority records
    var qName = question.name.toLowerCase();
    // qName and all of its parent domains — where NS/SOA authority may live
    var authorityDomains = expandName(qName);
    // qName plus the wildcard owners ('*.…') that could match it
    var wildcardDomains = expandName(qName, '*');
    var isIncrementalTransfer = question.type === 'IXFR'
    var isTransfer = isIncrementalTransfer || question.type === 'AXFR'
    isIncrementalTransfer = false // TODO: fix IXFR
    var authorities = new Wildcards();
    // A/AAAA records held aside in case an NS/SOA answer needs them as glue
    var maybeGlue = new RecordsMap();
    // Transfers keep every record; lookups keep only the best wildcard depth
    var answers = isTransfer ? new Records() : new Wildcards();
    var zoneSerials = new ZoneSerials();
    var result = {
        cache: !isTransfer
    };
    Pull(Query.all(sbot),
        Pull.filter(function (record) {
            // Count this record toward the serial of every zone it is in.
            var recordDomains = expandName(record.name);
            zoneSerials.addRecord(recordDomains);
            // IXFR: skip records already covered by the client's serial
            // (currently disabled via the isIncrementalTransfer reset above).
            if (isIncrementalTransfer
                && zoneSerials.getSerial(qName) < question.serial) {
                return false
            }
            // Transfers match the whole zone under qName; lookups match the
            // exact name or a wildcard owner.
            var nameMatches = isTransfer ? qName in recordDomains :
                record.name in wildcardDomains;
            if (nameMatches) {
                result.domainExists = true;
            }
            if (!isTransfer) {
                if (record.type === 'A' || record.type === 'AAAA') {
                    // include all because we might need them for glue
                    return true;
                }
                if (record.type === 'NS' || record.type === 'SOA') {
                    // authority candidates anywhere up the name hierarchy
                    return record.name in authorityDomains
                }
            }
            // Otherwise require a name match plus type/class compatibility
            // (CNAMEs always qualify so they can be chased by the caller).
            return nameMatches
                && (isTransfer
                    || question.type === record.type
                    || question.type === '*'
                    || 'CNAME' === record.type)
                && (question.class === record.class
                    || question.class === '*');
        }),
        Query.drainSet(function (record) {
            // Route each surviving head record to answers / authorities /
            // potential glue.
            if (record.type === 'NS' || record.type === 'SOA') {
                result.authoritative = true;
                if (question.class === record.class
                    && question.type === record.type
                    && record.name in wildcardDomains) {
                    // NS/SOA was asked for directly
                    answers.addRecord(record);
                } else {
                    authorities.addRecord(record);
                    if (isTransfer) {
                        answers.addRecord(record);
                    }
                }
            } else if (!isTransfer
                && (record.type === 'A' || record.type === 'AAAA')
                && (!(record.name in wildcardDomains)
                    || (question.type !== record.type && question.type !== '*')
                    || (question.class !== record.class && question.class !== '*'))
            ) {
                // address record that is not itself an answer — keep as
                // possible glue for NS/SOA references
                maybeGlue.addRecord(record);
            } else {
                answers.addRecord(record);
            }
        }, function (err) {
            if (err) return cb(err);
            var ttl = 3600; // max internal ttl
            result.answers = answers.getRecords();
            result.answers.forEach(function (record) {
                if (record.ttl < ttl) ttl = record.ttl;
            });
            result.additionals = [];
            // Transfers report the zone apex authority; lookups the closest.
            result.authorities = isTransfer
                ? authorities.getTopRecords()
                : authorities.getRecords();
            result.authorities.forEach(updateAuthority);
            result.answers.forEach(updateAuthority);
            // Finalize an NS/SOA record: derive the SOA serial, tighten the
            // ttl, and attach glue for the name server it points at.
            function updateAuthority(r) {
                if (r.type === 'SOA') {
                    if (!r.data.serial) {
                        // special case: calculate a serial for the SOA
                        r.data.serial = zoneSerials.getSerial(r.name);
                    }
                    if (r.ttl < ttl) ttl = r.ttl;
                    if (!result.answers.length) {
                        // negative answer: honor the SOA minimum ttl field
                        if (r.data.ttl < ttl) ttl = r.data.ttl;
                    }
                    if (!isTransfer) {
                        result.additionals = result.additionals.concat(
                            maybeGlue.popRecords(r.data.mname))
                    }
                } else if (r.type === 'NS') {
                    if (!isTransfer) {
                        result.additionals = result.additionals.concat(
                            maybeGlue.popRecords(r.data))
                    }
                }
            }
            result.expires = Date.now() + ttl * 60e3;
            result.questions = [];
            if (isTransfer) {
                // RFC 5936, Section 2.2
                result.questions.push(question);
                // pick a SOA record to use as the bookend
                var soa = result.authorities.filter(function (r) {
                    return r.type === 'SOA';
                }).sort(compareRecordsBySerial)[0];
                result.authorities.length = 0;
                if (soa) {
                    // zone transfer payload is SOA … records … SOA
                    result.answers = [soa].concat(
                        result.answers.filter(function (r) {
                            return r !== soa
                        }).sort(compareRecords),
                        [soa]
                    );
                }
            } else {
                // only include SOA if there are no answers
                if (result.answers.length > 0) {
                    result.authorities = result.authorities.filter(function (r) {
                        return r.type !== 'SOA';
                    });
                }
                // include SOA if there are no answers, NS if there are
                result.authorities = result.authorities.filter(function (r) {
                    return r.type !== (result.answers.length ? 'SOA' : 'NS');
                });
                // resolve wildcards in answers: report the queried name as
                // the owner instead of the wildcard owner
                result.answers.forEach(function (r) {
                    if (r.name !== qName && r.name in wildcardDomains) {
                        r.name = qName;
                    }
                })
            }
            cb(null, result);
        }));
};
| lib/query.js | var Pull = require("pull-stream");
var KVSet = require("kvset");
var Pad = require("pad-ipv6");
var Query = module.exports = {};
function compareRecordsBySerial(a, b) {
return b.data.serial - a.data.serial;
}
function compareRecords(a, b) {
return a.name > b.name ? 1 : a.name < b.name ? -1 :
a.type > b.type ? 1 : a.type < b.type ? -1 :
a.class > b.class ? 1 : a.class < b.class ? -1 :
0
}
function isRecordEqual(a, b) {
return (a === b) || (a && b
&& a.name === b.name
&& a.type === b.type
&& a.class === b.class);
}
function mergeRecords(into, from) {
if (from) from.filter(function (a) {
return into.every(function (b) {
return !isRecordEqual(a, b);
})
}).forEach(function (rec) {
into.push(rec);
});
}
function mergeResults(into, from) {
mergeRecords(into.additionals, from.additionals);
mergeRecords(into.answers, from.answers);
mergeRecords(into.authorities, from.authorities);
mergeRecords(into.questions, from.questions);
if (from.expires < into.expires) into.expires = from.expires;
into.domainExists |= from.domainExists
into.authoritative |= from.authoritative
into.cache &= from.cache
}
Query.branches = function (sbot, name, type, _class, cb) {
if (!_class) _class = "IN";
var branches = [];
Pull(Query.all(sbot),
Pull.filter(function (record) {
return record.name == name
&& record.type == type
&& record.class == _class;
}),
Query.drainSet(function (record) {
branches.push(record.id);
}, function (err) {
cb(err, branches);
}));
};
function msgToRecord(msg) {
var c = msg.value.content;
var r = c && c.record;
if (!r) return;
r.id = msg.key;
r.author = msg.value.author;
r.timestamp = msg.value.timestamp;
r.branch = c.branch;
if (!r.ttl) r.ttl = 500;
if (!r.class) r.class = "IN";
if (r.value) r.data = r.value, delete r.value
if (r.type === 'AAAA') r.data = Pad(r.data);
return r;
}
Query.all = function (sbot) {
return Pull(sbot.messagesByType({
type: 'ssb-dns',
}),
Pull.map(msgToRecord),
Pull.filter());
};
function recordsInDomain(sbot, name) {
var path = name.split(/\./g).reverse()
/* enable this when records without path propery are to be deprecated:
// use ssb-query if it is supported
if (sbot.query) return sbot.query.read({
query: [{$filter: {value: {content: {
type: 'ssb-dns',
path: {$prefix: path}
}}}}]
});
*/
// fallback to logt
return Pull(sbot.messagesByType({
type: 'ssb-dns',
}),
Pull.filter(function (msg) {
var c = msg.value.content;
var p = c.path;
if (!p) {
var name = c.record && c.record.name;
if (typeof name !== 'string') return false;
p = name.split(/\./).reverse()
}
for (var i = 0; i < path.length; i++) {
if (path[i] !== p[i]) return false;
}
return true;
}));
}
Query.inDomain = function (sbot, name) {
if (!name) return Query.all(sbot);
return Pull(recordsInDomain(sbot, name),
Pull.map(msgToRecord),
Pull.filter());
};
function expandName(name, wildcard) {
var names = {'': true};
names[name] = true;
for (var labels = name.split(/\./); labels.length; labels.shift()) {
if (wildcard) labels[0] = wildcard;
names[labels.join('.')] = true;
}
return names;
}
function Records() {
this.recs = [];
}
Records.prototype.addRecord = function (record) {
this.recs.push(record);
};
Records.prototype.getRecords = function () {
return this.recs;
};
function Wildcards() {
this.lengths = [];
this.recordsByLength = {};
}
function RecordsMap() {
this.recs = {};
}
RecordsMap.prototype.addRecord = function (r) {
if (r.name in this.recs) {
this.recs[r.name].push(r);
} else {
this.recs[r.name] = [r];
}
};
RecordsMap.prototype.popRecords = function (name) {
name = name.replace(/\.$/, '');
var recs = this.recs[name];
delete this.recs[name];
return recs || [];
};
Wildcards.prototype.addRecord = function (record) {
var len = record.name.length;
if (len in this.recordsByLength) {
this.recordsByLength[len].push(record);
} else {
this.recordsByLength[len] = [record];
this.lengths.push(len);
}
};
Wildcards.prototype.getRecords = function () {
// get records for the longest name length
if (this.lengths.length) {
var len = Math.max.apply(Math, this.lengths);
return this.recordsByLength[len] || [];
}
return [];
};
Wildcards.prototype.getTopRecords = function () {
// get records for the shortest name length
if (this.lengths.length) {
var len = Math.min.apply(Math, this.lengths);
return this.recordsByLength[len] || [];
}
return [];
};
function ZoneSerials() {
this.serials = {};
}
ZoneSerials.prototype.addRecord = function (zones) {
for (var zone in zones) {
this.serials[zone] = (+this.serials[zone] || 0) + 1;
}
};
ZoneSerials.prototype.getSerial = function (zone) {
return this.serials[zone] % 0x100000000;
};
Query.drainSet = function (each, onEnd) {
var set = new KVSet();
return Pull.drain(function (record) {
if (record.branch) set.remove(record.branch);
set.add(record.id, record);
}, function (err) {
if (err) return onEnd(err);
for (var key in set.heads) {
var record = set.heads[key];
try {
each(record);
} catch(e) {
return onEnd(e);
}
}
onEnd(null);
});
};
Query.collectSet = function (cb) {
var records = [];
return Query.drainSet(function (record) {
records.push(record);
}, function (err) {
return cb(err, records);
});
};
var nonRecurseTypes = {
CNAME: true,
AXFR: true,
IXFR: true,
};
Query.query = function (sbot, question, cb) {
if (nonRecurseTypes[question.type]) {
Query.querySingle(sbot, question, cb);
} else {
Query.queryRecursive(sbot, question, [], cb);
}
}
// recursive query
Query.queryRecursive = function (sbot, question, stack, cb) {
var result = {
cache: true,
answers: [],
authorities: [],
additionals: [],
questions: [],
expires: Date.now() + 60*60e3,
};
// avoid infinite recursion
if (stack.some(function (q) {
return isRecordEqual(q, question);
})) {
return cb(null, result)
}
var waiting = 1;
Query.querySingle(sbot, question, next);
function next(err, res) {
if (err) return waiting = 0, cb(err);
mergeResults(result, res)
// recurse on CNAMEs
var stack2 = stack.concat(question)
res.answers.filter(function (answer) {
return answer.type === 'CNAME';
}).map(function (record) {
return {
class: question.class,
type: question.type,
name: record.data.replace(/\.$/, '')
}
}).forEach(function (q) {
waiting++
Query.queryRecursive(sbot, q, stack2, next)
});
if (!--waiting) cb(null, result);
}
}
Query.querySingle = function (sbot, question, cb) {
// look up records that match a question, including wildcard records
// and zone authority records
var qName = question.name.toLowerCase();
var authorityDomains = expandName(qName);
var wildcardDomains = expandName(qName, '*');
var isIncrementalTransfer = question.type === 'IXFR'
var isTransfer = isIncrementalTransfer || question.type === 'AXFR'
isIncrementalTransfer = false // TODO: fix IXFR
var authorities = new Wildcards();
var maybeGlue = new RecordsMap();
var answers = isTransfer ? new Records() : new Wildcards();
var zoneSerials = new ZoneSerials();
var result = {
cache: !isTransfer
};
Pull(Query.all(sbot),
Pull.filter(function (record) {
var recordDomains = expandName(record.name);
zoneSerials.addRecord(recordDomains);
if (isIncrementalTransfer
&& zoneSerials.getSerial(qName) < question.serial) {
return false
}
var nameMatches = isTransfer ? qName in recordDomains :
record.name in wildcardDomains;
if (nameMatches) {
result.domainExists = true;
}
if (!isTransfer) {
if (record.type === 'A' || record.type === 'AAAA') {
// include all because we might need them for glue
return true;
}
if (record.type === 'NS' || record.type === 'SOA') {
return record.name in authorityDomains
}
}
return nameMatches
&& (isTransfer
|| question.type === record.type
|| question.type === '*'
|| 'CNAME' === record.type)
&& (question.class === record.class
|| question.class === '*');
}),
Query.drainSet(function (record) {
if (record.type === 'NS' || record.type === 'SOA') {
result.authoritative = true;
if (question.class === record.class
&& question.type === record.type
&& record.name in wildcardDomains) {
answers.addRecord(record);
} else {
authorities.addRecord(record);
if (isTransfer) {
answers.addRecord(record);
}
}
} else if (!isTransfer
&& (record.type === 'A' || record.type === 'AAAA')
&& (!(record.name in wildcardDomains)
|| (question.type !== record.type && question.type !== '*')
|| (question.class !== record.class && question.class !== '*'))
) {
maybeGlue.addRecord(record);
} else {
answers.addRecord(record);
}
}, function (err) {
if (err) return cb(err);
var ttl = 3600; // max internal ttl
result.answers = answers.getRecords();
result.answers.forEach(function (record) {
if (record.ttl < ttl) ttl = record.ttl;
});
result.additionals = [];
result.authorities = isTransfer
? authorities.getTopRecords()
: authorities.getRecords();
result.authorities.forEach(updateAuthority);
result.answers.forEach(updateAuthority);
function updateAuthority(r) {
if (r.type === 'SOA') {
if (!r.data.serial) {
// special case: calculate a serial for the SOA
r.data.serial = zoneSerials.getSerial(r.name);
}
if (r.ttl < ttl) ttl = r.ttl;
if (!result.answers.length) {
if (r.data.ttl < ttl) ttl = r.data.ttl;
}
if (!isTransfer) {
result.additionals = result.additionals.concat(
maybeGlue.popRecords(r.data.mname))
}
} else if (r.type === 'NS') {
if (!isTransfer) {
result.additionals = result.additionals.concat(
maybeGlue.popRecords(r.data))
}
}
}
result.expires = Date.now() + ttl * 60e3;
result.questions = [];
if (isTransfer) {
// RFC 5936, Section 2.2
result.questions.push(question);
// pick a SOA record to use as the bookend
var soa = result.authorities.filter(function (r) {
return r.type === 'SOA';
}).sort(compareRecordsBySerial)[0];
result.authorities.length = 0;
if (soa) {
result.answers = [soa].concat(
result.answers.filter(function (r) {
return r !== soa
}).sort(compareRecords),
[soa]
);
}
} else {
// only include SOA if there are no answers
if (result.answers.length > 0) {
result.authorities = result.authorities.filter(function (r) {
return r.type !== 'SOA';
});
}
// include SOA if there are no answers, NS if there are
result.authorities = result.authorities.filter(function (r) {
return r.type !== (result.answers.length ? 'SOA' : 'NS');
});
// resolve wildcards in answers
result.answers.forEach(function (r) {
if (r.name !== qName && r.name in wildcardDomains) {
r.name = qName;
}
})
}
cb(null, result);
}));
};
| Serve only records with data
| lib/query.js | Serve only records with data | <ide><path>ib/query.js
<ide> var set = new KVSet();
<ide> return Pull.drain(function (record) {
<ide> if (record.branch) set.remove(record.branch);
<del> set.add(record.id, record);
<add> if (record.data) set.add(record.id, record);
<ide> }, function (err) {
<ide> if (err) return onEnd(err);
<ide> for (var key in set.heads) { |
|
Java | apache-2.0 | 90e33b2700001f6f760663f9f15f1b2264f3a75d | 0 | jamesdbloom/mockserver,jamesdbloom/mockserver,jamesdbloom/mockserver,jamesdbloom/mockserver,jamesdbloom/mockserver | package org.mockserver.maven;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockserver.client.proxy.ProxyClient;
import org.mockserver.client.server.MockServerClient;
import org.mockserver.mockserver.MockServer;
import org.mockserver.mockserver.MockServerBuilder;
import org.mockserver.proxy.http.HttpProxy;
import org.mockserver.proxy.http.HttpProxyBuilder;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
/**
* @author jamesdbloom
*/
public class InstanceHolderTest {
@Mock
private HttpProxyBuilder mockProxyBuilder;
@Mock
private MockServerBuilder mockMockServerBuilder;
@Mock
private HttpProxy mockProxy;
@Mock
private MockServer mockMockServer;
@Mock
private MockServerClient mockServerClient;
@Mock
private ProxyClient proxyClient;
@InjectMocks
private InstanceHolder instanceHolder;
@Before
public void setupMock() {
instanceHolder = new InstanceHolder();
initMocks(this);
InstanceHolder.mockServerBuilder = mockMockServerBuilder;
InstanceHolder.proxyBuilder = mockProxyBuilder;
when(mockMockServerBuilder.withHTTPPort(anyInt())).thenReturn(mockMockServerBuilder);
when(mockMockServerBuilder.withHTTPSPort(anyInt())).thenReturn(mockMockServerBuilder);
when(mockProxyBuilder.withHTTPPort(anyInt())).thenReturn(mockProxyBuilder);
when(mockProxyBuilder.withHTTPSPort(anyInt())).thenReturn(mockProxyBuilder);
when(mockProxy.isRunning()).thenReturn(false);
when(mockMockServer.isRunning()).thenReturn(false);
}
@After
public void shutdownProxyAndMockServer() {
instanceHolder.stop();
}
@Test
public void shouldStartServerAndProxyOnBothPorts() {
// when
instanceHolder.start(1, 2, 3, 4, null);
// then
verify(mockMockServerBuilder).withHTTPPort(1);
verify(mockMockServerBuilder).withHTTPSPort(2);
verify(mockProxyBuilder).withHTTPPort(3);
verify(mockProxyBuilder).withHTTPSPort(4);
}
@Test
public void shouldStartOnlyServerOnBothPorts() {
// when
instanceHolder.start(1, 2, -1, -1, null);
// then
verify(mockMockServerBuilder).withHTTPPort(1);
verify(mockMockServerBuilder).withHTTPSPort(2);
verifyNoMoreInteractions(mockProxyBuilder);
}
@Test
public void shouldStartOnlyServerOnHttpPort() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(1, -1, -1, -1, new ExampleInitializationClass());
// then
verify(mockMockServerBuilder).withHTTPPort(1);
verify(mockMockServerBuilder).withHTTPSPort(-1);
verifyNoMoreInteractions(mockProxyBuilder);
assertNotNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldStartOnlyServerOnHttpsPort() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(-1, 1, -1, -1, new ExampleInitializationClass());
// then
verify(mockMockServerBuilder).withHTTPPort(-1);
verify(mockMockServerBuilder).withHTTPSPort(1);
verifyNoMoreInteractions(mockProxyBuilder);
assertNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldStartOnlyProxyOnBothPorts() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(-1, -1, 3, 4, new ExampleInitializationClass());
// then
verifyNoMoreInteractions(mockMockServerBuilder);
verify(mockProxyBuilder).withHTTPPort(3);
verify(mockProxyBuilder).withHTTPSPort(4);
assertNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldRunInitializationClass() {
// given
ExampleInitializationClass.mockServerClient = null;
// when
instanceHolder.start(1, 2, -1, -1, new ExampleInitializationClass());
// then
assertNotNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldNotStartServerOrProxy() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(-1, -1, -1, -1, new ExampleInitializationClass());
// then
verifyNoMoreInteractions(mockMockServerBuilder);
verifyNoMoreInteractions(mockProxyBuilder);
assertNull(ExampleInitializationClass.mockServerClient);
}
@Test(expected = IllegalStateException.class)
public void shouldThrowExceptionIfServerRunning() {
// given
when(mockMockServer.isRunning()).thenReturn(true);
// when
instanceHolder.start(1, 2, 3, 4, null);
}
@Test(expected = IllegalStateException.class)
public void shouldThrowExceptionIfProxyRunning() {
// given
when(mockProxy.isRunning()).thenReturn(true);
// when
instanceHolder.start(1, 2, 3, 4, null);
}
@Test
public void shouldStopMockServer() {
// given
when(mockMockServer.isRunning()).thenReturn(true);
when(mockProxy.isRunning()).thenReturn(true);
// when
instanceHolder.stop();
// then
verify(mockMockServer).stop();
verify(mockProxy).stop();
}
@Test
@Ignore
public void shouldStopMockServerAndProxyRemotely() {
// given
InstanceHolder embeddedJettyHolder = spy(instanceHolder);
doReturn(mockServerClient).when(embeddedJettyHolder).newMockServerClient(1);
doReturn(proxyClient).when(embeddedJettyHolder).newProxyClient(2);
// when
embeddedJettyHolder.stop(1, 2);
// then
verify(mockServerClient).stop();
verify(proxyClient).stop();
}
@Test
@Ignore
public void shouldStopMockServerOnlyRemotely() {
// given
InstanceHolder embeddedJettyHolder = spy(instanceHolder);
doReturn(mockServerClient).when(embeddedJettyHolder).newMockServerClient(1);
doReturn(proxyClient).when(embeddedJettyHolder).newProxyClient(2);
// when
embeddedJettyHolder.stop(1, -1);
// then
verify(mockServerClient).stop();
verify(proxyClient, times(0)).stop();
}
@Test
@Ignore
public void shouldStopProxyOnlyRemotely() {
// given
InstanceHolder embeddedJettyHolder = spy(instanceHolder);
doReturn(mockServerClient).when(embeddedJettyHolder).newMockServerClient(1);
doReturn(proxyClient).when(embeddedJettyHolder).newProxyClient(2);
// when
embeddedJettyHolder.stop(-1, 2);
// then
verify(mockServerClient, times(0)).stop();
verify(proxyClient).stop();
}
}
| mockserver-maven-plugin/src/test/java/org/mockserver/maven/InstanceHolderTest.java | package org.mockserver.maven;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockserver.client.proxy.ProxyClient;
import org.mockserver.client.server.MockServerClient;
import org.mockserver.mockserver.MockServer;
import org.mockserver.mockserver.MockServerBuilder;
import org.mockserver.proxy.http.HttpProxy;
import org.mockserver.proxy.http.HttpProxyBuilder;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
/**
* @author jamesdbloom
*/
public class InstanceHolderTest {
@Mock
private HttpProxyBuilder mockProxyBuilder;
@Mock
private MockServerBuilder mockMockServerBuilder;
@Mock
private HttpProxy mockProxy;
@Mock
private MockServer mockMockServer;
@Mock
private MockServerClient mockServerClient;
@Mock
private ProxyClient proxyClient;
@InjectMocks
private InstanceHolder instanceHolder;
@Before
public void setupMock() {
instanceHolder = new InstanceHolder();
initMocks(this);
InstanceHolder.mockServerBuilder = mockMockServerBuilder;
InstanceHolder.proxyBuilder = mockProxyBuilder;
when(mockMockServerBuilder.withHTTPPort(anyInt())).thenReturn(mockMockServerBuilder);
when(mockMockServerBuilder.withHTTPSPort(anyInt())).thenReturn(mockMockServerBuilder);
when(mockProxyBuilder.withHTTPPort(anyInt())).thenReturn(mockProxyBuilder);
when(mockProxyBuilder.withHTTPSPort(anyInt())).thenReturn(mockProxyBuilder);
when(mockProxy.isRunning()).thenReturn(false);
when(mockMockServer.isRunning()).thenReturn(false);
}
@After
public void shutdownProxyAndMockServer() {
instanceHolder.stop();
}
@Test
public void shouldStartServerAndProxyOnBothPorts() {
// when
instanceHolder.start(1, 2, 3, 4, null);
// then
verify(mockMockServerBuilder).withHTTPPort(1);
verify(mockMockServerBuilder).withHTTPSPort(2);
verify(mockProxyBuilder).withHTTPPort(3);
verify(mockProxyBuilder).withHTTPSPort(4);
}
@Test
public void shouldStartOnlyServerOnBothPorts() {
// when
instanceHolder.start(1, 2, -1, -1, null);
// then
verify(mockMockServerBuilder).withHTTPPort(1);
verify(mockMockServerBuilder).withHTTPSPort(2);
verifyNoMoreInteractions(mockProxyBuilder);
}
@Test
public void shouldStartOnlyServerOnHttpPort() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(1, -1, -1, -1, new ExampleInitializationClass());
// then
verify(mockMockServerBuilder).withHTTPPort(1);
verify(mockMockServerBuilder).withHTTPSPort(-1);
verifyNoMoreInteractions(mockProxyBuilder);
assertNotNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldStartOnlyServerOnHttpsPort() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(-1, 1, -1, -1, new ExampleInitializationClass());
// then
verify(mockMockServerBuilder).withHTTPPort(-1);
verify(mockMockServerBuilder).withHTTPSPort(1);
verifyNoMoreInteractions(mockProxyBuilder);
assertNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldStartOnlyProxyOnBothPorts() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(-1, -1, 3, 4, new ExampleInitializationClass());
// then
verifyNoMoreInteractions(mockMockServerBuilder);
verify(mockProxyBuilder).withHTTPPort(3);
verify(mockProxyBuilder).withHTTPSPort(4);
assertNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldRunInitializationClass() {
// given
ExampleInitializationClass.mockServerClient = null;
// when
instanceHolder.start(1, 2, -1, -1, new ExampleInitializationClass());
// then
assertNotNull(ExampleInitializationClass.mockServerClient);
}
@Test
public void shouldNotStartServerOrProxy() {
// when
ExampleInitializationClass.mockServerClient = null;
instanceHolder.start(-1, -1, -1, -1, new ExampleInitializationClass());
// then
verifyNoMoreInteractions(mockMockServerBuilder);
verifyNoMoreInteractions(mockProxyBuilder);
assertNull(ExampleInitializationClass.mockServerClient);
}
@Test(expected = IllegalStateException.class)
public void shouldThrowExceptionIfServerRunning() {
// given
when(mockMockServer.isRunning()).thenReturn(true);
// when
instanceHolder.start(1, 2, 3, 4, null);
}
@Test(expected = IllegalStateException.class)
public void shouldThrowExceptionIfProxyRunning() {
// given
when(mockProxy.isRunning()).thenReturn(true);
// when
instanceHolder.start(1, 2, 3, 4, null);
}
@Test
public void shouldStopMockServer() {
// given
when(mockMockServer.isRunning()).thenReturn(true);
when(mockProxy.isRunning()).thenReturn(true);
// when
instanceHolder.stop();
// then
verify(mockMockServer).stop();
verify(mockProxy).stop();
}
@Test
public void shouldStopMockServerAndProxyRemotely() {
// given
InstanceHolder embeddedJettyHolder = spy(instanceHolder);
doReturn(mockServerClient).when(embeddedJettyHolder).newMockServerClient(1);
doReturn(proxyClient).when(embeddedJettyHolder).newProxyClient(2);
// when
embeddedJettyHolder.stop(1, 2);
// then
verify(mockServerClient).stop();
verify(proxyClient).stop();
}
@Test
public void shouldStopMockServerOnlyRemotely() {
// given
InstanceHolder embeddedJettyHolder = spy(instanceHolder);
doReturn(mockServerClient).when(embeddedJettyHolder).newMockServerClient(1);
doReturn(proxyClient).when(embeddedJettyHolder).newProxyClient(2);
// when
embeddedJettyHolder.stop(1, -1);
// then
verify(mockServerClient).stop();
verify(proxyClient, times(0)).stop();
}
@Test
public void shouldStopProxyOnlyRemotely() {
// given
InstanceHolder embeddedJettyHolder = spy(instanceHolder);
doReturn(mockServerClient).when(embeddedJettyHolder).newMockServerClient(1);
doReturn(proxyClient).when(embeddedJettyHolder).newProxyClient(2);
// when
embeddedJettyHolder.stop(-1, 2);
// then
verify(mockServerClient, times(0)).stop();
verify(proxyClient).stop();
}
}
| ignoring tests that are unreliable on build machine
| mockserver-maven-plugin/src/test/java/org/mockserver/maven/InstanceHolderTest.java | ignoring tests that are unreliable on build machine | <ide><path>ockserver-maven-plugin/src/test/java/org/mockserver/maven/InstanceHolderTest.java
<ide>
<ide> import org.junit.After;
<ide> import org.junit.Before;
<add>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import org.mockito.InjectMocks;
<ide> import org.mockito.Mock;
<ide> }
<ide>
<ide> @Test
<add> @Ignore
<ide> public void shouldStopMockServerAndProxyRemotely() {
<ide> // given
<ide> InstanceHolder embeddedJettyHolder = spy(instanceHolder);
<ide> }
<ide>
<ide> @Test
<add> @Ignore
<ide> public void shouldStopMockServerOnlyRemotely() {
<ide> // given
<ide> InstanceHolder embeddedJettyHolder = spy(instanceHolder);
<ide> }
<ide>
<ide> @Test
<add> @Ignore
<ide> public void shouldStopProxyOnlyRemotely() {
<ide> // given
<ide> InstanceHolder embeddedJettyHolder = spy(instanceHolder); |
|
Java | apache-2.0 | eeea0e34ca8e5ca2666fc9787099765018dbeed9 | 0 | ishan1604/Smack,unisontech/Smack,andrey42/Smack,magnetsystems/message-smack,Flowdalic/Smack,deeringc/Smack,igorexax3mal/Smack,kkroid/OnechatSmack,esl/Smack,annovanvliet/Smack,cjpx00008/Smack,ishan1604/Smack,lovely3x/Smack,Tibo-lg/Smack,vito-c/Smack,lovely3x/Smack,andrey42/Smack,deeringc/Smack,dpr-odoo/Smack,u20024804/Smack,igorexax3mal/Smack,mar-v-in/Smack,vito-c/Smack,Tibo-lg/Smack,magnetsystems/message-smack,cjpx00008/Smack,igniterealtime/Smack,qingsong-xu/Smack,dpr-odoo/Smack,kkroid/OnechatSmack,unisontech/Smack,esl/Smack,hy9902/Smack,unisontech/Smack,TTalkIM/Smack,ayne/Smack,ayne/Smack,TTalkIM/Smack,igorexax3mal/Smack,igniterealtime/Smack,mar-v-in/Smack,mar-v-in/Smack,chuangWu/Smack,Flowdalic/Smack,xuIcream/Smack,chuangWu/Smack,hy9902/Smack,qingsong-xu/Smack,vanitasvitae/Smack,Tibo-lg/Smack,annovanvliet/Smack,kkroid/OnechatSmack,igniterealtime/Smack,u20024804/Smack,TTalkIM/Smack,vanitasvitae/smack-omemo,ayne/Smack,ishan1604/Smack,vanitasvitae/smack-omemo,opg7371/Smack,vanitasvitae/Smack,hy9902/Smack,vanitasvitae/Smack,magnetsystems/message-smack,esl/Smack,xuIcream/Smack,qingsong-xu/Smack,andrey42/Smack,dpr-odoo/Smack,xuIcream/Smack,annovanvliet/Smack,lovely3x/Smack,chuangWu/Smack,deeringc/Smack,Flowdalic/Smack,opg7371/Smack,vanitasvitae/smack-omemo,cjpx00008/Smack,opg7371/Smack,u20024804/Smack | /**
*
* Copyright the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.bytestreams.socks5;
import java.io.IOException;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeoutException;
import org.jivesoftware.smack.AbstractConnectionListener;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.SmackException.NoResponseException;
import org.jivesoftware.smack.SmackException.FeatureNotSupportedException;
import org.jivesoftware.smack.SmackException.NotConnectedException;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.ConnectionCreationListener;
import org.jivesoftware.smack.XMPPConnectionRegistry;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.XMPPException.XMPPErrorException;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.XMPPError;
import org.jivesoftware.smackx.bytestreams.BytestreamListener;
import org.jivesoftware.smackx.bytestreams.BytestreamManager;
import org.jivesoftware.smackx.bytestreams.socks5.packet.Bytestream;
import org.jivesoftware.smackx.bytestreams.socks5.packet.Bytestream.StreamHost;
import org.jivesoftware.smackx.bytestreams.socks5.packet.Bytestream.StreamHostUsed;
import org.jivesoftware.smackx.disco.ServiceDiscoveryManager;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo;
import org.jivesoftware.smackx.disco.packet.DiscoverItems;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo.Identity;
import org.jivesoftware.smackx.disco.packet.DiscoverItems.Item;
import org.jivesoftware.smackx.filetransfer.FileTransferManager;
/**
* The Socks5BytestreamManager class handles establishing SOCKS5 Bytestreams as specified in the <a
* href="http://xmpp.org/extensions/xep-0065.html">XEP-0065</a>.
* <p>
* A SOCKS5 Bytestream is negotiated partly over the XMPP XML stream and partly over a separate
* socket. The actual transfer though takes place over a separately created socket.
* <p>
* A SOCKS5 Bytestream generally has three parties, the initiator, the target, and the stream host.
* The stream host is a specialized SOCKS5 proxy setup on a server, or, the initiator can act as the
* stream host.
* <p>
* To establish a SOCKS5 Bytestream invoke the {@link #establishSession(String)} method. This will
* negotiate a SOCKS5 Bytestream with the given target JID and return a socket.
* <p>
* If a session ID for the SOCKS5 Bytestream was already negotiated (e.g. while negotiating a file
* transfer) invoke {@link #establishSession(String, String)}.
* <p>
* To handle incoming SOCKS5 Bytestream requests add an {@link Socks5BytestreamListener} to the
* manager. There are two ways to add this listener. If you want to be informed about incoming
* SOCKS5 Bytestreams from a specific user add the listener by invoking
* {@link #addIncomingBytestreamListener(BytestreamListener, String)}. If the listener should
* respond to all SOCKS5 Bytestream requests invoke
* {@link #addIncomingBytestreamListener(BytestreamListener)}.
* <p>
* Note that the registered {@link Socks5BytestreamListener} will NOT be notified on incoming Socks5
* bytestream requests sent in the context of <a
* href="http://xmpp.org/extensions/xep-0096.html">XEP-0096</a> file transfer. (See
* {@link FileTransferManager})
* <p>
* If no {@link Socks5BytestreamListener}s are registered, all incoming SOCKS5 Bytestream requests
* will be rejected by returning a <not-acceptable/> error to the initiator.
*
* @author Henning Staib
*/
public final class Socks5BytestreamManager implements BytestreamManager {
/*
* create a new Socks5BytestreamManager and register a shutdown listener on every established
* connection
*/
static {
XMPPConnectionRegistry.addConnectionCreationListener(new ConnectionCreationListener() {
public void connectionCreated(final XMPPConnection connection) {
// create the manager for this connection
Socks5BytestreamManager.getBytestreamManager(connection);
// register shutdown listener
connection.addConnectionListener(new AbstractConnectionListener() {
@Override
public void connectionClosed() {
Socks5BytestreamManager.getBytestreamManager(connection).disableService();
}
@Override
public void connectionClosedOnError(Exception e) {
Socks5BytestreamManager.getBytestreamManager(connection).disableService();
}
@Override
public void reconnectionSuccessful() {
// re-create the manager for this connection
Socks5BytestreamManager.getBytestreamManager(connection);
}
});
}
});
}
/* prefix used to generate session IDs */
private static final String SESSION_ID_PREFIX = "js5_";
/* random generator to create session IDs */
private final static Random randomGenerator = new Random();
/*
 * stores one Socks5BytestreamManager for each XMPP connection.
 * NOTE(review): written from the static synchronized getBytestreamManager() and from the
 * instance-synchronized disableService() — those use different locks; verify this map is
 * never mutated concurrently from both paths.
 */
private final static Map<XMPPConnection, Socks5BytestreamManager> managers = new HashMap<XMPPConnection, Socks5BytestreamManager>();
/* XMPP connection this manager is bound to */
private final XMPPConnection connection;
/*
 * assigns a user to a listener that is informed if a bytestream request for this user is
 * received
 */
private final Map<String, BytestreamListener> userListeners = new ConcurrentHashMap<String, BytestreamListener>();
/*
 * list of listeners that respond to all bytestream requests if there are not user specific
 * listeners for that request
 */
private final List<BytestreamListener> allRequestListeners = Collections.synchronizedList(new LinkedList<BytestreamListener>());
/* listener that handles all incoming bytestream requests */
private final InitiationListener initiationListener;
/* timeout (ms) to wait for the response to the SOCKS5 Bytestream initialization request */
private int targetResponseTimeout = 10000;
/* timeout (ms) for connecting to the SOCKS5 proxy selected by the target */
private int proxyConnectionTimeout = 10000;
/* blacklist of erroneous SOCKS5 proxies (skipped on subsequent discovery runs) */
private final List<String> proxyBlacklist = Collections.synchronizedList(new LinkedList<String>());
/* remember the last proxy that worked to prioritize it */
private String lastWorkingProxy = null;
/* flag to enable/disable prioritization of last working proxy */
private boolean proxyPrioritizationEnabled = true;
/*
 * list containing session IDs of SOCKS5 Bytestream initialization packets that should be
 * ignored by the InitiationListener
 */
private List<String> ignoredBytestreamRequests = Collections.synchronizedList(new LinkedList<String>());
/**
 * Returns the Socks5BytestreamManager to handle SOCKS5 Bytestreams for a given
 * {@link XMPPConnection}.
 * <p>
 * If no manager exists for the connection, a new one is created and activated.
 *
 * @param connection the XMPP connection
 * @return the Socks5BytestreamManager for the given XMPP connection, or <code>null</code>
 *         if the given connection is <code>null</code>
 */
public static synchronized Socks5BytestreamManager getBytestreamManager(XMPPConnection connection) {
    if (connection == null) {
        return null;
    }
    Socks5BytestreamManager manager = managers.get(connection);
    if (manager == null) {
        manager = new Socks5BytestreamManager(connection);
        managers.put(connection, manager);
        // registers the initiation listener and advertises the SOCKS5 feature
        manager.activate();
    }
    return manager;
}
/**
 * Private constructor; instances are created only via
 * {@link #getBytestreamManager(XMPPConnection)}.
 *
 * @param connection the XMPP connection this manager is bound to
 */
private Socks5BytestreamManager(XMPPConnection connection) {
    this.connection = connection;
    this.initiationListener = new InitiationListener(this);
}
/**
 * Adds BytestreamListener that is called for every incoming SOCKS5 Bytestream request unless
 * there is a user specific BytestreamListener registered.
 * <p>
 * If no listeners are registered all SOCKS5 Bytestream request are rejected with a
 * &lt;not-acceptable/&gt; error.
 * <p>
 * Note that the registered {@link BytestreamListener} will NOT be notified on incoming Socks5
 * bytestream requests sent in the context of <a
 * href="http://xmpp.org/extensions/xep-0096.html">XEP-0096</a> file transfer. (See
 * {@link FileTransferManager})
 *
 * @param listener the listener to register
 */
public void addIncomingBytestreamListener(BytestreamListener listener) {
    // allRequestListeners is a synchronized list, so this is safe from any thread
    this.allRequestListeners.add(listener);
}
/**
 * Removes the given listener from the list of listeners for all incoming SOCKS5 Bytestream
 * requests.
 *
 * @param listener the listener to remove
 */
public void removeIncomingBytestreamListener(BytestreamListener listener) {
    this.allRequestListeners.remove(listener);
}
/**
 * Adds BytestreamListener that is called for every incoming SOCKS5 Bytestream request from the
 * given user.
 * <p>
 * Use this method if you are awaiting an incoming SOCKS5 Bytestream request from a specific
 * user.
 * <p>
 * If no listeners are registered all SOCKS5 Bytestream request are rejected with a
 * &lt;not-acceptable/&gt; error.
 * <p>
 * Note that the registered {@link BytestreamListener} will NOT be notified on incoming Socks5
 * bytestream requests sent in the context of <a
 * href="http://xmpp.org/extensions/xep-0096.html">XEP-0096</a> file transfer. (See
 * {@link FileTransferManager})
 *
 * @param listener the listener to register
 * @param initiatorJID the JID of the user that wants to establish a SOCKS5 Bytestream
 */
public void addIncomingBytestreamListener(BytestreamListener listener, String initiatorJID) {
    // replaces any listener previously registered for the same initiator JID
    this.userListeners.put(initiatorJID, listener);
}
/**
 * Removes the listener registered for the given user.
 *
 * @param initiatorJID the JID of the user for which the listener should be removed
 */
public void removeIncomingBytestreamListener(String initiatorJID) {
    this.userListeners.remove(initiatorJID);
}
/**
 * Use this method to ignore the next incoming SOCKS5 Bytestream request containing the given
 * session ID. No listeners will be notified for this request and no error will be returned
 * to the initiator.
 * <p>
 * This method should be used if you are awaiting a SOCKS5 Bytestream request as a reply to
 * another packet (e.g. file transfer).
 *
 * @param sessionID to be ignored
 */
public void ignoreBytestreamRequestOnce(String sessionID) {
    this.ignoredBytestreamRequests.add(sessionID);
}
/**
 * Disables the SOCKS5 Bytestream manager by removing the SOCKS5 Bytestream feature from the
 * service discovery, disabling the listener for SOCKS5 Bytestream initiation requests and
 * resetting its internal state, which includes removing this instance from the managers map.
 * <p>
 * To re-enable the SOCKS5 Bytestream feature invoke {@link #getBytestreamManager(XMPPConnection)}.
 * Using the file transfer API will automatically re-enable the SOCKS5 Bytestream feature.
 */
public synchronized void disableService() {
    // stop receiving bytestream initiation packets
    this.connection.removePacketListener(this.initiationListener);
    // shutdown the listener's worker threads
    this.initiationListener.shutdown();
    // clear listeners
    this.allRequestListeners.clear();
    this.userListeners.clear();
    // reset internal state
    this.lastWorkingProxy = null;
    this.proxyBlacklist.clear();
    this.ignoredBytestreamRequests.clear();
    // remove manager from static managers map
    managers.remove(this.connection);
    // shutdown local SOCKS5 proxy if there are no more managers for other connections
    if (managers.isEmpty()) {
        Socks5Proxy.getSocks5Proxy().stop();
    }
    // remove feature from service discovery
    ServiceDiscoveryManager serviceDiscoveryManager = ServiceDiscoveryManager.getInstanceFor(this.connection);
    // check if service discovery is not already disposed by connection shutdown
    if (serviceDiscoveryManager != null) {
        serviceDiscoveryManager.removeFeature(Bytestream.NAMESPACE);
    }
}
/**
 * Returns the timeout to wait for the response to the SOCKS5 Bytestream initialization
 * request. Default is 10000ms.
 *
 * @return the timeout in milliseconds
 */
public int getTargetResponseTimeout() {
    // a non-positive value is treated as "unset" and falls back to the default
    if (targetResponseTimeout <= 0) {
        targetResponseTimeout = 10000;
    }
    return this.targetResponseTimeout;
}
/**
 * Sets the timeout to wait for the response to the SOCKS5 Bytestream initialization request.
 * Default is 10000ms. A non-positive value is replaced by the default on the next call to
 * {@link #getTargetResponseTimeout()}.
 *
 * @param targetResponseTimeout the timeout to set
 */
public void setTargetResponseTimeout(int targetResponseTimeout) {
    this.targetResponseTimeout = targetResponseTimeout;
}
/**
 * Returns the timeout for connecting to the SOCKS5 proxy selected by the target. Default is
 * 10000ms.
 *
 * @return the timeout in milliseconds
 */
public int getProxyConnectionTimeout() {
    // a non-positive value is treated as "unset" and falls back to the default
    if (proxyConnectionTimeout <= 0) {
        proxyConnectionTimeout = 10000;
    }
    return this.proxyConnectionTimeout;
}
/**
 * Sets the timeout for connecting to the SOCKS5 proxy selected by the target. Default is
 * 10000ms. A non-positive value is replaced by the default on the next call to
 * {@link #getProxyConnectionTimeout()}.
 *
 * @param proxyConnectionTimeout the timeout to set
 */
public void setProxyConnectionTimeout(int proxyConnectionTimeout) {
    this.proxyConnectionTimeout = proxyConnectionTimeout;
}
/**
 * Returns if the prioritization of the last working SOCKS5 proxy on successive SOCKS5
 * Bytestream connections is enabled. Default is <code>true</code>.
 *
 * @return <code>true</code> if prioritization is enabled, <code>false</code> otherwise
 */
public boolean isProxyPrioritizationEnabled() {
    return proxyPrioritizationEnabled;
}
/**
 * Enable/disable the prioritization of the last working SOCKS5 proxy on successive SOCKS5
 * Bytestream connections.
 *
 * @param proxyPrioritizationEnabled enable/disable the prioritization of the last working
 *        SOCKS5 proxy
 */
public void setProxyPrioritizationEnabled(boolean proxyPrioritizationEnabled) {
    this.proxyPrioritizationEnabled = proxyPrioritizationEnabled;
}
/**
 * Establishes a SOCKS5 Bytestream with the given user and returns the Socket to send/receive
 * data to/from the user. A fresh session ID is generated for the request.
 * <p>
 * Use this method to establish SOCKS5 Bytestreams to users accepting all incoming Socks5
 * bytestream requests since this method doesn't provide a way to tell the user something about
 * the data to be sent.
 * <p>
 * To establish a SOCKS5 Bytestream after negotiation the kind of data to be sent (e.g. file
 * transfer) use {@link #establishSession(String, String)}.
 *
 * @param targetJID the JID of the user a SOCKS5 Bytestream should be established
 * @return the Socket to send/receive data to/from the user
 * @throws XMPPException if the user doesn't support or accept SOCKS5 Bytestreams, if no Socks5
 *         Proxy could be found, if the user couldn't connect to any of the SOCKS5 Proxies
 * @throws IOException if the bytestream could not be established
 * @throws InterruptedException if the current thread was interrupted while waiting
 * @throws SmackException if there was no response from the server.
 */
public Socks5BytestreamSession establishSession(String targetJID) throws XMPPException,
        IOException, InterruptedException, SmackException {
    // delegate to the two-argument variant with a freshly generated session ID
    return establishSession(targetJID, getNextSessionID());
}
/**
 * Establishes a SOCKS5 Bytestream with the given user using the given session ID and returns
 * the Socket to send/receive data to/from the user.
 *
 * @param targetJID the JID of the user a SOCKS5 Bytestream should be established
 * @param sessionID the session ID for the SOCKS5 Bytestream request
 * @return the Socket to send/receive data to/from the user
 * @throws IOException if the bytestream could not be established (including a timeout while
 *         connecting to the SOCKS5 proxy)
 * @throws InterruptedException if the current thread was interrupted while waiting
 * @throws NoResponseException if there was no response from the server
 * @throws SmackException if the target does not support SOCKS5 or no proxy is available
 * @throws XMPPException if an XMPP protocol level error occurred
 */
public Socks5BytestreamSession establishSession(String targetJID, String sessionID)
        throws IOException, InterruptedException, NoResponseException, SmackException, XMPPException {
    XMPPErrorException discoveryException = null;
    // check if target supports SOCKS5 Bytestream
    if (!supportsSocks5(targetJID)) {
        throw new FeatureNotSupportedException("SOCKS5 Bytestream", targetJID);
    }
    List<String> proxies = new ArrayList<String>();
    // determine SOCKS5 proxies from XMPP-server
    try {
        proxies.addAll(determineProxies());
    } catch (XMPPErrorException e) {
        // don't abort here, just remember the exception thrown by determineProxies()
        // determineStreamHostInfos() will at least add the local Socks5 proxy (if enabled)
        discoveryException = e;
    }
    // determine address and port of each proxy
    List<StreamHost> streamHosts = determineStreamHostInfos(proxies);
    if (streamHosts.isEmpty()) {
        // prefer reporting the discovery failure over the generic "no proxies" error
        if (discoveryException != null) {
            throw discoveryException;
        } else {
            throw new SmackException("no SOCKS5 proxies available");
        }
    }
    // compute digest identifying this transfer (sessionID + initiator + target)
    String digest = Socks5Utils.createDigest(sessionID, this.connection.getUser(), targetJID);
    // prioritize last working SOCKS5 proxy if exists
    if (this.proxyPrioritizationEnabled && this.lastWorkingProxy != null) {
        StreamHost selectedStreamHost = null;
        for (StreamHost streamHost : streamHosts) {
            if (streamHost.getJID().equals(this.lastWorkingProxy)) {
                selectedStreamHost = streamHost;
                break;
            }
        }
        if (selectedStreamHost != null) {
            // move the proxy that worked last time to the front of the offer list
            streamHosts.remove(selectedStreamHost);
            streamHosts.add(0, selectedStreamHost);
        }
    }
    Socks5Proxy socks5Proxy = Socks5Proxy.getSocks5Proxy();
    try {
        // add transfer digest to local proxy to make transfer valid
        socks5Proxy.addTransfer(digest);
        // create initiation packet
        Bytestream initiation = createBytestreamInitiation(sessionID, targetJID, streamHosts);
        // send initiation packet and wait for the target to pick a stream host
        Packet response = connection.createPacketCollectorAndSend(initiation).nextResultOrThrow(
                getTargetResponseTimeout());
        // extract used stream host from response
        StreamHostUsed streamHostUsed = ((Bytestream) response).getUsedHost();
        StreamHost usedStreamHost = initiation.getStreamHost(streamHostUsed.getJID());
        if (usedStreamHost == null) {
            throw new SmackException("Remote user responded with unknown host");
        }
        // build SOCKS5 client
        Socks5Client socks5Client = new Socks5ClientForInitiator(usedStreamHost, digest,
                this.connection, sessionID, targetJID);
        // establish connection to proxy
        Socket socket = socks5Client.getSocket(getProxyConnectionTimeout());
        // remember last working SOCKS5 proxy to prioritize it for next request
        this.lastWorkingProxy = usedStreamHost.getJID();
        // negotiation successful; the session is "direct" if we acted as our own stream host
        return new Socks5BytestreamSession(socket, usedStreamHost.getJID().equals(
                this.connection.getUser()));
    }
    catch (TimeoutException e) {
        // preserve the original exception as cause instead of discarding it
        throw new IOException("Timeout while connecting to SOCKS5 proxy", e);
    }
    finally {
        // remove transfer digest if output stream is returned or an exception occurred
        socks5Proxy.removeTransfer(digest);
    }
}
/**
 * Returns <code>true</code> if the given target JID supports feature SOCKS5 Bytestream.
 *
 * @param targetJID the target JID
 * @return <code>true</code> if the given target JID supports feature SOCKS5 Bytestream
 *         otherwise <code>false</code>
 * @throws XMPPErrorException if the service discovery request was answered with an error
 * @throws NoResponseException if there was no response from the server
 * @throws NotConnectedException if the connection is not connected
 */
private boolean supportsSocks5(String targetJID) throws NoResponseException, XMPPErrorException, NotConnectedException {
    return ServiceDiscoveryManager.getInstanceFor(connection).supportsFeature(targetJID, Bytestream.NAMESPACE);
}
/**
 * Returns a list of JIDs of SOCKS5 proxies by querying the XMPP server. The SOCKS5 proxies are
 * in the same order as returned by the XMPP server. Items that are blacklisted, unreachable or
 * do not advertise a SOCKS5 proxy identity are skipped; non-proxies and unreachable items are
 * added to the blacklist so they are not queried again.
 *
 * @return list of JIDs of SOCKS5 proxies
 * @throws XMPPErrorException if there was an error querying the XMPP server for SOCKS5 proxies
 * @throws NoResponseException if there was no response from the server.
 * @throws NotConnectedException if the connection is not connected
 */
private List<String> determineProxies() throws NoResponseException, XMPPErrorException, NotConnectedException {
    ServiceDiscoveryManager serviceDiscoveryManager = ServiceDiscoveryManager.getInstanceFor(this.connection);
    List<String> proxies = new ArrayList<String>();
    // get all items from XMPP server
    DiscoverItems discoverItems = serviceDiscoveryManager.discoverItems(this.connection.getServiceName());
    // query all items if they are SOCKS5 proxies
    for (Item item : discoverItems.getItems()) {
        // skip blacklisted servers
        if (this.proxyBlacklist.contains(item.getEntityID())) {
            continue;
        }
        DiscoverInfo proxyInfo;
        try {
            proxyInfo = serviceDiscoveryManager.discoverInfo(item.getEntityID());
        }
        catch (NoResponseException|XMPPErrorException e) {
            // blacklist erroneous server
            proxyBlacklist.add(item.getEntityID());
            continue;
        }
        // item must have at least one identity with category "proxy" and type "bytestreams".
        // BUGFIX: previously the blacklist-add was inside this loop, so an item whose first
        // identity did not match was blacklisted even if a later identity matched.
        boolean isProxy = false;
        for (Identity identity : proxyInfo.getIdentities()) {
            if ("proxy".equalsIgnoreCase(identity.getCategory())
                    && "bytestreams".equalsIgnoreCase(identity.getType())) {
                isProxy = true;
                break;
            }
        }
        if (isProxy) {
            proxies.add(item.getEntityID());
        }
        else {
            /*
             * server is not a SOCKS5 proxy, blacklist server to skip next time a Socks5
             * bytestream should be established
             */
            this.proxyBlacklist.add(item.getEntityID());
        }
    }
    return proxies;
}
/**
 * Queries each of the given SOCKS5 proxy JIDs for its network settings and returns the
 * resulting stream hosts (IP address and port). The order of the given JID list is preserved,
 * except that proxies whose settings could not be determined are dropped (and blacklisted)
 * and a locally running SOCKS5 proxy, if any, is placed first.
 *
 * @param proxies a list of SOCKS5 proxy JIDs
 * @return a list of stream hosts containing the IP address and the port
 */
private List<StreamHost> determineStreamHostInfos(List<String> proxies) {
    List<StreamHost> hosts = new ArrayList<StreamHost>();
    // a locally running proxy is always offered first
    List<StreamHost> localHosts = getLocalStreamHost();
    if (localHosts != null) {
        hosts.addAll(localHosts);
    }
    // ask every remote proxy for its address/port
    for (String proxyJid : proxies) {
        Bytestream settingsRequest = createStreamHostRequest(proxyJid);
        try {
            Bytestream settingsResponse = (Bytestream) connection.createPacketCollectorAndSend(
                    settingsRequest).nextResultOrThrow();
            hosts.addAll(settingsResponse.getStreamHosts());
        }
        catch (Exception e) {
            // the proxy could not be queried; blacklist it for subsequent attempts
            this.proxyBlacklist.add(proxyJid);
        }
    }
    return hosts;
}
/**
 * Builds the IQ-get packet used to ask a SOCKS5 proxy for its network settings.
 *
 * @param proxy the JID of the proxy to query
 * @return IQ packet to query a SOCKS5 proxy for its network settings
 */
private Bytestream createStreamHostRequest(String proxy) {
    Bytestream settingsQuery = new Bytestream();
    settingsQuery.setTo(proxy);
    settingsQuery.setType(IQ.Type.get);
    return settingsQuery;
}
/**
 * Returns the stream host information of the local SOCKS5 proxy containing the IP address and
 * the port, or <code>null</code> if the local SOCKS5 proxy is not running or no usable local
 * address could be determined. Loopback addresses are filtered out since a remote target can
 * never reach them.
 *
 * @return the stream host information of the local SOCKS5 proxy or <code>null</code> if the
 *         local SOCKS5 proxy is not running
 */
private List<StreamHost> getLocalStreamHost() {
    // get local proxy singleton
    Socks5Proxy socks5Server = Socks5Proxy.getSocks5Proxy();
    if (!socks5Server.isRunning()) {
        // server is not running
        return null;
    }
    List<String> addresses = socks5Server.getLocalAddresses();
    if (addresses.isEmpty()) {
        // local address could not be determined
        return null;
    }
    final int port = socks5Server.getPort();
    // hoisted out of the loop: this constant array was re-created on every iteration
    final String[] loopbackAddresses = { "127.0.0.1", "0:0:0:0:0:0:0:1", "::1" };
    List<StreamHost> streamHosts = new ArrayList<StreamHost>();
    outerloop: for (String address : addresses) {
        // Prevent loopback addresses from appearing as streamhost
        for (String loopbackAddress : loopbackAddresses) {
            // Use 'startsWith' here since IPv6 addresses may have scope ID,
            // ie. the part after the '%' sign.
            if (address.startsWith(loopbackAddress)) {
                continue outerloop;
            }
        }
        streamHosts.add(new StreamHost(connection.getUser(), address, port));
    }
    return streamHosts;
}
/**
 * Builds the SOCKS5 Bytestream initialization request packet for the given target, carrying
 * the given session ID and the list of stream hosts the target may connect to.
 *
 * @param sessionID the session ID for the SOCKS5 Bytestream
 * @param targetJID the target JID of SOCKS5 Bytestream request
 * @param streamHosts a list of SOCKS5 proxies the target should connect to
 * @return a SOCKS5 Bytestream initialization request packet
 */
private Bytestream createBytestreamInitiation(String sessionID, String targetJID,
        List<StreamHost> streamHosts) {
    Bytestream initiation = new Bytestream(sessionID);
    initiation.setType(IQ.Type.set);
    initiation.setTo(targetJID);
    // offer every negotiated stream host to the target
    for (StreamHost host : streamHosts) {
        initiation.addStreamHost(host);
    }
    return initiation;
}
/**
 * Replies to the given packet's sender with an XMPP error indicating that a SOCKS5 Bytestream
 * is not accepted.
 * <p>
 * Specified in XEP-65 5.3.1 (Example 13)
 * </p>
 *
 * @param packet Packet that should be answered with a not-acceptable error
 * @throws NotConnectedException if the connection is not connected
 */
protected void replyRejectPacket(IQ packet) throws NotConnectedException {
    XMPPError xmppError = new XMPPError(XMPPError.Condition.not_acceptable);
    IQ errorIQ = IQ.createErrorResponse(packet, xmppError);
    this.connection.sendPacket(errorIQ);
}
/**
 * Activates the Socks5BytestreamManager by registering the SOCKS5 Bytestream initialization
 * listener and enabling the SOCKS5 Bytestream feature.
 */
private void activate() {
    // register bytestream initiation packet listener
    this.connection.addPacketListener(this.initiationListener,
            this.initiationListener.getFilter());
    // advertise the SOCKS5 feature via service discovery
    enableService();
}
/**
 * Adds the SOCKS5 Bytestream feature to this connection's service discovery.
 */
private void enableService() {
    ServiceDiscoveryManager.getInstanceFor(this.connection).addFeature(Bytestream.NAMESPACE);
}
/**
 * Returns a new unique session ID consisting of the session prefix followed by a non-negative
 * random number.
 *
 * @return a new unique session ID
 */
private String getNextSessionID() {
    // Mask the sign bit instead of using Math.abs(): Math.abs(Long.MIN_VALUE) is still
    // negative and would have produced an ID like "js5_-9223372036854775808".
    return SESSION_ID_PREFIX + (randomGenerator.nextLong() & Long.MAX_VALUE);
}
/**
 * Returns the XMPP connection this manager is bound to.
 *
 * @return the XMPP connection
 */
protected XMPPConnection getConnection() {
    return this.connection;
}
/**
 * Returns the {@link BytestreamListener} that should be informed if a SOCKS5 Bytestream request
 * from the given initiator JID is received, or <code>null</code> if none is registered.
 *
 * @param initiator the initiator's JID
 * @return the listener, or <code>null</code> if no listener is registered for the initiator
 */
protected BytestreamListener getUserListener(String initiator) {
    return this.userListeners.get(initiator);
}
/**
 * Returns a list of {@link BytestreamListener} that are informed if there are no listeners for
 * a specific initiator.
 *
 * @return list of listeners
 */
protected List<BytestreamListener> getAllRequestListeners() {
    return this.allRequestListeners;
}
/**
 * Returns the list of session IDs that should be ignored by the InitiationListener.
 *
 * @return list of session IDs
 */
protected List<String> getIgnoredBytestreamRequests() {
    return ignoredBytestreamRequests;
}
}
| smack-extensions/src/main/java/org/jivesoftware/smackx/bytestreams/socks5/Socks5BytestreamManager.java | /**
*
* Copyright the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.bytestreams.socks5;
import java.io.IOException;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeoutException;
import org.jivesoftware.smack.AbstractConnectionListener;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.SmackException.NoResponseException;
import org.jivesoftware.smack.SmackException.FeatureNotSupportedException;
import org.jivesoftware.smack.SmackException.NotConnectedException;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.ConnectionCreationListener;
import org.jivesoftware.smack.XMPPConnectionRegistry;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.XMPPException.XMPPErrorException;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.XMPPError;
import org.jivesoftware.smackx.bytestreams.BytestreamListener;
import org.jivesoftware.smackx.bytestreams.BytestreamManager;
import org.jivesoftware.smackx.bytestreams.socks5.packet.Bytestream;
import org.jivesoftware.smackx.bytestreams.socks5.packet.Bytestream.StreamHost;
import org.jivesoftware.smackx.bytestreams.socks5.packet.Bytestream.StreamHostUsed;
import org.jivesoftware.smackx.disco.ServiceDiscoveryManager;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo;
import org.jivesoftware.smackx.disco.packet.DiscoverItems;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo.Identity;
import org.jivesoftware.smackx.disco.packet.DiscoverItems.Item;
import org.jivesoftware.smackx.filetransfer.FileTransferManager;
/**
* The Socks5BytestreamManager class handles establishing SOCKS5 Bytestreams as specified in the <a
* href="http://xmpp.org/extensions/xep-0065.html">XEP-0065</a>.
* <p>
* A SOCKS5 Bytestream is negotiated partly over the XMPP XML stream and partly over a separate
* socket. The actual transfer though takes place over a separately created socket.
* <p>
* A SOCKS5 Bytestream generally has three parties, the initiator, the target, and the stream host.
* The stream host is a specialized SOCKS5 proxy setup on a server, or, the initiator can act as the
* stream host.
* <p>
* To establish a SOCKS5 Bytestream invoke the {@link #establishSession(String)} method. This will
* negotiate a SOCKS5 Bytestream with the given target JID and return a socket.
* <p>
* If a session ID for the SOCKS5 Bytestream was already negotiated (e.g. while negotiating a file
* transfer) invoke {@link #establishSession(String, String)}.
* <p>
* To handle incoming SOCKS5 Bytestream requests add an {@link Socks5BytestreamListener} to the
* manager. There are two ways to add this listener. If you want to be informed about incoming
* SOCKS5 Bytestreams from a specific user add the listener by invoking
* {@link #addIncomingBytestreamListener(BytestreamListener, String)}. If the listener should
* respond to all SOCKS5 Bytestream requests invoke
* {@link #addIncomingBytestreamListener(BytestreamListener)}.
* <p>
* Note that the registered {@link Socks5BytestreamListener} will NOT be notified on incoming Socks5
* bytestream requests sent in the context of <a
* href="http://xmpp.org/extensions/xep-0096.html">XEP-0096</a> file transfer. (See
* {@link FileTransferManager})
* <p>
* If no {@link Socks5BytestreamListener}s are registered, all incoming SOCKS5 Bytestream requests
* will be rejected by returning a <not-acceptable/> error to the initiator.
*
* @author Henning Staib
*/
public final class Socks5BytestreamManager implements BytestreamManager {
/*
* create a new Socks5BytestreamManager and register a shutdown listener on every established
* connection
*/
static {
XMPPConnectionRegistry.addConnectionCreationListener(new ConnectionCreationListener() {
public void connectionCreated(final XMPPConnection connection) {
// create the manager for this connection
Socks5BytestreamManager.getBytestreamManager(connection);
// register shutdown listener
connection.addConnectionListener(new AbstractConnectionListener() {
@Override
public void connectionClosed() {
Socks5BytestreamManager.getBytestreamManager(connection).disableService();
}
@Override
public void connectionClosedOnError(Exception e) {
Socks5BytestreamManager.getBytestreamManager(connection).disableService();
}
@Override
public void reconnectionSuccessful() {
// re-create the manager for this connection
Socks5BytestreamManager.getBytestreamManager(connection);
}
});
}
});
}
/* prefix used to generate session IDs */
private static final String SESSION_ID_PREFIX = "js5_";
/* random generator to create session IDs */
private final static Random randomGenerator = new Random();
/* stores one Socks5BytestreamManager for each XMPP connection */
private final static Map<XMPPConnection, Socks5BytestreamManager> managers = new HashMap<XMPPConnection, Socks5BytestreamManager>();
/* XMPP connection */
private final XMPPConnection connection;
/*
* assigns a user to a listener that is informed if a bytestream request for this user is
* received
*/
private final Map<String, BytestreamListener> userListeners = new ConcurrentHashMap<String, BytestreamListener>();
/*
* list of listeners that respond to all bytestream requests if there are not user specific
* listeners for that request
*/
private final List<BytestreamListener> allRequestListeners = Collections.synchronizedList(new LinkedList<BytestreamListener>());
/* listener that handles all incoming bytestream requests */
private final InitiationListener initiationListener;
/* timeout to wait for the response to the SOCKS5 Bytestream initialization request */
private int targetResponseTimeout = 10000;
/* timeout for connecting to the SOCKS5 proxy selected by the target */
private int proxyConnectionTimeout = 10000;
/* blacklist of errornous SOCKS5 proxies */
private final List<String> proxyBlacklist = Collections.synchronizedList(new LinkedList<String>());
/* remember the last proxy that worked to prioritize it */
private String lastWorkingProxy = null;
/* flag to enable/disable prioritization of last working proxy */
private boolean proxyPrioritizationEnabled = true;
/*
* list containing session IDs of SOCKS5 Bytestream initialization packets that should be
* ignored by the InitiationListener
*/
private List<String> ignoredBytestreamRequests = Collections.synchronizedList(new LinkedList<String>());
/**
* Returns the Socks5BytestreamManager to handle SOCKS5 Bytestreams for a given
* {@link XMPPConnection}.
* <p>
* If no manager exists a new is created and initialized.
*
* @param connection the XMPP connection or <code>null</code> if given connection is
* <code>null</code>
* @return the Socks5BytestreamManager for the given XMPP connection
*/
public static synchronized Socks5BytestreamManager getBytestreamManager(XMPPConnection connection) {
if (connection == null) {
return null;
}
Socks5BytestreamManager manager = managers.get(connection);
if (manager == null) {
manager = new Socks5BytestreamManager(connection);
managers.put(connection, manager);
manager.activate();
}
return manager;
}
/**
* Private constructor.
*
* @param connection the XMPP connection
*/
private Socks5BytestreamManager(XMPPConnection connection) {
this.connection = connection;
this.initiationListener = new InitiationListener(this);
}
/**
* Adds BytestreamListener that is called for every incoming SOCKS5 Bytestream request unless
* there is a user specific BytestreamListener registered.
* <p>
* If no listeners are registered all SOCKS5 Bytestream request are rejected with a
* <not-acceptable/> error.
* <p>
* Note that the registered {@link BytestreamListener} will NOT be notified on incoming Socks5
* bytestream requests sent in the context of <a
* href="http://xmpp.org/extensions/xep-0096.html">XEP-0096</a> file transfer. (See
* {@link FileTransferManager})
*
* @param listener the listener to register
*/
public void addIncomingBytestreamListener(BytestreamListener listener) {
this.allRequestListeners.add(listener);
}
/**
* Removes the given listener from the list of listeners for all incoming SOCKS5 Bytestream
* requests.
*
* @param listener the listener to remove
*/
public void removeIncomingBytestreamListener(BytestreamListener listener) {
this.allRequestListeners.remove(listener);
}
/**
* Adds BytestreamListener that is called for every incoming SOCKS5 Bytestream request from the
* given user.
* <p>
* Use this method if you are awaiting an incoming SOCKS5 Bytestream request from a specific
* user.
* <p>
* If no listeners are registered all SOCKS5 Bytestream request are rejected with a
* <not-acceptable/> error.
* <p>
* Note that the registered {@link BytestreamListener} will NOT be notified on incoming Socks5
* bytestream requests sent in the context of <a
* href="http://xmpp.org/extensions/xep-0096.html">XEP-0096</a> file transfer. (See
* {@link FileTransferManager})
*
* @param listener the listener to register
* @param initiatorJID the JID of the user that wants to establish a SOCKS5 Bytestream
*/
public void addIncomingBytestreamListener(BytestreamListener listener, String initiatorJID) {
this.userListeners.put(initiatorJID, listener);
}
/**
* Removes the listener for the given user.
*
* @param initiatorJID the JID of the user the listener should be removed
*/
public void removeIncomingBytestreamListener(String initiatorJID) {
this.userListeners.remove(initiatorJID);
}
/**
* Use this method to ignore the next incoming SOCKS5 Bytestream request containing the given
* session ID. No listeners will be notified for this request and and no error will be returned
* to the initiator.
* <p>
* This method should be used if you are awaiting a SOCKS5 Bytestream request as a reply to
* another packet (e.g. file transfer).
*
* @param sessionID to be ignored
*/
public void ignoreBytestreamRequestOnce(String sessionID) {
this.ignoredBytestreamRequests.add(sessionID);
}
/**
* Disables the SOCKS5 Bytestream manager by removing the SOCKS5 Bytestream feature from the
* service discovery, disabling the listener for SOCKS5 Bytestream initiation requests and
* resetting its internal state, which includes removing this instance from the managers map.
* <p>
* To re-enable the SOCKS5 Bytestream feature invoke {@link #getBytestreamManager(XMPPConnection)}.
* Using the file transfer API will automatically re-enable the SOCKS5 Bytestream feature.
*/
    public synchronized void disableService() {

        // Teardown order matters: stop accepting new initiations before clearing state.

        // remove initiation packet listener
        this.connection.removePacketListener(this.initiationListener);

        // shutdown threads
        this.initiationListener.shutdown();

        // clear listeners
        this.allRequestListeners.clear();
        this.userListeners.clear();

        // reset internal state
        this.lastWorkingProxy = null;
        this.proxyBlacklist.clear();
        this.ignoredBytestreamRequests.clear();

        // remove manager from static managers map
        managers.remove(this.connection);

        // shutdown local SOCKS5 proxy if there are no more managers for other connections
        if (managers.size() == 0) {
            Socks5Proxy.getSocks5Proxy().stop();
        }

        // remove feature from service discovery
        ServiceDiscoveryManager serviceDiscoveryManager = ServiceDiscoveryManager.getInstanceFor(this.connection);

        // check if service discovery is not already disposed by connection shutdown
        if (serviceDiscoveryManager != null) {
            serviceDiscoveryManager.removeFeature(Bytestream.NAMESPACE);
        }

    }
/**
* Returns the timeout to wait for the response to the SOCKS5 Bytestream initialization request.
* Default is 10000ms.
*
* @return the timeout to wait for the response to the SOCKS5 Bytestream initialization request
*/
public int getTargetResponseTimeout() {
if (this.targetResponseTimeout <= 0) {
this.targetResponseTimeout = 10000;
}
return targetResponseTimeout;
}
/**
* Sets the timeout to wait for the response to the SOCKS5 Bytestream initialization request.
* Default is 10000ms.
*
* @param targetResponseTimeout the timeout to set
*/
    public void setTargetResponseTimeout(int targetResponseTimeout) {
        // Non-positive values cause the getter to fall back to the 10000ms default.
        this.targetResponseTimeout = targetResponseTimeout;
    }
/**
* Returns the timeout for connecting to the SOCKS5 proxy selected by the target. Default is
* 10000ms.
*
* @return the timeout for connecting to the SOCKS5 proxy selected by the target
*/
public int getProxyConnectionTimeout() {
if (this.proxyConnectionTimeout <= 0) {
this.proxyConnectionTimeout = 10000;
}
return proxyConnectionTimeout;
}
/**
* Sets the timeout for connecting to the SOCKS5 proxy selected by the target. Default is
* 10000ms.
*
* @param proxyConnectionTimeout the timeout to set
*/
    public void setProxyConnectionTimeout(int proxyConnectionTimeout) {
        // Non-positive values cause the getter to fall back to the 10000ms default.
        this.proxyConnectionTimeout = proxyConnectionTimeout;
    }
/**
* Returns if the prioritization of the last working SOCKS5 proxy on successive SOCKS5
* Bytestream connections is enabled. Default is <code>true</code>.
*
* @return <code>true</code> if prioritization is enabled, <code>false</code> otherwise
*/
    public boolean isProxyPrioritizationEnabled() {
        // When enabled, establishSession() moves the last working proxy to the
        // front of the stream host candidate list.
        return proxyPrioritizationEnabled;
    }
/**
* Enable/disable the prioritization of the last working SOCKS5 proxy on successive SOCKS5
* Bytestream connections.
*
* @param proxyPrioritizationEnabled enable/disable the prioritization of the last working
* SOCKS5 proxy
*/
    public void setProxyPrioritizationEnabled(boolean proxyPrioritizationEnabled) {
        this.proxyPrioritizationEnabled = proxyPrioritizationEnabled;
    }
/**
* Establishes a SOCKS5 Bytestream with the given user and returns the Socket to send/receive
* data to/from the user.
* <p>
* Use this method to establish SOCKS5 Bytestreams to users accepting all incoming Socks5
* bytestream requests since this method doesn't provide a way to tell the user something about
* the data to be sent.
* <p>
* To establish a SOCKS5 Bytestream after negotiation the kind of data to be sent (e.g. file
* transfer) use {@link #establishSession(String, String)}.
*
* @param targetJID the JID of the user a SOCKS5 Bytestream should be established
* @return the Socket to send/receive data to/from the user
* @throws XMPPException if the user doesn't support or accept SOCKS5 Bytestreams, if no Socks5
* Proxy could be found, if the user couldn't connect to any of the SOCKS5 Proxies
* @throws IOException if the bytestream could not be established
* @throws InterruptedException if the current thread was interrupted while waiting
* @throws SmackException if there was no response from the server.
*/
public Socks5BytestreamSession establishSession(String targetJID) throws XMPPException,
IOException, InterruptedException, SmackException {
String sessionID = getNextSessionID();
return establishSession(targetJID, sessionID);
}
/**
* Establishes a SOCKS5 Bytestream with the given user using the given session ID and returns
* the Socket to send/receive data to/from the user.
*
* @param targetJID the JID of the user a SOCKS5 Bytestream should be established
* @param sessionID the session ID for the SOCKS5 Bytestream request
* @return the Socket to send/receive data to/from the user
* @throws IOException if the bytestream could not be established
* @throws InterruptedException if the current thread was interrupted while waiting
* @throws NoResponseException
* @throws SmackException if the target does not support SOCKS5.
* @throws XMPPException
*/
    public Socks5BytestreamSession establishSession(String targetJID, String sessionID)
                    throws IOException, InterruptedException, NoResponseException, SmackException, XMPPException{
        XMPPErrorException discoveryException = null;
        // check if target supports SOCKS5 Bytestream
        if (!supportsSocks5(targetJID)) {
            throw new FeatureNotSupportedException("SOCKS5 Bytestream", targetJID);
        }

        List<String> proxies = new ArrayList<String>();
        // determine SOCKS5 proxies from XMPP-server
        try {
            proxies.addAll(determineProxies());
        } catch (XMPPErrorException e) {
            // don't abort here, just remember the exception thrown by determineProxies()
            // determineStreamHostInfos() will at least add the local Socks5 proxy (if enabled)
            discoveryException = e;
        }

        // determine address and port of each proxy
        List<StreamHost> streamHosts = determineStreamHostInfos(proxies);

        if (streamHosts.isEmpty()) {
            // Prefer surfacing the discovery error if one occurred; otherwise report
            // the generic "no proxies" condition.
            if (discoveryException != null) {
                throw discoveryException;
            } else {
                throw new SmackException("no SOCKS5 proxies available");
            }
        }

        // compute digest: SHA-1 over sessionID + initiator JID + target JID (XEP-0065)
        String digest = Socks5Utils.createDigest(sessionID, this.connection.getUser(), targetJID);

        // prioritize last working SOCKS5 proxy if exists
        if (this.proxyPrioritizationEnabled && this.lastWorkingProxy != null) {
            StreamHost selectedStreamHost = null;
            for (StreamHost streamHost : streamHosts) {
                if (streamHost.getJID().equals(this.lastWorkingProxy)) {
                    selectedStreamHost = streamHost;
                    break;
                }
            }
            if (selectedStreamHost != null) {
                // move the previously successful proxy to the front of the candidate list
                streamHosts.remove(selectedStreamHost);
                streamHosts.add(0, selectedStreamHost);
            }
        }

        Socks5Proxy socks5Proxy = Socks5Proxy.getSocks5Proxy();
        try {

            // add transfer digest to local proxy to make transfer valid
            socks5Proxy.addTransfer(digest);

            // create initiation packet
            Bytestream initiation = createBytestreamInitiation(sessionID, targetJID, streamHosts);

            // send initiation packet; blocks up to getTargetResponseTimeout() ms
            Packet response = connection.createPacketCollectorAndSend(initiation).nextResultOrThrow(
                            getTargetResponseTimeout());

            // extract used stream host from response
            StreamHostUsed streamHostUsed = ((Bytestream) response).getUsedHost();
            StreamHost usedStreamHost = initiation.getStreamHost(streamHostUsed.getJID());

            if (usedStreamHost == null) {
                throw new SmackException("Remote user responded with unknown host");
            }

            // build SOCKS5 client
            Socks5Client socks5Client = new Socks5ClientForInitiator(usedStreamHost, digest,
                            this.connection, sessionID, targetJID);

            // establish connection to proxy
            Socket socket = socks5Client.getSocket(getProxyConnectionTimeout());

            // remember last working SOCKS5 proxy to prioritize it for next request
            this.lastWorkingProxy = usedStreamHost.getJID();

            // negotiation successful, return the output stream; the session is "direct"
            // when we were selected as the stream host ourselves
            return new Socks5BytestreamSession(socket, usedStreamHost.getJID().equals(
                            this.connection.getUser()));

        }
        catch (TimeoutException e) {
            throw new IOException("Timeout while connecting to SOCKS5 proxy");
        }
        finally {

            // remove transfer digest if output stream is returned or an exception
            // occurred
            socks5Proxy.removeTransfer(digest);

        }
    }
/**
* Returns <code>true</code> if the given target JID supports feature SOCKS5 Bytestream.
*
* @param targetJID the target JID
* @return <code>true</code> if the given target JID supports feature SOCKS5 Bytestream
* otherwise <code>false</code>
* @throws XMPPErrorException
* @throws NoResponseException
* @throws NotConnectedException
*/
    private boolean supportsSocks5(String targetJID) throws NoResponseException, XMPPErrorException, NotConnectedException {
        // Service-discovery feature probe for urn 'http://jabber.org/protocol/bytestreams'.
        return ServiceDiscoveryManager.getInstanceFor(connection).supportsFeature(targetJID, Bytestream.NAMESPACE);
    }
/**
* Returns a list of JIDs of SOCKS5 proxies by querying the XMPP server. The SOCKS5 proxies are
* in the same order as returned by the XMPP server.
*
* @return list of JIDs of SOCKS5 proxies
* @throws XMPPErrorException if there was an error querying the XMPP server for SOCKS5 proxies
* @throws NoResponseException if there was no response from the server.
* @throws NotConnectedException
*/
private List<String> determineProxies() throws NoResponseException, XMPPErrorException, NotConnectedException {
ServiceDiscoveryManager serviceDiscoveryManager = ServiceDiscoveryManager.getInstanceFor(this.connection);
List<String> proxies = new ArrayList<String>();
// get all items from XMPP server
DiscoverItems discoverItems = serviceDiscoveryManager.discoverItems(this.connection.getServiceName());
// query all items if they are SOCKS5 proxies
for (Item item : discoverItems.getItems()) {
// skip blacklisted servers
if (this.proxyBlacklist.contains(item.getEntityID())) {
continue;
}
DiscoverInfo proxyInfo;
try {
proxyInfo = serviceDiscoveryManager.discoverInfo(item.getEntityID());
}
catch (NoResponseException|XMPPErrorException e) {
// blacklist errornous server
proxyBlacklist.add(item.getEntityID());
continue;
}
// item must have category "proxy" and type "bytestream"
for (Identity identity : proxyInfo.getIdentities()) {
if ("proxy".equalsIgnoreCase(identity.getCategory())
&& "bytestreams".equalsIgnoreCase(identity.getType())) {
proxies.add(item.getEntityID());
break;
}
/*
* server is not a SOCKS5 proxy, blacklist server to skip next time a Socks5
* bytestream should be established
*/
this.proxyBlacklist.add(item.getEntityID());
}
}
return proxies;
}
/**
* Returns a list of stream hosts containing the IP address an the port for the given list of
* SOCKS5 proxy JIDs. The order of the returned list is the same as the given list of JIDs
* excluding all SOCKS5 proxies who's network settings could not be determined. If a local
* SOCKS5 proxy is running it will be the first item in the list returned.
*
* @param proxies a list of SOCKS5 proxy JIDs
* @return a list of stream hosts containing the IP address an the port
*/
    private List<StreamHost> determineStreamHostInfos(List<String> proxies) {
        List<StreamHost> streamHosts = new ArrayList<StreamHost>();

        // add local proxy on first position if exists
        List<StreamHost> localProxies = getLocalStreamHost();
        if (localProxies != null) {
            streamHosts.addAll(localProxies);
        }

        // query SOCKS5 proxies for network settings
        for (String proxy : proxies) {
            Bytestream streamHostRequest = createStreamHostRequest(proxy);
            try {
                Bytestream response = (Bytestream) connection.createPacketCollectorAndSend(
                                streamHostRequest).nextResultOrThrow();
                streamHosts.addAll(response.getStreamHosts());
            }
            catch (Exception e) {
                // blacklist errornous proxies
                // NOTE: intentionally best-effort — a proxy that fails to answer is
                // skipped and blacklisted rather than aborting the whole negotiation.
                this.proxyBlacklist.add(proxy);
            }
        }

        return streamHosts;
    }
/**
* Returns a IQ packet to query a SOCKS5 proxy its network settings.
*
* @param proxy the proxy to query
* @return IQ packet to query a SOCKS5 proxy its network settings
*/
    private Bytestream createStreamHostRequest(String proxy) {
        // An IQ-get addressed to the proxy asks it for its advertised host/port pairs.
        Bytestream request = new Bytestream();
        request.setType(IQ.Type.get);
        request.setTo(proxy);
        return request;
    }
/**
* Returns the stream host information of the local SOCKS5 proxy containing the IP address and
* the port or null if local SOCKS5 proxy is not running.
*
* @return the stream host information of the local SOCKS5 proxy or null if local SOCKS5 proxy
* is not running
*/
private List<StreamHost> getLocalStreamHost() {
// get local proxy singleton
Socks5Proxy socks5Server = Socks5Proxy.getSocks5Proxy();
if (!socks5Server.isRunning()) {
// server is not running
return null;
}
List<String> addresses = socks5Server.getLocalAddresses();
if (addresses.isEmpty()) {
// local address could not be determined
return null;
}
final int port = socks5Server.getPort();
List<StreamHost> streamHosts = new ArrayList<StreamHost>();
outerloop: for (String address : addresses) {
// Prevent loopback addresses from appearing as streamhost
final String[] loopbackAddresses = { "127.0.0.1", "0:0:0:0:0:0:0:1" };
for (String loopbackAddress : loopbackAddresses) {
// Use 'startsWith' here since IPv6 addresses may have scope ID,
// ie. the part after the '%' sign.
if (address.startsWith(loopbackAddress)) {
continue outerloop;
}
}
streamHosts.add(new StreamHost(connection.getUser(), address, port));
}
return streamHosts;
}
/**
* Returns a SOCKS5 Bytestream initialization request packet with the given session ID
* containing the given stream hosts for the given target JID.
*
* @param sessionID the session ID for the SOCKS5 Bytestream
* @param targetJID the target JID of SOCKS5 Bytestream request
* @param streamHosts a list of SOCKS5 proxies the target should connect to
* @return a SOCKS5 Bytestream initialization request packet
*/
private Bytestream createBytestreamInitiation(String sessionID, String targetJID,
List<StreamHost> streamHosts) {
Bytestream initiation = new Bytestream(sessionID);
// add all stream hosts
for (StreamHost streamHost : streamHosts) {
initiation.addStreamHost(streamHost);
}
initiation.setType(IQ.Type.set);
initiation.setTo(targetJID);
return initiation;
}
/**
* Responses to the given packet's sender with a XMPP error that a SOCKS5 Bytestream is not
* accepted.
* <p>
* Specified in XEP-65 5.3.1 (Example 13)
* </p>
*
* @param packet Packet that should be answered with a not-acceptable error
* @throws NotConnectedException
*/
    protected void replyRejectPacket(IQ packet) throws NotConnectedException {
        // XEP-0065 5.3.1 (Example 13): answer an unwanted initiation with not-acceptable.
        XMPPError xmppError = new XMPPError(XMPPError.Condition.not_acceptable);
        IQ errorIQ = IQ.createErrorResponse(packet, xmppError);
        this.connection.sendPacket(errorIQ);
    }
/**
* Activates the Socks5BytestreamManager by registering the SOCKS5 Bytestream initialization
* listener and enabling the SOCKS5 Bytestream feature.
*/
    private void activate() {
        // register bytestream initiation packet listener
        this.connection.addPacketListener(this.initiationListener,
                        this.initiationListener.getFilter());

        // enable SOCKS5 feature in service discovery
        enableService();
    }
/**
* Adds the SOCKS5 Bytestream feature to the service discovery.
*/
    private void enableService() {
        // Advertise the bytestreams namespace so peers can discover SOCKS5 support.
        ServiceDiscoveryManager manager = ServiceDiscoveryManager.getInstanceFor(this.connection);
        manager.addFeature(Bytestream.NAMESPACE);
    }
/**
* Returns a new unique session ID.
*
* @return a new unique session ID
*/
private String getNextSessionID() {
StringBuilder buffer = new StringBuilder();
buffer.append(SESSION_ID_PREFIX);
buffer.append(Math.abs(randomGenerator.nextLong()));
return buffer.toString();
}
/**
* Returns the XMPP connection.
*
* @return the XMPP connection
*/
    protected XMPPConnection getConnection() {
        // Accessor for collaborating classes in this package (e.g. the initiation listener).
        return this.connection;
    }
/**
* Returns the {@link BytestreamListener} that should be informed if a SOCKS5 Bytestream request
* from the given initiator JID is received.
*
* @param initiator the initiator's JID
* @return the listener
*/
    protected BytestreamListener getUserListener(String initiator) {
        // May return null when no per-user listener was registered for this initiator.
        return this.userListeners.get(initiator);
    }
/**
* Returns a list of {@link BytestreamListener} that are informed if there are no listeners for
* a specific initiator.
*
* @return list of listeners
*/
    protected List<BytestreamListener> getAllRequestListeners() {
        // Fallback listeners consulted when no per-user listener matches the initiator.
        return this.allRequestListeners;
    }
/**
* Returns the list of session IDs that should be ignored by the InitialtionListener
*
* @return list of session IDs
*/
    protected List<String> getIgnoredBytestreamRequests() {
        // Session IDs the InitiationListener must silently drop (see ignoreBytestreamRequestOnce).
        return ignoredBytestreamRequests;
    }
}
| Add '::1' to the loopbackAddresses
| smack-extensions/src/main/java/org/jivesoftware/smackx/bytestreams/socks5/Socks5BytestreamManager.java | Add '::1' to the loopbackAddresses | <ide><path>mack-extensions/src/main/java/org/jivesoftware/smackx/bytestreams/socks5/Socks5BytestreamManager.java
<ide> List<StreamHost> streamHosts = new ArrayList<StreamHost>();
<ide> outerloop: for (String address : addresses) {
<ide> // Prevent loopback addresses from appearing as streamhost
<del> final String[] loopbackAddresses = { "127.0.0.1", "0:0:0:0:0:0:0:1" };
<add> final String[] loopbackAddresses = { "127.0.0.1", "0:0:0:0:0:0:0:1", "::1" };
<ide> for (String loopbackAddress : loopbackAddresses) {
<ide> // Use 'startsWith' here since IPv6 addresses may have scope ID,
<ide> // ie. the part after the '%' sign. |
|
Java | apache-2.0 | c6439710299ec3332245fc73755dc022b85af884 | 0 | maxmind/minfraud-api-java,maxmind/minfraud-api-java | package com.maxmind.minfraud.response;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* This class contains minFraud response data related to the credit card.
*/
public final class CreditCard {
private final Issuer issuer;
private final String brand;
private final String country;
private final Boolean isIssuedInBillingAddressCountry;
private final Boolean isPrepaid;
private final Type type;
public CreditCard(
@JsonProperty("brand") String brand,
@JsonProperty("country") String country,
@JsonProperty("is_issued_in_billing_address_country") Boolean isIssuedInBillingAddressCountry,
@JsonProperty("is_prepaid") Boolean isPrepaid,
@JsonProperty("issuer") Issuer issuer,
@JsonProperty("type") Type type
) {
this.brand = brand;
this.country = country;
this.isIssuedInBillingAddressCountry = isIssuedInBillingAddressCountry;
this.isPrepaid = isPrepaid;
this.issuer = issuer == null ? new Issuer() : issuer;
this.type = type == null ? Type.BLANK : type;
}
public CreditCard() {
this(null, null, null, null, null, null);
}
/**
* @return The {@code Issuer} model object.
*/
public Issuer getIssuer() {
return issuer;
}
/**
* @return The credit card brand.
*/
@JsonProperty("brand")
public String getBrand() {
return brand;
}
/**
* @return The two letter <a href="http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2">
* ISO 3166-1 alpha-2</a> country code associated with the location
* of the majority of customers using this credit card as determined
* by their billing address. In cases where the location of customers
* is highly mixed, this defaults to the country of the bank issuing
* the card.
*/
public String getCountry() {
return country;
}
/**
* @return True if the country of the billing address matches the country
* of the majority of customers using that IIN. In cases where the
* location of customers is highly mixed, the match is to the country of
* the bank issuing the card.
*/
@JsonProperty("is_issued_in_billing_address_country")
public Boolean isIssuedInBillingAddressCountry() {
return isIssuedInBillingAddressCountry;
}
/**
* @return True if the card is a prepaid card. False if not prepaid. If
* the IIN was not provided or is unknown, null will be returned.
*/
@JsonProperty("is_prepaid")
public Boolean isPrepaid() {
return isPrepaid;
}
/**
* @return The credit card type.
*/
@JsonProperty("type")
public Type getType() {
return type;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("CreditCard{");
sb.append("issuer=").append(this.issuer);
sb.append(", brand='").append(this.brand).append('\'');
sb.append(", country='").append(this.country).append('\'');
sb.append(", isIssuedInBillingAddressCountry=").append(this.isIssuedInBillingAddressCountry);
sb.append(", isPrepaid=").append(this.isPrepaid);
sb.append(", type='").append(this.type).append('\'');
sb.append('}');
return sb.toString();
}
/**
* The enumerated credit card types
*/
public enum Type {
BLANK,
CHARGE,
CREDIT,
DEBIT;
@JsonCreator
public static Type forValue(String value) {
/* We don't handle BLANK here because we expect minFraud to leave
* fields out entirely, not set it to an empty string. */
return Type.valueOf(value.toUpperCase());
}
public String toString() {
if (this == BLANK) {
return "";
}
return this.name().toLowerCase();
}
}
}
| src/main/java/com/maxmind/minfraud/response/CreditCard.java | package com.maxmind.minfraud.response;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* This class contains minFraud response data related to the credit card.
*/
public final class CreditCard {
private final Issuer issuer;
private final String brand;
private final String country;
private final Boolean isIssuedInBillingAddressCountry;
private final Boolean isPrepaid;
private final Type type;
public CreditCard(
@JsonProperty("brand") String brand,
@JsonProperty("country") String country,
@JsonProperty("is_issued_in_billing_address_country") Boolean isIssuedInBillingAddressCountry,
@JsonProperty("is_prepaid") Boolean isPrepaid,
@JsonProperty("issuer") Issuer issuer,
@JsonProperty("type") Type type
) {
this.brand = brand;
this.country = country;
this.isIssuedInBillingAddressCountry = isIssuedInBillingAddressCountry;
this.isPrepaid = isPrepaid;
this.issuer = issuer == null ? new Issuer() : issuer;
this.type = type == null ? Type.BLANK : type;
}
public CreditCard() {
this(null, null, null, null, null, null);
}
/**
* @return The {@code Issuer} model object.
*/
public Issuer getIssuer() {
return issuer;
}
/**
* @return The credit card brand.
*/
@JsonProperty("brand")
public String getBrand() {
return brand;
}
/**
* @return The two letter <a href="http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2">
* ISO 3166-1 alpha-2</a> country code associated with the location
* of the majority of customers using this credit card as determined
* by their billing address. In cases where the location of customers
* is highly mixed, this defaults to the country of the bank issuing
* the card.
*/
public String getCountry() {
return country;
}
/**
* @return True if the country of the billing address matches the country
* of the majority of customers using that IIN. In cases where the
* location of customers is highly mixed, the match is to the country of
* the bank issuing the card.
*/
@JsonProperty("is_issued_in_billing_address_country")
public Boolean isIssuedInBillingAddressCountry() {
return isIssuedInBillingAddressCountry;
}
/**
* @return True if the card is a prepaid card. False if not prepaid. If
* the IIN was not provided or is unknown, null will be returned.
*/
@JsonProperty("is_prepaid")
public Boolean isPrepaid() {
return isPrepaid;
}
/**
* @return The credit card type.
*/
@JsonProperty("type")
public Type getType() {
return type;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("CreditCard{");
sb.append("issuer=").append(this.issuer);
sb.append(", brand='").append(this.brand).append('\'');
sb.append(", country='").append(this.country).append('\'');
sb.append(", isIssuedInBillingAddressCountry=").append(this.isIssuedInBillingAddressCountry);
sb.append(", isPrepaid=").append(this.isPrepaid);
sb.append(", type='").append(this.type).append('\'');
sb.append('}');
return sb.toString();
}
/**
* The enumerated credit card types
*/
public enum Type {
BLANK,
CHARGE,
CREDIT,
DEBIT;
@JsonCreator
public static Type forValue(String value) {
/* We don't handle BLANK here because we expect minFraud to leave
* fields out entirely, not set it to an empty string. */
return Type.valueOf(value.toUpperCase());
}
public String toString() {
if (this == BLANK) {
return "";
}
return this.name().toLowerCase();
}
}
}
| The response.CreditCard class does not use the JsonIgnore annotation
| src/main/java/com/maxmind/minfraud/response/CreditCard.java | The response.CreditCard class does not use the JsonIgnore annotation | <ide><path>rc/main/java/com/maxmind/minfraud/response/CreditCard.java
<ide> package com.maxmind.minfraud.response;
<ide>
<ide> import com.fasterxml.jackson.annotation.JsonCreator;
<del>import com.fasterxml.jackson.annotation.JsonIgnore;
<ide> import com.fasterxml.jackson.annotation.JsonProperty;
<ide>
<ide> /** |
|
Java | apache-2.0 | 9fc25bd19eab385156be07cb81a4db03bf9a26a2 | 0 | atomix/copycat,atomix/copycat | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package io.atomix.copycat.client.session;
import io.atomix.catalyst.util.Assert;
import io.atomix.catalyst.util.concurrent.Scheduled;
import io.atomix.catalyst.util.concurrent.ThreadContext;
import io.atomix.copycat.client.ConnectionStrategy;
import io.atomix.copycat.client.error.RaftError;
import io.atomix.copycat.client.request.KeepAliveRequest;
import io.atomix.copycat.client.request.RegisterRequest;
import io.atomix.copycat.client.request.UnregisterRequest;
import io.atomix.copycat.client.response.KeepAliveResponse;
import io.atomix.copycat.client.response.RegisterResponse;
import io.atomix.copycat.client.response.Response;
import io.atomix.copycat.client.response.UnregisterResponse;
import io.atomix.copycat.client.util.ClientConnection;
import java.net.ConnectException;
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
/**
* Client session manager.
*
* @author <a href="http://github.com/kuujo>Jordan Halterman</a>
*/
final class ClientSessionManager {
private final ClientSessionState state;
private final ClientConnection connection;
private final ThreadContext context;
private final ConnectionStrategy strategy;
private Duration interval;
private Scheduled keepAlive;
  ClientSessionManager(ClientConnection connection, ClientSessionState state, ThreadContext context, ConnectionStrategy connectionStrategy) {
    // All collaborators are mandatory; Assert.notNull throws NullPointerException
    // with the argument name when any is missing.
    this.connection = Assert.notNull(connection, "connection");
    this.state = Assert.notNull(state, "state");
    this.context = Assert.notNull(context, "context");
    this.strategy = Assert.notNull(connectionStrategy, "connectionStrategy");
  }
/**
* Opens the session manager.
*
* @return A completable future to be called once the session manager is opened.
*/
public CompletableFuture<Void> open() {
CompletableFuture<Void> future = new CompletableFuture<>();
context.executor().execute(() -> register(new RegisterAttempt(1, future)));
return future;
}
/**
* Registers a session.
*/
  private void register(RegisterAttempt attempt) {
    state.getLogger().debug("Registering session: attempt {}", attempt.attempt);

    RegisterRequest request = RegisterRequest.builder()
      .withClient(state.getClientId())
      .build();

    state.getLogger().debug("Sending {}", request);
    // reset() reconnects using the full server list before the register attempt.
    connection.reset().<RegisterRequest, RegisterResponse>send(request).whenComplete((response, error) -> {
      if (error == null) {
        state.getLogger().debug("Received {}", response);
        if (response.status() == Response.Status.OK) {
          // Keep-alives are sent at half the server-granted session timeout.
          interval = Duration.ofMillis(response.timeout()).dividedBy(2);
          connection.reset(response.leader(), response.members());
          state.setSessionId(response.session())
            .setState(Session.State.OPEN);
          state.getLogger().info("Registered session {}", response.session());
          attempt.complete();
          keepAlive();
        } else {
          // Non-OK response: let the connection strategy decide whether to retry.
          strategy.attemptFailed(attempt);
        }
      } else {
        strategy.attemptFailed(attempt);
      }
    });
  }
/**
* Sends a keep-alive request to the cluster.
*/
  private void keepAlive() {
    // Allow one retry against the full server list if the leader-directed attempt fails.
    keepAlive(true);
  }
/**
* Sends a keep-alive request to the cluster.
*/
  private void keepAlive(boolean retryOnFailure) {
    long sessionId = state.getSessionId();

    // If the current sessions state is unstable, reset the connection before sending a keep-alive.
    if (state.getState() == Session.State.UNSTABLE)
      connection.reset();

    KeepAliveRequest request = KeepAliveRequest.builder()
      .withSession(sessionId)
      .withCommandSequence(state.getCommandResponse())
      .withEventIndex(state.getCompleteIndex())
      .build();

    // Schedule the next keep-alive up front so a slow/lost response cannot stall the cycle.
    scheduleKeepAlive();

    state.getLogger().debug("{} - Sending {}", sessionId, request);
    connection.<KeepAliveRequest, KeepAliveResponse>send(request).whenComplete((response, error) -> {
      // Ignore late responses once the session has been closed locally.
      if (state.getState() != Session.State.CLOSED) {
        if (error == null) {
          state.getLogger().debug("{} - Received {}", sessionId, response);
          // If the request was successful, update the address selector and schedule the next keep-alive.
          if (response.status() == Response.Status.OK) {
            connection.reset(response.leader(), response.members());
            state.setState(Session.State.OPEN);
            scheduleKeepAlive();
          }
          // If the session is unknown, immediate expire the session.
          else if (response.error() == RaftError.Type.UNKNOWN_SESSION_ERROR) {
            state.setState(Session.State.EXPIRED);
          }
          // If a leader is still set in the address selector, unset the leader and attempt to send another keep-alive.
          // This will ensure that the address selector selects all servers without filtering on the leader.
          else if (retryOnFailure && connection.leader() != null) {
            connection.reset(null, connection.servers());
            keepAlive(false);
          }
          // If no leader was set, set the session state to unstable and schedule another keep-alive.
          else {
            state.setState(Session.State.UNSTABLE);
            scheduleKeepAlive();
          }
        }
        // If a leader is still set in the address selector, unset the leader and attempt to send another keep-alive.
        // This will ensure that the address selector selects all servers without filtering on the leader.
        else if (retryOnFailure && connection.leader() != null) {
          connection.reset(null, connection.servers());
          keepAlive(false);
        }
        // If no leader was set, set the session state to unstable and schedule another keep-alive.
        else {
          state.setState(Session.State.UNSTABLE);
          scheduleKeepAlive();
        }
      }
    });
  }
/**
* Schedules a keep-alive request.
*/
private void scheduleKeepAlive() {
if (keepAlive != null)
keepAlive.cancel();
keepAlive = context.schedule(interval, () -> {
keepAlive = null;
keepAlive();
});
}
/**
* Closes the session manager.
*
* @return A completable future to be completed once the session manager is closed.
*/
public CompletableFuture<Void> close() {
if (state.getState() == Session.State.EXPIRED)
return CompletableFuture.completedFuture(null);
CompletableFuture<Void> future = new CompletableFuture<>();
context.executor().execute(() -> {
if (keepAlive != null)
keepAlive.cancel();
unregister(future);
});
return future;
}
/**
* Unregisters the session.
*/
  private void unregister(CompletableFuture<Void> future) {
    // Allow one retry against the full server list if the leader-directed attempt fails.
    unregister(true, future);
  }
/**
* Unregisters the session.
*
* @param future A completable future to be completed once the session is unregistered.
*/
  private void unregister(boolean retryOnFailure, CompletableFuture<Void> future) {
    long sessionId = state.getSessionId();
    state.getLogger().debug("Unregistering session: {}", sessionId);

    // If a keep-alive request is already pending, cancel it.
    if (keepAlive != null)
      keepAlive.cancel();

    // If the current sessions state is unstable, reset the connection before sending an unregister request.
    if (state.getState() == Session.State.UNSTABLE)
      connection.reset();

    UnregisterRequest request = UnregisterRequest.builder()
      .withSession(sessionId)
      .build();

    state.getLogger().debug("{} - Sending {}", sessionId, request);
    connection.<UnregisterRequest, UnregisterResponse>send(request).whenComplete((response, error) -> {
      // Ignore late responses once the session has been closed locally.
      if (state.getState() != Session.State.CLOSED) {
        if (error == null) {
          state.getLogger().debug("{} - Received {}", sessionId, response);
          // If the request was successful, update the session state and complete the close future.
          if (response.status() == Response.Status.OK) {
            state.setState(Session.State.CLOSED);
            future.complete(null);
          }
          // If the session is unknown, immediate expire the session and complete the close future.
          else if (response.error() == RaftError.Type.UNKNOWN_SESSION_ERROR) {
            state.setState(Session.State.EXPIRED);
            future.complete(null);
          }
          // If a leader is still set in the address selector, unset the leader and send another unregister attempt.
          // This will ensure that the address selector selects all servers without filtering on the leader.
          else if (retryOnFailure && connection.leader() != null) {
            connection.reset(null, connection.servers());
            unregister(false, future);
          }
          // If no leader was set, set the session state to unstable and schedule another unregister attempt.
          else {
            state.setState(Session.State.UNSTABLE);
            keepAlive = context.schedule(interval, () -> unregister(future));
          }
        }
        // If a leader is still set in the address selector, unset the leader and send another unregister attempt.
        // This will ensure that the address selector selects all servers without filtering on the leader.
        else if (retryOnFailure && connection.leader() != null) {
          connection.reset(null, connection.servers());
          unregister(false, future);
        }
        // If no leader was set, set the session state to unstable and schedule another unregister attempt.
        else {
          state.setState(Session.State.UNSTABLE);
          keepAlive = context.schedule(interval, () -> unregister(future));
        }
      }
    });
  }
/**
 * Kills the client session manager locally, without unregistering the session
 * with the cluster.
 *
 * @return A completable future to be completed once the session manager is killed.
 */
public CompletableFuture<Void> kill() {
  Runnable killTask = () -> {
    Scheduled pending = keepAlive;
    if (pending != null) {
      pending.cancel();
    }
    state.setState(Session.State.CLOSED);
  };
  return CompletableFuture.runAsync(killTask, context.executor());
}
@Override
public String toString() {
  // Renders as e.g. "ClientSessionManager[session=42]".
  return getClass().getSimpleName() + "[session=" + state.getSessionId() + "]";
}
/**
 * A single attempt to register the client's session with the cluster.
 * <p>
 * Each retry creates a new attempt with an incremented attempt count while
 * carrying forward the same completion future.
 */
private final class RegisterAttempt implements ConnectionStrategy.Attempt {
  private final int attempt;
  private final CompletableFuture<Void> future;

  private RegisterAttempt(int attempt, CompletableFuture<Void> future) {
    this.attempt = attempt;
    this.future = future;
  }

  @Override
  public int attempt() {
    return attempt;
  }

  /**
   * Completes the attempt successfully.
   */
  public void complete() {
    complete(null);
  }

  /**
   * Completes the attempt successfully with the given result.
   *
   * @param result The attempt result.
   */
  public void complete(Void result) {
    future.complete(result);
  }

  @Override
  public void fail() {
    // Delegate to fail(Throwable) with the generic registration failure cause.
    fail(new ConnectException("failed to register session"));
  }

  @Override
  public void fail(Throwable error) {
    future.completeExceptionally(error);
  }

  @Override
  public void retry() {
    state.getLogger().debug("Retrying session register attempt");
    register(new RegisterAttempt(attempt + 1, future));
  }

  @Override
  public void retry(Duration after) {
    state.getLogger().debug("Retrying session register attempt");
    context.schedule(after, () -> register(new RegisterAttempt(attempt + 1, future)));
  }
}
}
| client/src/main/java/io/atomix/copycat/client/session/ClientSessionManager.java | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package io.atomix.copycat.client.session;
import io.atomix.catalyst.util.Assert;
import io.atomix.catalyst.util.concurrent.Scheduled;
import io.atomix.catalyst.util.concurrent.ThreadContext;
import io.atomix.copycat.client.ConnectionStrategy;
import io.atomix.copycat.client.error.RaftError;
import io.atomix.copycat.client.request.KeepAliveRequest;
import io.atomix.copycat.client.request.RegisterRequest;
import io.atomix.copycat.client.request.UnregisterRequest;
import io.atomix.copycat.client.response.KeepAliveResponse;
import io.atomix.copycat.client.response.RegisterResponse;
import io.atomix.copycat.client.response.Response;
import io.atomix.copycat.client.response.UnregisterResponse;
import io.atomix.copycat.client.util.ClientConnection;
import java.net.ConnectException;
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
/**
* Client session manager.
*
* @author <a href="http://github.com/kuujo>Jordan Halterman</a>
*/
/**
 * Client session manager.
 * <p>
 * Manages the full lifecycle of a client's session with the cluster: registering
 * the session, sending periodic keep-alive requests to prevent session expiration,
 * and unregistering the session on close. All operations run on the client
 * {@link ThreadContext}.
 *
 * @author <a href="http://github.com/kuujo">Jordan Halterman</a>
 */
final class ClientSessionManager {
  private final ClientSessionState state;
  private final ClientConnection connection;
  private final ThreadContext context;
  private final ConnectionStrategy strategy;
  private Duration interval;
  private Scheduled keepAlive;

  ClientSessionManager(ClientConnection connection, ClientSessionState state, ThreadContext context, ConnectionStrategy connectionStrategy) {
    this.connection = Assert.notNull(connection, "connection");
    this.state = Assert.notNull(state, "state");
    this.context = Assert.notNull(context, "context");
    this.strategy = Assert.notNull(connectionStrategy, "connectionStrategy");
  }

  /**
   * Opens the session manager by registering a new session with the cluster.
   *
   * @return A completable future to be called once the session manager is opened.
   */
  public CompletableFuture<Void> open() {
    CompletableFuture<Void> future = new CompletableFuture<>();
    context.executor().execute(() -> register(new RegisterAttempt(1, future)));
    return future;
  }

  /**
   * Registers a session. On failure, the configured {@link ConnectionStrategy}
   * decides whether and when the attempt is retried.
   */
  private void register(RegisterAttempt attempt) {
    state.getLogger().debug("Registering session: attempt {}", attempt.attempt);

    RegisterRequest request = RegisterRequest.builder()
      .withClient(state.getClientId())
      .build();

    state.getLogger().debug("Sending {}", request);
    connection.reset().<RegisterRequest, RegisterResponse>send(request).whenComplete((response, error) -> {
      if (error == null) {
        state.getLogger().debug("Received {}", response);
        if (response.status() == Response.Status.OK) {
          // Keep-alives are sent at half the session timeout to tolerate a missed round trip.
          interval = Duration.ofMillis(response.timeout()).dividedBy(2);
          connection.reset(response.leader(), response.members());
          state.setSessionId(response.session())
            .setState(Session.State.OPEN);
          state.getLogger().info("Registered session {}", response.session());
          attempt.complete();
          keepAlive();
        } else {
          strategy.attemptFailed(attempt);
        }
      } else {
        strategy.attemptFailed(attempt);
      }
    });
  }

  /**
   * Sends a keep-alive request to the cluster.
   */
  private void keepAlive() {
    keepAlive(true);
  }

  /**
   * Sends a keep-alive request to the cluster.
   *
   * @param retryOnFailure Whether to immediately retry against non-leader servers
   *                       when the request fails with a leader still selected.
   */
  private void keepAlive(boolean retryOnFailure) {
    long sessionId = state.getSessionId();

    // If the current sessions state is unstable, reset the connection before sending a keep-alive.
    if (state.getState() == Session.State.UNSTABLE)
      connection.reset();

    KeepAliveRequest request = KeepAliveRequest.builder()
      .withSession(sessionId)
      .withCommandSequence(state.getCommandResponse())
      .withEventIndex(state.getCompleteIndex())
      .build();

    // Schedule the next keep-alive *before* sending the request. If this request
    // never completes (e.g. a hung connection), the externally scheduled timeout
    // still fires and the keep-alive loop cannot silently stop, which would
    // otherwise let the session expire.
    scheduleKeepAlive();

    state.getLogger().debug("{} - Sending {}", sessionId, request);
    connection.<KeepAliveRequest, KeepAliveResponse>send(request).whenComplete((response, error) -> {
      if (state.getState() != Session.State.CLOSED) {
        if (error == null) {
          state.getLogger().debug("{} - Received {}", sessionId, response);
          // If the request was successful, update the address selector and schedule the next keep-alive.
          if (response.status() == Response.Status.OK) {
            connection.reset(response.leader(), response.members());
            state.setState(Session.State.OPEN);
            scheduleKeepAlive();
          }
          // If the session is unknown, immediately expire the session and cancel the
          // pending keep-alive; an expired session cannot be revived by keep-alives.
          else if (response.error() == RaftError.Type.UNKNOWN_SESSION_ERROR) {
            if (keepAlive != null) {
              keepAlive.cancel();
              keepAlive = null;
            }
            state.setState(Session.State.EXPIRED);
          }
          // If a leader is still set in the address selector, unset the leader and attempt to send another keep-alive.
          // This will ensure that the address selector selects all servers without filtering on the leader.
          else if (retryOnFailure && connection.leader() != null) {
            connection.reset(null, connection.servers());
            keepAlive(false);
          }
          // If no leader was set, set the session state to unstable and schedule another keep-alive.
          else {
            state.setState(Session.State.UNSTABLE);
            scheduleKeepAlive();
          }
        }
        // If a leader is still set in the address selector, unset the leader and attempt to send another keep-alive.
        // This will ensure that the address selector selects all servers without filtering on the leader.
        else if (retryOnFailure && connection.leader() != null) {
          connection.reset(null, connection.servers());
          keepAlive(false);
        }
        // If no leader was set, set the session state to unstable and schedule another keep-alive.
        else {
          state.setState(Session.State.UNSTABLE);
          scheduleKeepAlive();
        }
      }
    });
  }

  /**
   * Schedules the next keep-alive request, cancelling any previously scheduled
   * keep-alive so that at most one is ever pending.
   */
  private void scheduleKeepAlive() {
    if (keepAlive != null)
      keepAlive.cancel();
    keepAlive = context.schedule(interval, () -> {
      keepAlive = null;
      keepAlive();
    });
  }

  /**
   * Closes the session manager by unregistering the session with the cluster.
   *
   * @return A completable future to be completed once the session manager is closed.
   */
  public CompletableFuture<Void> close() {
    // An expired session needs no unregistration; the cluster already discarded it.
    if (state.getState() == Session.State.EXPIRED)
      return CompletableFuture.completedFuture(null);

    CompletableFuture<Void> future = new CompletableFuture<>();
    context.executor().execute(() -> {
      if (keepAlive != null)
        keepAlive.cancel();
      unregister(future);
    });
    return future;
  }

  /**
   * Unregisters the session, retrying on failure by default.
   *
   * @param future A completable future to be completed once the session is unregistered.
   */
  private void unregister(CompletableFuture<Void> future) {
    unregister(true, future);
  }

  /**
   * Unregisters the session.
   * <p>
   * On failure, if a leader is still known the request is retried once against
   * the full server set; otherwise the session is marked unstable and another
   * attempt is scheduled after the keep-alive interval.
   *
   * @param retryOnFailure Whether to retry against non-leader servers on failure.
   * @param future A completable future to be completed once the session is unregistered.
   */
  private void unregister(boolean retryOnFailure, CompletableFuture<Void> future) {
    long sessionId = state.getSessionId();
    state.getLogger().debug("Unregistering session: {}", sessionId);

    // If a keep-alive request is already pending, cancel it.
    if (keepAlive != null)
      keepAlive.cancel();

    // If the current sessions state is unstable, reset the connection before sending an unregister request.
    if (state.getState() == Session.State.UNSTABLE)
      connection.reset();

    UnregisterRequest request = UnregisterRequest.builder()
      .withSession(sessionId)
      .build();

    state.getLogger().debug("{} - Sending {}", sessionId, request);
    connection.<UnregisterRequest, UnregisterResponse>send(request).whenComplete((response, error) -> {
      if (state.getState() != Session.State.CLOSED) {
        if (error == null) {
          state.getLogger().debug("{} - Received {}", sessionId, response);
          // If the request was successful, update the session state and complete the close future.
          if (response.status() == Response.Status.OK) {
            state.setState(Session.State.CLOSED);
            future.complete(null);
          }
          // If the session is unknown, immediately expire the session and complete the close future.
          else if (response.error() == RaftError.Type.UNKNOWN_SESSION_ERROR) {
            state.setState(Session.State.EXPIRED);
            future.complete(null);
          }
          // If a leader is still set in the address selector, unset the leader and send another unregister attempt.
          // This will ensure that the address selector selects all servers without filtering on the leader.
          else if (retryOnFailure && connection.leader() != null) {
            connection.reset(null, connection.servers());
            unregister(false, future);
          }
          // If no leader was set, set the session state to unstable and schedule another unregister attempt.
          else {
            state.setState(Session.State.UNSTABLE);
            keepAlive = context.schedule(interval, () -> unregister(future));
          }
        }
        // If a leader is still set in the address selector, unset the leader and send another unregister attempt.
        // This will ensure that the address selector selects all servers without filtering on the leader.
        else if (retryOnFailure && connection.leader() != null) {
          connection.reset(null, connection.servers());
          unregister(false, future);
        }
        // If no leader was set, set the session state to unstable and schedule another unregister attempt.
        else {
          state.setState(Session.State.UNSTABLE);
          keepAlive = context.schedule(interval, () -> unregister(future));
        }
      }
    });
  }

  /**
   * Kills the client session manager locally, without unregistering the session
   * with the cluster.
   *
   * @return A completable future to be completed once the session manager is killed.
   */
  public CompletableFuture<Void> kill() {
    return CompletableFuture.runAsync(() -> {
      if (keepAlive != null)
        keepAlive.cancel();
      state.setState(Session.State.CLOSED);
    }, context.executor());
  }

  @Override
  public String toString() {
    return String.format("%s[session=%d]", getClass().getSimpleName(), state.getSessionId());
  }

  /**
   * A single attempt to register the client's session with the cluster. Each
   * retry creates a new attempt with an incremented count, carrying forward the
   * same completion future.
   */
  private final class RegisterAttempt implements ConnectionStrategy.Attempt {
    private final int attempt;
    private final CompletableFuture<Void> future;

    private RegisterAttempt(int attempt, CompletableFuture<Void> future) {
      this.attempt = attempt;
      this.future = future;
    }

    @Override
    public int attempt() {
      return attempt;
    }

    /**
     * Completes the attempt successfully.
     */
    public void complete() {
      complete(null);
    }

    /**
     * Completes the attempt successfully.
     *
     * @param result The attempt result.
     */
    public void complete(Void result) {
      future.complete(result);
    }

    @Override
    public void fail() {
      future.completeExceptionally(new ConnectException("failed to register session"));
    }

    @Override
    public void fail(Throwable error) {
      future.completeExceptionally(error);
    }

    @Override
    public void retry() {
      state.getLogger().debug("Retrying session register attempt");
      register(new RegisterAttempt(attempt + 1, future));
    }

    @Override
    public void retry(Duration after) {
      state.getLogger().debug("Retrying session register attempt");
      context.schedule(after, () -> register(new RegisterAttempt(attempt + 1, future)));
    }
  }
}
| Implement external timeouts for keep-alives.
| client/src/main/java/io/atomix/copycat/client/session/ClientSessionManager.java | Implement external timeouts for keep-alives. | <ide><path>lient/src/main/java/io/atomix/copycat/client/session/ClientSessionManager.java
<ide> .withEventIndex(state.getCompleteIndex())
<ide> .build();
<ide>
<add> scheduleKeepAlive();
<add>
<ide> state.getLogger().debug("{} - Sending {}", sessionId, request);
<ide> connection.<KeepAliveRequest, KeepAliveResponse>send(request).whenComplete((response, error) -> {
<ide> if (state.getState() != Session.State.CLOSED) {
<ide> if (response.status() == Response.Status.OK) {
<ide> connection.reset(response.leader(), response.members());
<ide> state.setState(Session.State.OPEN);
<del> keepAlive = context.schedule(interval, this::keepAlive);
<add> scheduleKeepAlive();
<ide> }
<ide> // If the session is unknown, immediate expire the session.
<ide> else if (response.error() == RaftError.Type.UNKNOWN_SESSION_ERROR) {
<ide> // If no leader was set, set the session state to unstable and schedule another keep-alive.
<ide> else {
<ide> state.setState(Session.State.UNSTABLE);
<del> keepAlive = context.schedule(interval, this::keepAlive);
<add> scheduleKeepAlive();
<ide> }
<ide> }
<ide> // If a leader is still set in the address selector, unset the leader and attempt to send another keep-alive.
<ide> // If no leader was set, set the session state to unstable and schedule another keep-alive.
<ide> else {
<ide> state.setState(Session.State.UNSTABLE);
<del> keepAlive = context.schedule(interval, this::keepAlive);
<add> scheduleKeepAlive();
<ide> }
<ide> }
<add> });
<add> }
<add>
<add> /**
<add> * Schedules a keep-alive request.
<add> */
<add> private void scheduleKeepAlive() {
<add> if (keepAlive != null)
<add> keepAlive.cancel();
<add> keepAlive = context.schedule(interval, () -> {
<add> keepAlive = null;
<add> keepAlive();
<ide> });
<ide> }
<ide> |
|
Java | apache-2.0 | b6cd6e76674461c081aaaa7080d09b88d3ee420b | 0 | Distrotech/intellij-community,samthor/intellij-community,petteyg/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,petteyg/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,da1z/intellij-community,robovm/robovm-studio,asedunov/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,holmes/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,fnouama/intellij-community,signed/intellij-community,kdwink/intellij-community,holmes/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,samthor/intellij-community,supersven/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,ahb0327/intellij-community,orekyuu/intellij-community,slisson/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,ibinti/intellij-community,holmes/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,supersven/intellij-community,robovm/r
obovm-studio,amith01994/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,da1z/intellij-community,semonte/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,apixandru/intellij-community,apixandru/intellij-community,retomerz/intellij-community,allotria/intellij-community,izonder/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,akosyakov/intellij-community,allotria/intellij-community,suncycheng/intellij-community,semonte/intellij-community,adedayo/intellij-community,signed/intellij-community,vvv1559/intellij-community,caot/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,ibinti/intellij-community,caot/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,retomer
z/intellij-community,semonte/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,supersven/intellij-community,allotria/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,slisson/intellij-community,slisson/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,kool79/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,allotria/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,apixandru/intellij-community,caot/intellij-community,jagguli/intellij-community,samthor/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,retomerz/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,ibinti/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-c
ommunity,ibinti/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,slisson/intellij-community,samthor/intellij-community,signed/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,vladmm/intellij-community,izonder/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,jagguli/intellij-community,signed/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,fitermay/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,jagguli/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,supersven/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,jagguli/inte
llij-community,da1z/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,adedayo/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,izonder/intellij-community,caot/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,vladmm/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,clumsy/intellij-community,kool79/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,semonte/intellij-community,Lekanich/intellij-community,slisson/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,slisson/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,adedayo/i
ntellij-community,wreckJ/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,ibinti/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,supersven/intellij-community,semonte/intellij-community,holmes/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,caot/intellij-community,apixandru/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,signed/intellij-community,youdonghai/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,apixandru/intellij-community,caot/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,samthor/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,clumsy/intellij-community,izonder/intellij-community,wreckJ/intellij-community,supersven/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,TangHao1987/intell
ij-community,fitermay/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,adedayo/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,asedunov/intellij-community,allotria/intellij-community,semonte/intellij-community,diorcety/intellij-community,holmes/intellij-community,nicolargo/intellij-community,signed/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,caot/intellij-community,FHannes/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,signed/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,adedayo/intellij-community,fitermay/intellij-community,kool79/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,supersven/intellij-community,da1z/intellij-community,ibinti/intellij-community,signed/intellij-community,signed/intellij-community,salgu
arnieri/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,clumsy/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,caot/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,izonder/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,ryano144/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,ho
lmes/intellij-community,blademainer/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,diorcety/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,kdwink/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,petteyg/intellij-community,hurricup/intellij-community,fnouama/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,kool79/intellij-community,allotria/intellij-community,nicolargo/intellij-community,holmes/intellij-community,petteyg/intellij-community,retomerz/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,kool79/intellij-community,FHannes/intellij-community,izonder/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,slisson/intellij-community,ryano144/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,samthor/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-com
munity,TangHao1987/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,ryano144/intellij-community,caot/intellij-community,adedayo/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,apixandru/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,samthor/intellij-community,kool79/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,samthor/intellij-community,petteyg/intellij-community,diorcety/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,supersven/intellij-community,da1z/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,apixandru/intellij-community,blademainer/intellij-community,samthor/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,semonte/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,kool79/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,samthor/intellij-c
ommunity,lucafavatella/intellij-community,kdwink/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,signed/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,holmes/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,FHannes/intellij-community,retomerz/intellij-community,fnouama/intellij-community,xfournet/intellij-community | package com.jetbrains.python.console;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.ex.ToolWindowManagerEx;
import com.intellij.openapi.wm.ex.ToolWindowManagerListener;
import com.intellij.openapi.wm.impl.content.ToolWindowContentUi;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentFactory;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
/**
* @author traff
*/
public class PythonConsoleToolWindow {
private final Project myProject;
private boolean myInitialized = false;
public PythonConsoleToolWindow(Project project) {
myProject = project;
}
public static PythonConsoleToolWindow getInstance(@NotNull Project project) {
return project.getComponent(PythonConsoleToolWindow.class);
}
public void init(final @NotNull ToolWindow toolWindow, final @NotNull RunContentDescriptor contentDescriptor) {
addContent(toolWindow, contentDescriptor);
if (!myInitialized) {
doInit(toolWindow);
}
}
private void doInit(final ToolWindow toolWindow) {
myInitialized = true;
toolWindow.setToHideOnEmptyContent(true);
((ToolWindowManagerEx)ToolWindowManager.getInstance(myProject)).addToolWindowManagerListener(new ToolWindowManagerListener() {
@Override
public void toolWindowRegistered(@NotNull String id) {
}
@Override
public void stateChanged() {
ToolWindow window = getToolWindow();
if (window != null) {
boolean visible = window.isVisible();
if (visible && toolWindow.getContentManager().getContentCount() == 0) {
RunPythonConsoleAction.runPythonConsole(myProject, null, toolWindow);
}
}
}
});
}
private static void addContent(ToolWindow toolWindow, RunContentDescriptor contentDescriptor) {
toolWindow.getComponent().putClientProperty(ToolWindowContentUi.HIDE_ID_LABEL, "true");
Content content = toolWindow.getContentManager().findContent(contentDescriptor.getDisplayName());
if (content == null) {
content = createContent(contentDescriptor);
toolWindow.getContentManager().addContent(content);
}
else {
SimpleToolWindowPanel panel = new SimpleToolWindowPanel(false, true);
resetContent(contentDescriptor, panel, content);
}
toolWindow.getContentManager().setSelectedContent(content);
}
public ToolWindow getToolWindow() {
return ToolWindowManager.getInstance(myProject).getToolWindow(PythonConsoleToolWindowFactory.ID);
}
private static Content createContent(final @NotNull RunContentDescriptor contentDescriptor) {
SimpleToolWindowPanel panel = new SimpleToolWindowPanel(false, true);
final Content content = ContentFactory.SERVICE.getInstance().createContent(panel, contentDescriptor.getDisplayName(), false);
content.setCloseable(true);
resetContent(contentDescriptor, panel, content);
return content;
}
private static void resetContent(RunContentDescriptor contentDescriptor, SimpleToolWindowPanel panel, Content content) {
panel.setContent(contentDescriptor.getComponent());
//panel.addFocusListener(createFocusListener(toolWindow));
content.setComponent(panel);
content.setPreferredFocusableComponent(contentDescriptor.getComponent());
}
private static FocusListener createFocusListener(final ToolWindow toolWindow) {
return new FocusListener() {
@Override
public void focusGained(FocusEvent e) {
JComponent component = getComponentToFocus(toolWindow);
if (component != null) {
component.requestFocusInWindow();
}
}
@Override
public void focusLost(FocusEvent e) {
}
};
}
private static JComponent getComponentToFocus(ToolWindow window) {
return window.getContentManager().getComponent();
}
}
| python/src/com/jetbrains/python/console/PythonConsoleToolWindow.java | package com.jetbrains.python.console;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.ex.ToolWindowManagerEx;
import com.intellij.openapi.wm.ex.ToolWindowManagerListener;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentFactory;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
/**
* @author traff
*/
public class PythonConsoleToolWindow {
  /** Project this console tool window belongs to. */
  private final Project myProject;
  /** Guards against registering the tool-window-manager listener more than once. */
  private boolean myInitialized = false;
  public PythonConsoleToolWindow(Project project) {
    myProject = project;
  }
  /** Returns the project-level component instance of this wrapper. */
  public static PythonConsoleToolWindow getInstance(@NotNull Project project) {
    return project.getComponent(PythonConsoleToolWindow.class);
  }
  /**
   * Attaches the console run content to the tool window and, on the first call,
   * performs one-time initialization (listener registration).
   */
  public void init(final @NotNull ToolWindow toolWindow, final @NotNull RunContentDescriptor contentDescriptor) {
    addContent(toolWindow, contentDescriptor);
    if (!myInitialized) {
      doInit(toolWindow);
    }
  }
  private void doInit(final ToolWindow toolWindow) {
    myInitialized = true;
    toolWindow.setToHideOnEmptyContent(true);
    // When the window becomes visible with no content, start a fresh console.
    ((ToolWindowManagerEx)ToolWindowManager.getInstance(myProject)).addToolWindowManagerListener(new ToolWindowManagerListener() {
      @Override
      public void toolWindowRegistered(@NotNull String id) {
        // No action needed on registration.
      }
      @Override
      public void stateChanged() {
        ToolWindow window = getToolWindow();
        if (window != null) {
          boolean visible = window.isVisible();
          if (visible && toolWindow.getContentManager().getContentCount() == 0) {
            RunPythonConsoleAction.runPythonConsole(myProject, null, toolWindow);
          }
        }
      }
    });
  }
  // Adds (or reuses) a content tab for the descriptor and selects it.
  private static void addContent(ToolWindow toolWindow, RunContentDescriptor contentDescriptor) {
    Content content = toolWindow.getContentManager().findContent(contentDescriptor.getDisplayName());
    if (content == null) {
      content = createContent(contentDescriptor);
      toolWindow.getContentManager().addContent(content);
    }
    else {
      // Existing tab with the same display name: swap in the new component.
      SimpleToolWindowPanel panel = new SimpleToolWindowPanel(false, true);
      resetContent(contentDescriptor, panel, content);
    }
    toolWindow.getContentManager().setSelectedContent(content);
  }
  /** Looks up the Python console tool window by its registered factory id. */
  public ToolWindow getToolWindow() {
    return ToolWindowManager.getInstance(myProject).getToolWindow(PythonConsoleToolWindowFactory.ID);
  }
  // Builds a closeable content tab wrapping the descriptor's component.
  private static Content createContent(final @NotNull RunContentDescriptor contentDescriptor) {
    SimpleToolWindowPanel panel = new SimpleToolWindowPanel(false, true);
    final Content content = ContentFactory.SERVICE.getInstance().createContent(panel, contentDescriptor.getDisplayName(), false);
    content.setCloseable(true);
    resetContent(contentDescriptor, panel, content);
    return content;
  }
  // Rebinds the content tab to the descriptor's component and focus target.
  private static void resetContent(RunContentDescriptor contentDescriptor, SimpleToolWindowPanel panel, Content content) {
    panel.setContent(contentDescriptor.getComponent());
    //panel.addFocusListener(createFocusListener(toolWindow));
    content.setComponent(panel);
    content.setPreferredFocusableComponent(contentDescriptor.getComponent());
  }
  // Currently unused (see commented call above); forwards focus into the window's content.
  private static FocusListener createFocusListener(final ToolWindow toolWindow) {
    return new FocusListener() {
      @Override
      public void focusGained(FocusEvent e) {
        JComponent component = getComponentToFocus(toolWindow);
        if (component != null) {
          component.requestFocusInWindow();
        }
      }
      @Override
      public void focusLost(FocusEvent e) {
        // No action on focus loss.
      }
    };
  }
  private static JComponent getComponentToFocus(ToolWindow window) {
    return window.getContentManager().getComponent();
  }
}
| Don't show tool window id label for Python console.
| python/src/com/jetbrains/python/console/PythonConsoleToolWindow.java | Don't show tool window id label for Python console. | <ide><path>ython/src/com/jetbrains/python/console/PythonConsoleToolWindow.java
<ide> import com.intellij.openapi.wm.ToolWindowManager;
<ide> import com.intellij.openapi.wm.ex.ToolWindowManagerEx;
<ide> import com.intellij.openapi.wm.ex.ToolWindowManagerListener;
<add>import com.intellij.openapi.wm.impl.content.ToolWindowContentUi;
<ide> import com.intellij.ui.content.Content;
<ide> import com.intellij.ui.content.ContentFactory;
<ide> import org.jetbrains.annotations.NotNull;
<ide> }
<ide>
<ide> private static void addContent(ToolWindow toolWindow, RunContentDescriptor contentDescriptor) {
<add> toolWindow.getComponent().putClientProperty(ToolWindowContentUi.HIDE_ID_LABEL, "true");
<add>
<ide> Content content = toolWindow.getContentManager().findContent(contentDescriptor.getDisplayName());
<ide> if (content == null) {
<ide> content = createContent(contentDescriptor); |
|
Java | mit | c1bcf5132507b7de2c31e7e43bc19a8cb94542ed | 0 | qilei/mybatis-pagination,qilei/mybatis-pagination | package org.mybatis.pagination.service;
import java.util.List;
import javax.annotation.Resource;
import com.google.common.collect.Lists;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mybatis.pagination.domain.*;
import org.mybatis.pagination.dto.PageMyBatis;
import org.mybatis.pagination.dto.datatables.PagingCriteria;
import org.mybatis.pagination.dto.datatables.SearchField;
import org.mybatis.pagination.dto.datatables.SortDirection;
import org.mybatis.pagination.dto.datatables.SortField;
import org.mybatis.pagination.mapper.ResourcesMapper;
import org.mybatis.pagination.mapper.ResourcesSubMapper;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
* <p>
* .
* </p>
*
* @author mumu@yfyang
* @version 1.0 2013-09-09 2:15 PM
* @since JDK 1.5
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath*:spring/test-context.xml")
public class MapperTest {
    /** Mapper for the main resources table. */
    @Resource
    private ResourcesMapper resourcesMapper;
    /** Mapper for the resources child table. */
    @Resource
    private ResourcesSubMapper resourcesSubMapper;

    // NOTE: the one-off transactional @Before fixtures that seeded 100 test rows
    // were removed; recover them from VCS history if the data set is needed again.

    /** Pages the child-table join query: page size 10 starting at offset 10. */
    @Test
    public void testPagaination() throws Exception {
        final PagingCriteria criteria = PagingCriteria.createCriteria(10, 10, 1);
        final PageMyBatis<ComplexResourcesSub> page = resourcesSubMapper.selectByPage(criteria);
        for (final ComplexResourcesSub row : page) {
            System.out.println(row);
        }
    }

    /** Checks that a fetched page can be wrapped into a DataTables-style response. */
    @Test
    public void testPagainationAndWrap() throws Exception {
        final PagingCriteria criteria = PagingCriteria.createCriteria(0, 15, 15);
        final PageMyBatis<Resources> page = resourcesMapper.selectByPage(criteria);
        System.out.println("pageMyBatis.warp() = " + page.warp());
    }

    /** Pages with a compound sort: name descending, then path ascending. */
    @Test
    public void testPagainationAndOrder() throws Exception {
        final List<SortField> sortFields = Lists.newArrayList(
                new SortField("name", SortDirection.DESC),
                new SortField("path", SortDirection.ASC));
        final PagingCriteria criteria = PagingCriteria.createCriteriaWithSort(20, 15, 15, sortFields);
        final PageMyBatis<Resources> page = resourcesMapper.selectByPage(criteria);
        for (final Resources row : page) {
            System.out.println(row);
        }
    }

    /** Pages with a search filter on the name column. */
    @Test
    public void testPagainationAndSearch() throws Exception {
        final List<SearchField> searchFields = Lists.newArrayList(
                new SearchField("name", false, false, "11"));
        final PagingCriteria criteria = PagingCriteria.createCriteriaWithSearch(0, 15, 1, searchFields);
        final PageMyBatis<Resources> page = resourcesMapper.selectByPage(criteria);
        for (final Resources row : page) {
            System.out.println(row);
        }
    }

    /** Pages an ORDER BY statement filtered through a custom criteria object. */
    @Test
    public void testPagainationAndOrderSearch() throws Exception {
        final PagingCriteria paging = PagingCriteria.createCriteriaWithSearch(0, 15, 1, Lists.<SearchField>newArrayList());
        final ResourcesPageCriteria criteria = new ResourcesPageCriteria(paging);
        criteria.setName("11");
        final PageMyBatis<Resources> page = resourcesMapper.selectByPageOrder(criteria);
        for (final Resources row : page) {
            System.out.println(row);
        }
    }

    /** Pages a statement whose WHERE clause combines criteria fields with paging. */
    @Test
    public void testPaginationMoreWhere() throws Exception {
        final PagingCriteria paging = PagingCriteria.createCriteriaWithSearch(0, 15, 1, Lists.<SearchField>newArrayList());
        final ResourcesPageCriteria criteria = new ResourcesPageCriteria(paging);
        criteria.setName("测试数据18");
        final PageMyBatis<Resources> page = resourcesMapper.selectByPageOrderAndWhere(criteria);
        for (final Resources row : page) {
            System.out.println(row);
        }
    }
}
| src/test/java/org/mybatis/pagination/service/MapperTest.java | package org.mybatis.pagination.service;
import java.util.List;
import javax.annotation.Resource;
import com.google.common.collect.Lists;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mybatis.pagination.domain.*;
import org.mybatis.pagination.dto.PageMyBatis;
import org.mybatis.pagination.dto.datatables.PagingCriteria;
import org.mybatis.pagination.dto.datatables.SearchField;
import org.mybatis.pagination.dto.datatables.SortDirection;
import org.mybatis.pagination.dto.datatables.SortField;
import org.mybatis.pagination.mapper.ResourcesMapper;
import org.mybatis.pagination.mapper.ResourcesSubMapper;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
* <p>
* .
* </p>
*
* @author mumu@yfyang
* @version 1.0 2013-09-09 2:15 PM
* @since JDK 1.5
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath*:spring/test-context.xml")
public class MapperTest {
    // Mapper for the main resources table.
    @Resource
    private ResourcesMapper resourcesMapper;
    // Mapper for the resources child table.
    @Resource
    private ResourcesSubMapper resourcesSubMapper;
//    @Before
//    // Annotated this way the test runs inside a transaction that is rolled back
//    // afterwards, so nothing done here affects the database.
////    @Rollback(false) // set to false to keep the transaction from rolling back
//    @Rollback
//    public void setUp() throws Exception {
//        Resources resources;
//        for (int i = 0; i < 100; i++) {
//            resources = new Resources();
//            resources.setId(UUID.randomUUID().toString());
//            resources.setName("测试数据" + i);
//            resources.setPath("test/pageh/" + i);
//            resourcesMapper.insertSelective(resources);
//        }
//    }
//    @Before
//    // Annotated this way the test runs inside a transaction that is rolled back
//    // afterwards, so nothing done here affects the database.
////    @Rollback(false) // set to false to keep the transaction from rolling back
//    @Rollback
//    public void setUp() throws Exception {
//        ResourcesSub resourcesSub;
//        for (int i = 0; i < 100; i++) {
//            resourcesSub = new ResourcesSub();
//            resourcesSub.setId(UUID.randomUUID().toString());
//            resourcesSub.setName("测试数据sub" + i);
//            resourcesSub.setResId("0063e998-8597-4328-9a01-49a574096530");
//            resourcesSubMapper.insertSelective(resourcesSub);
//        }
//    }
//    @Test
//    public void testPagaination() throws Exception {
//
//        PagingCriteria baseCriteria = PagingCriteria.createCriteria(10, 10, 2);
//        PageMyBatis<Resources> pageMyBatis = resourcesMapper.selectByPage(baseCriteria);
//        for (Resources pageMyBati : pageMyBatis) {
//            System.out.println(pageMyBati);
//        }
//    }
    // Pages the child-table join query: page size 10 starting at offset 10.
    @Test
    public void testPagaination() throws Exception {
        PagingCriteria baseCriteria = PagingCriteria.createCriteria(10, 10, 1);
        PageMyBatis<ComplexResourcesSub> pageMyBatis = resourcesSubMapper.selectByPage(baseCriteria);
        for (ComplexResourcesSub pageMyBati : pageMyBatis) {
            System.out.println(pageMyBati);
        }
    }
//    @Test
//    public void testPagainationSqlContainOrder() throws Exception {
//        PagingCriteria baseCriteria = PagingCriteria.createCriteria(0, 15, 15);
//        PageMyBatis<Resources> pageMyBatis = resourcesMapper.selectByPageOrder(baseCriteria);
//        for (Resources pageMyBati : pageMyBatis) {
//            System.out.println(pageMyBati);
//        }
//
//    }
    // Checks that a fetched page can be wrapped into a DataTables-style response.
    @Test
    public void testPagainationAndWrap() throws Exception {
        PagingCriteria baseCriteria = PagingCriteria.createCriteria(0, 15, 15);
        PageMyBatis<Resources> pageMyBatis = resourcesMapper.selectByPage(baseCriteria);
        System.out.println("pageMyBatis.warp() = " + pageMyBatis.warp());
    }
    // Pages with a compound sort: name descending, then path ascending.
    @Test
    public void testPagainationAndOrder() throws Exception {
        List<SortField> sortFields = Lists.newArrayList();
        sortFields.add(new SortField("name", SortDirection.DESC));
        sortFields.add(new SortField("path", SortDirection.ASC));
        PagingCriteria baseCriteria = PagingCriteria.createCriteriaWithSort(20, 15, 15, sortFields);
        PageMyBatis<Resources> pageMyBatis = resourcesMapper.selectByPage(baseCriteria);
        for (Resources pageMyBati : pageMyBatis) {
            System.out.println(pageMyBati);
        }
    }
    // Pages with a search filter on the name column.
    @Test
    public void testPagainationAndSearch() throws Exception {
        List<SearchField> searchFields = Lists.newArrayList();
        searchFields.add(new SearchField("name", false, false, "11"));
        PagingCriteria baseCriteria = PagingCriteria.createCriteriaWithSearch(0, 15, 1, searchFields);
        PageMyBatis<Resources> pageMyBatis = resourcesMapper.selectByPage(baseCriteria);
        for (Resources pageMyBati : pageMyBatis) {
            System.out.println(pageMyBati);
        }
    }
    // Pages an ORDER BY statement filtered through a custom criteria object.
    @Test
    public void testPagainationAndOrderSearch() throws Exception {
        List<SearchField> searchFields = Lists.newArrayList();
        //searchFields.add(new SearchField("name", false, false, "11"));
        PagingCriteria baseCriteria = PagingCriteria.createCriteriaWithSearch(0, 15, 1, searchFields);
        ResourcesPageCriteria criteria = new ResourcesPageCriteria(baseCriteria);
        criteria.setName("11");
        PageMyBatis<Resources> pageMyBatis = resourcesMapper.selectByPageOrder(criteria);
        for (Resources pageMyBati : pageMyBatis) {
            System.out.println(pageMyBati);
        }
    }
    // Pages a statement whose WHERE clause combines criteria fields with paging.
    @Test
    public void testPaginationMoreWhere() throws Exception {
        List<SearchField> searchFields = Lists.newArrayList();
        // searchFields.add(new SearchField("name", false, false, "11"));
        PagingCriteria baseCriteria = PagingCriteria.createCriteriaWithSearch(0, 15, 1, searchFields);
        ResourcesPageCriteria criteria = new ResourcesPageCriteria(baseCriteria);
        criteria.setName("测试数据18");
        PageMyBatis<Resources> pageMyBatis = resourcesMapper.selectByPageOrderAndWhere(criteria);
        for (Resources pageMyBati : pageMyBatis) {
            System.out.println(pageMyBati);
        }
    }
}
| 测试git remote
| src/test/java/org/mybatis/pagination/service/MapperTest.java | 测试git remote | <ide><path>rc/test/java/org/mybatis/pagination/service/MapperTest.java
<ide> public void testPaginationMoreWhere() throws Exception {
<ide> List<SearchField> searchFields = Lists.newArrayList();
<ide> // searchFields.add(new SearchField("name", false, false, "11"));
<del>
<ide> PagingCriteria baseCriteria = PagingCriteria.createCriteriaWithSearch(0, 15, 1, searchFields);
<ide> ResourcesPageCriteria criteria = new ResourcesPageCriteria(baseCriteria);
<ide> criteria.setName("测试数据18"); |
|
Java | apache-2.0 | 370459e53584ca594296c27ee87b13c9e6280eec | 0 | sandeep-n/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,patilvikram/apex-malhar,vrozov/incubator-apex-malhar,chandnisingh/apex-malhar,vrozov/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,prasannapramod/apex-malhar,vrozov/incubator-apex-malhar,siyuanh/incubator-apex-malhar,apache/incubator-apex-malhar,tweise/incubator-apex-malhar,patilvikram/apex-malhar,DataTorrent/incubator-apex-malhar,ilganeli/incubator-apex-malhar,patilvikram/apex-malhar,siyuanh/apex-malhar,skekre98/apex-mlhr,patilvikram/apex-malhar,vrozov/apex-malhar,sandeep-n/incubator-apex-malhar,siyuanh/apex-malhar,PramodSSImmaneni/apex-malhar,tushargosavi/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,trusli/apex-malhar,apache/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,PramodSSImmaneni/apex-malhar,prasannapramod/apex-malhar,skekre98/apex-mlhr,DataTorrent/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,siyuanh/apex-malhar,tweise/incubator-apex-malhar,ilganeli/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,tweise/incubator-apex-malhar,tweise/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,DataTorrent/Megh,siyuanh/apex-malhar,chandnisingh/apex-malhar,chinmaykolhatkar/apex-malhar,yogidevendra/incubator-apex-malhar,yogidevendra/apex-malhar,prasannapramod/apex-malhar,siyuanh/apex-malhar,yogidevendra/apex-malhar,siyuanh/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,sandeep-n/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,siyuanh/apex-malhar,tweise/incubator-apex-malhar,davidyan74/apex-malhar,skekre98/apex-mlhr,tweise/apex-malhar,yogidevendra/incubator-apex-malhar,brightchen/apex-malhar,PramodSSImmaneni/apex-malhar,ananthc/apex-malhar,brightchen/apex-malhar,ilganeli/incubator-apex-malhar,skekre98/apex-mlhr,chinmaykolhatkar/incubator-apex-malhar,davidyan74/apex-malhar,ananthc/apex-malhar,yogi
devendra/apex-malhar,tushargosavi/incubator-apex-malhar,prasannapramod/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,vrozov/apex-malhar,davidyan74/apex-malhar,chinmaykolhatkar/apex-malhar,trusli/apex-malhar,skekre98/apex-mlhr,DataTorrent/incubator-apex-malhar,siyuanh/incubator-apex-malhar,tweise/apex-malhar,ananthc/apex-malhar,siyuanh/incubator-apex-malhar,tweise/incubator-apex-malhar,patilvikram/apex-malhar,siyuanh/incubator-apex-malhar,skekre98/apex-mlhr,ilganeli/incubator-apex-malhar,trusli/apex-malhar,patilvikram/apex-malhar,vrozov/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,brightchen/apex-malhar,chandnisingh/apex-malhar,davidyan74/apex-malhar,siyuanh/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,trusli/apex-malhar,trusli/apex-malhar,ilganeli/incubator-apex-malhar,prasannapramod/apex-malhar,chinmaykolhatkar/apex-malhar,yogidevendra/incubator-apex-malhar,vrozov/incubator-apex-malhar,sandeep-n/incubator-apex-malhar,chandnisingh/apex-malhar,apache/incubator-apex-malhar,vrozov/apex-malhar,tweise/incubator-apex-malhar,ananthc/apex-malhar,chinmaykolhatkar/apex-malhar,PramodSSImmaneni/apex-malhar,ilganeli/incubator-apex-malhar,ananthc/apex-malhar,sandeep-n/incubator-apex-malhar,siyuanh/incubator-apex-malhar,trusli/apex-malhar,chinmaykolhatkar/apex-malhar,patilvikram/apex-malhar,siyuanh/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,davidyan74/apex-malhar,ananthc/apex-malhar,vrozov/apex-malhar,DataTorrent/Megh,sandeep-n/incubator-apex-malhar,PramodSSImmaneni/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,ilganeli/incubator-apex-malhar,chandnisingh/apex-malhar,chandnisingh/apex-malhar,PramodSSImmaneni/apex-malhar,brightchen/apex-malhar,apache/incubator-apex-malhar,PramodSSImmaneni/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,brightchen/apex-malhar,tweise/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,tweise/apex-malhar,vrozov/apex-malhar,trusli/ap
ex-malhar,chandnisingh/apex-malhar,apache/incubator-apex-malhar,brightchen/apex-malhar,apache/incubator-apex-malhar,apache/incubator-apex-malhar,yogidevendra/apex-malhar,vrozov/apex-malhar,vrozov/incubator-apex-malhar,davidyan74/apex-malhar,vrozov/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,tweise/apex-malhar,tushargosavi/incubator-apex-malhar,yogidevendra/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,brightchen/apex-malhar,sandeep-n/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,tweise/apex-malhar,prasannapramod/apex-malhar,yogidevendra/apex-malhar,DataTorrent/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar | /*
* Copyright (c) 2012 Malhar, Inc.
* All Rights Reserved.
*/
package com.malhartech.lib.math;
import com.malhartech.annotation.InputPortFieldAnnotation;
import com.malhartech.annotation.OutputPortFieldAnnotation;
import com.malhartech.api.DefaultInputPort;
import com.malhartech.api.DefaultOutputPort;
import com.malhartech.api.StreamCodec;
import com.malhartech.lib.util.BaseNumberKeyValueOperator;
import com.malhartech.lib.util.KeyValPair;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.mutable.MutableDouble;
/**
*
* Adds all values for each key in "numerator" and "denominator", and at the end of window emits the margin for each key
* (1 - numerator/denominator). <p>
* <br>The values are added for each key within the window and for each stream.<br>
* <br>
* <b>Ports</b>:<br>
* <b>numerator</b>: expects KeyValPair<K,V><br>
* <b>denominator</b>: expects KeyValPair<K,V><br>
* <b>margin</b>: emits HashMap<K,Double>, one entry per key per window<br>
* <br>
* <b>Properties</b>:<br>
* <b>inverse</b>: if set to true the key in the filter will block tuple<br>
* <b>filterBy</b>: List of keys to filter on<br>
* <br>
* <b>Specific compile time checks</b>: None<br>
* <b>Specific run time checks</b>: None<br>
* <p>
* <b>Benchmarks</b>: Blast as many tuples as possible in inline mode<br>
* <table border="1" cellspacing=1 cellpadding=1 summary="Benchmark table for MarginMap<K,V extends Number> operator template">
* <tr><th>In-Bound</th><th>Out-bound</th><th>Comments</th></tr>
* <tr><td><b>37 Million K,V pairs/s</b></td><td>One tuple per key per window per port</td><td>In-bound rate is the main determinant of performance. Tuples are assumed to be
* immutable. If you use mutable tuples and have lots of keys, the benchmarks may be lower</td></tr>
* </table><br>
* <p>
* <b>Function Table (K=String, V=Integer) and percent set to true</b>:
* <table border="1" cellspacing=1 cellpadding=1 summary="Function table for MarginMap<K,V extends Number> operator template">
* <tr><th rowspan=2>Tuple Type (api)</th><th colspan=2>In-bound (process)</th><th>Out-bound (emit)</th></tr>
* <tr><th><i>numerator</i>(KeyValPair<K,V>)</th><th><i>denominator</i>(KeyValPair<K,V>)</th><th><i>margin</i>(KeyValPair<K,Double>)</th></tr>
* <tr><td>Begin Window (beginWindow())</td><td>N/A</td><td>N/A</td><td>N/A</td></tr>
* <tr><td>Data (process())</td><td></td><td>{a=2,a=8}</td><td></td></tr>
* <tr><td>Data (process())</td><td>{a=2,b=20,c=4000}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{a=1}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{a=10,b=5}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=55,b=12}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{c=500,d=282}</td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=22}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=14}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{b=7,e=3}</td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=46,e=2}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=4,a=23,g=5,h=44}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{c=1500}</td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{a=40,b=30}</td><td></td></tr>
* <tr><td>End Window (endWindow())</td><td>N/A</td><td>N/A</td><td>a=28<br>b=0<br>c=-100<br>d=50<br>e=33.3</td></tr>
* </table>
* <br>
*
* @author Locknath Shil <[email protected]><br>
* <br>
*/
public class MarginKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K, V>
{
@InputPortFieldAnnotation(name = "numerator")
public final transient DefaultInputPort<KeyValPair<K, V>> numerator = new DefaultInputPort<KeyValPair<K, V>>(this)
{
/**
* Adds tuple to the numerator hash.
*/
@Override
public void process(KeyValPair<K, V> tuple)
{
addTuple(tuple, numerators);
}
/**
* Set StreamCodec used for partitioning.
*/
@Override
public Class<? extends StreamCodec<KeyValPair<K, V>>> getStreamCodec()
{
return getKeyValPairStreamCodec();
}
};
@InputPortFieldAnnotation(name = "denominator")
public final transient DefaultInputPort<KeyValPair<K, V>> denominator = new DefaultInputPort<KeyValPair<K, V>>(this)
{
/**
* Adds tuple to the denominator hash.
*/
@Override
public void process(KeyValPair<K, V> tuple)
{
addTuple(tuple, denominators);
}
/**
* Set StreamCodec used for partitioning.
*/
@Override
public Class<? extends StreamCodec<KeyValPair<K, V>>> getStreamCodec()
{
return getKeyValPairStreamCodec();
}
};
/**
* Adds the value for each key.
*
* @param tuple
* @param map
*/
public void addTuple(KeyValPair<K, V> tuple, Map<K, MutableDouble> map)
{
K key = tuple.getKey();
if (!doprocessKey(key) || (tuple.getValue() == null)) {
return;
}
MutableDouble val = map.get(key);
if (val == null) {
val = new MutableDouble(0.0);
map.put(cloneKey(key), val);
}
val.add(tuple.getValue().doubleValue());
}
@OutputPortFieldAnnotation(name = "margin")
public final transient DefaultOutputPort<KeyValPair<K, V>> margin = new DefaultOutputPort<KeyValPair<K, V>>(this);
protected transient HashMap<K, MutableDouble> numerators = new HashMap<K, MutableDouble>();
protected transient HashMap<K, MutableDouble> denominators = new HashMap<K, MutableDouble>();
protected boolean percent = false;
/**
* getter function for percent
*
* @return percent
*/
public boolean getPercent()
{
return percent;
}
/**
* setter function for percent
*
* @param val sets percent
*/
public void setPercent(boolean val)
{
percent = val;
}
/**
* Generates tuples for each key and emits them. Only keys that are in the denominator are iterated on
* If the key is only in the numerator, it gets ignored (cannot do divide by 0)
* Clears internal data
*/
@Override
public void endWindow()
{
Double val;
for (Map.Entry<K, MutableDouble> e: denominators.entrySet()) {
K key = e.getKey();
MutableDouble nval = numerators.get(key);
if (nval == null) {
nval = new MutableDouble(0.0);
}
else {
numerators.remove(key); // so that all left over keys can be reported
}
if (percent) {
val = (1 - nval.doubleValue() / e.getValue().doubleValue()) * 100;
}
else {
val = 1 - nval.doubleValue() / e.getValue().doubleValue();
}
margin.emit(new KeyValPair(key, getValue(val.doubleValue())));
}
numerators.clear();
denominators.clear();
}
}
| library/src/main/java/com/malhartech/lib/math/MarginKeyVal.java | /*
* Copyright (c) 2012 Malhar, Inc.
* All Rights Reserved.
*/
package com.malhartech.lib.math;
import com.malhartech.annotation.InputPortFieldAnnotation;
import com.malhartech.annotation.OutputPortFieldAnnotation;
import com.malhartech.api.DefaultInputPort;
import com.malhartech.api.DefaultOutputPort;
import com.malhartech.api.StreamCodec;
import com.malhartech.lib.util.BaseNumberKeyValueOperator;
import com.malhartech.lib.util.KeyValPair;
import com.malhartech.lib.util.MutableDouble;
import java.util.HashMap;
import java.util.Map;
/**
*
* Adds all values for each key in "numerator" and "denominator", and at the end of window emits the margin for each key
* (1 - numerator/denominator). <p>
* <br>The values are added for each key within the window and for each stream.<br>
* <br>
* <b>Ports</b>:<br>
* <b>numerator</b>: expects KeyValPair<K,V><br>
* <b>denominator</b>: expects KeyValPair<K,V><br>
* <b>margin</b>: emits HashMap<K,Double>, one entry per key per window<br>
* <br>
* <b>Properties</b>:<br>
* <b>inverse</b>: if set to true the key in the filter will block tuple<br>
* <b>filterBy</b>: List of keys to filter on<br>
* <br>
* <b>Specific compile time checks</b>: None<br>
* <b>Specific run time checks</b>: None<br>
* <p>
* <b>Benchmarks</b>: Blast as many tuples as possible in inline mode<br>
* <table border="1" cellspacing=1 cellpadding=1 summary="Benchmark table for MarginMap<K,V extends Number> operator template">
* <tr><th>In-Bound</th><th>Out-bound</th><th>Comments</th></tr>
* <tr><td><b>37 Million K,V pairs/s</b></td><td>One tuple per key per window per port</td><td>In-bound rate is the main determinant of performance. Tuples are assumed to be
* immutable. If you use mutable tuples and have lots of keys, the benchmarks may be lower</td></tr>
* </table><br>
* <p>
* <b>Function Table (K=String, V=Integer) and percent set to true</b>:
* <table border="1" cellspacing=1 cellpadding=1 summary="Function table for MarginMap<K,V extends Number> operator template">
* <tr><th rowspan=2>Tuple Type (api)</th><th colspan=2>In-bound (process)</th><th>Out-bound (emit)</th></tr>
* <tr><th><i>numerator</i>(KeyValPair<K,V>)</th><th><i>denominator</i>(KeyValPair<K,V>)</th><th><i>margin</i>(KeyValPair<K,Double>)</th></tr>
* <tr><td>Begin Window (beginWindow())</td><td>N/A</td><td>N/A</td><td>N/A</td></tr>
* <tr><td>Data (process())</td><td></td><td>{a=2,a=8}</td><td></td></tr>
* <tr><td>Data (process())</td><td>{a=2,b=20,c=4000}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{a=1}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{a=10,b=5}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=55,b=12}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{c=500,d=282}</td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=22}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=14}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{b=7,e=3}</td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=46,e=2}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td>{d=4,a=23,g=5,h=44}</td><td></td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{c=1500}</td><td></td></tr>
* <tr><td>Data (process())</td><td></td><td>{a=40,b=30}</td><td></td></tr>
* <tr><td>End Window (endWindow())</td><td>N/A</td><td>N/A</td><td>a=28<br>b=0<br>c=-100<br>d=50<br>e=33.3</td></tr>
* </table>
* <br>
*
* @author Locknath Shil <[email protected]><br>
* <br>
*/
public class MarginKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K, V>
{
@InputPortFieldAnnotation(name = "numerator")
public final transient DefaultInputPort<KeyValPair<K, V>> numerator = new DefaultInputPort<KeyValPair<K, V>>(this)
{
/**
* Adds tuple to the numerator hash.
*/
@Override
public void process(KeyValPair<K, V> tuple)
{
addTuple(tuple, numerators);
}
/**
* Set StreamCodec used for partitioning.
*/
@Override
public Class<? extends StreamCodec<KeyValPair<K, V>>> getStreamCodec()
{
return getKeyValPairStreamCodec();
}
};
@InputPortFieldAnnotation(name = "denominator")
public final transient DefaultInputPort<KeyValPair<K, V>> denominator = new DefaultInputPort<KeyValPair<K, V>>(this)
{
/**
* Adds tuple to the denominator hash.
*/
@Override
public void process(KeyValPair<K, V> tuple)
{
addTuple(tuple, denominators);
}
/**
* Set StreamCodec used for partitioning.
*/
@Override
public Class<? extends StreamCodec<KeyValPair<K, V>>> getStreamCodec()
{
return getKeyValPairStreamCodec();
}
};
/**
* Adds the value for each key.
*
* @param tuple
* @param map
*/
public void addTuple(KeyValPair<K, V> tuple, Map<K, MutableDouble> map)
{
K key = tuple.getKey();
if (!doprocessKey(key) || (tuple.getValue() == null)) {
return;
}
MutableDouble val = map.get(key);
if (val == null) {
val = new MutableDouble(0.0);
map.put(cloneKey(key), val);
}
val.value += tuple.getValue().doubleValue();
}
@OutputPortFieldAnnotation(name = "margin")
public final transient DefaultOutputPort<KeyValPair<K, V>> margin = new DefaultOutputPort<KeyValPair<K, V>>(this);
protected transient HashMap<K, MutableDouble> numerators = new HashMap<K, MutableDouble>();
protected transient HashMap<K, MutableDouble> denominators = new HashMap<K, MutableDouble>();
protected boolean percent = false;
/**
* getter function for percent
*
* @return percent
*/
public boolean getPercent()
{
return percent;
}
/**
* setter function for percent
*
* @param val sets percent
*/
public void setPercent(boolean val)
{
percent = val;
}
/**
* Generates tuples for each key and emits them. Only keys that are in the denominator are iterated on
* If the key is only in the numerator, it gets ignored (cannot do divide by 0)
* Clears internal data
*/
@Override
public void endWindow()
{
Double val;
for (Map.Entry<K, MutableDouble> e: denominators.entrySet()) {
K key = e.getKey();
MutableDouble nval = numerators.get(key);
if (nval == null) {
nval = new MutableDouble(0.0);
}
else {
numerators.remove(key); // so that all left over keys can be reported
}
if (percent) {
val = (1 - nval.value / e.getValue().value) * 100;
}
else {
val = 1 - nval.value / e.getValue().value;
}
margin.emit(new KeyValPair(key, getValue(val.doubleValue())));
}
numerators.clear();
denominators.clear();
}
}
| use apache MutableDouble.
| library/src/main/java/com/malhartech/lib/math/MarginKeyVal.java | use apache MutableDouble. | <ide><path>ibrary/src/main/java/com/malhartech/lib/math/MarginKeyVal.java
<ide> import com.malhartech.api.StreamCodec;
<ide> import com.malhartech.lib.util.BaseNumberKeyValueOperator;
<ide> import com.malhartech.lib.util.KeyValPair;
<del>import com.malhartech.lib.util.MutableDouble;
<ide> import java.util.HashMap;
<ide> import java.util.Map;
<add>import org.apache.commons.lang3.mutable.MutableDouble;
<ide>
<ide> /**
<ide> *
<ide> val = new MutableDouble(0.0);
<ide> map.put(cloneKey(key), val);
<ide> }
<del> val.value += tuple.getValue().doubleValue();
<add> val.add(tuple.getValue().doubleValue());
<ide> }
<ide> @OutputPortFieldAnnotation(name = "margin")
<ide> public final transient DefaultOutputPort<KeyValPair<K, V>> margin = new DefaultOutputPort<KeyValPair<K, V>>(this);
<ide> numerators.remove(key); // so that all left over keys can be reported
<ide> }
<ide> if (percent) {
<del> val = (1 - nval.value / e.getValue().value) * 100;
<add> val = (1 - nval.doubleValue() / e.getValue().doubleValue()) * 100;
<ide> }
<ide> else {
<del> val = 1 - nval.value / e.getValue().value;
<add> val = 1 - nval.doubleValue() / e.getValue().doubleValue();
<ide> }
<ide>
<ide> margin.emit(new KeyValPair(key, getValue(val.doubleValue()))); |
|
JavaScript | mit | 7545095cf0160784a18b4d083e7bc0b2edd4616a | 0 | mui/mochaui,vagner2015/mochaui,vagner2015/mochaui,vagner2015/mochaui,mui/mochaui,mui/mochaui | /* -----------------------------------------------------------------
In this file:
1. Define windows
var myWindow = function(){
new MochaUI.Window({
id: 'mywindow',
title: 'My Window',
loadMethod: 'xhr',
contentURL: 'pages/lipsum.html',
width: 340,
height: 150
});
}
2. Build windows on onDomReady
myWindow();
3. Add link events to build future windows
if ($('myWindowLink')){
$('myWindowLink').addEvent('click', function(e) {
new Event(e).stop();
jsonWindows();
});
}
Note: If your link is in the top menu, it opens only a single window, and you would
like a check mark next to it when it's window is open, format the link name as follows:
window.id + LinkCheck, e.g., mywindowLinkCheck
Otherwise it is suggested you just use mywindowLink
Associated HTML for link event above:
<a id="myWindowLink" href="pages/lipsum.html">My Window</a>
Notes:
If you need to add link events to links within windows you are creating, do
it in the onContentLoaded function of the new window.
----------------------------------------------------------------- */
initializeWindows = function(){
// Examples
MochaUI.ajaxpageWindow = function(){
new MochaUI.Window({
id: 'ajaxpage',
title: 'Content Loaded with an XMLHttpRequest',
loadMethod: 'xhr',
contentURL: 'pages/lipsum.html',
width: 340,
height: 150
});
}
if ($('ajaxpageLinkCheck')){
$('ajaxpageLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.ajaxpageWindow();
});
}
MochaUI.jsonWindows = function(){
var url = 'data/json-windows-data.js';
var request = new Request.JSON({
url: url,
method: 'get',
onRequest: function(){
// add code to show loading icon
}.bind(this),
onComplete: function(properties) {
MochaUI.newWindowsFromJSON(properties.windows);
// add code to hide loading icon
}
}).send();
}
if ($('jsonLink')){
$('jsonLink').addEvent('click', function(e) {
new Event(e).stop();
MochaUI.jsonWindows();
});
}
MochaUI.youtubeWindow = function(){
new MochaUI.Window({
id: 'youtube',
title: 'YouTube in Iframe',
loadMethod: 'iframe',
contentURL: 'pages/youtube.html',
width: 340,
height: 280,
scrollbars: false,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
resizeLimit: {'x': [330, 2500], 'y': [250, 2000]},
contentBgColor: '#000',
toolbar: true,
toolbarURL: 'pages/youtube-tabs.html'
});
}
if ($('youtubeLinkCheck')) {
$('youtubeLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.youtubeWindow();
});
}
MochaUI.mootoolsWindow = function(){
new MochaUI.Window({
id: 'mootools',
title: 'Mootools in an Iframe',
loadMethod: 'iframe',
contentURL: 'http://mootools.net/',
width: 650,
height: 400
});
}
if ($('mootoolsLinkCheck')) {
$('mootoolsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.mootoolsWindow();
});
}
MochaUI.parametricsWindow = function(){
new MochaUI.Window({
id: 'parametrics',
title: 'Window Parametrics',
icon: 'images/icons/page_red.gif',
loadMethod: 'xhr',
contentURL: 'plugins/parametrics/',
onContentLoaded: function(){
if ( !MochaUI.parametricsScript == true ){
new Request({
url: 'plugins/parametrics/scripts/parametrics.js',
method: 'get',
onSuccess: function() {
MochaUI.addRadiusSlider.delay(10); // Delay is for IE6
MochaUI.addShadowSlider.delay(10); // Delay is for IE6
MochaUI.parametricsScript = true;
}.bind(this)
}).send();
}
else {
MochaUI.addRadiusSlider.delay(10); // Delay is for IE6
MochaUI.addShadowSlider.delay(10); // Delay is for IE6
}
},
width: 300,
height: 105,
x: 250,
y: 405,
padding: { top: 12, right: 12, bottom: 10, left: 12 },
resizable: false,
maximizable: false,
contentBgColor: '#fff'
});
}
if ($('parametricsLinkCheck')){
$('parametricsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.parametricsWindow();
});
}
MochaUI.clockWindow = function(){
new MochaUI.Window({
id: 'clock',
title: 'Canvas Clock',
addClass: 'transparent',
loadMethod: 'xhr',
contentURL: 'plugins/coolclock/index.html?t=' + new Date().getTime(),
onContentLoaded: function(){
if ( !MochaUI.clockScript == true ){
new Request({
url: 'plugins/coolclock/scripts/coolclock.js?t=' + new Date().getTime(),
method: 'get',
onSuccess: function() {
if (Browser.Engine.trident) {
myClockInit = function(){
CoolClock.findAndCreateClocks();
};
window.addEvent('domready', function(){
myClockInit.delay(10); // Delay is for IE
});
MochaUI.clockScript = true;
}
else {
CoolClock.findAndCreateClocks();
}
}.bind(this)
}).send();
}
else {
if (Browser.Engine.trident) {
myClockInit = function(){
CoolClock.findAndCreateClocks();
};
window.addEvent('domready', function(){
myClockInit.delay(10); // Delay is for IE
});
MochaUI.clockScript = true;
}
else {
CoolClock.findAndCreateClocks();
}
}
},
shape: 'gauge',
headerHeight: 30,
width: 160,
height: 160,
x: 570,
y: 75,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
bodyBgColor: [250,250,250]
});
}
if ($('clockLinkCheck')){
$('clockLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.clockWindow();
});
}
// Examples > Tests
MochaUI.eventsWindow = function(){
new MochaUI.Window({
id: 'windowevents',
title: 'Window Events',
loadMethod: 'xhr',
contentURL: 'pages/events.html',
onBeforeBuild: function(){
alert('This window is about to be built.');
},
onContentLoaded: function(windowEl){
alert(windowEl.id + '\'s content was loaded.');
},
onClose: function(){
alert('The window is closing.');
},
onCloseComplete: function(){
alert('The window is closed.');
},
onMinimize: function(windowEl){
alert(windowEl.id + ' was minimized.');
},
onMaximize: function(windowEl){
alert(windowEl.id + ' was maximized.');
},
onRestore: function(windowEl){
alert(windowEl.id + ' was restored.');
},
onResize: function(windowEl){
alert(windowEl.id + ' was resized.');
},
onFocus: function(windowEl){
alert(windowEl.id + ' was focused.');
},
onBlur: function(windowEl){
alert(windowEl.id + ' lost focus.');
},
width: 340,
height: 250
});
}
if ($('windoweventsLinkCheck')){
$('windoweventsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.eventsWindow();
});
}
MochaUI.containertestWindow = function(){
new MochaUI.Window({
id: 'containertest',
title: 'Container Test',
loadMethod: 'xhr',
contentURL: 'pages/lipsum.html',
container: 'pageWrapper',
width: 340,
height: 150,
x: 100,
y: 100
});
}
if ($('containertestLinkCheck')){
$('containertestLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.containertestWindow();
});
}
MochaUI.iframetestWindow = function(){
new MochaUI.Window({
id: 'iframetest',
title: 'Iframe Tests',
loadMethod: 'iframe',
contentURL: 'pages/iframetest.html'
});
}
if ($('iframetestLinkCheck')) {
$('iframetestLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.iframetestWindow();
});
}
MochaUI.accordiantestWindow = function(){
var id = 'accordiantest';
new MochaUI.Window({
id: id,
title: 'Accordian Test',
loadMethod: 'xhr',
contentURL: 'pages/overview.html',
width: 300,
height: 200,
scrollbars: false,
resizable: false,
maximizable: false,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
onContentLoaded: function(windowEl){
this.windowEl = windowEl;
var accordianDelay = function(){
new Accordion('#' + id + ' h3.accordianToggler', "#" + id + ' div.accordianElement', {
// start: 'all-closed',
opacity: false,
alwaysHide: true,
onActive: function(toggler, element){
toggler.addClass('open');
},
onBackground: function(toggler, element){
toggler.removeClass('open');
},
onStart: function(toggler, element){
this.windowEl.accordianResize = function(){
MochaUI.dynamicResize($(id));
}
this.windowEl.accordianTimer = this.windowEl.accordianResize.periodical(10);
}.bind(this),
onComplete: function(){
this.windowEl.accordianTimer = $clear(this.windowEl.accordianTimer);
MochaUI.dynamicResize($(id)) // once more for good measure
}.bind(this)
}, $(id));
}.bind(this)
accordianDelay.delay(10, this); // Delay is a fix for IE
}
});
}
if ($('accordiantestLinkCheck')){
$('accordiantestLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.accordiantestWindow();
});
}
// View
if ($('sidebarLinkCheck')){
$('sidebarLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.Desktop.sidebarToggle();
});
}
if ($('cascadeLink')){
$('cascadeLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.arrangeCascade();
});
}
if ($('tileLink')){
$('tileLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.arrangeTile();
});
}
if ($('closeLink')){
$('closeLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.closeAll();
});
}
if ($('minimizeLink')){
$('minimizeLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.minimizeAll();
});
}
// Tools
MochaUI.builderWindow = function(){
new MochaUI.Window({
id: 'builder',
title: 'Window Builder',
loadMethod: 'xhr',
contentURL: 'plugins/windowform/',
onContentLoaded: function(){
if ( !MochaUI.windowformScript == true ){
new Request({
url: 'plugins/windowform/scripts/Window-from-form.js',
method: 'get',
onSuccess: function() {
$('newWindowSubmit').addEvent('click', function(e){
new Event(e).stop();
new MochaUI.WindowForm();
});
MochaUI.windowformScript = true;
}.bind(this)
}).send();
}
},
width: 370,
height: 400,
x: 20,
y: 70,
maximizable: false
});
}
if ($('builderLinkCheck')){
$('builderLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.builderWindow();
});
}
// Todo: Add menu check mark functionality for workspaces.
// Workspaces
if ($('saveWorkspaceLink')){
$('saveWorkspaceLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.saveWorkspace();
});
}
if ($('loadWorkspaceLink')){
$('loadWorkspaceLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.loadWorkspace();
});
}
// Help
MochaUI.overviewWindow = function(){
var id = 'overview';
new MochaUI.Window({
id: id,
title: 'Overview',
icon: 'images/icons/page.gif',
loadMethod: 'xhr',
contentURL: 'pages/overview.html',
width: 300,
height: 200,
x: 250,
y: 85,
scrollbars: false,
resizable: false,
maximizable: false,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
onContentLoaded: function(windowEl){
this.windowEl = windowEl;
var accordianDelay = function(){
new Accordion('#' + id + ' h3.accordianToggler', "#" + id + ' div.accordianElement', {
// start: 'all-closed',
opacity: false,
alwaysHide: true,
onActive: function(toggler, element){
toggler.addClass('open');
},
onBackground: function(toggler, element){
toggler.removeClass('open');
},
onStart: function(toggler, element){
this.windowEl.accordianResize = function(){
MochaUI.dynamicResize($(id));
}
this.windowEl.accordianTimer = this.windowEl.accordianResize.periodical(10);
}.bind(this),
onComplete: function(){
this.windowEl.accordianTimer = $clear(this.windowEl.accordianTimer);
MochaUI.dynamicResize($(id)) // once more for good measure
}.bind(this)
}, $(id));
}.bind(this)
accordianDelay.delay(10, this); // Delay is a fix for IE
}
});
}
if ($('overviewLinkCheck')){
$('overviewLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.overviewWindow();
});
}
MochaUI.featuresWindow = function(){
new MochaUI.Window({
id: 'features',
title: 'Features',
icon: 'images/icons/page_green.gif',
loadMethod: 'xhr',
contentURL: 'pages/features.html',
//onContentLoaded: function(){
//MochaUI.initializeTabs('featuresTabs');
//},
width: 330,
height: 190,
x: 570,
y: 85,
resizeLimit: {'x': [275, 2500], 'y': [125, 2000]},
toolbar: true,
toolbarURL: 'pages/features-tabs.html'
});
}
if ($('featuresLinkCheck')){
$('featuresLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.featuresWindow();
});
}
MochaUI.faqWindow = function(){
new MochaUI.Window({
id: 'faq',
title: 'FAQ',
loadMethod: 'xhr',
contentURL: 'pages/faq.html',
width: 750,
height: 350
});
}
if ($('faqLinkCheck')){
$('faqLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.faqWindow();
});
}
MochaUI.docsWindow = function(){
new MochaUI.Window({
id: 'docs',
title: 'Documentation',
loadMethod: 'xhr',
contentURL: 'pages/docs.html',
width: 750,
height: 350,
padding: [10,10,10,10,10]
});
}
if ($('docsLinkCheck')){
$('docsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.docsWindow();
});
}
MochaUI.resourcesWindow = function(){
new MochaUI.Window({
id: 'resources',
title: 'Resources',
loadMethod: 'xhr',
contentURL: 'pages/resources.html',
width: 300,
height: 275,
x: 20,
y: 70
});
}
if ($('resourcesLinkCheck')){
$('resourcesLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.resourcesWindow();
});
}
MochaUI.helpWindow = function(){
new MochaUI.Window({
id: 'help',
title: 'Support',
loadMethod: 'xhr',
contentURL: 'pages/support.html',
width: 320,
height: 320,
x: 20,
y: 70
});
}
if ($('helpLinkCheck')){
$('helpLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.helpWindow();
});
}
MochaUI.contributeWindow = function(){
new MochaUI.Window({
id: 'contribute',
title: 'Contribute',
loadMethod: 'xhr',
contentURL: 'pages/contribute.html',
width: 320,
height: 320,
x: 20,
y: 70
});
}
if ($('contributeLinkCheck')){
$('contributeLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.contributeWindow();
});
}
MochaUI.aboutWindow = function(){
new MochaUI.Window({
id: 'about',
title: 'Mocha UI Version 0.9',
loadMethod: 'xhr',
contentURL: 'pages/about.html',
type: 'modal',
width: 300,
height: 150
});
}
if ($('aboutLinkCheck')){
$('aboutLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.aboutWindow();
});
}
// Deactivate menu header links
$$('a.returnFalse').each(function(el){
el.addEvent('click', function(e){
new Event(e).stop();
});
});
// Build windows onDomReady
MochaUI.overviewWindow();
MochaUI.parametricsWindow();
MochaUI.featuresWindow();
}
// Initialize MochaUI when the DOM is ready
window.addEvent('domready', function(){
MochaUI.Desktop = new MochaUI.Desktop();
MochaUI.Dock = new MochaUI.Dock();
MochaUI.Modal = new MochaUI.Modal();
initializeWindows();
$$('li.folder').each(function(folder){
var elements = folder.getChildren('ul');
var image = new Element('img', {
'src': 'images/icons/tree/_open.gif',
'width': 18,
'height': 18
}).inject(folder, 'top');
if (folder.hasClass('root')) {
folder.minus = 'images/icons/tree/Rminus.gif'
folder.plus = 'images/icons/tree/Rplus.gif'
}
else if (folder.getNext()) {
folder.minus = 'images/icons/tree/Tminus.gif'
folder.plus = 'images/icons/tree/Tplus.gif'
}
else {
folder.minus = 'images/icons/tree/Lminus.gif'
folder.plus = 'images/icons/tree/Lplus.gif'
}
var image = new Element('img', {
'src': folder.minus,
'width': 18,
'height': 18
}).addEvent('click', function(){
if (folder.hasClass('f-open')) {
image.setProperty('src', folder.plus);
elements.each(function(el){
el.setStyle('display', 'none');
});
folder.removeClass('f-open');
}
else {
image.setProperty('src', folder.minus);
elements.each(function(el){
el.setStyle('display', 'block');
});
folder.addClass('f-open');
}
}).inject(folder, 'top');
if (!folder.hasClass('f-open')) {
image.setProperty('src', folder.plus);
elements.each(function(el){
el.setStyle('display', 'none');
});
folder.removeClass('f-open');
}
elements.each(function(element){
var docs = element.getChildren('li.doc');
docs.each(function(el){
if (el == docs.getLast() && !el.getNext()) {
new Element('img', {
'src': 'images/icons/tree/L.gif',
'width': 18,
'height': 18
}).inject(el.getElement('span'), 'before');
}
else {
new Element('img', {
'src': 'images/icons/tree/T.gif',
'width': 18,
'height': 18
}).inject(el.getElement('span'), 'before');
}
});
});
});
$$('ul.tree li').each(function(doc){
doc.getParents('li').each(function(parent){
if (parent.getNext()) {
new Element('img', {
'src': 'images/icons/tree/I.gif',
'width': 18,
'height': 18
}).inject(doc, 'top');
}
else {
new Element('img', {
'src': 'images/spacer.gif',
'width': 18,
'height': 18
}).inject(doc, 'top');
}
});
});
$$('ul.tree li.doc').each(function(el){
new Element('img', {
'src': 'images/icons/tree/_doc.gif',
'width': 18,
'height': 18
}).inject(el.getElement('span'), 'before');
});
});
// This runs when a person leaves your page.
window.addEvent('unload', function(){
if (MochaUI) MochaUI.garbageCleanUp();
}); | mocha-ui/scripts/mocha-init.js | /* -----------------------------------------------------------------
In this file:
1. Define windows
var myWindow = function(){
new MochaUI.Window({
id: 'mywindow',
title: 'My Window',
loadMethod: 'xhr',
contentURL: 'pages/lipsum.html',
width: 340,
height: 150
});
}
2. Build windows on onDomReady
myWindow();
3. Add link events to build future windows
if ($('myWindowLink')){
$('myWindowLink').addEvent('click', function(e) {
new Event(e).stop();
jsonWindows();
});
}
Note: If your link is in the top menu, it opens only a single window, and you would
like a check mark next to it when it's window is open, format the link name as follows:
window.id + LinkCheck, e.g., mywindowLinkCheck
Otherwise it is suggested you just use mywindowLink
Associated HTML for link event above:
<a id="myWindowLink" href="pages/lipsum.html">My Window</a>
Notes:
If you need to add link events to links within windows you are creating, do
it in the onContentLoaded function of the new window.
----------------------------------------------------------------- */
initializeWindows = function(){
// Examples
MochaUI.ajaxpageWindow = function(){
new MochaUI.Window({
id: 'ajaxpage',
title: 'Content Loaded with an XMLHttpRequest',
loadMethod: 'xhr',
contentURL: 'pages/lipsum.html',
width: 340,
height: 150
});
}
if ($('ajaxpageLinkCheck')){
$('ajaxpageLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.ajaxpageWindow();
});
}
MochaUI.jsonWindows = function(){
var url = 'data/json-windows-data.js';
var request = new Request.JSON({
url: url,
method: 'get',
onRequest: function(){
// add code to show loading icon
}.bind(this),
onComplete: function(properties) {
MochaUI.newWindowsFromJSON(properties.windows);
// add code to hide loading icon
}
}).send();
}
if ($('jsonLink')){
$('jsonLink').addEvent('click', function(e) {
new Event(e).stop();
MochaUI.jsonWindows();
});
}
MochaUI.youtubeWindow = function(){
new MochaUI.Window({
id: 'youtube',
title: 'YouTube in Iframe',
loadMethod: 'iframe',
contentURL: 'pages/youtube.html',
width: 340,
height: 280,
scrollbars: false,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
resizeLimit: {'x': [330, 2500], 'y': [250, 2000]},
contentBgColor: '#000',
toolbar: true,
toolbarURL: 'pages/youtube-tabs.html'
});
}
if ($('youtubeLinkCheck')) {
$('youtubeLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.youtubeWindow();
});
}
MochaUI.mootoolsWindow = function(){
new MochaUI.Window({
id: 'mootools',
title: 'Mootools in an Iframe',
loadMethod: 'iframe',
contentURL: 'http://mootools.net/',
width: 650,
height: 400
});
}
if ($('mootoolsLinkCheck')) {
$('mootoolsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.mootoolsWindow();
});
}
MochaUI.parametricsWindow = function(){
new MochaUI.Window({
id: 'parametrics',
title: 'Window Parametrics',
icon: 'images/icons/page_red.gif',
loadMethod: 'xhr',
contentURL: 'plugins/parametrics/',
onContentLoaded: function(){
if ( !MochaUI.parametricsScript == true ){
new Request({
url: 'plugins/parametrics/scripts/parametrics.js',
method: 'get',
onSuccess: function() {
MochaUI.addRadiusSlider.delay(10); // Delay is for IE6
MochaUI.addShadowSlider.delay(10); // Delay is for IE6
MochaUI.parametricsScript = true;
}.bind(this)
}).send();
}
else {
MochaUI.addRadiusSlider.delay(10); // Delay is for IE6
MochaUI.addShadowSlider.delay(10); // Delay is for IE6
}
},
width: 300,
height: 105,
x: 250,
y: 405,
padding: { top: 12, right: 12, bottom: 10, left: 12 },
resizable: false,
maximizable: false,
contentBgColor: '#fff'
});
}
if ($('parametricsLinkCheck')){
$('parametricsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.parametricsWindow();
});
}
MochaUI.clockWindow = function(){
new MochaUI.Window({
id: 'clock',
title: 'Canvas Clock',
addClass: 'transparent',
loadMethod: 'xhr',
contentURL: 'plugins/coolclock/index.html?t=' + new Date().getTime(),
onContentLoaded: function(){
if ( !MochaUI.clockScript == true ){
new Request({
url: 'plugins/coolclock/scripts/coolclock.js?t=' + new Date().getTime(),
method: 'get',
onSuccess: function() {
if (Browser.Engine.trident) {
myClockInit = function(){
CoolClock.findAndCreateClocks();
};
window.addEvent('domready', function(){
myClockInit.delay(10); // Delay is for IE
});
MochaUI.clockScript = true;
}
else {
CoolClock.findAndCreateClocks();
}
}.bind(this)
}).send();
}
else {
if (Browser.Engine.trident) {
myClockInit = function(){
CoolClock.findAndCreateClocks();
};
window.addEvent('domready', function(){
myClockInit.delay(10); // Delay is for IE
});
MochaUI.clockScript = true;
}
else {
CoolClock.findAndCreateClocks();
}
}
},
shape: 'gauge',
headerHeight: 30,
width: 160,
height: 160,
x: 570,
y: 75,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
bodyBgColor: [250,250,250]
});
}
if ($('clockLinkCheck')){
$('clockLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.clockWindow();
});
}
// Examples > Tests
MochaUI.eventsWindow = function(){
new MochaUI.Window({
id: 'windowevents',
title: 'Window Events',
loadMethod: 'xhr',
contentURL: 'pages/events.html',
onBeforeBuild: function(){
alert('This window is about to be built.');
},
onContentLoaded: function(windowEl){
alert(windowEl.id + '\'s content was loaded.');
},
onClose: function(){
alert('The window is closing.');
},
onCloseComplete: function(){
alert('The window is closed.');
},
onMinimize: function(windowEl){
alert(windowEl.id + ' was minimized.');
},
onMaximize: function(windowEl){
alert(windowEl.id + ' was maximized.');
},
onRestore: function(windowEl){
alert(windowEl.id + ' was restored.');
},
onResize: function(windowEl){
alert(windowEl.id + ' was resized.');
},
onFocus: function(windowEl){
alert(windowEl.id + ' was focused.');
},
onBlur: function(windowEl){
alert(windowEl.id + ' lost focus.');
},
width: 340,
height: 250
});
}
if ($('windoweventsLinkCheck')){
$('windoweventsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.eventsWindow();
});
}
MochaUI.containertestWindow = function(){
new MochaUI.Window({
id: 'containertest',
title: 'Container Test',
loadMethod: 'xhr',
contentURL: 'pages/lipsum.html',
container: 'pageWrapper',
width: 340,
height: 150,
x: 100,
y: 100
});
}
if ($('containertestLinkCheck')){
$('containertestLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.containertestWindow();
});
}
MochaUI.iframetestWindow = function(){
new MochaUI.Window({
id: 'iframetest',
title: 'Iframe Tests',
loadMethod: 'iframe',
contentURL: 'pages/iframetest.html'
});
}
if ($('iframetestLinkCheck')) {
$('iframetestLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.iframetestWindow();
});
}
MochaUI.accordiantestWindow = function(){
var id = 'accordiantest';
new MochaUI.Window({
id: id,
title: 'Accordian Test',
loadMethod: 'xhr',
contentURL: 'pages/overview.html',
width: 300,
height: 200,
scrollbars: false,
resizable: false,
maximizable: false,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
onContentLoaded: function(windowEl){
this.windowEl = windowEl;
var accordianDelay = function(){
new Accordion('#' + id + ' h3.accordianToggler', "#" + id + ' div.accordianElement', {
// start: 'all-closed',
opacity: false,
alwaysHide: true,
onActive: function(toggler, element){
toggler.addClass('open');
},
onBackground: function(toggler, element){
toggler.removeClass('open');
},
onStart: function(toggler, element){
this.windowEl.accordianResize = function(){
MochaUI.dynamicResize($(id));
}
this.windowEl.accordianTimer = this.windowEl.accordianResize.periodical(10);
}.bind(this),
onComplete: function(){
this.windowEl.accordianTimer = $clear(this.windowEl.accordianTimer);
MochaUI.dynamicResize($(id)) // once more for good measure
}.bind(this)
}, $(id));
}.bind(this)
accordianDelay.delay(10, this); // Delay is a fix for IE
}
});
}
if ($('accordiantestLinkCheck')){
$('accordiantestLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.accordiantestWindow();
});
}
// View
if ($('sidebarLinkCheck')){
$('sidebarLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.Desktop.sidebarToggle();
});
}
if ($('cascadeLink')){
$('cascadeLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.arrangeCascade();
});
}
if ($('tileLink')){
$('tileLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.arrangeTile();
});
}
if ($('closeLink')){
$('closeLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.closeAll();
});
}
if ($('minimizeLink')){
$('minimizeLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.minimizeAll();
});
}
// Tools
MochaUI.builderWindow = function(){
new MochaUI.Window({
id: 'builder',
title: 'Window Builder',
loadMethod: 'xhr',
contentURL: 'plugins/windowform/',
onContentLoaded: function(){
if ( !MochaUI.windowformScript == true ){
new Request({
url: 'plugins/windowform/scripts/Window-from-form.js',
method: 'get',
onSuccess: function() {
$('newWindowSubmit').addEvent('click', function(e){
new Event(e).stop();
new MochaUI.WindowForm();
});
MochaUI.windowformScript = true;
}.bind(this)
}).send();
}
},
width: 370,
height: 400,
x: 20,
y: 70,
maximizable: false
});
}
if ($('builderLinkCheck')){
$('builderLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.builderWindow();
});
}
// Todo: Add menu check mark functionality for workspaces.
// Workspaces
if ($('saveWorkspaceLink')){
$('saveWorkspaceLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.saveWorkspace();
});
}
if ($('loadWorkspaceLink')){
$('loadWorkspaceLink').addEvent('click', function(e){
new Event(e).stop();
MochaUI.loadWorkspace();
});
}
// Help
MochaUI.overviewWindow = function(){
var id = 'overview';
new MochaUI.Window({
id: id,
title: 'Overview',
icon: 'images/icons/page.gif',
loadMethod: 'xhr',
contentURL: 'pages/overview.html',
width: 300,
height: 200,
x: 250,
y: 85,
scrollbars: false,
resizable: false,
maximizable: false,
padding: { top: 0, right: 0, bottom: 0, left: 0 },
onContentLoaded: function(windowEl){
this.windowEl = windowEl;
var accordianDelay = function(){
new Accordion('#' + id + ' h3.accordianToggler', "#" + id + ' div.accordianElement', {
// start: 'all-closed',
opacity: false,
alwaysHide: true,
onActive: function(toggler, element){
toggler.addClass('open');
},
onBackground: function(toggler, element){
toggler.removeClass('open');
},
onStart: function(toggler, element){
this.windowEl.accordianResize = function(){
MochaUI.dynamicResize($(id));
}
this.windowEl.accordianTimer = this.windowEl.accordianResize.periodical(10);
}.bind(this),
onComplete: function(){
this.windowEl.accordianTimer = $clear(this.windowEl.accordianTimer);
MochaUI.dynamicResize($(id)) // once more for good measure
}.bind(this)
}, $(id));
}.bind(this)
accordianDelay.delay(10, this); // Delay is a fix for IE
}
});
}
if ($('overviewLinkCheck')){
$('overviewLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.overviewWindow();
});
}
MochaUI.featuresWindow = function(){
new MochaUI.Window({
id: 'features',
title: 'Features',
icon: 'images/icons/page_green.gif',
loadMethod: 'xhr',
contentURL: 'pages/features.html',
//onContentLoaded: function(){
//MochaUI.initializeTabs('featuresTabs');
//},
width: 330,
height: 190,
x: 570,
y: 85,
resizeLimit: {'x': [275, 2500], 'y': [125, 2000]},
toolbar: true,
toolbarURL: 'pages/features-tabs.html'
});
}
if ($('featuresLinkCheck')){
$('featuresLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.featuresWindow();
});
}
MochaUI.faqWindow = function(){
new MochaUI.Window({
id: 'faq',
title: 'FAQ',
loadMethod: 'xhr',
contentURL: 'pages/faq.html',
width: 750,
height: 350
});
}
if ($('faqLinkCheck')){
$('faqLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.faqWindow();
});
}
MochaUI.docsWindow = function(){
new MochaUI.Window({
id: 'docs',
title: 'Documentation',
loadMethod: 'xhr',
contentURL: 'pages/docs.html',
width: 750,
height: 350,
padding: [10,10,10,10,10]
});
}
if ($('docsLinkCheck')){
$('docsLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.docsWindow();
});
}
MochaUI.resourcesWindow = function(){
new MochaUI.Window({
id: 'resources',
title: 'Resources',
loadMethod: 'xhr',
contentURL: 'pages/resources.html',
width: 300,
height: 275,
x: 20,
y: 70
});
}
if ($('resourcesLinkCheck')){
$('resourcesLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.resourcesWindow();
});
}
MochaUI.helpWindow = function(){
new MochaUI.Window({
id: 'help',
title: 'Support',
loadMethod: 'xhr',
contentURL: 'pages/support.html',
width: 320,
height: 320,
x: 20,
y: 70
});
}
if ($('helpLinkCheck')){
$('helpLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.helpWindow();
});
}
MochaUI.contributeWindow = function(){
new MochaUI.Window({
id: 'contribute',
title: 'Contribute',
loadMethod: 'xhr',
contentURL: 'pages/contribute.html',
width: 320,
height: 320,
x: 20,
y: 70
});
}
if ($('contributeLinkCheck')){
$('contributeLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.contributeWindow();
});
}
MochaUI.aboutWindow = function(){
new MochaUI.Window({
id: 'about',
title: 'Mocha UI Version 0.9',
loadMethod: 'xhr',
contentURL: 'pages/about.html',
type: 'modal',
width: 300,
height: 150
});
}
if ($('aboutLinkCheck')){
$('aboutLinkCheck').addEvent('click', function(e){
new Event(e).stop();
MochaUI.aboutWindow();
});
}
// Deactivate menu header links
$$('a.returnFalse').each(function(el){
el.addEvent('click', function(e){
new Event(e).stop();
});
});
// Build windows onDomReady
MochaUI.overviewWindow();
MochaUI.parametricsWindow();
MochaUI.featuresWindow();
}
// Initialize MochaUI when the DOM is ready
// Sets up the desktop/dock/modal singletons, registers all window links,
// then decorates the sidebar file tree: each 'li.folder' gets a folder
// icon plus a clickable plus/minus toggle that collapses or expands its
// child lists, and tree rows get connector-line images (T/L/I) so the
// hierarchy renders like a classic file-explorer tree.
window.addEvent('domready', function(){
	MochaUI.Desktop = new MochaUI.Desktop();
	MochaUI.Dock = new MochaUI.Dock();
	MochaUI.Modal = new MochaUI.Modal();
	initializeWindows();
	$$('li.folder').each(function(folder){
		// Child lists toggled by the plus/minus image below.
		var elements = folder.getChildren('ul');
		// Open-folder icon, injected first (ends up to the right of the
		// toggle image, which is injected at 'top' afterwards).
		var image = new Element('img', {
			'src': 'images/icons/tree/_open.gif',
			'width': 18,
			'height': 18
		}).inject(folder, 'top');
		// Choose connector-style toggle images: R* for the tree root,
		// T* for folders with following siblings.
		if (folder.hasClass('root')) {
			folder.minus = 'images/icons/tree/Rminus.gif'
			folder.plus = 'images/icons/tree/Rplus.gif'
		}
		else {
			folder.minus = 'images/icons/tree/Tminus.gif'
			folder.plus = 'images/icons/tree/Tplus.gif'
		}
		// NOTE: 'var image' re-declares the same hoisted binding; it is
		// deliberately rebound here so the click handler and the block
		// below operate on the toggle image, not the folder icon.
		var image = new Element('img', {
			'src': folder.minus,
			'width': 18,
			'height': 18
		}).addEvent('click', function(){
			// Toggle collapse state: swap the plus/minus image, show or
			// hide the child lists, and track state via the f-open class.
			if (folder.hasClass('f-open')) {
				image.setProperty('src', folder.plus);
				elements.each(function(el){
					el.setStyle('display', 'none');
				});
				folder.removeClass('f-open');
			}
			else {
				image.setProperty('src', folder.minus);
				elements.each(function(el){
					el.setStyle('display', 'block');
				});
				folder.addClass('f-open');
			}
		}).inject(folder, 'top');
		// Folders not marked f-open in the markup start collapsed.
		if (!folder.hasClass('f-open')) {
			image.setProperty('src', folder.plus);
			elements.each(function(el){
				el.setStyle('display', 'none');
			});
			folder.removeClass('f-open');
		}
		// Give each document row a connector image: L for the last entry
		// in a branch, T for entries with siblings below them.
		elements.each(function(element){
			var docs = element.getChildren('li.doc');
			docs.each(function(el){
				if (el == docs.getLast() && !el.getNext()) {
					new Element('img', {
						'src': 'images/icons/tree/L.gif',
						'width': 18,
						'height': 18
					}).inject(el.getElement('span'), 'before');
				}
				else {
					new Element('img', {
						'src': 'images/icons/tree/T.gif',
						'width': 18,
						'height': 18
					}).inject(el.getElement('span'), 'before');
				}
			});
		});
	});
	// Prefix every tree row with one image per ancestor: a vertical line
	// (I.gif) while the ancestor has following siblings, otherwise a
	// transparent spacer, so nesting depth lines up visually.
	$$('ul.tree li').each(function(doc){
		doc.getParents('li').each(function(parent){
			if (parent.getNext()) {
				new Element('img', {
					'src': 'images/icons/tree/I.gif',
					'width': 18,
					'height': 18
				}).inject(doc, 'top');
			}
			else {
				new Element('img', {
					'src': 'images/spacer.gif',
					'width': 18,
					'height': 18
				}).inject(doc, 'top');
			}
		});
	});
	// Finally, give every document row its document icon.
	$$('ul.tree li.doc').each(function(el){
		new Element('img', {
			'src': 'images/icons/tree/_doc.gif',
			'width': 18,
			'height': 18
		}).inject(el.getElement('span'), 'before');
	});
});
// This runs when a person leaves your page.
// Release MochaUI's element/event references on unload.
window.addEvent('unload', function(){
	if (MochaUI){
		MochaUI.garbageCleanUp();
	}
});
| mocha-ui/scripts/mocha-init.js | Work on a collapsible file tree. | <ide><path>ocha-ui/scripts/mocha-init.js
<ide> initializeWindows();
<ide>
<ide>
<del> $$('li.folder').each(function(folder){
<del> var elements = folder.getChildren('ul');
<add> $$('li.folder').each(function(folder){
<add> var elements = folder.getChildren('ul');
<ide> var image = new Element('img', {
<del> 'src': 'images/icons/tree/_open.gif',
<del> 'width': 18,
<del> 'height': 18
<del> }).inject(folder, 'top');
<add> 'src': 'images/icons/tree/_open.gif',
<add> 'width': 18,
<add> 'height': 18
<add> }).inject(folder, 'top');
<ide>
<del> if (folder.hasClass('root')) {
<del> folder.minus = 'images/icons/tree/Rminus.gif'
<del> folder.plus = 'images/icons/tree/Rplus.gif'
<del> }
<del> else {
<del> folder.minus = 'images/icons/tree/Tminus.gif'
<del> folder.plus = 'images/icons/tree/Tplus.gif'
<del> }
<add> if (folder.hasClass('root')) {
<add> folder.minus = 'images/icons/tree/Rminus.gif'
<add> folder.plus = 'images/icons/tree/Rplus.gif'
<add> }
<add> else if (folder.getNext()) {
<add> folder.minus = 'images/icons/tree/Tminus.gif'
<add> folder.plus = 'images/icons/tree/Tplus.gif'
<add> }
<add> else {
<add> folder.minus = 'images/icons/tree/Lminus.gif'
<add> folder.plus = 'images/icons/tree/Lplus.gif'
<add> }
<ide>
<del> var image = new Element('img', {
<del> 'src': folder.minus,
<del> 'width': 18,
<del> 'height': 18
<del> }).addEvent('click', function(){
<del> if (folder.hasClass('f-open')) {
<del> image.setProperty('src', folder.plus);
<del> elements.each(function(el){
<del> el.setStyle('display', 'none');
<del> });
<del> folder.removeClass('f-open');
<del> }
<del> else {
<del> image.setProperty('src', folder.minus);
<del> elements.each(function(el){
<del> el.setStyle('display', 'block');
<del> });
<del> folder.addClass('f-open');
<del> }
<del> }).inject(folder, 'top');
<add> var image = new Element('img', {
<add> 'src': folder.minus,
<add> 'width': 18,
<add> 'height': 18
<add> }).addEvent('click', function(){
<add> if (folder.hasClass('f-open')) {
<add> image.setProperty('src', folder.plus);
<add> elements.each(function(el){
<add> el.setStyle('display', 'none');
<add> });
<add> folder.removeClass('f-open');
<add> }
<add> else {
<add> image.setProperty('src', folder.minus);
<add> elements.each(function(el){
<add> el.setStyle('display', 'block');
<add> });
<add> folder.addClass('f-open');
<add> }
<add> }).inject(folder, 'top');
<ide>
<del> if (!folder.hasClass('f-open')) {
<del> image.setProperty('src', folder.plus);
<del> elements.each(function(el){
<del> el.setStyle('display', 'none');
<del> });
<del> folder.removeClass('f-open');
<del> }
<add> if (!folder.hasClass('f-open')) {
<add> image.setProperty('src', folder.plus);
<add> elements.each(function(el){
<add> el.setStyle('display', 'none');
<add> });
<add> folder.removeClass('f-open');
<add> }
<ide>
<del> elements.each(function(element){
<add> elements.each(function(element){
<ide>
<del> var docs = element.getChildren('li.doc');
<del> docs.each(function(el){
<del> if (el == docs.getLast() && !el.getNext()) {
<del> new Element('img', {
<del> 'src': 'images/icons/tree/L.gif',
<del> 'width': 18,
<del> 'height': 18
<del> }).inject(el.getElement('span'), 'before');
<del> }
<del> else {
<del> new Element('img', {
<del> 'src': 'images/icons/tree/T.gif',
<del> 'width': 18,
<del> 'height': 18
<del> }).inject(el.getElement('span'), 'before');
<del> }
<del> });
<del>
<del>
<del> });
<add> var docs = element.getChildren('li.doc');
<add> docs.each(function(el){
<add> if (el == docs.getLast() && !el.getNext()) {
<add> new Element('img', {
<add> 'src': 'images/icons/tree/L.gif',
<add> 'width': 18,
<add> 'height': 18
<add> }).inject(el.getElement('span'), 'before');
<add> }
<add> else {
<add> new Element('img', {
<add> 'src': 'images/icons/tree/T.gif',
<add> 'width': 18,
<add> 'height': 18
<add> }).inject(el.getElement('span'), 'before');
<add> }
<add> });
<add> });
<ide>
<del> });
<add> });
<ide>
<del> $$('ul.tree li').each(function(doc){
<del> doc.getParents('li').each(function(parent){
<del> if (parent.getNext()) {
<del> new Element('img', {
<del> 'src': 'images/icons/tree/I.gif',
<del> 'width': 18,
<del> 'height': 18
<del> }).inject(doc, 'top');
<del> }
<del> else {
<del> new Element('img', {
<del> 'src': 'images/spacer.gif',
<del> 'width': 18,
<del> 'height': 18
<del> }).inject(doc, 'top');
<del> }
<del> });
<del> });
<add> $$('ul.tree li').each(function(doc){
<add> doc.getParents('li').each(function(parent){
<add> if (parent.getNext()) {
<add> new Element('img', {
<add> 'src': 'images/icons/tree/I.gif',
<add> 'width': 18,
<add> 'height': 18
<add> }).inject(doc, 'top');
<add> }
<add> else {
<add> new Element('img', {
<add> 'src': 'images/spacer.gif',
<add> 'width': 18,
<add> 'height': 18
<add> }).inject(doc, 'top');
<add> }
<add> });
<add> });
<ide>
<del> $$('ul.tree li.doc').each(function(el){
<del> new Element('img', {
<del> 'src': 'images/icons/tree/_doc.gif',
<del> 'width': 18,
<del> 'height': 18
<del> }).inject(el.getElement('span'), 'before');
<del> });
<add> $$('ul.tree li.doc').each(function(el){
<add> new Element('img', {
<add> 'src': 'images/icons/tree/_doc.gif',
<add> 'width': 18,
<add> 'height': 18
<add> }).inject(el.getElement('span'), 'before');
<add> });
<ide>
<ide> });
<ide> |
|
Java | apache-2.0 | error: pathspec 'src/com/valkryst/VTerminal/builder/component/ScreenBuilder.java' did not match any file(s) known to git
| d2c5ab14f5d0c7f392f01b0b1c8c553efc452f42 | 1 | Valkryst/VTerminal | package com.valkryst.VTerminal.builder.component;
import com.valkryst.VRadio.Radio;
import com.valkryst.VTerminal.component.Component;
import com.valkryst.VTerminal.component.Layer;
import com.valkryst.VTerminal.component.Screen;
import com.valkryst.VTerminal.misc.JSONFunctions;
import lombok.Getter;
import lombok.NonNull;
import lombok.Setter;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import java.util.LinkedHashSet;
import java.util.Set;
public class ScreenBuilder extends ComponentBuilder<Screen> {
/** The width of the screen, in characters. */
@Getter @Setter private int width;
/** The height of the screen, in characters. */
@Getter @Setter private int height;
/** The non-layer components displayed on the screen. */
@Getter @Setter @NonNull private Set<Component> components = new LinkedHashSet<>();
/** The layer components displayed on the screen. */
@Getter @Setter @NonNull private Set<Layer> layerComponents = new LinkedHashSet<>();
@Override
public Screen build() {
checkState();
return new Screen(this);
}
/**
* Checks the current state of the builder.
*
* @throws IllegalArgumentException
* If the width or height is less than one.
*/
protected void checkState() throws NullPointerException {
super.checkState();
if (width < 1) {
throw new IllegalArgumentException("The width cannot be less than one.");
}
if (height < 1) {
throw new IllegalArgumentException("The height cannot be less than one.");
}
}
/** Resets the builder to it's default state. */
public void reset() {
super.reset();
width = 80;
height = 24;
components = new LinkedHashSet<>();
layerComponents = new LinkedHashSet<>();
}
@Override
public void parseJSON(final @NonNull JSONObject jsonObject) {
final Radio<String> radio = super.getRadio();
reset();
super.parseJSON(jsonObject);
final Integer width = JSONFunctions.getIntElement(jsonObject, "width");
final Integer height = JSONFunctions.getIntElement(jsonObject, "height");
final JSONArray components = (JSONArray) jsonObject.get("components");
if (width != null) {
this.width = width;
}
if (height != null) {
this.height = height;
}
if (components != null) {
for (final Object obj : components) {
final JSONObject arrayElement = (JSONObject) obj;
if (arrayElement != null) {
final Component component = loadComponentFromJSON(arrayElement, radio);
if (component != null) {
if (component instanceof Layer) {
layerComponents.add((Layer) component);
} else {
this.components.add(component);
}
}
}
}
}
}
/**
* Loads a component from it's JSON representation.
*
* @param jsonObject
* The JSON.
*
* @param radio
* The radio for the component to use.
*
* @return
* The component.
*
* @throws IllegalArgumentException
* If the type of the component isn't supported.
*/
private Component loadComponentFromJSON(final @NonNull JSONObject jsonObject, final @NonNull Radio<String> radio) {
String componentType = (String) jsonObject.get("type");
if (componentType == null) {
return null;
}
componentType = componentType.toLowerCase();
switch (componentType) {
case "button": {
final ButtonBuilder buttonBuilder = new ButtonBuilder();
buttonBuilder.parseJSON(jsonObject);
buttonBuilder.setRadio(radio);
return buttonBuilder.build();
}
case "check box": {
final CheckBoxBuilder checkBoxBuilder = new CheckBoxBuilder();
checkBoxBuilder.parseJSON(jsonObject);
checkBoxBuilder.setRadio(radio);
return checkBoxBuilder.build();
}
case "label": {
final LabelBuilder labelBuilder = new LabelBuilder();
labelBuilder.parseJSON(jsonObject);
labelBuilder.setRadio(radio);
return labelBuilder.build();
}
case "progress bar": {
final ProgressBarBuilder progressBarBuilder = new ProgressBarBuilder();
progressBarBuilder.parseJSON(jsonObject);
progressBarBuilder.setRadio(radio);
return progressBarBuilder.build();
}
case "text field": {
final TextFieldBuilder textFieldBuilder = new TextFieldBuilder();
textFieldBuilder.parseJSON(jsonObject);
textFieldBuilder.setRadio(radio);
return textFieldBuilder.build();
}
case "text area": {
final TextAreaBuilder textAreaBuilder = new TextAreaBuilder();
textAreaBuilder.parseJSON(jsonObject);
textAreaBuilder.setRadio(radio);
return textAreaBuilder.build();
}
default: {
throw new IllegalArgumentException("The type '" + componentType + "' is not supported.");
}
}
}
}
| src/com/valkryst/VTerminal/builder/component/ScreenBuilder.java | Initial commit.
| src/com/valkryst/VTerminal/builder/component/ScreenBuilder.java | Initial commit. | <ide><path>rc/com/valkryst/VTerminal/builder/component/ScreenBuilder.java
<add>package com.valkryst.VTerminal.builder.component;
<add>
<add>import com.valkryst.VRadio.Radio;
<add>import com.valkryst.VTerminal.component.Component;
<add>import com.valkryst.VTerminal.component.Layer;
<add>import com.valkryst.VTerminal.component.Screen;
<add>import com.valkryst.VTerminal.misc.JSONFunctions;
<add>import lombok.Getter;
<add>import lombok.NonNull;
<add>import lombok.Setter;
<add>import org.json.simple.JSONArray;
<add>import org.json.simple.JSONObject;
<add>
<add>import java.util.LinkedHashSet;
<add>import java.util.Set;
<add>
<add>public class ScreenBuilder extends ComponentBuilder<Screen> {
<add> /** The width of the screen, in characters. */
<add> @Getter @Setter private int width;
<add> /** The height of the screen, in characters. */
<add> @Getter @Setter private int height;
<add>
<add> /** The non-layer components displayed on the screen. */
<add> @Getter @Setter @NonNull private Set<Component> components = new LinkedHashSet<>();
<add>
<add> /** The layer components displayed on the screen. */
<add> @Getter @Setter @NonNull private Set<Layer> layerComponents = new LinkedHashSet<>();
<add>
<add> @Override
<add> public Screen build() {
<add> checkState();
<add> return new Screen(this);
<add> }
<add>
<add> /**
<add> * Checks the current state of the builder.
<add> *
<add> * @throws IllegalArgumentException
<add> * If the width or height is less than one.
<add> */
<add> protected void checkState() throws NullPointerException {
<add> super.checkState();
<add>
<add> if (width < 1) {
<add> throw new IllegalArgumentException("The width cannot be less than one.");
<add> }
<add>
<add> if (height < 1) {
<add> throw new IllegalArgumentException("The height cannot be less than one.");
<add> }
<add> }
<add>
<add> /** Resets the builder to it's default state. */
<add> public void reset() {
<add> super.reset();
<add>
<add> width = 80;
<add> height = 24;
<add>
<add> components = new LinkedHashSet<>();
<add> layerComponents = new LinkedHashSet<>();
<add> }
<add>
<add> @Override
<add> public void parseJSON(final @NonNull JSONObject jsonObject) {
<add> final Radio<String> radio = super.getRadio();
<add>
<add> reset();
<add> super.parseJSON(jsonObject);
<add>
<add>
<add> final Integer width = JSONFunctions.getIntElement(jsonObject, "width");
<add> final Integer height = JSONFunctions.getIntElement(jsonObject, "height");
<add> final JSONArray components = (JSONArray) jsonObject.get("components");
<add>
<add>
<add> if (width != null) {
<add> this.width = width;
<add> }
<add>
<add> if (height != null) {
<add> this.height = height;
<add> }
<add>
<add>
<add> if (components != null) {
<add> for (final Object obj : components) {
<add> final JSONObject arrayElement = (JSONObject) obj;
<add>
<add> if (arrayElement != null) {
<add> final Component component = loadComponentFromJSON(arrayElement, radio);
<add>
<add> if (component != null) {
<add> if (component instanceof Layer) {
<add> layerComponents.add((Layer) component);
<add> } else {
<add> this.components.add(component);
<add> }
<add> }
<add> }
<add> }
<add> }
<add> }
<add>
<add> /**
<add> * Loads a component from it's JSON representation.
<add> *
<add> * @param jsonObject
<add> * The JSON.
<add> *
<add> * @param radio
<add> * The radio for the component to use.
<add> *
<add> * @return
<add> * The component.
<add> *
<add> * @throws IllegalArgumentException
<add> * If the type of the component isn't supported.
<add> */
<add> private Component loadComponentFromJSON(final @NonNull JSONObject jsonObject, final @NonNull Radio<String> radio) {
<add> String componentType = (String) jsonObject.get("type");
<add>
<add> if (componentType == null) {
<add> return null;
<add> }
<add>
<add> componentType = componentType.toLowerCase();
<add>
<add> switch (componentType) {
<add> case "button": {
<add> final ButtonBuilder buttonBuilder = new ButtonBuilder();
<add> buttonBuilder.parseJSON(jsonObject);
<add> buttonBuilder.setRadio(radio);
<add> return buttonBuilder.build();
<add> }
<add>
<add> case "check box": {
<add> final CheckBoxBuilder checkBoxBuilder = new CheckBoxBuilder();
<add> checkBoxBuilder.parseJSON(jsonObject);
<add> checkBoxBuilder.setRadio(radio);
<add> return checkBoxBuilder.build();
<add> }
<add>
<add> case "label": {
<add> final LabelBuilder labelBuilder = new LabelBuilder();
<add> labelBuilder.parseJSON(jsonObject);
<add> labelBuilder.setRadio(radio);
<add> return labelBuilder.build();
<add> }
<add>
<add> case "progress bar": {
<add> final ProgressBarBuilder progressBarBuilder = new ProgressBarBuilder();
<add> progressBarBuilder.parseJSON(jsonObject);
<add> progressBarBuilder.setRadio(radio);
<add> return progressBarBuilder.build();
<add> }
<add>
<add> case "text field": {
<add> final TextFieldBuilder textFieldBuilder = new TextFieldBuilder();
<add> textFieldBuilder.parseJSON(jsonObject);
<add> textFieldBuilder.setRadio(radio);
<add> return textFieldBuilder.build();
<add> }
<add>
<add> case "text area": {
<add> final TextAreaBuilder textAreaBuilder = new TextAreaBuilder();
<add> textAreaBuilder.parseJSON(jsonObject);
<add> textAreaBuilder.setRadio(radio);
<add> return textAreaBuilder.build();
<add> }
<add>
<add> default: {
<add> throw new IllegalArgumentException("The type '" + componentType + "' is not supported.");
<add> }
<add> }
<add> }
<add>} |
|
JavaScript | apache-2.0 | 458f94efa4616873efc9ff81d5c39a5ce0f37572 | 0 | shopgate/theme-gmd | import { css } from 'glamor';
export default css({
' h1, h2, h3, h4, h5, h6, p, ul, ol': {
margin: '1rem 0',
},
' h1, h2, h3, h4, h5, h6': {
fontWeight: 600,
},
' h1': {
fontSize: '1.5rem',
},
' h2': {
fontSize: '1.25rem',
},
' h3': {
fontSize: '1.1rem',
},
' h4, h5, h6': {
fontSize: '1rem',
},
' img': {
display: 'initial',
},
// Clearfix for floated widget content
':after': {
clear: 'both',
content: '.',
display: 'block',
visibility: 'hidden',
height: 0,
},
}).toString();
| widgets/Html/style.js | import { css } from 'glamor';
export default css({
' img': {
display: 'initial',
},
// Clearfix for floated widget content
':after': {
clear: 'both',
content: '.',
display: 'block',
visibility: 'hidden',
height: 0,
},
}).toString();
| PWA-499 - basic elements typography fixes for CMS pages
| widgets/Html/style.js | PWA-499 - basic elements typography fixes for CMS pages | <ide><path>idgets/Html/style.js
<ide> import { css } from 'glamor';
<ide>
<ide> export default css({
<add> ' h1, h2, h3, h4, h5, h6, p, ul, ol': {
<add> margin: '1rem 0',
<add> },
<add> ' h1, h2, h3, h4, h5, h6': {
<add> fontWeight: 600,
<add> },
<add> ' h1': {
<add> fontSize: '1.5rem',
<add> },
<add> ' h2': {
<add> fontSize: '1.25rem',
<add> },
<add> ' h3': {
<add> fontSize: '1.1rem',
<add> },
<add> ' h4, h5, h6': {
<add> fontSize: '1rem',
<add> },
<ide> ' img': {
<ide> display: 'initial',
<ide> }, |
|
Java | apache-2.0 | 0ba6f4c6eb843e6f698c62ba7be1fe1d69cb375e | 0 | DwayneJengSage/BridgePF,DwayneJengSage/BridgePF,DwayneJengSage/BridgePF,Sage-Bionetworks/BridgePF,alxdarksage/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF | package org.sagebionetworks.bridge.validators;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.commons.lang3.StringUtils;
import org.sagebionetworks.bridge.models.surveys.Constraints;
import org.sagebionetworks.bridge.models.surveys.MultiValueConstraints;
import org.sagebionetworks.bridge.models.surveys.StringConstraints;
import org.sagebionetworks.bridge.models.surveys.Survey;
import org.sagebionetworks.bridge.models.surveys.SurveyQuestion;
import org.sagebionetworks.bridge.models.surveys.SurveyRule;
import org.sagebionetworks.bridge.models.surveys.UIHint;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
import com.google.common.collect.Sets;
public class SurveyValidator implements Validator {
@Override
public boolean supports(Class<?> clazz) {
return Survey.class.isAssignableFrom(clazz);
}
@Override
public void validate(Object object, Errors errors) {
Survey survey = (Survey)object;
if (StringUtils.isBlank(survey.getIdentifier())) {
errors.reject("missing an identifier");
}
if (StringUtils.isBlank(survey.getStudyKey())) {
errors.reject("missing a study key");
}
if (StringUtils.isBlank(survey.getGuid())) {
errors.reject("missing a GUID");
}
for (int i=0; i < survey.getQuestions().size(); i++) {
SurveyQuestion question = survey.getQuestions().get(i);
errors.pushNestedPath("question"+i);
doValidateQuestion(question, i, errors);
errors.popNestedPath();
}
validateRules(errors, survey.getQuestions());
}
private void validateRules(Errors errors, List<SurveyQuestion> questions) {
// Should not try and back-track in the survey.
Set<String> alreadySeenIdentifiers = Sets.newHashSet();
for (int i=0; i < questions.size(); i++) {
SurveyQuestion question = questions.get(i);
Constraints c = question.getConstraints();
if (c != null && c.getRules() != null) {
for (SurveyRule rule : c.getRules()) {
if (alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
errors.pushNestedPath("question"+i);
rejectField(errors, "rule", "back references question %s", rule.getSkipToTarget());
errors.popNestedPath();
}
}
}
alreadySeenIdentifiers.add(question.getIdentifier());
}
// Now verify that all skipToTarget identifiers actually exist
for (int i=0; i < questions.size(); i++) {
SurveyQuestion question = questions.get(i);
Constraints c = question.getConstraints();
if (c != null && c.getRules() != null) {
for (SurveyRule rule : c.getRules()) {
if (!alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
errors.pushNestedPath("question"+i);
rejectField(errors, "rule", "has a skipTo identifier that doesn't exist: %s", rule.getSkipToTarget());
errors.popNestedPath();
}
}
}
}
}
private void doValidateQuestion(SurveyQuestion question, int pos, Errors errors) {
if (StringUtils.isBlank(question.getIdentifier())) {
errors.rejectValue("identifier", "missing an identifier");
}
if (question.getUiHint() == null) {
errors.rejectValue("uiHint", "missing a UI hint");
}
if (StringUtils.isBlank(question.getPrompt())) {
errors.rejectValue("prompt", "missing a prompt/question");
}
if (question.getConstraints() == null) {
errors.rejectValue("constraints", "missing a constraints object");
} else {
errors.pushNestedPath("constraints");
doValidateConstraints(question, question.getConstraints(), errors);
errors.popNestedPath();
}
}
private void doValidateConstraints(SurveyQuestion question, Constraints con, Errors errors) {
if (con.getDataType() == null) {
errors.reject("has no dataType");
return;
}
UIHint hint = question.getUiHint();
if (hint == null) {
rejectField(errors, "uiHint", "required");
} else if (!con.getSupportedHints().contains(hint)) {
rejectField(errors, "dataType", "(%s) doesn't match the UI hint of '%s'", con.getDataType().name()
.toLowerCase(), hint.name().toLowerCase());
} else if (con instanceof MultiValueConstraints) {
// Multiple values have a few odd UI constraints
MultiValueConstraints mcon = (MultiValueConstraints)con;
String hintName = hint.name().toLowerCase();
if (hint == UIHint.COMBOBOX && (mcon.getAllowMultiple() || !mcon.getAllowOther())) {
rejectField(errors, "uiHint", "'%s' is only valid when multiple = false and other = true", hintName);
} else if (mcon.getAllowMultiple() && hint != UIHint.CHECKBOX && hint != UIHint.LIST) {
rejectField(errors, "uiHint",
"allows multiples but the '%s' UI hint doesn't gather more than one answer", hintName);
} else if (!mcon.getAllowMultiple() && (hint == UIHint.CHECKBOX || hint == UIHint.LIST)) {
rejectField(errors, "uiHint",
"doesn't allow multiples but the '%s' UI hint gathers more than one answer", hintName);
}
} else if (con instanceof StringConstraints) {
// Validate the regular expression, if it exists
StringConstraints scon = (StringConstraints)con;
if (StringUtils.isNotBlank(scon.getPattern())) {
try {
Pattern.compile(scon.getPattern());
} catch (PatternSyntaxException exception) {
rejectField(errors, "pattern", "is not a valid regular expression: %s", scon.getPattern());
}
}
}
}
private void rejectField(Errors errors, String field, String message, Object... args) {
if (args != null && args.length > 0) {
errors.rejectValue(field, message, args, message);
} else {
errors.rejectValue(field, message);
}
}
}
| app/org/sagebionetworks/bridge/validators/SurveyValidator.java | package org.sagebionetworks.bridge.validators;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.commons.lang3.StringUtils;
import org.sagebionetworks.bridge.models.surveys.Constraints;
import org.sagebionetworks.bridge.models.surveys.MultiValueConstraints;
import org.sagebionetworks.bridge.models.surveys.StringConstraints;
import org.sagebionetworks.bridge.models.surveys.Survey;
import org.sagebionetworks.bridge.models.surveys.SurveyQuestion;
import org.sagebionetworks.bridge.models.surveys.SurveyRule;
import org.sagebionetworks.bridge.models.surveys.UIHint;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
import com.google.common.collect.Sets;
/**
 * Spring validator for {@link Survey} objects. Checks the survey's own required fields
 * (identifier, study key, GUID), validates each question (identifier, UI hint, prompt,
 * constraints), and verifies that skip-to rules neither back-reference earlier questions
 * nor point at identifiers that don't exist.
 */
public class SurveyValidator implements Validator {

    @Override
    public boolean supports(Class<?> clazz) {
        return Survey.class.isAssignableFrom(clazz);
    }

    @Override
    public void validate(Object object, Errors errors) {
        Survey survey = (Survey)object;
        // Survey-level required fields.
        if (StringUtils.isBlank(survey.getIdentifier())) {
            errors.reject("missing an identifier");
        }
        if (StringUtils.isBlank(survey.getStudyKey())) {
            errors.reject("missing a study key");
        }
        if (StringUtils.isBlank(survey.getGuid())) {
            errors.reject("missing a GUID");
        }
        validateRules(errors, survey.getQuestions());

        // Validate each question under a nested "questionN" path so error messages
        // identify the offending question by position.
        for (int i=0; i < survey.getQuestions().size(); i++) {
            SurveyQuestion question = survey.getQuestions().get(i);
            errors.pushNestedPath("question"+i);
            doValidateQuestion(question, i, errors);
            errors.popNestedPath();
        }
    }

    /**
     * Verifies skip-to rules: a rule may not reference a question that appears earlier in
     * the survey, and every skipToTarget must name an existing question identifier.
     * Constraints or rules can be null when a question is malformed, so both are guarded
     * here — otherwise a single bad question would throw an NPE and suppress all the
     * remaining validation messages.
     */
    private void validateRules(Errors errors, List<SurveyQuestion> questions) {
        // Should not try and back-track in the survey.
        Set<String> alreadySeenIdentifiers = Sets.newHashSet();
        for (int i=0; i < questions.size(); i++) {
            SurveyQuestion question = questions.get(i);
            Constraints c = question.getConstraints();
            if (c != null && c.getRules() != null) {
                for (SurveyRule rule : c.getRules()) {
                    if (alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
                        errors.pushNestedPath("question"+i);
                        rejectField(errors, "rule", "back references question %s", rule.getSkipToTarget());
                        errors.popNestedPath();
                    }
                }
            }
            alreadySeenIdentifiers.add(question.getIdentifier());
        }
        // Now verify that all skipToTarget identifiers actually exist
        for (int i=0; i < questions.size(); i++) {
            SurveyQuestion question = questions.get(i);
            Constraints c = question.getConstraints();
            if (c != null && c.getRules() != null) {
                for (SurveyRule rule : c.getRules()) {
                    if (!alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
                        errors.pushNestedPath("question"+i);
                        rejectField(errors, "rule", "has a skipTo identifier that doesn't exist: %s", rule.getSkipToTarget());
                        errors.popNestedPath();
                    }
                }
            }
        }
    }

    /** Validates one question's required fields, then its constraints under a nested path. */
    private void doValidateQuestion(SurveyQuestion question, int pos, Errors errors) {
        if (StringUtils.isBlank(question.getIdentifier())) {
            errors.rejectValue("identifier", "missing an identifier");
        }
        if (question.getUiHint() == null) {
            errors.rejectValue("uiHint", "missing a UI hint");
        }
        if (StringUtils.isBlank(question.getPrompt())) {
            errors.rejectValue("prompt", "missing a prompt/question");
        }
        if (question.getConstraints() == null) {
            errors.rejectValue("constraints", "missing a constraints object");
        } else {
            errors.pushNestedPath("constraints");
            doValidateConstraints(question, question.getConstraints(), errors);
            errors.popNestedPath();
        }
    }

    /**
     * Checks that constraints declare a data type, that the question's UI hint is among
     * the hints the constraints support, and enforces type-specific rules for
     * multi-value and string constraints (regex patterns must compile).
     */
    private void doValidateConstraints(SurveyQuestion question, Constraints con, Errors errors) {
        if (con.getDataType() == null) {
            errors.reject("has no dataType");
            return;
        }
        UIHint hint = question.getUiHint();
        if (hint == null) {
            rejectField(errors, "uiHint", "required");
        } else if (!con.getSupportedHints().contains(hint)) {
            rejectField(errors, "dataType", "(%s) doesn't match the UI hint of '%s'", con.getDataType().name()
                .toLowerCase(), hint.name().toLowerCase());
        } else if (con instanceof MultiValueConstraints) {
            // Multiple values have a few odd UI constraints
            MultiValueConstraints mcon = (MultiValueConstraints)con;
            String hintName = hint.name().toLowerCase();
            if (hint == UIHint.COMBOBOX && (mcon.getAllowMultiple() || !mcon.getAllowOther())) {
                rejectField(errors, "uiHint", "'%s' is only valid when multiple = false and other = true", hintName);
            } else if (mcon.getAllowMultiple() && hint != UIHint.CHECKBOX && hint != UIHint.LIST) {
                rejectField(errors, "uiHint",
                        "allows multiples but the '%s' UI hint doesn't gather more than one answer", hintName);
            } else if (!mcon.getAllowMultiple() && (hint == UIHint.CHECKBOX || hint == UIHint.LIST)) {
                rejectField(errors, "uiHint",
                        "doesn't allow multiples but the '%s' UI hint gathers more than one answer", hintName);
            }
        } else if (con instanceof StringConstraints) {
            // Validate the regular expression, if it exists
            StringConstraints scon = (StringConstraints)con;
            if (StringUtils.isNotBlank(scon.getPattern())) {
                try {
                    Pattern.compile(scon.getPattern());
                } catch (PatternSyntaxException exception) {
                    rejectField(errors, "pattern", "is not a valid regular expression: %s", scon.getPattern());
                }
            }
        }
    }

    /** Uses message-formatting arguments when supplied; otherwise rejects with the plain message. */
    private void rejectField(Errors errors, String field, String message, Object... args) {
        if (args != null && args.length > 0) {
            errors.rejectValue(field, message, args, message);
        } else {
            errors.rejectValue(field, message);
        }
    }
}
| Small change to prevent an NPE where there's an error earlier in the validation, such that you don't get all the validation messages back from the server.
| app/org/sagebionetworks/bridge/validators/SurveyValidator.java | Small change to prevent an NPE where there's an error earlier in the validation, such that you don't get all the validation messages back from the server. | <ide><path>pp/org/sagebionetworks/bridge/validators/SurveyValidator.java
<ide> if (StringUtils.isBlank(survey.getGuid())) {
<ide> errors.reject("missing a GUID");
<ide> }
<del> validateRules(errors, survey.getQuestions());
<del>
<ide> for (int i=0; i < survey.getQuestions().size(); i++) {
<ide> SurveyQuestion question = survey.getQuestions().get(i);
<ide> errors.pushNestedPath("question"+i);
<ide> doValidateQuestion(question, i, errors);
<ide> errors.popNestedPath();
<ide> }
<add> validateRules(errors, survey.getQuestions());
<ide> }
<ide> private void validateRules(Errors errors, List<SurveyQuestion> questions) {
<ide> // Should not try and back-track in the survey.
<ide> Set<String> alreadySeenIdentifiers = Sets.newHashSet();
<ide> for (int i=0; i < questions.size(); i++) {
<ide> SurveyQuestion question = questions.get(i);
<del> for (SurveyRule rule : question.getConstraints().getRules()) {
<del> if (alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
<del> errors.pushNestedPath("question"+i);
<del> rejectField(errors, "rule", "back references question %s", rule.getSkipToTarget());
<del> errors.popNestedPath();
<add> Constraints c = question.getConstraints();
<add> if (c != null && c.getRules() != null) {
<add> for (SurveyRule rule : c.getRules()) {
<add> if (alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
<add> errors.pushNestedPath("question"+i);
<add> rejectField(errors, "rule", "back references question %s", rule.getSkipToTarget());
<add> errors.popNestedPath();
<add> }
<ide> }
<ide> }
<ide> alreadySeenIdentifiers.add(question.getIdentifier());
<ide> // Now verify that all skipToTarget identifiers actually exist
<ide> for (int i=0; i < questions.size(); i++) {
<ide> SurveyQuestion question = questions.get(i);
<del> for (SurveyRule rule : question.getConstraints().getRules()) {
<del> if (!alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
<del> errors.pushNestedPath("question"+i);
<del> rejectField(errors, "rule", "has a skipTo identifier that doesn't exist: %s", rule.getSkipToTarget());
<del> errors.popNestedPath();
<add> Constraints c = question.getConstraints();
<add> if (c != null && c.getRules() != null) {
<add> for (SurveyRule rule : c.getRules()) {
<add> if (!alreadySeenIdentifiers.contains(rule.getSkipToTarget())) {
<add> errors.pushNestedPath("question"+i);
<add> rejectField(errors, "rule", "has a skipTo identifier that doesn't exist: %s", rule.getSkipToTarget());
<add> errors.popNestedPath();
<add> }
<ide> }
<ide> }
<ide> } |
|
Java | apache-2.0 | 3765a143332e3989589dc5a789d4cfc4225c9266 | 0 | allenprogram/guava,yanyongshan/guava,Kevin2030/guava,DavesMan/guava,kingland/guava,mosoft521/guava,dpursehouse/guava,DucQuang1/guava,tli2/guava,norru/guava,KengoTODA/guava-libraries,manolama/guava,kgislsompo/guava-libraries,mohanaraosv/guava,weihungliu/guava,janus-project/guava.janusproject.io,GabrielNicolasAvellaneda/guava,njucslqq/guava,npvincent/guava,jankill/guava,lijunhuayc/guava,taoguan/guava,HarveyTvT/guava,mkodekar/guava-libraries,jiteshmohan/guava,Haus1/guava-libraries,codershamo/guava,disc99/guava,elijah513/guava,dubu/guava-libraries,Xaerxess/guava,DucQuang1/guava,Ariloum/guava,zcwease/guava-libraries,huangsihuan/guava,witekcc/guava,abel-von/guava,XiWenRen/guava,allenprogram/guava,tailorlala/guava-libraries,huangsihuan/guava,KengoTODA/guava-libraries,ceosilvajr/guava,sebadiaz/guava,jayhetee/guava,taoguan/guava,clcron/guava-libraries,maidh91/guava-libraries,leesir/guava,yangxu998/guava-libraries,DavesMan/guava,abel-von/guava,Yijtx/guava,kaoudis/guava,eidehua/guava,lijunhuayc/guava,SyllaJay/guava,r4-keisuke/guava,montycheese/guava,Haus1/guava-libraries,mkodekar/guava-libraries,google/guava,DaveAKing/guava-libraries,tunzao/guava,sensui/guava-libraries,janus-project/guava.janusproject.io,jackyglony/guava,yigubigu/guava,xasx/guava,mgedigian/guava-bloom-filter,dushmis/guava,DaveAKing/guava-libraries,renchunxiao/guava,juneJuly/guava,binhvu7/guava,AnselQiao/guava,ningg/guava,leogong/guava,HarveyTvT/guava,RoliMG/guava,ben-manes/guava,clcron/guava-libraries,paddx01/guava-src,anigeorge/guava,kingland/guava,gmaes/guava,scr/guava,mbarbero/guava-libraries,juneJuly/guava,yf0994/guava-libraries,witekcc/guava,EdwardLee03/guava,aditya-chaturvedi/guava,chen870647924/guava-libraries,aiyanbo/guava,google/guava,codershamo/guava,manolama/guava,5A68656E67/guava,paplorinc/guava,dmi3aleks/guava,seanli310/guava,VikingDen/guava,ChengLong/guava,disc99/guava,berndhopp/guava,rob3ns/guava,yuan232007/guava,ignaciotcrespo/g
uava,mway08/guava,BollyCheng/guava,aiyanbo/guava,zcwease/guava-libraries,yuan232007/guava,mosoft521/guava,jsnchen/guava,sensui/guava-libraries,sebadiaz/guava,10045125/guava,leesir/guava,KengoTODA/guava,pwz3n0/guava,1yvT0s/guava,Xaerxess/guava,Overruler/guava-libraries,google/guava,AnselQiao/guava,lisb/guava,kucci/guava-libraries,mgalushka/guava,paddx01/guava-src,sunbeansoft/guava,scr/guava,jakubmalek/guava,jamesbrowder/guava-libraries,Balzanka/guava-libraries,nulakasatish/guava-libraries,XiWenRen/guava,0359xiaodong/guava,tobecrazy/guava,seanli310/guava,cogitate/guava-libraries,5A68656E67/guava,jsnchen/guava,qingsong-xu/guava,VikingDen/guava,sunbeansoft/guava,SaintBacchus/guava,fengshao0907/guava-libraries,mengdiwang/guava-libraries,lgscofield/guava,anigeorge/guava,cgdecker/guava,npvincent/guava,fengshao0907/guava,jedyang/guava,yigubigu/guava,lisb/guava,marstianna/guava,xueyin87/guava-libraries,renchunxiao/guava,mengdiwang/guava-libraries,Balzanka/guava-libraries,ningg/guava,danielnorberg/guava-libraries,ignaciotcrespo/guava,r4-keisuke/guava,xasx/guava,elijah513/guava,fengshao0907/guava,jackyglony/guava,dnrajugade/guava-libraries,mosoft521/guava,eidehua/guava,liyazhou/guava,baratali/guava,gvikei/guava-libraries,yanyongshan/guava,Ranjodh-Singh/ranjodh87-guavalib,marstianna/guava,ChengLong/guava,GabrielNicolasAvellaneda/guava,yangxu998/guava-libraries,kucci/guava-libraries,GitHub4Lgfei/guava,mgalushka/guava,1yvT0s/guava,Akshay77/guava,Yijtx/guava,monokurobo/guava,thinker-fang/guava,norru/guava,easyfmxu/guava,uschindler/guava,Ariloum/guava,okaywit/guava-libraries,RoliMG/guava,licheng-xd/guava,typetools/guava,thinker-fang/guava,paplorinc/guava,baratali/guava,tailorlala/guava-libraries,sander120786/guava-libraries,SaintBacchus/guava,rgoldberg/guava,jayhetee/guava,rcpoison/guava,mbarbero/guava-libraries,tli2/guava,qingsong-xu/guava,Overruler/guava-libraries,Kevin2030/guava,rcpoison/guava,dnrajugade/guava-libraries,njucslqq/guava,jakubmalek/guava,flowbywind/guava,cklsoft/gu
ava,licheng-xd/guava,nulakasatish/guava-libraries,jamesbrowder/guava-libraries,sander120786/guava-libraries,leogong/guava,tunzao/guava,Ranjodh-Singh/ranjodh87-guavalib,kgislsompo/guava-libraries,typetools/guava,flowbywind/guava,EdwardLee03/guava,liyazhou/guava,xueyin87/guava-libraries,cogitate/guava-libraries,jedyang/guava,easyfmxu/guava,aditya-chaturvedi/guava,dubu/guava-libraries,gmaes/guava,gvikei/guava-libraries,m3n78am/guava,hannespernpeintner/guava,allalizaki/guava-libraries,tobecrazy/guava,mohanaraosv/guava,rob3ns/guava,fengshao0907/guava-libraries,weihungliu/guava,okaywit/guava-libraries,rgoldberg/guava,levenhdu/guava,sarvex/guava,montycheese/guava,dpursehouse/guava,KengoTODA/guava,cklsoft/guava,GitHub4Lgfei/guava,m3n78am/guava,jiteshmohan/guava,kaoudis/guava,yf0994/guava-libraries,dushmis/guava,chen870647924/guava-libraries,maidh91/guava-libraries,pwz3n0/guava,jankill/guava,BollyCheng/guava,Akshay77/guava,allalizaki/guava-libraries,hannespernpeintner/guava,levenhdu/guava,SyllaJay/guava,0359xiaodong/guava,lgscofield/guava,monokurobo/guava,mway08/guava,berndhopp/guava | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.hash;
import com.google.common.annotations.Beta;
import com.google.common.primitives.Ints;
import java.nio.charset.Charset;
/**
* A hash function is a collision-averse pure function that maps an arbitrary block of
* data to a number called a <i>hash code</i>.
*
* <h3>Definition</h3>
*
* <p>Unpacking this definition:
*
* <ul>
* <li><b>block of data:</b> the input for a hash function is always, in concept, an
* ordered byte array. This hashing API accepts an arbitrary sequence of byte and
* multibyte values (via {@link Hasher}), but this is merely a convenience; these are
* always translated into raw byte sequences under the covers.
*
* <li><b>hash code:</b> each hash function always yields hash codes of the same fixed bit
* length (given by {@link #bits}). For example, {@link Hashing#sha1} produces a
* 160-bit number, while {@link Hashing#murmur3_32()} yields only 32 bits. Because a
* {@code long} value is clearly insufficient to hold all hash code values, this API
* represents a hash code as an instance of {@link HashCode}.
*
* <li><b>pure function:</b> the value produced must depend only on the input bytes, in
* the order they appear. Input data is never modified. {@link HashFunction} instances
* should always be stateless, and therefore thread-safe.
*
* <li><b>collision-averse:</b> while it can't be helped that a hash function will
* sometimes produce the same hash code for distinct inputs (a "collision"), every
* hash function strives to <i>some</i> degree to make this unlikely. (Without this
* condition, a function that always returns zero could be called a hash function. It
* is not.)
* </ul>
*
* <p>Summarizing the last two points: "equal yield equal <i>always</i>; unequal yield
* unequal <i>often</i>." This is the most important characteristic of all hash functions.
*
* <h3>Desirable properties</h3>
*
* <p>A high-quality hash function strives for some subset of the following virtues:
*
* <ul>
* <li><b>collision-resistant:</b> while the definition above requires making at least
* <i>some</i> token attempt, one measure of the quality of a hash function is <i>how
* well</i> it succeeds at this goal. Important note: it may be easy to achieve the
* theoretical minimum collision rate when using completely <i>random</i> sample
* input. The true test of a hash function is how it performs on representative
* real-world data, which tends to contain many hidden patterns and clumps. The goal
* of a good hash function is to stamp these patterns out as thoroughly as possible.
*
* <li><b>bit-dispersing:</b> masking out any <i>single bit</i> from a hash code should
* yield only the expected <i>twofold</i> increase to all collision rates. Informally,
* the "information" in the hash code should be as evenly "spread out" through the
* hash code's bits as possible. The result is that, for example, when choosing a
* bucket in a hash table of size 2^8, <i>any</i> eight bits could be consistently
* used.
*
* <li><b>cryptographic:</b> certain hash functions such as {@link Hashing#sha512} are
* designed to make it as infeasible as possible to reverse-engineer the input that
* produced a given hash code, or even to discover <i>any</i> two distinct inputs that
* yield the same result. These are called <i>cryptographic hash functions</i>. But,
* whenever it is learned that either of these feats has become computationally
* feasible, the function is deemed "broken" and should no longer be used for secure
* purposes. (This is the likely eventual fate of <i>all</i> cryptographic hashes.)
*
* <li><b>fast:</b> perhaps self-explanatory, but often the most important consideration.
* We have published <a href="#noWeHaventYet">microbenchmark results</a> for many
* common hash functions.
* </ul>
*
* <h3>Providing input to a hash function</h3>
*
* <p>The primary way to provide the data that your hash function should act on is via a
* {@link Hasher}. Obtain a new hasher from the hash function using {@link #newHasher},
* "push" the relevant data into it using methods like {@link Hasher#putBytes(byte[])},
* and finally ask for the {@code HashCode} when finished using {@link Hasher#hash}. (See
* an {@linkplain #newHasher example} of this.)
*
* <p>If all you want to hash is a single byte array, string or {@code long} value, there
* are convenient shortcut methods defined directly on {@link HashFunction} to make this
* easier.
*
* <p>Hasher accepts primitive data types, but can also accept any Object of type {@code
* T} provided that you implement a {@link Funnel Funnel<T>} to specify how to "feed" data
* from that object into the function. (See {@linkplain Hasher#putObject an example} of
* this.)
*
* <p><b>Compatibility note:</b> Throughout this API, multibyte values are always
* interpreted in <i>little-endian</i> order. That is, hashing the byte array {@code
* {0x01, 0x02, 0x03, 0x04}} is equivalent to hashing the {@code int} value {@code
* 0x04030201}. If this isn't what you need, methods such as {@link Integer#reverseBytes}
* and {@link Ints#toByteArray} will help.
*
* <h3>Relationship to {@link Object#hashCode}</h3>
*
* <p>Java's baked-in concept of hash codes is constrained to 32 bits, and provides no
* separation between hash algorithms and the data they act on, so alternate hash
* algorithms can't be easily substituted. Also, implementations of {@code hashCode} tend
* to be poor-quality, in part because they end up depending on <i>other</i> existing
* poor-quality {@code hashCode} implementations, including those in many JDK classes.
*
* <p>{@code Object.hashCode} implementations tend to be very fast, but have weak
* collision prevention and <i>no</i> expectation of bit dispersion. This leaves them
* perfectly suitable for use in hash tables, because extra collisions cause only a slight
* performance hit, while poor bit dispersion is easily corrected using a secondary hash
* function (which all reasonable hash table implementations in Java use). For the many
* uses of hash functions beyond data structures, however, {@code Object.hashCode} almost
* always falls short -- hence this library.
*
* @author Kevin Bourrillion
* @since 11.0
*/
@Beta
public interface HashFunction {
  /**
   * Begins a new hash code computation by returning an initialized, stateful {@code
   * Hasher} instance that is ready to receive data. Example: <pre>   {@code
   *
   *   HashFunction hf = Hashing.md5();
   *   HashCode hc = hf.newHasher()
   *       .putLong(id)
   *       .putString(name)
   *       .hash();}</pre>
   */
  Hasher newHasher();

  /**
   * Begins a new hash code computation as {@link #newHasher()}, but provides a hint of the
   * expected size of the input (in bytes). This is only important for non-streaming hash
   * functions (hash functions that need to buffer their whole input before processing any
   * of it).
   *
   * <p>The size is a hint only: supplying more or fewer bytes than {@code
   * expectedInputSize} must still produce a correct result.
   */
  Hasher newHasher(int expectedInputSize);

  /**
   * Shortcut for {@code newHasher().putInt(input).hash()}; returns the hash code for the given
   * {@code int} value, interpreted in little-endian byte order. The implementation <i>might</i>
   * perform better than its longhand equivalent, but should not perform worse.
   *
   * @since 12.0
   */
  HashCode hashInt(int input);

  /**
   * Shortcut for {@code newHasher().putLong(input).hash()}; returns the hash code for the
   * given {@code long} value, interpreted in little-endian byte order. The implementation
   * <i>might</i> perform better than its longhand equivalent, but should not perform worse.
   */
  HashCode hashLong(long input);

  /**
   * Shortcut for {@code newHasher().putBytes(input).hash()}. The implementation
   * <i>might</i> perform better than its longhand equivalent, but should not perform
   * worse. The input array is never modified.
   */
  HashCode hashBytes(byte[] input);

  /**
   * Shortcut for {@code newHasher().putBytes(input, off, len).hash()}. The implementation
   * <i>might</i> perform better than its longhand equivalent, but should not perform
   * worse.
   *
   * @throws IndexOutOfBoundsException if {@code off < 0} or {@code off + len > bytes.length}
   *   or {@code len < 0}
   */
  HashCode hashBytes(byte[] input, int off, int len);

  /**
   * Shortcut for {@code newHasher().putString(input).hash()}. The implementation <i>might</i>
   * perform better than its longhand equivalent, but should not perform worse. Note that no
   * character encoding is performed; the low byte and high byte of each character are hashed
   * directly (in that order). To hash a string's <i>encoded</i> bytes instead, use
   * {@link #hashString(CharSequence, Charset)}.
   */
  HashCode hashString(CharSequence input);

  /**
   * Shortcut for {@code newHasher().putString(input, charset).hash()}. Characters are encoded
   * using the given {@link Charset}. The implementation <i>might</i> perform better than its
   * longhand equivalent, but should not perform worse.
   */
  HashCode hashString(CharSequence input, Charset charset);

  /**
   * Shortcut for {@code newHasher().putObject(instance, funnel).hash()}. The implementation
   * <i>might</i> perform better than its longhand equivalent, but should not perform worse.
   *
   * @since 14.0
   */
  <T> HashCode hashObject(T instance, Funnel<? super T> funnel);

  /**
   * Returns the number of bits (a multiple of 32) that each hash code produced by this
   * hash function has.
   */
  int bits();
}
| guava/src/com/google/common/hash/HashFunction.java | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.hash;
import com.google.common.annotations.Beta;
import com.google.common.primitives.Ints;
import java.nio.charset.Charset;
/**
* A hash function is a collision-averse pure function that maps an arbitrary block of
* data to a number called a <i>hash code</i>.
*
* <h3>Definition</h3>
*
* <p>Unpacking this definition:
*
* <ul>
* <li><b>block of data:</b> the input for a hash function is always, in concept, an
* ordered byte array. This hashing API accepts an arbitrary sequence of byte and
* multibyte values (via {@link Hasher}), but this is merely a convenience; these are
* always translated into raw byte sequences under the covers.
*
* <li><b>hash code:</b> each hash function always yields hash codes of the same fixed bit
* length (given by {@link #bits}). For example, {@link Hashing#sha1} produces a
* 160-bit number, while {@link Hashing#murmur3_32()} yields only 32 bits. Because a
* {@code long} value is clearly insufficient to hold all hash code values, this API
* represents a hash code as an instance of {@link HashCode}.
*
* <li><b>pure function:</b> the value produced must depend only on the input bytes, in
* the order they appear. Input data is never modified. {@link HashFunction} instances
* should always be stateless, and therefore thread-safe.
*
* <li><b>collision-averse:</b> while it can't be helped that a hash function will
* sometimes produce the same hash code for distinct inputs (a "collision"), every
* hash function strives to <i>some</i> degree to make this unlikely. (Without this
* condition, a function that always returns zero could be called a hash function. It
* is not.)
* </ul>
*
* <p>Summarizing the last two points: "equal yield equal <i>always</i>; unequal yield
* unequal <i>often</i>." This is the most important characteristic of all hash functions.
*
* <h3>Desirable properties</h3>
*
* <p>A high-quality hash function strives for some subset of the following virtues:
*
* <ul>
* <li><b>collision-resistant:</b> while the definition above requires making at least
* <i>some</i> token attempt, one measure of the quality of a hash function is <i>how
* well</i> it succeeds at this goal. Important note: it may be easy to achieve the
* theoretical minimum collision rate when using completely <i>random</i> sample
* input. The true test of a hash function is how it performs on representative
* real-world data, which tends to contain many hidden patterns and clumps. The goal
* of a good hash function is to stamp these patterns out as thoroughly as possible.
*
* <li><b>bit-dispersing:</b> masking out any <i>single bit</i> from a hash code should
* yield only the expected <i>twofold</i> increase to all collision rates. Informally,
* the "information" in the hash code should be as evenly "spread out" through the
* hash code's bits as possible. The result is that, for example, when choosing a
* bucket in a hash table of size 2^8, <i>any</i> eight bits could be consistently
* used.
*
* <li><b>cryptographic:</b> certain hash functions such as {@link Hashing#sha512} are
* designed to make it as infeasible as possible to reverse-engineer the input that
* produced a given hash code, or even to discover <i>any</i> two distinct inputs that
* yield the same result. These are called <i>cryptographic hash functions</i>. But,
* whenever it is learned that either of these feats has become computationally
* feasible, the function is deemed "broken" and should no longer be used for secure
* purposes. (This is the likely eventual fate of <i>all</i> cryptographic hashes.)
*
* <li><b>fast:</b> perhaps self-explanatory, but often the most important consideration.
* We have published <a href="#noWeHaventYet">microbenchmark results</a> for many
* common hash functions.
* </ul>
*
* <h3>Providing input to a hash function</h3>
*
* <p>The primary way to provide the data that your hash function should act on is via a
* {@link Hasher}. Obtain a new hasher from the hash function using {@link #newHasher},
* "push" the relevant data into it using methods like {@link Hasher#putBytes(byte[])},
* and finally ask for the {@code HashCode} when finished using {@link Hasher#hash}. (See
* an {@linkplain #newHasher example} of this.)
*
* <p>If all you want to hash is a single byte array, string or {@code long} value, there
* are convenient shortcut methods defined directly on {@link HashFunction} to make this
* easier.
*
* <p>Hasher accepts primitive data types, but can also accept any Object of type {@code
* T} provided that you implement a {@link Funnel Funnel<T>} to specify how to "feed" data
* from that object into the function. (See {@linkplain Hasher#putObject an example} of
* this.)
*
* <p><b>Compatibility note:</b> Throughout this API, multibyte values are always
* interpreted in <i>little-endian</i> order. That is, hashing the byte array {@code
* {0x01, 0x02, 0x03, 0x04}} is equivalent to hashing the {@code int} value {@code
* 0x04030201}. If this isn't what you need, methods such as {@link Integer#reverseBytes}
* and {@link Ints#toByteArray} will help.
*
* <h3>Relationship to {@link Object#hashCode}</h3>
*
* <p>Java's baked-in concept of hash codes is constrained to 32 bits, and provides no
* separation between hash algorithms and the data they act on, so alternate hash
* algorithms can't be easily substituted. Also, implementations of {@code hashCode} tend
* to be poor-quality, in part because they end up depending on <i>other</i> existing
* poor-quality {@code hashCode} implementations, including those in many JDK classes.
*
* <p>{@code Object.hashCode} implementations tend to be very fast, but have weak
* collision prevention and <i>no</i> expectation of bit dispersion. This leaves them
* perfectly suitable for use in hash tables, because extra collisions cause only a slight
* performance hit, while poor bit dispersion is easily corrected using a secondary hash
* function (which all reasonable hash table implementations in Java use). For the many
* uses of hash functions beyond data structures, however, {@code Object.hashCode} almost
* always falls short -- hence this library.
*
* @author Kevin Bourrillion
* @since 11.0
*/
@Beta
public interface HashFunction {
/**
* Begins a new hash code computation by returning an initialized, stateful {@code
* Hasher} instance that is ready to receive data. Example: <pre> {@code
*
* HashFunction hf = Hashing.md5();
* HashCode hc = hf.newHasher()
* .putLong(id)
* .putString(name)
* .hash();}</pre>
*/
Hasher newHasher();
/**
* Begins a new hash code computation as {@link #newHasher()}, but provides a hint of the
* expected size of the input (in bytes). This is only important for non-streaming hash
* functions (hash functions that need to buffer their whole input before processing any
* of it).
*/
Hasher newHasher(int expectedInputSize);
/**
* Shortcut for {@code newHasher().putInt(input).hash()}; returns the hash code for the given
* {@code int} value, interpreted in little-endian byte order. The implementation <i>might</i>
* perform better than its longhand equivalent, but should not perform worse.
*
* @since 12.0
*/
HashCode hashInt(int input);
/**
* Shortcut for {@code newHasher().putLong(input).hash()}; returns the hash code for the
* given {@code long} value, interpreted in little-endian byte order. The implementation
* <i>might</i> perform better than its longhand equivalent, but should not perform worse.
*/
HashCode hashLong(long input);
/**
* Shortcut for {@code newHasher().putBytes(input).hash()}. The implementation
* <i>might</i> perform better than its longhand equivalent, but should not perform
* worse.
*/
HashCode hashBytes(byte[] input);
/**
* Shortcut for {@code newHasher().putBytes(input, off, len).hash()}. The implementation
* <i>might</i> perform better than its longhand equivalent, but should not perform
* worse.
*
* @throws IndexOutOfBoundsException if {@code off < 0} or {@code off + len > bytes.length}
* or {@code len < 0}
*/
HashCode hashBytes(byte[] input, int off, int len);
/**
* Shortcut for {@code newHasher().putString(input).hash()}. The implementation <i>might</i>
* perform better than its longhand equivalent, but should not perform worse. Note that no
* character encoding is performed; the low byte and high byte of each character are hashed
* directly (in that order). This is equivalent to using
* {@code hashString(input, Charsets.UTF_16LE)}.
*/
HashCode hashString(CharSequence input);
/**
* Shortcut for {@code newHasher().putString(input, charset).hash()}. Characters are encoded
* using the given {@link Charset}. The implementation <i>might</i> perform better than its
* longhand equivalent, but should not perform worse.
*/
HashCode hashString(CharSequence input, Charset charset);
/**
* Shortcut for {@code newHasher().putObject(instance, funnel).hash()}. The implementation
* <i>might</i> perform better than its longhand equivalent, but should not perform worse.
*
* @since 14.0
*/
<T> HashCode hashObject(T instance, Funnel<? super T> funnel);
/**
* Returns the number of bits (a multiple of 32) that each hash code produced by this
* hash function has.
*/
int bits();
}
| Remove reference to UTF_16LE equivalence.
-------------
Created by MOE: http://code.google.com/p/moe-java
MOE_MIGRATED_REVID=42919674
| guava/src/com/google/common/hash/HashFunction.java | Remove reference to UTF_16LE equivalence. ------------- Created by MOE: http://code.google.com/p/moe-java MOE_MIGRATED_REVID=42919674 | <ide><path>uava/src/com/google/common/hash/HashFunction.java
<ide> * Shortcut for {@code newHasher().putString(input).hash()}. The implementation <i>might</i>
<ide> * perform better than its longhand equivalent, but should not perform worse. Note that no
<ide> * character encoding is performed; the low byte and high byte of each character are hashed
<del> * directly (in that order). This is equivalent to using
<del> * {@code hashString(input, Charsets.UTF_16LE)}.
<add> * directly (in that order).
<ide> */
<ide> HashCode hashString(CharSequence input);
<ide> |
|
JavaScript | mit | d1ca75a9c9720c058ec0893b21711c30bfce8965 | 0 | marko-js/marko,marko-js/marko | 'use strict';
var ok = require('assert').ok;
var path = require('path');
var taglibLookup = require('./taglib-lookup');
var charProps = require('char-props');
var deresolve = require('./util/deresolve');
var UniqueVars = require('./util/UniqueVars');
var PosInfo = require('./util/PosInfo');
var CompileError = require('./CompileError');
var path = require('path');
var Node = require('./ast/Node');
var macros = require('./util/macros');
function getTaglibPath(taglibPath) {
if (typeof window === 'undefined') {
return path.relative(process.cwd(), taglibPath);
} else {
return taglibPath;
}
}
function removeExt(filename) {
var ext = path.extname(filename);
if (ext) {
return filename.slice(0, 0 - ext.length);
} else {
return filename;
}
}
function requireResolve(builder, path) {
var requireResolveNode = builder.memberExpression(
builder.identifier('require'),
builder.identifier('resolve'));
return builder.functionCall(requireResolveNode, [ path ]);
}
class CompileContext {
constructor(src, filename, builder) {
ok(typeof src === 'string', '"src" string is required');
ok(filename, '"filename" is required');
this.src = src;
this.filename = filename;
this.builder = builder;
this.dirname = path.dirname(filename);
this.taglibLookup = taglibLookup.buildLookup(this.dirname);
this.data = {};
this._staticVars = {};
this._staticCode = null;
this._uniqueVars = new UniqueVars();
this._srcCharProps = null;
this._flags = {};
this._errors = [];
this._macros = null;
this._preserveWhitespace = null;
this._preserveComments = null;
}
getPosInfo(pos) {
var srcCharProps = this._srcCharProps || (this._srcCharProps = charProps(this.src));
let line = srcCharProps.lineAt(pos)+1;
let column = srcCharProps.columnAt(pos);
return new PosInfo(this.filename, line, column);
}
setFlag(name) {
this._flags[name] = true;
}
clearFlag(name) {
delete this._flags[name];
}
isFlagSet(name) {
return this._flags.hasOwnProperty(name);
}
addError(errorInfo) {
if (errorInfo instanceof Node) {
let node = arguments[0];
let message = arguments[1];
let code = arguments[2];
errorInfo = {
node,
message,
code
};
} else if (typeof errorInfo === 'string') {
let message = arguments[0];
let code = arguments[1];
errorInfo = {
message,
code
};
}
this._errors.push(new CompileError(errorInfo, this));
}
hasErrors() {
return this._errors.length !== 0;
}
getErrors() {
return this._errors;
}
getRequirePath(targetFilename) {
return deresolve(targetFilename, this.dirname);
}
addStaticVar(name, init) {
var actualVarName = this._uniqueVars.addVar(name, init);
this._staticVars[actualVarName] = init;
return this.builder.identifier(actualVarName);
}
getStaticVars() {
return this._staticVars;
}
addStaticCode(code) {
if (!code) {
return;
}
if (typeof code === 'string') {
// Wrap the String code in a Code AST node so that
// the code will be indented properly
code = this.builder.code(code);
}
if (this._staticCode == null) {
this._staticCode = [code];
} else {
this._staticCode.push(code);
}
}
getStaticCode() {
return this._staticCode;
}
getTagDef(tagName) {
var taglibLookup = this.taglibLookup;
if (typeof tagName === 'string') {
return taglibLookup.getTag(tagName);
} else {
let elNode = tagName;
if (elNode.tagDef) {
return elNode.tagDef;
}
return taglibLookup.getTag(elNode.tagName);
}
}
createNodeForEl(tagName, attributes, argument, openTagOnly, selfClosed) {
var elDef;
var builder = this.builder;
if (typeof tagName === 'object') {
elDef = tagName;
tagName = elDef.tagName;
attributes = elDef.attributes;
} else {
elDef = { tagName, argument, attributes, openTagOnly, selfClosed };
}
ok(typeof tagName === 'string', 'Invalid "tagName"');
ok(attributes == null || Array.isArray(attributes), 'Invalid "attributes"');
if (!attributes) {
attributes = elDef.attributes = [];
}
var node;
var elNode = builder.htmlElement(elDef);
var taglibLookup = this.taglibLookup;
var tagDef = taglibLookup.getTag(tagName);
if (tagDef) {
var nodeFactoryFunc = tagDef.getNodeFactory();
if (nodeFactoryFunc) {
var newNode = nodeFactoryFunc(elNode, this);
if (!(newNode instanceof Node)) {
throw new Error('Invalid node returned from node factory for tag "' + tagName + '".');
}
if (newNode != node) {
// Make sure the body container is associated with the correct node
if (newNode.body && newNode.body !== node) {
newNode.body = newNode.makeContainer(newNode.body.items);
}
node = newNode;
}
}
}
if (!node) {
node = elNode;
}
node.pos = elDef.pos;
var foundAttrs = {};
// Validate the attributes
attributes.forEach((attr) => {
let attrName = attr.name;
let attrDef = taglibLookup.getAttribute(tagName, attrName);
if (!attrDef) {
if (tagDef) {
// var isAttrForTaglib = compiler.taglibs.isTaglib(attrUri);
//Tag doesn't allow dynamic attributes
this.addError({
node: node,
message: 'The tag "' + tagName + '" in taglib "' + getTaglibPath(tagDef.taglibId) + '" does not support attribute "' + attrName + '"'
});
}
return;
}
attr.def = attrDef;
foundAttrs[attrName] = true;
});
if (tagDef) {
// Add default values for any attributes. If an attribute has a declared
// default value and the attribute was not found on the element
// then add the attribute with the specified default value
tagDef.forEachAttribute((attrDef) => {
var attrName = attrDef.name;
if (attrDef.hasOwnProperty('defaultValue') && !foundAttrs.hasOwnProperty(attrName)) {
attributes.push({
name: attrName,
value: builder.literal(attrDef.defaultValue)
});
} else if (attrDef.required === true) {
// TODO Only throw an error if there is no data argument provided (just HTML attributes)
if (!foundAttrs.hasOwnProperty(attrName)) {
this.addError({
node: node,
message: 'The "' + attrName + '" attribute is required for tag "' + tagName + '" in taglib "' + getTaglibPath(tagDef.taglibId) + '".'
});
}
}
});
node.tagDef = tagDef;
}
return node;
}
isMacro(name) {
if (!this._macros) {
return false;
}
return this._macros.isMacro(name);
}
getRegisteredMacro(name) {
if (!this._macros) {
return undefined;
}
return this._macros.getRegisteredMacro(name);
}
registerMacro(name, params) {
if (!this._macros) {
this._macros = macros.createMacrosContext();
}
return this._macros.registerMacro(name, params);
}
importTemplate(relativePath) {
ok(typeof relativePath === 'string', '"path" should be a string');
var builder = this.builder;
// We want to add the following import:
// var loadTemplate = __helpers.t;
// var template = loadTemplate(require.resolve(<templateRequirePath>))
var loadTemplateVar = this.addStaticVar('loadTemplate', '__helpers.l');
var requireResolveTemplate = requireResolve(builder, builder.literal(relativePath));
var loadFunctionCall = builder.functionCall(loadTemplateVar, [ requireResolveTemplate ]);
var templateVar = this.addStaticVar(removeExt(relativePath), loadFunctionCall);
return templateVar;
}
setPreserveWhitespace(preserveWhitespace) {
this._preserveWhitespace = preserveWhitespace;
}
isPreserveWhitespace() {
return this._preserveWhitespace === true;
}
setPreserveComments(preserveComments) {
this._preserveComments = preserveComments;
}
isPreserveComments() {
return this._preserveComments === true;
}
}
module.exports = CompileContext; | compiler/CompileContext.js | 'use strict';
var ok = require('assert').ok;
var path = require('path');
var taglibLookup = require('./taglib-lookup');
var charProps = require('char-props');
var deresolve = require('./util/deresolve');
var UniqueVars = require('./util/UniqueVars');
var PosInfo = require('./util/PosInfo');
var CompileError = require('./CompileError');
var path = require('path');
var Node = require('./ast/Node');
var macros = require('./util/macros');
function getTaglibPath(taglibPath) {
if (typeof window === 'undefined') {
return path.relative(process.cwd(), taglibPath);
} else {
return taglibPath;
}
}
function removeExt(filename) {
var ext = path.extname(filename);
if (ext) {
return filename.slice(0, 0 - ext.length);
} else {
return filename;
}
}
function requireResolve(builder, path) {
var requireResolveNode = builder.memberExpression(
builder.identifier('require'),
builder.identifier('resolve'));
return builder.functionCall(requireResolveNode, [ path ]);
}
class CompileContext {
constructor(src, filename, builder) {
ok(typeof src === 'string', '"src" string is required');
ok(filename, '"filename" is required');
this.src = src;
this.filename = filename;
this.builder = builder;
this.dirname = path.dirname(filename);
this.taglibLookup = taglibLookup.buildLookup(this.dirname);
this.data = {};
this._staticVars = {};
this._staticCode = null;
this._uniqueVars = new UniqueVars();
this._srcCharProps = null;
this._flags = {};
this._errors = [];
this._macros = null;
this._preserveWhitespace = null;
this._preserveComments = null;
}
getPosInfo(pos) {
var srcCharProps = this._srcCharProps || (this._srcCharProps = charProps(this.src));
let line = srcCharProps.lineAt(pos)+1;
let column = srcCharProps.columnAt(pos);
return new PosInfo(this.filename, line, column);
}
setFlag(name) {
this._flags[name] = true;
}
clearFlag(name) {
delete this._flags[name];
}
isFlagSet(name) {
return this._flags.hasOwnProperty(name);
}
addError(errorInfo) {
if (errorInfo instanceof Node) {
let node = arguments[0];
let message = arguments[1];
let code = arguments[2];
errorInfo = {
node,
message,
code
};
} else if (typeof errorInfo === 'string') {
let message = arguments[0];
let code = arguments[1];
errorInfo = {
message,
code
};
}
this._errors.push(new CompileError(errorInfo, this));
}
hasErrors() {
return this._errors.length !== 0;
}
getErrors() {
return this._errors;
}
getRequirePath(targetFilename) {
return deresolve(targetFilename, this.dirname);
}
addStaticVar(name, init) {
var actualVarName = this._uniqueVars.addVar(name, init);
this._staticVars[actualVarName] = init;
return actualVarName;
}
getStaticVars() {
return this._staticVars;
}
addStaticCode(code) {
if (!code) {
return;
}
if (typeof code === 'string') {
// Wrap the String code in a Code AST node so that
// the code will be indented properly
code = this.builder.code(code);
}
if (this._staticCode == null) {
this._staticCode = [code];
} else {
this._staticCode.push(code);
}
}
getStaticCode() {
return this._staticCode;
}
getTagDef(tagName) {
var taglibLookup = this.taglibLookup;
if (typeof tagName === 'string') {
return taglibLookup.getTag(tagName);
} else {
let elNode = tagName;
if (elNode.tagDef) {
return elNode.tagDef;
}
return taglibLookup.getTag(elNode.tagName);
}
}
createNodeForEl(tagName, attributes, argument, openTagOnly, selfClosed) {
var elDef;
var builder = this.builder;
if (typeof tagName === 'object') {
elDef = tagName;
tagName = elDef.tagName;
attributes = elDef.attributes;
} else {
elDef = { tagName, argument, attributes, openTagOnly, selfClosed };
}
ok(typeof tagName === 'string', 'Invalid "tagName"');
ok(attributes == null || Array.isArray(attributes), 'Invalid "attributes"');
if (!attributes) {
attributes = elDef.attributes = [];
}
var node;
var elNode = builder.htmlElement(elDef);
var taglibLookup = this.taglibLookup;
var tagDef = taglibLookup.getTag(tagName);
if (tagDef) {
var nodeFactoryFunc = tagDef.getNodeFactory();
if (nodeFactoryFunc) {
var newNode = nodeFactoryFunc(elNode, this);
if (!(newNode instanceof Node)) {
throw new Error('Invalid node returned from node factory for tag "' + tagName + '".');
}
if (newNode != node) {
// Make sure the body container is associated with the correct node
if (newNode.body && newNode.body !== node) {
newNode.body = newNode.makeContainer(newNode.body.items);
}
node = newNode;
}
}
}
if (!node) {
node = elNode;
}
node.pos = elDef.pos;
var foundAttrs = {};
// Validate the attributes
attributes.forEach((attr) => {
let attrName = attr.name;
let attrDef = taglibLookup.getAttribute(tagName, attrName);
if (!attrDef) {
if (tagDef) {
// var isAttrForTaglib = compiler.taglibs.isTaglib(attrUri);
//Tag doesn't allow dynamic attributes
this.addError({
node: node,
message: 'The tag "' + tagName + '" in taglib "' + getTaglibPath(tagDef.taglibId) + '" does not support attribute "' + attrName + '"'
});
}
return;
}
attr.def = attrDef;
foundAttrs[attrName] = true;
});
if (tagDef) {
// Add default values for any attributes. If an attribute has a declared
// default value and the attribute was not found on the element
// then add the attribute with the specified default value
tagDef.forEachAttribute((attrDef) => {
var attrName = attrDef.name;
if (attrDef.hasOwnProperty('defaultValue') && !foundAttrs.hasOwnProperty(attrName)) {
attributes.push({
name: attrName,
value: builder.literal(attrDef.defaultValue)
});
} else if (attrDef.required === true) {
// TODO Only throw an error if there is no data argument provided (just HTML attributes)
if (!foundAttrs.hasOwnProperty(attrName)) {
this.addError({
node: node,
message: 'The "' + attrName + '" attribute is required for tag "' + tagName + '" in taglib "' + getTaglibPath(tagDef.taglibId) + '".'
});
}
}
});
node.tagDef = tagDef;
}
return node;
}
isMacro(name) {
if (!this._macros) {
return false;
}
return this._macros.isMacro(name);
}
getRegisteredMacro(name) {
if (!this._macros) {
return undefined;
}
return this._macros.getRegisteredMacro(name);
}
registerMacro(name, params) {
if (!this._macros) {
this._macros = macros.createMacrosContext();
}
return this._macros.registerMacro(name, params);
}
importTemplate(relativePath) {
ok(typeof relativePath === 'string', '"path" should be a string');
var builder = this.builder;
// We want to add the following import:
// var loadTemplate = __helpers.t;
// var template = loadTemplate(require.resolve(<templateRequirePath>))
var loadTemplateVar = this.addStaticVar('loadTemplate', '__helpers.l');
var requireResolveTemplate = requireResolve(builder, builder.literal(relativePath));
var loadFunctionCall = builder.functionCall(loadTemplateVar, [ requireResolveTemplate ]);
var templateVar = this.addStaticVar(removeExt(relativePath), loadFunctionCall);
return templateVar;
}
setPreserveWhitespace(preserveWhitespace) {
this._preserveWhitespace = preserveWhitespace;
}
isPreserveWhitespace() {
return this._preserveWhitespace === true;
}
setPreserveComments(preserveComments) {
this._preserveComments = preserveComments;
}
isPreserveComments() {
return this._preserveComments === true;
}
}
module.exports = CompileContext; | Marko v3: Wrap returned static variable name in an Identifier node
| compiler/CompileContext.js | Marko v3: Wrap returned static variable name in an Identifier node | <ide><path>ompiler/CompileContext.js
<ide> addStaticVar(name, init) {
<ide> var actualVarName = this._uniqueVars.addVar(name, init);
<ide> this._staticVars[actualVarName] = init;
<del> return actualVarName;
<add> return this.builder.identifier(actualVarName);
<ide> }
<ide>
<ide> getStaticVars() { |
|
Java | apache-2.0 | 78f6655d0fd3520f848584d12b56918a9d4ff5d6 | 0 | inferred/FreeBuilder | /*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.inferred.freebuilder.processor;
import static org.inferred.freebuilder.processor.ElementFactory.INTEGERS;
import static org.inferred.freebuilder.processor.ElementFactory.STRINGS;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.datatype.guava.GuavaModule;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.testing.EqualsTester;
import org.inferred.freebuilder.FreeBuilder;
import org.inferred.freebuilder.processor.util.feature.FeatureSet;
import org.inferred.freebuilder.processor.util.feature.GuavaLibrary;
import org.inferred.freebuilder.processor.util.feature.SourceLevel;
import org.inferred.freebuilder.processor.util.testing.BehaviorTester;
import org.inferred.freebuilder.processor.util.testing.ParameterizedBehaviorTestFactory;
import org.inferred.freebuilder.processor.util.testing.ParameterizedBehaviorTestFactory.Shared;
import org.inferred.freebuilder.processor.util.testing.SourceBuilder;
import org.inferred.freebuilder.processor.util.testing.TestBuilder;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import java.util.Arrays;
import java.util.List;
import javax.tools.JavaFileObject;
/** Behavioral tests for {@code Optional<?>} properties. */
@RunWith(Parameterized.class)
@UseParametersRunnerFactory(ParameterizedBehaviorTestFactory.class)
public class OptionalPropertyTest {
@SuppressWarnings("unchecked")
@Parameters(name = "{0}<{1}>, {2}, {3}")
public static Iterable<Object[]> parameters() {
List<Class<?>> optionals = Arrays.asList(
java.util.Optional.class,
com.google.common.base.Optional.class);
List<ElementFactory> elements = Arrays.asList(INTEGERS, STRINGS);
List<NamingConvention> conventions = Arrays.asList(NamingConvention.values());
List<FeatureSet> features = FeatureSets.ALL;
return () -> Lists
.cartesianProduct(optionals, elements, conventions, features)
.stream()
.filter(parameters -> {
Class<?> optional = (Class<?>) parameters.get(0);
FeatureSet featureSet = (FeatureSet) parameters.get(3);
if (optional.equals(com.google.common.base.Optional.class)
&& !featureSet.get(GuavaLibrary.GUAVA).isAvailable()) {
return false;
}
if (optional.equals(java.util.Optional.class)
&& !featureSet.get(SourceLevel.SOURCE_LEVEL).hasLambdas()) {
return false;
}
return true;
})
.map(List::toArray)
.iterator();
}
@Rule public final ExpectedException thrown = ExpectedException.none();
@Shared public BehaviorTester behaviorTester;
private final Class<?> optional;
private final ElementFactory element;
private final NamingConvention convention;
private final FeatureSet features;
private final String empty;
private final JavaFileObject oneProperty;
private final JavaFileObject twoProperties;
private final JavaFileObject validatedProperty;
public OptionalPropertyTest(
Class<?> optional,
ElementFactory element,
NamingConvention convention,
FeatureSet features) {
this.optional = optional;
this.element = element;
this.convention = convention;
this.features = features;
this.empty = optional.getName().startsWith("com.google") ? "absent" : "empty";
oneProperty = new SourceBuilder()
.addLine("package com.example;")
.addLine("@%s", FreeBuilder.class)
.addLine("public abstract class DataType {")
.addLine(" public abstract %s<%s> %s;",
optional, element.type(), convention.get("item"))
.addLine("")
.addLine(" public static class Builder extends DataType_Builder {}")
.addLine(" public static Builder builder() {")
.addLine(" return new Builder();")
.addLine(" }")
.addLine("}")
.build();
twoProperties = new SourceBuilder()
.addLine("package com.example;")
.addLine("@%s", FreeBuilder.class)
.addLine("public abstract class DataType {")
.addLine(" public abstract %s<%s> %s;",
optional, element.type(), convention.get("item1"))
.addLine(" public abstract %s<%s> %s;",
optional, element.type(), convention.get("item2"))
.addLine("")
.addLine(" public static class Builder extends DataType_Builder {}")
.addLine(" public static Builder builder() {")
.addLine(" return new Builder();")
.addLine(" }")
.addLine("}")
.build();
validatedProperty = new SourceBuilder()
.addLine("package com.example;")
.addLine("@%s", FreeBuilder.class)
.addLine("public abstract class DataType {")
.addLine(" public abstract %s<%s> %s;",
optional, element.type(), convention.get("item"))
.addLine("")
.addLine(" public static class Builder extends DataType_Builder {")
.addLine(" @Override public Builder %s(%s item) {",
convention.set("item"), element.unwrappedType())
.addLine(" if (!(%s)) {", element.validation("item"))
.addLine(" throw new IllegalArgumentException(\"%s\");", element.errorMessage())
.addLine(" }")
.addLine(" return super.%s(item);", convention.set("item"))
.addLine(" }")
.addLine(" @Override public Builder clearItem() {")
.addLine(" throw new UnsupportedOperationException(\"Clearing prohibited\");")
.addLine(" }")
.addLine(" }")
.addLine("")
.addLine(" public static Builder builder() {")
.addLine(" return new Builder();")
.addLine(" }")
.addLine("}")
.build();
}
@Test
public void testConstructor_defaultAbsent() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder().build();")
.addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
.build())
.runTest();
}
@Test
public void testBuilderGetter_defaultValue() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType.Builder builder = new DataType.Builder();")
.addLine("assertEquals(%s.%s(), builder.%s);",
optional, empty, convention.get("item"))
.build())
.runTest();
}
@Test
public void testBuilderGetter_nonDefaultValue() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType.Builder builder = new DataType.Builder()")
.addLine(" .%s(%s);", convention.set("item"), element.example(0))
.addLine("assertEquals(%s.of(%s), builder.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testSet_notNull() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .build();")
.addLine("assertEquals(%s.of(%s), value.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testSet_null() {
thrown.expect(NullPointerException.class);
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("new DataType.Builder().%s((%s) null);",
convention.set("item"), element.type())
.build())
.runTest();
}
@Test
public void testSet_optionalOf() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s.of(%s))", convention.set("item"),
optional, element.example(0))
.addLine(" .build();")
.addLine("assertEquals(%s.of(%s), value.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testSet_empty() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s.<%s>%s())",
convention.set("item"), optional, element.type(), empty)
.addLine(" .build();")
.addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
.build())
.runTest();
}
@Test
public void testSet_nullOptional() {
thrown.expect(NullPointerException.class);
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("new DataType.Builder().%s((%s<%s>) null);",
convention.set("item"), optional, element.type())
.build())
.runTest();
}
@Test
public void testSetNullable_notNull() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s)", convention.set("nullableItem"), element.example(0))
.addLine(" .build();")
.addLine("assertEquals(%s.of(%s), value.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testSetNullable_null() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(null)", convention.set("nullableItem"))
.addLine(" .build();")
.addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
.build())
.runTest();
}
@Test
public void testClear() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .clearItem()")
.addLine(" .build();")
.addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
.build())
.runTest();
}
@Test
public void testMergeFrom_valueInstance() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = DataType.builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .build();")
.addLine("DataType.Builder builder = DataType.builder()")
.addLine(" .mergeFrom(value);")
.addLine("assertEquals(%s.of(%s), builder.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testMergeFrom_builder() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType.Builder template = DataType.builder()")
.addLine(" .%s(%s);", convention.set("item"), element.example(0))
.addLine("DataType.Builder builder = DataType.builder()")
.addLine(" .mergeFrom(template);")
.addLine("assertEquals(%s.of(%s), builder.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testMergeFrom_valueInstance_emptyOptional() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = DataType.builder()")
.addLine(" .build();")
.addLine("DataType.Builder builder = DataType.builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .mergeFrom(value);")
.addLine("assertEquals(%s.of(%s), builder.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testMergeFrom_builder_emptyOptional() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType.Builder template = DataType.builder();")
.addLine("DataType.Builder builder = DataType.builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .mergeFrom(template);")
.addLine("assertEquals(%s.of(%s), builder.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
@Test
public void testBuilderClear() {
behaviorTester
.with(new Processor(features))
.with(oneProperty)
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .clear()")
.addLine(" .build();")
.addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
.build())
.runTest();
}
@Test
public void testBuilderClear_customDefault() {
behaviorTester
.with(new Processor(features))
.with(new SourceBuilder()
.addLine("package com.example;")
.addLine("@%s", FreeBuilder.class)
.addLine("public abstract class DataType {")
.addLine(" public abstract %s<%s> %s;",
optional, element.type(), convention.get("item"))
.addLine("")
.addLine(" public static class Builder extends DataType_Builder {}")
.addLine(" public static Builder builder() {")
.addLine(" return new Builder().%s(%s);", convention.set("item"), element.example(3))
.addLine(" }")
.addLine("}")
.build())
.with(testBuilder()
.addLine("DataType value = DataType.builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .clear()")
.addLine(" .build();")
.addLine("assertEquals(%s.of(%s), value.%s);",
optional, element.example(3), convention.get("item"))
.build())
.runTest();
}
// Without a no-arg builder() factory (only a value-taking constructor),
// clear() falls back to emptying the property.
@Test
public void testBuilderClear_noBuilderFactory() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s> %s;",
              optional, element.type(), convention.get("item"))
          .addLine("")
          .addLine(" public static class Builder extends DataType_Builder {")
          .addLine(" public Builder(%s s) {", element.unwrappedType())
          .addLine(" %s(s);", convention.set("item"))
          .addLine(" }")
          .addLine(" }")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder(%s)", element.example(0))
          .addLine(" .clear()")
          .addLine(" .build();")
          .addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// The user-overridden setter's validation must fire even when the value
// arrives wrapped in Optional.of(...).
@Test
public void testCustomization_optionalOf() {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(element.errorMessage());
  behaviorTester
      .with(new Processor(features))
      .with(validatedProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(%s.of(%s));",
              convention.set("item"), optional, element.invalidExample())
          .build())
      .runTest();
}
// The overridden setter's validation must also fire via the nullable setter.
@Test
public void testCustomization_nullable() {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(element.errorMessage());
  behaviorTester
      .with(new Processor(features))
      .with(validatedProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(%s);", convention.set("nullableItem"), element.invalidExample())
          .build())
      .runTest();
}
// Setting an explicitly-empty Optional must route through the (overridden,
// prohibited) clearItem() method, so its exception propagates.
@Test
public void testCustomization_empty() {
  thrown.expectMessage("Clearing prohibited");
  behaviorTester
      .with(new Processor(features))
      .with(validatedProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(%s.<%s>%s());",
              convention.set("item"), optional, element.type(), empty)
          .build())
      .runTest();
}
// Passing null to the nullable setter must route through the overridden
// clearItem() method, so its exception ("Fooled you!") propagates.
@Test
public void testCustomization_null() {
  thrown.expectMessage("Fooled you!");
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s> %s;",
              optional, String.class, convention.get("item"))
          .addLine("")
          .addLine(" public static class Builder extends DataType_Builder {")
          .addLine(" @Override public Builder clearItem() {")
          .addLine(" throw new UnsupportedOperationException(\"Fooled you!\");")
          .addLine(" }")
          .addLine(" }")
          .addLine("")
          .addLine(" public static Builder builder() {")
          .addLine(" return new Builder();")
          .addLine(" }")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(null);", convention.set("nullableItem"))
          .build())
      .runTest();
}
// Default-absent, explicitly-empty, and null-set instances form one equality
// group; instances with the value set (directly or via Optional.of) form another.
@Test
public void testEquality() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("new %s()", EqualsTester.class)
          .addLine(" .addEqualityGroup(")
          .addLine(" DataType.builder().build(),")
          .addLine(" DataType.builder()")
          .addLine(" .%s(%s.<%s>%s())",
              convention.set("item"), optional, element.type(), empty)
          .addLine(" .build(),")
          .addLine(" DataType.builder()")
          .addLine(" .%s(null)", convention.set("nullableItem"))
          .addLine(" .build())")
          .addLine(" .addEqualityGroup(")
          .addLine(" DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .build(),")
          .addLine(" DataType.builder()")
          .addLine(" .%s(%s.of(%s))",
              convention.set("item"), optional, element.example(0))
          .addLine(" .build())")
          .addLine(" .testEquals();")
          .build())
      .runTest();
}
// toString() omits an absent property and prints a present one as item=<value>.
@Test
public void testValueToString_singleField() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType empty = DataType.builder()")
          .addLine(" .build();")
          .addLine("DataType present = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .build();")
          .addLine("assertEquals(\"DataType{}\", empty.toString());")
          .addLine("assertEquals(\"DataType{item=\" + %s + \"}\", present.toString());",
              element.example(0))
          .build())
      .runTest();
}
// With two optional properties, toString() includes only the present ones,
// covering all four present/absent combinations.
@Test
public void testValueToString_twoFields() {
  behaviorTester
      .with(new Processor(features))
      .with(twoProperties)
      .with(testBuilder()
          .addLine("DataType aa = DataType.builder()")
          .addLine(" .build();")
          .addLine("DataType pa = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .build();")
          .addLine("DataType ap = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .build();")
          .addLine("DataType pp = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .build();")
          .addLine("assertEquals(\"DataType{}\", aa.toString());")
          .addLine("assertEquals(\"DataType{item1=\" + %s + \"}\", pa.toString());",
              element.example(0))
          .addLine("assertEquals(\"DataType{item2=\" + %s + \"}\", ap.toString());",
              element.example(1))
          .addLine("assertEquals(\"DataType{item1=\" + %s + \","
              + " item2=\" + %s + \"}\", pp.toString());",
              element.example(0), element.example(1))
          .build())
      .runTest();
}
// buildPartial() values print with a "partial " prefix; absent property omitted.
@Test
public void testPartialToString_singleField() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType empty = DataType.builder()")
          .addLine(" .buildPartial();")
          .addLine("DataType present = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .buildPartial();")
          .addLine("assertEquals(\"partial DataType{}\", empty.toString());")
          .addLine("assertEquals(\"partial DataType{item=\" + %s + \"}\", present.toString());",
              element.example(0))
          .build())
      .runTest();
}
// Partial toString() with two properties: "partial " prefix plus only the
// present properties, across all four combinations.
@Test
public void testPartialToString_twoFields() {
  behaviorTester
      .with(new Processor(features))
      .with(twoProperties)
      .with(testBuilder()
          .addLine("DataType aa = DataType.builder()")
          .addLine(" .buildPartial();")
          .addLine("DataType pa = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .buildPartial();")
          .addLine("DataType ap = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .buildPartial();")
          .addLine("DataType pp = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .buildPartial();")
          .addLine("assertEquals(\"partial DataType{}\", aa.toString());")
          .addLine("assertEquals(\"partial DataType{item1=\" + %s + \"}\", pa.toString());",
              element.example(0))
          .addLine("assertEquals(\"partial DataType{item2=\" + %s + \"}\", ap.toString());",
              element.example(1))
          .addLine("assertEquals(\"partial DataType{item1=\" + %s + \","
              + " item2=\" + %s + \"}\", pp.toString());",
              element.example(0), element.example(1))
          .build())
      .runTest();
}
// Optional<List<Number>> (no wildcard): generated setter accepts an
// ImmutableList of the exact element type.
@Test
public void testWildcardHandling_noWildcard() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s<%s>> %s;",
              optional, List.class, Number.class, convention.get("items"))
          .addLine(" public static class Builder extends DataType_Builder {}")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s.of((%s) 1, 2, 3, 4))",
              convention.set("items"), ImmutableList.class, Number.class)
          .addLine(" .build();")
          .addLine("assertThat(value.%s.get()).containsExactly(1, 2, 3, 4).inOrder();",
              convention.get("items"))
          .build())
      .runTest();
}
// Optional<List<?>> (unbounded wildcard): setter still accepts a concrete list.
@Test
public void testWildcardHandling_unboundedWildcard() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s<?>> %s;",
              optional, List.class, convention.get("items"))
          .addLine(" public static class Builder extends DataType_Builder {}")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s.of(1, 2, 3, 4))", convention.set("items"), ImmutableList.class)
          .addLine(" .build();")
          .addLine("assertThat(value.%s.get()).containsExactly(1, 2, 3, 4).inOrder();",
              convention.get("items"))
          .build())
      .runTest();
}
// Optional<List<? extends Number>>: setter accepts a list of a subtype.
@Test
public void testWildcardHandling_wildcardWithExtendsBound() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s<? extends %s>> %s;",
              optional, List.class, Number.class, convention.get("items"))
          .addLine(" public static class Builder extends DataType_Builder {}")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s.of(1, 2, 3, 4))", convention.set("items"), ImmutableList.class)
          .addLine(" .build();")
          .addLine("assertThat(value.%s.get()).containsExactly(1, 2, 3, 4).inOrder();",
              convention.get("items"))
          .build())
      .runTest();
}
// Round-trips a built value through Jackson, registering GuavaModule or
// Jdk8Module to match the Optional flavor under test.
@Test
public void testJacksonInteroperability() {
  // See also https://github.com/google/FreeBuilder/issues/68
  Class<? extends Module> module =
      optional.getName().startsWith("com.google") ? GuavaModule.class : Jdk8Module.class;
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("import " + JsonProperty.class.getName() + ";")
          .addLine("@%s", FreeBuilder.class)
          .addLine("@%s(builder = DataType.Builder.class)", JsonDeserialize.class)
          .addLine("public interface DataType {")
          .addLine(" @JsonProperty(\"stuff\") %s<%s> %s;",
              optional, element.type(), convention.get("item"))
          .addLine("")
          .addLine(" class Builder extends DataType_Builder {}")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .build();")
          .addLine("%1$s mapper = new %1$s()", ObjectMapper.class)
          .addLine(" .registerModule(new %s());", module)
          .addLine("String json = mapper.writeValueAsString(value);")
          .addLine("DataType clone = mapper.readValue(json, DataType.class);")
          .addLine("assertEquals(%s.of(%s), clone.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// Creates a TestBuilder pre-loaded with the import every generated test snippet needs.
private static TestBuilder testBuilder() {
  TestBuilder builder = new TestBuilder();
  return builder.addImport("com.example.DataType");
}
}
| src/test/java/org/inferred/freebuilder/processor/OptionalPropertyTest.java | /*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.inferred.freebuilder.processor;
import static org.inferred.freebuilder.processor.ElementFactory.INTEGERS;
import static org.inferred.freebuilder.processor.ElementFactory.STRINGS;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.datatype.guava.GuavaModule;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.testing.EqualsTester;
import org.inferred.freebuilder.FreeBuilder;
import org.inferred.freebuilder.processor.util.feature.FeatureSet;
import org.inferred.freebuilder.processor.util.feature.GuavaLibrary;
import org.inferred.freebuilder.processor.util.feature.SourceLevel;
import org.inferred.freebuilder.processor.util.testing.BehaviorTester;
import org.inferred.freebuilder.processor.util.testing.ParameterizedBehaviorTestFactory;
import org.inferred.freebuilder.processor.util.testing.ParameterizedBehaviorTestFactory.Shared;
import org.inferred.freebuilder.processor.util.testing.SourceBuilder;
import org.inferred.freebuilder.processor.util.testing.TestBuilder;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import java.util.Arrays;
import java.util.List;
import javax.tools.JavaFileObject;
/** Behavioral tests for {@code Optional<?>} properties. */
@RunWith(Parameterized.class)
@UseParametersRunnerFactory(ParameterizedBehaviorTestFactory.class)
public class OptionalPropertyTest {
// Produces the cartesian product of (Optional flavor, element type, naming
// convention, feature set), filtering out combinations the toolchain cannot
// compile.
@SuppressWarnings("unchecked")
@Parameters(name = "Optional<{0}>, {1}, {2}")
public static Iterable<Object[]> parameters() {
  List<Class<?>> optionals = Arrays.asList(
      java.util.Optional.class,
      com.google.common.base.Optional.class);
  List<ElementFactory> elements = Arrays.asList(INTEGERS, STRINGS);
  List<NamingConvention> conventions = Arrays.asList(NamingConvention.values());
  List<FeatureSet> features = FeatureSets.WITH_GUAVA;
  return () -> Lists
      .cartesianProduct(optionals, elements, conventions, features)
      .stream()
      .filter(parameters -> {
        Class<?> optional = (Class<?>) parameters.get(0);
        FeatureSet featureSet = (FeatureSet) parameters.get(3);
        // Guava's Optional requires the Guava library to be available.
        if (optional.equals(com.google.common.base.Optional.class)
            && !featureSet.get(GuavaLibrary.GUAVA).isAvailable()) {
          return false;
        }
        // java.util.Optional requires a Java 8+ source level (lambdas).
        if (optional.equals(java.util.Optional.class)
            && !featureSet.get(SourceLevel.SOURCE_LEVEL).hasLambdas()) {
          return false;
        }
        return true;
      })
      .map(List::toArray)
      .iterator();
}
// Expected-exception rule used by the validation-failure tests.
@Rule public final ExpectedException thrown = ExpectedException.none();
// Shared compiler/test harness, injected per @Shared by the parameterized runner factory.
@Shared public BehaviorTester behaviorTester;
// Current parameter tuple: Optional flavor, element type, naming convention, features.
private final Class<?> optional;
private final ElementFactory element;
private final NamingConvention convention;
private final FeatureSet features;
// Name of the empty-factory method: "absent" for Guava's Optional, "empty" for java.util's.
private final String empty;
// Pre-built @FreeBuilder sources compiled by the tests below.
private final JavaFileObject oneProperty;
private final JavaFileObject twoProperties;
private final JavaFileObject validatedProperty;
// Captures the parameter tuple and pre-builds the three @FreeBuilder sources
// shared by the tests: one optional property, two optional properties, and a
// property whose setter is overridden to validate its argument.
public OptionalPropertyTest(
    Class<?> optional,
    ElementFactory element,
    NamingConvention convention,
    FeatureSet features) {
  this.optional = optional;
  this.element = element;
  this.convention = convention;
  this.features = features;
  // Guava spells the empty factory "absent"; java.util spells it "empty".
  this.empty = optional.getName().startsWith("com.google") ? "absent" : "empty";
  // DataType with a single optional property and a builder() factory.
  oneProperty = new SourceBuilder()
      .addLine("package com.example;")
      .addLine("@%s", FreeBuilder.class)
      .addLine("public abstract class DataType {")
      .addLine(" public abstract %s<%s> %s;",
          optional, element.type(), convention.get("item"))
      .addLine("")
      .addLine(" public static class Builder extends DataType_Builder {}")
      .addLine(" public static Builder builder() {")
      .addLine(" return new Builder();")
      .addLine(" }")
      .addLine("}")
      .build();
  // DataType with two optional properties (item1, item2).
  twoProperties = new SourceBuilder()
      .addLine("package com.example;")
      .addLine("@%s", FreeBuilder.class)
      .addLine("public abstract class DataType {")
      .addLine(" public abstract %s<%s> %s;",
          optional, element.type(), convention.get("item1"))
      .addLine(" public abstract %s<%s> %s;",
          optional, element.type(), convention.get("item2"))
      .addLine("")
      .addLine(" public static class Builder extends DataType_Builder {}")
      .addLine(" public static Builder builder() {")
      .addLine(" return new Builder();")
      .addLine(" }")
      .addLine("}")
      .build();
  // DataType whose Builder overrides the setter to validate, and prohibits clearItem().
  validatedProperty = new SourceBuilder()
      .addLine("package com.example;")
      .addLine("@%s", FreeBuilder.class)
      .addLine("public abstract class DataType {")
      .addLine(" public abstract %s<%s> %s;",
          optional, element.type(), convention.get("item"))
      .addLine("")
      .addLine(" public static class Builder extends DataType_Builder {")
      .addLine(" @Override public Builder %s(%s item) {",
          convention.set("item"), element.unwrappedType())
      .addLine(" if (!(%s)) {", element.validation("item"))
      .addLine(" throw new IllegalArgumentException(\"%s\");", element.errorMessage())
      .addLine(" }")
      .addLine(" return super.%s(item);", convention.set("item"))
      .addLine(" }")
      .addLine(" @Override public Builder clearItem() {")
      .addLine(" throw new UnsupportedOperationException(\"Clearing prohibited\");")
      .addLine(" }")
      .addLine(" }")
      .addLine("")
      .addLine(" public static Builder builder() {")
      .addLine(" return new Builder();")
      .addLine(" }")
      .addLine("}")
      .build();
}
// A value built without setting the property has an empty optional.
@Test
public void testConstructor_defaultAbsent() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder().build();")
          .addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// The builder's getter reports empty before anything is set.
@Test
public void testBuilderGetter_defaultValue() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType.Builder builder = new DataType.Builder();")
          .addLine("assertEquals(%s.%s(), builder.%s);",
              optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// The builder's getter reflects a value set via the plain setter.
@Test
public void testBuilderGetter_nonDefaultValue() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType.Builder builder = new DataType.Builder()")
          .addLine(" .%s(%s);", convention.set("item"), element.example(0))
          .addLine("assertEquals(%s.of(%s), builder.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// Setting a non-null raw value yields Optional.of(value) on the built instance.
@Test
public void testSet_notNull() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .build();")
          .addLine("assertEquals(%s.of(%s), value.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// The plain (non-nullable) setter rejects null with a NullPointerException.
@Test
public void testSet_null() {
  thrown.expect(NullPointerException.class);
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("new DataType.Builder().%s((%s) null);",
              convention.set("item"), element.type())
          .build())
      .runTest();
}
// Setting via Optional.of(value) behaves like setting the raw value.
@Test
public void testSet_optionalOf() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s.of(%s))", convention.set("item"),
              optional, element.example(0))
          .addLine(" .build();")
          .addLine("assertEquals(%s.of(%s), value.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// Setting an explicitly-empty Optional leaves the built property empty.
@Test
public void testSet_empty() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s.<%s>%s())",
              convention.set("item"), optional, element.type(), empty)
          .addLine(" .build();")
          .addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// Passing a null Optional reference (not an empty one) throws NullPointerException.
@Test
public void testSet_nullOptional() {
  thrown.expect(NullPointerException.class);
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("new DataType.Builder().%s((%s<%s>) null);",
              convention.set("item"), optional, element.type())
          .build())
      .runTest();
}
// The nullable setter with a non-null value behaves like the plain setter.
@Test
public void testSetNullable_notNull() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s)", convention.set("nullableItem"), element.example(0))
          .addLine(" .build();")
          .addLine("assertEquals(%s.of(%s), value.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// The nullable setter with null clears the property (built value is empty).
@Test
public void testSetNullable_null() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(null)", convention.set("nullableItem"))
          .addLine(" .build();")
          .addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// clearItem() after a set leaves the built property empty.
@Test
public void testClear() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .clearItem()")
          .addLine(" .build();")
          .addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// mergeFrom(value) copies a present property from a built instance.
@Test
public void testMergeFrom_valueInstance() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .build();")
          .addLine("DataType.Builder builder = DataType.builder()")
          .addLine(" .mergeFrom(value);")
          .addLine("assertEquals(%s.of(%s), builder.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// mergeFrom(Builder) copies a present property from a template builder.
@Test
public void testMergeFrom_builder() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(%s);", convention.set("item"), element.example(0))
          .addLine("DataType.Builder builder = DataType.builder()")
          .addLine(" .mergeFrom(template);")
          .addLine("assertEquals(%s.of(%s), builder.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// Merging from a value with an empty optional must not clear a value already
// set on the receiving builder.
@Test
public void testMergeFrom_valueInstance_emptyOptional() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = DataType.builder()")
          .addLine(" .build();")
          .addLine("DataType.Builder builder = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .mergeFrom(value);")
          .addLine("assertEquals(%s.of(%s), builder.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// Merging from a template builder whose optional property is empty must not
// clear a value already set on the receiving builder.
@Test
public void testMergeFrom_builder_emptyOptional() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder();")
          .addLine("DataType.Builder builder = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .mergeFrom(template);")
          .addLine("assertEquals(%s.of(%s), builder.%s);",
              optional, element.example(0), convention.get("item"))
          .build())
      .runTest();
}
// Builder.clear() resets the optional property back to empty.
@Test
public void testBuilderClear() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .clear()")
          .addLine(" .build();")
          .addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// When the builder() factory presets a default (example(3) here), clear()
// restores that default rather than emptying the property.
@Test
public void testBuilderClear_customDefault() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s> %s;",
              optional, element.type(), convention.get("item"))
          .addLine("")
          .addLine(" public static class Builder extends DataType_Builder {}")
          .addLine(" public static Builder builder() {")
          .addLine(" return new Builder().%s(%s);", convention.set("item"), element.example(3))
          .addLine(" }")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .clear()")
          .addLine(" .build();")
          .addLine("assertEquals(%s.of(%s), value.%s);",
              optional, element.example(3), convention.get("item"))
          .build())
      .runTest();
}
// Without a no-arg builder() factory (only a value-taking constructor),
// clear() falls back to emptying the property.
@Test
public void testBuilderClear_noBuilderFactory() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s> %s;",
              optional, element.type(), convention.get("item"))
          .addLine("")
          .addLine(" public static class Builder extends DataType_Builder {")
          .addLine(" public Builder(%s s) {", element.unwrappedType())
          .addLine(" %s(s);", convention.set("item"))
          .addLine(" }")
          .addLine(" }")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder(%s)", element.example(0))
          .addLine(" .clear()")
          .addLine(" .build();")
          .addLine("assertEquals(%s.%s(), value.%s);", optional, empty, convention.get("item"))
          .build())
      .runTest();
}
// The user-overridden setter's validation must fire even when the value
// arrives wrapped in Optional.of(...).
@Test
public void testCustomization_optionalOf() {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(element.errorMessage());
  behaviorTester
      .with(new Processor(features))
      .with(validatedProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(%s.of(%s));",
              convention.set("item"), optional, element.invalidExample())
          .build())
      .runTest();
}
// The overridden setter's validation must also fire via the nullable setter.
@Test
public void testCustomization_nullable() {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(element.errorMessage());
  behaviorTester
      .with(new Processor(features))
      .with(validatedProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(%s);", convention.set("nullableItem"), element.invalidExample())
          .build())
      .runTest();
}
// Setting an explicitly-empty Optional must route through the (overridden,
// prohibited) clearItem() method, so its exception propagates.
@Test
public void testCustomization_empty() {
  thrown.expectMessage("Clearing prohibited");
  behaviorTester
      .with(new Processor(features))
      .with(validatedProperty)
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(%s.<%s>%s());",
              convention.set("item"), optional, element.type(), empty)
          .build())
      .runTest();
}
// Passing null to the nullable setter must route through the overridden
// clearItem() method, so its exception ("Fooled you!") propagates.
@Test
public void testCustomization_null() {
  thrown.expectMessage("Fooled you!");
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s> %s;",
              optional, String.class, convention.get("item"))
          .addLine("")
          .addLine(" public static class Builder extends DataType_Builder {")
          .addLine(" @Override public Builder clearItem() {")
          .addLine(" throw new UnsupportedOperationException(\"Fooled you!\");")
          .addLine(" }")
          .addLine(" }")
          .addLine("")
          .addLine(" public static Builder builder() {")
          .addLine(" return new Builder();")
          .addLine(" }")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType.Builder template = DataType.builder()")
          .addLine(" .%s(null);", convention.set("nullableItem"))
          .build())
      .runTest();
}
// Default-absent, explicitly-empty, and null-set instances form one equality
// group; instances with the value set (directly or via Optional.of) form another.
@Test
public void testEquality() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("new %s()", EqualsTester.class)
          .addLine(" .addEqualityGroup(")
          .addLine(" DataType.builder().build(),")
          .addLine(" DataType.builder()")
          .addLine(" .%s(%s.<%s>%s())",
              convention.set("item"), optional, element.type(), empty)
          .addLine(" .build(),")
          .addLine(" DataType.builder()")
          .addLine(" .%s(null)", convention.set("nullableItem"))
          .addLine(" .build())")
          .addLine(" .addEqualityGroup(")
          .addLine(" DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .build(),")
          .addLine(" DataType.builder()")
          .addLine(" .%s(%s.of(%s))",
              convention.set("item"), optional, element.example(0))
          .addLine(" .build())")
          .addLine(" .testEquals();")
          .build())
      .runTest();
}
// toString() omits an absent property and prints a present one as item=<value>.
@Test
public void testValueToString_singleField() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType empty = DataType.builder()")
          .addLine(" .build();")
          .addLine("DataType present = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .build();")
          .addLine("assertEquals(\"DataType{}\", empty.toString());")
          .addLine("assertEquals(\"DataType{item=\" + %s + \"}\", present.toString());",
              element.example(0))
          .build())
      .runTest();
}
// With two optional properties, toString() includes only the present ones,
// covering all four present/absent combinations.
@Test
public void testValueToString_twoFields() {
  behaviorTester
      .with(new Processor(features))
      .with(twoProperties)
      .with(testBuilder()
          .addLine("DataType aa = DataType.builder()")
          .addLine(" .build();")
          .addLine("DataType pa = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .build();")
          .addLine("DataType ap = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .build();")
          .addLine("DataType pp = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .build();")
          .addLine("assertEquals(\"DataType{}\", aa.toString());")
          .addLine("assertEquals(\"DataType{item1=\" + %s + \"}\", pa.toString());",
              element.example(0))
          .addLine("assertEquals(\"DataType{item2=\" + %s + \"}\", ap.toString());",
              element.example(1))
          .addLine("assertEquals(\"DataType{item1=\" + %s + \","
              + " item2=\" + %s + \"}\", pp.toString());",
              element.example(0), element.example(1))
          .build())
      .runTest();
}
// buildPartial() values print with a "partial " prefix; absent property omitted.
@Test
public void testPartialToString_singleField() {
  behaviorTester
      .with(new Processor(features))
      .with(oneProperty)
      .with(testBuilder()
          .addLine("DataType empty = DataType.builder()")
          .addLine(" .buildPartial();")
          .addLine("DataType present = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item"), element.example(0))
          .addLine(" .buildPartial();")
          .addLine("assertEquals(\"partial DataType{}\", empty.toString());")
          .addLine("assertEquals(\"partial DataType{item=\" + %s + \"}\", present.toString());",
              element.example(0))
          .build())
      .runTest();
}
// Partial toString() with two properties: "partial " prefix plus only the
// present properties, across all four combinations.
@Test
public void testPartialToString_twoFields() {
  behaviorTester
      .with(new Processor(features))
      .with(twoProperties)
      .with(testBuilder()
          .addLine("DataType aa = DataType.builder()")
          .addLine(" .buildPartial();")
          .addLine("DataType pa = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .buildPartial();")
          .addLine("DataType ap = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .buildPartial();")
          .addLine("DataType pp = DataType.builder()")
          .addLine(" .%s(%s)", convention.set("item1"), element.example(0))
          .addLine(" .%s(%s)", convention.set("item2"), element.example(1))
          .addLine(" .buildPartial();")
          .addLine("assertEquals(\"partial DataType{}\", aa.toString());")
          .addLine("assertEquals(\"partial DataType{item1=\" + %s + \"}\", pa.toString());",
              element.example(0))
          .addLine("assertEquals(\"partial DataType{item2=\" + %s + \"}\", ap.toString());",
              element.example(1))
          .addLine("assertEquals(\"partial DataType{item1=\" + %s + \","
              + " item2=\" + %s + \"}\", pp.toString());",
              element.example(0), element.example(1))
          .build())
      .runTest();
}
// Optional<List<Number>> (no wildcard): generated setter accepts an
// ImmutableList of the exact element type.
@Test
public void testWildcardHandling_noWildcard() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s<%s>> %s;",
              optional, List.class, Number.class, convention.get("items"))
          .addLine(" public static class Builder extends DataType_Builder {}")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s.of((%s) 1, 2, 3, 4))",
              convention.set("items"), ImmutableList.class, Number.class)
          .addLine(" .build();")
          .addLine("assertThat(value.%s.get()).containsExactly(1, 2, 3, 4).inOrder();",
              convention.get("items"))
          .build())
      .runTest();
}
// Optional<List<?>> (unbounded wildcard): setter still accepts a concrete list.
@Test
public void testWildcardHandling_unboundedWildcard() {
  behaviorTester
      .with(new Processor(features))
      .with(new SourceBuilder()
          .addLine("package com.example;")
          .addLine("@%s", FreeBuilder.class)
          .addLine("public abstract class DataType {")
          .addLine(" public abstract %s<%s<?>> %s;",
              optional, List.class, convention.get("items"))
          .addLine(" public static class Builder extends DataType_Builder {}")
          .addLine("}")
          .build())
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .%s(%s.of(1, 2, 3, 4))", convention.set("items"), ImmutableList.class)
          .addLine(" .build();")
          .addLine("assertThat(value.%s.get()).containsExactly(1, 2, 3, 4).inOrder();",
              convention.get("items"))
          .build())
      .runTest();
}
@Test
public void testWildcardHandling_wildcardWithExtendsBound() {
behaviorTester
.with(new Processor(features))
.with(new SourceBuilder()
.addLine("package com.example;")
.addLine("@%s", FreeBuilder.class)
.addLine("public abstract class DataType {")
.addLine(" public abstract %s<%s<? extends %s>> %s;",
optional, List.class, Number.class, convention.get("items"))
.addLine(" public static class Builder extends DataType_Builder {}")
.addLine("}")
.build())
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s.of(1, 2, 3, 4))", convention.set("items"), ImmutableList.class)
.addLine(" .build();")
.addLine("assertThat(value.%s.get()).containsExactly(1, 2, 3, 4).inOrder();",
convention.get("items"))
.build())
.runTest();
}
@Test
public void testJacksonInteroperability() {
// See also https://github.com/google/FreeBuilder/issues/68
Class<? extends Module> module =
optional.getName().startsWith("com.google") ? GuavaModule.class : Jdk8Module.class;
behaviorTester
.with(new Processor(features))
.with(new SourceBuilder()
.addLine("package com.example;")
.addLine("import " + JsonProperty.class.getName() + ";")
.addLine("@%s", FreeBuilder.class)
.addLine("@%s(builder = DataType.Builder.class)", JsonDeserialize.class)
.addLine("public interface DataType {")
.addLine(" @JsonProperty(\"stuff\") %s<%s> %s;",
optional, element.type(), convention.get("item"))
.addLine("")
.addLine(" class Builder extends DataType_Builder {}")
.addLine("}")
.build())
.with(testBuilder()
.addLine("DataType value = new DataType.Builder()")
.addLine(" .%s(%s)", convention.set("item"), element.example(0))
.addLine(" .build();")
.addLine("%1$s mapper = new %1$s()", ObjectMapper.class)
.addLine(" .registerModule(new %s());", module)
.addLine("String json = mapper.writeValueAsString(value);")
.addLine("DataType clone = mapper.readValue(json, DataType.class);")
.addLine("assertEquals(%s.of(%s), clone.%s);",
optional, element.example(0), convention.get("item"))
.build())
.runTest();
}
private static TestBuilder testBuilder() {
return new TestBuilder()
.addImport("com.example.DataType");
}
}
| Fix typos in OptionalPropertyTest
| src/test/java/org/inferred/freebuilder/processor/OptionalPropertyTest.java | Fix typos in OptionalPropertyTest | <ide><path>rc/test/java/org/inferred/freebuilder/processor/OptionalPropertyTest.java
<ide> public class OptionalPropertyTest {
<ide>
<ide> @SuppressWarnings("unchecked")
<del> @Parameters(name = "Optional<{0}>, {1}, {2}")
<add> @Parameters(name = "{0}<{1}>, {2}, {3}")
<ide> public static Iterable<Object[]> parameters() {
<ide> List<Class<?>> optionals = Arrays.asList(
<ide> java.util.Optional.class,
<ide> com.google.common.base.Optional.class);
<ide> List<ElementFactory> elements = Arrays.asList(INTEGERS, STRINGS);
<ide> List<NamingConvention> conventions = Arrays.asList(NamingConvention.values());
<del> List<FeatureSet> features = FeatureSets.WITH_GUAVA;
<add> List<FeatureSet> features = FeatureSets.ALL;
<ide> return () -> Lists
<ide> .cartesianProduct(optionals, elements, conventions, features)
<ide> .stream() |
|
Java | apache-2.0 | 443110d626190898d04f0baab7dfa25e5d80745f | 0 | caojieliang/crud-jdbc | package com.landian.crud.jdbc.dao;
import com.landian.crud.core.dao.ProxyDao;
import com.landian.crud.core.dao.SQLPageUtils;
import com.landian.crud.core.result.SingleValue;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCreator;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;
import org.springframework.stereotype.Repository;
import java.sql.Statement;
import java.util.*;
import java.util.stream.Collectors;
@Repository
public class ProxyDaoJdbc implements ProxyDao {
@Autowired
private JdbcTemplate jdbcTemplate;
@Override
public int doInsert(String sql) {
jdbcTemplate.execute(sql);
return 1;
}
public int doInsertWidthId(String sql) {
jdbcTemplate.execute(sql);
return 1;
}
@Override
public Object doInsertAndReturnId(String sql) {
final String sqlFinal = sql;
KeyHolder keyHolder = new GeneratedKeyHolder();
PreparedStatementCreator preparedStatementCreator =
(con) -> jdbcTemplate.getDataSource().getConnection().prepareStatement(sqlFinal, Statement.RETURN_GENERATED_KEYS);
jdbcTemplate.update(preparedStatementCreator, keyHolder);
return keyHolder.getKey().longValue();
}
@Override
public int doUpdate(String sql) {
jdbcTemplate.update(sql);
return 1;
}
@Override
public List<Map<String, Object>> doFind(String sql) {
List<Map<String, Object>> dataMapList = jdbcTemplate.queryForList(sql);
// return mapList2HashMapList(dataMapList);
return dataMapList;
}
private List<HashMap<String, Object>> mapList2HashMapList(List<Map<String, Object>> mapList){
if(CollectionUtils.isEmpty(mapList)){
return Collections.EMPTY_LIST;
}
List<HashMap<String, Object>> targetList = new ArrayList<>();
mapList.forEach(b -> {
if(MapUtils.isNotEmpty(b)){
HashMap<String, Object> cruMap = new HashMap<>();
b.entrySet().forEach(set -> cruMap.put(set.getKey(),set.getValue()));
targetList.add(cruMap);
}
});
return targetList;
}
@Override
public List<Map<String, Object>> doFindPage(String sql, int start, int pageSize) {
String sqlTarget = SQLPageUtils.appendLimit(sql, start, pageSize);
return doFind(sqlTarget);
}
@Override
public int doDelete(String sql) {
jdbcTemplate.update(sql);
return 1;
}
@Override
public List<Long> queryAsLongValue(String sql) {
List<Object> objectList = queryAsSingleList(sql);
if(CollectionUtils.isEmpty(objectList)){
return Collections.EMPTY_LIST;
}
return objectList.stream().map(b -> SingleValue.newInstance(b).longValue()).collect(Collectors.toList());
}
@Override
public List<Integer> queryAsIntValue(String sql) {
List<Object> objectList = queryAsSingleList(sql);
if(CollectionUtils.isEmpty(objectList)){
return Collections.EMPTY_LIST;
}
return objectList.stream().map(b -> SingleValue.newInstance(b).integerValue()).collect(Collectors.toList());
}
private List<Object> queryAsSingleList(String sql){
List<Map<String, Object>> mapList = jdbcTemplate.queryForList(sql);
if(CollectionUtils.isEmpty(mapList)){
return Collections.EMPTY_LIST;
}
List<Object> targetList = new ArrayList<>();
mapList.forEach(b -> {
Object first = b.values().stream().findFirst().get();
targetList.add(first);
});
return targetList;
}
}
| src/main/java/com/landian/crud/jdbc/dao/ProxyDaoJdbc.java | package com.landian.crud.jdbc.dao;
import com.landian.crud.core.dao.ProxyDao;
import com.landian.crud.core.dao.SQLPageUtils;
import com.landian.crud.core.result.SingleValue;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCreator;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;
import org.springframework.stereotype.Repository;
import java.sql.Statement;
import java.util.*;
import java.util.stream.Collectors;
@Repository
public class ProxyDaoJdbc implements ProxyDao {
@Autowired
private JdbcTemplate jdbcTemplate;
@Override
public int doInsert(String sql) {
jdbcTemplate.execute(sql);
return 1;
}
public int doInsertWidthId(String sql) {
jdbcTemplate.execute(sql);
return 1;
}
@Override
public Object doInsertAndReturnId(String sql) {
final String sqlFinal = sql;
KeyHolder keyHolder = new GeneratedKeyHolder();
PreparedStatementCreator preparedStatementCreator =
(con) -> jdbcTemplate.getDataSource().getConnection().prepareStatement(sqlFinal, Statement.RETURN_GENERATED_KEYS);
jdbcTemplate.update(preparedStatementCreator, keyHolder);
return keyHolder.getKey().longValue();
}
@Override
public int doUpdate(String sql) {
jdbcTemplate.update(sql);
return 1;
}
@Override
public List<HashMap<String, Object>> doFind(String sql) {
List<Map<String, Object>> dataMapList = jdbcTemplate.queryForList(sql);
return mapList2HashMapList(dataMapList);
}
private List<HashMap<String, Object>> mapList2HashMapList(List<Map<String, Object>> mapList){
if(CollectionUtils.isEmpty(mapList)){
return Collections.EMPTY_LIST;
}
List<HashMap<String, Object>> targetList = new ArrayList<>();
mapList.forEach(b -> {
if(MapUtils.isNotEmpty(b)){
HashMap<String, Object> cruMap = new HashMap<>();
b.entrySet().forEach(set -> cruMap.put(set.getKey(),set.getValue()));
targetList.add(cruMap);
}
});
return targetList;
}
@Override
public List<HashMap<String, Object>> doFindPage(String sql, int start, int pageSize) {
String sqlTarget = SQLPageUtils.appendLimit(sql, start, pageSize);
return doFind(sqlTarget);
}
@Override
public int doDelete(String sql) {
jdbcTemplate.update(sql);
return 1;
}
@Override
public List<Long> queryAsLongValue(String sql) {
List<Object> objectList = queryAsSingleList(sql);
if(CollectionUtils.isEmpty(objectList)){
return Collections.EMPTY_LIST;
}
return objectList.stream().map(b -> SingleValue.newInstance(b).longValue()).collect(Collectors.toList());
}
@Override
public List<Integer> queryAsIntValue(String sql) {
List<Object> objectList = queryAsSingleList(sql);
if(CollectionUtils.isEmpty(objectList)){
return Collections.EMPTY_LIST;
}
return objectList.stream().map(b -> SingleValue.newInstance(b).integerValue()).collect(Collectors.toList());
}
private List<Object> queryAsSingleList(String sql){
List<Map<String, Object>> mapList = jdbcTemplate.queryForList(sql);
if(CollectionUtils.isEmpty(mapList)){
return Collections.EMPTY_LIST;
}
List<Object> targetList = new ArrayList<>();
mapList.forEach(b -> {
Object first = b.values().stream().findFirst().get();
targetList.add(first);
});
return targetList;
}
}
| replace HashHap as Map
| src/main/java/com/landian/crud/jdbc/dao/ProxyDaoJdbc.java | replace HashHap as Map | <ide><path>rc/main/java/com/landian/crud/jdbc/dao/ProxyDaoJdbc.java
<ide> }
<ide>
<ide> @Override
<del> public List<HashMap<String, Object>> doFind(String sql) {
<add> public List<Map<String, Object>> doFind(String sql) {
<ide> List<Map<String, Object>> dataMapList = jdbcTemplate.queryForList(sql);
<del> return mapList2HashMapList(dataMapList);
<add>// return mapList2HashMapList(dataMapList);
<add> return dataMapList;
<ide> }
<ide>
<ide> private List<HashMap<String, Object>> mapList2HashMapList(List<Map<String, Object>> mapList){
<ide> }
<ide>
<ide> @Override
<del> public List<HashMap<String, Object>> doFindPage(String sql, int start, int pageSize) {
<add> public List<Map<String, Object>> doFindPage(String sql, int start, int pageSize) {
<ide> String sqlTarget = SQLPageUtils.appendLimit(sql, start, pageSize);
<ide> return doFind(sqlTarget);
<ide> } |
|
Java | mit | 85c30f04d61104e2760362439ef127f106afa26d | 0 | Fundynamic/dune2themaker4j,Fundynamic/dune2themaker4j | package com.fundynamic.d2tm;
import com.fundynamic.d2tm.game.state.PlayingState;
import com.fundynamic.d2tm.game.terrain.impl.DuneTerrainFactory;
import com.fundynamic.d2tm.graphics.Shroud;
import com.fundynamic.d2tm.graphics.Theme;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Image;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.state.StateBasedGame;
import org.newdawn.slick.util.Bootstrap;
public class Game extends StateBasedGame {
private static final int SCREEN_WIDTH = 800;
private static final int SCREEN_HEIGHT = 600;
private static final int TILE_WIDTH = 32;
private static final int TILE_HEIGHT = 32;
public Game(String title) {
super(title);
}
@Override
public void initStatesList(GameContainer container) throws SlickException {
DuneTerrainFactory terrainFactory = new DuneTerrainFactory(
new Theme(
new Image("sheet_terrain.png"),
TILE_WIDTH,
TILE_HEIGHT
),
TILE_WIDTH,
TILE_HEIGHT
);
PlayingState playingState = new PlayingState(
container,
terrainFactory,
new Shroud(
new Image("shroud_edges.png"),
TILE_WIDTH,
TILE_HEIGHT
),
TILE_WIDTH,
TILE_HEIGHT
);
addState(playingState);
}
public static void main(String[] args) {
Bootstrap.runAsApplication(new Game("Dune II - The Maker"), SCREEN_WIDTH, SCREEN_HEIGHT, false);
}
}
| src/main/java/com/fundynamic/d2tm/Game.java | package com.fundynamic.d2tm;
import com.fundynamic.d2tm.game.state.PlayingState;
import com.fundynamic.d2tm.game.terrain.impl.DuneTerrainFactory;
import com.fundynamic.d2tm.graphics.Shroud;
import com.fundynamic.d2tm.graphics.Theme;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Image;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.state.StateBasedGame;
import org.newdawn.slick.util.Bootstrap;
public class Game extends StateBasedGame {
private static final int SCREEN_WIDTH = 800;
private static final int SCREEN_HEIGHT = 600;
private static final int TILE_WIDTH = 32;
private static final int TILE_HEIGHT = 32;
private static final int MIN_DELTA_BETWEEN_FRAMES = 10;
public Game(String title) {
super(title);
}
@Override
public void initStatesList(GameContainer container) throws SlickException {
DuneTerrainFactory terrainFactory = new DuneTerrainFactory(
new Theme(
new Image("sheet_terrain.png"),
TILE_WIDTH,
TILE_HEIGHT
),
TILE_WIDTH,
TILE_HEIGHT
);
container.setMinimumLogicUpdateInterval(MIN_DELTA_BETWEEN_FRAMES);
PlayingState playingState = new PlayingState(
container,
terrainFactory,
new Shroud(
new Image("shroud_edges.png"),
TILE_WIDTH,
TILE_HEIGHT
),
TILE_WIDTH,
TILE_HEIGHT
);
addState(playingState);
}
public static void main(String[] args) {
Bootstrap.runAsApplication(new Game("Dune II - The Maker"), SCREEN_WIDTH, SCREEN_HEIGHT, false);
}
}
| Fix choppy scrolling by allowing high framerates
| src/main/java/com/fundynamic/d2tm/Game.java | Fix choppy scrolling by allowing high framerates | <ide><path>rc/main/java/com/fundynamic/d2tm/Game.java
<ide> private static final int TILE_WIDTH = 32;
<ide> private static final int TILE_HEIGHT = 32;
<ide>
<del> private static final int MIN_DELTA_BETWEEN_FRAMES = 10;
<del>
<ide> public Game(String title) {
<ide> super(title);
<ide> }
<ide> TILE_WIDTH,
<ide> TILE_HEIGHT
<ide> );
<del>
<del> container.setMinimumLogicUpdateInterval(MIN_DELTA_BETWEEN_FRAMES);
<ide>
<ide> PlayingState playingState = new PlayingState(
<ide> container, |
|
JavaScript | mit | 8650f4c896fddf873d182bb12beca60f7a13093d | 0 | codefordenver/fresh-food-connect,codefordenver/fresh-food-connect | var path = require('path');
var webpack = require('webpack');
var ExtractTextPlugin = require('extract-text-webpack-plugin');
var HtmlWebpackPlugin = require('html-webpack-plugin');
module.exports = {
devtool: 'source-map',
entry: {
app: './lib/index.js'
},
output: {
filename: '[name].min.js',
path: path.join(__dirname, 'dist'),
publicPath: ''
},
plugins: [
new webpack.optimize.OccurenceOrderPlugin(),
new webpack.NoErrorsPlugin(),
new webpack.DefinePlugin({
'process.env': {
'NODE_ENV': JSON.stringify('production')
},
'__DEVTOOLS__': false
}),
new webpack.optimize.UglifyJsPlugin({
compressor: {
warnings: false
}
}),
new ExtractTextPlugin('app.css', { allChunks: true }),
new HtmlWebpackPlugin({
title: 'Fresh Food Connect',
filename: 'index.html',
template: 'index.template.html',
favicon: path.join(__dirname, 'assets/images/favicon.ico')
})
],
module: {
loaders: [
{
test: /\.scss$/,
loader: 'style!css!sass'
},
{test: /\.png$/, loader: "url-loader?mimetype=image/png"},
{ test: /\.css$/, loader: ExtractTextPlugin.extract('style-loader', 'css-loader!cssnext-loader') },
{ test: /\.js$/, loaders: ['babel'], exclude: /node_modules/ }
]
},
cssnext: {
browsers: 'last 2 versions'
}
};
| webpack.config.production.js | var path = require('path');
var webpack = require('webpack');
var ExtractTextPlugin = require('extract-text-webpack-plugin');
var HtmlWebpackPlugin = require('html-webpack-plugin');
module.exports = {
devtool: 'source-map',
entry: {
app: './lib/index.js'
},
output: {
filename: '[name].min.js',
path: path.join(__dirname, 'dist'),
publicPath: ''
},
plugins: [
new webpack.optimize.OccurenceOrderPlugin(),
new webpack.NoErrorsPlugin(),
new webpack.DefinePlugin({
'process.env': {
'NODE_ENV': JSON.stringify('production')
},
'__DEVTOOLS__': false
}),
new webpack.optimize.UglifyJsPlugin({
compressor: {
warnings: false
}
}),
new ExtractTextPlugin('app.css', { allChunks: true }),
new HtmlWebpackPlugin({
title: 'Fresh Food Connect',
filename: 'index.html',
template: 'index.template.html',
favicon: path.join(__dirname, 'assets/images/favicon.ico')
})
],
module: {
loaders: [
{ test: /\.css$/, loader: ExtractTextPlugin.extract('style-loader', 'css-loader!cssnext-loader') },
{ test: /\.js$/, loaders: ['babel'], exclude: /node_modules/ }
]
},
cssnext: {
browsers: 'last 2 versions'
}
};
| Fix ability to build prod static files
| webpack.config.production.js | Fix ability to build prod static files | <ide><path>ebpack.config.production.js
<ide> ],
<ide> module: {
<ide> loaders: [
<add> {
<add> test: /\.scss$/,
<add> loader: 'style!css!sass'
<add> },
<add> {test: /\.png$/, loader: "url-loader?mimetype=image/png"},
<ide> { test: /\.css$/, loader: ExtractTextPlugin.extract('style-loader', 'css-loader!cssnext-loader') },
<ide> { test: /\.js$/, loaders: ['babel'], exclude: /node_modules/ }
<ide> ] |
|
Java | apache-2.0 | c94144ca14c7cb45c473f1d8a56a19bdfe05b7cc | 0 | web-education/mod-gridfs-persistor,web-education/mod-gridfs-persistor | /*
* Copyright © WebServices pour l'Éducation, 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.wseduc.gridfs;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import com.mongodb.*;
import org.vertx.java.busmods.BusModBase;
import org.vertx.java.core.Handler;
import org.vertx.java.core.buffer.Buffer;
import org.vertx.java.core.eventbus.Message;
import org.vertx.java.core.file.FileSystem;
import org.vertx.java.core.json.JsonArray;
import org.vertx.java.core.json.JsonObject;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import com.mongodb.gridfs.GridFSInputFile;
import com.mongodb.util.JSON;
import javax.net.ssl.SSLSocketFactory;
public class GridFSPersistor extends BusModBase implements Handler<Message<Buffer>> {
protected String address;
protected String host;
protected int port;
protected String dbName;
protected String username;
protected String password;
protected String bucket;
protected Mongo mongo;
protected DB db;
public void start() {
super.start();
address = getOptionalStringConfig("address", "vertx.gridfspersistor");
host = getOptionalStringConfig("host", "localhost");
port = getOptionalIntConfig("port", 27017);
dbName = getOptionalStringConfig("db_name", "default_db");
username = getOptionalStringConfig("username", null);
password = getOptionalStringConfig("password", null);
ReadPreference readPreference = ReadPreference.valueOf(
getOptionalStringConfig("read_preference", "primary"));
int poolSize = getOptionalIntConfig("pool_size", 10);
boolean autoConnectRetry = getOptionalBooleanConfig("auto_connect_retry", true);
int socketTimeout = getOptionalIntConfig("socket_timeout", 60000);
boolean useSSL = getOptionalBooleanConfig("use_ssl", false);
JsonArray seedsProperty = config.getArray("seeds");
bucket = getOptionalStringConfig("bucket", "fs");
try {
MongoClientOptions.Builder builder = new MongoClientOptions.Builder();
builder.connectionsPerHost(poolSize);
builder.autoConnectRetry(autoConnectRetry);
builder.socketTimeout(socketTimeout);
builder.readPreference(readPreference);
if (useSSL) {
builder.socketFactory(SSLSocketFactory.getDefault());
}
if (seedsProperty == null) {
ServerAddress address = new ServerAddress(host, port);
mongo = new MongoClient(address, builder.build());
} else {
List<ServerAddress> seeds = makeSeeds(seedsProperty);
mongo = new MongoClient(seeds, builder.build());
}
db = mongo.getDB(dbName);
if (username != null && password != null) {
db.authenticate(username, password.toCharArray());
}
} catch (UnknownHostException e) {
logger.error("Failed to connect to mongo server", e);
}
eb.registerHandler(address, this);
eb.registerHandler(address + ".json", new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
String action = message.body().getString("action", "");
switch (action) {
case "write" :
writeTo(message);
break;
default:
sendError(message, "Invalid action");
}
}
});
}
private List<ServerAddress> makeSeeds(JsonArray seedsProperty) throws UnknownHostException {
List<ServerAddress> seeds = new ArrayList<>();
for (Object elem : seedsProperty) {
JsonObject address = (JsonObject) elem;
String host = address.getString("host");
int port = address.getInteger("port");
seeds.add(new ServerAddress(host, port));
}
return seeds;
}
private void writeTo(Message<JsonObject> message) {
String path = message.body().getString("path");
if (path == null) {
sendError(message, "Invalid output path.");
return;
}
JsonObject query = message.body().getObject("query");
if (query == null) {
sendError(message, "Invalid query.");
return;
}
JsonObject alias = message.body().getObject("alias", new JsonObject());
boolean renameIfExists = message.body().getBoolean("rename-if-exists", true);
GridFS fs = new GridFS(db, bucket);
try {
List<GridFSDBFile> files = fs.find(jsonToDBObject(query));
FileSystem fileSystem = vertx.fileSystem();
for (GridFSDBFile f : files) {
String a = alias.getString(f.getId().toString());
String p = path + File.separator + ((a != null) ? a : f.getFilename());
if (renameIfExists && fileSystem.existsSync(p)) {
p = path + File.separator + f.getId().toString() + "_" +
((a != null) ? a : f.getFilename());
}
f.writeTo(p);
}
sendOK(message, new JsonObject().putNumber("number", files.size()));
} catch (IOException | MongoException e) {
logger.error(e.getMessage(), e);
sendError(message, e.getMessage());
}
}
public void stop() {
mongo.close();
}
@Override
public void handle(Message<Buffer> message) {
if (message.body() != null) {
Buffer content = message.body();
int headerSize = content.getInt(content.length() - 4);
byte [] header = content.getBytes(content.length() - 4 - headerSize, content.length() - 4);
JsonObject json = new JsonObject();
try {
json = new JsonObject(new String(header, "UTF-8"));
} catch (UnsupportedEncodingException e) {
container.logger().error(e.getMessage(), e);
}
byte [] data = content.getBytes(0, content.length() - 4 - headerSize);
switch (json.getString("action")) {
case "save":
persistFile(message, data, json);
break;
case "findone":
getFile(message, json);
break;
case "remove":
removeFile(message, json);
break;
case "copy":
copyFile(message, json);
break;
default:
replyError(message, "Invalid message");
break;
}
} else {
replyError(message, "Invalid message");
}
}
private void getFile(Message<Buffer> message, JsonObject json) {
JsonObject query = json.getObject("query");
if (query == null) {
return;
}
GridFS fs = new GridFS(db, bucket);
try {
GridFSDBFile f = fs.findOne(jsonToDBObject(query));
if (f == null) {
replyError(message, "File not found with query : " + query.encode());
return;
}
ByteArrayOutputStream os = new ByteArrayOutputStream();
f.writeTo(os);
message.reply(new Buffer(os.toByteArray()));
} catch (IOException | MongoException e) {
container.logger().error(e.getMessage(), e);
JsonObject j = new JsonObject().putString("status", "error").putString("message", e.getMessage());
try {
message.reply(new Buffer(j.encode().getBytes("UTF-8")));
} catch (UnsupportedEncodingException e1) {
container.logger().error(e1.getMessage(), e1);
}
}
}
private void persistFile(Message<Buffer> message, byte[] data, JsonObject header) {
GridFS fs = new GridFS(db, bucket);
GridFSInputFile f = fs.createFile(data);
String id = header.getString("_id");
if (id == null || id.trim().isEmpty()) {
id = UUID.randomUUID().toString();
}
f.setId(id);
f.setContentType(header.getString("content-type"));
f.setFilename(header.getString("filename"));
f.save();
JsonObject reply = new JsonObject();
reply.putString("_id", id);
replyOK(message, reply);
}
private void copyFile(Message<Buffer> message, JsonObject json) {
JsonObject query = json.getObject("query");
if (query == null) {
return;
}
GridFS fs = new GridFS(db, bucket);
try {
GridFSDBFile f = fs.findOne(jsonToDBObject(query));
if (f == null) {
replyError(message, "File not found with query : " + query.encode());
return;
}
ByteArrayOutputStream os = new ByteArrayOutputStream();
f.writeTo(os);
JsonObject j = new JsonObject();
j.putString("content-type", f.getContentType());
j.putString("filename", f.getFilename());
persistFile(message, os.toByteArray(), j);
} catch (IOException | MongoException e) {
replyError(message, e.getMessage());
}
}
private void removeFile(Message<Buffer> message, JsonObject json) {
JsonObject query = json.getObject("query");
if (query == null) {
return;
}
GridFS fs = new GridFS(db, bucket);
try {
fs.remove(jsonToDBObject(query));
replyOK(message, null);
} catch (MongoException e) {
replyError(message, e.getMessage());
}
}
private DBObject jsonToDBObject(JsonObject object) {
String str = object.encode();
return (DBObject)JSON.parse(str);
}
protected void replyOK(Message<Buffer> message, JsonObject reply) {
if (reply == null) {
reply = new JsonObject();
}
reply.putString("status", "ok");
message.reply(reply);
}
protected void replyError(Message<Buffer> message, String error) {
logger.error(error);
JsonObject json = new JsonObject().putString("status", "error").putString("message", error);
message.reply(json);
}
}
| src/main/java/fr/wseduc/gridfs/GridFSPersistor.java | /*
* Copyright © WebServices pour l'Éducation, 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.wseduc.gridfs;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.UnknownHostException;
import java.util.List;
import java.util.UUID;
import org.vertx.java.busmods.BusModBase;
import org.vertx.java.core.Handler;
import org.vertx.java.core.buffer.Buffer;
import org.vertx.java.core.eventbus.Message;
import org.vertx.java.core.file.FileSystem;
import org.vertx.java.core.json.JsonObject;
import com.mongodb.DB;
import com.mongodb.DBObject;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoException;
import com.mongodb.ServerAddress;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import com.mongodb.gridfs.GridFSInputFile;
import com.mongodb.util.JSON;
public class GridFSPersistor extends BusModBase implements Handler<Message<Buffer>> {
protected String address;
protected String host;
protected int port;
protected String dbName;
protected String username;
protected String password;
protected String bucket;
protected Mongo mongo;
protected DB db;
public void start() {
super.start();
address = getOptionalStringConfig("address", "vertx.gridfspersistor");
host = getOptionalStringConfig("host", "localhost");
port = getOptionalIntConfig("port", 27017);
dbName = getOptionalStringConfig("db_name", "default_db");
username = getOptionalStringConfig("username", null);
password = getOptionalStringConfig("password", null);
int poolSize = getOptionalIntConfig("pool_size", 10);
bucket = getOptionalStringConfig("bucket", "fs");
try {
MongoClientOptions.Builder builder = new MongoClientOptions.Builder();
builder.connectionsPerHost(poolSize);
ServerAddress address = new ServerAddress(host, port);
mongo = new MongoClient(address, builder.build());
db = mongo.getDB(dbName);
if (username != null && password != null) {
db.authenticate(username, password.toCharArray());
}
} catch (UnknownHostException e) {
logger.error("Failed to connect to mongo server", e);
}
eb.registerHandler(address, this);
eb.registerHandler(address + ".json", new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
String action = message.body().getString("action", "");
switch (action) {
case "write" :
writeTo(message);
break;
default:
sendError(message, "Invalid action");
}
}
});
}
private void writeTo(Message<JsonObject> message) {
String path = message.body().getString("path");
if (path == null) {
sendError(message, "Invalid output path.");
return;
}
JsonObject query = message.body().getObject("query");
if (query == null) {
sendError(message, "Invalid query.");
return;
}
JsonObject alias = message.body().getObject("alias", new JsonObject());
boolean renameIfExists = message.body().getBoolean("rename-if-exists", true);
GridFS fs = new GridFS(db, bucket);
try {
List<GridFSDBFile> files = fs.find(jsonToDBObject(query));
FileSystem fileSystem = vertx.fileSystem();
for (GridFSDBFile f : files) {
String a = alias.getString(f.getId().toString());
String p = path + File.separator + ((a != null) ? a : f.getFilename());
if (renameIfExists && fileSystem.existsSync(p)) {
p = path + File.separator + f.getId().toString() + "_" +
((a != null) ? a : f.getFilename());
}
f.writeTo(p);
}
sendOK(message, new JsonObject().putNumber("number", files.size()));
} catch (IOException | MongoException e) {
logger.error(e.getMessage(), e);
sendError(message, e.getMessage());
}
}
public void stop() {
mongo.close();
}
@Override
public void handle(Message<Buffer> message) {
if (message.body() != null) {
Buffer content = message.body();
int headerSize = content.getInt(content.length() - 4);
byte [] header = content.getBytes(content.length() - 4 - headerSize, content.length() - 4);
JsonObject json = new JsonObject();
try {
json = new JsonObject(new String(header, "UTF-8"));
} catch (UnsupportedEncodingException e) {
container.logger().error(e.getMessage(), e);
}
byte [] data = content.getBytes(0, content.length() - 4 - headerSize);
switch (json.getString("action")) {
case "save":
persistFile(message, data, json);
break;
case "findone":
getFile(message, json);
break;
case "remove":
removeFile(message, json);
break;
case "copy":
copyFile(message, json);
break;
default:
replyError(message, "Invalid message");
break;
}
} else {
replyError(message, "Invalid message");
}
}
// Looks up a single GridFS file matching "query" and replies with its raw bytes.
// On failure the error JSON is serialized into the Buffer reply, since the
// sender always expects a Buffer.
private void getFile(Message<Buffer> message, JsonObject json) {
   JsonObject query = json.getObject("query");
   if (query == null) {
      // Previously returned silently, leaving the sender waiting forever for a reply.
      replyError(message, "Invalid query.");
      return;
   }
   GridFS fs = new GridFS(db, bucket);
   try {
      GridFSDBFile f = fs.findOne(jsonToDBObject(query));
      if (f == null) {
         replyError(message, "File not found with query : " + query.encode());
         return;
      }
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      f.writeTo(os);
      // Success path replies with the raw file content, not a JSON envelope.
      message.reply(new Buffer(os.toByteArray()));
   } catch (IOException | MongoException e) {
      container.logger().error(e.getMessage(), e);
      JsonObject j = new JsonObject().putString("status", "error").putString("message", e.getMessage());
      try {
         message.reply(new Buffer(j.encode().getBytes("UTF-8")));
      } catch (UnsupportedEncodingException e1) {
         container.logger().error(e1.getMessage(), e1);
      }
   }
}
// Stores the payload as a new GridFS file, generating a random id when the
// header carries none (or a blank one), and replies OK with the id used.
private void persistFile(Message<Buffer> message, byte[] data, JsonObject header) {
   GridFS fs = new GridFS(db, bucket);
   GridFSInputFile file = fs.createFile(data);
   String fileId = header.getString("_id");
   boolean missingId = (fileId == null) || fileId.trim().isEmpty();
   if (missingId) {
      fileId = UUID.randomUUID().toString();
   }
   file.setId(fileId);
   file.setContentType(header.getString("content-type"));
   file.setFilename(header.getString("filename"));
   file.save();
   replyOK(message, new JsonObject().putString("_id", fileId));
}
// Duplicates the first file matching "query" as a brand new GridFS entry:
// same content, content-type and filename, but a freshly generated id.
private void copyFile(Message<Buffer> message, JsonObject json) {
   JsonObject query = json.getObject("query");
   if (query == null) {
      // Previously returned silently, leaving the sender without any reply.
      replyError(message, "Invalid query.");
      return;
   }
   GridFS fs = new GridFS(db, bucket);
   try {
      GridFSDBFile f = fs.findOne(jsonToDBObject(query));
      if (f == null) {
         replyError(message, "File not found with query : " + query.encode());
         return;
      }
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      f.writeTo(os);
      JsonObject j = new JsonObject();
      j.putString("content-type", f.getContentType());
      j.putString("filename", f.getFilename());
      // No "_id" in the header, so persistFile generates a fresh one and replies OK.
      persistFile(message, os.toByteArray(), j);
   } catch (IOException | MongoException e) {
      replyError(message, e.getMessage());
   }
}
// Removes every GridFS file matching "query" and acknowledges the sender.
private void removeFile(Message<Buffer> message, JsonObject json) {
   JsonObject query = json.getObject("query");
   if (query == null) {
      // Previously returned silently, leaving the sender without any reply.
      replyError(message, "Invalid query.");
      return;
   }
   GridFS fs = new GridFS(db, bucket);
   try {
      fs.remove(jsonToDBObject(query));
      replyOK(message, null);
   } catch (MongoException e) {
      replyError(message, e.getMessage());
   }
}
// Bridges a vert.x JsonObject to the Mongo driver's DBObject via its JSON parser.
private DBObject jsonToDBObject(JsonObject object) {
   return (DBObject) JSON.parse(object.encode());
}
// Replies to the sender with the given JSON payload stamped "status": "ok".
// A null payload is replaced by an empty object so the reply is always valid JSON.
protected void replyOK(Message<Buffer> message, JsonObject reply) {
   JsonObject payload = (reply != null) ? reply : new JsonObject();
   payload.putString("status", "ok");
   message.reply(payload);
}
// Logs the error and replies with {"status":"error","message":<error>}.
protected void replyError(Message<Buffer> message, String error) {
logger.error(error);
JsonObject json = new JsonObject().putString("status", "error").putString("message", error);
message.reply(json);
}
}
| Support replica set
| src/main/java/fr/wseduc/gridfs/GridFSPersistor.java | Support replica set | <ide><path>rc/main/java/fr/wseduc/gridfs/GridFSPersistor.java
<ide> import java.io.IOException;
<ide> import java.io.UnsupportedEncodingException;
<ide> import java.net.UnknownHostException;
<add>import java.util.ArrayList;
<ide> import java.util.List;
<ide> import java.util.UUID;
<ide>
<add>import com.mongodb.*;
<ide> import org.vertx.java.busmods.BusModBase;
<ide> import org.vertx.java.core.Handler;
<ide> import org.vertx.java.core.buffer.Buffer;
<ide> import org.vertx.java.core.eventbus.Message;
<ide> import org.vertx.java.core.file.FileSystem;
<add>import org.vertx.java.core.json.JsonArray;
<ide> import org.vertx.java.core.json.JsonObject;
<ide>
<del>import com.mongodb.DB;
<del>import com.mongodb.DBObject;
<del>import com.mongodb.Mongo;
<del>import com.mongodb.MongoClient;
<del>import com.mongodb.MongoClientOptions;
<del>import com.mongodb.MongoException;
<del>import com.mongodb.ServerAddress;
<ide> import com.mongodb.gridfs.GridFS;
<ide> import com.mongodb.gridfs.GridFSDBFile;
<ide> import com.mongodb.gridfs.GridFSInputFile;
<ide> import com.mongodb.util.JSON;
<add>
<add>import javax.net.ssl.SSLSocketFactory;
<ide>
<ide> public class GridFSPersistor extends BusModBase implements Handler<Message<Buffer>> {
<ide>
<ide> dbName = getOptionalStringConfig("db_name", "default_db");
<ide> username = getOptionalStringConfig("username", null);
<ide> password = getOptionalStringConfig("password", null);
<add> ReadPreference readPreference = ReadPreference.valueOf(
<add> getOptionalStringConfig("read_preference", "primary"));
<ide> int poolSize = getOptionalIntConfig("pool_size", 10);
<add> boolean autoConnectRetry = getOptionalBooleanConfig("auto_connect_retry", true);
<add> int socketTimeout = getOptionalIntConfig("socket_timeout", 60000);
<add> boolean useSSL = getOptionalBooleanConfig("use_ssl", false);
<add> JsonArray seedsProperty = config.getArray("seeds");
<ide> bucket = getOptionalStringConfig("bucket", "fs");
<ide>
<ide> try {
<ide> MongoClientOptions.Builder builder = new MongoClientOptions.Builder();
<ide> builder.connectionsPerHost(poolSize);
<del> ServerAddress address = new ServerAddress(host, port);
<del> mongo = new MongoClient(address, builder.build());
<add> builder.autoConnectRetry(autoConnectRetry);
<add> builder.socketTimeout(socketTimeout);
<add> builder.readPreference(readPreference);
<add>
<add> if (useSSL) {
<add> builder.socketFactory(SSLSocketFactory.getDefault());
<add> }
<add>
<add> if (seedsProperty == null) {
<add> ServerAddress address = new ServerAddress(host, port);
<add> mongo = new MongoClient(address, builder.build());
<add> } else {
<add> List<ServerAddress> seeds = makeSeeds(seedsProperty);
<add> mongo = new MongoClient(seeds, builder.build());
<add> }
<add>
<ide> db = mongo.getDB(dbName);
<ide> if (username != null && password != null) {
<ide> db.authenticate(username, password.toCharArray());
<ide> });
<ide> }
<ide>
<add> private List<ServerAddress> makeSeeds(JsonArray seedsProperty) throws UnknownHostException {
<add> List<ServerAddress> seeds = new ArrayList<>();
<add> for (Object elem : seedsProperty) {
<add> JsonObject address = (JsonObject) elem;
<add> String host = address.getString("host");
<add> int port = address.getInteger("port");
<add> seeds.add(new ServerAddress(host, port));
<add> }
<add> return seeds;
<add> }
<add>
<ide> private void writeTo(Message<JsonObject> message) {
<ide> String path = message.body().getString("path");
<ide> if (path == null) { |
|
JavaScript | mit | 755c5688c317bf865a30ddc925d8756ecd1c90ec | 0 | tleunen/react-mdl,tleunen/react-mdl | module.exports = {
externals: {
react: {
root: 'React',
commonjs2: 'react',
commonjs: 'react',
amd: 'react'
},
'react-dom': {
root: 'ReactDOM',
commonjs2: 'react-dom',
commonjs: 'react-dom',
amd: 'react-dom'
}
},
devtool: 'sourcemap',
module: {
loaders: [
{ test: /\.js$/, loader: 'babel-loader', exclude: /(node_modules)/ }
]
}
};
| webpack.config.js | module.exports = {
externals: {
react: 'React',
'react-dom': 'ReactDOM'
},
devtool: 'sourcemap',
module: {
loaders: [
{ test: /\.js$/, loader: 'babel-loader', exclude: /(node_modules)/ }
]
}
};
| Fixed UMD output
Expanded upon webpack externals config to differentiate between different output targets:
* Import `react` and `react-dom` when using `require`
* Import `React` and `ReactDOM` when using globals
SEE: Dependency names wrong when using React MDL from script #298
SEE: ReactMDL.js bundle: "react" or "react-dom" instead of require('React') or require('ReactDOM') #267
| webpack.config.js | Fixed UMD output Expanded upon webpack externals config to differentiate between different output targets: * Import `react` and `react-dom` when using `require` * Import `React` and `ReactDOM` when using globals SEE: Dependency names wrong when using React MDL from script #298 SEE: ReactMDL.js bundle: "react" or "react-dom" instead of require('React') or require('ReactDOM') #267 | <ide><path>ebpack.config.js
<ide> module.exports = {
<ide> externals: {
<del> react: 'React',
<del> 'react-dom': 'ReactDOM'
<add> react: {
<add> root: 'React',
<add> commonjs2: 'react',
<add> commonjs: 'react',
<add> amd: 'react'
<add> },
<add> 'react-dom': {
<add> root: 'ReactDOM',
<add> commonjs2: 'react-dom',
<add> commonjs: 'react-dom',
<add> amd: 'react-dom'
<add> }
<ide> },
<ide> devtool: 'sourcemap',
<ide> module: { |
|
Java | apache-2.0 | 349477ed31b8d4993dca757981419feec341d014 | 0 | TomRoss/activemq-artemis,jbertram/activemq-artemis,clebertsuconic/activemq-artemis,tabish121/activemq-artemis,rgodfrey/activemq-artemis,orpiske/activemq-artemis,clebertsuconic/activemq-artemis,kjniemi/activemq-artemis,apache/activemq-artemis,tabish121/activemq-artemis,apache/activemq-artemis,iweiss/activemq-artemis,iweiss/activemq-artemis,tabish121/activemq-artemis,rgodfrey/activemq-artemis,andytaylor/activemq-artemis,gaohoward/activemq-artemis,andytaylor/activemq-artemis,TomRoss/activemq-artemis,rgodfrey/activemq-artemis,jbertram/activemq-artemis,clebertsuconic/activemq-artemis,gaohoward/activemq-artemis,kjniemi/activemq-artemis,kjniemi/activemq-artemis,andytaylor/activemq-artemis,TomRoss/activemq-artemis,kjniemi/activemq-artemis,apache/activemq-artemis,clebertsuconic/activemq-artemis,graben/activemq-artemis,orpiske/activemq-artemis,TomRoss/activemq-artemis,gaohoward/activemq-artemis,rgodfrey/activemq-artemis,andytaylor/activemq-artemis,apache/activemq-artemis,iweiss/activemq-artemis,rgodfrey/activemq-artemis,orpiske/activemq-artemis,jbertram/activemq-artemis,graben/activemq-artemis,graben/activemq-artemis,iweiss/activemq-artemis,tabish121/activemq-artemis,graben/activemq-artemis,orpiske/activemq-artemis,gaohoward/activemq-artemis,rgodfrey/activemq-artemis,kjniemi/activemq-artemis,jbertram/activemq-artemis | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.management.impl;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanOperationInfo;
import javax.management.openmbean.CompositeData;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.JsonUtil;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.management.QueueControl;
import org.apache.activemq.artemis.core.filter.Filter;
import org.apache.activemq.artemis.core.filter.impl.FilterImpl;
import org.apache.activemq.artemis.core.management.impl.openmbean.OpenTypeSupport;
import org.apache.activemq.artemis.core.message.impl.CoreMessage;
import org.apache.activemq.artemis.core.messagecounter.MessageCounter;
import org.apache.activemq.artemis.core.messagecounter.impl.MessageCounterHelper;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.postoffice.Binding;
import org.apache.activemq.artemis.core.postoffice.PostOffice;
import org.apache.activemq.artemis.core.security.CheckType;
import org.apache.activemq.artemis.core.security.SecurityAuth;
import org.apache.activemq.artemis.core.security.SecurityStore;
import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle;
import org.apache.activemq.artemis.core.server.Consumer;
import org.apache.activemq.artemis.core.server.MessageReference;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.server.ServerConsumer;
import org.apache.activemq.artemis.core.settings.HierarchicalRepository;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.utils.Base64;
import org.apache.activemq.artemis.utils.JsonLoader;
import org.apache.activemq.artemis.utils.collections.LinkedListIterator;
public class QueueControlImpl extends AbstractControl implements QueueControl {
// Default number of references processed per transaction in bulk remove/move
// operations, keeping individual transactions bounded.
public static final int FLUSH_LIMIT = 500;
// Sentinel bucket used by countMessagesProperty() for messages that lack the
// requested property (or when no property name was supplied).
public static final String UNDEFINED = "*_UNDEFINED_*";
// Constants -----------------------------------------------------
// Attributes ----------------------------------------------------
private final Queue queue;
private final String address;
private final PostOffice postOffice;
private final StorageManager storageManager;
private final SecurityStore securityStore;
// Source of per-address settings (DLA, expiry address, management browse page size).
private final HierarchicalRepository<AddressSettings> addressSettingsRepository;
// Static --------------------------------------------------------
// Serializes an array of message property maps to a JSON array string.
private static String toJSON(final Map<String, Object>[] messages) {
   return toJSONMsgArray(messages).toString();
}
// Converts each message property map into a JSON object and collects them all
// into a single JSON array, preserving order.
private static JsonArray toJSONMsgArray(final Map<String, Object>[] messages) {
   JsonArrayBuilder builder = JsonLoader.createArrayBuilder();
   for (int i = 0; i < messages.length; i++) {
      builder.add(JsonUtil.toJsonObject(messages[i]));
   }
   return builder.build();
}
// Serializes per-consumer message listings as a JSON array of
// {"consumerName": ..., "elements": [...]} objects.
private static String toJSON(final Map<String, Map<String, Object>[]> messages) {
   JsonArrayBuilder result = JsonLoader.createArrayBuilder();
   for (Map.Entry<String, Map<String, Object>[]> entry : messages.entrySet()) {
      JsonObjectBuilder consumerEntry = JsonLoader.createObjectBuilder()
         .add("consumerName", entry.getKey())
         .add("elements", toJSONMsgArray(entry.getValue()));
      result.add(consumerEntry);
   }
   return result.build().toString();
}
// Constructors --------------------------------------------------
/**
 * @param queue the queue this control manages
 * @param address the address the queue is bound to
 * @param postOffice used to resolve bindings and to route management-sent messages
 * @param storageManager used for message id generation and IO context handling
 * @param securityStore used to authorize management sends against the queue
 * @param addressSettingsRepository source of DLA/expiry/browse-page-size settings
 */
public QueueControlImpl(final Queue queue,
final String address,
final PostOffice postOffice,
final StorageManager storageManager,
final SecurityStore securityStore,
final HierarchicalRepository<AddressSettings> addressSettingsRepository) throws Exception {
super(QueueControl.class, storageManager);
this.queue = queue;
this.address = address;
this.postOffice = postOffice;
this.storageManager = storageManager;
this.securityStore = securityStore;
this.addressSettingsRepository = addressSettingsRepository;
}
// Public --------------------------------------------------------
// Attaches the per-queue message counter backing the listMessageCounter* operations.
public void setMessageCounter(final MessageCounter counter) {
this.counter = counter;
}
// QueueControlMBean implementation ------------------------------
@Override
public String getName() {
   // Every other accessor on this control verifies the server is started before
   // touching state; do the same here for consistency.
   checkStarted();
   clearIO();
   try {
      return queue.getName().toString();
   } finally {
      blockOnIO();
   }
}
@Override
public String getAddress() {
checkStarted();
return address;
}
@Override
public String getFilter() {
checkStarted();
clearIO();
try {
Filter filter = queue.getFilter();
return filter != null ? filter.getFilterString().toString() : null;
} finally {
blockOnIO();
}
}
@Override
public boolean isDurable() {
checkStarted();
clearIO();
try {
return queue.isDurable();
} finally {
blockOnIO();
}
}
@Override
public String getUser() {
checkStarted();
clearIO();
try {
SimpleString user = queue.getUser();
return user == null ? null : user.toString();
} finally {
blockOnIO();
}
}
@Override
public String getRoutingType() {
checkStarted();
clearIO();
try {
return queue.getRoutingType().toString();
} finally {
blockOnIO();
}
}
@Override
public boolean isTemporary() {
checkStarted();
clearIO();
try {
return queue.isTemporary();
} finally {
blockOnIO();
}
}
@Override
public long getMessageCount() {
checkStarted();
clearIO();
try {
return queue.getMessageCount();
} finally {
blockOnIO();
}
}
@Override
public long getPersistentSize() {
checkStarted();
clearIO();
try {
return queue.getPersistentSize();
} finally {
blockOnIO();
}
}
@Override
public long getDurableMessageCount() {
checkStarted();
clearIO();
try {
return queue.getDurableMessageCount();
} finally {
blockOnIO();
}
}
@Override
public long getDurablePersistentSize() {
checkStarted();
clearIO();
try {
return queue.getDurablePersistentSize();
} finally {
blockOnIO();
}
}
@Override
public int getConsumerCount() {
checkStarted();
clearIO();
try {
return queue.getConsumerCount();
} finally {
blockOnIO();
}
}
@Override
public int getDeliveringCount() {
checkStarted();
clearIO();
try {
return queue.getDeliveringCount();
} finally {
blockOnIO();
}
}
@Override
public long getDeliveringSize() {
checkStarted();
clearIO();
try {
return queue.getDeliveringSize();
} finally {
blockOnIO();
}
}
@Override
public int getDurableDeliveringCount() {
checkStarted();
clearIO();
try {
return queue.getDurableDeliveringCount();
} finally {
blockOnIO();
}
}
@Override
public long getDurableDeliveringSize() {
checkStarted();
clearIO();
try {
return queue.getDurableDeliveringSize();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesAdded() {
checkStarted();
clearIO();
try {
return queue.getMessagesAdded();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesAcknowledged() {
checkStarted();
clearIO();
try {
return queue.getMessagesAcknowledged();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesExpired() {
checkStarted();
clearIO();
try {
return queue.getMessagesExpired();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesKilled() {
checkStarted();
clearIO();
try {
return queue.getMessagesKilled();
} finally {
blockOnIO();
}
}
@Override
public long getID() {
checkStarted();
clearIO();
try {
return queue.getID();
} finally {
blockOnIO();
}
}
@Override
public long getScheduledCount() {
checkStarted();
clearIO();
try {
return queue.getScheduledCount();
} finally {
blockOnIO();
}
}
@Override
public long getScheduledSize() {
checkStarted();
clearIO();
try {
return queue.getScheduledSize();
} finally {
blockOnIO();
}
}
@Override
public long getDurableScheduledCount() {
checkStarted();
clearIO();
try {
return queue.getDurableScheduledCount();
} finally {
blockOnIO();
}
}
@Override
public long getDurableScheduledSize() {
checkStarted();
clearIO();
try {
return queue.getDurableScheduledSize();
} finally {
blockOnIO();
}
}
@Override
public String getDeadLetterAddress() {
checkStarted();
clearIO();
try {
AddressSettings addressSettings = addressSettingsRepository.getMatch(address);
if (addressSettings != null && addressSettings.getDeadLetterAddress() != null) {
return addressSettings.getDeadLetterAddress().toString();
}
return null;
} finally {
blockOnIO();
}
}
@Override
public String getExpiryAddress() {
checkStarted();
clearIO();
try {
AddressSettings addressSettings = addressSettingsRepository.getMatch(address);
if (addressSettings != null && addressSettings.getExpiryAddress() != null) {
return addressSettings.getExpiryAddress().toString();
} else {
return null;
}
} finally {
blockOnIO();
}
}
@Override
public int getMaxConsumers() {
checkStarted();
clearIO();
try {
return queue.getMaxConsumers();
} finally {
blockOnIO();
}
}
@Override
public boolean isPurgeOnNoConsumers() {
checkStarted();
clearIO();
try {
return queue.isPurgeOnNoConsumers();
} finally {
blockOnIO();
}
}
@Override
public boolean isExclusive() {
checkStarted();
clearIO();
try {
return queue.isExclusive();
} finally {
blockOnIO();
}
}
@Override
public boolean isLastValue() {
checkStarted();
clearIO();
try {
return queue.isLastValue();
} finally {
blockOnIO();
}
}
@Override
public Map<String, Object>[] listScheduledMessages() throws Exception {
checkStarted();
clearIO();
try {
List<MessageReference> refs = queue.getScheduledMessages();
return convertMessagesToMaps(refs);
} finally {
blockOnIO();
}
}
@Override
public String listScheduledMessagesAsJSON() throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listScheduledMessages());
} finally {
blockOnIO();
}
}
/**
 * Converts a list of message references into an array of per-message
 * property maps suitable for JSON serialization.
 *
 * @param refs the references whose messages should be converted
 * @return one map per reference, in iteration order
 */
private Map<String, Object>[] convertMessagesToMaps(List<MessageReference> refs) throws ActiveMQException {
   Map<String, Object>[] messages = new Map[refs.size()];
   int index = 0;
   for (MessageReference reference : refs) {
      messages[index] = reference.getMessage().toMap();
      index++;
   }
   return messages;
}
@Override
public Map<String, Map<String, Object>[]> listDeliveringMessages() throws ActiveMQException {
checkStarted();
clearIO();
try {
Map<String, List<MessageReference>> msgs = queue.getDeliveringMessages();
Map<String, Map<String, Object>[]> msgRet = new HashMap<>();
for (Map.Entry<String, List<MessageReference>> entry : msgs.entrySet()) {
msgRet.put(entry.getKey(), convertMessagesToMaps(entry.getValue()));
}
return msgRet;
} finally {
blockOnIO();
}
}
@Override
public String listDeliveringMessagesAsJSON() throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listDeliveringMessages());
} finally {
blockOnIO();
}
}
@Override
public Map<String, Object>[] listMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
List<Map<String, Object>> messages = new ArrayList<>();
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
try {
while (iterator.hasNext()) {
MessageReference ref = iterator.next();
if (filter == null || filter.match(ref.getMessage())) {
Message message = ref.getMessage();
messages.add(message.toMap());
}
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
return messages.toArray(new Map[messages.size()]);
}
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public String listMessagesAsJSON(final String filter) throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listMessages(filter));
} finally {
blockOnIO();
}
}
protected Map<String, Object>[] getFirstMessage() throws Exception {
checkStarted();
clearIO();
try {
List<Map<String, Object>> messages = new ArrayList<>();
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
// returns just the first, as it's the first only
if (iterator.hasNext()) {
MessageReference ref = iterator.next();
Message message = ref.getMessage();
messages.add(message.toMap());
}
return messages.toArray(new Map[1]);
}
} finally {
blockOnIO();
}
}
@Override
public String getFirstMessageAsJSON() throws Exception {
return toJSON(getFirstMessage());
}
@Override
public Long getFirstMessageTimestamp() throws Exception {
   // Peek at the head of the queue and pull out its "timestamp" property, if any.
   Map<String, Object>[] messages = getFirstMessage();
   if (messages == null || messages.length == 0) {
      return null;
   }
   Map<String, Object> first = messages[0];
   if (first == null || !first.containsKey("timestamp")) {
      return null;
   }
   return (Long) first.get("timestamp");
}
@Override
public Long getFirstMessageAge() throws Exception {
   // Age is "now minus head-of-queue timestamp"; null when the queue is empty
   // or its first message carries no timestamp.
   Long timestamp = getFirstMessageTimestamp();
   if (timestamp == null) {
      return null;
   }
   return Long.valueOf(new Date().getTime() - timestamp.longValue());
}
@Override
public long countMessages() throws Exception {
return countMessages(null);
}
@Override
public long countMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
if (filter == null) {
return getMessageCount();
} else {
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
int count = 0;
try {
while (iterator.hasNext()) {
MessageReference ref = iterator.next();
if (filter.match(ref.getMessage())) {
count++;
}
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
return count;
}
}
} finally {
blockOnIO();
}
}
@Override
public String countMessagesProperty(final String filter) throws Exception {
// Groups the browsable messages on this queue by the value of the given string
// property. Note: "filter" is the property NAME here, not a filter expression.
// Messages lacking the property (or a null property name) are counted under the
// UNDEFINED sentinel. Returns a JSON object mapping value -> count.
checkStarted();
clearIO();
try {
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
Map<String, Integer> result = new HashMap<>();
String propertySearch = filter == null ? UNDEFINED : filter;
try {
while (iterator.hasNext()) {
MessageReference ref = iterator.next();
String messageProperty = ref.getMessage().getStringProperty(propertySearch);
messageProperty = messageProperty == null ? UNDEFINED : messageProperty;
Integer value = result.getOrDefault(messageProperty, 0);
result.put(messageProperty, ++value);
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
return JsonUtil.toJsonObject(result).toString();
}
} finally {
blockOnIO();
}
}
@Override
public boolean removeMessage(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.deleteReference(messageID);
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public int removeMessages(final String filterStr) throws Exception {
return removeMessages(FLUSH_LIMIT, filterStr);
}
@Override
public int removeMessages(final int flushLimit, final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.deleteMatchingReferences(flushLimit, filter);
} finally {
blockOnIO();
}
}
@Override
public int removeAllMessages() throws Exception {
return removeMessages(FLUSH_LIMIT, null);
}
@Override
public boolean expireMessage(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.expireReference(messageID);
} finally {
blockOnIO();
}
}
@Override
public int expireMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.expireReferences(filter);
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public boolean retryMessage(final long messageID) throws Exception {
   // Retries (moves back to its original queue) the single message with the
   // given id — typically invoked on a dead-letter queue.
   checkStarted();
   clearIO();
   try {
      Filter singleMessageFilter = new Filter() {
         @Override
         public boolean match(Message message) {
            return message.getMessageID() == messageID;
         }

         @Override
         public SimpleString getFilterString() {
            // Include the actual id in the description; the previous literal
            // embedded the text "messageID" instead of its value.
            return new SimpleString("custom filter for MESSAGEID=" + messageID);
         }
      };

      return queue.retryMessages(singleMessageFilter) > 0;
   } finally {
      blockOnIO();
   }
}
@Override
public int retryMessages() throws Exception {
checkStarted();
clearIO();
try {
return queue.retryMessages(null);
} finally {
blockOnIO();
}
}
@Override
public boolean moveMessage(final long messageID, final String otherQueueName) throws Exception {
return moveMessage(messageID, otherQueueName, false);
}
@Override
public boolean moveMessage(final long messageID,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
checkStarted();
clearIO();
try {
Binding binding = postOffice.getBinding(new SimpleString(otherQueueName));
if (binding == null) {
throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName);
}
return queue.moveReference(messageID, binding.getAddress(), binding, rejectDuplicates);
} finally {
blockOnIO();
}
}
@Override
public int moveMessages(final String filterStr, final String otherQueueName) throws Exception {
return moveMessages(filterStr, otherQueueName, false);
}
@Override
public int moveMessages(final int flushLimit,
final String filterStr,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
Binding binding = postOffice.getBinding(new SimpleString(otherQueueName));
if (binding == null) {
throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName);
}
int retValue = queue.moveReferences(flushLimit, filter, binding.getAddress(), rejectDuplicates, binding);
return retValue;
} finally {
blockOnIO();
}
}
@Override
public int moveMessages(final String filterStr,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
return moveMessages(FLUSH_LIMIT, filterStr, otherQueueName, rejectDuplicates);
}
@Override
public int sendMessagesToDeadLetterAddress(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.sendMessagesToDeadLetterAddress(filter);
} finally {
blockOnIO();
}
}
@Override
public String sendMessage(final Map<String, String> headers,
final int type,
final String body,
boolean durable,
final String user,
final String password) throws Exception {
// Management-initiated send: authorizes the supplied credentials for SEND on
// this queue, builds a core message and routes it directly to THIS queue,
// bypassing normal address routing via the HDR_ROUTE_TO_IDS property.
try {
securityStore.check(queue.getAddress(), queue.getName(), CheckType.SEND, new SecurityAuth() {
@Override
public String getUsername() {
return user;
}
@Override
public String getPassword() {
return password;
}
@Override
public RemotingConnection getRemotingConnection() {
// No real connection backs a management-initiated send.
return null;
}
});
CoreMessage message = new CoreMessage(storageManager.generateID(), 50);
if (headers != null) {
for (String header : headers.keySet()) {
message.putStringProperty(new SimpleString(header), new SimpleString(headers.get(header)));
}
}
message.setType((byte) type);
message.setDurable(durable);
message.setTimestamp(System.currentTimeMillis());
if (body != null) {
// Text bodies are written as a nullable SimpleString; any other type is
// expected to arrive base64-encoded.
if (type == Message.TEXT_TYPE) {
message.getBodyBuffer().writeNullableSimpleString(new SimpleString(body));
} else {
message.getBodyBuffer().writeBytes(Base64.decode(body));
}
}
message.setAddress(queue.getAddress());
// Pin the message to this specific queue id so the post office skips
// address-based routing.
ByteBuffer buffer = ByteBuffer.allocate(8);
buffer.putLong(queue.getID());
message.putBytesProperty(Message.HDR_ROUTE_TO_IDS, buffer.array());
postOffice.route(message, true);
return "" + message.getMessageID();
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
}
}
@Override
public boolean sendMessageToDeadLetterAddress(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.sendMessageToDeadLetterAddress(messageID);
} finally {
blockOnIO();
}
}
@Override
public int changeMessagesPriority(final String filterStr, final int newPriority) throws Exception {
checkStarted();
clearIO();
try {
if (newPriority < 0 || newPriority > 9) {
throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority);
}
Filter filter = FilterImpl.createFilter(filterStr);
return queue.changeReferencesPriority(filter, (byte) newPriority);
} finally {
blockOnIO();
}
}
@Override
public boolean changeMessagePriority(final long messageID, final int newPriority) throws Exception {
checkStarted();
clearIO();
try {
if (newPriority < 0 || newPriority > 9) {
throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority);
}
return queue.changeReferencePriority(messageID, (byte) newPriority);
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounter() {
checkStarted();
clearIO();
try {
return counter.toJSon();
} catch (Exception e) {
throw new IllegalStateException(e);
} finally {
blockOnIO();
}
}
@Override
public void resetMessageCounter() {
checkStarted();
clearIO();
try {
counter.resetCounter();
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterAsHTML() {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterAsHTML(new MessageCounter[]{counter});
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterHistory() throws Exception {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterHistory(counter);
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterHistoryAsHTML() {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterHistoryAsHTML(new MessageCounter[]{counter});
} finally {
blockOnIO();
}
}
@Override
public void pause() {
checkStarted();
clearIO();
try {
queue.pause();
} finally {
blockOnIO();
}
}
@Override
public void pause(boolean persist) {
checkStarted();
clearIO();
try {
queue.pause(persist);
} finally {
blockOnIO();
}
}
@Override
public void resume() {
checkStarted();
clearIO();
try {
queue.resume();
} finally {
blockOnIO();
}
}
@Override
public boolean isPaused() throws Exception {
checkStarted();
clearIO();
try {
return queue.isPaused();
} finally {
blockOnIO();
}
}
@Override
public CompositeData[] browse(int page, int pageSize) throws Exception {
   // Browses one page of messages and returns them as JMX CompositeData.
   // No filter is applied: 'filter' stays null so createFilter returns null
   // and the match test below short-circuits to "accept all".
   String filter = null;
   checkStarted();
   clearIO();
   try {
      long index = 0;
      // Fix: compute the window in long arithmetic. The original
      // "(page - 1) * pageSize" / "page * pageSize" multiplied as int and
      // could overflow before being widened to long, corrupting the window
      // for large page numbers/sizes.
      long start = (long) (page - 1) * pageSize;
      long end = Math.min((long) page * pageSize, queue.getMessageCount());
      ArrayList<CompositeData> c = new ArrayList<>();
      Filter thefilter = FilterImpl.createFilter(filter);
      queue.flushExecutor();
      try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
         try {
            while (iterator.hasNext() && index < end) {
               MessageReference ref = iterator.next();
               if (thefilter == null || thefilter.match(ref.getMessage())) {
                  // Skip messages before the requested window.
                  if (index >= start) {
                     c.add(OpenTypeSupport.convert(ref));
                  }
               }
               index++;
            }
         } catch (NoSuchElementException ignored) {
            // this could happen through paging browsing
         }
         CompositeData[] rc = new CompositeData[c.size()];
         c.toArray(rc);
         return rc;
      }
   } catch (ActiveMQException e) {
      throw new IllegalStateException(e.getMessage());
   } finally {
      blockOnIO();
   }
}
@Override
public CompositeData[] browse() throws Exception {
return browse(null);
}
@Override
public CompositeData[] browse(String filter) throws Exception {
   // Browses up to management-browse-page-size messages that match the
   // (optional) filter and returns them as JMX CompositeData.
   checkStarted();
   clearIO();
   try {
      final int pageSize = addressSettingsRepository.getMatch(queue.getName().toString()).getManagementBrowsePageSize();
      final Filter matchFilter = FilterImpl.createFilter(filter);
      final List<CompositeData> converted = new ArrayList<>();
      queue.flushExecutor();
      int seen = 0;
      try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
         try {
            while (iterator.hasNext() && seen++ < pageSize) {
               MessageReference ref = iterator.next();
               if (matchFilter == null || matchFilter.match(ref.getMessage())) {
                  converted.add(OpenTypeSupport.convert(ref));
               }
            }
         } catch (NoSuchElementException ignored) {
            // can happen while browsing a paged queue; treat as end of data
         }
         return converted.toArray(new CompositeData[converted.size()]);
      }
   } catch (ActiveMQException e) {
      throw new IllegalStateException(e.getMessage());
   } finally {
      blockOnIO();
   }
}
@Override
public void flushExecutor() {
checkStarted();
clearIO();
try {
queue.flushExecutor();
} finally {
blockOnIO();
}
}
@Override
public void resetAllGroups() {
checkStarted();
clearIO();
try {
queue.resetAllGroups();
} finally {
blockOnIO();
}
}
@Override
public void resetGroup(String groupID) {
checkStarted();
clearIO();
try {
queue.resetGroup(SimpleString.toSimpleString(groupID));
} finally {
blockOnIO();
}
}
@Override
public int getGroupCount() {
checkStarted();
clearIO();
try {
return queue.getGroupCount();
} finally {
blockOnIO();
}
}
@Override
public String listGroupsAsJSON() throws Exception {
   // Serializes this queue's message groups (group id -> owning consumer
   // details) as a JSON array. Only ServerConsumer entries are reported.
   checkStarted();
   clearIO();
   try {
      JsonArrayBuilder result = JsonLoader.createArrayBuilder();
      for (Map.Entry<SimpleString, Consumer> entry : queue.getGroups().entrySet()) {
         if (!(entry.getValue() instanceof ServerConsumer)) {
            continue;
         }
         ServerConsumer consumer = (ServerConsumer) entry.getValue();
         result.add(JsonLoader.createObjectBuilder()
                       .add("groupID", entry.getKey().toString())
                       .add("consumerID", consumer.getID())
                       .add("connectionID", consumer.getConnectionID().toString())
                       .add("sessionID", consumer.getSessionID())
                       .add("browseOnly", consumer.isBrowseOnly())
                       .add("creationTime", consumer.getCreationTime()));
      }
      return result.build().toString();
   } finally {
      blockOnIO();
   }
}
@Override
public String listConsumersAsJSON() throws Exception {
   // Serializes this queue's attached consumers as a JSON array.
   // Only ServerConsumer instances are reported.
   checkStarted();
   clearIO();
   try {
      JsonArrayBuilder result = JsonLoader.createArrayBuilder();
      for (Consumer candidate : queue.getConsumers()) {
         if (!(candidate instanceof ServerConsumer)) {
            continue;
         }
         ServerConsumer consumer = (ServerConsumer) candidate;
         result.add(JsonLoader.createObjectBuilder()
                       .add("consumerID", consumer.getID())
                       .add("connectionID", consumer.getConnectionID().toString())
                       .add("sessionID", consumer.getSessionID())
                       .add("browseOnly", consumer.isBrowseOnly())
                       .add("creationTime", consumer.getCreationTime()));
      }
      return result.build().toString();
   } finally {
      blockOnIO();
   }
}
@Override
protected MBeanOperationInfo[] fillMBeanOperationInfo() {
return MBeanInfoHelper.getMBeanOperationsInfo(QueueControl.class);
}
@Override
protected MBeanAttributeInfo[] fillMBeanAttributeInfo() {
return MBeanInfoHelper.getMBeanAttributesInfo(QueueControl.class);
}
@Override
public void resetMessagesAdded() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesAdded();
} finally {
blockOnIO();
}
}
@Override
public void resetMessagesAcknowledged() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesAcknowledged();
} finally {
blockOnIO();
}
}
@Override
public void resetMessagesExpired() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesExpired();
} finally {
blockOnIO();
}
}
@Override
public void resetMessagesKilled() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesKilled();
} finally {
blockOnIO();
}
}
// Package protected ---------------------------------------------
// Protected -----------------------------------------------------
// Private -------------------------------------------------------
private void checkStarted() {
   // Management operations are rejected until the broker's post office is
   // running; queue state is not reliable before then.
   if (!postOffice.isStarted()) {
      throw new IllegalStateException("Broker is not started. Queue can not be managed yet");
   }
}
// Inner classes -------------------------------------------------
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.management.impl;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanOperationInfo;
import javax.management.openmbean.CompositeData;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.JsonUtil;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.management.QueueControl;
import org.apache.activemq.artemis.core.filter.Filter;
import org.apache.activemq.artemis.core.filter.impl.FilterImpl;
import org.apache.activemq.artemis.core.management.impl.openmbean.OpenTypeSupport;
import org.apache.activemq.artemis.core.message.impl.CoreMessage;
import org.apache.activemq.artemis.core.messagecounter.MessageCounter;
import org.apache.activemq.artemis.core.messagecounter.impl.MessageCounterHelper;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.postoffice.Binding;
import org.apache.activemq.artemis.core.postoffice.PostOffice;
import org.apache.activemq.artemis.core.security.CheckType;
import org.apache.activemq.artemis.core.security.SecurityAuth;
import org.apache.activemq.artemis.core.security.SecurityStore;
import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle;
import org.apache.activemq.artemis.core.server.Consumer;
import org.apache.activemq.artemis.core.server.MessageReference;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.server.ServerConsumer;
import org.apache.activemq.artemis.core.settings.HierarchicalRepository;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.utils.Base64;
import org.apache.activemq.artemis.utils.JsonLoader;
import org.apache.activemq.artemis.utils.collections.LinkedListIterator;
public class QueueControlImpl extends AbstractControl implements QueueControl {
public static final int FLUSH_LIMIT = 500;
public static final String UNDEFINED = "*_UNDEFINED_*";
// Constants -----------------------------------------------------
// Attributes ----------------------------------------------------
private final Queue queue;
private final String address;
private final PostOffice postOffice;
private final StorageManager storageManager;
private final SecurityStore securityStore;
private final HierarchicalRepository<AddressSettings> addressSettingsRepository;
private MessageCounter counter;
// Static --------------------------------------------------------
private static String toJSON(final Map<String, Object>[] messages) {
   // Render the message property maps as a JSON array literal.
   return toJSONMsgArray(messages).toString();
}
private static JsonArray toJSONMsgArray(final Map<String, Object>[] messages) {
   // Convert each message's property map into a JSON object, preserving
   // the array order, and collect them into a JSON array.
   JsonArrayBuilder builder = JsonLoader.createArrayBuilder();
   for (Map<String, Object> messageMap : messages) {
      builder.add(JsonUtil.toJsonObject(messageMap));
   }
   return builder.build();
}
private static String toJSON(final Map<String, Map<String, Object>[]> messages) {
   // One JSON object per consumer:
   // { "consumerName": <key>, "elements": [<message maps>...] }.
   JsonArrayBuilder consumers = JsonLoader.createArrayBuilder();
   for (Map.Entry<String, Map<String, Object>[]> entry : messages.entrySet()) {
      consumers.add(JsonLoader.createObjectBuilder()
                       .add("consumerName", entry.getKey())
                       .add("elements", toJSONMsgArray(entry.getValue())));
   }
   return consumers.build().toString();
}
// Constructors --------------------------------------------------
public QueueControlImpl(final Queue queue,
                        final String address,
                        final PostOffice postOffice,
                        final StorageManager storageManager,
                        final SecurityStore securityStore,
                        final HierarchicalRepository<AddressSettings> addressSettingsRepository) throws Exception {
   // Management control for a single queue. All collaborators are supplied
   // by the server; this class only delegates to them and serializes results.
   super(QueueControl.class, storageManager);
   this.queue = queue;
   this.address = address;
   this.postOffice = postOffice;
   this.storageManager = storageManager;
   this.securityStore = securityStore;
   this.addressSettingsRepository = addressSettingsRepository;
}
// Public --------------------------------------------------------
public void setMessageCounter(final MessageCounter counter) {
this.counter = counter;
}
// QueueControlMBean implementation ------------------------------
@Override
public String getName() {
clearIO();
try {
return queue.getName().toString();
} finally {
blockOnIO();
}
}
@Override
public String getAddress() {
checkStarted();
return address;
}
@Override
public String getFilter() {
checkStarted();
clearIO();
try {
Filter filter = queue.getFilter();
return filter != null ? filter.getFilterString().toString() : null;
} finally {
blockOnIO();
}
}
@Override
public boolean isDurable() {
checkStarted();
clearIO();
try {
return queue.isDurable();
} finally {
blockOnIO();
}
}
@Override
public String getUser() {
checkStarted();
clearIO();
try {
SimpleString user = queue.getUser();
return user == null ? null : user.toString();
} finally {
blockOnIO();
}
}
@Override
public String getRoutingType() {
checkStarted();
clearIO();
try {
return queue.getRoutingType().toString();
} finally {
blockOnIO();
}
}
@Override
public boolean isTemporary() {
checkStarted();
clearIO();
try {
return queue.isTemporary();
} finally {
blockOnIO();
}
}
@Override
public long getMessageCount() {
checkStarted();
clearIO();
try {
return queue.getMessageCount();
} finally {
blockOnIO();
}
}
@Override
public long getPersistentSize() {
checkStarted();
clearIO();
try {
return queue.getPersistentSize();
} finally {
blockOnIO();
}
}
@Override
public long getDurableMessageCount() {
checkStarted();
clearIO();
try {
return queue.getDurableMessageCount();
} finally {
blockOnIO();
}
}
@Override
public long getDurablePersistentSize() {
checkStarted();
clearIO();
try {
return queue.getDurablePersistentSize();
} finally {
blockOnIO();
}
}
@Override
public int getConsumerCount() {
checkStarted();
clearIO();
try {
return queue.getConsumerCount();
} finally {
blockOnIO();
}
}
@Override
public int getDeliveringCount() {
checkStarted();
clearIO();
try {
return queue.getDeliveringCount();
} finally {
blockOnIO();
}
}
@Override
public long getDeliveringSize() {
checkStarted();
clearIO();
try {
return queue.getDeliveringSize();
} finally {
blockOnIO();
}
}
@Override
public int getDurableDeliveringCount() {
checkStarted();
clearIO();
try {
return queue.getDurableDeliveringCount();
} finally {
blockOnIO();
}
}
@Override
public long getDurableDeliveringSize() {
checkStarted();
clearIO();
try {
return queue.getDurableDeliveringSize();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesAdded() {
checkStarted();
clearIO();
try {
return queue.getMessagesAdded();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesAcknowledged() {
checkStarted();
clearIO();
try {
return queue.getMessagesAcknowledged();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesExpired() {
checkStarted();
clearIO();
try {
return queue.getMessagesExpired();
} finally {
blockOnIO();
}
}
@Override
public long getMessagesKilled() {
checkStarted();
clearIO();
try {
return queue.getMessagesKilled();
} finally {
blockOnIO();
}
}
@Override
public long getID() {
checkStarted();
clearIO();
try {
return queue.getID();
} finally {
blockOnIO();
}
}
@Override
public long getScheduledCount() {
checkStarted();
clearIO();
try {
return queue.getScheduledCount();
} finally {
blockOnIO();
}
}
@Override
public long getScheduledSize() {
checkStarted();
clearIO();
try {
return queue.getScheduledSize();
} finally {
blockOnIO();
}
}
@Override
public long getDurableScheduledCount() {
checkStarted();
clearIO();
try {
return queue.getDurableScheduledCount();
} finally {
blockOnIO();
}
}
@Override
public long getDurableScheduledSize() {
checkStarted();
clearIO();
try {
return queue.getDurableScheduledSize();
} finally {
blockOnIO();
}
}
@Override
public String getDeadLetterAddress() {
checkStarted();
clearIO();
try {
AddressSettings addressSettings = addressSettingsRepository.getMatch(address);
if (addressSettings != null && addressSettings.getDeadLetterAddress() != null) {
return addressSettings.getDeadLetterAddress().toString();
}
return null;
} finally {
blockOnIO();
}
}
@Override
public String getExpiryAddress() {
checkStarted();
clearIO();
try {
AddressSettings addressSettings = addressSettingsRepository.getMatch(address);
if (addressSettings != null && addressSettings.getExpiryAddress() != null) {
return addressSettings.getExpiryAddress().toString();
} else {
return null;
}
} finally {
blockOnIO();
}
}
@Override
public int getMaxConsumers() {
checkStarted();
clearIO();
try {
return queue.getMaxConsumers();
} finally {
blockOnIO();
}
}
@Override
public boolean isPurgeOnNoConsumers() {
checkStarted();
clearIO();
try {
return queue.isPurgeOnNoConsumers();
} finally {
blockOnIO();
}
}
@Override
public boolean isExclusive() {
checkStarted();
clearIO();
try {
return queue.isExclusive();
} finally {
blockOnIO();
}
}
@Override
public boolean isLastValue() {
checkStarted();
clearIO();
try {
return queue.isLastValue();
} finally {
blockOnIO();
}
}
@Override
public Map<String, Object>[] listScheduledMessages() throws Exception {
checkStarted();
clearIO();
try {
List<MessageReference> refs = queue.getScheduledMessages();
return convertMessagesToMaps(refs);
} finally {
blockOnIO();
}
}
@Override
public String listScheduledMessagesAsJSON() throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listScheduledMessages());
} finally {
blockOnIO();
}
}
/**
 * Converts message references to their property-map form for JSON/JMX output.
 *
 * @param refs the references to convert; each contributes one map, in order
 * @return one map per reference, as produced by each message's {@code toMap()}
 * @throws ActiveMQException propagated from message conversion
 */
private Map<String, Object>[] convertMessagesToMaps(List<MessageReference> refs) throws ActiveMQException {
   Map<String, Object>[] messages = new Map[refs.size()];
   int i = 0;
   for (MessageReference ref : refs) {
      Message message = ref.getMessage();
      messages[i++] = message.toMap();
   }
   return messages;
}
@Override
public Map<String, Map<String, Object>[]> listDeliveringMessages() throws ActiveMQException {
checkStarted();
clearIO();
try {
Map<String, List<MessageReference>> msgs = queue.getDeliveringMessages();
Map<String, Map<String, Object>[]> msgRet = new HashMap<>();
for (Map.Entry<String, List<MessageReference>> entry : msgs.entrySet()) {
msgRet.put(entry.getKey(), convertMessagesToMaps(entry.getValue()));
}
return msgRet;
} finally {
blockOnIO();
}
}
@Override
public String listDeliveringMessagesAsJSON() throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listDeliveringMessages());
} finally {
blockOnIO();
}
}
@Override
public Map<String, Object>[] listMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
List<Map<String, Object>> messages = new ArrayList<>();
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
try {
while (iterator.hasNext()) {
MessageReference ref = iterator.next();
if (filter == null || filter.match(ref.getMessage())) {
Message message = ref.getMessage();
messages.add(message.toMap());
}
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
return messages.toArray(new Map[messages.size()]);
}
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public String listMessagesAsJSON(final String filter) throws Exception {
checkStarted();
clearIO();
try {
return QueueControlImpl.toJSON(listMessages(filter));
} finally {
blockOnIO();
}
}
protected Map<String, Object>[] getFirstMessage() throws Exception {
   // Returns the head message as a one-element array of property maps.
   // NOTE: when the queue is empty the result is still a length-1 array
   // whose single element is null (toArray(new Map[1]) pads with null) —
   // callers such as getFirstMessageTimestamp explicitly check element [0].
   checkStarted();
   clearIO();
   try {
      List<Map<String, Object>> messages = new ArrayList<>();
      queue.flushExecutor();
      try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
         // returns just the first, as it's the first only
         if (iterator.hasNext()) {
            MessageReference ref = iterator.next();
            Message message = ref.getMessage();
            messages.add(message.toMap());
         }
         return messages.toArray(new Map[1]);
      }
   } finally {
      blockOnIO();
   }
}
@Override
public String getFirstMessageAsJSON() throws Exception {
return toJSON(getFirstMessage());
}
@Override
public Long getFirstMessageTimestamp() throws Exception {
   // Timestamp of the message at the head of the queue, or null when the
   // queue is empty or the head message carries no "timestamp" entry.
   Map<String, Object>[] messages = getFirstMessage();
   if (messages == null || messages.length == 0) {
      return null;
   }
   Map<String, Object> first = messages[0];
   if (first == null || !first.containsKey("timestamp")) {
      return null;
   }
   return (Long) first.get("timestamp");
}
@Override
public Long getFirstMessageAge() throws Exception {
   // Age in milliseconds of the head message, or null when no timestamp is
   // available (empty queue or untimestamped message).
   Long timestamp = getFirstMessageTimestamp();
   return timestamp == null ? null : System.currentTimeMillis() - timestamp.longValue();
}
@Override
public long countMessages() throws Exception {
return countMessages(null);
}
@Override
public long countMessages(final String filterStr) throws Exception {
   // Counts messages on the queue matching filterStr. createFilter returns
   // null for a null/empty filter string, which is answered from the
   // queue's message count without iterating at all.
   checkStarted();
   clearIO();
   try {
      Filter filter = FilterImpl.createFilter(filterStr);
      if (filter == null) {
         return getMessageCount();
      } else {
         try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
            int count = 0;
            try {
               while (iterator.hasNext()) {
                  MessageReference ref = iterator.next();
                  if (filter.match(ref.getMessage())) {
                     count++;
                  }
               }
            } catch (NoSuchElementException ignored) {
               // this could happen through paging browsing
            }
            return count;
         }
      }
   } finally {
      blockOnIO();
   }
}
@Override
public String countMessagesProperty(final String filter) throws Exception {
   // Groups the queue's messages by the value of the named string property
   // and returns {"value": count, ...} as JSON. A null property name, and
   // messages lacking the property, are bucketed under UNDEFINED.
   checkStarted();
   clearIO();
   try {
      try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
         Map<String, Integer> counts = new HashMap<>();
         String propertyName = filter == null ? UNDEFINED : filter;
         try {
            while (iterator.hasNext()) {
               String value = iterator.next().getMessage().getStringProperty(propertyName);
               counts.merge(value == null ? UNDEFINED : value, 1, Integer::sum);
            }
         } catch (NoSuchElementException ignored) {
            // can happen while browsing a paged queue; treat as end of data
         }
         return JsonUtil.toJsonObject(counts).toString();
      }
   } finally {
      blockOnIO();
   }
}
@Override
public boolean removeMessage(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.deleteReference(messageID);
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public int removeMessages(final String filterStr) throws Exception {
return removeMessages(FLUSH_LIMIT, filterStr);
}
@Override
public int removeMessages(final int flushLimit, final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.deleteMatchingReferences(flushLimit, filter);
} finally {
blockOnIO();
}
}
@Override
public int removeAllMessages() throws Exception {
return removeMessages(FLUSH_LIMIT, null);
}
@Override
public boolean expireMessage(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.expireReference(messageID);
} finally {
blockOnIO();
}
}
@Override
public int expireMessages(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.expireReferences(filter);
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public boolean retryMessage(final long messageID) throws Exception {
   // Retries the single message with the given ID (moves it back to its
   // original queue). Returns true if a matching message was retried.
   checkStarted();
   clearIO();
   try {
      Filter singleMessageFilter = new Filter() {
         @Override
         public boolean match(Message message) {
            return message.getMessageID() == messageID;
         }

         @Override
         public SimpleString getFilterString() {
            // Fix: embed the actual ID instead of the literal text
            // "messageID" so diagnostics identify the targeted message.
            return new SimpleString("custom filter for MESSAGEID=" + messageID);
         }
      };
      return queue.retryMessages(singleMessageFilter) > 0;
   } finally {
      blockOnIO();
   }
}
@Override
public int retryMessages() throws Exception {
checkStarted();
clearIO();
try {
return queue.retryMessages(null);
} finally {
blockOnIO();
}
}
@Override
public boolean moveMessage(final long messageID, final String otherQueueName) throws Exception {
return moveMessage(messageID, otherQueueName, false);
}
@Override
public boolean moveMessage(final long messageID,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
checkStarted();
clearIO();
try {
Binding binding = postOffice.getBinding(new SimpleString(otherQueueName));
if (binding == null) {
throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName);
}
return queue.moveReference(messageID, binding.getAddress(), binding, rejectDuplicates);
} finally {
blockOnIO();
}
}
@Override
public int moveMessages(final String filterStr, final String otherQueueName) throws Exception {
return moveMessages(filterStr, otherQueueName, false);
}
@Override
public int moveMessages(final int flushLimit,
                        final String filterStr,
                        final String otherQueueName,
                        final boolean rejectDuplicates) throws Exception {
   // Moves every message matching filterStr to the named queue, working in
   // batches of flushLimit. Returns the number of messages moved.
   checkStarted();
   clearIO();
   try {
      Filter filter = FilterImpl.createFilter(filterStr);
      Binding binding = postOffice.getBinding(new SimpleString(otherQueueName));
      if (binding == null) {
         throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName);
      }
      return queue.moveReferences(flushLimit, filter, binding.getAddress(), rejectDuplicates, binding);
   } finally {
      blockOnIO();
   }
}
@Override
public int moveMessages(final String filterStr,
final String otherQueueName,
final boolean rejectDuplicates) throws Exception {
return moveMessages(FLUSH_LIMIT, filterStr, otherQueueName, rejectDuplicates);
}
@Override
public int sendMessagesToDeadLetterAddress(final String filterStr) throws Exception {
checkStarted();
clearIO();
try {
Filter filter = FilterImpl.createFilter(filterStr);
return queue.sendMessagesToDeadLetterAddress(filter);
} finally {
blockOnIO();
}
}
@Override
public String sendMessage(final Map<String, String> headers,
                          final int type,
                          final String body,
                          boolean durable,
                          final String user,
                          final String password) throws Exception {
   // Sends a management-produced message directly to this queue, bypassing
   // normal producers. Returns the new message's ID as a string.
   // NOTE(review): unlike the other operations in this class, this method
   // does not call checkStarted()/clearIO() — confirm that is intentional.
   try {
      // Authorize the caller for SEND on this address/queue with the
      // supplied credentials; no remoting connection is involved.
      securityStore.check(queue.getAddress(), queue.getName(), CheckType.SEND, new SecurityAuth() {
         @Override
         public String getUsername() {
            return user;
         }

         @Override
         public String getPassword() {
            return password;
         }

         @Override
         public RemotingConnection getRemotingConnection() {
            return null;
         }
      });
      // 50: presumably an initial buffer-size hint — confirm against the
      // CoreMessage constructor.
      CoreMessage message = new CoreMessage(storageManager.generateID(), 50);
      if (headers != null) {
         for (String header : headers.keySet()) {
            message.putStringProperty(new SimpleString(header), new SimpleString(headers.get(header)));
         }
      }
      message.setType((byte) type);
      message.setDurable(durable);
      message.setTimestamp(System.currentTimeMillis());
      if (body != null) {
         if (type == Message.TEXT_TYPE) {
            message.getBodyBuffer().writeNullableSimpleString(new SimpleString(body));
         } else {
            // Non-text bodies are expected base64-encoded.
            message.getBodyBuffer().writeBytes(Base64.decode(body));
         }
      }
      message.setAddress(queue.getAddress());
      // Route directly to this queue by stamping its ID into the
      // route-to-ids header property.
      ByteBuffer buffer = ByteBuffer.allocate(8);
      buffer.putLong(queue.getID());
      message.putBytesProperty(Message.HDR_ROUTE_TO_IDS, buffer.array());
      postOffice.route(message, true);
      return "" + message.getMessageID();
   } catch (ActiveMQException e) {
      // NOTE(review): wrapping only e.getMessage() drops the cause/stack;
      // consider new IllegalStateException(e) — left unchanged here.
      throw new IllegalStateException(e.getMessage());
   }
}
@Override
public boolean sendMessageToDeadLetterAddress(final long messageID) throws Exception {
checkStarted();
clearIO();
try {
return queue.sendMessageToDeadLetterAddress(messageID);
} finally {
blockOnIO();
}
}
@Override
public int changeMessagesPriority(final String filterStr, final int newPriority) throws Exception {
checkStarted();
clearIO();
try {
if (newPriority < 0 || newPriority > 9) {
throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority);
}
Filter filter = FilterImpl.createFilter(filterStr);
return queue.changeReferencesPriority(filter, (byte) newPriority);
} finally {
blockOnIO();
}
}
@Override
public boolean changeMessagePriority(final long messageID, final int newPriority) throws Exception {
checkStarted();
clearIO();
try {
if (newPriority < 0 || newPriority > 9) {
throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority);
}
return queue.changeReferencePriority(messageID, (byte) newPriority);
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounter() {
checkStarted();
clearIO();
try {
return counter.toJSon();
} catch (Exception e) {
throw new IllegalStateException(e);
} finally {
blockOnIO();
}
}
@Override
public void resetMessageCounter() {
checkStarted();
clearIO();
try {
counter.resetCounter();
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterAsHTML() {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterAsHTML(new MessageCounter[]{counter});
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterHistory() throws Exception {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterHistory(counter);
} finally {
blockOnIO();
}
}
@Override
public String listMessageCounterHistoryAsHTML() {
checkStarted();
clearIO();
try {
return MessageCounterHelper.listMessageCounterHistoryAsHTML(new MessageCounter[]{counter});
} finally {
blockOnIO();
}
}
@Override
public void pause() {
checkStarted();
clearIO();
try {
queue.pause();
} finally {
blockOnIO();
}
}
@Override
public void pause(boolean persist) {
checkStarted();
clearIO();
try {
queue.pause(persist);
} finally {
blockOnIO();
}
}
@Override
public void resume() {
checkStarted();
clearIO();
try {
queue.resume();
} finally {
blockOnIO();
}
}
@Override
public boolean isPaused() throws Exception {
checkStarted();
clearIO();
try {
return queue.isPaused();
} finally {
blockOnIO();
}
}
@Override
public CompositeData[] browse(int page, int pageSize) throws Exception {
   // Browses one page of messages and returns them as JMX CompositeData.
   // No filter is applied: 'filter' stays null so createFilter returns null
   // and the match test below short-circuits to "accept all".
   String filter = null;
   checkStarted();
   clearIO();
   try {
      long index = 0;
      // Fix: compute the window in long arithmetic. The original
      // "(page - 1) * pageSize" / "page * pageSize" multiplied as int and
      // could overflow before being widened to long, corrupting the window
      // for large page numbers/sizes.
      long start = (long) (page - 1) * pageSize;
      long end = Math.min((long) page * pageSize, queue.getMessageCount());
      ArrayList<CompositeData> c = new ArrayList<>();
      Filter thefilter = FilterImpl.createFilter(filter);
      queue.flushExecutor();
      try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
         try {
            while (iterator.hasNext() && index < end) {
               MessageReference ref = iterator.next();
               if (thefilter == null || thefilter.match(ref.getMessage())) {
                  // Skip messages before the requested window.
                  if (index >= start) {
                     c.add(OpenTypeSupport.convert(ref));
                  }
               }
               index++;
            }
         } catch (NoSuchElementException ignored) {
            // this could happen through paging browsing
         }
         CompositeData[] rc = new CompositeData[c.size()];
         c.toArray(rc);
         return rc;
      }
   } catch (ActiveMQException e) {
      throw new IllegalStateException(e.getMessage());
   } finally {
      blockOnIO();
   }
}
@Override
public CompositeData[] browse() throws Exception {
return browse(null);
}
@Override
public CompositeData[] browse(String filter) throws Exception {
checkStarted();
clearIO();
try {
int pageSize = addressSettingsRepository.getMatch(queue.getName().toString()).getManagementBrowsePageSize();
int currentPageSize = 0;
ArrayList<CompositeData> c = new ArrayList<>();
Filter thefilter = FilterImpl.createFilter(filter);
queue.flushExecutor();
try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) {
try {
while (iterator.hasNext() && currentPageSize++ < pageSize) {
MessageReference ref = iterator.next();
if (thefilter == null || thefilter.match(ref.getMessage())) {
c.add(OpenTypeSupport.convert(ref));
}
}
} catch (NoSuchElementException ignored) {
// this could happen through paging browsing
}
CompositeData[] rc = new CompositeData[c.size()];
c.toArray(rc);
return rc;
}
} catch (ActiveMQException e) {
throw new IllegalStateException(e.getMessage());
} finally {
blockOnIO();
}
}
@Override
public void flushExecutor() {
checkStarted();
clearIO();
try {
queue.flushExecutor();
} finally {
blockOnIO();
}
}
@Override
public void resetAllGroups() {
checkStarted();
clearIO();
try {
queue.resetAllGroups();
} finally {
blockOnIO();
}
}
@Override
public void resetGroup(String groupID) {
checkStarted();
clearIO();
try {
queue.resetGroup(SimpleString.toSimpleString(groupID));
} finally {
blockOnIO();
}
}
@Override
public int getGroupCount() {
checkStarted();
clearIO();
try {
return queue.getGroupCount();
} finally {
blockOnIO();
}
}
@Override
public String listGroupsAsJSON() throws Exception {
checkStarted();
clearIO();
try {
Map<SimpleString, Consumer> groups = queue.getGroups();
JsonArrayBuilder jsonArray = JsonLoader.createArrayBuilder();
for (Map.Entry<SimpleString, Consumer> group : groups.entrySet()) {
if (group.getValue() instanceof ServerConsumer) {
ServerConsumer serverConsumer = (ServerConsumer) group.getValue();
JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("groupID", group.getKey().toString()).add("consumerID", serverConsumer.getID()).add("connectionID", serverConsumer.getConnectionID().toString()).add("sessionID", serverConsumer.getSessionID()).add("browseOnly", serverConsumer.isBrowseOnly()).add("creationTime", serverConsumer.getCreationTime());
jsonArray.add(obj);
}
}
return jsonArray.build().toString();
} finally {
blockOnIO();
}
}
/**
 * Lists the consumers attached to this queue as a JSON array string.
 *
 * Each element reports the consumer id, connection id, session id, browse-only
 * flag and creation time. Consumers that are not ServerConsumer instances are
 * silently skipped, mirroring {@link #listGroupsAsJSON()}.
 *
 * @return a JSON array string, one object per attached ServerConsumer
 * @throws Exception if reading the queue's consumer collection fails
 */
@Override
public String listConsumersAsJSON() throws Exception {
   checkStarted();
   clearIO();
   try {
      JsonArrayBuilder result = JsonLoader.createArrayBuilder();
      for (Consumer candidate : queue.getConsumers()) {
         if (!(candidate instanceof ServerConsumer)) {
            continue;
         }
         ServerConsumer consumer = (ServerConsumer) candidate;
         JsonObjectBuilder consumerJson = JsonLoader.createObjectBuilder();
         consumerJson.add("consumerID", consumer.getID());
         consumerJson.add("connectionID", consumer.getConnectionID().toString());
         consumerJson.add("sessionID", consumer.getSessionID());
         consumerJson.add("browseOnly", consumer.isBrowseOnly());
         consumerJson.add("creationTime", consumer.getCreationTime());
         result.add(consumerJson);
      }
      return result.build().toString();
   } finally {
      blockOnIO();
   }
}
/**
 * Supplies the JMX operation metadata for this control, derived reflectively
 * from the QueueControl management interface.
 *
 * @return MBean operation descriptors for QueueControl
 */
@Override
protected MBeanOperationInfo[] fillMBeanOperationInfo() {
return MBeanInfoHelper.getMBeanOperationsInfo(QueueControl.class);
}
/**
 * Supplies the JMX attribute metadata for this control, derived reflectively
 * from the QueueControl management interface.
 *
 * @return MBean attribute descriptors for QueueControl
 */
@Override
protected MBeanAttributeInfo[] fillMBeanAttributeInfo() {
return MBeanInfoHelper.getMBeanAttributesInfo(QueueControl.class);
}
/**
 * Resets this queue's messages-added counter.
 *
 * Standard management-call bracket applies (checkStarted / clearIO / blockOnIO);
 * the statement order is intentional.
 *
 * @throws IllegalStateException if the broker has not been started yet
 * @throws Exception if the underlying queue fails to reset the counter
 */
@Override
public void resetMessagesAdded() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesAdded();
} finally {
blockOnIO();
}
}
/**
 * Resets this queue's messages-acknowledged counter.
 *
 * Standard management-call bracket applies (checkStarted / clearIO / blockOnIO);
 * the statement order is intentional.
 *
 * @throws IllegalStateException if the broker has not been started yet
 * @throws Exception if the underlying queue fails to reset the counter
 */
@Override
public void resetMessagesAcknowledged() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesAcknowledged();
} finally {
blockOnIO();
}
}
/**
 * Resets this queue's messages-expired counter.
 *
 * Standard management-call bracket applies (checkStarted / clearIO / blockOnIO);
 * the statement order is intentional.
 *
 * @throws IllegalStateException if the broker has not been started yet
 * @throws Exception if the underlying queue fails to reset the counter
 */
@Override
public void resetMessagesExpired() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesExpired();
} finally {
blockOnIO();
}
}
/**
 * Resets this queue's messages-killed counter.
 *
 * Standard management-call bracket applies (checkStarted / clearIO / blockOnIO);
 * the statement order is intentional.
 *
 * @throws IllegalStateException if the broker has not been started yet
 * @throws Exception if the underlying queue fails to reset the counter
 */
@Override
public void resetMessagesKilled() throws Exception {
checkStarted();
clearIO();
try {
queue.resetMessagesKilled();
} finally {
blockOnIO();
}
}
// Package protected ---------------------------------------------
// Protected -----------------------------------------------------
// Private -------------------------------------------------------
/**
 * Guard invoked at the top of every management operation: rejects calls made
 * before the broker's post office has been started.
 *
 * @throws IllegalStateException when the post office is not yet started
 */
private void checkStarted() {
   if (postOffice.isStarted()) {
      return;
   }
   throw new IllegalStateException("Broker is not started. Queue can not be managed yet");
}
// Inner classes -------------------------------------------------
}
 | [ARTEMIS-2022] Adjust checkstyle
 | artemis-server/src/main/java/org/apache/activemq/artemis/core/management/impl/QueueControlImpl.java | [ARTEMIS-2022] Adjust checkstyle
<ide> while (iterator.hasNext()) {
<ide> MessageReference ref = iterator.next();
<ide> String messageProperty = ref.getMessage().getStringProperty(propertySearch);
<del> messageProperty = messageProperty == null ? UNDEFINED : messageProperty ;
<add> messageProperty = messageProperty == null ? UNDEFINED : messageProperty;
<ide> Integer value = result.getOrDefault(messageProperty, 0);
<ide> result.put(messageProperty, ++value);
<ide> } |
|
JavaScript | mit | 012c1c1b38c2f224ffd32136bc443f25a353e692 | 0 | hilongjw/vue-recyclerview | /*!
* Vue-RecyclerView.js v0.3.7
* (c) 2017 Awe <[email protected]>
* Released under the MIT License.
*/
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.RecyclerView=e()}(this,function(){"use strict";function t(t){if(!t)return l;if(u.test(t.type)){var e=t.touches[0];return{x:e.clientX,y:e.clientY}}return c.test(t.type)?{x:t.clientX,y:t.clientY}:l}function e(t,e){for(var i in e)if(e[i].test(t[i]))return!0;return!1}function i(t,e){if(null==t)throw new TypeError("Cannot convert undefined or null to object");for(var i=Object(t),s=1;s<arguments.length;s++){var o=arguments[s];if(o)for(var n in o)Object.prototype.hasOwnProperty.call(o,n)&&(i[n]=o[n])}return i}function s(t,e){for(var i=0,s=t.length;i<s;i++)if(e(t[i],i))return t[i]}function o(t,e,i){this.RUNWAY_ITEMS=i.prerender,this.RUNWAY_ITEMS_OPPOSITE=i.remain,this.ANIMATION_DURATION_MS=i.animation_duration_ms,this.TOMBSTONE_CLASS=i.tombstone_class,this.INVISIBLE_CLASS=i.invisible_class,this.MAX_COUNT=d,this.column=i.column||1,this.waterflow=i.waterflow,this.anchorItem={index:0,offset:0},this.timer=null,this.firstAttachedItem_=0,this.lastAttachedItem_=0,this.anchorScrollTop=0,this.tombstoneSize_=0,this.tombstoneWidth_=0,this.tombstones_=[],this.scroller_=t,this.source_=e,this.items_=i.list||[],this.loadedItems_=0,this.requestInProgress_=!1,this.cacheVM=i.cacheVM,this.options=i,this.source_.fetch||this.setItems(i.list),this.curPos=0,this.unusedNodes=[],this.baseNode=document.createElement("div"),this.scroller_.addEventListener("scroll",this.onScroll_.bind(this)),window.addEventListener("resize",this.onResize_.bind(this)),window.addEventListener("orientationchange",this.onResize_.bind(this)),this.initPosList(),this.onResize_()}function n(t){var e=(arguments.length>1&&void 0!==arguments[1]&&arguments[1],y(t));return t.component(e.name,e),e}var r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof 
Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},h=(function(){function t(t){this.value=t}function e(e){function i(t,e){return new Promise(function(i,o){var h={key:t,arg:e,resolve:i,reject:o,next:null};r?r=r.next=h:(n=r=h,s(t,e))})}function s(i,n){try{var r=e[i](n),h=r.value;h instanceof t?Promise.resolve(h.value).then(function(t){s("next",t)},function(t){s("throw",t)}):o(r.done?"return":"normal",r.value)}catch(t){o("throw",t)}}function o(t,e){switch(t){case"return":n.resolve({value:e,done:!0});break;case"throw":n.reject(e);break;default:n.resolve({value:e,done:!1})}n=n.next,n?s(n.key,n.arg):r=null}var n,r;this._invoke=i,"function"!=typeof e.return&&(this.return=void 0)}"function"==typeof Symbol&&Symbol.asyncIterator&&(e.prototype[Symbol.asyncIterator]=function(){return this}),e.prototype.next=function(t){return this._invoke("next",t)},e.prototype.throw=function(t){return this._invoke("throw",t)},e.prototype.return=function(t){return this._invoke("return",t)}}(),function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}),a=function(){function t(t,e){for(var i=0;i<e.length;i++){var s=e[i];s.enumerable=s.enumerable||!1,s.configurable=!0,"value"in s&&(s.writable=!0),Object.defineProperty(t,s.key,s)}}return function(e,i,s){return i&&t(e.prototype,i),s&&t(e,s),e}}();Object.keys||(Object.keys=function(){var t=Object.prototype.hasOwnProperty,e=!{toString:null}.propertyIsEnumerable("toString"),i=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],s=i.length;return function(o){if("object"!==(void 0===o?"undefined":r(o))&&"function"!=typeof o||null===o)throw new TypeError("Object.keys called on non-object");var n=[];for(var h in o)t.call(o,h)&&n.push(h);if(e)for(var a=0;a<s;a++)t.call(o,i[a])&&n.push(i[a]);return n}}());var 
l={x:0,y:0},c=/mouse(down|move|up)/,u=/touch(start|move|end)/,m=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(t){window.setTimeout(t,1e3/60)},d=1/0;o.prototype={onResize_:function(){var t=this.source_.createTombstone(this.baseNode.cloneNode(!0));t.style.position="absolute",this.scroller_.appendChild(t),t.classList.remove(this.INVISIBLE_CLASS),this.tombstoneSize_=t.offsetHeight/this.column,this.tombstoneWidth_=t.offsetWidth,this.scroller_.removeChild(t);for(var e=0;e<this.items_.length;e++)this.items_[e].top=-1,this.items_[e].height=this.items_[e].width=this.items_[e].cacheHeightCount=0;this.onScroll_()},onScroll_:function(){var t=this.scroller_.scrollTop-this.anchorScrollTop;0==this.scroller_.scrollTop?this.anchorItem={index:0,offset:0}:this.anchorItem=this.calculateAnchoredItem(this.anchorItem,t),this.anchorScrollTop=this.scroller_.scrollTop;var e=this.calculateAnchoredItem(this.anchorItem,this.scroller_.offsetHeight);t<0?this.fill(this.anchorItem.index-this.RUNWAY_ITEMS,e.index+this.RUNWAY_ITEMS_OPPOSITE):this.fill(this.anchorItem.index-this.RUNWAY_ITEMS_OPPOSITE,e.index+this.RUNWAY_ITEMS)},calculateAnchoredItem:function(t,e){if(0===e)return t;e+=t.offset;var i=t.index,s=0;if(e<0){for(;e<0&&i>0&&this.items_[i-1].height;)e+=this.items_[i-1].height,i--;s=Math.max(-i,Math.ceil(Math.min(e,0)/this.tombstoneSize_))}else{for(;e>0&&i<this.items_.length&&this.items_[i].height&&this.items_[i].height<e;)e-=this.items_[i].height,i++;(i>=this.items_.length||!this.items_[i].height)&&(s=Math.floor(Math.max(e,0)/this.tombstoneSize_))}return i+=s,e-=s*this.tombstoneSize_,i=Math.min(i,this.MAX_COUNT-1),{index:Math.floor(i/this.column)*this.column,offset:e}},fill:function(t,e){this.firstAttachedItem_=Math.max(0,t),this.lastAttachedItem_=e,this.attachContent()},getTombstone:function(){var t=this.tombstones_.pop();return 
t?(t.classList.remove(this.INVISIBLE_CLASS),t.style.opacity=1,t.style.transform="",t.style.transition="",t):this.source_.createTombstone(this.baseNode.cloneNode(!0))},layoutInView:function(t){var e=this.posList.get(Math.floor(t/this.column)-1,t%this.column);return!e||Math.abs(e-this.anchorScrollTop)<2*window.innerHeight},getUnUsedNodes:function(t){if(this.waterflow)for(var e=0,i=this.items_.length;e<i;e++)!this.items_[e].node||!t&&this.layoutInView(e)||(this.items_[e].vm?this.clearItem(this.items_[e]):this.clearTombstone(this.items_[e]),this.items_[e].vm=null,this.items_[e].node=null);else for(var s=0,o=this.items_.length;s<o;s++)s!==this.firstAttachedItem_?(this.items_[s].vm?this.clearItem(this.items_[s]):this.clearTombstone(this.items_[s]),this.items_[s].vm=null,this.items_[s].node=null):s=this.lastAttachedItem_-1},clearItem:function(t){if(this.options.reuseVM)this.scroller_.removeChild(t.node),this.source_.free(t.data);else{if(this.cacheVM&&t.node)return this.scroller_.removeChild(t.node);t.vm.$destroy(),t.node&&this.unusedNodes.push(t.node)}},clearTombstone:function(t){t.node&&(t.node.classList.contains(this.TOMBSTONE_CLASS)?(this.tombstones_.push(t.node),this.tombstones_[this.tombstones_.length-1].classList.add(this.INVISIBLE_CLASS)):this.unusedNodes.push(t.node))},clearUnUsedNodes:function(){for(;this.unusedNodes.length;)this.scroller_.removeChild(this.unusedNodes.pop())},getNodePosition:function(){this.anchorScrollTop=0;for(var t=0;t<this.anchorItem.index;t++)this.anchorScrollTop+=this.items_[t].height||this.tombstoneSize_;this.anchorScrollTop+=this.anchorItem.offset,this.curPos=this.anchorScrollTop-this.anchorItem.offset;for(var e=this.anchorItem.index;e>this.firstAttachedItem_;)this.curPos-=this.items_[e-1].height||this.tombstoneSize_,e--;for(;e<this.firstAttachedItem_;)this.curPos+=this.items_[e].height||this.tombstoneSize_,e++},initPosList:function(){for(var 
t={},e=0,i=this.column;e<i;e++)t[e]=this.curPos;this.posList={data:{0:t},get:function(t,e){if(!this.data[t]){for(var i={},s=0,o=this.column;s<o;s++)i[s]=this.curPos;this.data[t]=i}return void 0===e?this.data[t]:this.data[t][e]},set:function(t,e,i){this.get(t)[e]=i}}},tombstoneLayout:function(t){var e=void 0,i=void 0,s=void 0;for(e in t)i=t[e],s=e%this.column*this.items_[e].width,this.items_[e].node.style.transform="translate3d("+s+"px,"+(this.anchorScrollTop+i[1])*this.column+"px, 0) scale("+this.tombstoneWidth_/this.items_[e].width+", "+this.tombstoneSize_/this.items_[e].height+")",this.items_[e].node.offsetTop,i[0].offsetTop,this.items_[e].node.style.transition="transform "+this.ANIMATION_DURATION_MS+"ms"},itemLayout:function(t){var e=void 0,i=void 0,s=0,o=0,n=0;for(e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)i=t[e],this.waterflow&&(n=Math.floor(e/this.column)),s=e%this.column*(this.items_[e].width||this.tombstoneWidth_),o=this.waterflow?this.posList.get(n,e%this.column):this.curPos,i&&(i[0].style.transition="transform "+this.ANIMATION_DURATION_MS+"ms, opacity "+this.ANIMATION_DURATION_MS+"ms",i[0].style.transform="translate3d("+s+"px,"+o+"px, 0) scale("+this.items_[e].width/this.tombstoneWidth_+", "+this.items_[e].height/this.tombstoneSize_+")",i[0].style.opacity=0),this.curPos!==this.items_[e].top&&(i||(this.items_[e].node.style.transition=""),this.items_[e].node.style.transform="translate3d("+s+"px,"+o+"px, 0)"),this.items_[e].top=o,(e+1)%this.column==0&&(this.curPos+=(this.items_[e].height||this.tombstoneSize_)*this.column),this.waterflow&&this.posList.set(n+1,e%this.column,o+(this.items_[e].height||this.tombstoneSize_)*this.column)},setAnimatePosition:function(t){this.tombstoneLayout(t),this.itemLayout(t)},renderItems:function(){var t={},e=void 0,i=[],s=void 
0,o=Math.floor((this.lastAttachedItem_+this.RUNWAY_ITEMS)/this.column)*this.column;for(o>this.MAX_COUNT&&(this.lastAttachedItem_=this.MAX_COUNT),s=this.firstAttachedItem_;s<this.lastAttachedItem_;s++){for(;this.items_.length<=s;)this.addItem_();if(this.items_[s].node){if(!this.items_[s].node.classList.contains(this.TOMBSTONE_CLASS)||!this.items_[s].data)continue;this.ANIMATION_DURATION_MS?(this.items_[s].node.style.zIndex=1,t[s]=[this.items_[s].node,this.items_[s].top-this.anchorScrollTop]):(this.items_[s].node.classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(this.items_[s].node)),this.items_[s].node=null}e=this.items_[s].data?this.source_.render(this.items_[s].data,this.unusedNodes.pop()||this.baseNode.cloneNode(!0),this.items_[s]):this.getTombstone(),e.style.position="absolute",this.items_[s].top=-1,this.items_[s].node=e,i.push(e)}var n=i.length;for(s=0;s<n;s++)this.scroller_.appendChild(i[s]);return t},cacheItemHeight:function(t){for(var e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)!this.items_[e].data||!t&&this.items_[e].height?this.items_[e].cacheHeightCount<10&&(this.items_[e].cacheHeightCount++,this.items_[e].height&&this.items_[e].node&&this.items_[e].height!==this.items_[e].node.offsetHeight/this.column&&(this.items_[e].height=this.items_[e].node.offsetHeight/this.column)):(this.items_[e].height=this.items_[e].node.offsetHeight/this.column,this.items_[e].width=this.items_[e].node.offsetWidth,this.items_[e].cacheHeightCount=0)},attachContent:function(){var t=this;this.getUnUsedNodes();var e=this.renderItems();this.clearUnUsedNodes(),this.cacheItemHeight(),this.getNodePosition(),this.setAnimatePosition(e),this.ANIMATION_DURATION_MS&&setTimeout(function(){t.tombstoneAnimation(e)},this.ANIMATION_DURATION_MS),this.maybeRequestContent()},setItems:function(t){t=t||[],this.items_=t,this.MAX_COUNT=t.length},scrollToIndex:function(t){var 
e=this.lastAttachedItem_-this.firstAttachedItem_;this.fill(t-e,t+1)},setScrollRunway:function(){this.scrollRunwayEnd_=Math.max(this.scrollRunwayEnd_,this.curPos+this.SCROLL_RUNWAY),this.scrollRunway_.style.transform="translate(0, "+this.scrollRunwayEnd_+"px)",this.scroller_.scrollTop=this.anchorScrollTop},tombstoneAnimation:function(t){var e=void 0;for(var i in t)e=t[i],e[0].classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(e[0]);t=null},maybeRequestContent:function(){var t=this;if(!this.requestInProgress_){var e=this.lastAttachedItem_-this.loadedItems_;e<=0||(this.requestInProgress_=!0,this.source_.fetch&&this.source_.fetch(e,this.loadedItems_).then(function(e){t.MAX_COUNT=e.count,t.addContent(e.list)}))}},addItem_:function(){this.items_.push({vm:null,data:null,node:null,height:0,width:0,top:0})},addContent:function(t){if(t.length){this.requestInProgress_=!1;for(var e=void 0,i=0;i<t.length;i++)this.items_.length<=this.loadedItems_&&this.addItem_(),this.loadedItems_<=this.MAX_COUNT&&(e=this.loadedItems_++,this.items_[e].data=t[i]);this.attachContent()}},clear:function(){this.loadedItems_=0,this.requestInProgress_=!1,this.firstAttachedItem_=-1,this.lastAttachedItem_=-1,this.getUnUsedNodes(!0),this.clearUnUsedNodes(),this.items_=[],this.onResize_()},destroy:function(){this.scroller_.removeEventListener("scroll",this.onScroll_),window.removeEventListener("resize",this.onResize_),window.removeEventListener("orientationchange",this.onResize_),this.clear()}};var f=function(){function t(e,i){var o=this;h(this,t),this.itemRender=i.item,this.TombstoneRender=i.tombstone,this.fetch=i.fetch,this.Vue=e,this.options=i,this.itemCache={data:{},length:0,get:function(t){return this.data[t]},set:function(t,e){this.length++,this.data[t]=e,this.length>i.cacheVM&&i.cacheVM>50&&this.recycle(10,t)},recycle:function(t,e){for(var i=void 
0,s=Object.keys(this.data),o=s.length;t;)t--,i=s[Math.floor(Math.random()*o)],this.data[i]&&this.length--&&this.data[i].$destroy(),this.data[i]=null}},this.reuseVM={queue:[],generate:function(t,e){var i=s(o.reuseVM.queue,function(t){return!t.inuse});if(i)i.vm.data=t,i.inuse=!0,i.id=t.id;else{var n={props:{data:t}};o.options.props.data=t,o.options.props&&Object.keys(o.options.props).map(function(t){n.props[t]=o.options.props[t]});var r={el:e,data:n.props,render:function(t){return t(o.itemRender,n)}};i={id:t.id,inuse:!0,vm:new o.Vue(r)},o.reuseVM.queue.push(i)}return i.vm},free:function(t){s(this.queue,function(e){return e.id===t}).inuse=!1},destroy:function(t,e){for(var i=0,s=this.queue.length;i<s;i++)(this.queue[i].id===t||e)&&(this.queue.vm&&this.queue.vm.$destroy(),this.queue.splice(i,1))}}}return a(t,[{key:"createTombstone",value:function(t){var e=this;return new this.Vue({el:t,render:function(t){return t(e.TombstoneRender)}}).$el}},{key:"free",value:function(t){this.reuseVM.free(t.id)}},{key:"render",value:function(t,e,i){var s=this;if(this.options.reuseVM){var o=this.reuseVM.generate(t,e);return i.vm=o,o.$el}var n=void 0,r={props:{data:t}};this.options.props.data=t,this.options.props&&Object.keys(this.options.props).map(function(t){r.props[t]=s.options.props[t]});var h={el:e,render:function(t){return t(s.itemRender,r)}};return this.options.cacheVM?(n=this.itemCache.get(t.id))?(i.vm=n,n.$el):(n=new this.Vue(h),this.itemCache.set(t.id,n),i.vm=n,n.$el):(n=new this.Vue(h),i.vm=n,n.$el)}},{key:"destroy",value:function(){return this.reuseVM.destroy(null,!0),this.reuseVM.queue}}]),t}(),_={render:function(t){return t("div",{attrs:{class:"recyclerview-loading"}},"Loading...")}},p={render:function(t){return t("div",{attrs:{class:"recyclerview-item 
tombstone"},style:{height:"100px",width:"100%"}},"")}},v={preventDefaultException:{tagName:/^(INPUT|TEXTAREA|BUTTON|SELECT|IMG)$/},distance:50,animation_duration_ms:200,tombstone_class:"tombstone",invisible_class:"invisible",prerender:20,remain:10,preventDefault:!1,column:1,waterflow:!1,cacheVM:0,reuseVM:!1,props:{}},y=function(s){return{name:"RecyclerView",props:{fetch:Function,list:Array,item:Object,loading:Object,tombstone:{type:Object,default:function(){return p}},column:Number,prerender:Number,remain:Number,waterflow:Boolean,preventDefault:Boolean,options:Object,tag:{type:String,default:"div"}},render:function(t){return t(this.tag,{attrs:{class:"recyclerview-container"}},[t(this.loading||_),t(this.tag,{attrs:{class:"recyclerview"},on:{touchstart:this._start,touchmove:this._move,touchend:this._end,touchcancel:this._end,mousedown:this._start,mousemove:this._move,mouseup:this._end}})])},data:function(){return{startPointer:{x:0,y:0},_options:{},distance:0,pulling:!1,_contentSource:null,scroller:null}},mounted:function(){this.init()},beforeDestroy:function(){this.scroller.destroy(),this.scroller=null},methods:{init:function(){this._options=i({},v,{prerender:this.prerender||v.prerender,remain:this.remain||v.remain,column:this.column||v.column,waterflow:this.waterflow||v.waterflow,fetch:this.fetch,list:this.list,item:this.item,loading:this.loading,tombstone:this.tombstone},this.options),this._contentSource=new f(s,this._options),this.$list=this.$el.querySelector(".recyclerview"),this.scroller=new o(this.$list,this._contentSource,this._options),this.$emit("inited")},scrollToIndex:function(t){var e=this;if(this.waterflow)for(var i=0,s=this.scroller.items_.length;i<s;i++)i===t&&this._scrollTo(this.scroller.items_[i].top-this.scroller.items_[i].height*this._options.column+this.$list.offsetWidth);else 
t=Number(t),this.scroller.scrollToIndex(t),this.$nextTick(function(){e._scrollToBottom()})},_scrollTo:function(t){t=t||0,this.$list.scrollTop=Number(t)},_scrollToBottom:function(){this._scrollTo(this.$list.scrollHeight)},_renderListStyle:function(){this.$list.style.transform="translate3d(0, "+this.distance+"px, 0)"},_start:function(i){this.$list.scrollTop>0||(this.pulling=!0,this.startPointer=t(i),this.$list.style.transition="transform .2s",this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault())},_move:function(i){if(this.pulling){var s=t(i),o=s.y-this.startPointer.y;if(o<0)return void this._scrollTo(-o);this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault(),this.distance=Math.floor(.5*o),this.distance>this._options.distance&&(this.distance=this._options.distance),m(this._renderListStyle.bind(this))}},_end:function(t){var i=this;this.pulling&&(this.preventDefault&&!e(t.target,this._options.preventDefaultException)&&t.preventDefault(),this.pulling=!1,this.$list.style.transition="transform .3s",this.$nextTick(function(){i.$list.style.transform=""}),this.distance>=this._options.distance&&(this.distance=0,this.scroller.clear()))}}}};!function(t,e){if("undefined"==typeof document)return e;t=t||"";var i=document.head||document.getElementsByTagName("head")[0],s=document.createElement("style");s.type="text/css",s.styleSheet?s.styleSheet.cssText=t:s.appendChild(document.createTextNode(t)),i.appendChild(s)}(".recyclerview-container{position:relative}.recyclerview-loading{position:absolute;top:0;left:0;width:100%;text-align:center;padding:10px;font-size:14px;color:#9e9e9e}.recyclerview{background:#fff;margin:0;padding:0;overflow-x:hidden;overflow-y:scroll;-webkit-overflow-scrolling:touch;width:100%;height:100%;position:absolute;box-sizing:border-box;contain:layout;will-change:transform}",void 0);var g={install:n};return"undefined"!=typeof window&&window.Vue&&window.Vue.use(n),g});
| dist/vue-recyclerview.js | /*!
* Vue-RecyclerView.js v0.3.6
* (c) 2017 Awe <[email protected]>
* Released under the MIT License.
*/
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.RecyclerView=e()}(this,function(){"use strict";function t(t){if(!t)return l;if(u.test(t.type)){var e=t.touches[0];return{x:e.clientX,y:e.clientY}}return c.test(t.type)?{x:t.clientX,y:t.clientY}:l}function e(t,e){for(var i in e)if(e[i].test(t[i]))return!0;return!1}function i(t,e){if(null==t)throw new TypeError("Cannot convert undefined or null to object");for(var i=Object(t),s=1;s<arguments.length;s++){var o=arguments[s];if(o)for(var n in o)Object.prototype.hasOwnProperty.call(o,n)&&(i[n]=o[n])}return i}function s(t,e){for(var i=0,s=t.length;i<s;i++)if(e(t[i],i))return t[i]}function o(t,e,i){this.RUNWAY_ITEMS=i.prerender,this.RUNWAY_ITEMS_OPPOSITE=i.remain,this.ANIMATION_DURATION_MS=i.animation_duration_ms,this.TOMBSTONE_CLASS=i.tombstone_class,this.INVISIBLE_CLASS=i.invisible_class,this.MAX_COUNT=d,this.column=i.column||1,this.waterflow=i.waterflow,this.anchorItem={index:0,offset:0},this.timer=null,this.firstAttachedItem_=0,this.lastAttachedItem_=0,this.anchorScrollTop=0,this.tombstoneSize_=0,this.tombstoneWidth_=0,this.tombstones_=[],this.scroller_=t,this.source_=e,this.items_=i.list||[],this.loadedItems_=0,this.requestInProgress_=!1,this.cacheVM=i.cacheVM,this.options=i,this.source_.fetch||this.setItems(i.list),this.curPos=0,this.unusedNodes=[],this.baseNode=document.createElement("div"),this.scroller_.addEventListener("scroll",this.onScroll_.bind(this)),window.addEventListener("resize",this.onResize_.bind(this)),window.addEventListener("orientationchange",this.onResize_.bind(this)),this.initPosList(),this.onResize_()}function n(t){var e=(arguments.length>1&&void 0!==arguments[1]&&arguments[1],y(t));return t.component(e.name,e),e}var r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof 
Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},h=(function(){function t(t){this.value=t}function e(e){function i(t,e){return new Promise(function(i,o){var h={key:t,arg:e,resolve:i,reject:o,next:null};r?r=r.next=h:(n=r=h,s(t,e))})}function s(i,n){try{var r=e[i](n),h=r.value;h instanceof t?Promise.resolve(h.value).then(function(t){s("next",t)},function(t){s("throw",t)}):o(r.done?"return":"normal",r.value)}catch(t){o("throw",t)}}function o(t,e){switch(t){case"return":n.resolve({value:e,done:!0});break;case"throw":n.reject(e);break;default:n.resolve({value:e,done:!1})}n=n.next,n?s(n.key,n.arg):r=null}var n,r;this._invoke=i,"function"!=typeof e.return&&(this.return=void 0)}"function"==typeof Symbol&&Symbol.asyncIterator&&(e.prototype[Symbol.asyncIterator]=function(){return this}),e.prototype.next=function(t){return this._invoke("next",t)},e.prototype.throw=function(t){return this._invoke("throw",t)},e.prototype.return=function(t){return this._invoke("return",t)}}(),function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}),a=function(){function t(t,e){for(var i=0;i<e.length;i++){var s=e[i];s.enumerable=s.enumerable||!1,s.configurable=!0,"value"in s&&(s.writable=!0),Object.defineProperty(t,s.key,s)}}return function(e,i,s){return i&&t(e.prototype,i),s&&t(e,s),e}}();Object.keys||(Object.keys=function(){var t=Object.prototype.hasOwnProperty,e=!{toString:null}.propertyIsEnumerable("toString"),i=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],s=i.length;return function(o){if("object"!==(void 0===o?"undefined":r(o))&&"function"!=typeof o||null===o)throw new TypeError("Object.keys called on non-object");var n=[];for(var h in o)t.call(o,h)&&n.push(h);if(e)for(var a=0;a<s;a++)t.call(o,i[a])&&n.push(i[a]);return n}}());var 
l={x:0,y:0},c=/mouse(down|move|up)/,u=/touch(start|move|end)/,m=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(t){window.setTimeout(t,1e3/60)},d=1/0;o.prototype={onResize_:function(){var t=this.source_.createTombstone(this.baseNode.cloneNode(!0));t.style.position="absolute",this.scroller_.appendChild(t),t.classList.remove(this.INVISIBLE_CLASS),this.tombstoneSize_=t.offsetHeight/this.column,this.tombstoneWidth_=t.offsetWidth,this.scroller_.removeChild(t);for(var e=0;e<this.items_.length;e++)this.items_[e].top=-1,this.items_[e].height=this.items_[e].width=this.items_[e].cacheHeightCount=0;this.onScroll_()},onScroll_:function(){var t=this.scroller_.scrollTop-this.anchorScrollTop;0==this.scroller_.scrollTop?this.anchorItem={index:0,offset:0}:this.anchorItem=this.calculateAnchoredItem(this.anchorItem,t),this.anchorScrollTop=this.scroller_.scrollTop;var e=this.calculateAnchoredItem(this.anchorItem,this.scroller_.offsetHeight);t<0?this.fill(this.anchorItem.index-this.RUNWAY_ITEMS,e.index+this.RUNWAY_ITEMS_OPPOSITE):this.fill(this.anchorItem.index-this.RUNWAY_ITEMS_OPPOSITE,e.index+this.RUNWAY_ITEMS)},calculateAnchoredItem:function(t,e){if(0===e)return t;e+=t.offset;var i=t.index,s=0;if(e<0){for(;e<0&&i>0&&this.items_[i-1].height;)e+=this.items_[i-1].height,i--;s=Math.max(-i,Math.ceil(Math.min(e,0)/this.tombstoneSize_))}else{for(;e>0&&i<this.items_.length&&this.items_[i].height&&this.items_[i].height<e;)e-=this.items_[i].height,i++;(i>=this.items_.length||!this.items_[i].height)&&(s=Math.floor(Math.max(e,0)/this.tombstoneSize_))}return i+=s,e-=s*this.tombstoneSize_,i=Math.min(i,this.MAX_COUNT-1),{index:Math.floor(i/this.column)*this.column,offset:e}},fill:function(t,e){this.firstAttachedItem_=Math.max(0,t),this.lastAttachedItem_=e,this.attachContent()},getTombstone:function(){var t=this.tombstones_.pop();return 
t?(t.classList.remove(this.INVISIBLE_CLASS),t.style.opacity=1,t.style.transform="",t.style.transition="",t):this.source_.createTombstone(this.baseNode.cloneNode(!0))},layoutInView:function(t){var e=this.posList.get(Math.floor(t/this.column)-1,t%this.column);return!e||Math.abs(e-this.anchorScrollTop)<2*window.innerHeight},getUnUsedNodes:function(){if(this.waterflow)for(var t=0,e=this.items_.length;t<e;t++)this.layoutInView(t),this.items_[t].node&&!this.layoutInView(t)&&(this.items_[t].vm?this.clearItem(this.items_[t]):this.clearTombstone(this.items_[t]),this.items_[t].vm=null,this.items_[t].node=null);else for(var i=0,s=this.items_.length;i<s;i++)i!==this.firstAttachedItem_?(this.items_[i].vm?this.clearItem(this.items_[i]):this.clearTombstone(this.items_[i]),this.items_[i].vm=null,this.items_[i].node=null):i=this.lastAttachedItem_-1},clearItem:function(t){if(this.options.reuseVM)this.scroller_.removeChild(t.node),this.source_.free(t.data);else{if(this.cacheVM&&t.node)return this.scroller_.removeChild(t.node);t.vm.$destroy(),t.node&&this.unusedNodes.push(t.node)}},clearTombstone:function(t){t.node&&(t.node.classList.contains(this.TOMBSTONE_CLASS)?(this.tombstones_.push(t.node),this.tombstones_[this.tombstones_.length-1].classList.add(this.INVISIBLE_CLASS)):this.unusedNodes.push(t.node))},clearUnUsedNodes:function(){for(;this.unusedNodes.length;)this.scroller_.removeChild(this.unusedNodes.pop())},getNodePosition:function(){this.anchorScrollTop=0;for(var t=0;t<this.anchorItem.index;t++)this.anchorScrollTop+=this.items_[t].height||this.tombstoneSize_;this.anchorScrollTop+=this.anchorItem.offset,this.curPos=this.anchorScrollTop-this.anchorItem.offset;for(var e=this.anchorItem.index;e>this.firstAttachedItem_;)this.curPos-=this.items_[e-1].height||this.tombstoneSize_,e--;for(;e<this.firstAttachedItem_;)this.curPos+=this.items_[e].height||this.tombstoneSize_,e++},initPosList:function(){for(var 
t={},e=0,i=this.column;e<i;e++)t[e]=this.curPos;this.posList={data:{0:t},get:function(t,e){if(!this.data[t]){for(var i={},s=0,o=this.column;s<o;s++)i[s]=this.curPos;this.data[t]=i}return void 0===e?this.data[t]:this.data[t][e]},set:function(t,e,i){this.get(t)[e]=i}}},tombstoneLayout:function(t){var e=void 0,i=void 0,s=void 0;for(e in t)i=t[e],s=e%this.column*this.items_[e].width,this.items_[e].node.style.transform="translate3d("+s+"px,"+(this.anchorScrollTop+i[1])*this.column+"px, 0) scale("+this.tombstoneWidth_/this.items_[e].width+", "+this.tombstoneSize_/this.items_[e].height+")",this.items_[e].node.offsetTop,i[0].offsetTop,this.items_[e].node.style.transition="transform "+this.ANIMATION_DURATION_MS+"ms"},itemLayout:function(t){var e=void 0,i=void 0,s=0,o=0,n=0;for(e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)i=t[e],this.waterflow&&(n=Math.floor(e/this.column)),s=e%this.column*(this.items_[e].width||this.tombstoneWidth_),o=this.waterflow?this.posList.get(n,e%this.column):this.curPos,i&&(i[0].style.transition="transform "+this.ANIMATION_DURATION_MS+"ms, opacity "+this.ANIMATION_DURATION_MS+"ms",i[0].style.transform="translate3d("+s+"px,"+o+"px, 0) scale("+this.items_[e].width/this.tombstoneWidth_+", "+this.items_[e].height/this.tombstoneSize_+")",i[0].style.opacity=0),this.curPos!==this.items_[e].top&&(i||(this.items_[e].node.style.transition=""),this.items_[e].node.style.transform="translate3d("+s+"px,"+o+"px, 0)"),this.items_[e].top=o,(e+1)%this.column==0&&(this.curPos+=(this.items_[e].height||this.tombstoneSize_)*this.column),this.waterflow&&this.posList.set(n+1,e%this.column,o+(this.items_[e].height||this.tombstoneSize_)*this.column)},setAnimatePosition:function(t){this.tombstoneLayout(t),this.itemLayout(t)},renderItems:function(){var t={},e=void 0,i=[],s=void 
0,o=Math.floor((this.lastAttachedItem_+this.RUNWAY_ITEMS)/this.column)*this.column;for(o>this.MAX_COUNT&&(this.lastAttachedItem_=this.MAX_COUNT),s=this.firstAttachedItem_;s<this.lastAttachedItem_;s++){for(;this.items_.length<=s;)this.addItem_();if(this.items_[s].node){if(!this.items_[s].node.classList.contains(this.TOMBSTONE_CLASS)||!this.items_[s].data)continue;this.ANIMATION_DURATION_MS?(this.items_[s].node.style.zIndex=1,t[s]=[this.items_[s].node,this.items_[s].top-this.anchorScrollTop]):(this.items_[s].node.classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(this.items_[s].node)),this.items_[s].node=null}e=this.items_[s].data?this.source_.render(this.items_[s].data,this.unusedNodes.pop()||this.baseNode.cloneNode(!0),this.items_[s]):this.getTombstone(),e.style.position="absolute",this.items_[s].top=-1,this.items_[s].node=e,i.push(e)}var n=i.length;for(s=0;s<n;s++)this.scroller_.appendChild(i[s]);return t},cacheItemHeight:function(t){for(var e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)!this.items_[e].data||!t&&this.items_[e].height?this.items_[e].cacheHeightCount<10&&(this.items_[e].cacheHeightCount++,this.items_[e].height&&this.items_[e].node&&this.items_[e].height!==this.items_[e].node.offsetHeight/this.column&&(this.items_[e].height=this.items_[e].node.offsetHeight/this.column)):(this.items_[e].height=this.items_[e].node.offsetHeight/this.column,this.items_[e].width=this.items_[e].node.offsetWidth,this.items_[e].cacheHeightCount=0)},attachContent:function(){var t=this;this.getUnUsedNodes();var e=this.renderItems();this.clearUnUsedNodes(),this.cacheItemHeight(),this.getNodePosition(),this.setAnimatePosition(e),this.ANIMATION_DURATION_MS&&setTimeout(function(){t.tombstoneAnimation(e)},this.ANIMATION_DURATION_MS),this.maybeRequestContent()},setItems:function(t){t=t||[],this.items_=t,this.MAX_COUNT=t.length},scrollToIndex:function(t){var 
e=this.lastAttachedItem_-this.firstAttachedItem_;this.fill(t-e,t+1)},setScrollRunway:function(){this.scrollRunwayEnd_=Math.max(this.scrollRunwayEnd_,this.curPos+this.SCROLL_RUNWAY),this.scrollRunway_.style.transform="translate(0, "+this.scrollRunwayEnd_+"px)",this.scroller_.scrollTop=this.anchorScrollTop},tombstoneAnimation:function(t){var e=void 0;for(var i in t)e=t[i],e[0].classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(e[0]);t=null},maybeRequestContent:function(){var t=this;if(!this.requestInProgress_){var e=this.lastAttachedItem_-this.loadedItems_;e<=0||(this.requestInProgress_=!0,this.source_.fetch&&this.source_.fetch(e,this.loadedItems_).then(function(e){t.MAX_COUNT=e.count,t.addContent(e.list)}))}},addItem_:function(){this.items_.push({vm:null,data:null,node:null,height:0,width:0,top:0})},addContent:function(t){if(t.length){this.requestInProgress_=!1;for(var e=void 0,i=0;i<t.length;i++)this.items_.length<=this.loadedItems_&&this.addItem_(),this.loadedItems_<=this.MAX_COUNT&&(e=this.loadedItems_++,this.items_[e].data=t[i]);this.attachContent()}},clear:function(){this.loadedItems_=0,this.requestInProgress_=!1,this.firstAttachedItem_=-1,this.lastAttachedItem_=-1,this.getUnUsedNodes(),this.clearUnUsedNodes(),this.items_=[],this.onResize_()},destroy:function(){this.scroller_.removeEventListener("scroll",this.onScroll_),window.removeEventListener("resize",this.onResize_),window.removeEventListener("orientationchange",this.onResize_),this.clear()}};var f=function(){function t(e,i){var o=this;h(this,t),this.itemRender=i.item,this.TombstoneRender=i.tombstone,this.fetch=i.fetch,this.Vue=e,this.options=i,this.itemCache={data:{},length:0,get:function(t){return this.data[t]},set:function(t,e){this.length++,this.data[t]=e,this.length>i.cacheVM&&i.cacheVM>50&&this.recycle(10,t)},recycle:function(t,e){for(var i=void 
0,s=Object.keys(this.data),o=s.length;t;)t--,i=s[Math.floor(Math.random()*o)],this.data[i]&&this.length--&&this.data[i].$destroy(),this.data[i]=null}},this.reuseVM={queue:[],generate:function(t,e){var i=s(o.reuseVM.queue,function(t){return!t.inuse});if(i)i.vm.data=t,i.inuse=!0,i.id=t.id;else{var n={props:{data:t}};o.options.props.data=t,o.options.props&&Object.keys(o.options.props).map(function(t){n.props[t]=o.options.props[t]});var r={el:e,data:n.props,render:function(t){return t(o.itemRender,n)}};i={id:t.id,inuse:!0,vm:new o.Vue(r)},o.reuseVM.queue.push(i)}return i.vm},free:function(t){s(this.queue,function(e){return e.id===t}).inuse=!1},destroy:function(t,e){for(var i=0,s=this.queue.length;i<s;i++)(this.queue[i].id===t||e)&&(this.queue.vm&&this.queue.vm.$destroy(),this.queue.splice(i,1))}}}return a(t,[{key:"createTombstone",value:function(t){var e=this;return new this.Vue({el:t,render:function(t){return t(e.TombstoneRender)}}).$el}},{key:"free",value:function(t){this.reuseVM.free(t.id)}},{key:"render",value:function(t,e,i){var s=this;if(this.options.reuseVM){var o=this.reuseVM.generate(t,e);return i.vm=o,o.$el}var n=void 0,r={props:{data:t}};this.options.props.data=t,this.options.props&&Object.keys(this.options.props).map(function(t){r.props[t]=s.options.props[t]});var h={el:e,render:function(t){return t(s.itemRender,r)}};return this.options.cacheVM?(n=this.itemCache.get(t.id))?(i.vm=n,n.$el):(n=new this.Vue(h),this.itemCache.set(t.id,n),i.vm=n,n.$el):(n=new this.Vue(h),i.vm=n,n.$el)}},{key:"destroy",value:function(){this.reuseVM.destroy(null,!0)}}]),t}(),_={render:function(t){return t("div",{attrs:{class:"recyclerview-loading"}},"Loading...")}},p={render:function(t){return t("div",{attrs:{class:"recyclerview-item 
tombstone"},style:{height:"100px",width:"100%"}},"")}},v={preventDefaultException:{tagName:/^(INPUT|TEXTAREA|BUTTON|SELECT|IMG)$/},distance:50,animation_duration_ms:200,tombstone_class:"tombstone",invisible_class:"invisible",prerender:20,remain:10,preventDefault:!1,column:1,waterflow:!1,cacheVM:0,reuseVM:!1,props:{}},y=function(s){return{name:"RecyclerView",props:{fetch:Function,list:Array,item:Object,loading:Object,tombstone:{type:Object,default:function(){return p}},column:Number,prerender:Number,remain:Number,waterflow:Boolean,preventDefault:Boolean,options:Object,tag:{type:String,default:"div"}},render:function(t){return t(this.tag,{attrs:{class:"recyclerview-container"}},[t(this.loading||_),t(this.tag,{attrs:{class:"recyclerview"},on:{touchstart:this._start,touchmove:this._move,touchend:this._end,touchcancel:this._end,mousedown:this._start,mousemove:this._move,mouseup:this._end}})])},data:function(){return{startPointer:{x:0,y:0},_options:{},distance:0,pulling:!1,_contentSource:null,scroller:null}},mounted:function(){this.init()},beforeDestroy:function(){this.scroller.destroy(),this.scroller=null},methods:{init:function(){this._options=i({},v,{prerender:this.prerender||v.prerender,remain:this.remain||v.remain,column:this.column||v.column,waterflow:this.waterflow||v.waterflow,fetch:this.fetch,list:this.list,item:this.item,loading:this.loading,tombstone:this.tombstone},this.options),this._contentSource=new f(s,this._options),this.$list=this.$el.querySelector(".recyclerview"),this.scroller=new o(this.$list,this._contentSource,this._options),this.$emit("inited")},scrollToIndex:function(t){var e=this;if(this.waterflow)for(var i=0,s=this.scroller.items_.length;i<s;i++)i===t&&this._scrollTo(this.scroller.items_[i].top-this.scroller.items_[i].height*this._options.column+this.$list.offsetWidth);else 
t=Number(t),this.scroller.scrollToIndex(t),this.$nextTick(function(){e._scrollToBottom()})},_scrollTo:function(t){t=t||0,this.$list.scrollTop=Number(t)},_scrollToBottom:function(){this._scrollTo(this.$list.scrollHeight)},_renderListStyle:function(){this.$list.style.transform="translate3d(0, "+this.distance+"px, 0)"},_start:function(i){this.$list.scrollTop>0||(this.pulling=!0,this.startPointer=t(i),this.$list.style.transition="transform .2s",this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault())},_move:function(i){if(this.pulling){var s=t(i),o=s.y-this.startPointer.y;if(o<0)return void this._scrollTo(-o);this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault(),this.distance=Math.floor(.5*o),this.distance>this._options.distance&&(this.distance=this._options.distance),m(this._renderListStyle.bind(this))}},_end:function(t){var i=this;this.pulling&&(this.preventDefault&&!e(t.target,this._options.preventDefaultException)&&t.preventDefault(),this.pulling=!1,this.$list.style.transition="transform .3s",this.$nextTick(function(){i.$list.style.transform=""}),this.distance>=this._options.distance&&(this.distance=0,this.scroller.clear()))}}}};!function(t,e){if("undefined"==typeof document)return e;t=t||"";var i=document.head||document.getElementsByTagName("head")[0],s=document.createElement("style");s.type="text/css",s.styleSheet?s.styleSheet.cssText=t:s.appendChild(document.createTextNode(t)),i.appendChild(s)}(".recyclerview-container{position:relative}.recyclerview-loading{position:absolute;top:0;left:0;width:100%;text-align:center;padding:10px;font-size:14px;color:#9e9e9e}.recyclerview{background:#fff;margin:0;padding:0;overflow-x:hidden;overflow-y:scroll;-webkit-overflow-scrolling:touch;width:100%;height:100%;position:absolute;box-sizing:border-box;contain:layout;will-change:transform}",void 0);var I={install:n};return"undefined"!=typeof window&&window.Vue&&window.Vue.use(n),I});
| fix: reuse vm free
| dist/vue-recyclerview.js | fix: reuse vm free | <ide><path>ist/vue-recyclerview.js
<ide> /*!
<del> * Vue-RecyclerView.js v0.3.6
<add> * Vue-RecyclerView.js v0.3.7
<ide> * (c) 2017 Awe <[email protected]>
<ide> * Released under the MIT License.
<ide> */
<del>!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.RecyclerView=e()}(this,function(){"use strict";function t(t){if(!t)return l;if(u.test(t.type)){var e=t.touches[0];return{x:e.clientX,y:e.clientY}}return c.test(t.type)?{x:t.clientX,y:t.clientY}:l}function e(t,e){for(var i in e)if(e[i].test(t[i]))return!0;return!1}function i(t,e){if(null==t)throw new TypeError("Cannot convert undefined or null to object");for(var i=Object(t),s=1;s<arguments.length;s++){var o=arguments[s];if(o)for(var n in o)Object.prototype.hasOwnProperty.call(o,n)&&(i[n]=o[n])}return i}function s(t,e){for(var i=0,s=t.length;i<s;i++)if(e(t[i],i))return t[i]}function o(t,e,i){this.RUNWAY_ITEMS=i.prerender,this.RUNWAY_ITEMS_OPPOSITE=i.remain,this.ANIMATION_DURATION_MS=i.animation_duration_ms,this.TOMBSTONE_CLASS=i.tombstone_class,this.INVISIBLE_CLASS=i.invisible_class,this.MAX_COUNT=d,this.column=i.column||1,this.waterflow=i.waterflow,this.anchorItem={index:0,offset:0},this.timer=null,this.firstAttachedItem_=0,this.lastAttachedItem_=0,this.anchorScrollTop=0,this.tombstoneSize_=0,this.tombstoneWidth_=0,this.tombstones_=[],this.scroller_=t,this.source_=e,this.items_=i.list||[],this.loadedItems_=0,this.requestInProgress_=!1,this.cacheVM=i.cacheVM,this.options=i,this.source_.fetch||this.setItems(i.list),this.curPos=0,this.unusedNodes=[],this.baseNode=document.createElement("div"),this.scroller_.addEventListener("scroll",this.onScroll_.bind(this)),window.addEventListener("resize",this.onResize_.bind(this)),window.addEventListener("orientationchange",this.onResize_.bind(this)),this.initPosList(),this.onResize_()}function n(t){var e=(arguments.length>1&&void 0!==arguments[1]&&arguments[1],y(t));return t.component(e.name,e),e}var r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof 
Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},h=(function(){function t(t){this.value=t}function e(e){function i(t,e){return new Promise(function(i,o){var h={key:t,arg:e,resolve:i,reject:o,next:null};r?r=r.next=h:(n=r=h,s(t,e))})}function s(i,n){try{var r=e[i](n),h=r.value;h instanceof t?Promise.resolve(h.value).then(function(t){s("next",t)},function(t){s("throw",t)}):o(r.done?"return":"normal",r.value)}catch(t){o("throw",t)}}function o(t,e){switch(t){case"return":n.resolve({value:e,done:!0});break;case"throw":n.reject(e);break;default:n.resolve({value:e,done:!1})}n=n.next,n?s(n.key,n.arg):r=null}var n,r;this._invoke=i,"function"!=typeof e.return&&(this.return=void 0)}"function"==typeof Symbol&&Symbol.asyncIterator&&(e.prototype[Symbol.asyncIterator]=function(){return this}),e.prototype.next=function(t){return this._invoke("next",t)},e.prototype.throw=function(t){return this._invoke("throw",t)},e.prototype.return=function(t){return this._invoke("return",t)}}(),function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}),a=function(){function t(t,e){for(var i=0;i<e.length;i++){var s=e[i];s.enumerable=s.enumerable||!1,s.configurable=!0,"value"in s&&(s.writable=!0),Object.defineProperty(t,s.key,s)}}return function(e,i,s){return i&&t(e.prototype,i),s&&t(e,s),e}}();Object.keys||(Object.keys=function(){var t=Object.prototype.hasOwnProperty,e=!{toString:null}.propertyIsEnumerable("toString"),i=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],s=i.length;return function(o){if("object"!==(void 0===o?"undefined":r(o))&&"function"!=typeof o||null===o)throw new TypeError("Object.keys called on non-object");var n=[];for(var h in o)t.call(o,h)&&n.push(h);if(e)for(var a=0;a<s;a++)t.call(o,i[a])&&n.push(i[a]);return n}}());var 
l={x:0,y:0},c=/mouse(down|move|up)/,u=/touch(start|move|end)/,m=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(t){window.setTimeout(t,1e3/60)},d=1/0;o.prototype={onResize_:function(){var t=this.source_.createTombstone(this.baseNode.cloneNode(!0));t.style.position="absolute",this.scroller_.appendChild(t),t.classList.remove(this.INVISIBLE_CLASS),this.tombstoneSize_=t.offsetHeight/this.column,this.tombstoneWidth_=t.offsetWidth,this.scroller_.removeChild(t);for(var e=0;e<this.items_.length;e++)this.items_[e].top=-1,this.items_[e].height=this.items_[e].width=this.items_[e].cacheHeightCount=0;this.onScroll_()},onScroll_:function(){var t=this.scroller_.scrollTop-this.anchorScrollTop;0==this.scroller_.scrollTop?this.anchorItem={index:0,offset:0}:this.anchorItem=this.calculateAnchoredItem(this.anchorItem,t),this.anchorScrollTop=this.scroller_.scrollTop;var e=this.calculateAnchoredItem(this.anchorItem,this.scroller_.offsetHeight);t<0?this.fill(this.anchorItem.index-this.RUNWAY_ITEMS,e.index+this.RUNWAY_ITEMS_OPPOSITE):this.fill(this.anchorItem.index-this.RUNWAY_ITEMS_OPPOSITE,e.index+this.RUNWAY_ITEMS)},calculateAnchoredItem:function(t,e){if(0===e)return t;e+=t.offset;var i=t.index,s=0;if(e<0){for(;e<0&&i>0&&this.items_[i-1].height;)e+=this.items_[i-1].height,i--;s=Math.max(-i,Math.ceil(Math.min(e,0)/this.tombstoneSize_))}else{for(;e>0&&i<this.items_.length&&this.items_[i].height&&this.items_[i].height<e;)e-=this.items_[i].height,i++;(i>=this.items_.length||!this.items_[i].height)&&(s=Math.floor(Math.max(e,0)/this.tombstoneSize_))}return i+=s,e-=s*this.tombstoneSize_,i=Math.min(i,this.MAX_COUNT-1),{index:Math.floor(i/this.column)*this.column,offset:e}},fill:function(t,e){this.firstAttachedItem_=Math.max(0,t),this.lastAttachedItem_=e,this.attachContent()},getTombstone:function(){var t=this.tombstones_.pop();return 
t?(t.classList.remove(this.INVISIBLE_CLASS),t.style.opacity=1,t.style.transform="",t.style.transition="",t):this.source_.createTombstone(this.baseNode.cloneNode(!0))},layoutInView:function(t){var e=this.posList.get(Math.floor(t/this.column)-1,t%this.column);return!e||Math.abs(e-this.anchorScrollTop)<2*window.innerHeight},getUnUsedNodes:function(){if(this.waterflow)for(var t=0,e=this.items_.length;t<e;t++)this.layoutInView(t),this.items_[t].node&&!this.layoutInView(t)&&(this.items_[t].vm?this.clearItem(this.items_[t]):this.clearTombstone(this.items_[t]),this.items_[t].vm=null,this.items_[t].node=null);else for(var i=0,s=this.items_.length;i<s;i++)i!==this.firstAttachedItem_?(this.items_[i].vm?this.clearItem(this.items_[i]):this.clearTombstone(this.items_[i]),this.items_[i].vm=null,this.items_[i].node=null):i=this.lastAttachedItem_-1},clearItem:function(t){if(this.options.reuseVM)this.scroller_.removeChild(t.node),this.source_.free(t.data);else{if(this.cacheVM&&t.node)return this.scroller_.removeChild(t.node);t.vm.$destroy(),t.node&&this.unusedNodes.push(t.node)}},clearTombstone:function(t){t.node&&(t.node.classList.contains(this.TOMBSTONE_CLASS)?(this.tombstones_.push(t.node),this.tombstones_[this.tombstones_.length-1].classList.add(this.INVISIBLE_CLASS)):this.unusedNodes.push(t.node))},clearUnUsedNodes:function(){for(;this.unusedNodes.length;)this.scroller_.removeChild(this.unusedNodes.pop())},getNodePosition:function(){this.anchorScrollTop=0;for(var t=0;t<this.anchorItem.index;t++)this.anchorScrollTop+=this.items_[t].height||this.tombstoneSize_;this.anchorScrollTop+=this.anchorItem.offset,this.curPos=this.anchorScrollTop-this.anchorItem.offset;for(var e=this.anchorItem.index;e>this.firstAttachedItem_;)this.curPos-=this.items_[e-1].height||this.tombstoneSize_,e--;for(;e<this.firstAttachedItem_;)this.curPos+=this.items_[e].height||this.tombstoneSize_,e++},initPosList:function(){for(var 
t={},e=0,i=this.column;e<i;e++)t[e]=this.curPos;this.posList={data:{0:t},get:function(t,e){if(!this.data[t]){for(var i={},s=0,o=this.column;s<o;s++)i[s]=this.curPos;this.data[t]=i}return void 0===e?this.data[t]:this.data[t][e]},set:function(t,e,i){this.get(t)[e]=i}}},tombstoneLayout:function(t){var e=void 0,i=void 0,s=void 0;for(e in t)i=t[e],s=e%this.column*this.items_[e].width,this.items_[e].node.style.transform="translate3d("+s+"px,"+(this.anchorScrollTop+i[1])*this.column+"px, 0) scale("+this.tombstoneWidth_/this.items_[e].width+", "+this.tombstoneSize_/this.items_[e].height+")",this.items_[e].node.offsetTop,i[0].offsetTop,this.items_[e].node.style.transition="transform "+this.ANIMATION_DURATION_MS+"ms"},itemLayout:function(t){var e=void 0,i=void 0,s=0,o=0,n=0;for(e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)i=t[e],this.waterflow&&(n=Math.floor(e/this.column)),s=e%this.column*(this.items_[e].width||this.tombstoneWidth_),o=this.waterflow?this.posList.get(n,e%this.column):this.curPos,i&&(i[0].style.transition="transform "+this.ANIMATION_DURATION_MS+"ms, opacity "+this.ANIMATION_DURATION_MS+"ms",i[0].style.transform="translate3d("+s+"px,"+o+"px, 0) scale("+this.items_[e].width/this.tombstoneWidth_+", "+this.items_[e].height/this.tombstoneSize_+")",i[0].style.opacity=0),this.curPos!==this.items_[e].top&&(i||(this.items_[e].node.style.transition=""),this.items_[e].node.style.transform="translate3d("+s+"px,"+o+"px, 0)"),this.items_[e].top=o,(e+1)%this.column==0&&(this.curPos+=(this.items_[e].height||this.tombstoneSize_)*this.column),this.waterflow&&this.posList.set(n+1,e%this.column,o+(this.items_[e].height||this.tombstoneSize_)*this.column)},setAnimatePosition:function(t){this.tombstoneLayout(t),this.itemLayout(t)},renderItems:function(){var t={},e=void 0,i=[],s=void 
0,o=Math.floor((this.lastAttachedItem_+this.RUNWAY_ITEMS)/this.column)*this.column;for(o>this.MAX_COUNT&&(this.lastAttachedItem_=this.MAX_COUNT),s=this.firstAttachedItem_;s<this.lastAttachedItem_;s++){for(;this.items_.length<=s;)this.addItem_();if(this.items_[s].node){if(!this.items_[s].node.classList.contains(this.TOMBSTONE_CLASS)||!this.items_[s].data)continue;this.ANIMATION_DURATION_MS?(this.items_[s].node.style.zIndex=1,t[s]=[this.items_[s].node,this.items_[s].top-this.anchorScrollTop]):(this.items_[s].node.classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(this.items_[s].node)),this.items_[s].node=null}e=this.items_[s].data?this.source_.render(this.items_[s].data,this.unusedNodes.pop()||this.baseNode.cloneNode(!0),this.items_[s]):this.getTombstone(),e.style.position="absolute",this.items_[s].top=-1,this.items_[s].node=e,i.push(e)}var n=i.length;for(s=0;s<n;s++)this.scroller_.appendChild(i[s]);return t},cacheItemHeight:function(t){for(var e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)!this.items_[e].data||!t&&this.items_[e].height?this.items_[e].cacheHeightCount<10&&(this.items_[e].cacheHeightCount++,this.items_[e].height&&this.items_[e].node&&this.items_[e].height!==this.items_[e].node.offsetHeight/this.column&&(this.items_[e].height=this.items_[e].node.offsetHeight/this.column)):(this.items_[e].height=this.items_[e].node.offsetHeight/this.column,this.items_[e].width=this.items_[e].node.offsetWidth,this.items_[e].cacheHeightCount=0)},attachContent:function(){var t=this;this.getUnUsedNodes();var e=this.renderItems();this.clearUnUsedNodes(),this.cacheItemHeight(),this.getNodePosition(),this.setAnimatePosition(e),this.ANIMATION_DURATION_MS&&setTimeout(function(){t.tombstoneAnimation(e)},this.ANIMATION_DURATION_MS),this.maybeRequestContent()},setItems:function(t){t=t||[],this.items_=t,this.MAX_COUNT=t.length},scrollToIndex:function(t){var 
e=this.lastAttachedItem_-this.firstAttachedItem_;this.fill(t-e,t+1)},setScrollRunway:function(){this.scrollRunwayEnd_=Math.max(this.scrollRunwayEnd_,this.curPos+this.SCROLL_RUNWAY),this.scrollRunway_.style.transform="translate(0, "+this.scrollRunwayEnd_+"px)",this.scroller_.scrollTop=this.anchorScrollTop},tombstoneAnimation:function(t){var e=void 0;for(var i in t)e=t[i],e[0].classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(e[0]);t=null},maybeRequestContent:function(){var t=this;if(!this.requestInProgress_){var e=this.lastAttachedItem_-this.loadedItems_;e<=0||(this.requestInProgress_=!0,this.source_.fetch&&this.source_.fetch(e,this.loadedItems_).then(function(e){t.MAX_COUNT=e.count,t.addContent(e.list)}))}},addItem_:function(){this.items_.push({vm:null,data:null,node:null,height:0,width:0,top:0})},addContent:function(t){if(t.length){this.requestInProgress_=!1;for(var e=void 0,i=0;i<t.length;i++)this.items_.length<=this.loadedItems_&&this.addItem_(),this.loadedItems_<=this.MAX_COUNT&&(e=this.loadedItems_++,this.items_[e].data=t[i]);this.attachContent()}},clear:function(){this.loadedItems_=0,this.requestInProgress_=!1,this.firstAttachedItem_=-1,this.lastAttachedItem_=-1,this.getUnUsedNodes(),this.clearUnUsedNodes(),this.items_=[],this.onResize_()},destroy:function(){this.scroller_.removeEventListener("scroll",this.onScroll_),window.removeEventListener("resize",this.onResize_),window.removeEventListener("orientationchange",this.onResize_),this.clear()}};var f=function(){function t(e,i){var o=this;h(this,t),this.itemRender=i.item,this.TombstoneRender=i.tombstone,this.fetch=i.fetch,this.Vue=e,this.options=i,this.itemCache={data:{},length:0,get:function(t){return this.data[t]},set:function(t,e){this.length++,this.data[t]=e,this.length>i.cacheVM&&i.cacheVM>50&&this.recycle(10,t)},recycle:function(t,e){for(var i=void 
0,s=Object.keys(this.data),o=s.length;t;)t--,i=s[Math.floor(Math.random()*o)],this.data[i]&&this.length--&&this.data[i].$destroy(),this.data[i]=null}},this.reuseVM={queue:[],generate:function(t,e){var i=s(o.reuseVM.queue,function(t){return!t.inuse});if(i)i.vm.data=t,i.inuse=!0,i.id=t.id;else{var n={props:{data:t}};o.options.props.data=t,o.options.props&&Object.keys(o.options.props).map(function(t){n.props[t]=o.options.props[t]});var r={el:e,data:n.props,render:function(t){return t(o.itemRender,n)}};i={id:t.id,inuse:!0,vm:new o.Vue(r)},o.reuseVM.queue.push(i)}return i.vm},free:function(t){s(this.queue,function(e){return e.id===t}).inuse=!1},destroy:function(t,e){for(var i=0,s=this.queue.length;i<s;i++)(this.queue[i].id===t||e)&&(this.queue.vm&&this.queue.vm.$destroy(),this.queue.splice(i,1))}}}return a(t,[{key:"createTombstone",value:function(t){var e=this;return new this.Vue({el:t,render:function(t){return t(e.TombstoneRender)}}).$el}},{key:"free",value:function(t){this.reuseVM.free(t.id)}},{key:"render",value:function(t,e,i){var s=this;if(this.options.reuseVM){var o=this.reuseVM.generate(t,e);return i.vm=o,o.$el}var n=void 0,r={props:{data:t}};this.options.props.data=t,this.options.props&&Object.keys(this.options.props).map(function(t){r.props[t]=s.options.props[t]});var h={el:e,render:function(t){return t(s.itemRender,r)}};return this.options.cacheVM?(n=this.itemCache.get(t.id))?(i.vm=n,n.$el):(n=new this.Vue(h),this.itemCache.set(t.id,n),i.vm=n,n.$el):(n=new this.Vue(h),i.vm=n,n.$el)}},{key:"destroy",value:function(){this.reuseVM.destroy(null,!0)}}]),t}(),_={render:function(t){return t("div",{attrs:{class:"recyclerview-loading"}},"Loading...")}},p={render:function(t){return t("div",{attrs:{class:"recyclerview-item 
tombstone"},style:{height:"100px",width:"100%"}},"")}},v={preventDefaultException:{tagName:/^(INPUT|TEXTAREA|BUTTON|SELECT|IMG)$/},distance:50,animation_duration_ms:200,tombstone_class:"tombstone",invisible_class:"invisible",prerender:20,remain:10,preventDefault:!1,column:1,waterflow:!1,cacheVM:0,reuseVM:!1,props:{}},y=function(s){return{name:"RecyclerView",props:{fetch:Function,list:Array,item:Object,loading:Object,tombstone:{type:Object,default:function(){return p}},column:Number,prerender:Number,remain:Number,waterflow:Boolean,preventDefault:Boolean,options:Object,tag:{type:String,default:"div"}},render:function(t){return t(this.tag,{attrs:{class:"recyclerview-container"}},[t(this.loading||_),t(this.tag,{attrs:{class:"recyclerview"},on:{touchstart:this._start,touchmove:this._move,touchend:this._end,touchcancel:this._end,mousedown:this._start,mousemove:this._move,mouseup:this._end}})])},data:function(){return{startPointer:{x:0,y:0},_options:{},distance:0,pulling:!1,_contentSource:null,scroller:null}},mounted:function(){this.init()},beforeDestroy:function(){this.scroller.destroy(),this.scroller=null},methods:{init:function(){this._options=i({},v,{prerender:this.prerender||v.prerender,remain:this.remain||v.remain,column:this.column||v.column,waterflow:this.waterflow||v.waterflow,fetch:this.fetch,list:this.list,item:this.item,loading:this.loading,tombstone:this.tombstone},this.options),this._contentSource=new f(s,this._options),this.$list=this.$el.querySelector(".recyclerview"),this.scroller=new o(this.$list,this._contentSource,this._options),this.$emit("inited")},scrollToIndex:function(t){var e=this;if(this.waterflow)for(var i=0,s=this.scroller.items_.length;i<s;i++)i===t&&this._scrollTo(this.scroller.items_[i].top-this.scroller.items_[i].height*this._options.column+this.$list.offsetWidth);else 
t=Number(t),this.scroller.scrollToIndex(t),this.$nextTick(function(){e._scrollToBottom()})},_scrollTo:function(t){t=t||0,this.$list.scrollTop=Number(t)},_scrollToBottom:function(){this._scrollTo(this.$list.scrollHeight)},_renderListStyle:function(){this.$list.style.transform="translate3d(0, "+this.distance+"px, 0)"},_start:function(i){this.$list.scrollTop>0||(this.pulling=!0,this.startPointer=t(i),this.$list.style.transition="transform .2s",this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault())},_move:function(i){if(this.pulling){var s=t(i),o=s.y-this.startPointer.y;if(o<0)return void this._scrollTo(-o);this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault(),this.distance=Math.floor(.5*o),this.distance>this._options.distance&&(this.distance=this._options.distance),m(this._renderListStyle.bind(this))}},_end:function(t){var i=this;this.pulling&&(this.preventDefault&&!e(t.target,this._options.preventDefaultException)&&t.preventDefault(),this.pulling=!1,this.$list.style.transition="transform .3s",this.$nextTick(function(){i.$list.style.transform=""}),this.distance>=this._options.distance&&(this.distance=0,this.scroller.clear()))}}}};!function(t,e){if("undefined"==typeof document)return e;t=t||"";var i=document.head||document.getElementsByTagName("head")[0],s=document.createElement("style");s.type="text/css",s.styleSheet?s.styleSheet.cssText=t:s.appendChild(document.createTextNode(t)),i.appendChild(s)}(".recyclerview-container{position:relative}.recyclerview-loading{position:absolute;top:0;left:0;width:100%;text-align:center;padding:10px;font-size:14px;color:#9e9e9e}.recyclerview{background:#fff;margin:0;padding:0;overflow-x:hidden;overflow-y:scroll;-webkit-overflow-scrolling:touch;width:100%;height:100%;position:absolute;box-sizing:border-box;contain:layout;will-change:transform}",void 0);var I={install:n};return"undefined"!=typeof window&&window.Vue&&window.Vue.use(n),I});
<add>!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.RecyclerView=e()}(this,function(){"use strict";function t(t){if(!t)return l;if(u.test(t.type)){var e=t.touches[0];return{x:e.clientX,y:e.clientY}}return c.test(t.type)?{x:t.clientX,y:t.clientY}:l}function e(t,e){for(var i in e)if(e[i].test(t[i]))return!0;return!1}function i(t,e){if(null==t)throw new TypeError("Cannot convert undefined or null to object");for(var i=Object(t),s=1;s<arguments.length;s++){var o=arguments[s];if(o)for(var n in o)Object.prototype.hasOwnProperty.call(o,n)&&(i[n]=o[n])}return i}function s(t,e){for(var i=0,s=t.length;i<s;i++)if(e(t[i],i))return t[i]}function o(t,e,i){this.RUNWAY_ITEMS=i.prerender,this.RUNWAY_ITEMS_OPPOSITE=i.remain,this.ANIMATION_DURATION_MS=i.animation_duration_ms,this.TOMBSTONE_CLASS=i.tombstone_class,this.INVISIBLE_CLASS=i.invisible_class,this.MAX_COUNT=d,this.column=i.column||1,this.waterflow=i.waterflow,this.anchorItem={index:0,offset:0},this.timer=null,this.firstAttachedItem_=0,this.lastAttachedItem_=0,this.anchorScrollTop=0,this.tombstoneSize_=0,this.tombstoneWidth_=0,this.tombstones_=[],this.scroller_=t,this.source_=e,this.items_=i.list||[],this.loadedItems_=0,this.requestInProgress_=!1,this.cacheVM=i.cacheVM,this.options=i,this.source_.fetch||this.setItems(i.list),this.curPos=0,this.unusedNodes=[],this.baseNode=document.createElement("div"),this.scroller_.addEventListener("scroll",this.onScroll_.bind(this)),window.addEventListener("resize",this.onResize_.bind(this)),window.addEventListener("orientationchange",this.onResize_.bind(this)),this.initPosList(),this.onResize_()}function n(t){var e=(arguments.length>1&&void 0!==arguments[1]&&arguments[1],y(t));return t.component(e.name,e),e}var r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof 
Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},h=(function(){function t(t){this.value=t}function e(e){function i(t,e){return new Promise(function(i,o){var h={key:t,arg:e,resolve:i,reject:o,next:null};r?r=r.next=h:(n=r=h,s(t,e))})}function s(i,n){try{var r=e[i](n),h=r.value;h instanceof t?Promise.resolve(h.value).then(function(t){s("next",t)},function(t){s("throw",t)}):o(r.done?"return":"normal",r.value)}catch(t){o("throw",t)}}function o(t,e){switch(t){case"return":n.resolve({value:e,done:!0});break;case"throw":n.reject(e);break;default:n.resolve({value:e,done:!1})}n=n.next,n?s(n.key,n.arg):r=null}var n,r;this._invoke=i,"function"!=typeof e.return&&(this.return=void 0)}"function"==typeof Symbol&&Symbol.asyncIterator&&(e.prototype[Symbol.asyncIterator]=function(){return this}),e.prototype.next=function(t){return this._invoke("next",t)},e.prototype.throw=function(t){return this._invoke("throw",t)},e.prototype.return=function(t){return this._invoke("return",t)}}(),function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}),a=function(){function t(t,e){for(var i=0;i<e.length;i++){var s=e[i];s.enumerable=s.enumerable||!1,s.configurable=!0,"value"in s&&(s.writable=!0),Object.defineProperty(t,s.key,s)}}return function(e,i,s){return i&&t(e.prototype,i),s&&t(e,s),e}}();Object.keys||(Object.keys=function(){var t=Object.prototype.hasOwnProperty,e=!{toString:null}.propertyIsEnumerable("toString"),i=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],s=i.length;return function(o){if("object"!==(void 0===o?"undefined":r(o))&&"function"!=typeof o||null===o)throw new TypeError("Object.keys called on non-object");var n=[];for(var h in o)t.call(o,h)&&n.push(h);if(e)for(var a=0;a<s;a++)t.call(o,i[a])&&n.push(i[a]);return n}}());var 
l={x:0,y:0},c=/mouse(down|move|up)/,u=/touch(start|move|end)/,m=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(t){window.setTimeout(t,1e3/60)},d=1/0;o.prototype={onResize_:function(){var t=this.source_.createTombstone(this.baseNode.cloneNode(!0));t.style.position="absolute",this.scroller_.appendChild(t),t.classList.remove(this.INVISIBLE_CLASS),this.tombstoneSize_=t.offsetHeight/this.column,this.tombstoneWidth_=t.offsetWidth,this.scroller_.removeChild(t);for(var e=0;e<this.items_.length;e++)this.items_[e].top=-1,this.items_[e].height=this.items_[e].width=this.items_[e].cacheHeightCount=0;this.onScroll_()},onScroll_:function(){var t=this.scroller_.scrollTop-this.anchorScrollTop;0==this.scroller_.scrollTop?this.anchorItem={index:0,offset:0}:this.anchorItem=this.calculateAnchoredItem(this.anchorItem,t),this.anchorScrollTop=this.scroller_.scrollTop;var e=this.calculateAnchoredItem(this.anchorItem,this.scroller_.offsetHeight);t<0?this.fill(this.anchorItem.index-this.RUNWAY_ITEMS,e.index+this.RUNWAY_ITEMS_OPPOSITE):this.fill(this.anchorItem.index-this.RUNWAY_ITEMS_OPPOSITE,e.index+this.RUNWAY_ITEMS)},calculateAnchoredItem:function(t,e){if(0===e)return t;e+=t.offset;var i=t.index,s=0;if(e<0){for(;e<0&&i>0&&this.items_[i-1].height;)e+=this.items_[i-1].height,i--;s=Math.max(-i,Math.ceil(Math.min(e,0)/this.tombstoneSize_))}else{for(;e>0&&i<this.items_.length&&this.items_[i].height&&this.items_[i].height<e;)e-=this.items_[i].height,i++;(i>=this.items_.length||!this.items_[i].height)&&(s=Math.floor(Math.max(e,0)/this.tombstoneSize_))}return i+=s,e-=s*this.tombstoneSize_,i=Math.min(i,this.MAX_COUNT-1),{index:Math.floor(i/this.column)*this.column,offset:e}},fill:function(t,e){this.firstAttachedItem_=Math.max(0,t),this.lastAttachedItem_=e,this.attachContent()},getTombstone:function(){var t=this.tombstones_.pop();return 
t?(t.classList.remove(this.INVISIBLE_CLASS),t.style.opacity=1,t.style.transform="",t.style.transition="",t):this.source_.createTombstone(this.baseNode.cloneNode(!0))},layoutInView:function(t){var e=this.posList.get(Math.floor(t/this.column)-1,t%this.column);return!e||Math.abs(e-this.anchorScrollTop)<2*window.innerHeight},getUnUsedNodes:function(t){if(this.waterflow)for(var e=0,i=this.items_.length;e<i;e++)!this.items_[e].node||!t&&this.layoutInView(e)||(this.items_[e].vm?this.clearItem(this.items_[e]):this.clearTombstone(this.items_[e]),this.items_[e].vm=null,this.items_[e].node=null);else for(var s=0,o=this.items_.length;s<o;s++)s!==this.firstAttachedItem_?(this.items_[s].vm?this.clearItem(this.items_[s]):this.clearTombstone(this.items_[s]),this.items_[s].vm=null,this.items_[s].node=null):s=this.lastAttachedItem_-1},clearItem:function(t){if(this.options.reuseVM)this.scroller_.removeChild(t.node),this.source_.free(t.data);else{if(this.cacheVM&&t.node)return this.scroller_.removeChild(t.node);t.vm.$destroy(),t.node&&this.unusedNodes.push(t.node)}},clearTombstone:function(t){t.node&&(t.node.classList.contains(this.TOMBSTONE_CLASS)?(this.tombstones_.push(t.node),this.tombstones_[this.tombstones_.length-1].classList.add(this.INVISIBLE_CLASS)):this.unusedNodes.push(t.node))},clearUnUsedNodes:function(){for(;this.unusedNodes.length;)this.scroller_.removeChild(this.unusedNodes.pop())},getNodePosition:function(){this.anchorScrollTop=0;for(var t=0;t<this.anchorItem.index;t++)this.anchorScrollTop+=this.items_[t].height||this.tombstoneSize_;this.anchorScrollTop+=this.anchorItem.offset,this.curPos=this.anchorScrollTop-this.anchorItem.offset;for(var e=this.anchorItem.index;e>this.firstAttachedItem_;)this.curPos-=this.items_[e-1].height||this.tombstoneSize_,e--;for(;e<this.firstAttachedItem_;)this.curPos+=this.items_[e].height||this.tombstoneSize_,e++},initPosList:function(){for(var 
t={},e=0,i=this.column;e<i;e++)t[e]=this.curPos;this.posList={data:{0:t},get:function(t,e){if(!this.data[t]){for(var i={},s=0,o=this.column;s<o;s++)i[s]=this.curPos;this.data[t]=i}return void 0===e?this.data[t]:this.data[t][e]},set:function(t,e,i){this.get(t)[e]=i}}},tombstoneLayout:function(t){var e=void 0,i=void 0,s=void 0;for(e in t)i=t[e],s=e%this.column*this.items_[e].width,this.items_[e].node.style.transform="translate3d("+s+"px,"+(this.anchorScrollTop+i[1])*this.column+"px, 0) scale("+this.tombstoneWidth_/this.items_[e].width+", "+this.tombstoneSize_/this.items_[e].height+")",this.items_[e].node.offsetTop,i[0].offsetTop,this.items_[e].node.style.transition="transform "+this.ANIMATION_DURATION_MS+"ms"},itemLayout:function(t){var e=void 0,i=void 0,s=0,o=0,n=0;for(e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)i=t[e],this.waterflow&&(n=Math.floor(e/this.column)),s=e%this.column*(this.items_[e].width||this.tombstoneWidth_),o=this.waterflow?this.posList.get(n,e%this.column):this.curPos,i&&(i[0].style.transition="transform "+this.ANIMATION_DURATION_MS+"ms, opacity "+this.ANIMATION_DURATION_MS+"ms",i[0].style.transform="translate3d("+s+"px,"+o+"px, 0) scale("+this.items_[e].width/this.tombstoneWidth_+", "+this.items_[e].height/this.tombstoneSize_+")",i[0].style.opacity=0),this.curPos!==this.items_[e].top&&(i||(this.items_[e].node.style.transition=""),this.items_[e].node.style.transform="translate3d("+s+"px,"+o+"px, 0)"),this.items_[e].top=o,(e+1)%this.column==0&&(this.curPos+=(this.items_[e].height||this.tombstoneSize_)*this.column),this.waterflow&&this.posList.set(n+1,e%this.column,o+(this.items_[e].height||this.tombstoneSize_)*this.column)},setAnimatePosition:function(t){this.tombstoneLayout(t),this.itemLayout(t)},renderItems:function(){var t={},e=void 0,i=[],s=void 
0,o=Math.floor((this.lastAttachedItem_+this.RUNWAY_ITEMS)/this.column)*this.column;for(o>this.MAX_COUNT&&(this.lastAttachedItem_=this.MAX_COUNT),s=this.firstAttachedItem_;s<this.lastAttachedItem_;s++){for(;this.items_.length<=s;)this.addItem_();if(this.items_[s].node){if(!this.items_[s].node.classList.contains(this.TOMBSTONE_CLASS)||!this.items_[s].data)continue;this.ANIMATION_DURATION_MS?(this.items_[s].node.style.zIndex=1,t[s]=[this.items_[s].node,this.items_[s].top-this.anchorScrollTop]):(this.items_[s].node.classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(this.items_[s].node)),this.items_[s].node=null}e=this.items_[s].data?this.source_.render(this.items_[s].data,this.unusedNodes.pop()||this.baseNode.cloneNode(!0),this.items_[s]):this.getTombstone(),e.style.position="absolute",this.items_[s].top=-1,this.items_[s].node=e,i.push(e)}var n=i.length;for(s=0;s<n;s++)this.scroller_.appendChild(i[s]);return t},cacheItemHeight:function(t){for(var e=this.firstAttachedItem_;e<this.lastAttachedItem_;e++)!this.items_[e].data||!t&&this.items_[e].height?this.items_[e].cacheHeightCount<10&&(this.items_[e].cacheHeightCount++,this.items_[e].height&&this.items_[e].node&&this.items_[e].height!==this.items_[e].node.offsetHeight/this.column&&(this.items_[e].height=this.items_[e].node.offsetHeight/this.column)):(this.items_[e].height=this.items_[e].node.offsetHeight/this.column,this.items_[e].width=this.items_[e].node.offsetWidth,this.items_[e].cacheHeightCount=0)},attachContent:function(){var t=this;this.getUnUsedNodes();var e=this.renderItems();this.clearUnUsedNodes(),this.cacheItemHeight(),this.getNodePosition(),this.setAnimatePosition(e),this.ANIMATION_DURATION_MS&&setTimeout(function(){t.tombstoneAnimation(e)},this.ANIMATION_DURATION_MS),this.maybeRequestContent()},setItems:function(t){t=t||[],this.items_=t,this.MAX_COUNT=t.length},scrollToIndex:function(t){var 
e=this.lastAttachedItem_-this.firstAttachedItem_;this.fill(t-e,t+1)},setScrollRunway:function(){this.scrollRunwayEnd_=Math.max(this.scrollRunwayEnd_,this.curPos+this.SCROLL_RUNWAY),this.scrollRunway_.style.transform="translate(0, "+this.scrollRunwayEnd_+"px)",this.scroller_.scrollTop=this.anchorScrollTop},tombstoneAnimation:function(t){var e=void 0;for(var i in t)e=t[i],e[0].classList.add(this.INVISIBLE_CLASS),this.tombstones_.push(e[0]);t=null},maybeRequestContent:function(){var t=this;if(!this.requestInProgress_){var e=this.lastAttachedItem_-this.loadedItems_;e<=0||(this.requestInProgress_=!0,this.source_.fetch&&this.source_.fetch(e,this.loadedItems_).then(function(e){t.MAX_COUNT=e.count,t.addContent(e.list)}))}},addItem_:function(){this.items_.push({vm:null,data:null,node:null,height:0,width:0,top:0})},addContent:function(t){if(t.length){this.requestInProgress_=!1;for(var e=void 0,i=0;i<t.length;i++)this.items_.length<=this.loadedItems_&&this.addItem_(),this.loadedItems_<=this.MAX_COUNT&&(e=this.loadedItems_++,this.items_[e].data=t[i]);this.attachContent()}},clear:function(){this.loadedItems_=0,this.requestInProgress_=!1,this.firstAttachedItem_=-1,this.lastAttachedItem_=-1,this.getUnUsedNodes(!0),this.clearUnUsedNodes(),this.items_=[],this.onResize_()},destroy:function(){this.scroller_.removeEventListener("scroll",this.onScroll_),window.removeEventListener("resize",this.onResize_),window.removeEventListener("orientationchange",this.onResize_),this.clear()}};var f=function(){function t(e,i){var o=this;h(this,t),this.itemRender=i.item,this.TombstoneRender=i.tombstone,this.fetch=i.fetch,this.Vue=e,this.options=i,this.itemCache={data:{},length:0,get:function(t){return this.data[t]},set:function(t,e){this.length++,this.data[t]=e,this.length>i.cacheVM&&i.cacheVM>50&&this.recycle(10,t)},recycle:function(t,e){for(var i=void 
0,s=Object.keys(this.data),o=s.length;t;)t--,i=s[Math.floor(Math.random()*o)],this.data[i]&&this.length--&&this.data[i].$destroy(),this.data[i]=null}},this.reuseVM={queue:[],generate:function(t,e){var i=s(o.reuseVM.queue,function(t){return!t.inuse});if(i)i.vm.data=t,i.inuse=!0,i.id=t.id;else{var n={props:{data:t}};o.options.props.data=t,o.options.props&&Object.keys(o.options.props).map(function(t){n.props[t]=o.options.props[t]});var r={el:e,data:n.props,render:function(t){return t(o.itemRender,n)}};i={id:t.id,inuse:!0,vm:new o.Vue(r)},o.reuseVM.queue.push(i)}return i.vm},free:function(t){s(this.queue,function(e){return e.id===t}).inuse=!1},destroy:function(t,e){for(var i=0,s=this.queue.length;i<s;i++)(this.queue[i].id===t||e)&&(this.queue.vm&&this.queue.vm.$destroy(),this.queue.splice(i,1))}}}return a(t,[{key:"createTombstone",value:function(t){var e=this;return new this.Vue({el:t,render:function(t){return t(e.TombstoneRender)}}).$el}},{key:"free",value:function(t){this.reuseVM.free(t.id)}},{key:"render",value:function(t,e,i){var s=this;if(this.options.reuseVM){var o=this.reuseVM.generate(t,e);return i.vm=o,o.$el}var n=void 0,r={props:{data:t}};this.options.props.data=t,this.options.props&&Object.keys(this.options.props).map(function(t){r.props[t]=s.options.props[t]});var h={el:e,render:function(t){return t(s.itemRender,r)}};return this.options.cacheVM?(n=this.itemCache.get(t.id))?(i.vm=n,n.$el):(n=new this.Vue(h),this.itemCache.set(t.id,n),i.vm=n,n.$el):(n=new this.Vue(h),i.vm=n,n.$el)}},{key:"destroy",value:function(){return this.reuseVM.destroy(null,!0),this.reuseVM.queue}}]),t}(),_={render:function(t){return t("div",{attrs:{class:"recyclerview-loading"}},"Loading...")}},p={render:function(t){return t("div",{attrs:{class:"recyclerview-item 
tombstone"},style:{height:"100px",width:"100%"}},"")}},v={preventDefaultException:{tagName:/^(INPUT|TEXTAREA|BUTTON|SELECT|IMG)$/},distance:50,animation_duration_ms:200,tombstone_class:"tombstone",invisible_class:"invisible",prerender:20,remain:10,preventDefault:!1,column:1,waterflow:!1,cacheVM:0,reuseVM:!1,props:{}},y=function(s){return{name:"RecyclerView",props:{fetch:Function,list:Array,item:Object,loading:Object,tombstone:{type:Object,default:function(){return p}},column:Number,prerender:Number,remain:Number,waterflow:Boolean,preventDefault:Boolean,options:Object,tag:{type:String,default:"div"}},render:function(t){return t(this.tag,{attrs:{class:"recyclerview-container"}},[t(this.loading||_),t(this.tag,{attrs:{class:"recyclerview"},on:{touchstart:this._start,touchmove:this._move,touchend:this._end,touchcancel:this._end,mousedown:this._start,mousemove:this._move,mouseup:this._end}})])},data:function(){return{startPointer:{x:0,y:0},_options:{},distance:0,pulling:!1,_contentSource:null,scroller:null}},mounted:function(){this.init()},beforeDestroy:function(){this.scroller.destroy(),this.scroller=null},methods:{init:function(){this._options=i({},v,{prerender:this.prerender||v.prerender,remain:this.remain||v.remain,column:this.column||v.column,waterflow:this.waterflow||v.waterflow,fetch:this.fetch,list:this.list,item:this.item,loading:this.loading,tombstone:this.tombstone},this.options),this._contentSource=new f(s,this._options),this.$list=this.$el.querySelector(".recyclerview"),this.scroller=new o(this.$list,this._contentSource,this._options),this.$emit("inited")},scrollToIndex:function(t){var e=this;if(this.waterflow)for(var i=0,s=this.scroller.items_.length;i<s;i++)i===t&&this._scrollTo(this.scroller.items_[i].top-this.scroller.items_[i].height*this._options.column+this.$list.offsetWidth);else 
t=Number(t),this.scroller.scrollToIndex(t),this.$nextTick(function(){e._scrollToBottom()})},_scrollTo:function(t){t=t||0,this.$list.scrollTop=Number(t)},_scrollToBottom:function(){this._scrollTo(this.$list.scrollHeight)},_renderListStyle:function(){this.$list.style.transform="translate3d(0, "+this.distance+"px, 0)"},_start:function(i){this.$list.scrollTop>0||(this.pulling=!0,this.startPointer=t(i),this.$list.style.transition="transform .2s",this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault())},_move:function(i){if(this.pulling){var s=t(i),o=s.y-this.startPointer.y;if(o<0)return void this._scrollTo(-o);this.preventDefault&&!e(i.target,this._options.preventDefaultException)&&i.preventDefault(),this.distance=Math.floor(.5*o),this.distance>this._options.distance&&(this.distance=this._options.distance),m(this._renderListStyle.bind(this))}},_end:function(t){var i=this;this.pulling&&(this.preventDefault&&!e(t.target,this._options.preventDefaultException)&&t.preventDefault(),this.pulling=!1,this.$list.style.transition="transform .3s",this.$nextTick(function(){i.$list.style.transform=""}),this.distance>=this._options.distance&&(this.distance=0,this.scroller.clear()))}}}};!function(t,e){if("undefined"==typeof document)return e;t=t||"";var i=document.head||document.getElementsByTagName("head")[0],s=document.createElement("style");s.type="text/css",s.styleSheet?s.styleSheet.cssText=t:s.appendChild(document.createTextNode(t)),i.appendChild(s)}(".recyclerview-container{position:relative}.recyclerview-loading{position:absolute;top:0;left:0;width:100%;text-align:center;padding:10px;font-size:14px;color:#9e9e9e}.recyclerview{background:#fff;margin:0;padding:0;overflow-x:hidden;overflow-y:scroll;-webkit-overflow-scrolling:touch;width:100%;height:100%;position:absolute;box-sizing:border-box;contain:layout;will-change:transform}",void 0);var g={install:n};return"undefined"!=typeof window&&window.Vue&&window.Vue.use(n),g}); |
|
Java | apache-2.0 | 222070a7c7c14eab0dfa1d1a343ebf5858bba6df | 0 | meringlab/stringdb-psicquic | /**
* Copyright 2014 University of Zürich, SIB, and others.
* <p>
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.string_db;
import com.google.common.collect.ImmutableSet;
import org.apache.log4j.Logger;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.string_db.jdbc.GenericQueryProcessor;
import org.string_db.jdbc.TwoColumnRowMapper;
import java.sql.SQLException;
import java.util.*;
/**
* Helper class to data from PostgreSQL
*
* @author Milan Simonovic <[email protected]>
*/
/**
 * Facade over the STRING-DB PostgreSQL schema: bundles the protein and species
 * repositories with a generic query processor and exposes typed loaders for the
 * tables needed by the PSICQUIC export (sets, score types, names, sequences,
 * linkouts).
 * <p>
 * Not thread-safe beyond the thread-safety of the injected collaborators; the
 * internal {@link #scoreTypeMap} is populated once in the constructor and only
 * read afterwards.
 */
public class DbFacade {
    private static final Logger log = Logger.getLogger(DbFacade.class);

    protected final ProteinRepository proteinRepository;
    protected final SpeciesRepository speciesRepository;
    protected final GenericQueryProcessor queryProcessor;

    /**
     * Maps the cryptic score column names stored in {@code network.score_types}
     * to the user-friendly evidence-channel labels returned by
     * {@link #loadScoreTypes()}.
     */
    protected final Map<String, String> scoreTypeMap = new HashMap<>();

    public DbFacade(ProteinRepository proteinRepository, SpeciesRepository speciesRepository, GenericQueryProcessor queryProcessor) {
        this.proteinRepository = proteinRepository;
        this.speciesRepository = speciesRepository;
        this.queryProcessor = queryProcessor;

        scoreTypeMap.put("equiv_nscore", "neighbourhood");
        scoreTypeMap.put("equiv_nscore_transferred", "neighbourhood_transferred");
        scoreTypeMap.put("equiv_fscore", "fusion");
        scoreTypeMap.put("equiv_pscore", "cooccurrence");
        scoreTypeMap.put("equiv_hscore", "homology");
        scoreTypeMap.put("array_score", "coexpression");
        scoreTypeMap.put("array_score_transferred", "coexpression_transferred");
        scoreTypeMap.put("experimental_score", "experimental");
        scoreTypeMap.put("experimental_score_transferred", "experimental_transferred");
        scoreTypeMap.put("database_score", "database");
        scoreTypeMap.put("database_score_transferred", "database_transferred");
        scoreTypeMap.put("textmining_score", "textmining");
        scoreTypeMap.put("textmining_score_transferred", "textmining_transferred");
        // NOTE(review): both "neighbourhood" (equiv_nscore) and "neighborhood"
        // (neighborhood_score) spellings occur, as do "cooccurrence" vs
        // "cooccurence" -- kept verbatim because downstream consumers may depend
        // on the exact labels; confirm before normalising.
        scoreTypeMap.put("neighborhood_score", "neighborhood");
        scoreTypeMap.put("fusion_score", "fusion");
        scoreTypeMap.put("cooccurence_score", "cooccurence");
    }

    /** @return the ids of the "core" species, as defined by the species repository. */
    public List<Integer> loadCoreSpecies() {
        return speciesRepository.loadCoreSpeciesIds();
    }

    /** @return the ids of all species known to the species repository. */
    public List<Integer> loadSpeciesIds() {
        return speciesRepository.loadSpeciesIds();
    }

    /**
     * Loads the protein-to-sets mapping from the {@code evidence.sets_items}
     * table, restricted to positive item ids of the given species.
     *
     * @param spcId species (taxon) id to filter on
     * @return map of protein item id to the set ids it belongs to
     */
    public Map<Integer, Set<String>> loadProteinsSets(Integer spcId) {
        log.info("loading proteins sets");
        Map<Integer, Set<String>> map =
                queryProcessor.selectTwoColumns("item_id", "set_id", "evidence.sets_items",
                        TwoColumnRowMapper.<Integer, String>multiValMapper(),
                        "item_id > 0 and species_id = :species_id; ",
                        new MapSqlParameterSource("species_id", spcId));
        log.info(map.size() + " proteins.sets records read");
        return map;
    }

    /**
     * Loads {@code network.score_types} and translates each cryptic score type
     * to its user-friendly label via {@link #scoreTypeMap}.
     *
     * @return map of score id to user-friendly score-type label, in the order
     *         returned by the query (insertion order is preserved)
     * @throws SQLException     if the query fails
     * @throws RuntimeException if the database contains a score type with no
     *                          entry in {@link #scoreTypeMap}
     */
    public Map<Integer, String> loadScoreTypes() throws SQLException {
        final Map<Integer, String> crypticTypes = queryProcessor.selectTwoColumns("score_id", "score_type", "network.score_types",
                TwoColumnRowMapper.<Integer, String>uniqueValMapper());
        Map<Integer, String> types = new LinkedHashMap<>();
        // iterate entries to avoid a second map lookup per key
        for (Map.Entry<Integer, String> entry : crypticTypes.entrySet()) {
            final String crypticType = entry.getValue();
            final String friendlyType = this.scoreTypeMap.get(crypticType);
            if (friendlyType == null) {
                // fail fast: an unmapped type means scoreTypeMap is out of date
                throw new RuntimeException("missing score type: " + crypticType);
            }
            types.put(entry.getKey(), friendlyType);
        }
        return types;
    }

    /**
     * Loads the {@code evidence.sets} table.
     *
     * @return map of set id to the collection id it belongs to
     */
    public Map<String, String> loadSetsCollections() {
        log.info("loading sets collections");
        final Map<String, String> map = queryProcessor.selectTwoColumns("set_id", "collection_id", "evidence.sets",
                TwoColumnRowMapper.<String, String>uniqueValMapper());
        log.info(map.size() + " records read");
        return map;
    }

    /**
     * <pre>select distinct(source) from items.proteins_names
     *  where LOWER(source) like '%refseq%'
     * --returns
     * "RefSeq"
     * "Ensembl_RefSeq"
     * "Ensembl_HGNC_RefSeq_IDs"
     * "Ensembl_RefSeq_synonym"
     * "Ensembl_RefSeq_short"
     * </pre>
     * <p/>
     * We'll skip "Ensembl_RefSeq_synonym" and "Ensembl_RefSeq_short".
     *
     * @param spcId species (taxon) id
     * @return map of protein id to its RefSeq identifiers
     */
    public Map<Integer, Set<String>> loadRefseqIds(Integer spcId) throws SQLException {
        log.info("loadRefseqIds");
        final Map<Integer, Set<String>> names = proteinRepository.loadProteinNames(spcId,
                ImmutableSet.of("Ensembl_RefSeq", "Ensembl_HGNC_RefSeq_IDs", "RefSeq"));
        log.info(names.size() + " names read");
        return names;
    }

    /**
     * @param spcId species (taxon) id
     * @return map of protein id to its external id
     */
    public Map<Integer, ProteinExternalId> loadProteinExternalIds(Integer spcId) throws SQLException {
        log.info("loadProteinExternalIds");
        final Map<Integer, ProteinExternalId> ids = proteinRepository.loadExternalIds(spcId);
        log.info(ids.size() + " ids read");
        return ids;
    }

    /**
     * @param spcId species (taxon) id
     * @return map of protein id to its preferred display name
     */
    public Map<Integer, String> loadProteinNames(Integer spcId) throws SQLException {
        log.info("loadProteinNames");
        final Map<Integer, String> names = proteinRepository.loadProteinPreferredNames(spcId);
        log.info(names.size() + " names read");
        return names;
    }

    /**
     * @param spcId species (taxon) id
     * @return map of protein id to its amino-acid sequence
     */
    public Map<Integer, String> loadProteinSequences(Integer spcId) {
        log.info("loadProteinSequences");
        final Map<Integer, String> sequences = proteinRepository.loadProteinSequences(spcId);
        log.info(sequences.size() + " sequences read");
        return sequences;
    }

    /**
     * @param speciesId species (taxon) id
     * @return the official name of the species
     */
    public String loadSpeciesName(Integer speciesId) {
        return speciesRepository.loadSpeciesName(speciesId);
    }

    /**
     * Seems like items.species_names has lots of not really useful names,
     * so let's just pick short, one-word ones, that start with lower case,
     * e.g. human for 9606, mouse&mice for 10090, yeast for 4932, etc.
     *
     * @param speciesId species (taxon) id
     * @return the official name followed by lower-case one-word synonyms
     */
    public Collection<String> loadSpeciesNames(Integer speciesId) {
        final Map<String, Set<String>> all = queryProcessor.selectTwoColumns("official_name", "species_name",
                "items.species_names",
                TwoColumnRowMapper.<String, String>multiValMapper(),
                "species_id = :species_id and species_name not like '% %';",
                new MapSqlParameterSource("species_id", speciesId));

        List<String> names = new ArrayList<>();
        final Iterator<String> iterator = all.keySet().iterator();
        if (!iterator.hasNext()) {
            //some species don't have synonyms so need to get the official one:
            names.add(loadSpeciesName(speciesId));
            return names;
        }
        final String official = iterator.next();
        names.add(official);
        for (String name : all.get(official)) {
            // keep only informal synonyms such as "human" or "yeast"
            if (Character.isLowerCase(name.charAt(0))) {
                names.add(name);
            }
        }
        return names;
    }

    /**
     * Loads UniProt linkout URLs from {@code items.proteins_linkouts}.
     *
     * @return map of protein id to its UniProt linkout URLs
     */
    public Map<Integer, Set<String>> loadUniProtLinkouts() {
        log.info("loading UniProt ids");
        Map<Integer, Set<String>> table =
                queryProcessor.selectTwoColumns("protein_id", "linkout_url", "items.proteins_linkouts",
                        TwoColumnRowMapper.<Integer, String>multiValMapper(),
                        "linkout_type = 'UniProt'; ", null);
        log.info(table.size() + " UniProt records read");
        return table;
    }
}
| src/main/java/org/string_db/DbFacade.java | /**
* Copyright 2014 University of Zürich, SIB, and others.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.string_db;
import com.google.common.collect.ImmutableSet;
import org.apache.log4j.Logger;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.string_db.jdbc.GenericQueryProcessor;
import org.string_db.jdbc.TwoColumnRowMapper;
import java.sql.SQLException;
import java.util.*;
/**
* Helper class to data from PostgreSQL
*
* @author Milan Simonovic <[email protected]>
*/
public class DbFacade {
private static final Logger log = Logger.getLogger(DbFacade.class);
protected final ProteinRepository proteinRepository;
protected final SpeciesRepository speciesRepository;
protected final GenericQueryProcessor queryProcessor;
public DbFacade(ProteinRepository proteinRepository, SpeciesRepository speciesRepository, GenericQueryProcessor queryProcessor) {
this.proteinRepository = proteinRepository;
this.speciesRepository = speciesRepository;
this.queryProcessor = queryProcessor;
}
public List<Integer> loadCoreSpecies() {
return speciesRepository.loadCoreSpeciesIds();
}
public List<Integer> loadSpeciesIds() {
return speciesRepository.loadSpeciesIds();
}
/**
* evidence.sets_items table
*
* @param spcId
*/
public Map<Integer, Set<String>> loadProteinsSets(Integer spcId) {
log.info("loading proteins sets");
Map<Integer, Set<String>> map =
queryProcessor.selectTwoColumns("item_id", "set_id", "evidence.sets_items",
TwoColumnRowMapper.<Integer, String>multiValMapper(),
"item_id > 0 and species_id = :species_id; ",
new MapSqlParameterSource("species_id", spcId));
log.info(map.size() + " proteins.sets records read");
return map;
}
public Map<Integer, String> loadScoreTypes() throws SQLException {
return queryProcessor.selectTwoColumns("score_id", "score_type", "network.score_types_user_friendly",
TwoColumnRowMapper.<Integer, String>uniqueValMapper());
}
/**
* evidence.sets table
*/
public Map<String, String> loadSetsCollections() {
log.info("loading sets collections");
final Map<String, String> map = queryProcessor.selectTwoColumns("set_id", "collection_id", "evidence.sets",
TwoColumnRowMapper.<String, String>uniqueValMapper());
log.info(map.size() + " records read");
return map;
}
/**
* <pre>select distinct(source) from items.proteins_names
* where LOWER(source) like '%refseq%'
* --returns
* "RefSeq"
* "Ensembl_RefSeq"
* "Ensembl_HGNC_RefSeq_IDs"
* "Ensembl_RefSeq_synonym"
* "Ensembl_RefSeq_short"
* </pre>
* <p/>
* We'll skip "Ensembl_RefSeq_synonym" and "Ensembl_RefSeq_short".
*
* @param spcId
* @return
*/
public Map<Integer, Set<String>> loadRefseqIds(Integer spcId) throws SQLException {
log.info("loadRefseqIds");
final Map<Integer, Set<String>> names = proteinRepository.loadProteinNames(spcId,
ImmutableSet.of("Ensembl_RefSeq", "Ensembl_HGNC_RefSeq_IDs", "RefSeq"));
log.info(names.size() + " names read");
return names;
}
public Map<Integer, ProteinExternalId> loadProteinExternalIds(Integer spcId) throws SQLException {
log.info("loadProteinExternalIds");
final Map<Integer, ProteinExternalId> ids = proteinRepository.loadExternalIds(spcId);
log.info(ids.size() + " ids read");
return ids;
}
public Map<Integer, String> loadProteinNames(Integer spcId) throws SQLException {
log.info("loadProteinNames");
final Map<Integer, String> names = proteinRepository.loadProteinPreferredNames(spcId);
log.info(names.size() + " names read");
return names;
}
public Map<Integer, String> loadProteinSequences(Integer spcId) {
log.info("loadProteinSequences");
final Map<Integer, String> sequences = proteinRepository.loadProteinSequences(spcId);
log.info(sequences.size() + " sequences read");
return sequences;
}
public String loadSpeciesName(Integer speciesId) {
return speciesRepository.loadSpeciesName(speciesId);
}
/**
* Seems like items.species_names has lots of not really useful names,
* so let's just pick short, one-word ones, that start with lower case,
* e.g. human for 9606, mouse&mice for 10090, yeast for 4932, etc.
*
* @param speciesId
* @return
*/
public Collection<String> loadSpeciesNames(Integer speciesId) {
final Map<String, Set<String>> all = queryProcessor.selectTwoColumns("official_name", "species_name",
"items.species_names",
TwoColumnRowMapper.<String, String>multiValMapper(),
"species_id = :species_id and species_name not like '% %';",
new MapSqlParameterSource("species_id", speciesId));
List<String> names = new ArrayList<>();
final Iterator<String> iterator = all.keySet().iterator();
if (!iterator.hasNext()) {
//some species don't have synonyms so need to get the official one:
names.add(loadSpeciesName(speciesId));
return names;
}
final String official = iterator.next();
names.add(official);
for (String name : all.get(official)) {
if (Character.isLowerCase(name.charAt(0))) {
names.add(name);
}
}
return names;
}
public Map<Integer, Set<String>> loadUniProtLinkouts() {
log.info("loading UniProt ids");
Map<Integer, Set<String>> table =
queryProcessor.selectTwoColumns("protein_id", "linkout_url", "items.proteins_linkouts",
TwoColumnRowMapper.<Integer, String>multiValMapper(),
"linkout_type = 'UniProt'; ", null);
log.info(table.size() + " UniProt records read");
return table;
}
}
| bugfix: use the official score_types column
| src/main/java/org/string_db/DbFacade.java | bugfix: use the official score_types column | <ide><path>rc/main/java/org/string_db/DbFacade.java
<ide> /**
<ide> * Copyright 2014 University of Zürich, SIB, and others.
<del> *
<add> * <p>
<ide> * Licensed to the Apache Software Foundation (ASF) under one
<ide> * or more contributor license agreements. The ASF licenses this file
<ide> * to you under the Apache License, Version 2.0 (the
<ide> * "License"); you may not use this file except in compliance
<ide> * with the License. You may obtain a copy of the License at
<del> *
<add> * <p>
<ide> * http://www.apache.org/licenses/LICENSE-2.0
<del> *
<add> * <p>
<ide> * Unless required by applicable law or agreed to in writing, software
<ide> * distributed under the License is distributed on an "AS IS" BASIS,
<ide> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> protected final ProteinRepository proteinRepository;
<ide> protected final SpeciesRepository speciesRepository;
<ide> protected final GenericQueryProcessor queryProcessor;
<add> protected final Map<String, String> scoreTypeMap = new HashMap<>();
<ide>
<ide> public DbFacade(ProteinRepository proteinRepository, SpeciesRepository speciesRepository, GenericQueryProcessor queryProcessor) {
<ide> this.proteinRepository = proteinRepository;
<ide> this.speciesRepository = speciesRepository;
<ide> this.queryProcessor = queryProcessor;
<add>
<add> scoreTypeMap.put("equiv_nscore", "neighbourhood");
<add> scoreTypeMap.put("equiv_nscore_transferred", "neighbourhood_transferred");
<add> scoreTypeMap.put("equiv_fscore", "fusion");
<add> scoreTypeMap.put("equiv_pscore", "cooccurrence");
<add> scoreTypeMap.put("equiv_hscore", "homology");
<add> scoreTypeMap.put("array_score", "coexpression");
<add> scoreTypeMap.put("array_score_transferred", "coexpression_transferred");
<add> scoreTypeMap.put("experimental_score", "experimental");
<add> scoreTypeMap.put("experimental_score_transferred", "experimental_transferred");
<add> scoreTypeMap.put("database_score", "database");
<add> scoreTypeMap.put("database_score_transferred", "database_transferred");
<add> scoreTypeMap.put("textmining_score", "textmining");
<add> scoreTypeMap.put("textmining_score_transferred", "textmining_transferred");
<add> scoreTypeMap.put("neighborhood_score", "neighborhood");
<add> scoreTypeMap.put("fusion_score", "fusion");
<add> scoreTypeMap.put("cooccurence_score", "cooccurence");
<add>
<ide> }
<ide>
<ide> public List<Integer> loadCoreSpecies() {
<ide> return map;
<ide> }
<ide>
<add>
<ide> public Map<Integer, String> loadScoreTypes() throws SQLException {
<del> return queryProcessor.selectTwoColumns("score_id", "score_type", "network.score_types_user_friendly",
<add> final Map<Integer, String> crypticTypes = queryProcessor.selectTwoColumns("score_id", "score_type", "network.score_types",
<ide> TwoColumnRowMapper.<Integer, String>uniqueValMapper());
<add> Map<Integer, String> types = new LinkedHashMap<>();
<add> for (Integer id : crypticTypes.keySet()) {
<add> final String crypticType = crypticTypes.get(id);
<add> if (!this.scoreTypeMap.containsKey(crypticType)) {
<add> throw new RuntimeException("missing score type: " + crypticType);
<add> }
<add> types.put(id, this.scoreTypeMap.get(crypticType));
<add> }
<add> return types;
<ide> }
<ide>
<ide> /** |
|
JavaScript | mit | dea02a4a3222aae342403308989e7fdd8407ab99 | 0 | m1ch3lp3r3z/symfony-tasks-manager-demo,m1ch3lp3r3z/symfony-tasks-manager-demo,m1ch3lp3r3z/symfony-tasks-manager-demo | define(function(require) {
'use strict';
var _ = require('underscore'),
MainLayout = require('views/layout');
require('collections/task');
require('backbone.marionette');
// Define and initializes self only once
var self;
return Backbone.Marionette.AppRouter.extend({
routes: {
':model/:id': 'showModelDetail',
':model': 'showModelList',
'': 'showHome'
},
currentView: null,
application: null,
initialize: function(options) {
self = this;
this.application = options.application;
},
showHome: function() {
Backbone.history.navigate('task', { trigger: true });
},
bindEvents: function(view) {
this.listenTo(view, 'show:message', function() {
self.application.execute.apply(self.application, ['showMessage'].concat(_.values(arguments)));
});
this.listenTo(view, 'close', function () {
self.stopListening(view);
});
return view;
},
showModelList: function(model) {
var collectionClass = require(['collections', model].join('/')),
collection = new collectionClass(),
collectionViewClass = require(['views', model, 'collection'].join('/'));
collection.fetch().then(function() {
self.application.contentRegion.show(
self.bindEvents(new collectionViewClass({ collection: collection }))
);
}, function(response) {
self.application.vent.trigger('response:failed', 'test 123123');
});
},
showModelDetail: function(id) {
var task = new TaskModel(id),
callback = function() {
self.application.contentRegion.show(new TaskItemView({ model: task }));
};
tasks.fetch().then(callback, function() {
self.application.vent.trigger('response:failed', 'test 123123');
});
}
});
});
| web/assets/js/src/routers/main.js | define(function(require) {
'use strict';
var _ = require('underscore'),
MainLayout = require('views/layout');
require('collections/task');
require('backbone.marionette');
// Define and initializes self only once
var self;
return Backbone.Marionette.AppRouter.extend({
routes: {
':model/:id': 'showModelDetail',
':model': 'showModelList',
'': 'showHome'
},
currentView: null,
application: null,
initialize: function(options) {
self = this;
this.application = options.application;
},
showHome: function() {
if (this.currentView == null || !(this.currentView instanceof MainLayout)) {
this.currentView = new MainLayout();
}
},
bindEvents: function(view) {
this.listenTo(view, 'show:message', function() {
self.application.execute.apply(self.application, ['showMessage'].concat(_.values(arguments)));
});
this.listenTo(view, 'close', function () {
self.stopListening(view);
});
return view;
},
showModelList: function(model) {
var collectionClass = require(['collections', model].join('/')),
collection = new collectionClass(),
collectionViewClass = require(['views', model, 'collection'].join('/'));
collection.fetch().then(function() {
self.application.contentRegion.show(
self.bindEvents(new collectionViewClass({ collection: collection }))
);
}, function(response) {
self.application.vent.trigger('response:failed', 'test 123123');
});
},
showModelDetail: function(id) {
var task = new TaskModel(id),
callback = function() {
self.application.contentRegion.show(new TaskItemView({ model: task }));
};
tasks.fetch().then(callback, function() {
self.application.vent.trigger('response:failed', 'test 123123');
});
}
});
});
| Redirects to tasks view by default
| web/assets/js/src/routers/main.js | Redirects to tasks view by default | <ide><path>eb/assets/js/src/routers/main.js
<ide> },
<ide>
<ide> showHome: function() {
<del> if (this.currentView == null || !(this.currentView instanceof MainLayout)) {
<del> this.currentView = new MainLayout();
<del> }
<add> Backbone.history.navigate('task', { trigger: true });
<ide> },
<ide>
<ide> bindEvents: function(view) { |
|
Java | agpl-3.0 | 429e943a363d76c6a8953d2905c5ab33dee8f6bc | 0 | bisq-network/exchange,bisq-network/exchange,bitsquare/bitsquare,bitsquare/bitsquare | package bisq.apitest.method.payment;
import bisq.core.api.model.PaymentAccountForm;
import bisq.core.locale.FiatCurrency;
import bisq.core.locale.Res;
import bisq.core.locale.TradeCurrency;
import bisq.core.payment.PaymentAccount;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.stream.JsonWriter;
import java.nio.file.Paths;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInfo;
import static java.lang.String.format;
import static java.lang.System.getProperty;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.*;
import bisq.apitest.method.MethodTest;
import bisq.cli.GrpcClient;
@Slf4j
public class AbstractPaymentAccountTest extends MethodTest {
static final String PROPERTY_NAME_JSON_COMMENTS = "_COMMENTS_";
static final List<String> PROPERTY_VALUE_JSON_COMMENTS = new ArrayList<>() {{
add("Do not manually edit the paymentMethodId field.");
add("Edit the salt field only if you are recreating a payment"
+ " account on a new installation and wish to preserve the account age.");
}};
static final String PROPERTY_NAME_PAYMENT_METHOD_ID = "paymentMethodId";
static final String PROPERTY_NAME_ACCOUNT_ID = "accountId";
static final String PROPERTY_NAME_ACCOUNT_NAME = "accountName";
static final String PROPERTY_NAME_ACCOUNT_NR = "accountNr";
static final String PROPERTY_NAME_ACCOUNT_TYPE = "accountType";
static final String PROPERTY_NAME_ANSWER = "answer";
static final String PROPERTY_NAME_BANK_ACCOUNT_NAME = "bankAccountName";
static final String PROPERTY_NAME_BANK_ACCOUNT_NUMBER = "bankAccountNumber";
static final String PROPERTY_NAME_BANK_ACCOUNT_TYPE = "bankAccountType";
static final String PROPERTY_NAME_BANK_ADDRESS = "bankAddress";
static final String PROPERTY_NAME_BANK_BRANCH = "bankBranch";
static final String PROPERTY_NAME_BANK_BRANCH_CODE = "bankBranchCode";
static final String PROPERTY_NAME_BANK_BRANCH_NAME = "bankBranchName";
static final String PROPERTY_NAME_BANK_CODE = "bankCode";
static final String PROPERTY_NAME_BANK_COUNTRY_CODE = "bankCountryCode";
@SuppressWarnings("unused")
static final String PROPERTY_NAME_BANK_ID = "bankId";
static final String PROPERTY_NAME_BANK_NAME = "bankName";
static final String PROPERTY_NAME_BANK_SWIFT_CODE = "bankSwiftCode";
static final String PROPERTY_NAME_BRANCH_ID = "branchId";
static final String PROPERTY_NAME_BIC = "bic";
static final String PROPERTY_NAME_BENEFICIARY_NAME = "beneficiaryName";
static final String PROPERTY_NAME_BENEFICIARY_ACCOUNT_NR = "beneficiaryAccountNr";
static final String PROPERTY_NAME_BENEFICIARY_ADDRESS = "beneficiaryAddress";
static final String PROPERTY_NAME_BENEFICIARY_CITY = "beneficiaryCity";
static final String PROPERTY_NAME_BENEFICIARY_PHONE = "beneficiaryPhone";
static final String PROPERTY_NAME_COUNTRY = "country";
static final String PROPERTY_NAME_CITY = "city";
static final String PROPERTY_NAME_CONTACT = "contact";
static final String PROPERTY_NAME_EMAIL = "email";
static final String PROPERTY_NAME_EMAIL_OR_MOBILE_NR = "emailOrMobileNr";
static final String PROPERTY_NAME_EXTRA_INFO = "extraInfo";
static final String PROPERTY_NAME_HOLDER_EMAIL = "holderEmail";
static final String PROPERTY_NAME_HOLDER_NAME = "holderName";
static final String PROPERTY_NAME_HOLDER_TAX_ID = "holderTaxId";
static final String PROPERTY_NAME_IBAN = "iban";
static final String PROPERTY_NAME_INTERMEDIARY_ADDRESS = "intermediaryAddress";
static final String PROPERTY_NAME_INTERMEDIARY_BRANCH = "intermediaryBranch";
static final String PROPERTY_NAME_INTERMEDIARY_COUNTRY_CODE = "intermediaryCountryCode";
static final String PROPERTY_NAME_INTERMEDIARY_NAME = "intermediaryName";
static final String PROPERTY_NAME_INTERMEDIARY_SWIFT_CODE = "intermediarySwiftCode";
static final String PROPERTY_NAME_MOBILE_NR = "mobileNr";
static final String PROPERTY_NAME_NATIONAL_ACCOUNT_ID = "nationalAccountId";
static final String PROPERTY_NAME_PAY_ID = "payid";
static final String PROPERTY_NAME_POSTAL_ADDRESS = "postalAddress";
static final String PROPERTY_NAME_PROMPT_PAY_ID = "promptPayId";
static final String PROPERTY_NAME_QUESTION = "question";
static final String PROPERTY_NAME_REQUIREMENTS = "requirements";
static final String PROPERTY_NAME_SALT = "salt";
static final String PROPERTY_NAME_SELECTED_TRADE_CURRENCY = "selectedTradeCurrency";
static final String PROPERTY_NAME_SORT_CODE = "sortCode";
static final String PROPERTY_NAME_SPECIAL_INSTRUCTIONS = "specialInstructions";
static final String PROPERTY_NAME_STATE = "state";
static final String PROPERTY_NAME_TRADE_CURRENCIES = "tradeCurrencies";
static final String PROPERTY_NAME_USERNAME = "userName";
static final Gson GSON = new GsonBuilder()
.setPrettyPrinting()
.serializeNulls()
.create();
static final Map<String, Object> COMPLETED_FORM_MAP = new HashMap<>();
// A payment account serializer / deserializer.
static final PaymentAccountForm PAYMENT_ACCOUNT_FORM = new PaymentAccountForm();
@BeforeEach
public void setup() {
Res.setup();
}
protected final File getEmptyForm(TestInfo testInfo, String paymentMethodId) {
// This would normally be done in @BeforeEach, but these test cases might be
// called from a single 'scenario' test case, and the @BeforeEach -> clear()
// would be skipped.
COMPLETED_FORM_MAP.clear();
File emptyForm = getPaymentAccountForm(aliceClient, paymentMethodId);
// A shortcut over the API:
// File emptyForm = PAYMENT_ACCOUNT_FORM.getPaymentAccountForm(paymentMethodId);
log.debug("{} Empty form saved to {}",
testName(testInfo),
PAYMENT_ACCOUNT_FORM.getClickableURI(emptyForm));
emptyForm.deleteOnExit();
return emptyForm;
}
protected final void verifyEmptyForm(File jsonForm, String paymentMethodId, String... fields) {
@SuppressWarnings("unchecked")
Map<String, Object> emptyForm = (Map<String, Object>) GSON.fromJson(
PAYMENT_ACCOUNT_FORM.toJsonString(jsonForm),
Object.class);
assertNotNull(emptyForm);
// TODO remove 'false' condition to enable creation of SWIFT accounts in future PR.
if (false && paymentMethodId.equals("SWIFT_ID")) {
assertEquals(getSwiftFormComments(), emptyForm.get(PROPERTY_NAME_JSON_COMMENTS));
} else {
assertEquals(PROPERTY_VALUE_JSON_COMMENTS, emptyForm.get(PROPERTY_NAME_JSON_COMMENTS));
}
assertEquals(paymentMethodId, emptyForm.get(PROPERTY_NAME_PAYMENT_METHOD_ID));
assertEquals("your accountname", emptyForm.get(PROPERTY_NAME_ACCOUNT_NAME));
for (String field : fields) {
if (field.equals("country"))
assertEquals("your two letter country code", emptyForm.get(field));
else
assertEquals("your " + field.toLowerCase(), emptyForm.get(field));
}
}
protected final void verifyCommonFormEntries(PaymentAccount paymentAccount) {
// All PaymentAccount subclasses have paymentMethodId and an accountName fields.
assertNotNull(paymentAccount);
assertEquals(COMPLETED_FORM_MAP.get(PROPERTY_NAME_PAYMENT_METHOD_ID), paymentAccount.getPaymentMethod().getId());
assertTrue(paymentAccount.getCreationDate().getTime() > 0);
assertEquals(COMPLETED_FORM_MAP.get(PROPERTY_NAME_ACCOUNT_NAME), paymentAccount.getAccountName());
}
protected final void verifyAccountSingleTradeCurrency(String expectedCurrencyCode, PaymentAccount paymentAccount) {
assertNotNull(paymentAccount.getSingleTradeCurrency());
assertEquals(expectedCurrencyCode, paymentAccount.getSingleTradeCurrency().getCode());
}
protected final void verifyAccountTradeCurrencies(Collection<FiatCurrency> expectedFiatCurrencies,
PaymentAccount paymentAccount) {
assertNotNull(paymentAccount.getTradeCurrencies());
List<TradeCurrency> expectedTradeCurrencies = new ArrayList<>() {{
addAll(expectedFiatCurrencies);
}};
assertArrayEquals(expectedTradeCurrencies.toArray(), paymentAccount.getTradeCurrencies().toArray());
}
protected final void verifyAccountTradeCurrencies(List<TradeCurrency> expectedTradeCurrencies,
PaymentAccount paymentAccount) {
assertNotNull(paymentAccount.getTradeCurrencies());
assertArrayEquals(expectedTradeCurrencies.toArray(), paymentAccount.getTradeCurrencies().toArray());
}
protected final void verifyUserPayloadHasPaymentAccountWithId(GrpcClient grpcClient,
String paymentAccountId) {
Optional<protobuf.PaymentAccount> paymentAccount = grpcClient.getPaymentAccounts()
.stream()
.filter(a -> a.getId().equals(paymentAccountId))
.findFirst();
assertTrue(paymentAccount.isPresent());
}
protected final String getCompletedFormAsJsonString(List<String> comments) {
File completedForm = fillPaymentAccountForm(comments);
String jsonString = PAYMENT_ACCOUNT_FORM.toJsonString(completedForm);
log.debug("Completed form: {}", jsonString);
return jsonString;
}
protected final String getCompletedFormAsJsonString() {
File completedForm = fillPaymentAccountForm(PROPERTY_VALUE_JSON_COMMENTS);
String jsonString = PAYMENT_ACCOUNT_FORM.toJsonString(completedForm);
log.debug("Completed form: {}", jsonString);
return jsonString;
}
protected final String getCommaDelimitedFiatCurrencyCodes(Collection<FiatCurrency> fiatCurrencies) {
return fiatCurrencies.stream()
.sorted(TradeCurrency::compareTo) // note: sorted by ccy name, not ccy code
.map(c -> c.getCurrency().getCurrencyCode())
.collect(Collectors.joining(","));
}
protected final String getCommaDelimitedTradeCurrencyCodes(List<TradeCurrency> tradeCurrencies) {
return tradeCurrencies.stream()
.sorted(Comparator.comparing(TradeCurrency::getCode)) // sorted by code
.map(c -> c.getCode())
.collect(Collectors.joining(","));
}
protected final List<String> getSwiftFormComments() {
List<String> comments = new ArrayList<>();
comments.addAll(PROPERTY_VALUE_JSON_COMMENTS);
// List<String> wrappedSwiftComments = Res.getWrappedAsList("payment.swift.info", 110);
// comments.addAll(wrappedSwiftComments);
// comments.add("See https://bisq.wiki/SWIFT");
return comments;
}
private File fillPaymentAccountForm(List<String> comments) {
File tmpJsonForm = null;
try {
tmpJsonForm = File.createTempFile("temp_acct_form_",
".json",
Paths.get(getProperty("java.io.tmpdir")).toFile());
JsonWriter writer = new JsonWriter(new OutputStreamWriter(new FileOutputStream(tmpJsonForm), UTF_8));
writer.beginObject();
writer.name(PROPERTY_NAME_JSON_COMMENTS);
writer.beginArray();
for (String s : comments) {
writer.value(s);
}
writer.endArray();
for (Map.Entry<String, Object> entry : COMPLETED_FORM_MAP.entrySet()) {
String k = entry.getKey();
Object v = entry.getValue();
writer.name(k);
writer.value(v.toString());
}
writer.endObject();
writer.close();
} catch (IOException ex) {
log.error("", ex);
fail(format("Could not write json file from form entries %s", COMPLETED_FORM_MAP));
}
tmpJsonForm.deleteOnExit();
return tmpJsonForm;
}
}
| apitest/src/test/java/bisq/apitest/method/payment/AbstractPaymentAccountTest.java | package bisq.apitest.method.payment;
import bisq.core.api.model.PaymentAccountForm;
import bisq.core.locale.FiatCurrency;
import bisq.core.locale.Res;
import bisq.core.locale.TradeCurrency;
import bisq.core.payment.PaymentAccount;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.stream.JsonWriter;
import java.nio.file.Paths;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInfo;
import static java.lang.String.format;
import static java.lang.System.getProperty;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.*;
import bisq.apitest.method.MethodTest;
import bisq.cli.GrpcClient;
@Slf4j
public class AbstractPaymentAccountTest extends MethodTest {
static final String PROPERTY_NAME_JSON_COMMENTS = "_COMMENTS_";
static final List<String> PROPERTY_VALUE_JSON_COMMENTS = new ArrayList<>() {{
add("Do not manually edit the paymentMethodId field.");
add("Edit the salt field only if you are recreating a payment"
+ " account on a new installation and wish to preserve the account age.");
}};
static final String PROPERTY_NAME_PAYMENT_METHOD_ID = "paymentMethodId";
static final String PROPERTY_NAME_ACCOUNT_ID = "accountId";
static final String PROPERTY_NAME_ACCOUNT_NAME = "accountName";
static final String PROPERTY_NAME_ACCOUNT_NR = "accountNr";
static final String PROPERTY_NAME_ACCOUNT_TYPE = "accountType";
static final String PROPERTY_NAME_ANSWER = "answer";
static final String PROPERTY_NAME_BANK_ACCOUNT_NAME = "bankAccountName";
static final String PROPERTY_NAME_BANK_ACCOUNT_NUMBER = "bankAccountNumber";
static final String PROPERTY_NAME_BANK_ACCOUNT_TYPE = "bankAccountType";
static final String PROPERTY_NAME_BANK_ADDRESS = "bankAddress";
static final String PROPERTY_NAME_BANK_BRANCH = "bankBranch";
static final String PROPERTY_NAME_BANK_BRANCH_CODE = "bankBranchCode";
static final String PROPERTY_NAME_BANK_BRANCH_NAME = "bankBranchName";
static final String PROPERTY_NAME_BANK_CODE = "bankCode";
static final String PROPERTY_NAME_BANK_COUNTRY_CODE = "bankCountryCode";
@SuppressWarnings("unused")
static final String PROPERTY_NAME_BANK_ID = "bankId";
static final String PROPERTY_NAME_BANK_NAME = "bankName";
static final String PROPERTY_NAME_BANK_SWIFT_CODE = "bankSwiftCode";
static final String PROPERTY_NAME_BRANCH_ID = "branchId";
static final String PROPERTY_NAME_BIC = "bic";
static final String PROPERTY_NAME_BENEFICIARY_NAME = "beneficiaryName";
static final String PROPERTY_NAME_BENEFICIARY_ACCOUNT_NR = "beneficiaryAccountNr";
static final String PROPERTY_NAME_BENEFICIARY_ADDRESS = "beneficiaryAddress";
static final String PROPERTY_NAME_BENEFICIARY_CITY = "beneficiaryCity";
static final String PROPERTY_NAME_BENEFICIARY_PHONE = "beneficiaryPhone";
static final String PROPERTY_NAME_COUNTRY = "country";
static final String PROPERTY_NAME_CITY = "city";
static final String PROPERTY_NAME_CONTACT = "contact";
static final String PROPERTY_NAME_EMAIL = "email";
static final String PROPERTY_NAME_EMAIL_OR_MOBILE_NR = "emailOrMobileNr";
static final String PROPERTY_NAME_EXTRA_INFO = "extraInfo";
static final String PROPERTY_NAME_HOLDER_EMAIL = "holderEmail";
static final String PROPERTY_NAME_HOLDER_NAME = "holderName";
static final String PROPERTY_NAME_HOLDER_TAX_ID = "holderTaxId";
static final String PROPERTY_NAME_IBAN = "iban";
static final String PROPERTY_NAME_INTERMEDIARY_ADDRESS = "intermediaryAddress";
static final String PROPERTY_NAME_INTERMEDIARY_BRANCH = "intermediaryBranch";
static final String PROPERTY_NAME_INTERMEDIARY_COUNTRY_CODE = "intermediaryCountryCode";
static final String PROPERTY_NAME_INTERMEDIARY_NAME = "intermediaryName";
static final String PROPERTY_NAME_INTERMEDIARY_SWIFT_CODE = "intermediarySwiftCode";
static final String PROPERTY_NAME_MOBILE_NR = "mobileNr";
static final String PROPERTY_NAME_NATIONAL_ACCOUNT_ID = "nationalAccountId";
static final String PROPERTY_NAME_PAY_ID = "payid";
static final String PROPERTY_NAME_POSTAL_ADDRESS = "postalAddress";
static final String PROPERTY_NAME_PROMPT_PAY_ID = "promptPayId";
static final String PROPERTY_NAME_QUESTION = "question";
static final String PROPERTY_NAME_REQUIREMENTS = "requirements";
static final String PROPERTY_NAME_SALT = "salt";
static final String PROPERTY_NAME_SELECTED_TRADE_CURRENCY = "selectedTradeCurrency";
static final String PROPERTY_NAME_SORT_CODE = "sortCode";
static final String PROPERTY_NAME_SPECIAL_INSTRUCTIONS = "specialInstructions";
static final String PROPERTY_NAME_STATE = "state";
static final String PROPERTY_NAME_TRADE_CURRENCIES = "tradeCurrencies";
static final String PROPERTY_NAME_USERNAME = "userName";
static final Gson GSON = new GsonBuilder()
.setPrettyPrinting()
.serializeNulls()
.create();
static final Map<String, Object> COMPLETED_FORM_MAP = new HashMap<>();
// A payment account serializer / deserializer.
static final PaymentAccountForm PAYMENT_ACCOUNT_FORM = new PaymentAccountForm();
@BeforeEach
public void setup() {
Res.setup();
}
protected final File getEmptyForm(TestInfo testInfo, String paymentMethodId) {
// This would normally be done in @BeforeEach, but these test cases might be
// called from a single 'scenario' test case, and the @BeforeEach -> clear()
// would be skipped.
COMPLETED_FORM_MAP.clear();
File emptyForm = getPaymentAccountForm(aliceClient, paymentMethodId);
// A shortcut over the API:
// File emptyForm = PAYMENT_ACCOUNT_FORM.getPaymentAccountForm(paymentMethodId);
log.debug("{} Empty form saved to {}",
testName(testInfo),
PAYMENT_ACCOUNT_FORM.getClickableURI(emptyForm));
emptyForm.deleteOnExit();
return emptyForm;
}
protected final void verifyEmptyForm(File jsonForm, String paymentMethodId, String... fields) {
@SuppressWarnings("unchecked")
Map<String, Object> emptyForm = (Map<String, Object>) GSON.fromJson(
PAYMENT_ACCOUNT_FORM.toJsonString(jsonForm),
Object.class);
assertNotNull(emptyForm);
if (false && paymentMethodId.equals("SWIFT_ID")) {
assertEquals(getSwiftFormComments(), emptyForm.get(PROPERTY_NAME_JSON_COMMENTS));
} else {
assertEquals(PROPERTY_VALUE_JSON_COMMENTS, emptyForm.get(PROPERTY_NAME_JSON_COMMENTS));
}
assertEquals(paymentMethodId, emptyForm.get(PROPERTY_NAME_PAYMENT_METHOD_ID));
assertEquals("your accountname", emptyForm.get(PROPERTY_NAME_ACCOUNT_NAME));
for (String field : fields) {
if (field.equals("country"))
assertEquals("your two letter country code", emptyForm.get(field));
else
assertEquals("your " + field.toLowerCase(), emptyForm.get(field));
}
}
protected final void verifyCommonFormEntries(PaymentAccount paymentAccount) {
// All PaymentAccount subclasses have paymentMethodId and an accountName fields.
assertNotNull(paymentAccount);
assertEquals(COMPLETED_FORM_MAP.get(PROPERTY_NAME_PAYMENT_METHOD_ID), paymentAccount.getPaymentMethod().getId());
assertTrue(paymentAccount.getCreationDate().getTime() > 0);
assertEquals(COMPLETED_FORM_MAP.get(PROPERTY_NAME_ACCOUNT_NAME), paymentAccount.getAccountName());
}
protected final void verifyAccountSingleTradeCurrency(String expectedCurrencyCode, PaymentAccount paymentAccount) {
assertNotNull(paymentAccount.getSingleTradeCurrency());
assertEquals(expectedCurrencyCode, paymentAccount.getSingleTradeCurrency().getCode());
}
protected final void verifyAccountTradeCurrencies(Collection<FiatCurrency> expectedFiatCurrencies,
PaymentAccount paymentAccount) {
assertNotNull(paymentAccount.getTradeCurrencies());
List<TradeCurrency> expectedTradeCurrencies = new ArrayList<>() {{
addAll(expectedFiatCurrencies);
}};
assertArrayEquals(expectedTradeCurrencies.toArray(), paymentAccount.getTradeCurrencies().toArray());
}
protected final void verifyAccountTradeCurrencies(List<TradeCurrency> expectedTradeCurrencies,
PaymentAccount paymentAccount) {
assertNotNull(paymentAccount.getTradeCurrencies());
assertArrayEquals(expectedTradeCurrencies.toArray(), paymentAccount.getTradeCurrencies().toArray());
}
protected final void verifyUserPayloadHasPaymentAccountWithId(GrpcClient grpcClient,
String paymentAccountId) {
Optional<protobuf.PaymentAccount> paymentAccount = grpcClient.getPaymentAccounts()
.stream()
.filter(a -> a.getId().equals(paymentAccountId))
.findFirst();
assertTrue(paymentAccount.isPresent());
}
protected final String getCompletedFormAsJsonString(List<String> comments) {
File completedForm = fillPaymentAccountForm(comments);
String jsonString = PAYMENT_ACCOUNT_FORM.toJsonString(completedForm);
log.debug("Completed form: {}", jsonString);
return jsonString;
}
protected final String getCompletedFormAsJsonString() {
File completedForm = fillPaymentAccountForm(PROPERTY_VALUE_JSON_COMMENTS);
String jsonString = PAYMENT_ACCOUNT_FORM.toJsonString(completedForm);
log.debug("Completed form: {}", jsonString);
return jsonString;
}
protected final String getCommaDelimitedFiatCurrencyCodes(Collection<FiatCurrency> fiatCurrencies) {
return fiatCurrencies.stream()
.sorted(TradeCurrency::compareTo) // note: sorted by ccy name, not ccy code
.map(c -> c.getCurrency().getCurrencyCode())
.collect(Collectors.joining(","));
}
protected final String getCommaDelimitedTradeCurrencyCodes(List<TradeCurrency> tradeCurrencies) {
return tradeCurrencies.stream()
.sorted(Comparator.comparing(TradeCurrency::getCode)) // sorted by code
.map(c -> c.getCode())
.collect(Collectors.joining(","));
}
protected final List<String> getSwiftFormComments() {
List<String> comments = new ArrayList<>();
comments.addAll(PROPERTY_VALUE_JSON_COMMENTS);
// List<String> wrappedSwiftComments = Res.getWrappedAsList("payment.swift.info", 110);
// comments.addAll(wrappedSwiftComments);
// comments.add("See https://bisq.wiki/SWIFT");
return comments;
}
private File fillPaymentAccountForm(List<String> comments) {
File tmpJsonForm = null;
try {
tmpJsonForm = File.createTempFile("temp_acct_form_",
".json",
Paths.get(getProperty("java.io.tmpdir")).toFile());
JsonWriter writer = new JsonWriter(new OutputStreamWriter(new FileOutputStream(tmpJsonForm), UTF_8));
writer.beginObject();
writer.name(PROPERTY_NAME_JSON_COMMENTS);
writer.beginArray();
for (String s : comments) {
writer.value(s);
}
writer.endArray();
for (Map.Entry<String, Object> entry : COMPLETED_FORM_MAP.entrySet()) {
String k = entry.getKey();
Object v = entry.getValue();
writer.name(k);
writer.value(v.toString());
}
writer.endObject();
writer.close();
} catch (IOException ex) {
log.error("", ex);
fail(format("Could not write json file from form entries %s", COMPLETED_FORM_MAP));
}
tmpJsonForm.deleteOnExit();
return tmpJsonForm;
}
}
| Add TODO commment above placeholder for future swift test
Resolves https://github.com/bisq-network/bisq/pull/5685#discussion_r710005619.
| apitest/src/test/java/bisq/apitest/method/payment/AbstractPaymentAccountTest.java | Add TODO commment above placeholder for future swift test | <ide><path>pitest/src/test/java/bisq/apitest/method/payment/AbstractPaymentAccountTest.java
<ide> Object.class);
<ide> assertNotNull(emptyForm);
<ide>
<add> // TODO remove 'false' condition to enable creation of SWIFT accounts in future PR.
<ide> if (false && paymentMethodId.equals("SWIFT_ID")) {
<ide> assertEquals(getSwiftFormComments(), emptyForm.get(PROPERTY_NAME_JSON_COMMENTS));
<ide> } else { |
|
Java | apache-2.0 | d7aad455f4e7f12098ef53c08f6ff596201ca4ea | 0 | Kurento/kurento-java,EugenioFidel/kurento-java,EugenioFidel/kurento-java,Kurento/kurento-java,EugenioFidel/kurento-java,Kurento/kurento-java,EugenioFidel/kurento-java,Kurento/kurento-java | /*
* (C) Copyright 2015 Kurento (http://kurento.org/)
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl-2.1.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
package org.kurento.test.services;
import static org.kurento.commons.PropertiesManager.getProperty;
import static org.kurento.test.browser.WebRtcCandidateType.RELAY;
import static org.kurento.test.browser.WebRtcCandidateType.SRFLX;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_FALSE_VALUE;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_TESTCLASS_VALUE;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_TESTSUITE_VALUE;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_TEST_VALUE;
import static org.kurento.test.config.TestConfiguration.KMS_AUTOSTART_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_AUTOSTART_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_FORCE_PULLING_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_FORCE_PULLING_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_NAME_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_NAME_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_ACCESS_KEY_ID;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_BUCKET_NAME;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_HOSTNAME;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_SECRET_ACCESS_KEY;
import static org.kurento.test.config.TestConfiguration.KMS_GST_PLUGINS_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_LOGIN_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_LOG_PATH_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_LOG_PATH_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_PASSWD_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_PEM_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_SCOPE_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_SCOPE_DOCKER;
import static org.kurento.test.config.TestConfiguration.KMS_SCOPE_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_COMMAND_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_COMMAND_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_DEBUG_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_DEBUG_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_WS_URI_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_WS_URI_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_WS_URI_PROP_EXPORT;
import static org.kurento.test.config.TestConfiguration.KSM_GST_PLUGINS_PROP;
import static org.kurento.test.config.TestConfiguration.TEST_ICE_CANDIDATE_KMS_TYPE;
import static org.kurento.test.config.TestConfiguration.TEST_ICE_CANDIDATE_SELENIUM_TYPE;
import static org.kurento.test.config.TestConfiguration.TEST_ICE_SERVER_URL_PROPERTY;
import static org.kurento.test.config.TestConfiguration.TEST_KMS_DNAT;
import static org.kurento.test.config.TestConfiguration.TEST_KMS_DNAT_DEFAULT;
import static org.kurento.test.config.TestConfiguration.TEST_KMS_TRANSPORT;
import static org.kurento.test.config.TestConfiguration.TEST_SELENIUM_DNAT;
import static org.kurento.test.config.TestConfiguration.TEST_SELENIUM_DNAT_DEFAULT;
import static org.kurento.test.services.TestService.TestServiceScope.EXTERNAL;
import static org.kurento.test.services.TestService.TestServiceScope.TEST;
import static org.kurento.test.services.TestService.TestServiceScope.TESTCLASS;
import static org.kurento.test.services.TestService.TestServiceScope.TESTSUITE;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Writer;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.websocket.ClientEndpoint;
import javax.websocket.ClientEndpointConfig;
import javax.websocket.CloseReason;
import javax.websocket.ContainerProvider;
import javax.websocket.DeploymentException;
import javax.websocket.Endpoint;
import javax.websocket.EndpointConfig;
import javax.websocket.OnClose;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.WebSocketContainer;
import org.apache.commons.io.FileUtils;
import org.kurento.client.ErrorEvent;
import org.kurento.client.EventListener;
import org.kurento.client.KurentoClient;
import org.kurento.client.MediaPipeline;
import org.kurento.client.ObjectCreatedEvent;
import org.kurento.commons.exception.KurentoException;
import org.kurento.test.base.KurentoTest;
import org.kurento.test.config.TestConfiguration;
import org.kurento.test.docker.Docker;
import org.kurento.test.utils.Shell;
import org.kurento.test.utils.SshConnection;
import com.github.dockerjava.api.command.CreateContainerCmd;
import com.github.dockerjava.api.command.CreateContainerResponse;
import com.github.dockerjava.api.model.AccessMode;
import com.github.dockerjava.api.model.Bind;
import com.github.dockerjava.api.model.Volume;
import com.github.dockerjava.api.model.VolumesFrom;
import com.github.dockerjava.core.command.PullImageResultCallback;
import com.google.common.io.CharStreams;
import freemarker.template.Configuration;
import freemarker.template.Template;
/**
* Kurento Media Server service.
*
* @author Boni Garcia ([email protected])
* @since 6.1.1
*/
public class KmsService extends TestService {
// FIXME replace with a registration mechanism
protected static String monitoredDockerContainerName;
protected String dockerContainerName = "kms";
protected SshConnection remoteKmsSshConnection;
protected Path workspace;
protected String wsUri;
protected boolean isKmsRemote;
protected boolean isKmsDocker;
protected boolean isKmsStarted;
protected String registrarUri;
protected String registrarLocalAddress = "127.0.0.1";
protected String kmsLoginProp;
protected String kmsPasswdProp;
protected String kmsPemProp;
protected String kmsAutostartProp;
protected String kmsAutostartDefault;
protected String kmsWsUriProp;
protected String kmsWsUriExportProp;
protected String kmsScopeProp;
protected String kmsScopeDefault;
protected KurentoClient kurentoClient;
public KmsService(String wsUri) {
this();
setWsUri(wsUri);
}
public KmsService() {
this.kmsLoginProp = KMS_LOGIN_PROP;
this.kmsPasswdProp = KMS_PASSWD_PROP;
this.kmsPemProp = KMS_PEM_PROP;
this.kmsAutostartProp = KMS_AUTOSTART_PROP;
this.kmsAutostartDefault = KMS_AUTOSTART_DEFAULT;
this.kmsWsUriProp = KMS_WS_URI_PROP;
this.kmsWsUriExportProp = KMS_WS_URI_PROP_EXPORT;
this.kmsScopeProp = KMS_SCOPE_PROP;
this.kmsScopeDefault = KMS_SCOPE_DEFAULT;
setWsUri(getProperty(kmsWsUriProp, KMS_WS_URI_DEFAULT));
}
public KmsService(String kmsLoginProp, String kmsPasswdProp, String kmsPemProp,
String kmsAutostartProp, String kmsWsUriProp, String kmsWsUriExportProp, String kmsScopeProp,
String kmsScopeDefault) {
this.kmsLoginProp = kmsLoginProp;
this.kmsPasswdProp = kmsPasswdProp;
this.kmsPemProp = kmsPemProp;
this.kmsAutostartProp = kmsAutostartProp;
this.kmsWsUriProp = kmsWsUriProp;
this.kmsWsUriExportProp = kmsWsUriExportProp;
this.kmsScopeProp = kmsScopeProp;
this.kmsScopeDefault = kmsScopeDefault;
setWsUri(getProperty(kmsWsUriProp, KMS_WS_URI_DEFAULT));
}
/**
 * Starts the KMS instance according to its scope: dockerized (container started later in
 * startKms), remote (provisioned over SSH), or local (launched from a generated
 * kurento.sh in a temp workspace). Blocks until the KMS WebSocket endpoint answers.
 *
 * @throws KurentoException if remote credentials are missing, the local port is busy, or
 *         the temp workspace cannot be created
 */
@Override
public void start() {
  super.start();

  if (wsUri == null) {
    log.warn("WS URI is null, will not start");
    isKmsStarted = false;
    return;
  }

  // FIX: compute isKmsDocker BEFORE isKmsRemote. The original evaluated isKmsRemote
  // first, so its "&& !isKmsDocker" term read the stale field value (false on first run,
  // or the value from a previous lifecycle), misclassifying a dockerized KMS with a
  // non-localhost URI as a remote KMS.
  isKmsDocker = KMS_SCOPE_DOCKER.equals(getProperty(kmsScopeProp, kmsScopeDefault));
  isKmsRemote = !wsUri.contains("localhost") && !wsUri.contains("127.0.0.1") && !isKmsDocker;

  // Assertion: if KMS remote, credentials should be available
  String kmsLogin = getProperty(kmsLoginProp);
  String kmsPasswd = getProperty(kmsPasswdProp);
  String kmsPem = getProperty(kmsPemProp);
  String kmsAutoStart = getProperty(kmsAutostartProp, kmsAutostartDefault);
  if (isKmsRemote && kmsLogin == null && (kmsPem == null || kmsPasswd == null)) {
    throw new KurentoException("Bad test parameters: " + kmsAutostartProp + "=" + kmsAutoStart
        + " and " + kmsWsUriProp + "=" + wsUri
        + ". Remote KMS should be started but its credentials are not present: " + kmsLoginProp
        + "=" + kmsLogin + ", " + kmsPasswdProp + "=" + kmsPasswd + ", " + kmsPemProp + "="
        + kmsPem);
  }

  // Assertion: if local or remote KMS, port should be available
  if (!isKmsDocker && !isKmsRemote && !isFreePort(wsUri)) {
    throw new KurentoException("KMS cannot be started in URI: " + wsUri + ". Port is not free");
  }

  if (isKmsDocker) {
    log.info("Starting KMS dockerized");
    Docker dockerClient = Docker.getSingleton();
    if (dockerClient.isRunningInContainer()) {
      setDockerContainerName(dockerClient.getContainerName() + getDockerContainerNameSuffix());
    }
  } else {
    log.info("Starting KMS with URI: {}", wsUri);
    try {
      workspace = Files.createTempDirectory("kurento-test");
    } catch (IOException e) {
      throw new KurentoException("Exception creating temporal folder", e);
    }
    log.trace("Local folder to store temporal files: {}", workspace);

    if (isKmsRemote) {
      // Extract the host part of the ws URI, e.g. "host" from "ws://host:8888/kurento".
      String remoteKmsStr = wsUri.substring(wsUri.indexOf("//") + 2, wsUri.lastIndexOf(":"));
      log.info("Using remote KMS at {}", remoteKmsStr);
      remoteKmsSshConnection = new SshConnection(remoteKmsStr, kmsLogin, kmsPasswd, kmsPem);
      if (kmsPem != null) {
        remoteKmsSshConnection.setPem(kmsPem);
      }
      remoteKmsSshConnection.start();
      remoteKmsSshConnection.createTmpFolder();
    }

    createKurentoConf();
  }

  // Remote KMS with autostart: ship the generated config and launch script over SCP.
  if (isKmsRemote && !kmsAutoStart.equals(AUTOSTART_FALSE_VALUE)) {
    String[] filesToBeCopied = { "kurento.conf.json", "kurento.sh" };
    for (String s : filesToBeCopied) {
      remoteKmsSshConnection.scp(workspace + File.separator + s,
          remoteKmsSshConnection.getTmpFolder() + File.separator + s);
    }
    remoteKmsSshConnection.runAndWaitCommand("chmod", "+x", remoteKmsSshConnection.getTmpFolder()
        + File.separator + "kurento.sh");
  }

  startKms();
  waitForKms();
}
/**
 * Stops the KMS instance and cleans up: destroys the Kurento client, kills/stops the KMS
 * process or container, harvests log files, removes the Docker container (if any), and
 * deletes the local temp workspace (if not dockerized).
 */
@Override
public void stop() {
  super.stop();

  // Close Kurento client
  closeKurentoClient();

  // Stop KMS
  stopKms();

  // Retrieve logs
  try {
    retrieveLogs();
  } catch (IOException e) {
    log.warn("Exception retrieving KMS logs", e);
  }

  if (isKmsDocker) {
    try {
      Docker.getSingleton().removeContainer(dockerContainerName);
      log.info("*** Only for debugging: Docker.getSingleton().removeContainer({})",
          dockerContainerName);
    } catch (Throwable e) {
      // FIX: the caught throwable was previously discarded (and misleadingly named
      // "name"); pass it as the last argument so SLF4J logs the stack trace.
      log.error(
          " +++ Only for debugging: Exception on Docker.getSingleton().removeContainer({})",
          dockerContainerName, e);
    }
  }
  log.info("+++ Only for debugging: After removeContainer {}", dockerContainerName);

  // Delete temporal folder and content
  if (!isKmsDocker) {
    try {
      deleteFolderAndContent(workspace);
    } catch (IOException e) {
      log.warn("Exception deleting temporal folder {}", workspace, e);
    }
  }
  log.info("+++ Only for debugging: End of KmsService.stop() for: {}", dockerContainerName);
}
/**
 * Derives the service lifecycle scope from the autostart property: disabled autostart
 * means an externally managed KMS; otherwise the KMS is restarted per test, per test
 * class, or once per suite.
 *
 * @throws IllegalArgumentException if the autostart property holds an unknown value
 */
@Override
public TestServiceScope getScope() {
  String autostart = getProperty(kmsAutostartProp, kmsAutostartDefault);
  switch (autostart) {
    case AUTOSTART_FALSE_VALUE:
      return EXTERNAL;
    case AUTOSTART_TEST_VALUE:
      return TEST;
    case AUTOSTART_TESTCLASS_VALUE:
      return TESTCLASS;
    case AUTOSTART_TESTSUITE_VALUE:
      return TESTSUITE;
    default:
      throw new IllegalArgumentException("Unknown autostart value " + autostart);
  }
}
/**
 * Suffix appended to the enclosing container's name to build this KMS container's name
 * when the tests themselves run inside Docker. Overridable by subclasses.
 */
protected String getDockerContainerNameSuffix() {
return "_kms";
}
/**
 * Suffix appended to the test method name when naming the downloaded container log file.
 * Overridable by subclasses.
 */
protected String getDockerLogSuffix() {
return "-kms";
}
/**
 * Checks whether the host:port of the given WebSocket URI is free by probing it with
 * netcat; an exit status of 0 means something (possibly another KMS) is listening.
 *
 * @param wsUri WebSocket URI whose host and port are probed
 * @return false if the port is already in use; true otherwise (including when the URI
 *         cannot be parsed, in which case a warning is logged)
 */
private boolean isFreePort(String wsUri) {
  URI parsed;
  try {
    parsed = new URI(wsUri);
  } catch (URISyntaxException e) {
    log.warn("WebSocket URI {} is malformed: " + e.getMessage(), wsUri);
    return true;
  }
  String ncExitStatus = Shell.runAndWait("/bin/bash", "-c",
      "nc -z " + parsed.getHost() + " " + parsed.getPort() + "; echo $?");
  if ("0".equals(ncExitStatus.trim())) {
    log.warn("Port " + parsed.getPort()
        + " is used. Maybe another KMS instance is running in this port");
    return false;
  }
  return true;
}
/**
 * Generates kurento.conf.json and kurento.sh in the workspace from FreeMarker templates,
 * filling in the WebSocket port/path, registrar settings, GStreamer plugins, debug
 * options, server command and log path, and makes the launch script executable.
 *
 * @throws KurentoException if the configured ws URI is not a valid URI
 */
private void createKurentoConf() {
  Map<String, Object> data = new HashMap<>();
  try {
    URI wsAsUri = new URI(wsUri);
    int port = wsAsUri.getPort();
    String path = wsAsUri.getPath();
    data.put("wsPort", String.valueOf(port));
    // Drop the leading "/" from the URI path for the template.
    data.put("wsPath", path.substring(1));
    data.put("registrar", registrarUri);
    data.put("registrarLocalAddress", registrarLocalAddress);
  } catch (URISyntaxException e) {
    // FIX: keep the parse error as the cause (the original dropped it).
    throw new KurentoException("Invalid ws uri: " + wsUri, e);
  }
  data.put("gstPlugins", getGstPlugins());
  data.put("debugOptions", getDebugOptions());
  data.put("serverCommand", getServerCommand());
  data.put("workspace", getKmsLogPath());

  Configuration cfg = new Configuration(Configuration.DEFAULT_INCOMPATIBLE_IMPROVEMENTS);
  cfg.setClassForTemplateLoading(this.getClass(), "/templates/");
  createFileFromTemplate(cfg, data, "kurento.conf.json");
  createFileFromTemplate(cfg, data, "kurento.sh");
  Shell.runAndWait("chmod", "+x", workspace + File.separator + "kurento.sh");
}
/**
 * Launches the KMS process for the configured scope: via SSH for a remote host, via
 * Docker for a containerized KMS, or via the local shell otherwise. Sets isKmsStarted
 * on success. Branch order matters and is preserved: remote is checked before docker.
 */
private void startKms() {
  String scriptBase = getKmsLogPath();
  if (isKmsRemote) {
    // Run the generated launch script on the remote host, discarding its stdout.
    remoteKmsSshConnection.runAndWaitCommand("sh", "-c", scriptBase + "kurento.sh > /dev/null");
    log.info("Remote KMS started in URI {}", wsUri);
  } else if (isKmsDocker) {
    startDockerizedKms();
  } else {
    Shell.run("sh", "-c", scriptBase + "kurento.sh");
    log.info("Local KMS started in URI {}", wsUri);
  }
  isKmsStarted = true;
}
/**
 * Blocks until the KMS WebSocket endpoint accepts a connection, retrying every 100 ms
 * for up to 30 seconds. If no URI is configured, simply waits one second as a grace
 * period.
 *
 * @throws KurentoException if KMS does not answer within the retry window
 */
private void waitForKms() {
  long initTime = System.nanoTime();

  // Minimal client endpoint: used only to probe that KMS accepts WS connections.
  @ClientEndpoint
  class WebSocketClient extends Endpoint {

    @OnClose
    @Override
    public void onClose(Session session, CloseReason closeReason) {
    }

    @OnOpen
    @Override
    public void onOpen(Session session, EndpointConfig config) {
    }
  }

  if (wsUri != null) {
    WebSocketContainer container = ContainerProvider.getWebSocketContainer();

    final int retries = 300;
    final int waitTime = 100;

    for (int i = 0; i < retries; i++) {
      try {
        Session wsSession =
            container.connectToServer(new WebSocketClient(), ClientEndpointConfig.Builder
                .create().build(), new URI(wsUri));
        wsSession.close();
        double time = (System.nanoTime() - initTime) / (double) 1000000;
        log.debug("Connected to KMS in " + String.format("%3.2f", time) + " milliseconds");
        return;
      } catch (DeploymentException | IOException | URISyntaxException e) {
        // KMS not answering yet: back off briefly and retry.
        try {
          Thread.sleep(waitTime);
        } catch (InterruptedException e1) {
          // FIX: restore the interrupt flag (was e1.printStackTrace(), which swallowed
          // the interruption).
          Thread.currentThread().interrupt();
        }
      }
    }
    throw new KurentoException("Timeout of " + retries * waitTime + " millis waiting for KMS "
        + wsUri);

  } else {
    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
      // FIX: restore the interrupt flag in addition to logging.
      Thread.currentThread().interrupt();
      log.error("InterruptedException {}", e.getMessage());
    }
  }
}
/**
 * Starts KMS inside a Docker container: pulls the image if needed, recreates any
 * existing container with the same name, configures environment (GST debug, S3, STUN),
 * mounts the test-files and output volumes, optionally isolates the network for DNAT
 * testing, and finally rewrites wsUri to point at the container's IP address.
 */
private void startDockerizedKms() {
  Docker dockerClient = Docker.getSingleton();

  String kmsImageName = getProperty(KMS_DOCKER_IMAGE_NAME_PROP, KMS_DOCKER_IMAGE_NAME_DEFAULT);
  boolean forcePulling =
      getProperty(KMS_DOCKER_IMAGE_FORCE_PULLING_PROP, KMS_DOCKER_IMAGE_FORCE_PULLING_DEFAULT);
  if (!dockerClient.existsImage(kmsImageName) || forcePulling) {
    // FIX: corrected "plase" typo in the log message.
    log.info("Pulling KMS image {} ... please wait", kmsImageName);
    dockerClient.getClient().pullImageCmd(kmsImageName).exec(new PullImageResultCallback())
        .awaitSuccess();
    log.info("KMS image {} pulled", kmsImageName);
  }

  if (dockerClient.existsContainer(dockerContainerName)) {
    log.warn("Trying to create a new container named '" + dockerContainerName
        + "' but it already exist. Stopping and removing existing one and creating it again.");
    dockerClient.stopAndRemoveContainer(dockerContainerName);
  }

  log.debug("Starting KMS container...");

  // Check S3 properties
  String s3BucketName = getProperty(KMS_DOCKER_S3_BUCKET_NAME);
  String s3AccessKeyId = getProperty(KMS_DOCKER_S3_ACCESS_KEY_ID);
  String s3SecretAccessKey = getProperty(KMS_DOCKER_S3_SECRET_ACCESS_KEY);
  String s3Hostname = getProperty(KMS_DOCKER_S3_HOSTNAME);

  // DNAT flags (primitives instead of boxed Booleans; behavior unchanged).
  boolean kmsDnat = false;
  if (getProperty(TEST_KMS_DNAT) != null && getProperty(TEST_KMS_DNAT, TEST_KMS_DNAT_DEFAULT)) {
    kmsDnat = true;
  }
  boolean seleniumDnat = false;
  if (getProperty(TEST_SELENIUM_DNAT) != null
      && getProperty(TEST_SELENIUM_DNAT, TEST_SELENIUM_DNAT_DEFAULT)) {
    seleniumDnat = true;
  }

  String kmsCandidateType = getProperty(TEST_ICE_CANDIDATE_KMS_TYPE);
  String seleniumCandidateType = getProperty(TEST_ICE_CANDIDATE_SELENIUM_TYPE);

  // Check Stun properties
  String kmsStunIp = getProperty(TestConfiguration.KMS_STUN_IP_PROPERTY);
  String kmsStunPort = getProperty(TestConfiguration.KMS_STUN_PORT_PROPERTY);

  // Relay/srflx ICE scenario under DNAT: point KMS at the TURN server instead.
  if (kmsDnat && seleniumDnat && RELAY.toString().toUpperCase().equals(seleniumCandidateType)
      && SRFLX.toString().toUpperCase().equals(kmsCandidateType)) {
    // Change kmsStunIp by turn values
    kmsStunIp = getProperty(TEST_ICE_SERVER_URL_PROPERTY).split(":")[1];
    kmsStunPort = "3478";
  }

  if (kmsStunIp == null) {
    kmsStunIp = "";
  }
  if (kmsStunPort == null) {
    kmsStunPort = "";
  }
  log.info("Stun Server {}:{}", kmsStunIp, kmsStunPort);

  CreateContainerCmd createContainerCmd =
      dockerClient
          .getClient()
          .createContainerCmd(kmsImageName)
          .withName(dockerContainerName)
          .withEnv("GST_DEBUG=" + getDebugOptions(), "S3_ACCESS_BUCKET_NAME=" + s3BucketName,
              "S3_ACCESS_KEY_ID=" + s3AccessKeyId, "S3_SECRET_ACCESS_KEY=" + s3SecretAccessKey,
              "S3_HOSTNAME=" + s3Hostname, "KMS_STUN_IP=" + kmsStunIp,
              "KMS_STUN_PORT=" + kmsStunPort).withCmd("--gst-debug-no-color");

  if (dockerClient.isRunningInContainer()) {
    // Tests run inside Docker themselves: share this container's volumes.
    createContainerCmd.withVolumesFrom(new VolumesFrom(dockerClient.getContainerId()));
  } else {
    // Mount test files read-only and the output folder read-write.
    String testFilesPath = KurentoTest.getTestFilesDiskPath();
    Volume volume = new Volume(testFilesPath);
    String targetPath =
        Paths.get(KurentoTest.getDefaultOutputFolder().toURI()).toAbsolutePath().toString();
    Volume volumeTest = new Volume(targetPath);
    createContainerCmd.withVolumes(volume, volumeTest).withBinds(
        new Bind(testFilesPath, volume, AccessMode.ro),
        new Bind(targetPath, volumeTest, AccessMode.rw));
  }

  String kmsAddress = "";
  if (kmsDnat) {
    // DNAT test: detach from Docker networking and label the container so an external
    // agent can wire it up with a generated IP.
    log.debug("Set network, for kms, as none");
    createContainerCmd.withNetworkMode("none");
    Map<String, String> labels = new HashMap<>();
    labels.put("KurentoDnat", "true");
    labels.put("Transport", getProperty(TEST_KMS_TRANSPORT));
    kmsAddress = dockerClient.generateIpAddressForContainer();
    labels.put("IpAddress", kmsAddress);
    createContainerCmd.withLabels(labels);
    CreateContainerResponse kmsContainer = createContainerCmd.exec();
    dockerClient.getClient().startContainerCmd(kmsContainer.getId()).exec();
  } else {
    CreateContainerResponse kmsContainer = createContainerCmd.exec();
    dockerClient.getClient().startContainerCmd(kmsContainer.getId()).exec();
    kmsAddress =
        dockerClient.inspectContainer(dockerContainerName).getNetworkSettings().getIpAddress();
  }

  setWsUri("ws://" + kmsAddress + ":8888/kurento");
  log.info("Dockerized KMS started in URI {}", wsUri);
}
/**
 * Resolves the directory (with trailing separator) where KMS writes its files: a
 * configured path for an externally managed KMS (autostart == false), the SSH temp
 * folder for a remote KMS, or the local workspace otherwise.
 *
 * @return the base path, always ending in the platform file separator
 */
public String getKmsLogPath() {
  String autostart = getProperty(kmsAutostartProp, kmsAutostartDefault);
  if (autostart.equals(AUTOSTART_FALSE_VALUE)) {
    return getProperty(KMS_LOG_PATH_PROP, KMS_LOG_PATH_DEFAULT);
  }
  if (isKmsRemote) {
    return remoteKmsSshConnection.getTmpFolder() + File.separator;
  }
  return workspace + File.separator;
}
/**
 * Renders the FreeMarker template "{filename}.ftl" with the given data model and writes
 * the result to {workspace}/{filename}.
 *
 * @param cfg      FreeMarker configuration used to load the template
 * @param data     template data model
 * @param filename output file name (template name is this plus ".ftl")
 * @throws KurentoException wrapping any template or I/O failure
 */
private void createFileFromTemplate(Configuration cfg, Map<String, Object> data, String filename) {
  try {
    Template template = cfg.getTemplate(filename + ".ftl");
    File file = new File(workspace + File.separator + filename);
    // FIX: try-with-resources — previously the Writer leaked when template.process()
    // threw before the explicit close().
    try (Writer writer = new FileWriter(file)) {
      template.process(data, writer);
      writer.flush();
    }
    log.trace("Created file '{}'", file.getAbsolutePath());
  } catch (Exception e) {
    throw new KurentoException("Exception while creating file from template", e);
  }
}
public void retrieveLogs() throws IOException {
File targetFolder = KurentoTest.getDefaultOutputFolder();
String kmsLogsPath = getKmsLogPath();
Path defaultOutput = Paths.get(targetFolder.toURI());
if (!Files.exists(defaultOutput)) {
Files.createDirectories(defaultOutput);
}
if (isKmsStarted) {
kmsLogsPath += "logs/";
}
String testMethodName = KurentoTest.getSimpleTestName();
if (isKmsDocker) {
Docker.getSingleton()
.downloadLog(
dockerContainerName,
Paths.get(targetFolder.getAbsolutePath(), testMethodName + getDockerLogSuffix()
+ ".log"));
}
else if (isKmsRemote) {
if (!remoteKmsSshConnection.isStarted()) {
remoteKmsSshConnection.start();
}
log.debug("Copying KMS logs located on {} from remote host {} to {}", kmsLogsPath,
remoteKmsSshConnection.getConnection(), targetFolder);
List<String> remoteLogFiles = remoteKmsSshConnection.listFiles(kmsLogsPath, true, false);
for (String remoteLogFile : remoteLogFiles) {
String localLogFile =
targetFolder + "/" + testMethodName + "-"
+ remoteLogFile.substring(remoteLogFile.lastIndexOf("/") + 1);
remoteKmsSshConnection.getFile(localLogFile, remoteLogFile);
KurentoTest.addLogFile(new File(localLogFile));
log.debug("Log file: {}", localLogFile);
}
} else {
File directory = new File(kmsLogsPath);
if (directory.isDirectory()) {
log.debug("Copying KMS logs from local path {} to {}", kmsLogsPath, targetFolder);
Collection<File> logFiles = FileUtils.listFiles(directory, null, true);
for (File logFile : logFiles) {
File destFile = new File(targetFolder, testMethodName + "-" + logFile.getName());
try {
FileUtils.copyFile(logFile, destFile);
KurentoTest.addLogFile(destFile);
log.debug("Log file: {}", destFile);
} catch (Throwable e) {
log.warn("Exception copy KMS file {} {}", e.getClass(), e.getMessage());
}
}
} else {
log.warn("Path {} is not a directory", directory);
}
}
}
/**
 * Stops the running KMS: stops the container for a dockerized KMS; otherwise kills the
 * KMS process (locally or over SSH) and closes the SSH session for a remote KMS.
 * Clears isKmsStarted afterwards.
 */
public void stopKms() {
if (isKmsDocker) {
Docker.getSingleton().stopContainer(dockerContainerName);
} else {
killKmsProcesses();
if (isKmsRemote) {
remoteKmsSshConnection.stop();
}
}
isKmsStarted = false;
}
/**
 * Terminates the KMS process: repeatedly sends SIGTERM (checking every 100 ms) for up
 * to 5 seconds; if the process is still alive after that, escalates to SIGKILL.
 */
private void killKmsProcesses() {
  int numKmsProcesses = 0;

  // Max timeout waiting kms ending: 5 seconds
  long timeout = System.currentTimeMillis() + 5000;

  do {
    // If timeout, break the loop
    if (System.currentTimeMillis() > timeout) {
      break;
    }

    // Sending SIGTERM signal to KMS process
    kmsSigTerm();

    // Wait 100 msec to order kms termination
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      // FIX: restore the interrupt flag (was e.printStackTrace(), which swallowed it).
      Thread.currentThread().interrupt();
    }
    numKmsProcesses = countKmsProcesses();

  } while (numKmsProcesses > 0);

  if (numKmsProcesses > 0) {
    // If at this point there is still kms process (after trying to
    // kill it with SIGTERM during 5 seconds), we send the SIGKILL
    // signal to the process
    kmsSigKill();
  }
}
/**
 * Sends SIGTERM to the KMS process whose PID is stored in the "kms-pid" file written by
 * kurento.sh — over SSH for a remote KMS, via the local shell otherwise.
 */
private void kmsSigTerm() {
log.trace("Sending SIGTERM to KMS process");
if (isKmsRemote) {
String kmsPid =
remoteKmsSshConnection.execAndWaitCommandNoBr("cat",
remoteKmsSshConnection.getTmpFolder() + "/kms-pid");
remoteKmsSshConnection.runAndWaitCommand("kill", kmsPid);
} else {
Shell.runAndWait("sh", "-c", "kill `cat " + workspace + File.separator + "kms-pid`");
}
}
/**
 * Sends SIGKILL (kill -9) to the KMS process whose PID is stored in the "kms-pid" file
 * — over SSH for a remote KMS, via the local shell otherwise. Used as a last resort
 * after SIGTERM fails (see killKmsProcesses).
 */
private void kmsSigKill() {
log.trace("Sending SIGKILL to KMS process");
if (isKmsRemote) {
String kmsPid =
remoteKmsSshConnection.execAndWaitCommandNoBr("cat",
remoteKmsSshConnection.getTmpFolder() + "/kms-pid");
remoteKmsSshConnection.runAndWaitCommand("sh", "-c", "kill -9 " + kmsPid);
} else {
Shell.runAndWait("sh", "-c", "kill -9 `cat " + workspace + File.separator + "kms-pid`");
}
}
/**
 * Counts how many processes match the PID stored in the "kms-pid" file (0 once KMS has
 * exited), using "ps --pid <pid> --no-headers | wc -l" locally or over SSH.
 *
 * @return number of live KMS processes; 0 on I/O failure (the exception is logged)
 */
private int countKmsProcesses() {
int result = 0;
try {
// This command counts number of process (given its PID, stored in
// kms-pid file)
if (isKmsRemote) {
String kmsPid =
remoteKmsSshConnection.execAndWaitCommandNoBr("cat",
remoteKmsSshConnection.getTmpFolder() + "/kms-pid");
// NOTE(review): the whole pipeline is passed as one argument here (unlike the
// "cat" call above, which splits command and argument) — presumably
// execAndWaitCommandNoBr runs it through a shell; confirm against SshConnection.
result =
Integer.parseInt(remoteKmsSshConnection.execAndWaitCommandNoBr("ps --pid " + kmsPid
+ " --no-headers | wc -l"));
} else {
String[] command =
{ "sh", "-c",
"ps --pid `cat " + workspace + File.separator + "kms-pid` --no-headers | wc -l" };
Process countKms = Runtime.getRuntime().exec(command);
String stringFromStream =
CharStreams.toString(new InputStreamReader(countKms.getInputStream(), "UTF-8"));
result = Integer.parseInt(stringFromStream.trim());
}
} catch (IOException e) {
log.warn("Exception counting KMS processes", e);
}
return result;
}
private void deleteFolderAndContent(Path folder) throws IOException {
if (folder != null) {
Files.walkFileTree(folder, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
}
}
/**
 * Sets the Docker container name for this KMS instance. The first name ever assigned is
 * also recorded as the statically "monitored" container (see
 * getMonitoredDockerContainerName()).
 * NOTE(review): the static field is read elsewhere without synchronization — confirm
 * that registration only happens from a single thread.
 */
public synchronized void setDockerContainerName(String containerName) {
dockerContainerName = containerName;
if (monitoredDockerContainerName == null) {
monitoredDockerContainerName = dockerContainerName;
}
}
/** Returns the KMS server launch command, from its property or the built-in default. */
private String getServerCommand() {
return getProperty(KMS_SERVER_COMMAND_PROP, KMS_SERVER_COMMAND_DEFAULT);
}
/**
 * Returns extra GStreamer plugins to load, from property or default.
 * (The "KSM" in the property constant name is a pre-existing typo in TestConfiguration.)
 */
private String getGstPlugins() {
return getProperty(KSM_GST_PLUGINS_PROP, KMS_GST_PLUGINS_DEFAULT);
}
/** Returns the GST_DEBUG options for KMS, from property or default. */
private String getDebugOptions() {
return getProperty(KMS_SERVER_DEBUG_PROP, KMS_SERVER_DEBUG_DEFAULT);
}
/**
 * Lazily creates (on first call, if wsUri is set) and returns the KurentoClient for this
 * KMS. On creation, an ObjectCreated listener is installed so every new MediaPipeline
 * gets an error listener that logs KMS-side errors and rethrows them as
 * KurentoException.
 * NOTE(review): the lazy initialization is not synchronized — concurrent first calls
 * could create two clients; confirm single-threaded use in the test framework.
 *
 * @return the shared client, or null if wsUri is null and no client exists yet
 */
public KurentoClient getKurentoClient() {
if (kurentoClient == null && wsUri != null) {
kurentoClient = createKurentoClient();
kurentoClient.getServerManager().addObjectCreatedListener(
new EventListener<ObjectCreatedEvent>() {
@Override
public void onEvent(ObjectCreatedEvent event) {
if (event instanceof MediaPipeline) {
MediaPipeline mp = (MediaPipeline) event;
mp.addErrorListener(new EventListener<ErrorEvent>() {
@Override
public void onEvent(ErrorEvent event) {
String msgException =
"Error in KMS: " + event.getDescription() + "; Type: " + event.getType()
+ "; Error Code: " + event.getErrorCode();
log.error(msgException);
// Thrown from the event-dispatch callback so tests fail fast on KMS errors.
throw new KurentoException(msgException);
}
});
}
}
});
}
return kurentoClient;
}
/** Creates a brand-new KurentoClient connected to the current WebSocket URI. */
public KurentoClient createKurentoClient() {
return KurentoClient.create(wsUri);
}
/** Destroys the cached KurentoClient (if any) and clears the reference; idempotent. */
public void closeKurentoClient() {
if (kurentoClient != null) {
kurentoClient.destroy();
kurentoClient = null;
}
}
/** Returns the KMS WebSocket URI (may be null if never configured). */
public String getWsUri() {
return wsUri;
}
/**
 * Sets the KMS WebSocket URI and, when non-null, also exports it as the system property
 * named by kmsWsUriExportProp so other services and tests can discover it.
 */
public void setWsUri(String wsUri) {
if (wsUri != null) {
System.setProperty(kmsWsUriExportProp, wsUri);
}
this.wsUri = wsUri;
}
/** Sets the registrar URI written into the generated kurento.conf.json. */
public void setRegistrarUri(String registrarUri) {
this.registrarUri = registrarUri;
}
/** Sets the registrar local address for the generated config (default "127.0.0.1"). */
public void setRegistrarLocalAddress(String registrarLocalAddress) {
this.registrarLocalAddress = registrarLocalAddress;
}
/** Returns true while KMS is considered running (set by startKms, cleared by stopKms). */
public boolean isKmsStarted() {
return isKmsStarted;
}
/**
 * Returns the name of the first KMS container registered in this JVM (the "monitored"
 * one), or null if none has been registered yet. See setDockerContainerName.
 */
public static String getMonitoredDockerContainerName() {
return monitoredDockerContainerName;
}
}
| kurento-integration-tests/kurento-test/src/main/java/org/kurento/test/services/KmsService.java | /*
* (C) Copyright 2015 Kurento (http://kurento.org/)
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl-2.1.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
package org.kurento.test.services;
import static org.kurento.commons.PropertiesManager.getProperty;
import static org.kurento.test.browser.WebRtcCandidateType.RELAY;
import static org.kurento.test.browser.WebRtcCandidateType.SRFLX;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_FALSE_VALUE;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_TESTCLASS_VALUE;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_TESTSUITE_VALUE;
import static org.kurento.test.config.TestConfiguration.AUTOSTART_TEST_VALUE;
import static org.kurento.test.config.TestConfiguration.KMS_AUTOSTART_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_AUTOSTART_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_FORCE_PULLING_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_FORCE_PULLING_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_NAME_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_IMAGE_NAME_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_ACCESS_KEY_ID;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_BUCKET_NAME;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_HOSTNAME;
import static org.kurento.test.config.TestConfiguration.KMS_DOCKER_S3_SECRET_ACCESS_KEY;
import static org.kurento.test.config.TestConfiguration.KMS_GST_PLUGINS_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_LOGIN_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_LOG_PATH_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_LOG_PATH_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_PASSWD_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_PEM_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_SCOPE_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_SCOPE_DOCKER;
import static org.kurento.test.config.TestConfiguration.KMS_SCOPE_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_COMMAND_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_COMMAND_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_DEBUG_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_SERVER_DEBUG_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_WS_URI_DEFAULT;
import static org.kurento.test.config.TestConfiguration.KMS_WS_URI_PROP;
import static org.kurento.test.config.TestConfiguration.KMS_WS_URI_PROP_EXPORT;
import static org.kurento.test.config.TestConfiguration.KSM_GST_PLUGINS_PROP;
import static org.kurento.test.config.TestConfiguration.TEST_ICE_CANDIDATE_KMS_TYPE;
import static org.kurento.test.config.TestConfiguration.TEST_ICE_CANDIDATE_SELENIUM_TYPE;
import static org.kurento.test.config.TestConfiguration.TEST_ICE_SERVER_URL_PROPERTY;
import static org.kurento.test.config.TestConfiguration.TEST_KMS_DNAT;
import static org.kurento.test.config.TestConfiguration.TEST_KMS_DNAT_DEFAULT;
import static org.kurento.test.config.TestConfiguration.TEST_KMS_TRANSPORT;
import static org.kurento.test.config.TestConfiguration.TEST_SELENIUM_DNAT;
import static org.kurento.test.config.TestConfiguration.TEST_SELENIUM_DNAT_DEFAULT;
import static org.kurento.test.services.TestService.TestServiceScope.EXTERNAL;
import static org.kurento.test.services.TestService.TestServiceScope.TEST;
import static org.kurento.test.services.TestService.TestServiceScope.TESTCLASS;
import static org.kurento.test.services.TestService.TestServiceScope.TESTSUITE;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Writer;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.websocket.ClientEndpoint;
import javax.websocket.ClientEndpointConfig;
import javax.websocket.CloseReason;
import javax.websocket.ContainerProvider;
import javax.websocket.DeploymentException;
import javax.websocket.Endpoint;
import javax.websocket.EndpointConfig;
import javax.websocket.OnClose;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.WebSocketContainer;
import org.apache.commons.io.FileUtils;
import org.kurento.client.ErrorEvent;
import org.kurento.client.EventListener;
import org.kurento.client.KurentoClient;
import org.kurento.client.MediaPipeline;
import org.kurento.client.ObjectCreatedEvent;
import org.kurento.commons.exception.KurentoException;
import org.kurento.test.base.KurentoTest;
import org.kurento.test.config.TestConfiguration;
import org.kurento.test.docker.Docker;
import org.kurento.test.utils.Shell;
import org.kurento.test.utils.SshConnection;
import com.github.dockerjava.api.command.CreateContainerCmd;
import com.github.dockerjava.api.command.CreateContainerResponse;
import com.github.dockerjava.api.model.AccessMode;
import com.github.dockerjava.api.model.Bind;
import com.github.dockerjava.api.model.Volume;
import com.github.dockerjava.api.model.VolumesFrom;
import com.github.dockerjava.core.command.PullImageResultCallback;
import com.google.common.io.CharStreams;
import freemarker.template.Configuration;
import freemarker.template.Template;
/**
* Kurento Media Server service.
*
* @author Boni Garcia ([email protected])
* @since 6.1.1
*/
public class KmsService extends TestService {
// FIXME replace with a registration mechanism
protected static String monitoredDockerContainerName;
protected String dockerContainerName = "kms";
protected SshConnection remoteKmsSshConnection;
protected Path workspace;
protected String wsUri;
protected boolean isKmsRemote;
protected boolean isKmsDocker;
protected boolean isKmsStarted;
protected String registrarUri;
protected String registrarLocalAddress = "127.0.0.1";
protected String kmsLoginProp;
protected String kmsPasswdProp;
protected String kmsPemProp;
protected String kmsAutostartProp;
protected String kmsAutostartDefault;
protected String kmsWsUriProp;
protected String kmsWsUriExportProp;
protected String kmsScopeProp;
protected String kmsScopeDefault;
protected KurentoClient kurentoClient;
public KmsService(String wsUri) {
this();
setWsUri(wsUri);
}
public KmsService() {
this.kmsLoginProp = KMS_LOGIN_PROP;
this.kmsPasswdProp = KMS_PASSWD_PROP;
this.kmsPemProp = KMS_PEM_PROP;
this.kmsAutostartProp = KMS_AUTOSTART_PROP;
this.kmsAutostartDefault = KMS_AUTOSTART_DEFAULT;
this.kmsWsUriProp = KMS_WS_URI_PROP;
this.kmsWsUriExportProp = KMS_WS_URI_PROP_EXPORT;
this.kmsScopeProp = KMS_SCOPE_PROP;
this.kmsScopeDefault = KMS_SCOPE_DEFAULT;
setWsUri(getProperty(kmsWsUriProp, KMS_WS_URI_DEFAULT));
}
public KmsService(String kmsLoginProp, String kmsPasswdProp, String kmsPemProp,
String kmsAutostartProp, String kmsWsUriProp, String kmsWsUriExportProp, String kmsScopeProp,
String kmsScopeDefault) {
this.kmsLoginProp = kmsLoginProp;
this.kmsPasswdProp = kmsPasswdProp;
this.kmsPemProp = kmsPemProp;
this.kmsAutostartProp = kmsAutostartProp;
this.kmsWsUriProp = kmsWsUriProp;
this.kmsWsUriExportProp = kmsWsUriExportProp;
this.kmsScopeProp = kmsScopeProp;
this.kmsScopeDefault = kmsScopeDefault;
setWsUri(getProperty(kmsWsUriProp, KMS_WS_URI_DEFAULT));
}
@Override
public void start() {
super.start();
if (wsUri == null) {
log.warn("WS URI is null, will not start");
isKmsStarted = false;
return;
}
isKmsRemote = !wsUri.contains("localhost") && !wsUri.contains("127.0.0.1") && !isKmsDocker;
isKmsDocker = KMS_SCOPE_DOCKER.equals(getProperty(kmsScopeProp, kmsScopeDefault));
// Assertion: if KMS remote, credentials should be available
String kmsLogin = getProperty(kmsLoginProp);
String kmsPasswd = getProperty(kmsPasswdProp);
String kmsPem = getProperty(kmsPemProp);
String kmsAutoStart = getProperty(kmsAutostartProp, kmsAutostartDefault);
if (isKmsRemote && kmsLogin == null && (kmsPem == null || kmsPasswd == null)) {
throw new KurentoException("Bad test parameters: " + kmsAutostartProp + "=" + kmsAutoStart
+ " and " + kmsWsUriProp + "=" + wsUri
+ ". Remote KMS should be started but its credentials are not present: " + kmsLoginProp
+ "=" + kmsLogin + ", " + kmsPasswdProp + "=" + kmsPasswd + ", " + kmsPemProp + "="
+ kmsPem);
}
// Assertion: if local or remote KMS, port should be available
if (!isKmsDocker && !isKmsRemote && !isFreePort(wsUri)) {
throw new KurentoException("KMS cannot be started in URI: " + wsUri + ". Port is not free");
}
if (isKmsDocker) {
log.info("Starting KMS dockerized");
Docker dockerClient = Docker.getSingleton();
if (dockerClient.isRunningInContainer()) {
setDockerContainerName(dockerClient.getContainerName() + getDockerContainerNameSuffix());
}
} else {
log.info("Starting KMS with URI: {}", wsUri);
try {
workspace = Files.createTempDirectory("kurento-test");
} catch (IOException e) {
throw new KurentoException("Exception creating temporal folder", e);
}
log.trace("Local folder to store temporal files: {}", workspace);
if (isKmsRemote) {
String remoteKmsStr = wsUri.substring(wsUri.indexOf("//") + 2, wsUri.lastIndexOf(":"));
log.info("Using remote KMS at {}", remoteKmsStr);
remoteKmsSshConnection = new SshConnection(remoteKmsStr, kmsLogin, kmsPasswd, kmsPem);
if (kmsPem != null) {
remoteKmsSshConnection.setPem(kmsPem);
}
remoteKmsSshConnection.start();
remoteKmsSshConnection.createTmpFolder();
}
createKurentoConf();
}
if (isKmsRemote && !kmsAutoStart.equals(AUTOSTART_FALSE_VALUE)) {
String[] filesToBeCopied = { "kurento.conf.json", "kurento.sh" };
for (String s : filesToBeCopied) {
remoteKmsSshConnection.scp(workspace + File.separator + s,
remoteKmsSshConnection.getTmpFolder() + File.separator + s);
}
remoteKmsSshConnection.runAndWaitCommand("chmod", "+x", remoteKmsSshConnection.getTmpFolder()
+ File.separator + "kurento.sh");
}
startKms();
waitForKms();
}
@Override
public void stop() {
super.stop();
// Close Kurento client
closeKurentoClient();
// Stop KMS
stopKms();
// Retrieve logs
try {
retrieveLogs();
} catch (IOException e) {
log.warn("Exception retrieving KMS logs", e);
}
if (isKmsDocker) {
try {
Docker.getSingleton().removeContainer(dockerContainerName);
log.info("*** Only for debugging: Docker.getSingleton().removeContainer({})",
dockerContainerName);
} catch (Throwable name) {
log.error(
" +++ Only for debugging: Exception on Docker.getSingleton().removeContainer({})",
dockerContainerName);
}
}
log.info("+++ Only for debugging: After removeContainer {}", dockerContainerName);
// Delete temporal folder and content
if (!isKmsDocker) {
try {
deleteFolderAndContent(workspace);
} catch (IOException e) {
log.warn("Exception deleting temporal folder {}", workspace, e);
}
}
log.info("+++ Only for debugging: End of KmsService.stop() for: {}", dockerContainerName);
}
@Override
public TestServiceScope getScope() {
TestServiceScope scope = TESTSUITE;
String kmsAutostart = getProperty(kmsAutostartProp, kmsAutostartDefault);
switch (kmsAutostart) {
case AUTOSTART_FALSE_VALUE:
scope = EXTERNAL;
break;
case AUTOSTART_TEST_VALUE:
scope = TEST;
break;
case AUTOSTART_TESTCLASS_VALUE:
scope = TESTCLASS;
break;
case AUTOSTART_TESTSUITE_VALUE:
scope = TESTSUITE;
break;
default:
throw new IllegalArgumentException("Unknown autostart value " + kmsAutostart);
}
return scope;
}
protected String getDockerContainerNameSuffix() {
return "_kms";
}
protected String getDockerLogSuffix() {
return "-kms";
}
private boolean isFreePort(String wsUri) {
try {
URI wsUrl = new URI(wsUri);
String result =
Shell.runAndWait("/bin/bash", "-c", "nc -z " + wsUrl.getHost() + " " + wsUrl.getPort()
+ "; echo $?");
if (result.trim().equals("0")) {
log.warn("Port " + wsUrl.getPort()
+ " is used. Maybe another KMS instance is running in this port");
return false;
}
} catch (URISyntaxException e) {
log.warn("WebSocket URI {} is malformed: " + e.getMessage(), wsUri);
}
return true;
}
private void createKurentoConf() {
Map<String, Object> data = new HashMap<String, Object>();
try {
URI wsAsUri = new URI(wsUri);
int port = wsAsUri.getPort();
String path = wsAsUri.getPath();
data.put("wsPort", String.valueOf(port));
data.put("wsPath", path.substring(1));
data.put("registrar", registrarUri);
data.put("registrarLocalAddress", registrarLocalAddress);
} catch (URISyntaxException e) {
throw new KurentoException("Invalid ws uri: " + wsUri);
}
data.put("gstPlugins", getGstPlugins());
data.put("debugOptions", getDebugOptions());
data.put("serverCommand", getServerCommand());
data.put("workspace", getKmsLogPath());
Configuration cfg = new Configuration(Configuration.DEFAULT_INCOMPATIBLE_IMPROVEMENTS);
cfg.setClassForTemplateLoading(this.getClass(), "/templates/");
createFileFromTemplate(cfg, data, "kurento.conf.json");
createFileFromTemplate(cfg, data, "kurento.sh");
Shell.runAndWait("chmod", "+x", workspace + File.separator + "kurento.sh");
}
private void startKms() {
String kmsLogPath = getKmsLogPath();
if (isKmsRemote) {
remoteKmsSshConnection.runAndWaitCommand("sh", "-c", kmsLogPath + "kurento.sh > /dev/null");
log.info("Remote KMS started in URI {}", wsUri);
} else if (isKmsDocker) {
startDockerizedKms();
} else {
Shell.run("sh", "-c", kmsLogPath + "kurento.sh");
log.info("Local KMS started in URI {}", wsUri);
}
isKmsStarted = true;
}
private void waitForKms() {
long initTime = System.nanoTime();
@ClientEndpoint
class WebSocketClient extends Endpoint {
@OnClose
@Override
public void onClose(Session session, CloseReason closeReason) {
}
@OnOpen
@Override
public void onOpen(Session session, EndpointConfig config) {
}
}
if (wsUri != null) {
WebSocketContainer container = ContainerProvider.getWebSocketContainer();
final int retries = 300;
final int waitTime = 100;
for (int i = 0; i < retries; i++) {
try {
Session wsSession =
container.connectToServer(new WebSocketClient(), ClientEndpointConfig.Builder
.create().build(), new URI(wsUri));
wsSession.close();
double time = (System.nanoTime() - initTime) / (double) 1000000;
log.debug("Connected to KMS in " + String.format("%3.2f", time) + " milliseconds");
return;
} catch (DeploymentException | IOException | URISyntaxException e) {
try {
Thread.sleep(waitTime);
} catch (InterruptedException e1) {
e1.printStackTrace();
}
}
}
throw new KurentoException("Timeout of " + retries * waitTime + " millis waiting for KMS "
+ wsUri);
} else {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
log.error("InterruptedException {}", e.getMessage());
}
}
}
private void startDockerizedKms() {
Docker dockerClient = Docker.getSingleton();
String kmsImageName = getProperty(KMS_DOCKER_IMAGE_NAME_PROP, KMS_DOCKER_IMAGE_NAME_DEFAULT);
boolean forcePulling =
getProperty(KMS_DOCKER_IMAGE_FORCE_PULLING_PROP, KMS_DOCKER_IMAGE_FORCE_PULLING_DEFAULT);
if (!dockerClient.existsImage(kmsImageName) || forcePulling) {
log.info("Pulling KMS image {} ... plase wait", kmsImageName);
dockerClient.getClient().pullImageCmd(kmsImageName).exec(new PullImageResultCallback())
.awaitSuccess();
log.info("KMS image {} pulled", kmsImageName);
}
if (dockerClient.existsContainer(dockerContainerName)) {
log.warn("Trying to create a new container named '" + dockerContainerName
+ "' but it already exist. Stopping and removing existing one and creating it again.");
dockerClient.stopAndRemoveContainer(dockerContainerName);
}
log.debug("Starting KMS container...");
// Check S3 properties
String s3BucketName = getProperty(KMS_DOCKER_S3_BUCKET_NAME);
String s3AccessKeyId = getProperty(KMS_DOCKER_S3_ACCESS_KEY_ID);
String s3SecretAccessKey = getProperty(KMS_DOCKER_S3_SECRET_ACCESS_KEY);
String s3Hostname = getProperty(KMS_DOCKER_S3_HOSTNAME);
Boolean kmsDnat = false;
if (getProperty(TEST_KMS_DNAT) != null && getProperty(TEST_KMS_DNAT, TEST_KMS_DNAT_DEFAULT)) {
kmsDnat = true;
}
Boolean seleniumDnat = false;
if (getProperty(TEST_SELENIUM_DNAT) != null
&& getProperty(TEST_SELENIUM_DNAT, TEST_SELENIUM_DNAT_DEFAULT)) {
seleniumDnat = true;
}
String kmsCandidateType = getProperty(TEST_ICE_CANDIDATE_KMS_TYPE);
String seleniumCandidateType = getProperty(TEST_ICE_CANDIDATE_SELENIUM_TYPE);
// Check Stun properties
String kmsStunIp = getProperty(TestConfiguration.KMS_STUN_IP_PROPERTY);
String kmsStunPort = getProperty(TestConfiguration.KMS_STUN_PORT_PROPERTY);
if (kmsDnat && seleniumDnat && RELAY.toString().toUpperCase().equals(seleniumCandidateType)
&& SRFLX.toString().toUpperCase().equals(kmsCandidateType)) {
// Change kmsStunIp by turn values
kmsStunIp = getProperty(TEST_ICE_SERVER_URL_PROPERTY).split(":")[1];
}
if (kmsStunIp == null) {
kmsStunIp = "";
}
if (kmsStunPort == null) {
kmsStunPort = "";
}
CreateContainerCmd createContainerCmd =
dockerClient
.getClient()
.createContainerCmd(kmsImageName)
.withName(dockerContainerName)
.withEnv("GST_DEBUG=" + getDebugOptions(), "S3_ACCESS_BUCKET_NAME=" + s3BucketName,
"S3_ACCESS_KEY_ID=" + s3AccessKeyId, "S3_SECRET_ACCESS_KEY=" + s3SecretAccessKey,
"S3_HOSTNAME=" + s3Hostname, "KMS_STUN_IP=" + kmsStunIp,
"KMS_STUN_PORT=" + kmsStunPort).withCmd("--gst-debug-no-color");
if (dockerClient.isRunningInContainer()) {
createContainerCmd.withVolumesFrom(new VolumesFrom(dockerClient.getContainerId()));
} else {
String testFilesPath = KurentoTest.getTestFilesDiskPath();
Volume volume = new Volume(testFilesPath);
String targetPath =
Paths.get(KurentoTest.getDefaultOutputFolder().toURI()).toAbsolutePath().toString();
Volume volumeTest = new Volume(targetPath);
createContainerCmd.withVolumes(volume, volumeTest).withBinds(
new Bind(testFilesPath, volume, AccessMode.ro),
new Bind(targetPath, volumeTest, AccessMode.rw));
}
String kmsAddress = "";
if (kmsDnat) {
log.debug("Set network, for kms, as none");
createContainerCmd.withNetworkMode("none");
Map<String, String> labels = new HashMap<String, String>();
labels.put("KurentoDnat", "true");
labels.put("Transport", getProperty(TEST_KMS_TRANSPORT));
kmsAddress = dockerClient.generateIpAddressForContainer();
labels.put("IpAddress", kmsAddress);
createContainerCmd.withLabels(labels);
CreateContainerResponse kmsContainer = createContainerCmd.exec();
dockerClient.getClient().startContainerCmd(kmsContainer.getId()).exec();
} else {
CreateContainerResponse kmsContainer = createContainerCmd.exec();
dockerClient.getClient().startContainerCmd(kmsContainer.getId()).exec();
kmsAddress =
dockerClient.inspectContainer(dockerContainerName).getNetworkSettings().getIpAddress();
}
setWsUri("ws://" + kmsAddress + ":8888/kurento");
log.info("Dockerized KMS started in URI {}", wsUri);
}
public String getKmsLogPath() {
String kmsAutoStart = getProperty(kmsAutostartProp, kmsAutostartDefault);
return kmsAutoStart.equals(AUTOSTART_FALSE_VALUE) ? getProperty(KMS_LOG_PATH_PROP,
KMS_LOG_PATH_DEFAULT) : isKmsRemote ? remoteKmsSshConnection.getTmpFolder()
+ File.separator : workspace + File.separator;
}
private void createFileFromTemplate(Configuration cfg, Map<String, Object> data, String filename) {
try {
Template template = cfg.getTemplate(filename + ".ftl");
File file = new File(workspace + File.separator + filename);
Writer writer = new FileWriter(file);
template.process(data, writer);
writer.flush();
writer.close();
log.trace("Created file '{}'", file.getAbsolutePath());
} catch (Exception e) {
throw new KurentoException("Exception while creating file from template", e);
}
}
  /**
   * Copies the KMS logs into the default test output folder so they are kept
   * as test artifacts. Depending on how KMS ran, logs come from the Docker
   * container, the remote host (over SSH) or the local filesystem. Copied
   * files are prefixed with the current test method name and registered via
   * {@code KurentoTest.addLogFile}.
   *
   * @throws IOException if the output folder cannot be created
   */
  public void retrieveLogs() throws IOException {
    File targetFolder = KurentoTest.getDefaultOutputFolder();
    String kmsLogsPath = getKmsLogPath();
    Path defaultOutput = Paths.get(targetFolder.toURI());
    if (!Files.exists(defaultOutput)) {
      Files.createDirectories(defaultOutput);
    }
    // When this service started KMS itself, logs live in a "logs/" subfolder.
    if (isKmsStarted) {
      kmsLogsPath += "logs/";
    }
    String testMethodName = KurentoTest.getSimpleTestName();
    if (isKmsDocker) {
      Docker.getSingleton()
          .downloadLog(
              dockerContainerName,
              Paths.get(targetFolder.getAbsolutePath(), testMethodName + getDockerLogSuffix()
                  + ".log"));
    }
    else if (isKmsRemote) {
      if (!remoteKmsSshConnection.isStarted()) {
        remoteKmsSshConnection.start();
      }
      log.debug("Copying KMS logs located on {} from remote host {} to {}", kmsLogsPath,
          remoteKmsSshConnection.getConnection(), targetFolder);
      List<String> remoteLogFiles = remoteKmsSshConnection.listFiles(kmsLogsPath, true, false);
      for (String remoteLogFile : remoteLogFiles) {
        // Keep only the base name of the remote file, prefixed with the test name.
        String localLogFile =
            targetFolder + "/" + testMethodName + "-"
                + remoteLogFile.substring(remoteLogFile.lastIndexOf("/") + 1);
        remoteKmsSshConnection.getFile(localLogFile, remoteLogFile);
        KurentoTest.addLogFile(new File(localLogFile));
        log.debug("Log file: {}", localLogFile);
      }
    } else {
      File directory = new File(kmsLogsPath);
      if (directory.isDirectory()) {
        log.debug("Copying KMS logs from local path {} to {}", kmsLogsPath, targetFolder);
        Collection<File> logFiles = FileUtils.listFiles(directory, null, true);
        for (File logFile : logFiles) {
          File destFile = new File(targetFolder, testMethodName + "-" + logFile.getName());
          try {
            FileUtils.copyFile(logFile, destFile);
            KurentoTest.addLogFile(destFile);
            log.debug("Log file: {}", destFile);
          } catch (Throwable e) {
            // Best effort: keep copying remaining files even if one fails.
            log.warn("Exception copy KMS file {} {}", e.getClass(), e.getMessage());
          }
        }
      } else {
        log.warn("Path {} is not a directory", directory);
      }
    }
  }
public void stopKms() {
if (isKmsDocker) {
Docker.getSingleton().stopContainer(dockerContainerName);
} else {
killKmsProcesses();
if (isKmsRemote) {
remoteKmsSshConnection.stop();
}
}
isKmsStarted = false;
}
private void killKmsProcesses() {
int numKmsProcesses = 0;
// Max timeout waiting kms ending: 5 seconds
long timeout = System.currentTimeMillis() + 5000;
do {
// If timeout, break the loop
if (System.currentTimeMillis() > timeout) {
break;
}
// Sending SIGTERM signal to KMS process
kmsSigTerm();
// Wait 100 msec to order kms termination
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
numKmsProcesses = countKmsProcesses();
} while (numKmsProcesses > 0);
if (numKmsProcesses > 0) {
// If at this point there is still kms process (after trying to
// kill it with SIGTERM during 5 seconds), we send the SIGKILL
// signal to the process
kmsSigKill();
}
}
  /**
   * Sends SIGTERM to the KMS process whose PID was recorded in the "kms-pid"
   * file, either locally or through the remote SSH connection.
   */
  private void kmsSigTerm() {
    log.trace("Sending SIGTERM to KMS process");
    if (isKmsRemote) {
      String kmsPid =
          remoteKmsSshConnection.execAndWaitCommandNoBr("cat",
              remoteKmsSshConnection.getTmpFolder() + "/kms-pid");
      remoteKmsSshConnection.runAndWaitCommand("kill", kmsPid);
    } else {
      Shell.runAndWait("sh", "-c", "kill `cat " + workspace + File.separator + "kms-pid`");
    }
  }
  /**
   * Sends SIGKILL (kill -9) to the KMS process whose PID was recorded in the
   * "kms-pid" file, either locally or through the remote SSH connection. Used
   * as a last resort when SIGTERM did not stop the process.
   */
  private void kmsSigKill() {
    log.trace("Sending SIGKILL to KMS process");
    if (isKmsRemote) {
      String kmsPid =
          remoteKmsSshConnection.execAndWaitCommandNoBr("cat",
              remoteKmsSshConnection.getTmpFolder() + "/kms-pid");
      remoteKmsSshConnection.runAndWaitCommand("sh", "-c", "kill -9 " + kmsPid);
    } else {
      Shell.runAndWait("sh", "-c", "kill -9 `cat " + workspace + File.separator + "kms-pid`");
    }
  }
  /**
   * Counts how many processes with the recorded KMS PID are still alive, by
   * running {@code ps --pid <pid> --no-headers | wc -l} locally or on the
   * remote host.
   *
   * NOTE(review): a NumberFormatException from a non-numeric command output
   * would propagate out of this method (only IOException is caught) — confirm
   * that is intended.
   *
   * @return number of live KMS processes; 0 when the check itself failed with
   *         an IOException
   */
  private int countKmsProcesses() {
    int result = 0;
    try {
      // This command counts number of process (given its PID, stored in
      // kms-pid file)
      if (isKmsRemote) {
        String kmsPid =
            remoteKmsSshConnection.execAndWaitCommandNoBr("cat",
                remoteKmsSshConnection.getTmpFolder() + "/kms-pid");
        // NOTE(review): the whole pipeline is passed as one argument here,
        // unlike the local branch; presumably execAndWaitCommandNoBr runs it
        // through a shell — confirm.
        result =
            Integer.parseInt(remoteKmsSshConnection.execAndWaitCommandNoBr("ps --pid " + kmsPid
                + " --no-headers | wc -l"));
      } else {
        String[] command =
            { "sh", "-c",
                "ps --pid `cat " + workspace + File.separator + "kms-pid` --no-headers | wc -l" };
        Process countKms = Runtime.getRuntime().exec(command);
        String stringFromStream =
            CharStreams.toString(new InputStreamReader(countKms.getInputStream(), "UTF-8"));
        result = Integer.parseInt(stringFromStream.trim());
      }
    } catch (IOException e) {
      log.warn("Exception counting KMS processes", e);
    }
    return result;
  }
private void deleteFolderAndContent(Path folder) throws IOException {
if (folder != null) {
Files.walkFileTree(folder, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
}
}
  /**
   * Sets the Docker container name for this KMS instance. The first name ever
   * set is also remembered as the monitored container name (see
   * {@link #getMonitoredDockerContainerName()}).
   */
  public synchronized void setDockerContainerName(String containerName) {
    dockerContainerName = containerName;
    if (monitoredDockerContainerName == null) {
      monitoredDockerContainerName = dockerContainerName;
    }
  }
  /** KMS server command from configuration, falling back to the default. */
  private String getServerCommand() {
    return getProperty(KMS_SERVER_COMMAND_PROP, KMS_SERVER_COMMAND_DEFAULT);
  }
  /**
   * GStreamer plugins to load, from configuration. NOTE(review): the constant
   * name "KSM_GST_PLUGINS_PROP" looks like a typo for "KMS"; it is declared
   * elsewhere so it cannot be renamed here.
   */
  private String getGstPlugins() {
    return getProperty(KSM_GST_PLUGINS_PROP, KMS_GST_PLUGINS_DEFAULT);
  }
  /** GST_DEBUG options for the server, from configuration. */
  private String getDebugOptions() {
    return getProperty(KMS_SERVER_DEBUG_PROP, KMS_SERVER_DEBUG_DEFAULT);
  }
  /**
   * Lazily creates (and caches) the {@link KurentoClient} connected to this
   * KMS. On first creation an ObjectCreated listener is installed so that
   * every new {@link MediaPipeline} gets an error listener that logs and
   * rethrows KMS errors as {@link KurentoException}.
   *
   * NOTE(review): the lazy initialization is not synchronized; concurrent
   * callers could create two clients — confirm single-threaded use.
   *
   * @return the shared client, or null when no wsUri is configured
   */
  public KurentoClient getKurentoClient() {
    if (kurentoClient == null && wsUri != null) {
      kurentoClient = createKurentoClient();
      kurentoClient.getServerManager().addObjectCreatedListener(
          new EventListener<ObjectCreatedEvent>() {
            @Override
            public void onEvent(ObjectCreatedEvent event) {
              if (event instanceof MediaPipeline) {
                MediaPipeline mp = (MediaPipeline) event;
                mp.addErrorListener(new EventListener<ErrorEvent>() {
                  @Override
                  public void onEvent(ErrorEvent event) {
                    String msgException =
                        "Error in KMS: " + event.getDescription() + "; Type: " + event.getType()
                            + "; Error Code: " + event.getErrorCode();
                    log.error(msgException);
                    // NOTE(review): thrown from an event callback; presumably
                    // surfaces in the event-dispatch thread — confirm.
                    throw new KurentoException(msgException);
                  }
                });
              }
            }
          });
    }
    return kurentoClient;
  }
  /** Creates a fresh KurentoClient for the configured WebSocket URI. */
  public KurentoClient createKurentoClient() {
    return KurentoClient.create(wsUri);
  }
public void closeKurentoClient() {
if (kurentoClient != null) {
kurentoClient.destroy();
kurentoClient = null;
}
}
  /** Returns the KMS WebSocket URI. */
  public String getWsUri() {
    return wsUri;
  }
  /**
   * Sets the KMS WebSocket URI and, when non-null, also exports it as the
   * system property named by {@code kmsWsUriExportProp} so other components
   * can read it.
   */
  public void setWsUri(String wsUri) {
    if (wsUri != null) {
      System.setProperty(kmsWsUriExportProp, wsUri);
    }
    this.wsUri = wsUri;
  }
  /** Sets the registrar URI placed into the generated kurento.conf template. */
  public void setRegistrarUri(String registrarUri) {
    this.registrarUri = registrarUri;
  }
  /** Sets the registrar local address placed into the generated kurento.conf template. */
  public void setRegistrarLocalAddress(String registrarLocalAddress) {
    this.registrarLocalAddress = registrarLocalAddress;
  }
  /** Whether this service has started (and not yet stopped) a KMS instance. */
  public boolean isKmsStarted() {
    return isKmsStarted;
  }
  /** Returns the name of the first Docker container registered via {@link #setDockerContainerName}. */
  public static String getMonitoredDockerContainerName() {
    return monitoredDockerContainerName;
  }
}
| [KmsService]Set stun port
Set the STUN port when kmsDnat and seleniumDnat are enabled, the Selenium ICE candidate type is RELAY, and the KMS candidate type is SRFLX.
Change-Id: I5938a5551e5180f083dfe01584e135785520ffe0
| kurento-integration-tests/kurento-test/src/main/java/org/kurento/test/services/KmsService.java | [KmsService]Set stun port | <ide><path>urento-integration-tests/kurento-test/src/main/java/org/kurento/test/services/KmsService.java
<ide> && SRFLX.toString().toUpperCase().equals(kmsCandidateType)) {
<ide> // Change kmsStunIp by turn values
<ide> kmsStunIp = getProperty(TEST_ICE_SERVER_URL_PROPERTY).split(":")[1];
<add> kmsStunPort = "3478";
<ide> }
<ide>
<ide> if (kmsStunIp == null) {
<ide> if (kmsStunPort == null) {
<ide> kmsStunPort = "";
<ide> }
<add>
<add> log.info("Stun Server {}:{}", kmsStunIp, kmsStunPort);
<ide>
<ide> CreateContainerCmd createContainerCmd =
<ide> dockerClient |
|
Java | mit | 1ef5390e0e33f5ecd3b09f1664c374a2d838ff32 | 0 | nh13/picard,annkupi/picard,alecw/picard,broadinstitute/picard,nh13/picard,alecw/picard,annkupi/picard,alecw/picard,broadinstitute/picard,broadinstitute/picard,broadinstitute/picard,nh13/picard,nh13/picard,broadinstitute/picard,alecw/picard,annkupi/picard,annkupi/picard | package net.sf.picard.sam;
import net.sf.picard.cmdline.CommandLineProgram;
import net.sf.picard.cmdline.Option;
import net.sf.picard.cmdline.StandardOptionDefinitions;
import net.sf.picard.cmdline.Usage;
import net.sf.picard.io.IoUtil;
import net.sf.picard.util.Log;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMFileWriter;
import net.sf.samtools.SAMFileWriterFactory;
import net.sf.samtools.SAMRecord;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
* Class to randomly downsample a BAM file while respecting that we should either get rid
* of both ends of a pair or neither end of the pair!
*/
public class DownsampleSam extends CommandLineProgram {
    @Usage public final String USAGE = getStandardUsagePreamble() + " Randomly down-sample a SAM or BAM file to retain " +
            "a random subset of the reads. Mate-pairs are either both kept or both discarded. Reads marked as not primary " +
            "alignments are all discarded. Each read is given a probability P of being retained - results with the exact " +
            "same input in the same order and with the same value for RANDOM_SEED will produce the same results.";

    @Option(shortName=StandardOptionDefinitions.INPUT_SHORT_NAME, doc="The input SAM or BAM file to downsample.")
    public File INPUT;

    @Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, doc="The output, downsampled, SAM or BAM file to write.")
    public File OUTPUT;

    // Typo fixed in the user-facing doc string ("reproducibilty").
    @Option(shortName="R", doc="Random seed to use if reproducibility is desired. " +
            "Setting to null will cause multiple invocations to produce different results.")
    public Long RANDOM_SEED = 1L;

    @Option(shortName="P", doc="The probability of keeping any individual read, between 0 and 1.")
    public double PROBABILITY = 1;

    private final Log log = Log.getInstance(DownsampleSam.class);

    /** Command-line entry point. */
    public static void main(final String[] args) {
        new DownsampleSam().instanceMainWithExit(args);
    }

    @Override
    protected int doWork() {
        IoUtil.assertFileIsReadable(INPUT);
        IoUtil.assertFileIsWritable(OUTPUT);

        // Fail fast on a nonsensical probability instead of silently keeping
        // everything (P > 1) or nothing (P < 0).
        if (PROBABILITY < 0 || PROBABILITY > 1) {
            throw new IllegalArgumentException(
                "PROBABILITY must be between 0 and 1 but was " + PROBABILITY);
        }

        final Random r = RANDOM_SEED == null ? new Random() : new Random(RANDOM_SEED);
        final SAMFileReader in = new SAMFileReader(INPUT);
        final SAMFileWriter out = new SAMFileWriterFactory().makeSAMOrBAMWriter(in.getFileHeader(), true, OUTPUT);
        // Read name -> keep decision for the first-seen end of a pair; removed
        // once the mate is seen so both ends share one decision.
        final Map<String,Boolean> decisions = new HashMap<String,Boolean>();

        long total = 0;
        long kept = 0;

        for (final SAMRecord rec : in) {
            // Secondary alignments are always dropped (and not counted).
            if (rec.getNotPrimaryAlignmentFlag()) continue;
            if (++total % 10000000 == 0) {
                log.info("Read " + total + " reads, kept " + kept);
            }

            final String key = rec.getReadName();
            final Boolean previous = decisions.remove(key);
            final boolean keeper;

            if (previous == null) {
                keeper = r.nextDouble() <= PROBABILITY;
                // Remember the decision only when a mate is expected.
                if (rec.getReadPairedFlag()) decisions.put(key, keeper);
            }
            else {
                keeper = previous;
            }

            if (keeper) {
                out.addAlignment(rec);
                ++kept;
            }
        }

        in.close();  // release the reader's file handle (previously leaked)
        out.close();
        log.info("Finished! Kept " + kept + " out of " + total + " reads.");

        return 0;
    }
}
| src/java/net/sf/picard/sam/DownsampleSam.java | package net.sf.picard.sam;
import net.sf.picard.cmdline.CommandLineProgram;
import net.sf.picard.cmdline.Option;
import net.sf.picard.cmdline.StandardOptionDefinitions;
import net.sf.picard.cmdline.Usage;
import net.sf.picard.io.IoUtil;
import net.sf.picard.util.Log;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMFileWriter;
import net.sf.samtools.SAMFileWriterFactory;
import net.sf.samtools.SAMRecord;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
* Class to randomly downsample a BAM file while respecting that we should either get rid
* of both ends of a pair or neither end of the pair!
*/
public class DownsampleSam extends CommandLineProgram {
    // Usage text shown by the command-line framework.
    @Usage public final String USAGE = getStandardUsagePreamble() + " Randomly down-sample a SAM or BAM file to retain " +
            "a random subset of the reads. Mate-pairs are either both kept or both discarded. Reads marked as not primary " +
            "alignments are all discarded. Each read is given a probability P of being retained - results with the exact " +
            "same input in the same order and with the same value for RANDOM_SEED will produce the same results.";

    // Input SAM/BAM to read.
    @Option(shortName=StandardOptionDefinitions.INPUT_SHORT_NAME, doc="The input SAM or BAM file to downsample.")
    public File INPUT;

    // Destination for the downsampled output.
    @Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, doc="The output, downsampled, SAM or BAM file to write.")
    public File OUTPUT;

    // NOTE(review): "reproducibilty" typo in the user-facing doc string below.
    @Option(shortName="R", doc="Random seed to use if reproducibilty is desired.")
    public Long RANDOM_SEED = 1L;

    // Per-read retention probability; no range validation is performed here.
    @Option(shortName="P", doc="The probability of keeping any individual read, between 0 and 1.")
    public double PROBABILITY = 1;

    private final Log log = Log.getInstance(DownsampleSam.class);

    /** Command-line entry point. */
    public static void main(final String[] args) {
        new DownsampleSam().instanceMainWithExit(args);
    }

    @Override
    protected int doWork() {
        IoUtil.assertFileIsReadable(INPUT);
        IoUtil.assertFileIsWritable(OUTPUT);

        // Null seed -> non-reproducible runs; otherwise deterministic results.
        final Random r = RANDOM_SEED == null ? new Random() : new Random(RANDOM_SEED);
        final SAMFileReader in = new SAMFileReader(INPUT);
        final SAMFileWriter out = new SAMFileWriterFactory().makeSAMOrBAMWriter(in.getFileHeader(), true, OUTPUT);
        // Read name -> keep decision for the first-seen end of a pair; removed
        // once the mate is seen so both ends share one decision.
        final Map<String,Boolean> decisions = new HashMap<String,Boolean>();

        long total = 0;
        long kept = 0;

        for (final SAMRecord rec : in) {
            // Secondary alignments are always dropped (and not counted).
            if (rec.getNotPrimaryAlignmentFlag()) continue;
            if (++total % 10000000 == 0) {
                log.info("Read " + total + " reads, kept " + kept);
            }

            final String key = rec.getReadName();
            final Boolean previous = decisions.remove(key);
            final boolean keeper;

            if (previous == null) {
                keeper = r.nextDouble() <= PROBABILITY;
                // Remember the decision only when a mate is expected.
                if (rec.getReadPairedFlag()) decisions.put(key, keeper);
            }
            else {
                keeper = previous;
            }

            if (keeper) {
                out.addAlignment(rec);
                ++kept;
            }
        }

        // NOTE(review): the reader 'in' is never closed here.
        out.close();
        log.info("Finished! Kept " + kept + " out of " + total + " reads.");

        return 0;
    }
}
| Improve option doc.
| src/java/net/sf/picard/sam/DownsampleSam.java | Improve option doc. | <ide><path>rc/java/net/sf/picard/sam/DownsampleSam.java
<ide> @Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, doc="The output, downsampled, SAM or BAM file to write.")
<ide> public File OUTPUT;
<ide>
<del> @Option(shortName="R", doc="Random seed to use if reproducibilty is desired.")
<add> @Option(shortName="R", doc="Random seed to use if reproducibilty is desired. " +
<add> "Setting to null will cause multiple invocations to produce different results.")
<ide> public Long RANDOM_SEED = 1L;
<ide>
<ide> @Option(shortName="P", doc="The probability of keeping any individual read, between 0 and 1.") |
|
Java | lgpl-2.1 | 98b2b79f98cbfb63479e3ee22627019baf60f1fe | 0 | alienth/opentsdb,OpenTSDB/opentsdb,MadDogTechnology/opentsdb,OpenTSDB/opentsdb,OpenTSDB/opentsdb,MadDogTechnology/opentsdb,alienth/opentsdb,MadDogTechnology/opentsdb | // This file is part of OpenTSDB.
// Copyright (C) 2013 The OpenTSDB Authors.
//
// This program is free software: you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 2.1 of the License, or (at your
// option) any later version. This program is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
// General Public License for more details. You should have received a copy
// of the GNU Lesser General Public License along with this program. If not,
// see <http://www.gnu.org/licenses/>.
package net.opentsdb.tsd;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.hbase.async.Bytes;
import org.hbase.async.Bytes.ByteMap;
import org.hbase.async.PutRequest;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import com.stumbleupon.async.Callback;
import com.stumbleupon.async.Deferred;
import net.opentsdb.core.TSDB;
import net.opentsdb.core.Tags;
import net.opentsdb.meta.TSMeta;
import net.opentsdb.meta.TSUIDQuery;
import net.opentsdb.meta.UIDMeta;
import net.opentsdb.uid.NoSuchUniqueId;
import net.opentsdb.uid.NoSuchUniqueName;
import net.opentsdb.uid.UniqueId;
import net.opentsdb.uid.UniqueId.UniqueIdType;
/**
* Handles calls for UID processing including getting UID status, assigning UIDs
* and other functions.
* @since 2.0
*/
final class UniqueIdRpc implements HttpRpc {
@Override
public void execute(TSDB tsdb, HttpQuery query) throws IOException {
// the uri will be /api/vX/uid/? or /api/uid/?
final String[] uri = query.explodeAPIPath();
final String endpoint = uri.length > 1 ? uri[1] : "";
if (endpoint.toLowerCase().equals("assign")) {
this.handleAssign(tsdb, query);
return;
} else if (endpoint.toLowerCase().equals("uidmeta")) {
this.handleUIDMeta(tsdb, query);
return;
} else if (endpoint.toLowerCase().equals("tsmeta")) {
this.handleTSMeta(tsdb, query);
return;
} else {
throw new BadRequestException(HttpResponseStatus.NOT_IMPLEMENTED,
"Other UID endpoints have not been implemented yet");
}
}
/**
* Assigns UIDs to the given metric, tagk or tagv names if applicable
* <p>
* This handler supports GET and POST whereby the GET command can
* parse query strings with the {@code type} as their parameter and a comma
* separated list of values to assign UIDs to.
* <p>
* Multiple types and names can be provided in one call. Each name will be
* processed independently and if there's an error (such as an invalid name or
* it is already assigned) the error will be stored in a separate error map
* and other UIDs will be processed.
* @param tsdb The TSDB from the RPC router
* @param query The query for this request
*/
private void handleAssign(final TSDB tsdb, final HttpQuery query) {
// only accept GET And POST
if (query.method() != HttpMethod.GET && query.method() != HttpMethod.POST) {
throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
"Method not allowed", "The HTTP method [" + query.method().getName() +
"] is not permitted for this endpoint");
}
final HashMap<String, List<String>> source;
if (query.method() == HttpMethod.POST) {
source = query.serializer().parseUidAssignV1();
} else {
source = new HashMap<String, List<String>>(3);
// cut down on some repetitive code, split the query string values by
// comma and add them to the source hash
String[] types = {"metric", "tagk", "tagv"};
for (int i = 0; i < types.length; i++) {
final String values = query.getQueryStringParam(types[i]);
if (values != null && !values.isEmpty()) {
final String[] metrics = values.split(",");
if (metrics != null && metrics.length > 0) {
source.put(types[i], Arrays.asList(metrics));
}
}
}
}
if (source.size() < 1) {
throw new BadRequestException("Missing values to assign UIDs");
}
final Map<String, TreeMap<String, String>> response =
new HashMap<String, TreeMap<String, String>>();
int error_count = 0;
for (Map.Entry<String, List<String>> entry : source.entrySet()) {
final TreeMap<String, String> results =
new TreeMap<String, String>();
final TreeMap<String, String> errors =
new TreeMap<String, String>();
for (String name : entry.getValue()) {
try {
final byte[] uid = tsdb.assignUid(entry.getKey(), name);
results.put(name,
UniqueId.uidToString(uid));
} catch (IllegalArgumentException e) {
errors.put(name, e.getMessage());
error_count++;
}
}
response.put(entry.getKey(), results);
if (errors.size() > 0) {
response.put(entry.getKey() + "_errors", errors);
}
}
if (error_count < 1) {
query.sendReply(query.serializer().formatUidAssignV1(response));
} else {
query.sendReply(HttpResponseStatus.BAD_REQUEST,
query.serializer().formatUidAssignV1(response));
}
}
/**
* Handles CRUD calls to individual UIDMeta data entries
* @param tsdb The TSDB from the RPC router
* @param query The query for this request
*/
  private void handleUIDMeta(final TSDB tsdb, final HttpQuery query) {
    final HttpMethod method = query.getAPIMethod();
    // GET
    if (method == HttpMethod.GET) {
      // Both "uid" and "type" query-string parameters are required.
      final String uid = query.getRequiredQueryStringParam("uid");
      final UniqueIdType type = UniqueId.stringToUniqueIdType(
          query.getRequiredQueryStringParam("type"));
      try {
        final UIDMeta meta = UIDMeta.getUIDMeta(tsdb, type, uid)
          .joinUninterruptibly();
        query.sendReply(query.serializer().formatUidMetaV1(meta));
      } catch (NoSuchUniqueId e) {
        throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
            "Could not find the requested UID", e);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    // POST
    } else if (method == HttpMethod.POST || method == HttpMethod.PUT) {
      // The meta arrives either as a serialized body or as query-string params.
      final UIDMeta meta;
      if (query.hasContent()) {
        meta = query.serializer().parseUidMetaV1();
      } else {
        meta = this.parseUIDMetaQS(query);
      }
      /**
       * Storage callback used to determine if the storage call was successful
       * or not. Also returns the updated object from storage.
       */
      class SyncCB implements Callback<Deferred<UIDMeta>, Boolean> {
        @Override
        public Deferred<UIDMeta> call(Boolean success) throws Exception {
          if (!success) {
            throw new BadRequestException(
                HttpResponseStatus.INTERNAL_SERVER_ERROR,
                "Failed to save the UIDMeta to storage",
                "This may be caused by another process modifying storage data");
          }
          return UIDMeta.getUIDMeta(tsdb, meta.getType(), meta.getUID());
        }
      }
      try {
        // Second syncToStorage arg is true for PUT; presumably requests a full
        // overwrite rather than a merge — confirm with UIDMeta.syncToStorage.
        final Deferred<UIDMeta> process_meta = meta.syncToStorage(tsdb,
            method == HttpMethod.PUT).addCallbackDeferring(new SyncCB());
        final UIDMeta updated_meta = process_meta.joinUninterruptibly();
        // Keep the search index in sync with the stored meta.
        tsdb.indexUIDMeta(updated_meta);
        query.sendReply(query.serializer().formatUidMetaV1(updated_meta));
      } catch (IllegalStateException e) {
        // Nothing changed, so nothing was written.
        query.sendStatusOnly(HttpResponseStatus.NOT_MODIFIED);
      } catch (IllegalArgumentException e) {
        throw new BadRequestException(e);
      } catch (NoSuchUniqueId e) {
        throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
            "Could not find the requested UID", e);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    // DELETE
    } else if (method == HttpMethod.DELETE) {
      final UIDMeta meta;
      if (query.hasContent()) {
        meta = query.serializer().parseUidMetaV1();
      } else {
        meta = this.parseUIDMetaQS(query);
      }
      try {
        meta.delete(tsdb).joinUninterruptibly();
        // Remove from the search index as well.
        tsdb.deleteUIDMeta(meta);
      } catch (IllegalArgumentException e) {
        throw new BadRequestException("Unable to delete UIDMeta information", e);
      } catch (NoSuchUniqueId e) {
        throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
            "Could not find the requested UID", e);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
      query.sendStatusOnly(HttpResponseStatus.NO_CONTENT);
    } else {
      throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
          "Method not allowed", "The HTTP method [" + method.getName() +
          "] is not permitted for this endpoint");
    }
  }
/**
* Handles CRUD calls to individual TSMeta data entries
* @param tsdb The TSDB from the RPC router
* @param query The query for this request
*/
private void handleTSMeta(final TSDB tsdb, final HttpQuery query) {
final HttpMethod method = query.getAPIMethod();
// GET
if (method == HttpMethod.GET) {
String tsuid = null;
if (query.hasQueryStringParam("tsuid")) {
tsuid = query.getQueryStringParam("tsuid");
try {
final TSMeta meta = TSMeta.getTSMeta(tsdb, tsuid).joinUninterruptibly();
if (meta != null) {
query.sendReply(query.serializer().formatTSMetaV1(meta));
} else {
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Could not find Timeseries meta data");
}
} catch (NoSuchUniqueName e) {
// this would only happen if someone deleted a UID but left the
// the timeseries meta data
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one of the UIDs", e);
} catch (BadRequestException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
String mquery = query.getRequiredQueryStringParam("m");
// m is of the following forms:
// metric[{tag=value,...}]
// where the parts in square brackets `[' .. `]' are optional.
final HashMap<String, String> tags = new HashMap<String, String>();
String metric = null;
try {
metric = Tags.parseWithMetric(mquery, tags);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
}
final TSUIDQuery tsuid_query = new TSUIDQuery(tsdb, metric, tags);
try {
final List<TSMeta> tsmetas = tsuid_query.getTSMetas()
.joinUninterruptibly();
query.sendReply(query.serializer().formatTSMetaListV1(tsmetas));
} catch (NoSuchUniqueName e) {
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one of the UIDs", e);
} catch (BadRequestException e) {
throw e;
} catch (RuntimeException e) {
throw new BadRequestException(e);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// POST / PUT
} else if (method == HttpMethod.POST || method == HttpMethod.PUT) {
final TSMeta meta;
if (query.hasContent()) {
meta = query.serializer().parseTSMetaV1();
} else {
meta = this.parseTSMetaQS(query);
}
/**
* Storage callback used to determine if the storage call was successful
* or not. Also returns the updated object from storage.
*/
class SyncCB implements Callback<Deferred<TSMeta>, Boolean> {
@Override
public Deferred<TSMeta> call(Boolean success) throws Exception {
if (!success) {
throw new BadRequestException(
HttpResponseStatus.INTERNAL_SERVER_ERROR,
"Failed to save the TSMeta to storage",
"This may be caused by another process modifying storage data");
}
return TSMeta.getTSMeta(tsdb, meta.getTSUID());
}
}
if (meta.getTSUID() == null || meta.getTSUID().isEmpty()) {
// we got a JSON object without TSUID. Try to find a timeseries spec of
// the form "m": "metric{tagk=tagv,...}"
final String metric = query.getRequiredQueryStringParam("m");
final boolean create = query.getQueryStringParam("create") != null
&& query.getQueryStringParam("create").equals("true");
final String tsuid = getTSUIDForMetric(metric, tsdb);
class WriteCounterIfNotPresentCB implements Callback<Boolean, Boolean> {
@Override
public Boolean call(Boolean exists) throws Exception {
if (!exists && create) {
final PutRequest put = new PutRequest(tsdb.metaTable(),
UniqueId.stringToUid(tsuid), TSMeta.FAMILY(),
TSMeta.COUNTER_QUALIFIER(), Bytes.fromLong(0));
tsdb.getClient().put(put);
}
return exists;
}
}
try {
// Check whether we have a TSMeta stored already
final boolean exists = TSMeta
.metaExistsInStorage(tsdb, tsuid)
.joinUninterruptibly();
// set TSUID
meta.setTSUID(tsuid);
if (!exists && create) {
// Write 0 to counter column if not present
TSMeta.counterExistsInStorage(tsdb, UniqueId.stringToUid(tsuid))
.addCallback(new WriteCounterIfNotPresentCB())
.joinUninterruptibly();
// set TSUID
final Deferred<TSMeta> process_meta = meta.storeNew(tsdb)
.addCallbackDeferring(new SyncCB());
final TSMeta updated_meta = process_meta.joinUninterruptibly();
tsdb.indexTSMeta(updated_meta);
tsdb.processTSMetaThroughTrees(updated_meta);
query.sendReply(query.serializer().formatTSMetaV1(updated_meta));
} else if (exists) {
final Deferred<TSMeta> process_meta = meta.syncToStorage(tsdb,
method == HttpMethod.PUT).addCallbackDeferring(new SyncCB());
final TSMeta updated_meta = process_meta.joinUninterruptibly();
tsdb.indexTSMeta(updated_meta);
query.sendReply(query.serializer().formatTSMetaV1(updated_meta));
} else {
throw new BadRequestException(
"Could not find TSMeta, specify \"create=true\" to create a new one.");
}
} catch (IllegalStateException e) {
query.sendStatusOnly(HttpResponseStatus.NOT_MODIFIED);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
} catch (BadRequestException e) {
throw e;
} catch (NoSuchUniqueName e) {
// this would only happen if someone deleted a UID but left the
// the timeseries meta data
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one or more UIDs", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
try {
final Deferred<TSMeta> process_meta = meta.syncToStorage(tsdb,
method == HttpMethod.PUT).addCallbackDeferring(new SyncCB());
final TSMeta updated_meta = process_meta.joinUninterruptibly();
tsdb.indexTSMeta(updated_meta);
query.sendReply(query.serializer().formatTSMetaV1(updated_meta));
} catch (IllegalStateException e) {
query.sendStatusOnly(HttpResponseStatus.NOT_MODIFIED);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
} catch (NoSuchUniqueName e) {
// this would only happen if someone deleted a UID but left the
// the timeseries meta data
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one or more UIDs", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// DELETE
} else if (method == HttpMethod.DELETE) {
final TSMeta meta;
if (query.hasContent()) {
meta = query.serializer().parseTSMetaV1();
} else {
meta = this.parseTSMetaQS(query);
}
try{
meta.delete(tsdb);
tsdb.deleteTSMeta(meta.getTSUID());
} catch (IllegalArgumentException e) {
throw new BadRequestException("Unable to delete TSMeta information", e);
}
query.sendStatusOnly(HttpResponseStatus.NO_CONTENT);
} else {
throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
"Method not allowed", "The HTTP method [" + method.getName() +
"] is not permitted for this endpoint");
}
}
/**
* Used with verb overrides to parse out values from a query string
* @param query The query to parse
* @return An UIDMeta object with configured values
* @throws BadRequestException if a required value was missing or could not
* be parsed
*/
private UIDMeta parseUIDMetaQS(final HttpQuery query) {
final String uid = query.getRequiredQueryStringParam("uid");
final String type = query.getRequiredQueryStringParam("type");
final UIDMeta meta = new UIDMeta(UniqueId.stringToUniqueIdType(type), uid);
final String display_name = query.getQueryStringParam("display_name");
if (display_name != null) {
meta.setDisplayName(display_name);
}
final String description = query.getQueryStringParam("description");
if (description != null) {
meta.setDescription(description);
}
final String notes = query.getQueryStringParam("notes");
if (notes != null) {
meta.setNotes(notes);
}
return meta;
}
/**
* Used with verb overrides to parse out values from a query string
* @param query The query to parse
* @return An TSMeta object with configured values
* @throws BadRequestException if a required value was missing or could not
* be parsed
*/
private TSMeta parseTSMetaQS(final HttpQuery query) {
final String tsuid = query.getRequiredQueryStringParam("tsuid");
final TSMeta meta = new TSMeta(tsuid);
final String display_name = query.getQueryStringParam("display_name");
if (display_name != null) {
meta.setDisplayName(display_name);
}
final String description = query.getQueryStringParam("description");
if (description != null) {
meta.setDescription(description);
}
final String notes = query.getQueryStringParam("notes");
if (notes != null) {
meta.setNotes(notes);
}
final String units = query.getQueryStringParam("units");
if (units != null) {
meta.setUnits(units);
}
final String data_type = query.getQueryStringParam("data_type");
if (data_type != null) {
meta.setDataType(data_type);
}
final String retention = query.getQueryStringParam("retention");
if (retention != null && !retention.isEmpty()) {
try {
meta.setRetention(Integer.parseInt(retention));
} catch (NumberFormatException nfe) {
throw new BadRequestException("Unable to parse 'retention' value");
}
}
final String max = query.getQueryStringParam("max");
if (max != null && !max.isEmpty()) {
try {
meta.setMax(Float.parseFloat(max));
} catch (NumberFormatException nfe) {
throw new BadRequestException("Unable to parse 'max' value");
}
}
final String min = query.getQueryStringParam("min");
if (min != null && !min.isEmpty()) {
try {
meta.setMin(Float.parseFloat(min));
} catch (NumberFormatException nfe) {
throw new BadRequestException("Unable to parse 'min' value");
}
}
return meta;
}
/**
* Parses a query string "m=metric{tagk1=tagv1,...}" type query and returns
* a tsuid.
* @param data_query The query we're building
* @throws BadRequestException if we are unable to parse the query or it is
* missing components
* @todo - make this asynchronous
*/
private String getTSUIDForMetric(final String query_string, TSDB tsdb) {
if (query_string == null || query_string.isEmpty()) {
throw new BadRequestException("The query string was empty");
}
// m is of the following forms:
// metric[{tag=value,...}]
// where the parts in square brackets `[' .. `]' are optional.
final HashMap<String, String> tags = new HashMap<String, String>();
String metric = null;
try {
metric = Tags.parseWithMetric(query_string, tags);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
}
// sort the UIDs on tagk values
final ByteMap<byte[]> tag_uids = new ByteMap<byte[]>();
for (final Entry<String, String> pair : tags.entrySet()) {
tag_uids.put(tsdb.getUID(UniqueIdType.TAGK, pair.getKey()),
tsdb.getUID(UniqueIdType.TAGV, pair.getValue()));
}
// Byte Buffer to generate TSUID, pre allocated to the size of the TSUID
final ByteArrayOutputStream buf = new ByteArrayOutputStream(
TSDB.metrics_width() + tag_uids.size() *
(TSDB.tagk_width() + TSDB.tagv_width()));
try {
buf.write(tsdb.getUID(UniqueIdType.METRIC, metric));
for (final Entry<byte[], byte[]> uids: tag_uids.entrySet()) {
buf.write(uids.getKey());
buf.write(uids.getValue());
}
} catch (IOException e) {
throw new BadRequestException(e);
}
final String tsuid = UniqueId.uidToString(buf.toByteArray());
return tsuid;
}
}
| src/tsd/UniqueIdRpc.java | // This file is part of OpenTSDB.
// Copyright (C) 2013 The OpenTSDB Authors.
//
// This program is free software: you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 2.1 of the License, or (at your
// option) any later version. This program is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
// General Public License for more details. You should have received a copy
// of the GNU Lesser General Public License along with this program. If not,
// see <http://www.gnu.org/licenses/>.
package net.opentsdb.tsd;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.hbase.async.Bytes;
import org.hbase.async.PutRequest;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import com.stumbleupon.async.Callback;
import com.stumbleupon.async.Deferred;
import net.opentsdb.core.TSDB;
import net.opentsdb.core.Tags;
import net.opentsdb.meta.TSMeta;
import net.opentsdb.meta.TSUIDQuery;
import net.opentsdb.meta.UIDMeta;
import net.opentsdb.uid.NoSuchUniqueId;
import net.opentsdb.uid.NoSuchUniqueName;
import net.opentsdb.uid.UniqueId;
import net.opentsdb.uid.UniqueId.UniqueIdType;
/**
* Handles calls for UID processing including getting UID status, assigning UIDs
* and other functions.
* @since 2.0
*/
final class UniqueIdRpc implements HttpRpc {
@Override
public void execute(TSDB tsdb, HttpQuery query) throws IOException {
// the uri will be /api/vX/uid/? or /api/uid/?
final String[] uri = query.explodeAPIPath();
final String endpoint = uri.length > 1 ? uri[1] : "";
if (endpoint.toLowerCase().equals("assign")) {
this.handleAssign(tsdb, query);
return;
} else if (endpoint.toLowerCase().equals("uidmeta")) {
this.handleUIDMeta(tsdb, query);
return;
} else if (endpoint.toLowerCase().equals("tsmeta")) {
this.handleTSMeta(tsdb, query);
return;
} else {
throw new BadRequestException(HttpResponseStatus.NOT_IMPLEMENTED,
"Other UID endpoints have not been implemented yet");
}
}
/**
* Assigns UIDs to the given metric, tagk or tagv names if applicable
* <p>
* This handler supports GET and POST whereby the GET command can
* parse query strings with the {@code type} as their parameter and a comma
* separated list of values to assign UIDs to.
* <p>
* Multiple types and names can be provided in one call. Each name will be
* processed independently and if there's an error (such as an invalid name or
* it is already assigned) the error will be stored in a separate error map
* and other UIDs will be processed.
* @param tsdb The TSDB from the RPC router
* @param query The query for this request
*/
private void handleAssign(final TSDB tsdb, final HttpQuery query) {
// only accept GET And POST
if (query.method() != HttpMethod.GET && query.method() != HttpMethod.POST) {
throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
"Method not allowed", "The HTTP method [" + query.method().getName() +
"] is not permitted for this endpoint");
}
final HashMap<String, List<String>> source;
if (query.method() == HttpMethod.POST) {
source = query.serializer().parseUidAssignV1();
} else {
source = new HashMap<String, List<String>>(3);
// cut down on some repetitive code, split the query string values by
// comma and add them to the source hash
String[] types = {"metric", "tagk", "tagv"};
for (int i = 0; i < types.length; i++) {
final String values = query.getQueryStringParam(types[i]);
if (values != null && !values.isEmpty()) {
final String[] metrics = values.split(",");
if (metrics != null && metrics.length > 0) {
source.put(types[i], Arrays.asList(metrics));
}
}
}
}
if (source.size() < 1) {
throw new BadRequestException("Missing values to assign UIDs");
}
final Map<String, TreeMap<String, String>> response =
new HashMap<String, TreeMap<String, String>>();
int error_count = 0;
for (Map.Entry<String, List<String>> entry : source.entrySet()) {
final TreeMap<String, String> results =
new TreeMap<String, String>();
final TreeMap<String, String> errors =
new TreeMap<String, String>();
for (String name : entry.getValue()) {
try {
final byte[] uid = tsdb.assignUid(entry.getKey(), name);
results.put(name,
UniqueId.uidToString(uid));
} catch (IllegalArgumentException e) {
errors.put(name, e.getMessage());
error_count++;
}
}
response.put(entry.getKey(), results);
if (errors.size() > 0) {
response.put(entry.getKey() + "_errors", errors);
}
}
if (error_count < 1) {
query.sendReply(query.serializer().formatUidAssignV1(response));
} else {
query.sendReply(HttpResponseStatus.BAD_REQUEST,
query.serializer().formatUidAssignV1(response));
}
}
/**
* Handles CRUD calls to individual UIDMeta data entries
* @param tsdb The TSDB from the RPC router
* @param query The query for this request
*/
private void handleUIDMeta(final TSDB tsdb, final HttpQuery query) {
final HttpMethod method = query.getAPIMethod();
// GET
if (method == HttpMethod.GET) {
final String uid = query.getRequiredQueryStringParam("uid");
final UniqueIdType type = UniqueId.stringToUniqueIdType(
query.getRequiredQueryStringParam("type"));
try {
final UIDMeta meta = UIDMeta.getUIDMeta(tsdb, type, uid)
.joinUninterruptibly();
query.sendReply(query.serializer().formatUidMetaV1(meta));
} catch (NoSuchUniqueId e) {
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Could not find the requested UID", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
// POST
} else if (method == HttpMethod.POST || method == HttpMethod.PUT) {
final UIDMeta meta;
if (query.hasContent()) {
meta = query.serializer().parseUidMetaV1();
} else {
meta = this.parseUIDMetaQS(query);
}
/**
* Storage callback used to determine if the storage call was successful
* or not. Also returns the updated object from storage.
*/
class SyncCB implements Callback<Deferred<UIDMeta>, Boolean> {
@Override
public Deferred<UIDMeta> call(Boolean success) throws Exception {
if (!success) {
throw new BadRequestException(
HttpResponseStatus.INTERNAL_SERVER_ERROR,
"Failed to save the UIDMeta to storage",
"This may be caused by another process modifying storage data");
}
return UIDMeta.getUIDMeta(tsdb, meta.getType(), meta.getUID());
}
}
try {
final Deferred<UIDMeta> process_meta = meta.syncToStorage(tsdb,
method == HttpMethod.PUT).addCallbackDeferring(new SyncCB());
final UIDMeta updated_meta = process_meta.joinUninterruptibly();
tsdb.indexUIDMeta(updated_meta);
query.sendReply(query.serializer().formatUidMetaV1(updated_meta));
} catch (IllegalStateException e) {
query.sendStatusOnly(HttpResponseStatus.NOT_MODIFIED);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
} catch (NoSuchUniqueId e) {
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Could not find the requested UID", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
// DELETE
} else if (method == HttpMethod.DELETE) {
final UIDMeta meta;
if (query.hasContent()) {
meta = query.serializer().parseUidMetaV1();
} else {
meta = this.parseUIDMetaQS(query);
}
try {
meta.delete(tsdb).joinUninterruptibly();
tsdb.deleteUIDMeta(meta);
} catch (IllegalArgumentException e) {
throw new BadRequestException("Unable to delete UIDMeta information", e);
} catch (NoSuchUniqueId e) {
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Could not find the requested UID", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
query.sendStatusOnly(HttpResponseStatus.NO_CONTENT);
} else {
throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
"Method not allowed", "The HTTP method [" + method.getName() +
"] is not permitted for this endpoint");
}
}
/**
* Handles CRUD calls to individual TSMeta data entries
* @param tsdb The TSDB from the RPC router
* @param query The query for this request
*/
private void handleTSMeta(final TSDB tsdb, final HttpQuery query) {
final HttpMethod method = query.getAPIMethod();
// GET
if (method == HttpMethod.GET) {
String tsuid = null;
if (query.hasQueryStringParam("tsuid")) {
tsuid = query.getQueryStringParam("tsuid");
try {
final TSMeta meta = TSMeta.getTSMeta(tsdb, tsuid).joinUninterruptibly();
if (meta != null) {
query.sendReply(query.serializer().formatTSMetaV1(meta));
} else {
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Could not find Timeseries meta data");
}
} catch (NoSuchUniqueName e) {
// this would only happen if someone deleted a UID but left the
// the timeseries meta data
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one of the UIDs", e);
} catch (BadRequestException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
String mquery = query.getRequiredQueryStringParam("m");
// m is of the following forms:
// metric[{tag=value,...}]
// where the parts in square brackets `[' .. `]' are optional.
final HashMap<String, String> tags = new HashMap<String, String>();
String metric = null;
try {
metric = Tags.parseWithMetric(mquery, tags);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
}
final TSUIDQuery tsuid_query = new TSUIDQuery(tsdb, metric, tags);
try {
final List<TSMeta> tsmetas = tsuid_query.getTSMetas()
.joinUninterruptibly();
query.sendReply(query.serializer().formatTSMetaListV1(tsmetas));
} catch (NoSuchUniqueName e) {
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one of the UIDs", e);
} catch (BadRequestException e) {
throw e;
} catch (RuntimeException e) {
throw new BadRequestException(e);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// POST / PUT
} else if (method == HttpMethod.POST || method == HttpMethod.PUT) {
final TSMeta meta;
if (query.hasContent()) {
meta = query.serializer().parseTSMetaV1();
} else {
meta = this.parseTSMetaQS(query);
}
/**
* Storage callback used to determine if the storage call was successful
* or not. Also returns the updated object from storage.
*/
class SyncCB implements Callback<Deferred<TSMeta>, Boolean> {
@Override
public Deferred<TSMeta> call(Boolean success) throws Exception {
if (!success) {
throw new BadRequestException(
HttpResponseStatus.INTERNAL_SERVER_ERROR,
"Failed to save the TSMeta to storage",
"This may be caused by another process modifying storage data");
}
return TSMeta.getTSMeta(tsdb, meta.getTSUID());
}
}
if (meta.getTSUID() == null || meta.getTSUID().isEmpty()) {
// we got a JSON object without TSUID. Try to find a timeseries spec of
// the form "m": "metric{tagk=tagv,...}"
final String metric = query.getRequiredQueryStringParam("m");
final boolean create = query.getQueryStringParam("create") != null
&& query.getQueryStringParam("create").equals("true");
final String tsuid = getTSUIDForMetric(metric, tsdb);
class WriteCounterIfNotPresentCB implements Callback<Boolean, Boolean> {
@Override
public Boolean call(Boolean exists) throws Exception {
if (!exists && create) {
final PutRequest put = new PutRequest(tsdb.metaTable(),
UniqueId.stringToUid(tsuid), TSMeta.FAMILY(),
TSMeta.COUNTER_QUALIFIER(), Bytes.fromLong(0));
tsdb.getClient().put(put);
}
return exists;
}
}
try {
// Check whether we have a TSMeta stored already
final boolean exists = TSMeta
.metaExistsInStorage(tsdb, tsuid)
.joinUninterruptibly();
// set TSUID
meta.setTSUID(tsuid);
if (!exists && create) {
// Write 0 to counter column if not present
TSMeta.counterExistsInStorage(tsdb, UniqueId.stringToUid(tsuid))
.addCallback(new WriteCounterIfNotPresentCB())
.joinUninterruptibly();
// set TSUID
final Deferred<TSMeta> process_meta = meta.storeNew(tsdb)
.addCallbackDeferring(new SyncCB());
final TSMeta updated_meta = process_meta.joinUninterruptibly();
tsdb.indexTSMeta(updated_meta);
tsdb.processTSMetaThroughTrees(updated_meta);
query.sendReply(query.serializer().formatTSMetaV1(updated_meta));
} else if (exists) {
final Deferred<TSMeta> process_meta = meta.syncToStorage(tsdb,
method == HttpMethod.PUT).addCallbackDeferring(new SyncCB());
final TSMeta updated_meta = process_meta.joinUninterruptibly();
tsdb.indexTSMeta(updated_meta);
query.sendReply(query.serializer().formatTSMetaV1(updated_meta));
} else {
throw new BadRequestException(
"Could not find TSMeta, specify \"create=true\" to create a new one.");
}
} catch (IllegalStateException e) {
query.sendStatusOnly(HttpResponseStatus.NOT_MODIFIED);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
} catch (BadRequestException e) {
throw e;
} catch (NoSuchUniqueName e) {
// this would only happen if someone deleted a UID but left the
// the timeseries meta data
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one or more UIDs", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
try {
final Deferred<TSMeta> process_meta = meta.syncToStorage(tsdb,
method == HttpMethod.PUT).addCallbackDeferring(new SyncCB());
final TSMeta updated_meta = process_meta.joinUninterruptibly();
tsdb.indexTSMeta(updated_meta);
query.sendReply(query.serializer().formatTSMetaV1(updated_meta));
} catch (IllegalStateException e) {
query.sendStatusOnly(HttpResponseStatus.NOT_MODIFIED);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
} catch (NoSuchUniqueName e) {
// this would only happen if someone deleted a UID but left the
// the timeseries meta data
throw new BadRequestException(HttpResponseStatus.NOT_FOUND,
"Unable to find one or more UIDs", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// DELETE
} else if (method == HttpMethod.DELETE) {
final TSMeta meta;
if (query.hasContent()) {
meta = query.serializer().parseTSMetaV1();
} else {
meta = this.parseTSMetaQS(query);
}
try{
meta.delete(tsdb);
tsdb.deleteTSMeta(meta.getTSUID());
} catch (IllegalArgumentException e) {
throw new BadRequestException("Unable to delete TSMeta information", e);
}
query.sendStatusOnly(HttpResponseStatus.NO_CONTENT);
} else {
throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
"Method not allowed", "The HTTP method [" + method.getName() +
"] is not permitted for this endpoint");
}
}
/**
* Used with verb overrides to parse out values from a query string
* @param query The query to parse
* @return An UIDMeta object with configured values
* @throws BadRequestException if a required value was missing or could not
* be parsed
*/
private UIDMeta parseUIDMetaQS(final HttpQuery query) {
final String uid = query.getRequiredQueryStringParam("uid");
final String type = query.getRequiredQueryStringParam("type");
final UIDMeta meta = new UIDMeta(UniqueId.stringToUniqueIdType(type), uid);
final String display_name = query.getQueryStringParam("display_name");
if (display_name != null) {
meta.setDisplayName(display_name);
}
final String description = query.getQueryStringParam("description");
if (description != null) {
meta.setDescription(description);
}
final String notes = query.getQueryStringParam("notes");
if (notes != null) {
meta.setNotes(notes);
}
return meta;
}
/**
* Used with verb overrides to parse out values from a query string
* @param query The query to parse
* @return An TSMeta object with configured values
* @throws BadRequestException if a required value was missing or could not
* be parsed
*/
private TSMeta parseTSMetaQS(final HttpQuery query) {
final String tsuid = query.getRequiredQueryStringParam("tsuid");
final TSMeta meta = new TSMeta(tsuid);
final String display_name = query.getQueryStringParam("display_name");
if (display_name != null) {
meta.setDisplayName(display_name);
}
final String description = query.getQueryStringParam("description");
if (description != null) {
meta.setDescription(description);
}
final String notes = query.getQueryStringParam("notes");
if (notes != null) {
meta.setNotes(notes);
}
final String units = query.getQueryStringParam("units");
if (units != null) {
meta.setUnits(units);
}
final String data_type = query.getQueryStringParam("data_type");
if (data_type != null) {
meta.setDataType(data_type);
}
final String retention = query.getQueryStringParam("retention");
if (retention != null && !retention.isEmpty()) {
try {
meta.setRetention(Integer.parseInt(retention));
} catch (NumberFormatException nfe) {
throw new BadRequestException("Unable to parse 'retention' value");
}
}
final String max = query.getQueryStringParam("max");
if (max != null && !max.isEmpty()) {
try {
meta.setMax(Float.parseFloat(max));
} catch (NumberFormatException nfe) {
throw new BadRequestException("Unable to parse 'max' value");
}
}
final String min = query.getQueryStringParam("min");
if (min != null && !min.isEmpty()) {
try {
meta.setMin(Float.parseFloat(min));
} catch (NumberFormatException nfe) {
throw new BadRequestException("Unable to parse 'min' value");
}
}
return meta;
}
/**
* Parses a query string "m=metric{tagk1=tagv1,...}" type query and returns
* a tsuid.
* @param data_query The query we're building
* @throws BadRequestException if we are unable to parse the query or it is
* missing components
*/
private String getTSUIDForMetric(final String query_string, TSDB tsdb) {
if (query_string == null || query_string.isEmpty()) {
throw new BadRequestException("The query string was empty");
}
// m is of the following forms:
// metric[{tag=value,...}]
// where the parts in square brackets `[' .. `]' are optional.
final HashMap<String, String> tags = new HashMap<String, String>();
String metric = null;
try {
metric = Tags.parseWithMetric(query_string, tags);
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
}
final TreeMap<String, String> sortedTags = new TreeMap<String, String>(tags);
// Byte Buffer to generate TSUID, pre allocated to the size of the TSUID
final ByteArrayOutputStream buf = new ByteArrayOutputStream(
TSDB.metrics_width() + sortedTags.size() *
(TSDB.tagk_width() + TSDB.tagv_width()));
try {
buf.write(tsdb.getUID(UniqueIdType.METRIC, metric));
for (Entry<String, String> e: sortedTags.entrySet()) {
// Fix for net.opentsdb.tsd.TestUniqueIdRpc.tsuidPostByM()
buf.write(tsdb.getUID(UniqueIdType.TAGK, e.getKey()), 0, TSDB.tagk_width());
buf.write(tsdb.getUID(UniqueIdType.TAGV, e.getValue()), 0, TSDB.tagv_width());
}
} catch (IOException e) {
throw new BadRequestException(e);
}
final String tsuid = UniqueId.uidToString(buf.toByteArray());
return tsuid;
}
}
| Fix #642 by sorting the tags properly on the bytes, NOT the string
values. Sheesh. Thanks @wuxuehong214
Signed-off-by: Chris Larsen <[email protected]>
| src/tsd/UniqueIdRpc.java | Fix #642 by sorting the tags properly on the bytes, NOT the string values. Sheesh. Thanks @wuxuehong214 | <ide><path>rc/tsd/UniqueIdRpc.java
<ide> import java.util.TreeMap;
<ide>
<ide> import org.hbase.async.Bytes;
<add>import org.hbase.async.Bytes.ByteMap;
<ide> import org.hbase.async.PutRequest;
<ide> import org.jboss.netty.handler.codec.http.HttpMethod;
<ide> import org.jboss.netty.handler.codec.http.HttpResponseStatus;
<ide> * @param data_query The query we're building
<ide> * @throws BadRequestException if we are unable to parse the query or it is
<ide> * missing components
<add> * @todo - make this asynchronous
<ide> */
<ide> private String getTSUIDForMetric(final String query_string, TSDB tsdb) {
<ide> if (query_string == null || query_string.isEmpty()) {
<ide> } catch (IllegalArgumentException e) {
<ide> throw new BadRequestException(e);
<ide> }
<del> final TreeMap<String, String> sortedTags = new TreeMap<String, String>(tags);
<add>
<add> // sort the UIDs on tagk values
<add> final ByteMap<byte[]> tag_uids = new ByteMap<byte[]>();
<add> for (final Entry<String, String> pair : tags.entrySet()) {
<add> tag_uids.put(tsdb.getUID(UniqueIdType.TAGK, pair.getKey()),
<add> tsdb.getUID(UniqueIdType.TAGV, pair.getValue()));
<add> }
<add>
<ide> // Byte Buffer to generate TSUID, pre allocated to the size of the TSUID
<ide> final ByteArrayOutputStream buf = new ByteArrayOutputStream(
<del> TSDB.metrics_width() + sortedTags.size() *
<add> TSDB.metrics_width() + tag_uids.size() *
<ide> (TSDB.tagk_width() + TSDB.tagv_width()));
<ide> try {
<del> buf.write(tsdb.getUID(UniqueIdType.METRIC, metric));
<del> for (Entry<String, String> e: sortedTags.entrySet()) {
<del> // Fix for net.opentsdb.tsd.TestUniqueIdRpc.tsuidPostByM()
<del> buf.write(tsdb.getUID(UniqueIdType.TAGK, e.getKey()), 0, TSDB.tagk_width());
<del> buf.write(tsdb.getUID(UniqueIdType.TAGV, e.getValue()), 0, TSDB.tagv_width());
<add> buf.write(tsdb.getUID(UniqueIdType.METRIC, metric));
<add> for (final Entry<byte[], byte[]> uids: tag_uids.entrySet()) {
<add> buf.write(uids.getKey());
<add> buf.write(uids.getValue());
<ide> }
<ide> } catch (IOException e) {
<ide> throw new BadRequestException(e); |
|
Java | lgpl-2.1 | 39680c9e258cd0b26bf59bdf62dafc2b7385714b | 0 | samskivert/samskivert,samskivert/samskivert | //
// $Id: DispatcherServlet.java,v 1.2 2001/10/31 09:45:23 mdb Exp $
//
// samskivert library - useful routines for java programs
// Copyright (C) 2001 Michael Bayne
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.velocity;
import java.io.IOException;
import java.util.HashMap;
import java.util.Properties;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.Template;
import org.apache.velocity.context.Context;
import org.apache.velocity.exception.ParseErrorException;
import org.apache.velocity.exception.ResourceNotFoundException;
import org.apache.velocity.servlet.VelocityServlet;
import com.samskivert.Log;
import com.samskivert.servlet.MessageManager;
import com.samskivert.servlet.RedirectException;
import com.samskivert.servlet.util.ExceptionMap;
import com.samskivert.servlet.util.FriendlyException;
import com.samskivert.util.ConfigUtil;
import com.samskivert.util.StringUtil;
/**
* The dispatcher servlet builds upon Velocity's architecture. It does so
* in the following ways:
*
* <ul>
* <li> It defines the notion of a logic object which populates the
* context with data to be used to satisfy a particular request. The logic
* is not a servlet and is therefore limited in what it can do while
* populating data. Experience dictates that ultimate flexibility leads to
* bad design decisions and that this is a place where that sort of thing
* can be comfortably nipped in the bud. <br><br>
*
* <li> It allows template files to be referenced directly in the URL
* while maintaining the ability to choose a cobranded template based on
* information in the request. The URI is mapped to a servlet based on
* some simple mapping rules. This provides template designers with a
* clearer understanding of the structure of a web application as well as
* with an easy way to test their templates in the absence of an
* associated servlet. <br><br>
*
* <li> It provides a common error handling paradigm that simplifies the
* task of authoring web applications.
* </ul>
*
* <p><b>URI to servlet mapping</b><br>
* The mapping process allows the Velocity framework to be invoked for all
* requests ending in a particular file extension (usually
* <code>.wm</code>). It is necessary to instruct your servlet engine of
* choice to invoke the <code>DispatcherServlet</code> for all requests
* ending in that extension. For Apache/JServ this looks something like
* this:
*
* <pre>
* ApJServAction .wm /servlets/com.samskivert.velocity.Dispatcher
* </pre>
*
* The request URI then defines the path of the template that will be used
* to satisfy the request. To understand how code is selected to go along
* with the request, let's look at an example. Consider the following
* configuration:
*
* <pre>
* applications=whowhere
* whowhere.base_uri=/whowhere
* whowhere.base_pkg=whowhere.logic
* </pre>
*
* This defines an application identified as <code>whowhere</code>. An
* application is defined by three parameters, the application identifier,
* the <code>base_uri</code>, and the <code>base_pkg</code>. The
* <code>base_uri</code> defines the prefix shared by all pages served by
* the application and which serves to identify which application to
* invoke when processing a request. The <code>base_pkg</code> is used to
* construct the logic classname based on the URI and the
* <code>base_uri</code> parameter.
*
* <p> Now let's look at a sample request to determine how the logic
* classname is resolved. Consider the following request URI:
*
* <pre>
* /whowhere/view/trips.wm
* </pre>
*
* It begins with <code>/whowhere</code> which tells the dispatcher that
* it's part of the <code>whowhere</code> application. That application's
* <code>base_uri</code> is then stripped from the URI leaving
* <code>/view/trips.wm</code>. The slashes are converted into periods to
* map directories to packages, giving us <code>view.trips.wm</code>.
* Finally, the <code>base_pkg</code> is prepended and the trailing
* <code>.wm</code> extension removed.
*
* <p> Thus the class invoked to populate the context for this request is
* <code>whowhere.servlets.view.trips</code> (note that the classname
* <em>is</em> lowercase which is an intentional choice in resolving
* conflicting recommendations that classnames should always start with a
* capital letter and URLs should always be lowercase).
*
* <p> The template used to generate the result is loaded based on the
* full URI, essentially with a call to
* <code>getTemplate("/whowhere/view/trips.wm")</code> in this example.
* This is the place where more sophisticated cobranding support could be
* inserted in the future (ie. if I ever want to use this to develop a
* cobranded web site).
*
* <p><b>Error handling</b><br>
* The dispatcher servlet provides a common error handling mechanism. The
* design is to catch any exceptions thrown by the logic and to convert
* them into friendly error messages that are inserted into the invocation
* context with the key <code>"error"</code> for easy display in the
* resulting web page.
*
* <p> The process of mapping exceptions to friendly error messages is
* done using the {@link ExceptionMap} class. Consult its documentation
* for an explanation of how it works.
*
* @see Logic
* @see ExceptionMap
*/
public class DispatcherServlet extends VelocityServlet
{
    /**
     * We load our velocity properties from the classpath rather than from
     * a file.
     */
    protected Properties loadConfiguration (ServletConfig config)
        throws IOException
    {
        String propsPath = config.getInitParameter(INIT_PROPS_KEY);
        // config util loads properties files from the classpath
        return ConfigUtil.loadProperties(propsPath);
    }

    /**
     * Initialize ourselves and our application.
     */
    public void init (ServletConfig config)
        throws ServletException
    {
        super.init(config);
        // Log.info("Initializing dispatcher servlet.");

        // load up our application configuration
        try {
            String appcl = config.getInitParameter(APP_CLASS_PROPS_KEY);
            if (appcl == null) {
                // no application class was specified; use a vanilla one
                _app = new Application();
            } else {
                Class appclass = Class.forName(appcl);
                _app = (Application)appclass.newInstance();
            }

            // now initialize the application
            String logicPkg = config.getInitParameter(LOGIC_PKG_PROPS_KEY);
            if (StringUtil.blank(logicPkg)) {
                logicPkg = "";
            }
            _app.preInit(logicPkg);
            _app.init(getServletContext());

        } catch (Throwable t) {
            // NOTE(review): _app may remain null here, which will NPE in
            // handleRequest(); preserved the original best-effort behavior
            Log.warning("Error instantiating application.");
            Log.logStackTrace(t);
        }
    }

    /**
     * Clean up after ourselves and our application.
     */
    public void destroy ()
    {
        super.destroy();
        // shutdown our application
        _app.shutdown();
    }

    /**
     * Loads up the template appropriate for this request, locates and
     * invokes any associated logic class and finally returns the prepared
     * template which will be merged with the prepared context.
     *
     * @param req the incoming servlet request.
     * @param rsp the outgoing servlet response.
     * @param ctx the invocation context created by {@link #createContext}.
     *
     * @return the template to merge with the context, or null if a
     * redirect was issued and no template should be rendered.
     */
    public Template handleRequest (HttpServletRequest req,
                                   HttpServletResponse rsp,
                                   Context ctx) throws Exception
    {
        InvocationContext ictx = (InvocationContext)ctx;
        String errmsg = null;

        // first we select the template
        Template tmpl = selectTemplate(ictx);

        // assume an HTML response unless otherwise massaged by the logic
        rsp.setContentType("text/html");

        try {
            // insert the application into the context in case the
            // logic or a tool wishes to make use of it
            ictx.put(APPLICATION_KEY, _app);

            // if the application provides a message manager, we want
            // to put a message resolver in the context as well
            MessageManager msgmgr = _app.getMessageManager();
            if (msgmgr != null) {
                MessageResolver mrslv = new MessageResolver(msgmgr);
                ictx.put(MSGRESOLVER_KEY, mrslv);
            }

            // resolve the appropriate logic class for this URI and
            // execute it if it exists
            String path = req.getServletPath();
            Logic logic = resolveLogic(path);
            if (logic != null) {
                logic.invoke(_app, ictx);
            }

        } catch (RedirectException re) {
            // the logic requested a redirect; send it and skip rendering
            ictx.getResponse().sendRedirect(re.getRedirectURL());
            return null;

        } catch (FriendlyException fe) {
            // grab the error message, we'll deal with it shortly
            errmsg = fe.getMessage();

        } catch (Exception e) {
            errmsg = ExceptionMap.getMessage(e);
            Log.logStackTrace(e);
        }

        // if we have an error message, insert it into the template
        if (errmsg != null) {
            // try using the application to localize the error message
            // before we insert it
            MessageManager msgmgr = _app.getMessageManager();
            if (msgmgr != null) {
                errmsg = msgmgr.getMessage(req, errmsg);
            }
            ictx.put(ERROR_KEY, errmsg);
        }

        return tmpl;
    }

    /**
     * Returns the reference to the application that is handling this
     * request.
     *
     * @return The application in effect for this request or null if no
     * application was selected to handle the request.
     */
    public static Application getApplication (InvocationContext context)
    {
        return (Application)context.get(APPLICATION_KEY);
    }

    /**
     * We override this to create a context of our own devising.
     */
    protected Context createContext (HttpServletRequest req,
                                     HttpServletResponse rsp)
    {
        return new InvocationContext(req, rsp);
    }

    /**
     * This method is called to select the appropriate template for this
     * request. The default implementation simply loads the template using
     * Velocity's default template loading services based on the URI
     * provided in the request.
     *
     * @param ctx The context of this request.
     *
     * @return The template to be used in generating the response.
     */
    protected Template selectTemplate (InvocationContext ctx)
        throws ResourceNotFoundException, ParseErrorException, Exception
    {
        String path = ctx.getRequest().getServletPath();
        // Log.info("Loading template [path=" + path + "].");
        return getTemplate(path);
    }

    /**
     * This method is called to select the appropriate logic for this
     * request URI.
     *
     * @return The logic to be used in generating the response or null if
     * no logic could be matched.
     */
    protected Logic resolveLogic (String path)
    {
        // map the URI to a logic classname
        String lclass = _app.generateClass(path);

        // the servlet container dispatches requests concurrently, so we
        // must synchronize access to the logic cache; an unsynchronized
        // HashMap can be corrupted by concurrent put() calls
        synchronized (_logic) {
            // look for a cached logic instance
            Logic logic = (Logic)_logic.get(lclass);

            if (logic == null) {
                try {
                    Class pcl = Class.forName(lclass);
                    logic = (Logic)pcl.newInstance();

                } catch (ClassNotFoundException cnfe) {
                    // nothing interesting to report

                } catch (Throwable t) {
                    Log.warning("Unable to instantiate logic for application " +
                                "[path=" + path + ", lclass=" + lclass +
                                ", error=" + t + "].");
                }

                // if something failed, use a dummy in its place so that we
                // don't sit around all day freaking out about our inability
                // to instantiate the proper logic class
                if (logic == null) {
                    logic = new DummyLogic();
                }

                // cache the resolved logic instance
                _logic.put(lclass, logic);
            }

            return logic;
        }
    }

    /** The application being served by this dispatcher servlet. */
    protected Application _app;

    /** A table of resolved logic instances, mapped by classname. Guard
     * all access with {@code synchronized (_logic)}. */
    protected HashMap _logic = new HashMap();

    /** This is the key used in the context for error messages. */
    protected static final String ERROR_KEY = "error";

    /**
     * This is the key used to store a reference back to the dispatcher
     * servlet in our invocation context.
     */
    protected static final String APPLICATION_KEY = "%_app_%";

    /**
     * This is the key used to store the message resolver in the context.
     */
    protected static final String MSGRESOLVER_KEY = "i18n";

    /** The servlet parameter key specifying the application class. */
    protected static final String APP_CLASS_PROPS_KEY = "app_class";

    /** The servlet parameter key specifying the base logic package. */
    protected static final String LOGIC_PKG_PROPS_KEY = "logic_package";
}
| projects/samskivert/src/java/com/samskivert/velocity/DispatcherServlet.java | //
// $Id: DispatcherServlet.java,v 1.1 2001/10/31 09:44:22 mdb Exp $
//
// samskivert library - useful routines for java programs
// Copyright (C) 2001 Michael Bayne
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.velocity;
import java.io.IOException;
import java.util.HashMap;
import java.util.Properties;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.Template;
import org.apache.velocity.context.Context;
import org.apache.velocity.exception.ParseErrorException;
import org.apache.velocity.exception.ResourceNotFoundException;
import org.apache.velocity.servlet.VelocityServlet;
import com.samskivert.Log;
import com.samskivert.servlet.MessageManager;
import com.samskivert.servlet.RedirectException;
import com.samskivert.servlet.util.ExceptionMap;
import com.samskivert.util.ConfigUtil;
import com.samskivert.util.StringUtil;
/**
* The dispatcher servlet builds upon Velocity's architecture. It does so
* in the following ways:
*
* <ul>
* <li> It defines the notion of a logic object which populates the
* context with data to be used to satisfy a particular request. The logic
* is not a servlet and is therefore limited in what it can do while
* populating data. Experience dictates that ultimate flexibility leads to
* bad design decisions and that this is a place where that sort of thing
* can be comfortably nipped in the bud. <br><br>
*
* <li> It allows template files to be referenced directly in the URL
* while maintaining the ability to choose a cobranded template based on
* information in the request. The URI is mapped to a servlet based on
* some simple mapping rules. This provides template designers with a
* clearer understanding of the structure of a web application as well as
* with an easy way to test their templates in the absence of an
* associated servlet. <br><br>
*
* <li> It provides a common error handling paradigm that simplifies the
* task of authoring web applications.
* </ul>
*
* <p><b>URI to servlet mapping</b><br>
* The mapping process allows the Velocity framework to be invoked for all
* requests ending in a particular file extension (usually
* <code>.wm</code>). It is necessary to instruct your servlet engine of
* choice to invoke the <code>DispatcherServlet</code> for all requests
* ending in that extension. For Apache/JServ this looks something like
* this:
*
* <pre>
* ApJServAction .wm /servlets/com.samskivert.velocity.Dispatcher
* </pre>
*
* The request URI then defines the path of the template that will be used
* to satisfy the request. To understand how code is selected to go along
* with the request, let's look at an example. Consider the following
* configuration:
*
* <pre>
* applications=whowhere
* whowhere.base_uri=/whowhere
* whowhere.base_pkg=whowhere.logic
* </pre>
*
* This defines an application identified as <code>whowhere</code>. An
* application is defined by three parameters, the application identifier,
* the <code>base_uri</code>, and the <code>base_pkg</code>. The
* <code>base_uri</code> defines the prefix shared by all pages served by
* the application and which serves to identify which application to
* invoke when processing a request. The <code>base_pkg</code> is used to
* construct the logic classname based on the URI and the
* <code>base_uri</code> parameter.
*
* <p> Now let's look at a sample request to determine how the logic
* classname is resolved. Consider the following request URI:
*
* <pre>
* /whowhere/view/trips.wm
* </pre>
*
* It begins with <code>/whowhere</code> which tells the dispatcher that
* it's part of the <code>whowhere</code> application. That application's
* <code>base_uri</code> is then stripped from the URI leaving
* <code>/view/trips.wm</code>. The slashes are converted into periods to
* map directories to packages, giving us <code>view.trips.wm</code>.
* Finally, the <code>base_pkg</code> is prepended and the trailing
* <code>.wm</code> extension removed.
*
* <p> Thus the class invoked to populate the context for this request is
* <code>whowhere.servlets.view.trips</code> (note that the classname
* <em>is</em> lowercase which is an intentional choice in resolving
* conflicting recommendations that classnames should always start with a
* capital letter and URLs should always be lowercase).
*
* <p> The template used to generate the result is loaded based on the
* full URI, essentially with a call to
* <code>getTemplate("/whowhere/view/trips.wm")</code> in this example.
* This is the place where more sophisticated cobranding support could be
* inserted in the future (ie. if I ever want to use this to develop a
* cobranded web site).
*
* <p><b>Error handling</b><br>
* The dispatcher servlet provides a common error handling mechanism. The
* design is to catch any exceptions thrown by the logic and to convert
* them into friendly error messages that are inserted into the invocation
* context with the key <code>"error"</code> for easy display in the
* resulting web page.
*
* <p> The process of mapping exceptions to friendly error messages is
* done using the {@link ExceptionMap} class. Consult its documentation
* for an explanation of how it works.
*
* @see Logic
* @see ExceptionMap
*/
public class DispatcherServlet extends VelocityServlet
{
/**
* We load our velocity properties from the classpath rather than from
* a file.
*/
protected Properties loadConfiguration (ServletConfig config)
throws IOException
{
String propsPath = config.getInitParameter(INIT_PROPS_KEY);
// config util loads properties files from the classpath
return ConfigUtil.loadProperties(propsPath);
}
/**
* Initialize ourselves and our application.
*/
public void init (ServletConfig config)
throws ServletException
{
super.init(config);
// Log.info("Initializing dispatcher servlet.");
// load up our application configuration
try {
String appcl = config.getInitParameter(APP_CLASS_PROPS_KEY);
if (appcl == null) {
_app = new Application();
} else {
Class appclass = Class.forName(appcl);
_app = (Application)appclass.newInstance();
}
// now initialize the applicaiton
String logicPkg = config.getInitParameter(LOGIC_PKG_PROPS_KEY);
if (StringUtil.blank(logicPkg)) {
logicPkg = "";
}
_app.preInit(logicPkg);
_app.init(getServletContext());
} catch (Throwable t) {
Log.warning("Error instantiating application.");
Log.logStackTrace(t);
}
}
/**
* Clean up after ourselves and our application.
*/
public void destroy ()
{
super.destroy();
// shutdown our application
_app.shutdown();
}
/**
* Loads up the template appropriate for this request, locates and
* invokes any associated logic class and finally returns the prepared
* template which will be merged with the prepared context.
*/
public Template handleRequest (HttpServletRequest req,
HttpServletResponse rsp,
Context ctx) throws Exception
{
InvocationContext ictx = (InvocationContext)ctx;
String errmsg = null;
// first we select the template
Template tmpl = selectTemplate(ictx);
// assume an HTML response unless otherwise massaged by the logic
rsp.setContentType("text/html");
try {
// insert the application into the context in case the
// logic or a tool wishes to make use of it
ictx.put(APPLICATION_KEY, _app);
// if the application provides a message manager, we want
// to put a message resolver in the context as well
MessageManager msgmgr = _app.getMessageManager();
if (msgmgr != null) {
MessageResolver mrslv = new MessageResolver(msgmgr);
ictx.put(MSGRESOLVER_KEY, mrslv);
}
// resolve the appropriate logic class for this URI and
// execute it if it exists
String path = req.getServletPath();
Logic logic = resolveLogic(path);
if (logic != null) {
logic.invoke(_app, ictx);
}
} catch (RedirectException re) {
ictx.getResponse().sendRedirect(re.getRedirectURL());
return null;
} catch (FriendlyException fe) {
// grab the error message, we'll deal with it shortly
errmsg = fe.getMessage();
} catch (Exception e) {
errmsg = ExceptionMap.getMessage(e);
Log.logStackTrace(e);
}
// if we have an error message, insert it into the template
if (errmsg != null) {
// try using the application to localize the error message
// before we insert it
MessageManager msgmgr = _app.getMessageManager();
if (msgmgr != null) {
errmsg = msgmgr.getMessage(req, errmsg);
}
ictx.put(ERROR_KEY, errmsg);
}
return tmpl;
}
/**
* Returns the reference to the application that is handling this
* request.
*
* @return The application in effect for this request or null if no
* application was selected to handle the request.
*/
public static Application getApplication (InvocationContext context)
{
return (Application)context.get(APPLICATION_KEY);
}
/**
* We override this to create a context of our own devising.
*/
protected Context createContext (HttpServletRequest req,
HttpServletResponse rsp)
{
return new InvocationContext(req, rsp);
}
/**
* This method is called to select the appropriate template for this
* request. The default implementation simply loads the template using
* Velocity's default template loading services based on the URI
* provided in the request.
*
* @param ctx The context of this request.
*
* @return The template to be used in generating the response.
*/
protected Template selectTemplate (InvocationContext ctx)
throws ResourceNotFoundException, ParseErrorException, Exception
{
String path = ctx.getRequest().getServletPath();
// Log.info("Loading template [path=" + path + "].");
return getTemplate(path);
}
/**
* This method is called to select the appropriate logic for this
* request URI.
*
* @return The logic to be used in generating the response or null if
* no logic could be matched.
*/
protected Logic resolveLogic (String path)
{
// look for a cached logic instance
String lclass = _app.generateClass(path);
Logic logic = (Logic)_logic.get(lclass);
if (logic == null) {
try {
Class pcl = Class.forName(lclass);
logic = (Logic)pcl.newInstance();
} catch (ClassNotFoundException cnfe) {
// nothing interesting to report
} catch (Throwable t) {
Log.warning("Unable to instantiate logic for application " +
"[path=" + path + ", lclass=" + lclass +
", error=" + t + "].");
}
// if something failed, use a dummy in it's place so that we
// don't sit around all day freaking out about our inability
// to instantiate the proper logic class
if (logic == null) {
logic = new DummyLogic();
}
// cache the resolved logic instance
_logic.put(lclass, logic);
}
return logic;
}
/** The application being served by this dispatcher servlet. */
protected Application _app;
/** A table of resolved logic instances. */
protected HashMap _logic = new HashMap();
/** This is the key used in the context for error messages. */
protected static final String ERROR_KEY = "error";
/**
* This is the key used to store a reference back to the dispatcher
* servlet in our invocation context.
*/
protected static final String APPLICATION_KEY = "%_app_%";
/**
* This is the key used to store the message resolver in the context.
*/
protected static final String MSGRESOLVER_KEY = "i18n";
/** The servlet parameter key specifying the application class. */
protected static final String APP_CLASS_PROPS_KEY = "app_class";
/** The servlet parameter key specifying the base logic package. */
protected static final String LOGIC_PKG_PROPS_KEY = "logic_package";
}
| Needed a FriendlyException import.
git-svn-id: 64ebf368729f38804935acb7146e017e0f909c6b@398 6335cc39-0255-0410-8fd6-9bcaacd3b74c
| projects/samskivert/src/java/com/samskivert/velocity/DispatcherServlet.java | Needed a FriendlyException import. | <ide><path>rojects/samskivert/src/java/com/samskivert/velocity/DispatcherServlet.java
<ide> //
<del>// $Id: DispatcherServlet.java,v 1.1 2001/10/31 09:44:22 mdb Exp $
<add>// $Id: DispatcherServlet.java,v 1.2 2001/10/31 09:45:23 mdb Exp $
<ide> //
<ide> // samskivert library - useful routines for java programs
<ide> // Copyright (C) 2001 Michael Bayne
<ide> import com.samskivert.servlet.MessageManager;
<ide> import com.samskivert.servlet.RedirectException;
<ide> import com.samskivert.servlet.util.ExceptionMap;
<add>import com.samskivert.servlet.util.FriendlyException;
<ide>
<ide> import com.samskivert.util.ConfigUtil;
<ide> import com.samskivert.util.StringUtil; |
|
Java | mit | 7081309e4b5d5a69e39c5b5d179fd7cdfe53f77a | 0 | CS2103AUG2016-W14-C1/main | package seedu.manager.logic.commands;
import java.util.Set;
import seedu.manager.model.activity.AMDate;
/**
 * Finds and lists all activities whose names contain any of the argument
 * keywords, optionally restricted to a date/time range and/or a
 * completion status. Keyword matching is case sensitive.
 */
public class SearchCommand extends Command {

    public static final String COMMAND_WORD = "search";

    public static final String MESSAGE_USAGE = COMMAND_WORD + ": Searches all activities whose names contain any of "
            + "the specified keywords (case-sensitive) and displays them as a list with index numbers.\n"
            + "Parameters: KEYWORD [MORE_KEYWORDS]...\n"
            + "Example: " + COMMAND_WORD + " alice bob charlie";

    /** Keywords that activity names are matched against. */
    private final Set<String> keywords;
    /** Start of the date/time window to filter on, or null if unset. */
    private AMDate dateTime;
    /** End of the date/time window to filter on, or null if unset. */
    private AMDate endDateTime;
    /** Lower-cased status filter ("pending" or other), or null if unset. */
    private String status;

    public SearchCommand(Set<String> keywords) {
        this.keywords = keywords;
        this.dateTime = null;
        this.endDateTime = null;
        this.status = null;
    }

    /**
     * Add the start/end dateTime range for search, use default end (end of the same day)
     *
     * @param searchDateTime specified by user
     */
    public void addDateTimeRange(String searchDateTime) {
        addDateTimeRange(searchDateTime, searchDateTime);
    }

    /**
     * Add the start/end dateTime range for search. The range is widened to
     * cover the start of the first day through the end of the last day.
     *
     * @param searchDateTime, searchEndDateTime specified by user
     */
    public void addDateTimeRange(String searchDateTime, String searchEndDateTime) {
        this.dateTime = new AMDate(searchDateTime);
        this.endDateTime = new AMDate(searchEndDateTime);
        this.dateTime.toStartOfDay();
        this.endDateTime.toEndOfDay();
    }

    /**
     * Add the status for search
     *
     * @param status specified by user
     */
    public void addStatus(String status) {
        this.status = status.toLowerCase();
    }

    @Override
    public CommandResult execute() {
        model.updateFilteredActivityList(keywords);

        if (this.dateTime != null && this.endDateTime != null) {
            model.updateFilteredActivityList(dateTime, endDateTime);
        }

        if (this.status != null) {
            // "pending" means not completed; any other status value is
            // treated as completed (matches the original if/else logic)
            boolean isCompleted = !"pending".equals(this.status);
            model.updateFilteredActivityList(isCompleted);
        }

        return new CommandResult(getMessageForActivityListShownSummary(model.getFilteredActivityList().size()));
    }
}
| src/main/java/seedu/manager/logic/commands/SearchCommand.java | package seedu.manager.logic.commands;
import java.util.Set;
import seedu.manager.model.activity.AMDate;
/**
* Finds and lists all persons in address book whose name contains any of the argument keywords.
* Keyword matching is case sensitive.
*/
public class SearchCommand extends Command {
public static final String COMMAND_WORD = "search";
public static final String MESSAGE_USAGE = COMMAND_WORD + ": Searches all activities whose names contain any of "
+ "the specified keywords (case-sensitive) and displays them as a list with index numbers.\n"
+ "Parameters: KEYWORD [MORE_KEYWORDS]...\n"
+ "Example: " + COMMAND_WORD + " alice bob charlie";
private final Set<String> keywords;
private AMDate dateTime;
private AMDate endDateTime;
public SearchCommand(Set<String> keywords) {
this.keywords = keywords;
this.dateTime = null;
this.endDateTime = null;
}
/**
* Add the start/end dateTime range for search, use default end (end of the same day)
*
* @param searchDateTime specified by user
*/
public void addDateTimeRange(String searchDateTime) {
addDateTimeRange(searchDateTime, searchDateTime);
}
/**
* Add the start/end dateTime range for search
*
* @param searchDateTime, searchEndDateTime specified by user
*/
public void addDateTimeRange(String searchDateTime, String searchEndDateTime) {
this.dateTime = new AMDate(searchDateTime);
this.endDateTime = new AMDate(searchEndDateTime);
this.dateTime.toStartOfDay();
this.endDateTime.toEndOfDay();
}
@Override
public CommandResult execute() {
model.updateFilteredActivityList(keywords);
if (this.dateTime != null && this.endDateTime != null) {
model.updateFilteredActivityList(dateTime, endDateTime);
}
return new CommandResult(getMessageForActivityListShownSummary(model.getFilteredActivityList().size()));
}
}
| Added search by status
| src/main/java/seedu/manager/logic/commands/SearchCommand.java | Added search by status | <ide><path>rc/main/java/seedu/manager/logic/commands/SearchCommand.java
<ide> private final Set<String> keywords;
<ide> private AMDate dateTime;
<ide> private AMDate endDateTime;
<add> private String status;
<ide>
<ide> public SearchCommand(Set<String> keywords) {
<ide> this.keywords = keywords;
<ide> this.dateTime = null;
<ide> this.endDateTime = null;
<add> this.status = null;
<ide> }
<ide>
<ide> /**
<ide> this.dateTime.toStartOfDay();
<ide> this.endDateTime.toEndOfDay();
<ide> }
<del>
<add>
<add> /**
<add> * Add the status for search
<add> *
<add> * @param status specified by user
<add> */
<add> public void addStatus(String status) {
<add> this.status = status.toLowerCase();
<add> }
<add>
<ide> @Override
<ide> public CommandResult execute() {
<ide> model.updateFilteredActivityList(keywords);
<ide> if (this.dateTime != null && this.endDateTime != null) {
<ide> model.updateFilteredActivityList(dateTime, endDateTime);
<ide> }
<add>
<add> if (this.status != null) {
<add> boolean isCompleted;
<add> if ((this.status).equals("pending")) {
<add> isCompleted = false;
<add> } else {
<add> isCompleted = true;
<add> }
<add> model.updateFilteredActivityList(isCompleted);
<add> }
<add>
<ide> return new CommandResult(getMessageForActivityListShownSummary(model.getFilteredActivityList().size()));
<ide> }
<ide> |
|
Java | mit | ae30be551fcd511f12a6f192b4ea67a40c766f18 | 0 | jifalops/wsnlocalize | package com.jifalops.wsnlocalize.file;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Reads and writes lines of text to a single file on a dedicated
 * background {@link HandlerThread}. Completion is reported through
 * {@link IoCallbacks} on that background thread (never the main thread).
 */
public class TextReaderWriter {
    public interface IoCallbacks {
        /** Called on non-main thread. */
        void onReadCompleted(TextReaderWriter rw, List<String> lines);
        /** Called on non-main thread. */
        void onWriteCompleted(TextReaderWriter rw, int linesWritten);
    }

    final File file;
    final HandlerThread thread;
    final Handler handler;
    // Bound to the looper of the thread that constructed this instance.
    protected final Handler creationThreadHandler = new Handler();
    IoCallbacks callbacks;

    /**
     * Subclass constructor; the subclass must call {@link #setIoCallbacks}
     * before triggering any I/O if it wants completion notifications.
     */
    protected TextReaderWriter(File file) {
        this(file, null);
    }

    public TextReaderWriter(File file, IoCallbacks callbacks) {
        this.file = file;
        this.callbacks = callbacks;
        thread = new HandlerThread(getClass().getName());
        thread.start();
        handler = new Handler(thread.getLooper());
    }

    protected void setIoCallbacks(IoCallbacks callbacks) {
        this.callbacks = callbacks;
    }

    /**
     * Asynchronously reads lines from the file, reporting the result via
     * {@link IoCallbacks#onReadCompleted}.
     * <p>
     * NOTE(review): reading stops at the first empty line, not at EOF
     * (the {@code line.length() > 0} condition) — presumably the files
     * written here never contain blank lines mid-file; confirm before
     * relying on this for arbitrary content.
     *
     * @return true if the file exists and a read was scheduled,
     *         false otherwise.
     */
    public boolean readLines() {
        if (file.exists()) {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    List<String> lines = new ArrayList<>();
                    BufferedReader r = null;
                    try {
                        r = new BufferedReader(new FileReader(file));
                        String line;
                        while ((line = r.readLine()) != null && line.length() > 0) {
                            lines.add(line);
                        }
                    } catch (FileNotFoundException e) {
                        // ignored -- existence was checked, but the file may
                        // have been removed; report whatever was read (nothing)
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        try {
                            if (r != null) r.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                    // guard against subclasses that have not yet installed
                    // callbacks (the protected constructor allows null)
                    if (callbacks != null) {
                        callbacks.onReadCompleted(TextReaderWriter.this, lines);
                    }
                }
            });
            return true;
        }
        return false;
    }

    /**
     * Asynchronously writes the given lines to the file, reporting the
     * number of lines written via {@link IoCallbacks#onWriteCompleted}.
     *
     * @param lines  lines to write (a newline is appended to each).
     * @param append true to append to the file, false to overwrite it.
     */
    public void writeLines(final List<String> lines, final boolean append) {
        handler.post(new Runnable() {
            @Override
            public void run() {
                int count = 0;
                BufferedWriter w = null;
                try {
                    w = new BufferedWriter(new FileWriter(file, append));
                    for (String line : lines) {
                        w.write(line);
                        w.newLine();
                        ++count;
                    }
                    w.flush();
                } catch (FileNotFoundException e) {
                    // ignored -- best-effort write
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    try {
                        if (w != null) w.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                // guard against subclasses that have not yet installed
                // callbacks (the protected constructor allows null)
                if (callbacks != null) {
                    callbacks.onWriteCompleted(TextReaderWriter.this, count);
                }
            }
        });
    }

    /**
     * Asynchronously truncates the file to zero length by opening it for
     * (non-append) writing and immediately closing it. No callback fires.
     */
    public void truncate() {
        handler.post(new Runnable() {
            @Override
            public void run() {
                BufferedWriter w = null;
                try {
                    w = new BufferedWriter(new FileWriter(file, false));
                } catch (FileNotFoundException e) {
                    // ignored -- best-effort truncate
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    try {
                        if (w != null) w.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        });
    }

    /**
     * Cancels any pending work and shuts down the background thread.
     * The instance must not be used after this call.
     */
    public void close() {
        handler.removeCallbacksAndMessages(null);
        if (thread.getState() != Thread.State.NEW) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
                // quitSafely() processes already-queued messages first
                thread.quitSafely();
            } else {
                thread.quit();
            }
        }
    }
}
| app/src/main/java/com/jifalops/wsnlocalize/file/TextReaderWriter.java | package com.jifalops.wsnlocalize.file;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
*
*/
public class TextReaderWriter {
public interface IoCallbacks {
/** Called on non-main thread. */
void onReadCompleted(TextReaderWriter rw, List<String> lines);
/** Called on non-main thread. */
void onWriteCompleted(TextReaderWriter rw, int linesWritten);
}
final File file;
final HandlerThread thread;
final Handler handler;
protected final Handler creationThreadHandler = new Handler();
IoCallbacks callbacks;
protected TextReaderWriter(File file) {
this(file, null);
}
public TextReaderWriter(File file, IoCallbacks callbacks) {
this.file = file;
this.callbacks = callbacks;
thread = new HandlerThread(getClass().getName());
thread.start();
handler = new Handler(thread.getLooper());
}
protected void setIoCallbacks(IoCallbacks callbacks) {
this.callbacks = callbacks;
}
public boolean readLines() {
if (file.exists()) {
handler.post(new Runnable() {
@Override
public void run() {
List<String> lines = new ArrayList<>();
BufferedReader r = null;
try {
r = new BufferedReader(new FileReader(file));
String line;
while ((line = r.readLine()) != null && line.length() > 0) {
lines.add(line);
}
} catch (FileNotFoundException e) {
// ignored
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (r != null) r.close();
} catch (IOException e) {
e.printStackTrace();
}
}
callbacks.onReadCompleted(TextReaderWriter.this, lines);
}
});
return true;
}
return false;
}
public void writeLines(final List<String> lines, final boolean append) {
handler.post(new Runnable() {
@Override
public void run() {
int count = 0;
BufferedWriter w = null;
try {
w = new BufferedWriter(new FileWriter(file, append));
for (String line : lines) {
w.write(line);
w.newLine();
++count;
}
} catch (FileNotFoundException e) {
// ignored
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (w != null) w.close();
} catch (IOException e) {
e.printStackTrace();
}
}
callbacks.onWriteCompleted(TextReaderWriter.this, count);
}
});
}
public void truncate() {
handler.post(new Runnable() {
@Override
public void run() {
BufferedWriter w = null;
try {
w = new BufferedWriter(new FileWriter(file, false));
} catch (FileNotFoundException e) {
// ignored
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (w != null) w.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
}
public void close() {
handler.removeCallbacksAndMessages(null);
if (thread.getState() != Thread.State.NEW) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
thread.quitSafely();
} else {
thread.quit();
}
}
}
}
| Flush writer after write
| app/src/main/java/com/jifalops/wsnlocalize/file/TextReaderWriter.java | Flush writer after write | <ide><path>pp/src/main/java/com/jifalops/wsnlocalize/file/TextReaderWriter.java
<ide> w.newLine();
<ide> ++count;
<ide> }
<add> w.flush();
<ide> } catch (FileNotFoundException e) {
<ide> // ignored
<ide> } catch (IOException e) { |
|
Java | bsd-3-clause | 7215e5edb00ec15f1fa1f0e57b8db994418e18f4 | 0 | NCIP/cananolab,NCIP/cananolab,NCIP/cananolab | package gov.nih.nci.cananolab.restful.view.edit;
import gov.nih.nci.cananolab.domain.common.Keyword;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.dto.common.AccessibilityBean;
import gov.nih.nci.cananolab.dto.common.DataReviewStatusBean;
import gov.nih.nci.cananolab.dto.common.PointOfContactBean;
import gov.nih.nci.cananolab.dto.particle.SampleBean;
import gov.nih.nci.cananolab.restful.sample.InitSampleSetup;
import gov.nih.nci.cananolab.service.curation.CurationService;
import gov.nih.nci.cananolab.service.sample.SampleService;
import gov.nih.nci.cananolab.service.security.SecurityService;
import gov.nih.nci.cananolab.service.security.UserBean;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
public class SampleEditGeneralBean {
private static Logger logger = Logger.getLogger(SampleEditGeneralBean.class);
String sampleName;
String newSampleName;
long sampleId;
boolean userIsCurator;
List<SimplePointOfContactBean> pointOfContacts;
List<String> keywords = new ArrayList<String>();
Map<String, List<SimpleAccessBean>> accessToSample;
List<AccessibilityBean> groupAccesses;// = new ArrayList<AccessibilityBean>();
List<AccessibilityBean> userAccesses; // = new ArrayList<AccessibilityBean>();
AccessibilityBean theAccess = new AccessibilityBean();
SimpleDataAvailabilityBean dataAvailability;
//These are lookups needed for dropdown lists
List<String> organizationNamesForUser;
List<String> contactRoles;
List<String> allGroupNames;
Map<String, String> filteredUsers;
Map<String, String> roleNames;
boolean showReviewButton;
List<String> errors = new ArrayList<String>();
public List<AccessibilityBean> getGroupAccesses() {
return groupAccesses;
}
public void setGroupAccesses(List<AccessibilityBean> groupAccesses) {
this.groupAccesses = groupAccesses;
}
public List<AccessibilityBean> getUserAccesses() {
return userAccesses;
}
public void setUserAccesses(List<AccessibilityBean> userAccesses) {
this.userAccesses = userAccesses;
}
public AccessibilityBean getTheAccess() {
return theAccess;
}
public void setTheAccess(AccessibilityBean theAccess) {
this.theAccess = theAccess;
}
public String getNewSampleName() {
return newSampleName;
}
public void setNewSampleName(String newSampleName) {
this.newSampleName = newSampleName;
}
public Map<String, String> getRoleNames() {
return roleNames;
}
public void setRoleNames(Map<String, String> roleNames) {
this.roleNames = roleNames;
}
public String getSampleName() {
return sampleName;
}
public void setSampleName(String sampleName) {
this.sampleName = sampleName;
}
public long getSampleId() {
return sampleId;
}
public void setSampleId(long sampleId) {
this.sampleId = sampleId;
}
public List<SimplePointOfContactBean> getPointOfContacts() {
return pointOfContacts;
}
public void setPointOfContacts(List<SimplePointOfContactBean> pointOfContacts) {
this.pointOfContacts = pointOfContacts;
}
public List<String> getKeywords() {
return keywords;
}
public void setKeywords(List<String> keywords) {
this.keywords = keywords;
}
public Map<String, List<SimpleAccessBean>> getAccessToSample() {
return accessToSample;
}
public void setAccessToSample(Map<String, List<SimpleAccessBean>> accessToSample) {
this.accessToSample = accessToSample;
}
public SimpleDataAvailabilityBean getDataAvailability() {
return dataAvailability;
}
public void setDataAvailability(SimpleDataAvailabilityBean dataAvailability) {
this.dataAvailability = dataAvailability;
}
public boolean isUserIsCurator() {
return userIsCurator;
}
public void setUserIsCurator(boolean userIsCurator) {
this.userIsCurator = userIsCurator;
}
public boolean isShowReviewButton() {
return showReviewButton;
}
public void setShowReviewButton(boolean showReviewButton) {
this.showReviewButton = showReviewButton;
}
public List<String> getAllGroupNames() {
return allGroupNames;
}
public void setAllGroupNames(List<String> allGroupNames) {
this.allGroupNames = allGroupNames;
}
public Map<String, String> getFilteredUsers() {
return filteredUsers;
}
public void setFilteredUsers(Map<String, String> filteredUsers) {
this.filteredUsers = filteredUsers;
}
public List<String> getErrors() {
return errors;
}
public List<String> getOrganizationNamesForUser() {
return organizationNamesForUser;
}
public void setOrganizationNamesForUser(List<String> organizationNamesForUser) {
this.organizationNamesForUser = organizationNamesForUser;
}
public List<String> getContactRoles() {
return contactRoles;
}
public void setContactRoles(List<String> contactRoles) {
this.contactRoles = contactRoles;
}
public void setErrors(List<String> errors) {
this.errors = errors;
}
public void transferSampleBeanData(HttpServletRequest request,
CurationService curatorService, SampleBean sampleBean, String[] availableEntityNames)
throws Exception {
this.sampleName = sampleBean.getDomain().getName();
this.sampleId = sampleBean.getDomain().getId();
this.userIsCurator = sampleBean.getUser().isCurator();
transferPointOfContactData(sampleBean);
SortedSet<String> keyws = sampleBean.getKeywordSet();
this.keywords = new ArrayList<String>(sampleBean.getKeywordSet());
transferAccessibilityData(sampleBean);
transferDataAvailability(request, sampleBean, availableEntityNames);
setupLookups(request);
setupGroupNamesForNewAccess(request);
setupFilteredUsersParamForNewAccess(request, sampleBean.getDomain().getCreatedBy());
setupReviewButton(request, curatorService, sampleBean);
setupRoleNameMap();
}
protected void setupRoleNameMap() {
this.roleNames = new HashMap<String, String>();
roleNames.put(AccessibilityBean.CSM_READ_ROLE, AccessibilityBean.R_ROLE_DISPLAY_NAME);
roleNames.put(AccessibilityBean.CSM_CURD_ROLE, AccessibilityBean.CURD_ROLE_DISPLAY_NAME);
}
/**
* Logic moved from SampleAction.setUpSubmitForReviewButton()
* @param request
* @param curatorService
* @param sampleBean
* @throws Exception
*/
protected void setupReviewButton(HttpServletRequest request, CurationService curatorService, SampleBean sampleBean)
throws Exception {
boolean publicData = sampleBean.getPublicStatus();
if (!publicData) {
UserBean user = (UserBean) request.getSession()
.getAttribute("user");
//SecurityService securityService = getSecurityServiceFromSession(request);
SecurityService securityService = (SecurityService) request
.getSession().getAttribute("securityService");
DataReviewStatusBean reviewStatus = curatorService
.findDataReviewStatusBeanByDataId(sampleBean.getDomain().getId()
.toString(), securityService);
if (!user.isCurator()
&& (reviewStatus == null || reviewStatus != null
&& reviewStatus.getReviewStatus().equals(
DataReviewStatusBean.RETRACTED_STATUS))) {
this.showReviewButton = true;
} else {
this.showReviewButton = false;
}
} else {
this.showReviewButton = false;
}
}
/**
* Logic for DWRAccessibilityManager.getMatchedUsers()
*
* @param request
* @param dataOwner
*/
protected void setupFilteredUsersParamForNewAccess(HttpServletRequest request, String dataOwner) {
try {
SampleService sampleService = (SampleService) request.getSession().getAttribute("sampleService");
List<UserBean> matchedUsers = sampleService.findUserBeans("");
List<UserBean> updatedUsers = new ArrayList<UserBean>(matchedUsers);
// remove current user from the list
UserBean user = (UserBean) request.getSession().getAttribute("user");
updatedUsers.remove(user);
// remove data owner from the list if owner is not the current user
if (!dataOwner.equalsIgnoreCase(user.getLoginName())) {
for (UserBean userBean : matchedUsers) {
if (userBean.getLoginName().equalsIgnoreCase(dataOwner)) {
updatedUsers.remove(userBean);
break;
}
}
}
// exclude curators;
SecurityService securityService = (SecurityService) request
.getSession().getAttribute("securityService");
List<String> curators = securityService
.getUserNames(AccessibilityBean.CSM_DATA_CURATOR);
for (UserBean userBean : matchedUsers) {
for (String curator : curators) {
if (userBean.getLoginName().equalsIgnoreCase(curator)) {
updatedUsers.remove(userBean);
}
}
}
UserBean[] users = updatedUsers.toArray(new UserBean[updatedUsers.size()]);
this.filteredUsers = new HashMap<String, String>();
for (UserBean u :users) {
this.filteredUsers.put(u.getLoginName(), u.getDisplayName());
}
} catch (Exception e) {
logger.error("Got error while setting up params for adding access");
}
}
protected void setupGroupNamesForNewAccess(HttpServletRequest request) {
try {
SampleService sampleService = (SampleService) request.getSession().getAttribute("sampleService");
this.allGroupNames = sampleService.findGroupNames("");
} catch (Exception e) {
logger.error("Got error while setting up params for adding access");
}
}
protected void setupFilteredUsersForNewAccess(HttpServletRequest request) {
try {
SampleService sampleService = (SampleService) request.getSession().getAttribute("sampleService");
UserBean user = (UserBean) request.getSession().getAttribute("user");
List<UserBean> matchedUsers = sampleService.findUserBeans("");
List<UserBean> updatedUsers = new ArrayList<UserBean>(matchedUsers);
// remove current user from the list
updatedUsers.remove(user);
// // remove data owner from the list if owner is not the current user
// if (!dataOwner.equalsIgnoreCase(user.getLoginName())) {
// for (UserBean userBean : matchedUsers) {
// if (userBean.getLoginName().equalsIgnoreCase(dataOwner)) {
// updatedUsers.remove(userBean);
// break;
// }
// }
// }
// // exclude curators;
// List<String> curators = securityService
// .getUserNames(AccessibilityBean.CSM_DATA_CURATOR);
// for (UserBean userBean : matchedUsers) {
// for (String curator : curators) {
// if (userBean.getLoginName().equalsIgnoreCase(curator)) {
// updatedUsers.remove(userBean);
// }
// }
// }
//
// UserBean[] users = updatedUsers.toArray(new UserBean[updatedUsers.size()]);
//
// return updatedUsers.toArray(new UserBean[updatedUsers.size()]);
} catch (Exception e) {
logger.error("Got error while setting up params for adding access");
}
}
/**
*
* @param request
*/
public void setupLookups(HttpServletRequest request) {
try {
InitSampleSetup.getInstance().setPOCDropdowns(request);
SortedSet<String> organizationNames = (SortedSet<String>)request.getSession().getAttribute("allOrganizationNames");
this.organizationNamesForUser = new ArrayList<String>(organizationNames);
SortedSet<String> roles = (SortedSet<String>)request.getSession().getAttribute("contactRoles");
this.contactRoles = new ArrayList<String>(roles);
} catch (Exception e) {
logger.error("Got error while setting up POC lookup for sample edit");
}
}
/**
* Replicate logic in bodyManageAccessibility.jsp
*
* @param sampleBean
*/
protected void transferAccessibilityData(SampleBean sampleBean) {
accessToSample = new HashMap<String, List<SimpleAccessBean>>();
List<AccessibilityBean> groupAccess = sampleBean.getGroupAccesses();
this.groupAccesses = groupAccess;
if (groupAccess != null) {
List<SimpleAccessBean> groupList = new ArrayList<SimpleAccessBean>();
for (AccessibilityBean accBean : groupAccess) {
String groupName = accBean.getGroupName();
SimpleAccessBean aBean = new SimpleAccessBean();
aBean.setGroupName(groupName);
aBean.setRoleDisplayName(accBean.getRoleDisplayName());
groupList.add(aBean);
}
accessToSample.put("groupAccesses", groupList);
}
List<AccessibilityBean> userAccess = sampleBean.getUserAccesses();
this.userAccesses = userAccess;
if (userAccess != null) {
List<SimpleAccessBean> userList = new ArrayList<SimpleAccessBean>();
for (AccessibilityBean accBean : userAccess) {
SimpleAccessBean aBean = new SimpleAccessBean();
aBean.setLoginName(accBean.getUserBean().getLoginName());
aBean.setRoleDisplayName(accBean.getRoleDisplayName());
userList.add(aBean);
}
accessToSample.put("userAccesses", userList);
}
}
//edit
protected void transferDataAvailability(HttpServletRequest request, SampleBean sampleBean, String[] availableEntityNames) {
if (!sampleBean.getHasDataAvailability())
return;
if (request == null) {
logger.error("HttpServletRequest object is null. Unable to transfer DataAvailability data");
return;
}
dataAvailability = new SimpleDataAvailabilityBean();
dataAvailability.transferSampleBeanForDataAvailability(sampleBean, request, availableEntityNames);
// dataAvailability.setCaNanoLabScore(sampleBean.getCaNanoLabScore());
// dataAvailability.setMincharScore(sampleBean.getMincharScore());
//
// SortedSet<String> ca = (SortedSet<String>) request.getSession().getServletContext().getAttribute("chemicalAssocs");
// dataAvailability.setChemicalAssocs(new ArrayList<String>(ca));
//
// dataAvailability.setCaNano2MINChar((Map<String, String>) request.getSession().getServletContext()
// .getAttribute("caNano2MINChar"));
//
//
// SortedSet<String> pc = (SortedSet<String>) request.getSession().getServletContext().getAttribute("physicoChars");
// dataAvailability.setPhysicoChars(new ArrayList<String>(pc));
// SortedSet<String> iv = (SortedSet<String>) request.getSession().getServletContext().getAttribute("invitroChars");
// dataAvailability.setInvitroChars(new ArrayList<String>(iv));
// SortedSet<String> invivo = (SortedSet<String>) request.getSession().getServletContext().getAttribute("invivoChars");
// dataAvailability.setInvivoChars(new ArrayList<String>(invivo));
}
protected void transferPointOfContactData(SampleBean sampleBean) {
pointOfContacts = new ArrayList<SimplePointOfContactBean>();
PointOfContact samplePOC = sampleBean.getPrimaryPOCBean().getDomain();
if (samplePOC != null && samplePOC.getId() > 0) {
SimplePointOfContactBean poc = new SimplePointOfContactBean();
transferPointOfContactData(samplePOC, poc);
poc.setPrimaryContact(true);
pointOfContacts.add(poc);
}
List<PointOfContactBean> others = sampleBean.getOtherPOCBeans();
if (others == null) return;
for (PointOfContactBean aPoc : others) {
SimplePointOfContactBean poc = new SimplePointOfContactBean();
transferPointOfContactData(aPoc.getDomain(), poc);
pointOfContacts.add(poc);
}
}
protected void transferPointOfContactData(PointOfContact samplePOC, SimplePointOfContactBean poc) {
poc.setFirstName(samplePOC.getFirstName());
poc.setLastName(samplePOC.getLastName());
SimpleOrganizationBean simpleOrg = new SimpleOrganizationBean();
simpleOrg.setName(samplePOC.getOrganization().getName());
simpleOrg.setId(samplePOC.getOrganization().getId());
poc.setOrganization(simpleOrg);
poc.setRole(samplePOC.getRole());
poc.setId(samplePOC.getId());
SimpleAddressBean simpleAddress = new SimpleAddressBean();
simpleAddress.setLine1(samplePOC.getOrganization().getStreetAddress1());
simpleAddress.setLine2(samplePOC.getOrganization().getStreetAddress2());
simpleAddress.setCity(samplePOC.getOrganization().getCity());
simpleAddress.setStateProvince(samplePOC.getOrganization().getState());
simpleAddress.setCountry(samplePOC.getOrganization().getCountry());
simpleAddress.setZip(samplePOC.getOrganization().getPostalCode());
poc.setAddress(simpleAddress);
}
/**
* Populate input data for saving a sample to a SampleBean. Currently, only sampleName
* and keywords are needed
*
* @param destSampleBean
*/
public void populateDataForSavingSample(SampleBean destSampleBean) {
if (destSampleBean == null)
return;
//When saving keywords, current implementation is to replace the whole set
//ref. SampleServiceLocalImpl.saveSample()
List<String> keywords = this.getKeywords();
if (keywords != null) {
Collection<Keyword> keywordColl = new HashSet<Keyword>();
String kws = "";
for (String keyword : keywords) {
kws += keyword;
kws += "\n";
Keyword kw = new Keyword();
kw.setName(keyword);
keywordColl.add(kw);
}
destSampleBean.setKeywordsStr(kws);
destSampleBean.getDomain().setKeywordCollection(keywordColl);
}
destSampleBean.getDomain().setName(this.sampleName);
}
}
| software/cananolab-webapp/src/gov/nih/nci/cananolab/restful/view/edit/SampleEditGeneralBean.java | package gov.nih.nci.cananolab.restful.view.edit;
import gov.nih.nci.cananolab.domain.common.Keyword;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.dto.common.AccessibilityBean;
import gov.nih.nci.cananolab.dto.common.DataReviewStatusBean;
import gov.nih.nci.cananolab.dto.common.PointOfContactBean;
import gov.nih.nci.cananolab.dto.particle.SampleBean;
import gov.nih.nci.cananolab.restful.sample.InitSampleSetup;
import gov.nih.nci.cananolab.service.curation.CurationService;
import gov.nih.nci.cananolab.service.sample.SampleService;
import gov.nih.nci.cananolab.service.security.SecurityService;
import gov.nih.nci.cananolab.service.security.UserBean;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
public class SampleEditGeneralBean {
private static Logger logger = Logger.getLogger(SampleEditGeneralBean.class);
String sampleName;
String newSampleName;
long sampleId;
boolean userIsCurator;
List<SimplePointOfContactBean> pointOfContacts;
List<String> keywords = new ArrayList<String>();
Map<String, List<SimpleAccessBean>> accessToSample;
SimpleDataAvailabilityBean dataAvailability;
//These are lookups needed for dropdown lists
List<String> organizationNamesForUser;
List<String> contactRoles;
List<String> allGroupNames;
Map<String, String> filteredUsers;
Map<String, String> roleNames;
boolean showReviewButton;
List<String> errors = new ArrayList<String>();
public String getNewSampleName() {
return newSampleName;
}
public void setNewSampleName(String newSampleName) {
this.newSampleName = newSampleName;
}
public Map<String, String> getRoleNames() {
return roleNames;
}
public void setRoleNames(Map<String, String> roleNames) {
this.roleNames = roleNames;
}
public String getSampleName() {
return sampleName;
}
public void setSampleName(String sampleName) {
this.sampleName = sampleName;
}
public long getSampleId() {
return sampleId;
}
public void setSampleId(long sampleId) {
this.sampleId = sampleId;
}
public List<SimplePointOfContactBean> getPointOfContacts() {
return pointOfContacts;
}
public void setPointOfContacts(List<SimplePointOfContactBean> pointOfContacts) {
this.pointOfContacts = pointOfContacts;
}
public List<String> getKeywords() {
return keywords;
}
public void setKeywords(List<String> keywords) {
this.keywords = keywords;
}
public Map<String, List<SimpleAccessBean>> getAccessToSample() {
return accessToSample;
}
public void setAccessToSample(Map<String, List<SimpleAccessBean>> accessToSample) {
this.accessToSample = accessToSample;
}
public SimpleDataAvailabilityBean getDataAvailability() {
return dataAvailability;
}
public void setDataAvailability(SimpleDataAvailabilityBean dataAvailability) {
this.dataAvailability = dataAvailability;
}
public boolean isUserIsCurator() {
return userIsCurator;
}
public void setUserIsCurator(boolean userIsCurator) {
this.userIsCurator = userIsCurator;
}
public boolean isShowReviewButton() {
return showReviewButton;
}
public void setShowReviewButton(boolean showReviewButton) {
this.showReviewButton = showReviewButton;
}
public List<String> getAllGroupNames() {
return allGroupNames;
}
public void setAllGroupNames(List<String> allGroupNames) {
this.allGroupNames = allGroupNames;
}
public Map<String, String> getFilteredUsers() {
return filteredUsers;
}
public void setFilteredUsers(Map<String, String> filteredUsers) {
this.filteredUsers = filteredUsers;
}
public List<String> getErrors() {
return errors;
}
public List<String> getOrganizationNamesForUser() {
return organizationNamesForUser;
}
public void setOrganizationNamesForUser(List<String> organizationNamesForUser) {
this.organizationNamesForUser = organizationNamesForUser;
}
public List<String> getContactRoles() {
return contactRoles;
}
public void setContactRoles(List<String> contactRoles) {
this.contactRoles = contactRoles;
}
public void setErrors(List<String> errors) {
this.errors = errors;
}
public void transferSampleBeanData(HttpServletRequest request,
CurationService curatorService, SampleBean sampleBean, String[] availableEntityNames)
throws Exception {
this.sampleName = sampleBean.getDomain().getName();
this.sampleId = sampleBean.getDomain().getId();
this.userIsCurator = sampleBean.getUser().isCurator();
transferPointOfContactData(sampleBean);
this.keywords = new ArrayList<String>(sampleBean.getKeywordSet());
transferAccessibilityData(sampleBean);
transferDataAvailability(request, sampleBean, availableEntityNames);
setupLookups(request);
setupGroupNamesForNewAccess(request);
setupFilteredUsersParamForNewAccess(request, sampleBean.getDomain().getCreatedBy());
setupReviewButton(request, curatorService, sampleBean);
setupRoleNameMap();
}
protected void setupRoleNameMap() {
this.roleNames = new HashMap<String, String>();
roleNames.put(AccessibilityBean.CSM_READ_ROLE, AccessibilityBean.R_ROLE_DISPLAY_NAME);
roleNames.put(AccessibilityBean.CSM_CURD_ROLE, AccessibilityBean.CURD_ROLE_DISPLAY_NAME);
}
/**
* Logic moved from SampleAction.setUpSubmitForReviewButton()
* @param request
* @param curatorService
* @param sampleBean
* @throws Exception
*/
protected void setupReviewButton(HttpServletRequest request, CurationService curatorService, SampleBean sampleBean)
throws Exception {
boolean publicData = sampleBean.getPublicStatus();
if (!publicData) {
UserBean user = (UserBean) request.getSession()
.getAttribute("user");
//SecurityService securityService = getSecurityServiceFromSession(request);
SecurityService securityService = (SecurityService) request
.getSession().getAttribute("securityService");
DataReviewStatusBean reviewStatus = curatorService
.findDataReviewStatusBeanByDataId(sampleBean.getDomain().getId()
.toString(), securityService);
if (!user.isCurator()
&& (reviewStatus == null || reviewStatus != null
&& reviewStatus.getReviewStatus().equals(
DataReviewStatusBean.RETRACTED_STATUS))) {
this.showReviewButton = true;
} else {
this.showReviewButton = false;
}
} else {
this.showReviewButton = false;
}
}
/**
* Logic for DWRAccessibilityManager.getMatchedUsers()
*
* @param request
* @param dataOwner
*/
protected void setupFilteredUsersParamForNewAccess(HttpServletRequest request, String dataOwner) {
try {
SampleService sampleService = (SampleService) request.getSession().getAttribute("sampleService");
List<UserBean> matchedUsers = sampleService.findUserBeans("");
List<UserBean> updatedUsers = new ArrayList<UserBean>(matchedUsers);
// remove current user from the list
UserBean user = (UserBean) request.getSession().getAttribute("user");
updatedUsers.remove(user);
// remove data owner from the list if owner is not the current user
if (!dataOwner.equalsIgnoreCase(user.getLoginName())) {
for (UserBean userBean : matchedUsers) {
if (userBean.getLoginName().equalsIgnoreCase(dataOwner)) {
updatedUsers.remove(userBean);
break;
}
}
}
// exclude curators;
SecurityService securityService = (SecurityService) request
.getSession().getAttribute("securityService");
List<String> curators = securityService
.getUserNames(AccessibilityBean.CSM_DATA_CURATOR);
for (UserBean userBean : matchedUsers) {
for (String curator : curators) {
if (userBean.getLoginName().equalsIgnoreCase(curator)) {
updatedUsers.remove(userBean);
}
}
}
UserBean[] users = updatedUsers.toArray(new UserBean[updatedUsers.size()]);
this.filteredUsers = new HashMap<String, String>();
for (UserBean u :users) {
this.filteredUsers.put(u.getLoginName(), u.getDisplayName());
}
} catch (Exception e) {
logger.error("Got error while setting up params for adding access");
}
}
protected void setupGroupNamesForNewAccess(HttpServletRequest request) {
try {
SampleService sampleService = (SampleService) request.getSession().getAttribute("sampleService");
this.allGroupNames = sampleService.findGroupNames("");
} catch (Exception e) {
logger.error("Got error while setting up params for adding access");
}
}
protected void setupFilteredUsersForNewAccess(HttpServletRequest request) {
try {
SampleService sampleService = (SampleService) request.getSession().getAttribute("sampleService");
UserBean user = (UserBean) request.getSession().getAttribute("user");
List<UserBean> matchedUsers = sampleService.findUserBeans("");
List<UserBean> updatedUsers = new ArrayList<UserBean>(matchedUsers);
// remove current user from the list
updatedUsers.remove(user);
// // remove data owner from the list if owner is not the current user
// if (!dataOwner.equalsIgnoreCase(user.getLoginName())) {
// for (UserBean userBean : matchedUsers) {
// if (userBean.getLoginName().equalsIgnoreCase(dataOwner)) {
// updatedUsers.remove(userBean);
// break;
// }
// }
// }
// // exclude curators;
// List<String> curators = securityService
// .getUserNames(AccessibilityBean.CSM_DATA_CURATOR);
// for (UserBean userBean : matchedUsers) {
// for (String curator : curators) {
// if (userBean.getLoginName().equalsIgnoreCase(curator)) {
// updatedUsers.remove(userBean);
// }
// }
// }
//
// UserBean[] users = updatedUsers.toArray(new UserBean[updatedUsers.size()]);
//
// return updatedUsers.toArray(new UserBean[updatedUsers.size()]);
} catch (Exception e) {
logger.error("Got error while setting up params for adding access");
}
}
/**
*
* @param request
*/
public void setupLookups(HttpServletRequest request) {
try {
InitSampleSetup.getInstance().setPOCDropdowns(request);
SortedSet<String> organizationNames = (SortedSet<String>)request.getSession().getAttribute("allOrganizationNames");
this.organizationNamesForUser = new ArrayList<String>(organizationNames);
SortedSet<String> roles = (SortedSet<String>)request.getSession().getAttribute("contactRoles");
this.contactRoles = new ArrayList<String>(roles);
} catch (Exception e) {
logger.error("Got error while setting up POC lookup for sample edit");
}
}
/**
* Replicate logic in bodyManageAccessibility.jsp
*
* @param sampleBean
*/
protected void transferAccessibilityData(SampleBean sampleBean) {
accessToSample = new HashMap<String, List<SimpleAccessBean>>();
List<AccessibilityBean> groupAccess = sampleBean.getGroupAccesses();
if (groupAccess != null) {
List<SimpleAccessBean> groupList = new ArrayList<SimpleAccessBean>();
for (AccessibilityBean accBean : groupAccess) {
String groupName = accBean.getGroupName();
SimpleAccessBean aBean = new SimpleAccessBean();
aBean.setGroupName(groupName);
aBean.setRoleDisplayName(accBean.getRoleDisplayName());
groupList.add(aBean);
}
accessToSample.put("groupAccesses", groupList);
}
List<AccessibilityBean> userAccess = sampleBean.getUserAccesses();
if (userAccess != null) {
List<SimpleAccessBean> userList = new ArrayList<SimpleAccessBean>();
for (AccessibilityBean accBean : userAccess) {
SimpleAccessBean aBean = new SimpleAccessBean();
aBean.setLoginName(accBean.getUserBean().getLoginName());
aBean.setRoleDisplayName(accBean.getRoleDisplayName());
userList.add(aBean);
}
accessToSample.put("userAccesses", userList);
}
}
//edit
protected void transferDataAvailability(HttpServletRequest request, SampleBean sampleBean, String[] availableEntityNames) {
if (!sampleBean.getHasDataAvailability())
return;
if (request == null) {
logger.error("HttpServletRequest object is null. Unable to transfer DataAvailability data");
return;
}
dataAvailability = new SimpleDataAvailabilityBean();
dataAvailability.transferSampleBeanForDataAvailability(sampleBean, request, availableEntityNames);
// dataAvailability.setCaNanoLabScore(sampleBean.getCaNanoLabScore());
// dataAvailability.setMincharScore(sampleBean.getMincharScore());
//
// SortedSet<String> ca = (SortedSet<String>) request.getSession().getServletContext().getAttribute("chemicalAssocs");
// dataAvailability.setChemicalAssocs(new ArrayList<String>(ca));
//
// dataAvailability.setCaNano2MINChar((Map<String, String>) request.getSession().getServletContext()
// .getAttribute("caNano2MINChar"));
//
//
// SortedSet<String> pc = (SortedSet<String>) request.getSession().getServletContext().getAttribute("physicoChars");
// dataAvailability.setPhysicoChars(new ArrayList<String>(pc));
// SortedSet<String> iv = (SortedSet<String>) request.getSession().getServletContext().getAttribute("invitroChars");
// dataAvailability.setInvitroChars(new ArrayList<String>(iv));
// SortedSet<String> invivo = (SortedSet<String>) request.getSession().getServletContext().getAttribute("invivoChars");
// dataAvailability.setInvivoChars(new ArrayList<String>(invivo));
}
protected void transferPointOfContactData(SampleBean sampleBean) {
pointOfContacts = new ArrayList<SimplePointOfContactBean>();
PointOfContact samplePOC = sampleBean.getPrimaryPOCBean().getDomain();
if (samplePOC != null && samplePOC.getId() > 0) {
SimplePointOfContactBean poc = new SimplePointOfContactBean();
transferPointOfContactData(samplePOC, poc);
poc.setPrimaryContact(true);
pointOfContacts.add(poc);
}
List<PointOfContactBean> others = sampleBean.getOtherPOCBeans();
if (others == null) return;
for (PointOfContactBean aPoc : others) {
SimplePointOfContactBean poc = new SimplePointOfContactBean();
transferPointOfContactData(aPoc.getDomain(), poc);
pointOfContacts.add(poc);
}
}
protected void transferPointOfContactData(PointOfContact samplePOC, SimplePointOfContactBean poc) {
poc.setFirstName(samplePOC.getFirstName());
poc.setLastName(samplePOC.getLastName());
SimpleOrganizationBean simpleOrg = new SimpleOrganizationBean();
simpleOrg.setName(samplePOC.getOrganization().getName());
simpleOrg.setId(samplePOC.getOrganization().getId());
poc.setOrganization(simpleOrg);
poc.setRole(samplePOC.getRole());
poc.setId(samplePOC.getId());
SimpleAddressBean simpleAddress = new SimpleAddressBean();
simpleAddress.setLine1(samplePOC.getOrganization().getStreetAddress1());
simpleAddress.setLine2(samplePOC.getOrganization().getStreetAddress2());
simpleAddress.setCity(samplePOC.getOrganization().getCity());
simpleAddress.setStateProvince(samplePOC.getOrganization().getState());
simpleAddress.setCountry(samplePOC.getOrganization().getCountry());
simpleAddress.setZip(samplePOC.getOrganization().getPostalCode());
poc.setAddress(simpleAddress);
}
/**
* Populate input data for saving a sample to a SampleBean. Currently, only sampleName
* and keywords are needed
*
* @param destSampleBean
*/
public void populateDataForSavingSample(SampleBean destSampleBean) {
if (destSampleBean == null)
return;
//When saving keywords, current implementation is to replace the whole set
//ref. SampleServiceLocalImpl.saveSample()
List<String> keywords = this.getKeywords();
if (keywords != null) {
Collection<Keyword> keywordColl = new HashSet<Keyword>();
String kws = "";
for (String keyword : keywords) {
kws += keyword;
kws += "\n";
// Keyword kw = new Keyword();
// kw.setName(keyword);
// keywordColl.add(kw);
}
destSampleBean.setKeywordsStr(kws);
}
destSampleBean.getDomain().setName(this.sampleName);
}
}
| Added theAccess
| software/cananolab-webapp/src/gov/nih/nci/cananolab/restful/view/edit/SampleEditGeneralBean.java | Added theAccess | <ide><path>oftware/cananolab-webapp/src/gov/nih/nci/cananolab/restful/view/edit/SampleEditGeneralBean.java
<ide> List<String> keywords = new ArrayList<String>();
<ide> Map<String, List<SimpleAccessBean>> accessToSample;
<ide>
<add> List<AccessibilityBean> groupAccesses;// = new ArrayList<AccessibilityBean>();
<add> List<AccessibilityBean> userAccesses; // = new ArrayList<AccessibilityBean>();
<add> AccessibilityBean theAccess = new AccessibilityBean();
<add>
<ide> SimpleDataAvailabilityBean dataAvailability;
<ide>
<ide> //These are lookups needed for dropdown lists
<ide> boolean showReviewButton;
<ide>
<ide> List<String> errors = new ArrayList<String>();
<add>
<add>
<add>
<add> public List<AccessibilityBean> getGroupAccesses() {
<add> return groupAccesses;
<add> }
<add>
<add> public void setGroupAccesses(List<AccessibilityBean> groupAccesses) {
<add> this.groupAccesses = groupAccesses;
<add> }
<add>
<add> public List<AccessibilityBean> getUserAccesses() {
<add> return userAccesses;
<add> }
<add>
<add> public void setUserAccesses(List<AccessibilityBean> userAccesses) {
<add> this.userAccesses = userAccesses;
<add> }
<add>
<add> public AccessibilityBean getTheAccess() {
<add> return theAccess;
<add> }
<add>
<add> public void setTheAccess(AccessibilityBean theAccess) {
<add> this.theAccess = theAccess;
<add> }
<ide>
<ide> public String getNewSampleName() {
<ide> return newSampleName;
<ide>
<ide> transferPointOfContactData(sampleBean);
<ide>
<add> SortedSet<String> keyws = sampleBean.getKeywordSet();
<ide> this.keywords = new ArrayList<String>(sampleBean.getKeywordSet());
<ide>
<ide> transferAccessibilityData(sampleBean);
<ide> accessToSample = new HashMap<String, List<SimpleAccessBean>>();
<ide>
<ide> List<AccessibilityBean> groupAccess = sampleBean.getGroupAccesses();
<add>
<add> this.groupAccesses = groupAccess;
<add>
<ide> if (groupAccess != null) {
<ide> List<SimpleAccessBean> groupList = new ArrayList<SimpleAccessBean>();
<ide> for (AccessibilityBean accBean : groupAccess) {
<ide> }
<ide>
<ide> List<AccessibilityBean> userAccess = sampleBean.getUserAccesses();
<add> this.userAccesses = userAccess;
<ide> if (userAccess != null) {
<ide> List<SimpleAccessBean> userList = new ArrayList<SimpleAccessBean>();
<ide> for (AccessibilityBean accBean : userAccess) {
<ide> for (String keyword : keywords) {
<ide> kws += keyword;
<ide> kws += "\n";
<del>// Keyword kw = new Keyword();
<del>// kw.setName(keyword);
<del>// keywordColl.add(kw);
<add> Keyword kw = new Keyword();
<add> kw.setName(keyword);
<add> keywordColl.add(kw);
<ide> }
<ide>
<ide> destSampleBean.setKeywordsStr(kws);
<del>
<add> destSampleBean.getDomain().setKeywordCollection(keywordColl);
<ide> }
<ide>
<ide> destSampleBean.getDomain().setName(this.sampleName); |
|
Java | apache-2.0 | 801eb60caf59f2af13fcc7edceb00787840716f6 | 0 | peterholc/avro,eonezhang/avro,wojtkiewicz/avro,cloudera/avro,Romain-Geissler-1A/avro,eonezhang/avro,ecatmur/avro,RallySoftware/avro,eonezhang/avro,Yelp/avro,relateiq/avro,st0nx/avro,pwendell/Avro,st0nx/avro,relateiq/avro,jmattbeal/avro,apache/avro,ntent-ad/avro,DrAA/avro,cloudera/avro,bitbouncer/avro,Romain-Geissler-1A/avro,zolyfarkas/avro,Romain-Geissler-1A/avro,djudd/avro,alexanderlz/avro,eonezhang/avro,zolyfarkas/avro,peterholc/avro,yadavsaroj/avro,jmattbeal/avro,peterholc/avro,kurtharriger/avro,pwendell/Avro,st0nx/avro,st0nx/avro,mgaffney/avro,Yelp/avro,kurtharriger/avro,dasch/avro,massie/avro,DrAA/avro,pwendell/Avro,jmattbeal/avro,djudd/avro,alexanderlz/avro,kurtharriger/avro,jmattbeal/avro,massie/avro,mgaffney/avro,alexanderlz/avro,cloudera/avro,peterholc/avro,eonezhang/avro,massie/avro,djudd/avro,kurtharriger/avro,Yelp/avro,massie/avro,dasch/avro,st0nx/avro,restorando/avro,jmattbeal/avro,eonezhang/avro,mgaffney/avro,st0nx/avro,DrAA/avro,ecatmur/avro,wojtkiewicz/avro,ecatmur/avro,restorando/avro,djudd/avro,Romain-Geissler-1A/avro,ntent-ad/avro,DrAA/avro,dasch/avro,Romain-Geissler-1A/avro,alexanderlz/avro,bitbouncer/avro,ntent-ad/avro,bitbouncer/avro,cloudera/avro,eonezhang/avro,bitbouncer/avro,relateiq/avro,apache/avro,RallySoftware/avro,rdblue/avro-ruby,eonezhang/avro,relateiq/avro,wojtkiewicz/avro,Romain-Geissler-1A/avro,restorando/avro,Romain-Geissler-1A/avro,djudd/avro,djudd/avro,johnj/php5-xcom,kurtharriger/avro,dasch/avro,zolyfarkas/avro,alexanderlz/avro,zolyfarkas/avro,kurtharriger/avro,DrAA/avro,wojtkiewicz/avro,cloudera/avro,eonezhang/avro,ntent-ad/avro,djudd/avro,st0nx/avro,RallySoftware/avro,wojtkiewicz/avro,Romain-Geissler-1A/avro,relateiq/avro,DrAA/avro,restorando/avro,mgaffney/avro,jmattbeal/avro,wojtkiewicz/avro,djudd/avro,kurtharriger/avro,relateiq/avro,cloudera/avro,yadavsaroj/avro,RallySoftware/avro,bitbouncer/avro,restorando/avro,alexanderlz/avro,bitbouncer/avro,bitbouncer/av
ro,yadavsaroj/avro,peterholc/avro,dasch/avro,alexanderlz/avro,ntent-ad/avro,yadavsaroj/avro,ecatmur/avro,bitbouncer/avro,apache/avro,djudd/avro,st0nx/avro,DrAA/avro,ecatmur/avro,zolyfarkas/avro,Yelp/avro,restorando/avro,relateiq/avro,Yelp/avro,DrAA/avro,Yelp/avro,ntent-ad/avro,Yelp/avro,relateiq/avro,alexanderlz/avro,ecatmur/avro,Romain-Geissler-1A/avro,apache/avro,jmattbeal/avro,relateiq/avro,yadavsaroj/avro,dasch/avro,jmattbeal/avro,alexanderlz/avro,apache/avro,alexanderlz/avro,RallySoftware/avro,zolyfarkas/avro,restorando/avro,kurtharriger/avro,massie/avro,massie/avro,apache/avro,zolyfarkas/avro,mgaffney/avro,wojtkiewicz/avro,kurtharriger/avro,ntent-ad/avro,djudd/avro,bitbouncer/avro,RallySoftware/avro,yadavsaroj/avro,eonezhang/avro,DrAA/avro,dasch/avro,ecatmur/avro,restorando/avro,DrAA/avro,zolyfarkas/avro,RallySoftware/avro,jmattbeal/avro,jmattbeal/avro,massie/avro,st0nx/avro,massie/avro,djudd/avro,yadavsaroj/avro,restorando/avro,jmattbeal/avro,alexanderlz/avro,zolyfarkas/avro,bitbouncer/avro,st0nx/avro,ecatmur/avro,alexanderlz/avro,relateiq/avro,Yelp/avro,kurtharriger/avro,yadavsaroj/avro,ecatmur/avro,mgaffney/avro,yadavsaroj/avro,massie/avro,yadavsaroj/avro,kurtharriger/avro,massie/avro,massie/avro,massie/avro,apache/avro,pwendell/Avro,ntent-ad/avro,bitbouncer/avro,dasch/avro,djudd/avro,pwendell/Avro,wojtkiewicz/avro,apache/avro,ecatmur/avro,yadavsaroj/avro,pwendell/Avro,DrAA/avro,Yelp/avro,ntent-ad/avro,ntent-ad/avro,RallySoftware/avro,apache/avro,Romain-Geissler-1A/avro,DrAA/avro,eonezhang/avro,pwendell/Avro,jmattbeal/avro,dasch/avro,yadavsaroj/avro,johnj/php5-xcom,restorando/avro,dasch/avro,relateiq/avro,dasch/avro,Romain-Geissler-1A/avro,restorando/avro,zolyfarkas/avro,rdblue/avro-ruby,dasch/avro,ecatmur/avro,relateiq/avro,apache/avro,Yelp/avro,wojtkiewicz/avro,wojtkiewicz/avro,wojtkiewicz/avro,Yelp/avro,apache/avro,johnj/php5-xcom,RallySoftware/avro,zolyfarkas/avro,bitbouncer/avro,st0nx/avro,apache/avro,Yelp/avro,Romain-Geissler-1A/avro,ecatmur/avro,kurt
harriger/avro,cloudera/avro,restorando/avro,peterholc/avro,eonezhang/avro,cloudera/avro,wojtkiewicz/avro,st0nx/avro | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.JsonDecoder;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.specific.TestSpecificCompiler;
import org.apache.avro.util.Utf8;
/**
 * Tests of {@link Schema}: parsing of every schema type, detection of invalid
 * schemas, binary and JSON round-trips of random data, schema properties,
 * field defaults, and "doc" attribute handling.
 */
public class TestSchema {
  public static final String BASIC_ENUM_SCHEMA = "{\"type\":\"enum\", \"name\":\"Test\","
          +"\"symbols\": [\"A\", \"B\"]}";

  /** A record schema carrying "doc" attributes at every nesting level. */
  public static final String SCHEMA_WITH_DOC_TAGS = "{\n"
      + "  \"type\": \"record\",\n"
      + "  \"name\": \"outer_record\",\n"
      + "  \"doc\": \"This is not a world record.\",\n"
      + "  \"fields\": [\n"
      + "    { \"type\": { \"type\": \"fixed\", \"doc\": \"Very Inner Fixed\", "
      + "                  \"name\": \"very_inner_fixed\", \"size\": 1 },\n"
      + "      \"doc\": \"Inner Fixed\", \"name\": \"inner_fixed\" },\n"
      + "    { \"type\": \"string\",\n"
      + "      \"name\": \"inner_string\",\n"
      + "      \"doc\": \"Inner String\" },\n"
      + "    { \"type\": { \"type\": \"enum\", \"doc\": \"Very Inner Enum\", \n"
      + "                  \"name\": \"very_inner_enum\", \n"
      + "                  \"symbols\": [ \"A\", \"B\", \"C\" ] },\n"
      + "      \"doc\": \"Inner Enum\", \"name\": \"inner_enum\" },\n"
      + "    { \"type\": [\"string\", \"int\"], \"doc\": \"Inner Union\", \n"
      + "      \"name\": \"inner_union\" }\n" + "  ]\n" + "}\n";

  /** Number of random datum instances generated per schema round-trip. */
  private static final int COUNT =
    Integer.parseInt(System.getProperty("test.count", "10"));

  @Test
  public void testNull() throws Exception {
    assertEquals(Schema.create(Type.NULL), Schema.parse("\"null\""));
    assertEquals(Schema.create(Type.NULL), Schema.parse("{\"type\":\"null\"}"));
    check("\"null\"", "null", null);
  }

  @Test
  public void testBoolean() throws Exception {
    assertEquals(Schema.create(Type.BOOLEAN), Schema.parse("\"boolean\""));
    assertEquals(Schema.create(Type.BOOLEAN),
                 Schema.parse("{\"type\":\"boolean\"}"));
    check("\"boolean\"", "true", Boolean.TRUE);
  }

  @Test
  public void testString() throws Exception {
    assertEquals(Schema.create(Type.STRING), Schema.parse("\"string\""));
    assertEquals(Schema.create(Type.STRING),
                 Schema.parse("{\"type\":\"string\"}"));
    check("\"string\"", "\"foo\"", new Utf8("foo"));
  }

  @Test
  public void testBytes() throws Exception {
    assertEquals(Schema.create(Type.BYTES), Schema.parse("\"bytes\""));
    assertEquals(Schema.create(Type.BYTES),
                 Schema.parse("{\"type\":\"bytes\"}"));
    check("\"bytes\"", "\"\\u0000ABC\\u00FF\"",
          ByteBuffer.wrap(new byte[]{0,65,66,67,-1}));
  }

  @Test
  public void testInt() throws Exception {
    assertEquals(Schema.create(Type.INT), Schema.parse("\"int\""));
    assertEquals(Schema.create(Type.INT), Schema.parse("{\"type\":\"int\"}"));
    // valueOf over the deprecated boxed-primitive constructors.
    check("\"int\"", "9", Integer.valueOf(9));
  }

  @Test
  public void testLong() throws Exception {
    assertEquals(Schema.create(Type.LONG), Schema.parse("\"long\""));
    assertEquals(Schema.create(Type.LONG), Schema.parse("{\"type\":\"long\"}"));
    check("\"long\"", "11", Long.valueOf(11));
  }

  @Test
  public void testFloat() throws Exception {
    assertEquals(Schema.create(Type.FLOAT), Schema.parse("\"float\""));
    assertEquals(Schema.create(Type.FLOAT),
                 Schema.parse("{\"type\":\"float\"}"));
    check("\"float\"", "1.1", Float.valueOf(1.1f));
  }

  @Test
  public void testDouble() throws Exception {
    assertEquals(Schema.create(Type.DOUBLE), Schema.parse("\"double\""));
    assertEquals(Schema.create(Type.DOUBLE),
                 Schema.parse("{\"type\":\"double\"}"));
    check("\"double\"", "1.2", Double.valueOf(1.2));
  }

  @Test
  public void testArray() throws Exception {
    String json = "{\"type\":\"array\", \"items\": \"long\"}";
    Schema schema = Schema.parse(json);
    GenericArray<Long> array = new GenericData.Array<Long>(1, schema);
    array.add(1L);
    check(json, "[1]", array);
    checkParseError("{\"type\":\"array\"}");      // items required
  }

  @Test
  public void testMap() throws Exception {
    HashMap<Utf8,Long> map = new HashMap<Utf8,Long>();
    map.put(new Utf8("a"), 1L);
    check("{\"type\":\"map\", \"values\":\"long\"}", "{\"a\":1}", map);
    checkParseError("{\"type\":\"map\"}");        // values required
  }

  @Test
  public void testRecord() throws Exception {
    String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
      +"[{\"name\":\"f\", \"type\":\"long\"}]}";
    Schema schema = Schema.parse(recordJson);
    GenericData.Record record = new GenericData.Record(schema);
    record.put("f", 11L);
    check(recordJson, "{\"f\":11}", record, false);
    checkParseError("{\"type\":\"record\"}");
    checkParseError("{\"type\":\"record\",\"name\":\"X\"}");
    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":\"Y\"}");
    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":"
                    +"[{\"name\":\"f\"}]}");       // no type
    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":"
                    +"[{\"type\":\"long\"}]}");    // no name
  }

  @Test
  public void testEnum() throws Exception {
    check(BASIC_ENUM_SCHEMA, "\"B\"", "B", false);
    checkParseError("{\"type\":\"enum\"}");        // symbols required
  }

  @Test
  public void testFixed() throws Exception {
    check("{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}", "\"a\"",
          new GenericData.Fixed(new byte[]{(byte)'a'}), false);
    checkParseError("{\"type\":\"fixed\"}");       // size required
  }

  /** A schema that refers to itself through an array of children. */
  @Test
  public void testRecursive() throws Exception {
    check("{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
          +"{\"name\":\"label\", \"type\":\"string\"},"
          +"{\"name\":\"children\", \"type\":"
          +"{\"type\": \"array\", \"items\": \"Node\" }}]}",
          false);
  }

  /** Equality and hashCode must terminate on self-referential schemas. */
  @Test
  public void testRecursiveEquals() throws Exception {
    String jsonSchema = "{\"type\":\"record\", \"name\":\"List\", \"fields\": ["
      +"{\"name\":\"next\", \"type\":\"List\"}]}";
    Schema s1 = Schema.parse(jsonSchema);
    Schema s2 = Schema.parse(jsonSchema);
    assertEquals(s1, s2);
    s1.hashCode();                                 // test no stackoverflow
  }

  /** Mutually recursive record inside a union. */
  @Test
  public void testLisp() throws Exception {
    check("{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
          +"{\"name\":\"value\", \"type\":[\"null\", \"string\","
          +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
          +"{\"name\":\"car\", \"type\":\"Lisp\"},"
          +"{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
          false);
  }

  /** Unions: default handling plus the JSON encoding of each branch type. */
  @Test
  public void testUnion() throws Exception {
    check("[\"string\", \"long\"]", false);
    checkDefault("[\"double\", \"long\"]", "1.1", Double.valueOf(1.1));

    // check union json
    String record = "{\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]}";
    String fixed = "{\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1}";
    String enu = "{\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
    Schema union = Schema.parse("[\"null\",\"string\","
                                +record+","+ enu+","+fixed+"]");
    checkJson(union, null, "null");
    checkJson(union, new Utf8("foo"), "{\"string\":\"foo\"}");
    checkJson(union,
              new GenericData.Record(Schema.parse(record)),
              "{\"Foo\":{}}");
    checkJson(union,
              new GenericData.Fixed(new byte[]{(byte)'a'}),
              "{\"Bar\":\"a\"}");
    checkJson(union, "X", "{\"Baz\":\"X\"}");
  }

  /** Non-string property values are ignored by the parser. */
  @Test
  public void testComplexProp() throws Exception {
    String json = "{\"type\":\"null\", \"foo\": [0]}";
    Schema s = Schema.parse(json);
    assertEquals(null, s.getProp("foo"));
  }

  @Test
  public void testParseInputStream() throws IOException {
    Schema s = Schema.parse(
        new ByteArrayInputStream("\"boolean\"".getBytes("UTF-8")));
    assertEquals(Schema.parse("\"boolean\""), s);
  }

  /** Unqualified names take the namespace of their nearest enclosing record. */
  @Test
  public void testNamespaceScope() throws Exception {
    String z = "{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}";
    String y = "{\"type\":\"record\",\"name\":\"q.Y\",\"fields\":["
      +"{\"name\":\"f\",\"type\":"+z+"}]}";
    String x = "{\"type\":\"record\",\"name\":\"p.X\",\"fields\":["
      +"{\"name\":\"f\",\"type\":"+y+"},"
      +"{\"name\":\"g\",\"type\":"+z+"}"
      +"]}";
    Schema xs = Schema.parse(x);
    Schema ys = xs.getFields().get("f").schema();
    assertEquals("p.Z", xs.getFields().get("g").schema().getFullName());
    assertEquals("q.Z", ys.getFields().get("f").schema().getFullName());
  }

  /** Asserts that parsing {@code json} throws {@link SchemaParseException}. */
  private static void checkParseError(String json) {
    try {
      Schema.parse(json);                          // result intentionally unused
    } catch (SchemaParseException e) {
      return;
    }
    fail("Should not have parsed: "+json);
  }

  /**
   * Makes sure that "doc" tags are transcribed in the schemas.
   * Note that there are docs both for fields and for the records
   * themselves.
   */
  @Test
  public void testDocs() {
    Schema schema = Schema.parse(SCHEMA_WITH_DOC_TAGS);
    assertEquals("This is not a world record.", schema.getDoc());
    assertEquals("Inner Fixed", schema.getFields().get("inner_fixed").doc());
    assertEquals("Very Inner Fixed", schema.getFields().get("inner_fixed").schema().getDoc());
    assertEquals("Inner String", schema.getFields().get("inner_string").doc());
    assertEquals("Inner Enum", schema.getFields().get("inner_enum").doc());
    assertEquals("Very Inner Enum", schema.getFields().get("inner_enum").schema().getDoc());
    assertEquals("Inner Union", schema.getFields().get("inner_union").doc());
  }

  /** Round-trips {@code schemaJson} (with induction) and checks its default handling. */
  private static void check(String schemaJson, String defaultJson,
                            Object defaultValue) throws Exception {
    check(schemaJson, defaultJson, defaultValue, true);
  }

  private static void check(String schemaJson, String defaultJson,
                            Object defaultValue, boolean induce)
    throws Exception {
    check(schemaJson, induce);
    checkDefault(schemaJson, defaultJson, defaultValue);
  }

  /**
   * Core round-trip driver: for COUNT random instances of the schema,
   * optionally checks schema induction, then validates and round-trips the
   * datum through both binary and JSON codecs, and compiles the schema.
   */
  private static void check(String jsonSchema, boolean induce)
    throws Exception {
    Schema schema = Schema.parse(jsonSchema);
    checkProp(schema);
    for (Object datum : new RandomData(schema, COUNT)) {

      if (induce) {
        Schema induced = GenericData.get().induce(datum);
        assertEquals("Induced schema does not match.", schema, induced);
      }

      assertTrue("Datum does not validate against schema "+datum,
                 GenericData.get().validate(schema, datum));

      checkBinary(schema, datum,
                  new GenericDatumWriter<Object>(),
                  new GenericDatumReader<Object>());
      checkJson(schema, datum,
                  new GenericDatumWriter<Object>(),
                  new GenericDatumReader<Object>());

      // Check that we can generate the code for every schema we see.
      TestSpecificCompiler.assertCompiles(schema, false);
    }
  }

  /** Properties must survive a toString/parse round-trip and affect equality. */
  private static void checkProp(Schema s0) throws Exception {
    if(s0.getType().equals(Schema.Type.UNION)) return; // unions have no props
    assertEquals(null, s0.getProp("foo"));
    Schema s1 = Schema.parse(s0.toString());
    s1.setProp("foo", "bar");
    assertEquals("bar", s1.getProp("foo"));
    assertFalse(s0.equals(s1));
    Schema s2 = Schema.parse(s1.toString());
    assertEquals("bar", s2.getProp("foo"));
    assertEquals(s1, s2);
    assertFalse(s0.equals(s2));
  }

  /** Binary write/read round-trip of a single datum. */
  private static void checkBinary(Schema schema, Object datum,
                                  DatumWriter<Object> writer,
                                  DatumReader<Object> reader)
    throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    writer.setSchema(schema);
    writer.write(datum, new BinaryEncoder(out));
    byte[] data = out.toByteArray();

    reader.setSchema(schema);

    Object decoded =
      reader.read(null, new BinaryDecoder(new ByteArrayInputStream(data)));

    assertEquals("Decoded data does not match.", datum, decoded);
  }

  /** JSON write/read round-trip; the datum is written and read back twice. */
  private static void checkJson(Schema schema, Object datum,
                                DatumWriter<Object> writer,
                                DatumReader<Object> reader)
    throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = new JsonEncoder(schema, out);
    writer.setSchema(schema);
    writer.write(datum, encoder);
    writer.write(datum, encoder);
    encoder.flush();
    byte[] data = out.toByteArray();

    reader.setSchema(schema);
    Decoder decoder = new JsonDecoder(schema, new ByteArrayInputStream(data));
    Object decoded = reader.read(null, decoder);
    assertEquals("Decoded data does not match.", datum, decoded);

    decoded = reader.read(decoded, decoder);
    assertEquals("Decoded data does not match.", datum, decoded);
  }

  /** Checks that {@code datum} encodes exactly to {@code json} and decodes back. */
  private static void checkJson(Schema schema, Object datum,
                                String json) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = new JsonEncoder(schema, out);
    DatumWriter<Object> writer = new GenericDatumWriter<Object>();
    writer.setSchema(schema);
    writer.write(datum, encoder);
    encoder.flush();
    byte[] data = out.toByteArray();

    String encoded = new String(data, "UTF-8");
    assertEquals("Encoded data does not match.", json, encoded);

    DatumReader<Object> reader = new GenericDatumReader<Object>();
    reader.setSchema(schema);
    Object decoded =
      reader.read(null, new JsonDecoder(schema,new ByteArrayInputStream(data)));

    assertEquals("Decoded data does not match.", datum, decoded);
  }

  private static final Schema ACTUAL =            // an empty record schema
    Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[]}");

  /**
   * Reads empty-record data against {@code schemaJson} with a default for
   * field "f" and checks the default is applied and toString round-trips.
   */
  @SuppressWarnings(value="unchecked")
  private static void checkDefault(String schemaJson, String defaultJson,
                                   Object defaultValue) throws Exception {
    String recordJson =
      "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", "
      +"\"type\":"+schemaJson+", "
      +"\"default\":"+defaultJson+"}]}";
    Schema expected = Schema.parse(recordJson);
    DatumReader in = new GenericDatumReader(ACTUAL, expected);
    GenericData.Record record = (GenericData.Record)
      in.read(null, new BinaryDecoder(new ByteArrayInputStream(new byte[0])));
    assertEquals("Wrong default.", defaultValue, record.get("f"));
    assertEquals("Wrong toString", expected, Schema.parse(expected.toString()));
  }

  /**
   * An expected field absent from the actual schema and lacking a default
   * must fail to read.  Previously private static and never invoked, so it
   * silently never ran; now a proper JUnit test.
   */
  @Test
  @SuppressWarnings(value="unchecked")
  public void testNoDefaultField() throws Exception {
    Schema expected =
      Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":"+
                   "[{\"name\":\"f\", \"type\": \"string\"}]}");
    DatumReader in = new GenericDatumReader(ACTUAL, expected);
    try {
      in.read(null, new BinaryDecoder(new ByteArrayInputStream(new byte[0])));
    } catch (AvroTypeException e) {
      return;
    }
    fail("Should not read: "+expected);
  }

  /**
   * Reading an enum symbol missing from the expected schema must fail after
   * the shared symbol reads fine.  Previously private static and never
   * invoked, so it silently never ran; now a proper JUnit test.
   */
  @Test
  @SuppressWarnings(value="unchecked")
  public void testEnumMismatch() throws Exception {
    Schema actual = Schema.parse
      ("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"X\",\"Y\"]}");
    Schema expected = Schema.parse
      ("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"Y\",\"Z\"]}");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<Object> writer = new GenericDatumWriter<Object>(actual);
    Encoder encoder = new BinaryEncoder(out);
    writer.write("Y", encoder);
    writer.write("X", encoder);
    byte[] data = out.toByteArray();
    Decoder decoder = new BinaryDecoder(new ByteArrayInputStream(data));
    DatumReader in = new GenericDatumReader(actual, expected);
    assertEquals("Wrong value", "Y", in.read(null, decoder));
    try {
      in.read(null, decoder);
    } catch (AvroTypeException e) {
      return;
    }
    fail("Should not read: "+expected);
  }

}
| lang/java/src/test/java/org/apache/avro/TestSchema.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.JsonDecoder;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.specific.TestSpecificCompiler;
import org.apache.avro.util.Utf8;
public class TestSchema {
public static final String BASIC_ENUM_SCHEMA = "{\"type\":\"enum\", \"name\":\"Test\","
+"\"symbols\": [\"A\", \"B\"]}";
public static final String SCHEMA_WITH_DOC_TAGS = "{\n"
+ " \"type\": \"record\",\n"
+ " \"name\": \"outer_record\",\n"
+ " \"doc\": \"This is not a world record.\",\n"
+ " \"fields\": [\n"
+ " { \"type\": { \"type\": \"fixed\", \"doc\": \"Very Inner Fixed\", "
+ " \"name\": \"very_inner_fixed\", \"size\": 1 },\n"
+ " \"doc\": \"Inner Fixed\", \"name\": \"inner_fixed\" },\n"
+ " { \"type\": \"string\",\n"
+ " \"name\": \"inner_string\",\n"
+ " \"doc\": \"Inner String\" },\n"
+ " { \"type\": { \"type\": \"enum\", \"doc\": \"Very Inner Enum\", \n"
+ " \"name\": \"very_inner_enum\", \n"
+ " \"symbols\": [ \"A\", \"B\", \"C\" ] },\n"
+ " \"doc\": \"Inner Enum\", \"name\": \"inner_enum\" },\n"
+ " { \"type\": [\"string\", \"int\"], \"doc\": \"Inner Union\", \n"
+ " \"name\": \"inner_union\" }\n" + " ]\n" + "}\n";
private static final int COUNT =
Integer.parseInt(System.getProperty("test.count", "10"));
@Test
public void testNull() throws Exception {
check("\"null\"", "null", null);
}
@Test
public void testBoolean() throws Exception {
check("\"boolean\"", "true", Boolean.TRUE);
}
@Test
public void testString() throws Exception {
check("\"string\"", "\"foo\"", new Utf8("foo"));
}
@Test
public void testBytes() throws Exception {
check("\"bytes\"", "\"\\u0000ABC\\u00FF\"",
ByteBuffer.wrap(new byte[]{0,65,66,67,-1}));
}
@Test
public void testInt() throws Exception {
check("\"int\"", "9", new Integer(9));
}
@Test
public void testLong() throws Exception {
check("\"long\"", "11", new Long(11));
}
@Test
public void testFloat() throws Exception {
check("\"float\"", "1.1", new Float(1.1));
}
@Test
public void testDouble() throws Exception {
check("\"double\"", "1.2", new Double(1.2));
}
@Test
public void testArray() throws Exception {
String json = "{\"type\":\"array\", \"items\": \"long\"}";
Schema schema = Schema.parse(json);
GenericArray<Long> array = new GenericData.Array<Long>(1, schema);
array.add(1L);
check(json, "[1]", array);
checkParseError("{\"type\":\"array\"}"); // items required
}
@Test
public void testMap() throws Exception {
HashMap<Utf8,Long> map = new HashMap<Utf8,Long>();
map.put(new Utf8("a"), 1L);
check("{\"type\":\"map\", \"values\":\"long\"}", "{\"a\":1}", map);
checkParseError("{\"type\":\"map\"}"); // values required
}
@Test
public void testRecord() throws Exception {
String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
+"[{\"name\":\"f\", \"type\":\"long\"}]}";
Schema schema = Schema.parse(recordJson);
GenericData.Record record = new GenericData.Record(schema);
record.put("f", 11L);
check(recordJson, "{\"f\":11}", record, false);
checkParseError("{\"type\":\"record\"}");
checkParseError("{\"type\":\"record\",\"name\":\"X\"}");
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":\"Y\"}");
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":"
+"[{\"name\":\"f\"}]}"); // no type
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":"
+"[{\"type\":\"long\"}]}"); // no name
}
@Test
public void testEnum() throws Exception {
check(BASIC_ENUM_SCHEMA, "\"B\"", "B", false);
checkParseError("{\"type\":\"enum\"}"); // symbols required
}
@Test
public void testFixed() throws Exception {
check("{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}", "\"a\"",
new GenericData.Fixed(new byte[]{(byte)'a'}), false);
checkParseError("{\"type\":\"fixed\"}"); // size required
}
@Test
public void testRecursive() throws Exception {
check("{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
+"{\"name\":\"label\", \"type\":\"string\"},"
+"{\"name\":\"children\", \"type\":"
+"{\"type\": \"array\", \"items\": \"Node\" }}]}",
false);
}
@Test
public void testRecursiveEquals() throws Exception {
String jsonSchema = "{\"type\":\"record\", \"name\":\"List\", \"fields\": ["
+"{\"name\":\"next\", \"type\":\"List\"}]}";
Schema s1 = Schema.parse(jsonSchema);
Schema s2 = Schema.parse(jsonSchema);
assertEquals(s1, s2);
s1.hashCode(); // test no stackoverflow
}
@Test
public void testLisp() throws Exception {
check("{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+"{\"name\":\"value\", \"type\":[\"null\", \"string\","
+"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+"{\"name\":\"car\", \"type\":\"Lisp\"},"
+"{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
false);
}
@Test
public void testUnion() throws Exception {
check("[\"string\", \"long\"]", false);
checkDefault("[\"double\", \"long\"]", "1.1", new Double(1.1));
// check union json
String record = "{\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]}";
String fixed = "{\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1}";
String enu = "{\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
Schema union = Schema.parse("[\"null\",\"string\","
+record+","+ enu+","+fixed+"]");
checkJson(union, null, "null");
checkJson(union, new Utf8("foo"), "{\"string\":\"foo\"}");
checkJson(union,
new GenericData.Record(Schema.parse(record)),
"{\"Foo\":{}}");
checkJson(union,
new GenericData.Fixed(new byte[]{(byte)'a'}),
"{\"Bar\":\"a\"}");
checkJson(union, "X", "{\"Baz\":\"X\"}");
}
/** Non-string schema properties are ignored: getProp returns null for an array-valued prop. */
@Test
public void testComplexProp() throws Exception {
String json = "{\"type\":\"null\", \"foo\": [0]}";
Schema s = Schema.parse(json);
assertEquals(null, s.getProp("foo"));
}
/** Schema.parse(InputStream) yields the same schema as parsing the equivalent string. */
@Test
public void testParseInputStream() throws IOException {
Schema s = Schema.parse(
new ByteArrayInputStream("\"boolean\"".getBytes("UTF-8")));
assertEquals(Schema.parse("\"boolean\""), s);
}
/**
 * Namespace scoping: an unqualified nested record (Z) inherits the namespace
 * of its nearest enclosing named schema, so the same JSON for Z resolves to
 * p.Z when nested under p.X and to q.Z when nested under q.Y.
 */
@Test
public void testNamespaceScope() throws Exception {
String z = "{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}";
String y = "{\"type\":\"record\",\"name\":\"q.Y\",\"fields\":["
+"{\"name\":\"f\",\"type\":"+z+"}]}";
String x = "{\"type\":\"record\",\"name\":\"p.X\",\"fields\":["
+"{\"name\":\"f\",\"type\":"+y+"},"
+"{\"name\":\"g\",\"type\":"+z+"}"
+"]}";
Schema xs = Schema.parse(x);
Schema ys = xs.getFields().get("f").schema();
assertEquals("p.Z", xs.getFields().get("g").schema().getFullName());
assertEquals("q.Z", ys.getFields().get("f").schema().getFullName());
}
/**
 * Asserts that the given schema JSON is rejected by the parser with a
 * {@link SchemaParseException}; fails the test if it parses successfully.
 *
 * @param json schema JSON expected to be invalid
 */
private static void checkParseError(String json) {
  try {
    // Result intentionally discarded (previously bound to an unused local);
    // only the thrown exception matters.
    Schema.parse(json);
  } catch (SchemaParseException e) {
    return; // expected failure
  }
  fail("Should not have parsed: "+json);
}
/**
 * Makes sure that "doc" tags are transcribed in the schemas.
 * Note that there are docs both for fields and for the records
 * themselves.
 */
@Test
public void testDocs() {
// SCHEMA_WITH_DOC_TAGS is a shared fixture defined elsewhere in this file.
Schema schema = Schema.parse(SCHEMA_WITH_DOC_TAGS);
assertEquals("This is not a world record.", schema.getDoc());
// Field#doc() is the field-level doc; Schema#getDoc() on the field's schema
// is the doc of the nested named type itself.
assertEquals("Inner Fixed", schema.getFields().get("inner_fixed").doc());
assertEquals("Very Inner Fixed", schema.getFields().get("inner_fixed").schema().getDoc());
assertEquals("Inner String", schema.getFields().get("inner_string").doc());
assertEquals("Inner Enum", schema.getFields().get("inner_enum").doc());
assertEquals("Very Inner Enum", schema.getFields().get("inner_enum").schema().getDoc());
assertEquals("Inner Union", schema.getFields().get("inner_union").doc());
}
/** Shorthand for {@link #check(String, String, Object, boolean)} with schema induction enabled. */
private static void check(String schemaJson, String defaultJson,
Object defaultValue) throws Exception {
check(schemaJson, defaultJson, defaultValue, true);
}
/**
 * Runs the full round-trip check for the schema, then verifies that
 * {@code defaultJson} resolves to {@code defaultValue} as a field default.
 *
 * @param induce whether to also verify GenericData schema induction
 */
private static void check(String schemaJson, String defaultJson,
Object defaultValue, boolean induce)
throws Exception {
check(schemaJson, induce);
checkDefault(schemaJson, defaultJson, defaultValue);
}
/**
 * Core schema check: for COUNT random data per the schema, verifies
 * (optionally) schema induction, datum validation, binary round-trip,
 * JSON round-trip, and that the specific compiler accepts the schema.
 *
 * @param induce when true, GenericData.induce(datum) must equal the schema;
 *               pass false for recursive schemas, which cannot be induced
 */
private static void check(String jsonSchema, boolean induce)
throws Exception {
Schema schema = Schema.parse(jsonSchema);
checkProp(schema);
for (Object datum : new RandomData(schema, COUNT)) {
if (induce) {
Schema induced = GenericData.get().induce(datum);
assertEquals("Induced schema does not match.", schema, induced);
}
assertTrue("Datum does not validate against schema "+datum,
GenericData.get().validate(schema, datum));
checkBinary(schema, datum,
new GenericDatumWriter<Object>(),
new GenericDatumReader<Object>());
checkJson(schema, datum,
new GenericDatumWriter<Object>(),
new GenericDatumReader<Object>());
// Check that we can generate the code for every schema we see.
TestSpecificCompiler.assertCompiles(schema, false);
}
}
/**
 * Verifies schema-property behavior: setting a prop breaks equality with the
 * original, and the prop survives a toString/parse round-trip. Unions are
 * skipped because they cannot carry properties.
 */
private static void checkProp(Schema s0) throws Exception {
if(s0.getType().equals(Schema.Type.UNION)) return; // unions have no props
assertEquals(null, s0.getProp("foo"));
Schema s1 = Schema.parse(s0.toString());
s1.setProp("foo", "bar");
assertEquals("bar", s1.getProp("foo"));
assertFalse(s0.equals(s1));
Schema s2 = Schema.parse(s1.toString());
assertEquals("bar", s2.getProp("foo"));
assertEquals(s1, s2);
assertFalse(s0.equals(s2));
}
/**
 * Round-trips a datum through the binary encoding and asserts the decoded
 * value equals the original.
 */
private static void checkBinary(Schema schema, Object datum,
DatumWriter<Object> writer,
DatumReader<Object> reader)
throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
writer.setSchema(schema);
writer.write(datum, new BinaryEncoder(out));
byte[] data = out.toByteArray();
reader.setSchema(schema);
Object decoded =
reader.read(null, new BinaryDecoder(new ByteArrayInputStream(data)));
assertEquals("Decoded data does not match.", datum, decoded);
}
/**
 * Round-trips a datum through the JSON encoding. The datum is deliberately
 * written twice so that sequential reads from one decoder are exercised; the
 * second read reuses the first decoded object.
 */
private static void checkJson(Schema schema, Object datum,
DatumWriter<Object> writer,
DatumReader<Object> reader)
throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Encoder encoder = new JsonEncoder(schema, out);
writer.setSchema(schema);
writer.write(datum, encoder);
writer.write(datum, encoder);
encoder.flush();
byte[] data = out.toByteArray();
reader.setSchema(schema);
Decoder decoder = new JsonDecoder(schema, new ByteArrayInputStream(data));
Object decoded = reader.read(null, decoder);
assertEquals("Decoded data does not match.", datum, decoded);
decoded = reader.read(decoded, decoder);
assertEquals("Decoded data does not match.", datum, decoded);
}
/**
 * Asserts the exact JSON text produced for a datum, then decodes that text
 * and asserts it equals the original datum.
 *
 * @param json the expected UTF-8 JSON encoding of {@code datum}
 */
private static void checkJson(Schema schema, Object datum,
String json) throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Encoder encoder = new JsonEncoder(schema, out);
DatumWriter<Object> writer = new GenericDatumWriter<Object>();
writer.setSchema(schema);
writer.write(datum, encoder);
encoder.flush();
byte[] data = out.toByteArray();
String encoded = new String(data, "UTF-8");
assertEquals("Encoded data does not match.", json, encoded);
DatumReader<Object> reader = new GenericDatumReader<Object>();
reader.setSchema(schema);
Object decoded =
reader.read(null, new JsonDecoder(schema,new ByteArrayInputStream(data)));
assertEquals("Decoded data does not match.", datum, decoded);
}
/** Empty record schema used as the writer ("actual") schema in default/resolution tests. */
private static final Schema ACTUAL = // an empty record schema
Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[]}");
/**
 * Reads an empty-record stream with a reader schema whose field declares
 * {@code defaultJson} as its default, and asserts the resolved value equals
 * {@code defaultValue}; also verifies the schema survives toString/parse.
 */
@SuppressWarnings(value="unchecked")
private static void checkDefault(String schemaJson, String defaultJson,
Object defaultValue) throws Exception {
String recordJson =
"{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", "
+"\"type\":"+schemaJson+", "
+"\"default\":"+defaultJson+"}]}";
Schema expected = Schema.parse(recordJson);
DatumReader in = new GenericDatumReader(ACTUAL, expected);
GenericData.Record record = (GenericData.Record)
in.read(null, new BinaryDecoder(new ByteArrayInputStream(new byte[0])));
assertEquals("Wrong default.", defaultValue, record.get("f"));
assertEquals("Wrong toString", expected, Schema.parse(expected.toString()));
}
/**
 * A reader field absent from the writer schema and lacking a default must
 * raise AvroTypeException during resolution.
 * NOTE(review): this method is private, static and not annotated with @Test,
 * so JUnit never runs it — confirm whether it should be a real test method.
 */
@SuppressWarnings(value="unchecked")
private static void testNoDefaultField() throws Exception {
Schema expected =
Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":"+
"[{\"name\":\"f\", \"type\": \"string\"}]}");
DatumReader in = new GenericDatumReader(ACTUAL, expected);
try {
GenericData.Record record = (GenericData.Record)
in.read(null, new BinaryDecoder(new ByteArrayInputStream(new byte[0])));
} catch (AvroTypeException e) {
return;
}
fail("Should not read: "+expected);
}
/**
 * Enum resolution: a symbol shared by writer and reader ("Y") reads fine,
 * while a writer-only symbol ("X") must raise AvroTypeException.
 * NOTE(review): this method is private, static and not annotated with @Test,
 * so JUnit never runs it — confirm whether it should be a real test method.
 */
@SuppressWarnings(value="unchecked")
private static void testEnumMismatch() throws Exception {
Schema actual = Schema.parse
("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"X\",\"Y\"]}");
Schema expected = Schema.parse
("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"Y\",\"Z\"]}");
ByteArrayOutputStream out = new ByteArrayOutputStream();
DatumWriter<Object> writer = new GenericDatumWriter<Object>(actual);
Encoder encoder = new BinaryEncoder(out);
writer.write("Y", encoder);
writer.write("X", encoder);
byte[] data = out.toByteArray();
Decoder decoder = new BinaryDecoder(new ByteArrayInputStream(data));
DatumReader in = new GenericDatumReader(actual, expected);
assertEquals("Wrong value", "Y", in.read(null, decoder));
try {
in.read(null, decoder);
} catch (AvroTypeException e) {
return;
}
fail("Should not read: "+expected);
}
}
| AVRO-206. Improved checks for schema parsing. Contributed by Pat Hunt.
git-svn-id: 4501c95bbfd3a21325bd627231304976a200aade@898971 13f79535-47bb-0310-9956-ffa450edef68
| lang/java/src/test/java/org/apache/avro/TestSchema.java | AVRO-206. Improved checks for schema parsing. Contributed by Pat Hunt. | <ide><path>ang/java/src/test/java/org/apache/avro/TestSchema.java
<ide> import java.nio.ByteBuffer;
<ide> import java.util.HashMap;
<ide>
<add>import org.apache.avro.Schema.Type;
<ide> import org.apache.avro.generic.GenericArray;
<ide> import org.apache.avro.generic.GenericData;
<ide> import org.apache.avro.generic.GenericDatumReader;
<ide>
<ide> @Test
<ide> public void testNull() throws Exception {
<add> assertEquals(Schema.create(Type.NULL), Schema.parse("\"null\""));
<add> assertEquals(Schema.create(Type.NULL), Schema.parse("{\"type\":\"null\"}"));
<ide> check("\"null\"", "null", null);
<ide> }
<ide>
<ide> @Test
<ide> public void testBoolean() throws Exception {
<add> assertEquals(Schema.create(Type.BOOLEAN), Schema.parse("\"boolean\""));
<add> assertEquals(Schema.create(Type.BOOLEAN),
<add> Schema.parse("{\"type\":\"boolean\"}"));
<ide> check("\"boolean\"", "true", Boolean.TRUE);
<ide> }
<ide>
<ide> @Test
<ide> public void testString() throws Exception {
<add> assertEquals(Schema.create(Type.STRING), Schema.parse("\"string\""));
<add> assertEquals(Schema.create(Type.STRING),
<add> Schema.parse("{\"type\":\"string\"}"));
<ide> check("\"string\"", "\"foo\"", new Utf8("foo"));
<ide> }
<ide>
<ide> @Test
<ide> public void testBytes() throws Exception {
<add> assertEquals(Schema.create(Type.BYTES), Schema.parse("\"bytes\""));
<add> assertEquals(Schema.create(Type.BYTES),
<add> Schema.parse("{\"type\":\"bytes\"}"));
<ide> check("\"bytes\"", "\"\\u0000ABC\\u00FF\"",
<ide> ByteBuffer.wrap(new byte[]{0,65,66,67,-1}));
<ide> }
<ide>
<ide> @Test
<ide> public void testInt() throws Exception {
<add> assertEquals(Schema.create(Type.INT), Schema.parse("\"int\""));
<add> assertEquals(Schema.create(Type.INT), Schema.parse("{\"type\":\"int\"}"));
<ide> check("\"int\"", "9", new Integer(9));
<ide> }
<ide>
<ide> @Test
<ide> public void testLong() throws Exception {
<add> assertEquals(Schema.create(Type.LONG), Schema.parse("\"long\""));
<add> assertEquals(Schema.create(Type.LONG), Schema.parse("{\"type\":\"long\"}"));
<ide> check("\"long\"", "11", new Long(11));
<ide> }
<ide>
<ide> @Test
<ide> public void testFloat() throws Exception {
<add> assertEquals(Schema.create(Type.FLOAT), Schema.parse("\"float\""));
<add> assertEquals(Schema.create(Type.FLOAT),
<add> Schema.parse("{\"type\":\"float\"}"));
<ide> check("\"float\"", "1.1", new Float(1.1));
<ide> }
<ide>
<ide> @Test
<ide> public void testDouble() throws Exception {
<add> assertEquals(Schema.create(Type.DOUBLE), Schema.parse("\"double\""));
<add> assertEquals(Schema.create(Type.DOUBLE),
<add> Schema.parse("{\"type\":\"double\"}"));
<ide> check("\"double\"", "1.2", new Double(1.2));
<ide> }
<ide> |
|
Java | bsd-3-clause | fe620703d5f91e472dec6c26940d46ea8a6377ac | 0 | NCIP/cacore-sdk,NCIP/cacore-sdk,NCIP/cacore-sdk,NCIP/cacore-sdk,NCIP/cacore-sdk,NCIP/cacore-sdk,NCIP/cacore-sdk | package gov.nih.nci.codegen.util;
import gov.nih.nci.codegen.GenerationException;
import gov.nih.nci.codegen.validator.ValidatorAttribute;
import gov.nih.nci.codegen.validator.ValidatorClass;
import gov.nih.nci.codegen.validator.ValidatorModel;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAssociation;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAssociationEnd;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAttribute;
import gov.nih.nci.ncicb.xmiinout.domain.UMLClass;
import gov.nih.nci.ncicb.xmiinout.domain.UMLDatatype;
import gov.nih.nci.ncicb.xmiinout.domain.UMLDependency;
import gov.nih.nci.ncicb.xmiinout.domain.UMLGeneralization;
import gov.nih.nci.ncicb.xmiinout.domain.UMLInterface;
import gov.nih.nci.ncicb.xmiinout.domain.UMLModel;
import gov.nih.nci.ncicb.xmiinout.domain.UMLPackage;
import gov.nih.nci.ncicb.xmiinout.domain.UMLTaggableElement;
import gov.nih.nci.ncicb.xmiinout.domain.UMLTaggedValue;
import gov.nih.nci.ncicb.xmiinout.domain.bean.UMLAssociationEndBean;
import gov.nih.nci.ncicb.xmiinout.util.ModelUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.Vector;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.jdom.Element;
public class TransformerUtils
{
private static Logger log = Logger.getLogger(TransformerUtils.class);
// Base package prefixes stripped from fully qualified names (from the UML model file properties).
private String BASE_PKG_LOGICAL_MODEL;
private String BASE_PKG_DATA_MODEL;
// Raw comma-separated include/exclude expressions as read from configuration.
private String INCLUDE_PACKAGE;
private String EXCLUDE_PACKAGE;
private String EXCLUDE_NAME;
private String EXCLUDE_NAMESPACE;
private String IDENTITY_GENERATOR_TAG;
// The same expressions split into individual regex patterns (populated in the constructor).
private Set<String> INCLUDE_PACKAGE_PATTERNS = new HashSet<String>();
private Set<String> EXCLUDE_PACKAGE_PATTERNS = new HashSet<String>();
private Set<String> EXCLUDE_CLASS_PATTERNS = new HashSet<String>();
private Set<String> EXCLUDE_NAMESPACE_PATTERNS = new HashSet<String>();
private String DATABASE_TYPE;
// Recognized Hibernate cascade style names (value mirrors key; used for lookup).
private Map<String,String> CASCADE_STYLES = new HashMap<String,String>();
private ValidatorModel vModel;
private ValidatorModel vModelExtension;
// Default namespace URI prefix; may be overridden by the model-level GME tag when useGMETags is set.
private String namespaceUriPrefix;
private boolean useGMETags = false;
private boolean isJaxbEnabled = false;
/**
* UMLModel from which the code is to be generated
*/
private UMLModel model;
// Tag-value (TV_*) names looked up on UML model elements during transformation.
private static final String TV_ID_ATTR_COLUMN = "id-attribute";
private static final String TV_MAPPED_ATTR_COLUMN = "mapped-attributes";
private static final String TV_ASSOC_COLUMN = "implements-association";
private static final String TV_INVERSE_ASSOC_COLUMN = "inverse-of";
private static final String TV_DISCR_COLUMN = "discriminator";
private static final String TV_CORRELATION_TABLE = "correlation-table";
private static final String TV_DOCUMENTATION = "documentation";
private static final String TV_DESCRIPTION = "description";
private static final String TV_LAZY_LOAD = "lazy-load";
private static final String TV_TYPE="type";
private static final String TV_MAPPED_COLLECTION_TABLE = "mapped-collection-table";
private static final String TV_MAPPED_ELEMENT_COLUMN = "mapped-element";
private static final String TV_CADSR_PUBLICID = "CADSR_ConceptualDomainPublicID";
private static final String TV_CADSR_VERSION = "CADSR_ConceptualDomainVersion";
private static final String TV_NCI_CASCADE_ASSOCIATION = "NCI_CASCADE_ASSOCIATION";
private static final String TV_NCI_EAGER_LOAD = "NCI_EAGER_LOAD";
public static final String TV_PK_GENERATOR = "NCI_GENERATOR.";
public static final String TV_PK_GENERATOR_PROPERTY = "NCI_GENERATOR_PROPERTY";
//Global Model Exchange (GME) Project Tag Value Constants; see: https://wiki.nci.nih.gov/display/caCORE/GME+Namespace
public static final String TV_NCI_GME_XML_NAMESPACE = "NCI_GME_XML_NAMESPACE"; //Used for projects, Packages, Classes
public static final String TV_NCI_GME_XML_ELEMENT = "NCI_GME_XML_ELEMENT"; //Used for Classes
public static final String TV_NCI_GME_XML_LOC_REF = "NCI_GME_XML_LOC_REF"; //Used for Attributes
public static final String TV_NCI_GME_SOURCE_XML_LOC_REF = "NCI_GME_SOURCE_XML_LOC_REF"; //Used for Associations
public static final String TV_NCI_GME_TARGET_XML_LOC_REF = "NCI_GME_TARGET_XML_LOC_REF"; //Used for Associations
// UML stereotype names recognized on model elements.
private static final String STEREO_TYPE_TABLE = "table";
private static final String STEREO_TYPE_DATASOURCE_DEPENDENCY = "DataSource";
public static final String PK_GENERATOR_SYSTEMWIDE = "NCI_GENERATOR_SYSTEMWIDE.";
/**
 * Builds the transformer utility from configuration.
 *
 * @param umlModelFileProperties model-file settings: base packages, include/exclude
 *                               expressions, identity generator tag, database type
 * @param transformerProperties  transformer settings: namespaceUriPrefix, useGMETags,
 *                               isJaxbEnabled
 * @param cascadeStyles          recognized Hibernate cascade style names
 * @param vModel                 validator model
 * @param vModelExtension        validator model extension
 * @param model                  the UML model code is generated from
 */
public TransformerUtils(Properties umlModelFileProperties,Properties transformerProperties,List cascadeStyles, ValidatorModel vModel, ValidatorModel vModelExtension, UMLModel model) {
// Each property defaults to "" when absent; values are trimmed.
BASE_PKG_LOGICAL_MODEL = umlModelFileProperties.getProperty("Logical Model") == null ? "" :umlModelFileProperties.getProperty("Logical Model").trim();
BASE_PKG_DATA_MODEL = umlModelFileProperties.getProperty("Data Model")==null ? "" : umlModelFileProperties.getProperty("Data Model").trim();
EXCLUDE_PACKAGE = umlModelFileProperties.getProperty("Exclude Package")==null ? "" : umlModelFileProperties.getProperty("Exclude Package").trim();
INCLUDE_PACKAGE = umlModelFileProperties.getProperty("Include Package")==null ? "" : umlModelFileProperties.getProperty("Include Package").trim();
EXCLUDE_NAME = umlModelFileProperties.getProperty("Exclude Name")==null ? "" : umlModelFileProperties.getProperty("Exclude Name").trim();
EXCLUDE_NAMESPACE = umlModelFileProperties.getProperty("Exclude Namespace")==null ? "" : umlModelFileProperties.getProperty("Exclude Namespace").trim();
// Spaces in the namespace prefix are replaced with underscores to keep the URI valid.
namespaceUriPrefix = transformerProperties.getProperty("namespaceUriPrefix")==null ? "" : transformerProperties.getProperty("namespaceUriPrefix").trim().replace(" ", "_");
useGMETags = transformerProperties.getProperty("useGMETags")==null ? false : Boolean.parseBoolean(transformerProperties.getProperty("useGMETags"));
isJaxbEnabled = transformerProperties.getProperty("isJaxbEnabled")==null ? false : Boolean.parseBoolean(transformerProperties.getProperty("isJaxbEnabled"));
this.model = model;
if (useGMETags){
setModelNamespace(model,this.getBasePkgLogicalModel());
}
// Split the comma-separated expressions into individual regex patterns.
for(String excludeToken:EXCLUDE_PACKAGE.split(","))
EXCLUDE_PACKAGE_PATTERNS.add(excludeToken.trim());
for(String includeToken:INCLUDE_PACKAGE.split(","))
INCLUDE_PACKAGE_PATTERNS.add(includeToken.trim());
for(String excludeToken:EXCLUDE_NAME.split(","))
EXCLUDE_CLASS_PATTERNS.add(excludeToken.trim());
for(String excludeToken:EXCLUDE_NAMESPACE.split(","))
EXCLUDE_NAMESPACE_PATTERNS.add(excludeToken.trim());
IDENTITY_GENERATOR_TAG = umlModelFileProperties.getProperty("Identity Generator Tag") == null ? "": umlModelFileProperties.getProperty("Identity Generator Tag").trim();
DATABASE_TYPE = umlModelFileProperties.getProperty("Database Type") == null ? "": umlModelFileProperties.getProperty("Database Type").trim();
for (Object cascadeStyle : cascadeStyles){
CASCADE_STYLES.put((String) cascadeStyle, (String)cascadeStyle);
}
this.vModel = vModel;
log.debug("ValidatorModel: " + vModel);
this.vModelExtension = vModelExtension;
log.debug("ValidatorModel Extension: " + vModelExtension);
}
/**
 * Overrides the configured namespace URI prefix with the model-level GME
 * namespace tag value when one is present; logs and keeps the configured
 * value if the lookup fails.
 */
private void setModelNamespace(UMLModel model, String basePkgLogicalModel){
	//override codegen.properties NAMESPACE_PREFIX property with GME namespace tag value, if it exists
	try {
		final String gmeNamespace = getModelNamespace(model, basePkgLogicalModel);
		if (gmeNamespace != null)
			this.namespaceUriPrefix = gmeNamespace;
	} catch (GenerationException e) {
		log.error("Exception caught trying to set GME model namespace URI Prefix: ", e);
	}
}
/** Returns the configured database type (e.g. used to select PK generator tags). */
public String getDatabaseType() {
return DATABASE_TYPE;
}
/** Whether the class's fully qualified name passes the include/exclude filters. */
public boolean isIncluded(UMLClass klass) throws GenerationException
{
	return isIncluded(getFQCN(klass));
}
/** Whether the interface's fully qualified name passes the include/exclude filters. */
public boolean isIncluded(UMLInterface interfaze) throws GenerationException
{
	return isIncluded(getFQCN(interfaze));
}
/**
 * Applies the configured regex filters to a fully qualified name.
 * Exclusions win: any matching exclude-package or exclude-class pattern
 * rejects the name; otherwise at least one include pattern must match.
 */
public boolean isIncluded(String fqcn)
{
	log.debug("isIncluded(String fqcn) for fqcn: "+fqcn);
	for (String pattern : EXCLUDE_PACKAGE_PATTERNS) {
		if (Pattern.matches(pattern, fqcn))
			return false;
	}
	for (String pattern : EXCLUDE_CLASS_PATTERNS) {
		if (Pattern.matches(pattern, fqcn))
			return false;
	}
	for (String pattern : INCLUDE_PACKAGE_PATTERNS) {
		log.debug("includePkgPattern: "+pattern+"; fqcn: "+fqcn);
		if (Pattern.matches(pattern, fqcn))
			return true;
	}
	// No include pattern matched.
	return false;
}
/**
 * Whether the package passes the exclude-package filters.
 * NOTE(review): unlike {@link #isIncluded(String)}, the include loop's result
 * is shadowed by the unconditional {@code return true} at the end, so any
 * package not explicitly excluded is included regardless of the include
 * patterns — confirm whether this default-include behavior is intentional.
 */
public boolean isIncluded(UMLPackage pkg) throws GenerationException
{
String fullPkgName = getFullPackageName(pkg);
log.debug("isIncluded(UMLPackage pkg) for fullPkgName: "+fullPkgName);
for(String excludePkgPattern: EXCLUDE_PACKAGE_PATTERNS)
if (Pattern.matches(excludePkgPattern, fullPkgName))
return false;
for(String includePkgPattern: INCLUDE_PACKAGE_PATTERNS)
if (Pattern.matches(includePkgPattern, fullPkgName))
return true;
return true;
}
/**
 * Whether the class's XML namespace passes the exclude-namespace filters.
 * The namespace comes from the class's GME tag when present; otherwise it is
 * {@code defaultNamespacePrefix + full package name}.
 *
 * @throws GenerationException if the GME namespace tag cannot be read
 */
public boolean isNamespaceIncluded(UMLClass klass, String defaultNamespacePrefix) throws GenerationException
{
String pkgNamespace=null;
try {
pkgNamespace = getGMENamespace(klass);
} catch (GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME Namespace tag value for: " + getFullPackageName(klass.getPackage()), ge);
}
if (pkgNamespace==null) //use default namespace
pkgNamespace = defaultNamespacePrefix+getFullPackageName(klass);
log.debug("* * * * * pkgNamespace:"+pkgNamespace);
for(String excludePkgNamespacePattern: EXCLUDE_NAMESPACE_PATTERNS)
if(Pattern.matches(excludePkgNamespacePattern,pkgNamespace)){
return false;
}
return true;
}
/**
 * Returns {@code count} tab characters (despite the name, the output is tabs,
 * not spaces — used for indentation in generated artifacts).
 * Previously built with repeated String concatenation (O(n^2)) and an Integer
 * loop variable that boxed on every iteration; now uses a StringBuilder.
 *
 * @param count number of tabs; zero or negative yields ""
 */
public String getEmptySpace(Integer count)
{
	StringBuilder spaces = new StringBuilder();
	for (int i = 0; i < count; i++)
		spaces.append('\t');
	return spaces.toString();
}
/**
 * Fully qualified element name for a taggable element: class FQCN for a
 * UMLClass, full package name for a UMLPackage; anything else is an error.
 */
public String getFQEN(UMLTaggableElement elt) throws GenerationException {
	if (elt instanceof UMLClass) {
		return getFQCN((UMLClass) elt);
	}
	if (elt instanceof UMLPackage) {
		return getFullPackageName((UMLPackage) elt);
	}
	throw new GenerationException("Error getting fully qualified element name. Supported taggable element types include UMLClass and UMLPackage; element is neither");
}
/** Fully qualified class name with the configured base package prefix removed. */
public String getFQCN(UMLClass klass)
{
return removeBasePackage(ModelUtil.getFullName(klass));
}
/** Fully qualified interface name with the configured base package prefix removed. */
public String getFQCN(UMLInterface interfaze)
{
return removeBasePackage(ModelUtil.getFullName(interfaze));
}
/**
 * Full package name (base package stripped) for a class, interface or
 * package; returns "" for any other taggable element type.
 */
public String getFullPackageName(UMLTaggableElement te)
{
	String rawName;
	if (te instanceof UMLClass)
		rawName = ModelUtil.getFullPackageName((UMLClass) te);
	else if (te instanceof UMLInterface)
		rawName = ModelUtil.getFullPackageName((UMLInterface) te);
	else if (te instanceof UMLPackage)
		rawName = ModelUtil.getFullPackageName((UMLPackage) te);
	else
		return "";
	return removeBasePackage(rawName);
}
/**
 * Strips whichever configured base-package prefix (logical model first, then
 * data model) begins the given dotted path; returns the path unchanged when
 * neither prefix matches.
 */
private String removeBasePackage(String path)
{
	final String logicalPrefix = BASE_PKG_LOGICAL_MODEL + ".";
	if (path.startsWith(logicalPrefix))
		return path.substring(logicalPrefix.length());
	final String dataPrefix = BASE_PKG_DATA_MODEL + ".";
	if (path.startsWith(dataPrefix))
		return path.substring(dataPrefix.length());
	return path;
}
/** Returns the configured logical-model base package name. */
public String getBasePkgLogicalModel(){
return BASE_PKG_LOGICAL_MODEL;
}
/**
 * Returns the single superclass of the given class, or null when it has none.
 *
 * @throws GenerationException if the class has more than one superclass
 */
public UMLClass getSuperClass(UMLClass klass) throws GenerationException
{
	UMLClass[] parents = ModelUtil.getSuperclasses(klass);
	if (parents.length > 1)
		throw new GenerationException("Class can not have more than one super class");
	if (parents.length == 0) {
		log.debug("*** Getting superclass for class " + klass.getName() + ": " + null);
		return null;
	}
	log.debug("*** Getting superclass for class " + klass.getName() + ": " + parents[0].getName());
	return parents[0];
}
/**
 * Returns the "extends X" clause for the generated class, or the empty string
 * when the class has no superclass.
 * Previously branched on isJaxbEnabled() in the no-superclass case but
 * returned "" from both branches; the dead branch has been collapsed.
 */
public String getSuperClassString(UMLClass klass) throws GenerationException
{
	UMLClass superClass = getSuperClass(klass);
	if (superClass == null)
		return "";
	return "extends " + superClass.getName();
}
/**
 * Returns all direct super-interfaces of the given interface, or null
 * (not an empty array) when it has none.
 */
public UMLInterface[] getSuperInterface(UMLInterface interfaze) throws GenerationException
{
	UMLInterface[] parents = ModelUtil.getSuperInterfaces(interfaze);
	if (parents.length == 0) {
		log.debug("*** Getting superinterface for interface " + interfaze.getName() + ": " + null);
		return null;
	}
	log.debug("*** Getting superinterface for interface " + interfaze.getName() + ": " + parents[0].getName());
	return parents;
}
/**
 * Returns the "extends A, B, ..." clause listing the interface's
 * super-interfaces, or "" when it has none.
 */
public String getSuperInterfaceString(UMLInterface interfaze) throws GenerationException
{
	UMLInterface[] supers = getSuperInterface(interfaze);
	if (supers == null)
		return "";
	StringBuilder clause = new StringBuilder("extends ");
	clause.append(supers[0].getName());
	for (int i = 1; i < supers.length; i++)
		clause.append(", ").append(supers[i].getName());
	return clause.toString();
}
/**
 * Returns all interfaces realized by the given class, or null (not an empty
 * array) when it realizes none.
 */
public UMLInterface[] getInterfaces(UMLClass klass) throws GenerationException
{
	UMLInterface[] interfaces = ModelUtil.getInterfaces(klass);
	if (interfaces.length == 0) {
		log.debug("*** Getting interface for class " + klass.getName() + ": " + null);
		return null;
	}
	// Fixed copy-pasted log message: this method reports interfaces, not the superclass.
	log.debug("*** Getting interface for class " + klass.getName() + ": " + interfaces[0].getName());
	return interfaces;
}
/**
 * Returns ", A, B, ..." for the interfaces the class realizes (note the
 * deliberate leading ", " — the caller appends this after other text), or ""
 * when the class realizes none.
 */
public String getInterfaceString(UMLClass klass) throws GenerationException
{
	UMLInterface[] interfaces = getInterfaces(klass);
	if (interfaces == null)
		return "";
	StringBuilder sb = new StringBuilder();
	for (UMLInterface interfaze : interfaces)
		sb.append(", ").append(interfaze.getName());
	return sb.toString();
}
/**
 * Builds the "import x;\n" lines needed by a generated interface: one import
 * per super-interface that lives in a different package (duplicates removed).
 */
public String getInterfaceImports(UMLInterface interfaze) throws GenerationException
{
	Set<String> imports = new HashSet<String>();
	String pkgName = getFullPackageName(interfaze);
	// Only super-interfaces outside this interface's own package need importing.
	for (UMLInterface superInterfaze : ModelUtil.getSuperInterfaces(interfaze)) {
		if (!pkgName.equals(getFullPackageName(superInterfaze)))
			imports.add(getFQCN(superInterfaze));
	}
	StringBuilder sb = new StringBuilder();
	for (String importClass : imports)
		sb.append("import ").append(importClass).append(";\n");
	return sb.toString();
}
/**
 * Builds the "import x;\n" lines for a generated class: the superclass and
 * realized interfaces when in another package, java.util.Collection when any
 * attribute or navigable to-many association needs it, the classes at the far
 * end of associations in other packages, and any Hibernate Validator
 * constraint imports.
 *
 * @throws GenerationException if the class has more than one superclass
 */
public String getImports(UMLClass klass) throws GenerationException
{
StringBuilder sb = new StringBuilder();
Set<String> importList = new HashSet<String>();
UMLClass[] superClasses = ModelUtil.getSuperclasses(klass);
UMLInterface[] interfaces = ModelUtil.getInterfaces(klass);
if(superClasses.length>1)
throw new GenerationException("Class can not have more than one super classes")	;
String pkgName = getFullPackageName(klass);
// Superclass from a different package must be imported.
if(superClasses.length == 1)
{
String superPkg = getFullPackageName(superClasses[0]);
if(!pkgName.equals(superPkg))
importList.add(getFQCN(superClasses[0]));
}
// Realized interfaces from different packages must be imported.
for (UMLInterface interfaze : interfaces) {
String interfacePkg = getFullPackageName(interfaze);
if (!pkgName.equals(interfacePkg))
importList.add(getFQCN(interfaze));
}
// Any Collection-typed attribute pulls in java.util.Collection (once).
for(UMLAttribute attr: klass.getAttributes())
{
if(getDataType(attr).startsWith("Collection") && !importList.contains("java.util.Collection"))
{
importList.add("java.util.Collection");
break;
}
}
// Associated classes in other packages, plus Collection for navigable to-many ends.
for(UMLAssociation association: klass.getAssociations())
{
List<UMLAssociationEnd> assocEnds = association.getAssociationEnds();
UMLAssociationEnd otherEnd = getOtherEnd(klass,assocEnds);
String assocKlass = getFQCN ((UMLClass)otherEnd.getUMLElement());
if(!pkgName.equals(getFullPackageName ((UMLClass)otherEnd.getUMLElement())) && !importList.contains(assocKlass))
importList.add(assocKlass);
if(isAssociationEndMany(otherEnd) && otherEnd.isNavigable()&& !importList.contains("java.util.Collection"))
importList.add("java.util.Collection");
}
importList.addAll(getHibernateValidatorConstraintImports(klass));
for(String importClass:importList)
sb.append("import ").append(importClass).append(";\n");
return sb.toString();
}
/**
 * Maps a UML attribute's datatype name to the Java type used in generated
 * code: primitives/wrappers normalize to their wrapper class, "date" to
 * java.util.Date, and "collection&lt;x&gt;" forms to Collection&lt;Wrapper&gt;.
 * Unrecognized names are logged as errors and returned unchanged.
 */
public String getDataType(UMLAttribute attr)
{
UMLDatatype dataType = attr.getDatatype();
String name = dataType.getName();
if(dataType instanceof UMLClass)
name = getFQCN((UMLClass)dataType);
// Strip a java.lang. prefix so the comparisons below match.
if(name.startsWith("java.lang."))
name = name.substring("java.lang.".length());
if("int".equalsIgnoreCase(name) || "integer".equalsIgnoreCase(name))
return "Integer";
if("double".equalsIgnoreCase(name))
return "Double";
if("float".equalsIgnoreCase(name))
return "Float";
if("long".equalsIgnoreCase(name))
return "Long";
if("string".equalsIgnoreCase(name))
return "String";
if("char".equalsIgnoreCase(name) || "character".equalsIgnoreCase(name))
return "Character";
if("boolean".equalsIgnoreCase(name) )
return "Boolean";
if("byte".equalsIgnoreCase(name) )
return "Byte";
if("byte[]".equalsIgnoreCase(name) )
return "byte[]";
if("short".equalsIgnoreCase(name) )
return "Short";
if("date".equalsIgnoreCase(name) || "java.util.date".equalsIgnoreCase(name))
return "java.util.Date";
if("collection<int>".equalsIgnoreCase(name) || "collection<integer>".equalsIgnoreCase(name))
return "Collection<Integer>";
if("collection<double>".equalsIgnoreCase(name))
return "Collection<Double>";
if("collection<float>".equalsIgnoreCase(name))
return "Collection<Float>";
if("collection<long>".equalsIgnoreCase(name))
return "Collection<Long>";
if("collection<string>".equalsIgnoreCase(name))
return "Collection<String>";
if("collection<boolean>".equalsIgnoreCase(name))
return "Collection<Boolean>";
if("collection<byte>".equalsIgnoreCase(name))
return "Collection<Byte>";
if("collection<short>".equalsIgnoreCase(name))
return "Collection<Short>";
if("collection<char>".equalsIgnoreCase(name) || "collection<character>".equalsIgnoreCase(name))
return "Collection<Character>";
log.error("Unknown data type = "+name);
return name;
}
/**
 * Collects primary-key generator settings for the table column mapped to the
 * class's id attribute.  When a database-specific NCI_GENERATOR tag is
 * present, its generator class plus any NCI_GENERATOR_PROPERTY "name:value"
 * pairs are returned; otherwise the system-wide identity generator is used.
 *
 * @param table       the data-model table class
 * @param fqcn        fully qualified name of the domain class
 * @param classIdAttr the domain class's id attribute
 * @return map of generator tag names to values
 * @throws GenerationException if the mapped column cannot be resolved
 */
public HashMap<String, String> getPKGeneratorTags(UMLClass table,String fqcn,UMLAttribute classIdAttr) throws GenerationException {
	HashMap<String, String> pkTags = new HashMap<String, String>();
	String pkgenClassKey = TV_PK_GENERATOR + DATABASE_TYPE;
	UMLAttribute tableIdAttribute = getMappedColumn(table, fqcn + "." + classIdAttr.getName());
	Collection<UMLTaggedValue> tableTaggedValues = tableIdAttribute.getTaggedValues();
	String pkGeneratorClass = getTagValue(tableTaggedValues, pkgenClassKey, 1);
	if (pkGeneratorClass != null && !("".equals(pkGeneratorClass))) {
		for (int i = 1; i <= tableTaggedValues.size(); i++) {
			String pkgenProp = TV_PK_GENERATOR_PROPERTY + i + "." + DATABASE_TYPE;
			String pkParam = getTagValue(tableTaggedValues, pkgenProp, 1);
			// Guard against a missing property tag (getTagValue can return null)
			// and against values lacking a "name:value" pair; previously these
			// raised NullPointerException / NoSuchElementException respectively.
			if (pkParam != null) {
				StringTokenizer tokenizer = new StringTokenizer(pkParam, ":");
				if (tokenizer.countTokens() >= 2) {
					pkTags.put(tokenizer.nextToken(), tokenizer.nextToken());
				}
			}
		}
		pkTags.put(pkgenClassKey, pkGeneratorClass);
	} else {
		// No class-specific generator: fall back to the system-wide identity generator.
		pkTags.put(PK_GENERATOR_SYSTEMWIDE + DATABASE_TYPE, IDENTITY_GENERATOR_TAG);
	}
	return pkTags;
}
/**
 * Maps an attribute's datatype to the Hibernate type name for the mapping
 * file.  A column tagged type=CLOB maps to "text" (String attributes only);
 * byte[] maps to Spring's BlobByteArrayType; primitives map to Hibernate's
 * lowercase type names; anything unrecognized is logged and returned as-is.
 *
 * @throws GenerationException if the mapped column cannot be resolved, or a
 *         CLOB column is paired with a non-String attribute
 */
public String getHibernateDataType(UMLClass klass, UMLAttribute attr) throws GenerationException
{
log.debug("getHibernateDataType for klass: " + klass.getName() + ", attr: " + attr.getName());
String fqcn = getFQCN(klass);
UMLClass table = getTable(klass);
UMLAttribute col = getMappedColumn(table,fqcn+"."+attr.getName());
Boolean isClob = "CLOB".equalsIgnoreCase(getTagValue(col.getTaggedValues(),TV_TYPE, 1));
UMLDatatype dataType = attr.getDatatype();
String name = dataType.getName();
if(dataType instanceof UMLClass)
name = getFQCN((UMLClass)dataType);
if(name.startsWith("java.lang."))
name = name.substring("java.lang.".length());
// CLOB columns are only valid for String attributes and map to Hibernate "text".
if(isClob && "string".equalsIgnoreCase(name))
return "text";
if(isClob && !"string".equalsIgnoreCase(name))
throw new GenerationException("Can not map CLOB to anything other than String");
if("byte[]".equalsIgnoreCase(name))
return "org.springframework.orm.hibernate3.support.BlobByteArrayType";
if("int".equalsIgnoreCase(name) || "integer".equalsIgnoreCase(name))
return "integer";
if("double".equalsIgnoreCase(name))
return "double";
if("float".equalsIgnoreCase(name))
return "float";
if("long".equalsIgnoreCase(name))
return "long";
if("string".equalsIgnoreCase(name))
return "string";
if("char".equalsIgnoreCase(name) || "character".equalsIgnoreCase(name))
return "character";
if("boolean".equalsIgnoreCase(name) )
return "boolean";
if("byte".equalsIgnoreCase(name) )
return "byte";
if("short".equalsIgnoreCase(name) )
return "short";
if("date".equalsIgnoreCase(name) || "java.util.date".equalsIgnoreCase(name))
return "java.util.Date";
log.info("Type = "+name);
return name;
}
/** JavaBean getter name for the attribute, e.g. "name" -> "getName". */
public String getGetterMethodName(UMLAttribute attr)
{
	String name = attr.getName();
	return "get" + name.substring(0, 1).toUpperCase() + name.substring(1);
}
/** JavaBean setter name for the attribute, e.g. "name" -> "setName". */
public String getSetterMethodName(UMLAttribute attr)
{
	String name = attr.getName();
	return "set" + name.substring(0, 1).toUpperCase() + name.substring(1);
}
/**
 * Returns the association end attached to the given class.
 *
 * @throws GenerationException if neither end is attached to {@code klass}
 */
public UMLAssociationEnd getThisEnd(UMLClass klass, List<UMLAssociationEnd>assocEnds) throws GenerationException
{
	UMLAssociationEnd first = assocEnds.get(0);
	UMLAssociationEnd second = assocEnds.get(1);
	if (first.getUMLElement().equals(klass))
		return first;
	if (second.getUMLElement().equals(klass))
		return second;
	throw new GenerationException("Could not figureout this end");
}
/**
 * Returns the association end opposite the given class.
 *
 * @throws GenerationException if neither end is attached to {@code klass}
 */
public UMLAssociationEnd getOtherEnd(UMLClass klass, List<UMLAssociationEnd>assocEnds) throws GenerationException
{
	UMLAssociationEnd first = assocEnds.get(0);
	UMLAssociationEnd second = assocEnds.get(1);
	if (first.getUMLElement().equals(klass))
		return second;
	if (second.getUMLElement().equals(klass))
		return first;
	throw new GenerationException("Could not figureout other end" );
}
/**
 * An end is "many" when either multiplicity bound is unbounded
 * (encoded in the model as a negative value).
 */
public Boolean isAssociationEndMany(UMLAssociationEnd assocEnd)
{
	return (assocEnd.getHighMultiplicity() < 0) || (assocEnd.getLowMultiplicity() < 0);
}
/**
 * Convenience overload: resolves the end's participating class and
 * delegates to the {@code UMLClass} variant of this check.
 */
public Boolean isImplicitParent(UMLAssociationEnd assocEnd)
{
	UMLClass endClass = (UMLClass) assocEnd.getUMLElement();
	return isImplicitParent(endClass);
}
/**
 * Builds the JavaBean getter name for an association end's role,
 * e.g. role "owner" yields "getOwner".
 */
public String getGetterMethodName(UMLAssociationEnd assocEnd)
{
	String roleName = assocEnd.getRoleName();
	StringBuilder method = new StringBuilder("get");
	method.append(roleName.substring(0, 1).toUpperCase());
	method.append(roleName.substring(1));
	return method.toString();
}
/**
 * Builds the JavaBean setter name for an association end's role,
 * e.g. role "owner" yields "setOwner".
 */
public String getSetterMethodName(UMLAssociationEnd assocEnd)
{
	String roleName = assocEnd.getRoleName();
	StringBuilder method = new StringBuilder("set");
	method.append(roleName.substring(0, 1).toUpperCase());
	method.append(roleName.substring(1));
	return method.toString();
}
/**
 * True when both ends of the association attach to the same UML element
 * (a self/recursive association).
 */
public Boolean isSelfAssociation(UMLAssociationEnd assocEnd1,UMLAssociationEnd assocEnd2)
{
	Object firstElement = assocEnd1.getUMLElement();
	return firstElement.equals(assocEnd2.getUMLElement());
}
/**
 * Returns the JavaBean getter name for the class's id attribute, or null
 * when no id attribute can be resolved.
 * (The "Mthod" typo in the method name is preserved for caller compatibility.)
 *
 * @throws GenerationException propagated from the id-attribute lookup
 */
public String getClassIdGetterMthod(UMLClass klass) throws GenerationException
{
	String idAttrName = getClassIdAttrName(klass);
	if (idAttrName == null) return null;
	// Reuse the already-resolved name; the original re-ran the model walk a second time.
	return "get" + firstCharUpper(idAttrName);
}
/**
 * Upper-cases the first character of the given string; null and empty
 * inputs are returned unchanged.
 */
private String firstCharUpper(String data)
{
	if (data == null || data.isEmpty()) {
		return data;
	}
	char first = Character.toUpperCase(data.charAt(0));
	return first + data.substring(1);
}
/**
 * Returns the name of the class's id attribute, or null when no id
 * attribute can be resolved.
 *
 * @throws GenerationException propagated from the id-attribute lookup
 */
public String getClassIdAttrName(UMLClass klass) throws GenerationException
{
	UMLAttribute idAttr = getClassIdAttr(klass);
	if (idAttr == null) return null;
	// Reuse the attribute already found; the original performed the
	// (recursive, potentially expensive) lookup a second time.
	return idAttr.getName();
}
/**
 * Resolves the attribute that maps to the class's primary-key identifier.
 * Resolution order: (1) an attribute tagged as the id column whose tag value
 * prefixes this class's FQCN, (2) an attribute literally named "id",
 * (3) recursively, the id attribute of a superclass. Returns null when none
 * is found (a thrown exception here was deliberately disabled, see below).
 */
public UMLAttribute getClassIdAttr(UMLClass klass) throws GenerationException
{
String fqcn = getFQCN(klass);
// Tag-based lookup: prefix match against the FQCN; optional (min 0, max 1).
UMLAttribute idAttr = getColumn(klass,TV_ID_ATTR_COLUMN, fqcn,true,0,1);
if(idAttr !=null) return idAttr;
String idAttrName = "id";
for(UMLAttribute attribute:klass.getAttributes())
if(idAttrName.equals(attribute.getName()))
return attribute;
// Fall back to a superclass id; the guard avoids recursing on self-generalizations.
for(UMLGeneralization gen: klass.getGeneralizations())
{
if(gen.getSubtype() == klass && gen.getSupertype() != klass)
{
UMLAttribute superId = getClassIdAttr((UMLClass)gen.getSupertype());
if(superId != null)
return superId;
}
}
return null;
//throw new GenerationException("No attribute found that maps to the primary key identifier for class : "+fqcn);
}
/**
 * True when the attribute's generated data type is a Collection
 * (data-type string starts with "Collection").
 *
 * @throws GenerationException propagated from the data-type resolution
 */
public Boolean isCollection(UMLClass klass, UMLAttribute attr ) throws GenerationException
{
	String dataType = getDataType(attr);
	return dataType.startsWith("Collection");
}
/**
 * Returns true when the attribute carries a "static" tagged value equal to
 * "1" (case-insensitive). Absence of the tag means non-static.
 */
public boolean isStatic(UMLAttribute att){
UMLTaggedValue tValue = att.getTaggedValue("static");
if (tValue == null) {
return false;
}
log.debug("UMLAttribute 'static' Tagged Value: " + tValue.getValue());
return ("1".equalsIgnoreCase(tValue.getValue()));
}
/**
 * True when the UML class is marked abstract in the model.
 */
public boolean isAbstract(UMLClass klass){
	boolean abstractFlag = klass.getAbstractModifier().isAbstract();
	return abstractFlag;
}
/**
 * Returns the value of the association end's "type" tagged value,
 * or the empty string when the tag is absent.
 */
public String getType(UMLAssociationEnd assocEnd){
	UMLTaggedValue tValue = assocEnd.getTaggedValue("type");
	if (tValue != null) {
		log.debug("UMLAttribute Type Tagged Value: " + tValue.getValue());
		return tValue.getValue();
	}
	return "";
}
/**
 * Returns the end of the owning association that is not the given end,
 * or null when no other end exists.
 */
public UMLAssociationEnd getOtherAssociationEnd(UMLAssociationEnd assocEnd) {
	for (Object candidate : assocEnd.getOwningAssociation().getAssociationEnds()) {
		UMLAssociationEnd end = (UMLAssociationEnd) candidate;
		if (end != assocEnd) {
			return end;
		}
	}
	return null;
}
/**
 * XSD-style upper bound for the end's multiplicity: "unbounded" when the
 * model stores -1, otherwise the decimal value of the high multiplicity.
 */
public String getUpperBound(UMLAssociationEnd otherEnd) {
	int multiplicity = otherEnd.getHighMultiplicity();
	// Avoid the deprecated Integer(int) constructor and the dead new String().
	return (multiplicity == -1) ? "unbounded" : Integer.toString(multiplicity);
}
/**
 * XSD-style lower bound for the end's multiplicity: "unbounded" when the
 * model stores -1, otherwise the decimal value of the low multiplicity.
 */
public String getLowerBound(UMLAssociationEnd otherEnd) {
	int multiplicity = otherEnd.getLowMultiplicity();
	// Avoid the deprecated Integer(int) constructor and the dead new String().
	return (multiplicity == -1) ? "unbounded" : Integer.toString(multiplicity);
}
/**
 * Returns the display multiplicity of an association end. Prefers the raw
 * "multiplicity" attribute of the backing JDOM element; otherwise derives
 * "low..high" from the parsed bounds, where a negative bound means
 * unbounded/unspecified and is omitted.
 */
public String getMultiplicityValue(UMLAssociationEnd assocEnd){
// Reach through the bean implementation to the literal XMI attribute.
Element element = ((UMLAssociationEndBean)assocEnd).getJDomElement();
org.jdom.Attribute multAtt = element.getAttribute("multiplicity");
//log.debug("associationEnd: " + assocEnd.getRoleName() + "; multiplicity: " + multAtt.getValue());
if (multAtt!=null)
return multAtt.getValue();
int low = assocEnd.getLowMultiplicity();
int high = assocEnd.getHighMultiplicity();
if(low <0 && high<0)
return "";
if(low >=0 && high>=0)
return low+".."+high;
// Only one bound is defined: return just that bound.
if(low<0)
return high+"";
return low+"";
}
/**
 * A multiplicity string is valid when it is non-empty and neither starts
 * nor ends with a "." (i.e. both bounds of "low..high" are present).
 */
public boolean isMultiplicityValid(UMLAssociationEnd assocEnd){
	String multValue = getMultiplicityValue(assocEnd);
	boolean missing = (multValue == null) || "".equalsIgnoreCase(multValue);
	if (missing) {
		return false;
	}
	boolean malformed = multValue.startsWith(".") || multValue.endsWith(".");
	return !malformed;
}
/**
 * True when this end is multi-valued and the other end is single-valued
 * (a many-to-one association from this end's perspective).
 */
public boolean isMany2One(UMLAssociationEnd thisEnd, UMLAssociationEnd otherEnd) {
	boolean thisMany = isAssociationEndMany(thisEnd);
	boolean otherMany = isAssociationEndMany(otherEnd);
	return thisMany && !otherMany;
}
/**
 * True when this end is multi-valued, the other end is single-valued, and
 * the other end's class is an implicit (table-less) parent.
 * NOTE(review): the multiplicity test is identical to isMany2One; confirm
 * this is the intended definition of an "any" mapping.
 * @param thisEnd
 * @param otherEnd
 * @return
 */
public boolean isAny(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
return isAssociationEndMany(thisEnd) && !isAssociationEndMany(otherEnd) && isImplicitParent(otherEnd);
}
/**
 * True when this end is single-valued and the other end is multi-valued
 * (a one-to-many association from this end's perspective).
 */
public boolean isOne2Many(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
	boolean thisMany = isAssociationEndMany(thisEnd);
	boolean otherMany = isAssociationEndMany(otherEnd);
	return !thisMany && otherMany;
}
/**
 * True when both ends are multi-valued (a many-to-many association).
 */
public boolean isMany2Many(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
	boolean thisMany = isAssociationEndMany(thisEnd);
	boolean otherMany = isAssociationEndMany(otherEnd);
	return thisMany && otherMany;
}
/**
 * True when both ends are multi-valued and the other end's class is an
 * implicit (table-less) parent.
 * NOTE(review): unlike isAny, this requires the OTHER end to also be many —
 * confirm the asymmetry with isAny is intentional.
 * @param thisEnd
 * @param otherEnd
 * @return
 */
public boolean isMany2Any(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
return isAssociationEndMany(thisEnd) && isAssociationEndMany(otherEnd) && isImplicitParent(otherEnd);
}
/**
 * True when both ends are single-valued (a one-to-one association).
 */
public boolean isOne2One(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
	boolean thisMany = isAssociationEndMany(thisEnd);
	boolean otherMany = isAssociationEndMany(otherEnd);
	return !thisMany && !otherMany;
}
/**
 * Returns the association ends owned directly by the class; inherited
 * ends are not included.
 */
public Collection getAssociationEnds(UMLClass klass) {
	boolean includeInherited = false;
	return getAssociationEnds(klass, includeInherited);
}
/**
 * Collects the association ends attached to the given class.
 * NOTE(review): includeInherited is NOT implemented (see TODO below); when
 * passed true, superClass is never advanced and the while loop never
 * terminates — do not enable that flag until the TODO is completed.
 *
 * @param klass the class whose ends are collected
 * @param includeInherited intended to also walk superclasses (unimplemented)
 * @return the ends whose attached element is the class itself
 */
public Collection getAssociationEnds(UMLClass klass,
boolean includeInherited) {
log.debug("class = " + klass.getName() + ", includeInherited = "
+ includeInherited);
List<UMLAssociationEnd> assocEndsList = new ArrayList<UMLAssociationEnd>();
UMLClass superClass = klass;
while (superClass != null) {
Collection assocs = superClass.getAssociations();
log.debug( superClass.getName() + " association collection size(): " + assocs.size());
for (Iterator i = assocs.iterator(); i.hasNext();) {
UMLAssociation assoc = (UMLAssociation) i.next();
for (UMLAssociationEnd ae:assoc.getAssociationEnds()){
UMLAssociationEnd otherEnd = getOtherAssociationEnd(ae);
// "id" is debug-only: "<other-end class name>(<other-end FQCN>)".
String id = ((UMLClass)(otherEnd.getUMLElement())).getName() + Constant.LEFT_BRACKET
+ getFQCN((UMLClass)(otherEnd.getUMLElement())) + Constant.RIGHT_BRACKET;
log.debug("id (otherEnd): " + id);
log.debug("superClass: " + superClass.getName());
// Keep only ends anchored at the class currently being examined.
if ((UMLClass)ae.getUMLElement() == superClass) {
log.debug("adding association: " + id + " for class " + superClass.getName());
assocEndsList.add(ae);
}
}
}
if (includeInherited) {
// TODO :: Implement includeInherited
// Collection gens = superClass.getGeneralization();
// if (gens.size() > 0) {
// superClass = (Classifier) ((Generalization) gens.iterator()
// .next()).getParent();
// } else {
// superClass = null;
// }
log.debug("Need to implement includeInherited");
} else {
superClass = null;
}
}
return assocEndsList;
}
/**
 * Recursively walks the package tree, grouping included classes by full
 * package name into {@code pkgColl}. Classes with the "table" stereotype
 * and excluded classes/packages are skipped; sub-packages are always
 * descended into, even under an excluded parent package.
 *
 * @param nextLevelPackages the packages at the current tree level
 * @param pkgColl accumulator: package name -> classes in that package
 */
public void collectPackages(Collection<UMLPackage> nextLevelPackages, Hashtable<String, Collection<UMLClass>> pkgColl) throws GenerationException
{
for(UMLPackage pkg:nextLevelPackages){
if (isIncluded(pkg)){
String pkgName=getFullPackageName(pkg);
log.debug("including package: " + pkgName);
Collection<UMLClass> pkgClasses = pkg.getClasses();
if (pkgClasses != null && pkgClasses.size() > 0){
for (UMLClass klass:pkgClasses){
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass)) {
// First class seen for this package creates the bucket.
if(!pkgColl.containsKey(pkgName)) {
List<UMLClass> classes = new ArrayList<UMLClass>();
classes.add(klass);
pkgColl.put(pkgName, classes);
} else {
Collection<UMLClass> existingCollection = pkgColl.get(pkgName);
existingCollection.add(klass);
}
}
}
}
} else{
log.debug("excluding package: " + pkg.getName());
}
// Recurse regardless of whether this package itself was included.
collectPackages(pkg.getPackages(), pkgColl);
}
}
/**
 * Groups the given classes by their GME namespace. The package name comes
 * from the GME tag when present, else from the model package; the bucket
 * key is the GME namespace, suffixed with the package name when the
 * namespace does not already end with it.
 *
 * @param allClasses classes to group
 * @param pkgColl accumulator: namespace -> classes in that namespace
 * @param defaultNamespacePrefix prefix used by the namespace-inclusion check
 */
public void collectPackages(Collection<UMLClass> allClasses, Hashtable<String, Collection<UMLClass>> pkgColl,String defaultNamespacePrefix)
throws GenerationException {
String pkgName=null;
String pkgNamespace=null;
for(UMLClass klass:allClasses){
pkgName = getGMEPackageName(klass);
if (pkgName == null)
pkgName=getFullPackageName(klass);
log.debug("processing klass: " + klass.getName() + " of package " + pkgName);
if (isNamespaceIncluded(klass,defaultNamespacePrefix)){
log.debug("including package: " + pkgName);
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass)) { //No longer using GME ClassName; e.g., no longer using isIncluded(pkgName+"."+getClassName(klass))) {
pkgNamespace=getGMENamespace(klass);
// Append the package name unless the namespace already ends with it.
if (pkgNamespace !=null && (pkgNamespace.endsWith("/") || !pkgNamespace.endsWith(pkgName)))
pkgNamespace=pkgNamespace+pkgName;
log.debug("pkgNamespace: " + pkgNamespace);
if(!pkgColl.containsKey(pkgNamespace)) {
List<UMLClass> classes = new ArrayList<UMLClass>();
classes.add(klass);
pkgColl.put(pkgNamespace, classes);
} else {
Collection<UMLClass> existingCollection = pkgColl.get(pkgNamespace);
existingCollection.add(klass);
}
}
} else{
log.debug("excluding class: " +klass.getName()+" with package: " + pkgName);
}
}
}
/**
 * Resolves the GME package name for a class: first from the class's own
 * namespace-package tag, then (recursively) from its enclosing packages.
 * Returns null when no GME package name is defined anywhere up the chain.
 *
 * @throws GenerationException when reading the class-level tag fails
 */
public String getGMEPackageName(UMLClass klass) throws GenerationException{
String namespacePkgName = null;
try {
namespacePkgName = getNamespacePackageName(klass);
if (namespacePkgName!=null && namespacePkgName.length()>0)
return namespacePkgName;
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME package name for: " + getFQEN(klass), ge);
}
// Fall back to the package hierarchy.
namespacePkgName=getGMEPackageName(klass.getPackage());
if (namespacePkgName!=null && namespacePkgName.length()>0)
return namespacePkgName;
log.debug("GME Package name not found for: "+getFullPackageName(klass)+". Returning null");
return null;
}
/**
 * Resolves the GME package name for a package, walking up the parent
 * chain until a namespace-package tag is found; null at the root.
 */
public String getGMEPackageName(UMLPackage pkg) throws GenerationException{
	if (pkg==null) {
		return null;
	}
	log.debug("Getting Package Name for: " +pkg.getName());
	String namespacePkgName = getNamespacePackageName(pkg);
	boolean found = (namespacePkgName != null) && (namespacePkgName.length() > 0);
	if (found) {
		return namespacePkgName;
	}
	return getGMEPackageName(pkg.getParent());
}
/**
 * Returns the GME XML element name for the class when one is tagged,
 * otherwise the plain UML class name.
 *
 * @throws GenerationException when reading the XML-class-name tag fails
 */
private String getClassName(UMLClass klass)throws GenerationException{
try {
String klassName = getXMLClassName(klass);
if (klassName!=null)
return klassName;
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME Class (XML Element) name for klass: " + getFQCN(klass));
}
return klass.getName();
}
/**
 * Returns all the classes (not the tables) in the XMI file which do not belong to java.lang or java.util package.
 * Classes stereotyped as tables and excluded packages/classes are filtered
 * out by the recursive helper; the result set has no duplicates.
 * @param model
 * @return
 */
public Collection<UMLClass> getAllClasses(UMLModel model) throws GenerationException
{
Collection<UMLClass> classes = null;
try {
classes = new HashSet<UMLClass>();
getAllClasses(model.getPackages(),classes);
} catch(Exception e){
log.error("Unable to retrieve classes from model: ", e);
throw new GenerationException("Unable to retrieve classes from model: ", e);
}
return classes;
}
/**
 * Recurses into each package of the collection, accumulating classes.
 */
private void getAllClasses(Collection<UMLPackage> pkgCollection,Collection<UMLClass> classes)throws GenerationException
{
	Iterator<UMLPackage> packages = pkgCollection.iterator();
	while (packages.hasNext()) {
		getAllClasses(packages.next(), classes);
	}
}
/**
 * Adds the included, non-table classes of this package to the accumulator,
 * then recurses into sub-packages (even when this package is excluded).
 */
private void getAllClasses(UMLPackage rootPkg,Collection<UMLClass> classes) throws GenerationException
{
if(isIncluded(rootPkg))
{
for(UMLClass klass:rootPkg.getClasses())
{
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass))
classes.add(klass);
}
}
getAllClasses(rootPkg.getPackages(),classes);
}
/**
 * Returns all the interfaces in the XMI file which do not belong to java.lang or java.util package.
 * Interfaces stereotyped as tables and excluded packages/interfaces are
 * filtered out by the recursive helper; the result set has no duplicates.
 * @param model
 * @return
 */
public Collection<UMLInterface> getAllInterfaces(UMLModel model) throws GenerationException
{
Collection<UMLInterface> interfaces = null;
try {
interfaces = new HashSet<UMLInterface>();
getAllInterfaces(model.getPackages(),interfaces);
} catch(Exception e){
log.error("Unable to retrieve interfaces from model: ", e);
throw new GenerationException("Unable to retrieve interfaces from model: ", e);
}
return interfaces;
}
/**
 * Recurses into each package of the collection, accumulating interfaces.
 */
private void getAllInterfaces(Collection<UMLPackage> pkgCollection,Collection<UMLInterface> interfaces)throws GenerationException
{
	Iterator<UMLPackage> packages = pkgCollection.iterator();
	while (packages.hasNext()) {
		getAllInterfaces(packages.next(), interfaces);
	}
}
/**
 * Adds the included, non-table interfaces of this package to the
 * accumulator, then recurses into sub-packages.
 */
private void getAllInterfaces(UMLPackage rootPkg,Collection<UMLInterface> interfaces) throws GenerationException
{
if(isIncluded(rootPkg))
{
for(UMLInterface interfaze:rootPkg.getInterfaces())
{
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(interfaze.getStereotype()) && isIncluded(interfaze))
interfaces.add(interfaze);
}
}
getAllInterfaces(rootPkg.getPackages(),interfaces);
}
/**
 * Classes needing a Hibernate mapping file: inheritance-root classes plus
 * leaf classes sitting directly under implicit (table-less) parents.
 */
public Collection<UMLClass> getAllHibernateClasses(UMLModel model) throws GenerationException
{
	Collection<UMLClass> result = getAllParentClasses(model);
	Collection<UMLClass> implicitLeaves = getAllImplicitParentLeafClasses(model);
	result.addAll(implicitLeaves);
	return result;
}
/**
 * Returns all the classes (not the tables) in the XMI file which do not belong to java.lang or java.util package.
 * The class also have to be the root class in the inheritnace hierarchy to be included in the final list
 * (no superclasses, and not an implicit table-less parent itself).
 * @param model
 * @return
 */
public Collection<UMLClass> getAllParentClasses(UMLModel model) throws GenerationException
{
Collection<UMLClass> classes = new ArrayList<UMLClass>();
getAllParentClasses(model.getPackages(),classes);
return classes;
}
/**
 * Recurses into each package of the collection, accumulating root classes.
 */
private void getAllParentClasses(Collection<UMLPackage> pkgCollection,Collection<UMLClass> classes) throws GenerationException
{
	Iterator<UMLPackage> packages = pkgCollection.iterator();
	while (packages.hasNext()) {
		getAllParentClasses(packages.next(), classes);
	}
}
/**
 * Adds this package's inheritance-root classes (no superclasses, not an
 * implicit parent, not a table, included) to the accumulator, then recurses.
 */
private void getAllParentClasses(UMLPackage rootPkg,Collection<UMLClass> classes)throws GenerationException
{
if(isIncluded(rootPkg))
{
for(UMLClass klass:rootPkg.getClasses())
{
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass) && ModelUtil.getSuperclasses(klass).length == 0 && !isImplicitParent(klass))
classes.add(klass);
}
}
getAllParentClasses(rootPkg.getPackages(),classes);
}
/**
 * Returns all the classes (not the tables) in the XMI file which do not belong to java.lang or java.util package.
 * The class also has to be an implicit parent (parent class with no table mapping) in the inheritance hierarchy to be included in the final list
 * — more precisely, a non-implicit class whose direct superclass is implicit.
 * @param model
 * @return
 */
public Collection<UMLClass> getAllImplicitParentLeafClasses(UMLModel model) throws GenerationException
{
Collection<UMLClass> classes = new ArrayList<UMLClass>();
getAllImplicitParentLeafClasses(model.getPackages(),classes);
return classes;
}
/**
 * Recurses into each package of the collection, accumulating leaves of
 * implicit parents.
 */
private void getAllImplicitParentLeafClasses(Collection<UMLPackage> pkgCollection,Collection<UMLClass> classes) throws GenerationException
{
	Iterator<UMLPackage> packages = pkgCollection.iterator();
	while (packages.hasNext()) {
		getAllImplicitParentLeafClasses(packages.next(), classes);
	}
}
/**
 * Adds this package's classes whose direct superclass is an implicit
 * (table-less) parent while they themselves are not, then recurses.
 * Classes for which superclass resolution fails are silently skipped.
 */
private void getAllImplicitParentLeafClasses(UMLPackage rootPkg,Collection<UMLClass> classes) throws GenerationException
{
if(isIncluded(rootPkg))
{
for(UMLClass klass:rootPkg.getClasses())
{
try {
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass) && isImplicitParent(getSuperClass(klass)) && !isImplicitParent(klass))
classes.add(klass);
} catch(GenerationException e){
// Superclass could not be resolved for this class; move on.
continue;
}
}
}
getAllImplicitParentLeafClasses(rootPkg.getPackages(),classes);
}
/**
 * Retrieves the table corresponding to the Dependency link between class and a table.
 * If there are no Dependencies that links the class to table or there is more than
 * one Dependency then the method throws an exception.
 * Duplicate dependencies to the SAME table (by name) are tolerated — the
 * map collapses them before counting.
 *
 * @param klass
 * @return
 * @throws GenerationException
 */
public UMLClass getTable(UMLClass klass) throws GenerationException
{
Set<UMLDependency> dependencies = klass.getDependencies();
Map<String,UMLClass> clientMap = new HashMap<String,UMLClass>();
int count = 0;
UMLClass result = null;
for(UMLDependency dependency:dependencies)
{
UMLClass client = (UMLClass) dependency.getClient();
log.debug("getTable: klass: " + klass.getName() + "Client stereotype: " +client.getStereotype() + "; dependency stereotype: " + dependency.getStereotype());
// Only "table"-stereotyped clients reached via a "DataSource" dependency qualify.
if(STEREO_TYPE_TABLE.equalsIgnoreCase(client.getStereotype()) && STEREO_TYPE_DATASOURCE_DEPENDENCY.equalsIgnoreCase(dependency.getStereotype()))
{
log.debug("* * * client.getName(): " + client.getName());
clientMap.put(client.getName(), client);
result = client;
}
}
count = clientMap.size();
if(count!=1){
log.debug("getTable: klass: " +klass.getName()+"; count: " + count);
throw new GenerationException("No table found for : "+getFQCN(klass)+". Make sure the corresponding Data Model table (class) has a 'table' Stereotype assigned, and the Dependency between the Data Model table and Logical Model class has a 'DataSource' Stereotype assigned.");
}
return result;
}
/**
 * Determines whether the input class is an implicit superclass. Used
 * by the code generator to determine whether an implicit inheritance
 * hibernate mapping should be created for the input class.
 * "Implicit" means: it is a superclass of something AND it has no table
 * mapping of its own. Null input yields false (both checks are null-safe).
 * @param klass
 * @return
 * @throws GenerationException
 */
public boolean isImplicitParent(UMLClass klass)
{
if (klass != null)
log.debug("isImplicitClass " + klass.getName()+": " + (isSuperclass(klass) && hasNoTableMapping(klass)));
return (isSuperclass(klass) && hasNoTableMapping(klass));
}
/**
 * True when any ancestor of the class (walking getSuperClass repeatedly)
 * is an implicit (table-less) parent. Superclass-resolution failures are
 * logged and treated as "no implicit parent".
 */
public boolean hasImplicitParent(UMLClass klass){
UMLClass superclass = klass;
do {
try {
superclass = getSuperClass(superclass);
if(isImplicitParent(superclass)){
return true;
}
} catch (GenerationException e) {
log.error("ERROR encountered checking if class " +klass.getName() + " has an implicit parent: ", e);
return false;
}
// Stop at the top of the hierarchy (null or java.lang.Object).
} while (!(superclass==null) && !(superclass.getName().equalsIgnoreCase("java.lang.Object")));
return false;
}
/**
 * Determines whether the input class is a superclass —
 * i.e. whether any of its generalizations lists it as the supertype.
 * Null input yields false.
 * @param klass
 * @return
 */
private boolean isSuperclass(UMLClass klass)
{
boolean isSuperClass = false;
if (klass != null)
for(UMLGeneralization gen:klass.getGeneralizations())
{
if(gen.getSupertype() instanceof UMLClass && ((UMLClass)gen.getSupertype()) == klass)
return true;
}
return isSuperClass;
}
/**
 * Determines whether the input class is missing a table mapping.
 * Implemented by probing getTable and interpreting its exception as
 * "no mapping" (exception-as-control-flow, kept for compatibility).
 * @param klass
 * @return
 */
private boolean hasNoTableMapping(UMLClass klass)
{
try {
getTable(klass);
} catch (GenerationException e){
return true;
}
return false;
}
/**
 * Scans the tag values of the association to determine which JOIN table
 * the association is using; throws when the tag or table is missing.
 *
 * @param association
 * @param model
 * @param klass
 * @return the correlation (join) table class
 * @throws GenerationException
 */
public UMLClass findCorrelationTable(UMLAssociation association, UMLModel model, UMLClass klass) throws GenerationException
{
	boolean throwException = true;
	return findCorrelationTable(association, model, klass, throwException);
}
/**
 * Resolves the correlation (join) table named by the association's
 * correlation-table tag. When throwException is false and the tag is
 * absent, returns null; a named-but-missing table always throws.
 */
public UMLClass findCorrelationTable(UMLAssociation association, UMLModel model, UMLClass klass, boolean throwException) throws GenerationException
{
// When the caller tolerates absence, the tag becomes optional (min 0).
int minReq = throwException ? 1:0;
String tableName = getTagValue(klass, association,TV_CORRELATION_TABLE, null,minReq,1);
if(!throwException && (tableName == null || tableName.length() ==0)) return null;
UMLClass correlationTable = ModelUtil.findClass(model,BASE_PKG_DATA_MODEL+"."+tableName);
if(correlationTable == null) throw new GenerationException("No correlation table found named : \""+tableName+"\"");
return correlationTable;
}
/**
 * Name of the table column mapped to the given fully qualified attribute;
 * exactly one match is required.
 */
public String getMappedColumnName(UMLClass table, String fullyQualifiedAttrName) throws GenerationException
{
	boolean matchPrefix = false;
	return getColumnName(table, TV_MAPPED_ATTR_COLUMN, fullyQualifiedAttrName, matchPrefix, 1, 1);
}
/**
 * Table column (as a UML attribute) mapped to the given fully qualified
 * attribute; exactly one match is required.
 */
public UMLAttribute getMappedColumn(UMLClass table, String fullyQualifiedAttrName) throws GenerationException
{
	boolean matchPrefix = false;
	return getColumn(table, TV_MAPPED_ATTR_COLUMN, fullyQualifiedAttrName, matchPrefix, 1, 1);
}
/**
 * @param tgElt The TaggableElement (UMLClass, UMLAttribute)
 * @return String containing a concatenation of any, all caDSR
 * tag values; null when neither the public-id nor the version tag is set.
 * Format: 'publicIdKey="..."; versionKey="..."' (each part only if present).
 */
public String getCaDSRAnnotationContent(UMLTaggableElement tgElt)
{
String publicID = getTagValue(tgElt, TV_CADSR_PUBLICID);
String version = getTagValue(tgElt, TV_CADSR_VERSION);
if (publicID == null && version == null) {
return null;
}
StringBuilder sb = new StringBuilder();
if (publicID != null)
sb.append(TV_CADSR_PUBLICID).append("=\"").append(publicID).append("\"; ");
if (version != null)
sb.append(TV_CADSR_VERSION).append("=\"").append(version).append("\"");
return sb.toString();
}
/**
 * Resolves the foreign-key / join column for one side of an association.
 * Three candidate columns are looked up on the table:
 *   col1 — implements-association tag keyed by this class + other role,
 *   col2 — implements-association tag keyed by the associated class + this role,
 *   col3 — inverse-of tag keyed by the associated class + this role.
 * For join (correlation) tables the answer is col1, falling back to col3;
 * otherwise col1, falling back to col2. Empty strings are treated as absent.
 */
public String findAssociatedColumn(UMLClass table,UMLClass klass, UMLAssociationEnd otherEnd, UMLClass assocKlass, UMLAssociationEnd thisEnd, Boolean throwException, Boolean isJoin) throws GenerationException
{
String col1 = getColumnName(table,TV_ASSOC_COLUMN,getFQCN(klass) +"."+ otherEnd.getRoleName(),false,0,1);
String col2 = getColumnName(table,TV_ASSOC_COLUMN,getFQCN(assocKlass) +"."+ thisEnd.getRoleName(),false,0,1);
String col3 = getColumnName(table,TV_INVERSE_ASSOC_COLUMN,getFQCN(assocKlass) +"."+ thisEnd.getRoleName(),false,0,1);
log.debug("***** col1: " + col1 + "; col2: " + col2 + "; col3: " + col3);
// Normalize "not found" (empty) to null so the fallback logic below is uniform.
if("".equals(col1)) col1=null;
if("".equals(col2)) col2=null;
if("".equals(col3)) col3=null;
// Fail only when the relevant candidate pair is entirely missing and the caller asked for errors.
if((col1==null && col3==null && isJoin && throwException) || (col1==null && col2==null && !isJoin && throwException)){
log.debug("***** col1: " + col1 + "; col2: " + col2 + "; col3: " + col3);
log.debug("klass: " + klass.getName());
log.debug("assocKlass: " + assocKlass.getName());
log.debug("table: " + table.getName());
log.debug("isJoin: " + isJoin);
log.debug("otherEnd.getRoleName(): " +otherEnd.getRoleName());
log.debug("thisEnd.getRoleName(): " +thisEnd.getRoleName());
throw new GenerationException("Could not determine the column for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass) +". Check for missing implements-association/inverse-of/correlation-table tag(s), where appropriate");
}
/*if(col1!=null && col2!=null && !col1.equals(col2))
throw new GenerationException("More than one column found for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass));
if(col1!=null && col3!=null && !col1.equals(col3))
throw new GenerationException("More than one column found for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass));
if(col2!=null && col3!=null && !col2.equals(col3))
throw new GenerationException("More than one column found for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass));
*/
if(isJoin)
{
return col1==null ? col3 : col1;
}
else
{
return col1==null ? col2 : col1;
}
/* if(col1!=null) return col1;
else if (col3!=null) return col3;
else return col2;
*/ }
/**
 * Convenience overload: resolves the association column, throwing when no
 * candidate column can be found.
 */
public String findAssociatedColumn(UMLClass table,UMLClass klass, UMLAssociationEnd otherEnd, UMLClass assocKlass, UMLAssociationEnd thisEnd, Boolean isJoin) throws GenerationException
{
	Boolean throwException = true;
	return findAssociatedColumn(table, klass, otherEnd, assocKlass, thisEnd, throwException, isJoin);
}
/**
 * Name of the table column carrying the inverse-association tag for the
 * given class/role; empty string when absent (the tag is optional here).
 */
public String findInverseColumnValue(UMLClass table,UMLClass klass, UMLAssociationEnd thisEnd) throws GenerationException
{
	String tagKey = getFQCN(klass) + "." + thisEnd.getRoleName();
	return getColumnName(table, TV_INVERSE_ASSOC_COLUMN, tagKey, false, 0, 1);
}
/**
 * Finds the discriminator column for the class's inheritance hierarchy:
 * walks up to the topmost non-implicit ancestor, then reads the
 * discriminator-column tag from that ancestor's table (optional, max 1).
 */
public String findDiscriminatingColumnName(UMLClass klass) throws GenerationException
{
UMLClass superKlass = klass;
UMLClass temp = klass;
// Climb until the next superclass is null or an implicit (table-less) parent.
while ((temp = getSuperClass(temp))!=null && !isImplicitParent(temp))
superKlass = temp;
UMLClass table = getTable(superKlass);
String fqcn = getFQCN(superKlass);
return getColumnName(table,TV_DISCR_COLUMN,fqcn,false,0,1);
}
/**
 * The class's discriminator tag value; exactly one occurrence is required.
 */
public String getDiscriminatorValue(UMLClass klass) throws GenerationException
{
	int minOccurrence = 1;
	int maxOccurrence = 1;
	return getTagValue(klass, TV_DISCR_COLUMN, null, minOccurrence, maxOccurrence);
}
/**
 * The class's discriminator tag value, tolerating absence (returns null
 * when the tag is not set on the hierarchy root).
 */
public String getRootDiscriminatorValue(UMLClass klass) throws GenerationException
{
	int minOccurrence = 0;
	int maxOccurrence = 1;
	return getTagValue(klass, TV_DISCR_COLUMN, null, minOccurrence, maxOccurrence);
}
/**
 * Discriminator column used by an implicit (any-style) mapping, keyed by
 * "<class FQCN>.<roleName>"; exactly one match is required.
 */
public String getImplicitDiscriminatorColumn(UMLClass table, UMLClass klass, String roleName) throws GenerationException
{
	log.debug("**** getImplicitDiscriminator: table: " + table.getName() +"; klass: " + klass.getName() +"; roleName: " + roleName);
	String tagKey = getFQCN(klass) + "." + roleName;
	return getColumnName(table, TV_DISCR_COLUMN, tagKey, false, 1, 1);
}
/**
 * Id column used by an implicit (any-style) mapping, keyed by
 * "<class FQCN>.<roleName>"; exactly one match is required.
 */
public String getImplicitIdColumn(UMLClass table, UMLClass klass, String roleName) throws GenerationException
{
	String tagKey = getFQCN(klass) + "." + roleName;
	return getColumnName(table, TV_ASSOC_COLUMN, tagKey, false, 1, 1);
}
/**
 * Migration guard for the retired lazy-load tag: if the association still
 * carries the old tag this throws with upgrade instructions; otherwise
 * lazy loading is considered enabled (always returns true).
 */
public boolean isLazyLoad(UMLClass klass, UMLAssociation association) throws GenerationException
{
String temp = getTagValue(klass,association, TV_LAZY_LOAD,null, 0,1);
if (temp != null)
throw new GenerationException("Invalid Tag Value found: The '" + TV_LAZY_LOAD + "' Tag Value which is attached to the association link has been replaced with the '" + TV_NCI_EAGER_LOAD + "' Tag Value. Also, it's value must now conform to the following pattern: "+TV_NCI_EAGER_LOAD+"#<fully qualified class name>.<role name>. The value of the tag continues to be 'yes' or 'no'. Please update your model accordingly" );
return true;
}
/**
 * Reads a tagged value from the element, counting matches against the
 * required occurrence bounds. Each tag's value may be a comma-separated
 * list; with value==null every list entry matches, otherwise only exact
 * matches count. Returns the last matching entry.
 *
 * @throws GenerationException when fewer than minOccurrence or more than
 *         maxOccurrence matches are found
 */
private String getTagValue(UMLTaggableElement elt, String key, String value, int minOccurrence, int maxOccurrence) throws GenerationException
{
String result = null;
int count = 0;
for(UMLTaggedValue tv: elt.getTaggedValues())
{
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = val;
}
else if(value.equals(val))
{
count++;
result = val;
}
}
}
}
// A required tag must also be non-blank, not just present.
if(count < minOccurrence || (minOccurrence>0 && (result == null || result.trim().length() == 0))) throw new GenerationException("No value found for "+key+" tag in : "+getFQEN(elt));
if(count > maxOccurrence) throw new GenerationException("More than one value found for "+key+" tag in : "+getFQEN(elt));
return result;
}
/**
 * Returns the value of the first tagged value with the given name,
 * or null when the element carries no such tag.
 */
private String getTagValue(UMLTaggableElement tgElt, String key)
{
	for (UMLTaggedValue taggedValue : tgElt.getTaggedValues()) {
		if (key.equals(taggedValue.getName())) {
			return taggedValue.getValue();
		}
	}
	return null;
}
/**
 * Returns the values of all tagged values with the given name, in model
 * order; empty list when none match.
 */
private List<String> getTagValues(UMLTaggableElement tgElt, String key)
{
	List<String> matches = new ArrayList<String>();
	for (UMLTaggedValue taggedValue : tgElt.getTaggedValues()) {
		if (!key.equals(taggedValue.getName())) {
			continue;
		}
		log.debug(taggedValue.getName() + ": " + taggedValue.getValue());
		matches.add(taggedValue.getValue());
	}
	return matches;
}
/**
 * Name of the column attribute matching the tag criteria; empty string
 * when the (optional) column is absent.
 */
private String getColumnName(UMLClass klass, String key, String value, boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
	UMLAttribute column = getColumn(klass, key, value, isValuePrefix, minOccurrence, maxOccurrence);
	if (column == null) {
		return "";
	}
	return column.getName();
}
/**
 * Finds the table attribute (column) whose tag named {@code key} matches
 * {@code value}. Tag values may be comma-separated lists; value==null
 * matches every entry, otherwise entries match by prefix or equality
 * depending on isValuePrefix. Returns the last matching attribute.
 *
 * @throws GenerationException when the match count falls outside
 *         [minOccurrence, maxOccurrence]
 */
private UMLAttribute getColumn(UMLClass klass, String key, String value, boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
UMLAttribute result = null;
int count = 0;
for(UMLAttribute attr: klass.getAttributes())
{
for(UMLTaggedValue tv: attr.getTaggedValues())
{
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = attr;
}
else if(isValuePrefix && val.startsWith(value))
{
count++;
result = attr;
}
else if(!isValuePrefix && val.equals(value))
{
count++;
result = attr;
}
}
}
}
}
if(count < minOccurrence) throw new GenerationException("No value of "+value+" found for "+key+" tag in class : "+getFQCN(klass));
if(count > maxOccurrence) throw new GenerationException("More than one values found for "+key+" tag in class : "+getFQCN(klass));
return result;
}
/**
 * Reads a tagged value from an attribute, with comma-separated list
 * handling and prefix/equality matching as in getColumn. Returns the
 * last matching entry; klass is used only for error messages.
 *
 * @throws GenerationException when the match count falls outside
 *         [minOccurrence, maxOccurrence]
 */
private String getTagValue(UMLClass klass, UMLAttribute attribute, String key, String value, Boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
String result = null;
int count = 0;
for(UMLTaggedValue tv: attribute.getTaggedValues())
{
log.debug("Processing tv: " + tv.getName());
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
log.debug("Key equals tv. TV value is: " + tv.getValue());
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = val;
}
else if(isValuePrefix && val.startsWith(value))
{
count++;
result = val;
}
else if(!isValuePrefix && val.equals(value))
{
count++;
result = val;
}
}
}
}
if(count < minOccurrence) throw new GenerationException("No value of "+value+" found for "+key+" tag in class : "+getFQCN(klass));
if(count > maxOccurrence) throw new GenerationException("More than one values found for "+key+" tag in class : "+getFQCN(klass));
return result;
}
/**
 * Reads a tagged value from an association, with comma-separated list
 * handling and prefix/equality matching. The two end class names are
 * resolved up front purely for error-message context. Returns the last
 * matching entry.
 *
 * @throws GenerationException when the match count falls outside
 *         [minOccurrence, maxOccurrence], or when a required value is blank
 */
private String getTagValue(UMLClass klass, UMLAssociation association, String key, String value, Boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
List <UMLAssociationEnd>ends = association.getAssociationEnds();
UMLAssociationEnd thisEnd = getThisEnd(klass, ends);
UMLAssociationEnd otherEnd = getOtherEnd(klass, ends);
String thisClassName = getFQCN(((UMLClass)thisEnd.getUMLElement()));
String otherClassName = getFQCN(((UMLClass)otherEnd.getUMLElement()));
String result = null;
int count = 0;
for(UMLTaggedValue tv: association.getTaggedValues())
{
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = val;
}
else if(isValuePrefix && val.startsWith(value))
{
count++;
result = val;
}
else if(!isValuePrefix && val.equals(value))
{
count++;
result = val;
}
}
}
}
// A required tag must also be non-blank, not just present.
if(count < minOccurrence || (minOccurrence >0 && (result == null || result.trim().length() == 0))) throw new GenerationException("No tag value of "+key+" found for the association between "+thisClassName +" and "+ otherClassName +":"+count+":"+result);
if(count > maxOccurrence) throw new GenerationException("More than the expected maximum number (" + maxOccurrence + ") of tag value occurrences for "+key+" found for the association between "+thisClassName +" and "+ otherClassName);
return result;
}
/**
 * Convenience overload: association tag lookup with exact (non-prefix)
 * value matching.
 */
private String getTagValue(UMLClass klass, UMLAssociation association, String key, String value, int minOccurrence, int maxOccurrence) throws GenerationException
{
	Boolean isValuePrefix = false;
	return getTagValue(klass, association, key, value, isValuePrefix, minOccurrence, maxOccurrence);
}
/**
 * Concatenates the values of a numbered tag series: the first tag is named
 * {@code key}, subsequent ones {@code key2}, {@code key3}, ... up to
 * {@code key&lt;maxOccurrence&gt;}. Missing entries contribute nothing;
 * the result is the concatenation in series order.
 */
public String getTagValue(Collection<UMLTaggedValue> tagValues, String key, int maxOccurrence) throws GenerationException
{
StringBuilder temp = new StringBuilder();
for(int i=0;i<maxOccurrence;i++)
{
// i==0 -> "key", i==1 -> "key2", i==2 -> "key3", ...
String searchKey = i==0 ? key : key + (i+1);
for(UMLTaggedValue tv:tagValues)
{
if(searchKey.equals(tv.getName()))
{
temp.append(tv.getValue());
}
}
}
return temp.toString();
}
/**
 * Builds a Javadoc comment from the element's documentation tag series,
 * falling back to the description series when documentation is blank.
 * Up to 8 numbered tag segments are concatenated for each.
 */
private String getJavaDocs(Collection<UMLTaggedValue> tagValues) throws GenerationException
{
String documentation = getTagValue(tagValues, TV_DOCUMENTATION, 8);
String description = getTagValue(tagValues, TV_DESCRIPTION, 8);
String temp = documentation == null || documentation.trim().length()==0 ? description : documentation;
StringBuilder doc = new StringBuilder();
doc.append("/**");
doc.append("\n\t* ").append(temp);
doc.append("\n\t**/");
return doc.toString();
}
/**
 * Javadoc comment for an interface, built from its documentation tags.
 */
public String getJavaDocs(UMLInterface interfaze) throws GenerationException
{
	Collection<UMLTaggedValue> taggedValues = interfaze.getTaggedValues();
	return getJavaDocs(taggedValues);
}
/**
 * Javadoc comment for a class, built from its documentation tags.
 */
public String getJavaDocs(UMLClass klass) throws GenerationException
{
	Collection<UMLTaggedValue> taggedValues = klass.getTaggedValues();
	return getJavaDocs(taggedValues);
}
/**
 * Javadoc comment for an attribute, built from its documentation tags.
 */
public String getJavaDocs(UMLAttribute attr) throws GenerationException
{
	Collection<UMLTaggedValue> taggedValues = attr.getTaggedValues();
	return getJavaDocs(taggedValues);
}
/**
 * Javadoc comment for a generated association field, mentioning the
 * associated class's FQCN and whether the end is a collection.
 */
public String getJavaDocs(UMLClass klass, UMLAssociation assoc) throws GenerationException
{
UMLAssociationEnd otherEnd = getOtherEnd(klass, assoc.getAssociationEnds());
StringBuilder doc = new StringBuilder();
doc.append("/**");
doc.append("\n * An associated "+getFQCN(((UMLClass)otherEnd.getUMLElement()))+" object");
if(isAssociationEndMany(otherEnd))
doc.append("'s collection ");
doc.append("\n **/\n");
return doc.toString();
}
public String getGetterMethodJavaDocs(UMLAttribute attr) {
StringBuilder doc = new StringBuilder();
doc.append("/**");
doc.append("\n * Retrieves the value of the "+attr.getName()+" attribute");
doc.append("\n * @return ").append(attr.getName());
doc.append("\n **/\n");
return doc.toString();
}
public String getSetterMethodJavaDocs(UMLAttribute attr) {
StringBuilder doc = new StringBuilder();
doc.append("/**");
doc.append("\n * Sets the value of "+attr.getName()+" attribute");
doc.append("\n **/\n");
return doc.toString();
}
public String getGetterMethodJavaDocs(UMLClass klass, UMLAssociation assoc) throws GenerationException {
UMLAssociationEnd otherEnd = getOtherEnd(klass, assoc.getAssociationEnds());
StringBuilder doc = new StringBuilder();
doc.append("/**");
doc.append("\n * Retrieves the value of the "+otherEnd.getRoleName()+" attribute");
doc.append("\n * @return ").append(otherEnd.getRoleName());
doc.append("\n **/\n");
return doc.toString();
}
public String getSetterMethodJavaDocs(UMLClass klass, UMLAssociation assoc) throws GenerationException {
UMLAssociationEnd otherEnd = getOtherEnd(klass, assoc.getAssociationEnds());
StringBuilder doc = new StringBuilder();
doc.append("/**");
doc.append("\n * Sets the value of "+otherEnd.getRoleName()+" attribute");
doc.append("\n **/\n");
return doc.toString();
}
/**
 * Reverses the order of the segments of a dot-separated package name
 * (e.g. "gov.nih.nci" becomes "nci.nih.gov").
 *
 * @param s dot-separated package name
 * @return the package name with its segments in reverse order; the input
 *         itself when it contains no segments (the original implementation
 *         threw StringIndexOutOfBoundsException on such input)
 */
public String reversePackageName(String s) {
	StringTokenizer tokenizer = new StringTokenizer(s, ".");
	List<String> segments = new ArrayList<String>();
	while (tokenizer.hasMoreTokens()) {
		segments.add(tokenizer.nextToken());
	}
	// Guard the empty case instead of calling substring(0, -1) on an empty builder.
	if (segments.isEmpty()) {
		return s;
	}
	StringBuilder reversed = new StringBuilder();
	for (int i = segments.size() - 1; i >= 0; i--) {
		reversed.append(segments.get(i));
		if (i > 0) {
			// Join with the separator instead of appending-then-stripping a trailing dot.
			reversed.append(Constant.DOT);
		}
	}
	return reversed.toString();
}
/**
 * Builds the comma-separated list of fully qualified class names used as the
 * WSDD service value.
 *
 * @param classColl classes to include
 * @return comma-separated fully qualified class names; an empty string for an
 *         empty collection (the original implementation threw
 *         StringIndexOutOfBoundsException on that input)
 * @throws GenerationException propagated from package-name resolution
 */
public String getWSDDServiceValue(Collection<UMLClass> classColl)throws GenerationException{
	StringBuilder result = new StringBuilder();
	for(UMLClass klass : classColl){
		// Prepend the separator instead of stripping a trailing comma afterwards;
		// this also makes the empty-collection case safe.
		if (result.length() > 0)
			result.append(Constant.COMMA);
		result.append(getFullPackageName(klass))
			.append(Constant.DOT)
			.append(klass.getName());
	}
	return result.toString();
}
/**
 * Looks up the collection table named by the attribute's
 * mapped-collection-table tagged value inside the data-model base package.
 *
 * @throws GenerationException when no class with that name exists in the model
 */
public UMLClass findCollectionTable(UMLAttribute attr, UMLModel model) throws GenerationException
{
	String tableName = getTagValue(attr.getTaggedValues(), TV_MAPPED_COLLECTION_TABLE, 1);
	UMLClass collectionTable = ModelUtil.findClass(model, BASE_PKG_DATA_MODEL + "." + tableName);
	if (collectionTable == null)
		throw new GenerationException("No collection table found named : \"" + tableName + "\"");
	return collectionTable;
}
/**
 * Returns the name of the collection-table column mapped (via the
 * mapped-attributes tag) to the given class attribute -- the collection key.
 */
public String getCollectionKeyColumnName(UMLClass table,UMLClass klass, UMLAttribute attr) throws GenerationException
{
	String mappedAttribute = getFQCN(klass) + "." + attr.getName();
	return getColumnName(table, TV_MAPPED_ATTR_COLUMN, mappedAttribute, false, 1, 1);
}
/**
 * Returns the name of the collection-table column mapped (via the
 * mapped-element tag) to the given class attribute -- the collection element.
 */
public String getCollectionElementColumnName(UMLClass table,UMLClass klass, UMLAttribute attr) throws GenerationException
{
	String mappedAttribute = getFQCN(klass) + "." + attr.getName();
	return getColumnName(table, TV_MAPPED_ELEMENT_COLUMN, mappedAttribute, false, 1, 1);
}
/**
 * Maps a Collection&lt;...&gt; attribute's element type to the corresponding
 * Hibernate basic type name; non-collection types and unrecognized element
 * types are returned unchanged.
 */
public String getCollectionElementHibernateType(UMLClass klass, UMLAttribute attr) throws GenerationException
{
	String typeName = getDataType(attr);
	if (!typeName.startsWith("Collection<"))
		return typeName;
	// Strip the "Collection<" prefix and the trailing '>' to isolate the element type.
	String elementType = typeName.substring("Collection<".length(), typeName.length() - 1);
	if ("int".equalsIgnoreCase(elementType) || "integer".equalsIgnoreCase(elementType))
		return "integer";
	if ("char".equalsIgnoreCase(elementType) || "character".equalsIgnoreCase(elementType))
		return "character";
	// These Hibernate basic type names equal the lower-cased Java type name.
	String[] passThrough = { "double", "float", "long", "string", "boolean", "byte", "short" };
	for (String hibernateType : passThrough)
		if (hibernateType.equalsIgnoreCase(elementType))
			return hibernateType;
	return elementType;
}
/**
 * Produces the JAXB annotation(s) for an attribute: collection-typed
 * attributes get an @XmlElementWrapper/@XmlElement pair (the element name is
 * the lower-cased element type), everything else a plain @XmlAttribute.
 */
public String getJaxbXmlAttributeAnnotation(UMLClass klass, UMLAttribute attr){
	String dataType = this.getDataType(attr);
	if (!dataType.startsWith("Collection")){
		return " @XmlAttribute";
	}
	String elementType = dataType.substring(dataType.indexOf("<") + 1, dataType.indexOf(">"));
	String namespace = this.getNamespaceUriPrefix() + this.getFullPackageName(klass);
	StringBuilder annotation = new StringBuilder();
	annotation.append(" @XmlElementWrapper(name=\"").append(attr.getName()).append("\", ")
		.append("namespace=\"").append(namespace).append("\")");
	annotation.append(" @XmlElement(name=\"").append(elementType.toLowerCase()).append("\", ")
		.append("namespace=\"").append(namespace).append("\")");
	log.debug("Collection Attribute @XmlElement annotation: "+annotation.toString());
	return annotation.toString();
}
/**
 * Produces the JAXB @XmlRootElement annotation for a class: the element name
 * is the class name and the namespace is the configured URI prefix plus the
 * class's full package name.
 */
public String getJaxbXmlRootElementAnnotation(UMLClass klass){
	String annotation = "@XmlRootElement(name=\"" + klass.getName() + "\", "
		+ "namespace=\"" + this.getNamespaceUriPrefix() + this.getFullPackageName(klass) + "\")";
	log.debug("@XmlRootElement annotation for class "+klass.getName()+": "+annotation);
	return annotation;
}
/**
 * Builds the JAXB @XmlType annotation for a class: propOrder lists every
 * attribute name followed by every navigable association role name.
 */
public String getJaxbXmlTypeAnnotation(UMLClass klass){
StringBuffer sb = new StringBuffer("@XmlType(name = \"").append(klass.getName());
sb.append("\", propOrder = {");
int counter = 0;
int totalAttrCount = klass.getAttributes().size();
// Emit each attribute name, comma-separated.
for(UMLAttribute attr:klass.getAttributes()){
counter++;
sb.append("\"").append(attr.getName()).append("\"");
if (counter < totalAttrCount){
sb.append(", ");
}
}
counter = 0;
int totalAssocCount = klass.getAssociations().size();
// Separator between the attribute list and the association list.
if ((totalAttrCount > 0) && (totalAssocCount > 0)){
sb.append(", ");
}
for(UMLAssociation assoc:klass.getAssociations()){
List<UMLAssociationEnd> assocEnds = assoc.getAssociationEnds();
try {
// UMLAssociationEnd thisEnd = this.getThisEnd(klass,assocEnds);
UMLAssociationEnd otherEnd = this.getOtherEnd(klass,assocEnds);
counter++;
// Only navigable ends become properties. Note the counter advances even for
// non-navigable ends, which can leave a dangling ", " -- cleaned up below.
if(otherEnd.isNavigable())
{
sb.append("\"").append(otherEnd.getRoleName()).append("\"");
if (counter < totalAssocCount){
sb.append(", ");
}
}
} catch (GenerationException e) {
// An association whose ends cannot be resolved is omitted from propOrder.
log.error("Error generating XML Type Property order for association role name: "+assoc.getRoleName(),e);
}
}
// Remove a single dangling comma left just before the closing brace, if any.
char c = sb.charAt(sb.length()-2);
log.debug("Last propOrder char: " +c);
if ( c==',' ){
sb.deleteCharAt(sb.length()-2);
}
sb.append("})");
log.debug("@XMLType string for class " + klass.getName() + sb.toString() );
return sb.toString();
}
/**
 * Builds the JAXB @XmlSeeAlso annotation listing the class's non-implicit
 * subclasses followed by its non-implicit superclasses; returns an empty
 * string when the class has neither.
 */
public String getJaxbXmlSeeAlsoAnnotation(UMLClass klass){
List<UMLClass> subClasses = getNonImplicitSubclasses(klass);
List<UMLClass> superClasses = getNonImplicitSuperclasses(klass);
StringBuffer sb = new StringBuffer();
boolean found = false;
// First list all subclasses, comma-separated.
if (!subClasses.isEmpty()){
int counter = 0;
int totalCount = subClasses.size();
for (UMLClass subKlass:subClasses){
counter++;
found = true;
sb.append(subKlass.getName()+".class");
if (counter < totalCount){
sb.append(", ");
}
}
}
// Then all superclasses, separated from the subclass list by a comma.
if (!superClasses.isEmpty()){
int counter = 0;
int totalCount = superClasses.size();
if(found)
sb.append(",");
for (UMLClass superKlass:superClasses){
counter++;
found = true;
sb.append(superKlass.getName()+".class");
if (counter < totalCount){
sb.append(", ");
}
}
}
// Wrap the collected names only when at least one related class exists.
if(found)
{
StringBuffer sbreturn = new StringBuffer("@XmlSeeAlso({");
sbreturn.append(sb.toString());
sbreturn.append("})");
log.debug("@XMLSeeAlso string for class " + klass.getName() + sb.toString() );
return sbreturn.toString();
}
return "";
}
/**
 * Returns every superclass reachable from the given class through its
 * generalizations for which isSuperclass() holds, without duplicates.
 */
public List<UMLClass> getNonImplicitSuperclasses(UMLClass implicitKlass){
ArrayList<UMLClass> nonImplicitSuperclasses = new ArrayList<UMLClass>();
getNonImplicitSuperclasses(implicitKlass, nonImplicitSuperclasses);
return nonImplicitSuperclasses;
}
/**
 * Recursive helper: walks the generalization graph upward, collecting each
 * distinct supertype accepted by isSuperclass().
 * NOTE(review): no visited-set is kept; a cyclic generalization graph would
 * recurse without bound -- confirm models are guaranteed acyclic.
 */
private void getNonImplicitSuperclasses(UMLClass klass, ArrayList<UMLClass> nonImplicitSuperclasses){
for(UMLGeneralization gen:klass.getGeneralizations()){
UMLClass superKlass = (UMLClass)gen.getSupertype();
if(superKlass!=klass && isSuperclass(superKlass)){
if(!nonImplicitSuperclasses.contains(superKlass)){
nonImplicitSuperclasses.add(superKlass);
}
}
// Guard against self-generalization before recursing further up.
if(superKlass!=klass)
getNonImplicitSuperclasses(superKlass, nonImplicitSuperclasses);
}
}
/**
 * Returns every subclass reachable from the given class whose type is not an
 * implicit parent.
 */
public List<UMLClass> getNonImplicitSubclasses(UMLClass implicitKlass){
ArrayList<UMLClass> nonImplicitSubclasses = new ArrayList<UMLClass>();
getNonImplicitSubclasses(implicitKlass, nonImplicitSubclasses);
return nonImplicitSubclasses;
}
/**
 * Recursive helper: walks the generalization graph downward, collecting each
 * subtype that is not an implicit parent. Unlike the superclass variant, no
 * duplicate check is performed before adding.
 */
private void getNonImplicitSubclasses(UMLClass klass, ArrayList<UMLClass> nonImplicitSubclasses){
for(UMLGeneralization gen:klass.getGeneralizations()){
UMLClass subKlass = (UMLClass)gen.getSubtype();
if(subKlass!=klass && !isImplicitParent(subKlass)){
nonImplicitSubclasses.add(subKlass);
}
if(subKlass!=klass)
getNonImplicitSubclasses(subKlass, nonImplicitSubclasses);
}
}
/**
 * Determines the Hibernate cascade-style for the given association end by
 * reading the NCI_CASCADE_ASSOCIATION tag value keyed on the fully qualified
 * class name plus role name. The tag value is a comma-separated list; each
 * entry is trimmed and the list re-joined with commas.
 *
 * @param klass class owning the association end
 * @param roleName role name of the association end
 * @param association association carrying the tag values
 * @return the normalized cascade-style list, or "none" when no tag is present
 * @throws GenerationException propagated from tag-value retrieval
 */
public String findCascadeStyle(UMLClass klass, String roleName, UMLAssociation association) throws GenerationException
{
	String tagKey = TV_NCI_CASCADE_ASSOCIATION + "#" + getFQCN(klass) + "." + roleName;
	for (String rawStyles : getTagValues(association, tagKey)){
		StringBuilder normalized = new StringBuilder();
		for (String style : rawStyles.split(",")){
			if (normalized.length() > 0)
				normalized.append(",");
			normalized.append(style.trim());
		}
		// Only the first tag value occurrence is honored.
		return normalized.toString();
	}
	return "none";
}
/**
 * Returns the nullability setting (as a string literal for templates) of the
 * foreign-key attribute implementing the given association end: "false" when
 * the end's lower multiplicity is 0 (optional), "true" otherwise.
 */
public String isFKAttributeNull(UMLAssociationEnd otherEnd) {
	return otherEnd.getLowMultiplicity() == 0 ? "false" : "true";
}
/**
 * Determines whether the given association end should be lazily loaded.
 * Lazy loading is the default; it is disabled only when an NCI_EAGER_LOAD tag
 * keyed on the fully qualified class name plus role name holds "true" or
 * "yes" (case-insensitive).
 *
 * @param klass class owning the association end
 * @param roleName role name of the association end
 * @param association association carrying the tag values
 * @return false when eager loading is requested, true otherwise
 * @throws GenerationException propagated from tag-value retrieval
 */
public boolean isLazyLoad(UMLClass klass, String roleName, UMLAssociation association) throws GenerationException
{
	String tagKey = TV_NCI_EAGER_LOAD + "#" + getFQCN(klass) + "." + roleName;
	for (String eagerLoadValue : getTagValues(association, tagKey)){
		boolean eager = "true".equalsIgnoreCase(eagerLoadValue) || "yes".equalsIgnoreCase(eagerLoadValue);
		if (eager)
			return false;
	}
	return true;
}
/**
 * Returns the configured map of valid Hibernate cascade styles, keyed by
 * style name. Note that the internal map is exposed directly.
 */
public Map<String,String> getValidCascadeStyles(){
	return CASCADE_STYLES;
}
/**
 * Collects the names of all attributes (columns) of the given table class
 * that carry an "inverse-of" tagged value.
 *
 * @param klass the table class whose attributes are scanned
 * @return names of attributes carrying an inverse-of tag; empty when none
 * @throws GenerationException declared for API consistency with sibling lookups
 */
public List<String> findInverseSettingColumns(UMLClass klass) throws GenerationException
{
	// Parameterized return type (was a raw List); content and order unchanged.
	List<String> inverseColumnNames = new ArrayList<String>();
	for(UMLAttribute attr: klass.getAttributes())
	{
		for(UMLTaggedValue taggedValue: attr.getTaggedValues())
		{
			if (TV_INVERSE_ASSOC_COLUMN.equals(taggedValue.getName()))
			{
				inverseColumnNames.add(attr.getName());
			}
		}
	}
	return inverseColumnNames;
}
/**
 * Builds the class-level Hibernate Validator constraint annotation string by
 * concatenating the caDSR-derived validator model's annotations with those of
 * the user-supplied extension model, one per indented line.
 */
public String getHibernateValidatorConstraints(UMLClass klass){
	String fqcn = getFQCN(klass);
	ValidatorClass validatorClass = vModel.getClass(fqcn);
	ValidatorClass validatorClassExtension = vModelExtension.getClass(fqcn);
	StringBuilder constraints = new StringBuilder();
	if (validatorClass != null)
		constraints.append("\t").append(validatorClass.getConstraintAnnotationString()).append("\n");
	if (validatorClassExtension != null)
		constraints.append("\t").append(validatorClassExtension.getConstraintAnnotationString()).append("\n");
	return constraints.toString();
}
/**
 * Builds the attribute-level Hibernate Validator constraint annotation string
 * for the given class attribute. caDSR-derived constraints (vModel) are merged
 * with user-supplied constraints (vModelExtension); on a clash the user
 * constraint wins. Multiple pattern constraints are folded into a single
 * @Patterns({...}) container. Each resulting annotation is emitted on its own
 * indented line.
 */
public String getHibernateValidatorConstraints(UMLClass klass,UMLAttribute attr){
ValidatorClass vClass = vModel.getClass(getFQCN(klass));
ValidatorClass vClassExtension = vModelExtension.getClass(getFQCN(klass));
List<String> cadsrConstraintAnnotations=new ArrayList<String>();
List<String> userConstraintAnnotations=new ArrayList<String>();
ValidatorAttribute vAttr=null;
if (vClass != null)
vAttr=vClass.getAttribute(attr.getName());
if (vAttr!=null)
cadsrConstraintAnnotations.addAll(vAttr.getConstraintAnnotations());
ValidatorAttribute vAttrExtension=null;
if (vClassExtension != null)
vAttrExtension=vClassExtension.getAttribute(attr.getName());
if (vAttrExtension!=null)
userConstraintAnnotations.addAll(vAttrExtension.getConstraintAnnotations());
//remove duplicates - user constraints override caDSR constraints
List<String> constraintAnnotations=new ArrayList<String>();
for(String cadsrConstraintAnnotation : cadsrConstraintAnnotations){
// Annotations are compared by name only: the text before the opening parenthesis.
String cadsrConstraintPrefix = cadsrConstraintAnnotation.indexOf("(") > 0 ? cadsrConstraintAnnotation.substring(0, cadsrConstraintAnnotation.indexOf("(")) : cadsrConstraintAnnotation;
boolean duplicateConstraint = false;
for(String userConstraintAnnotation : userConstraintAnnotations){
if (userConstraintAnnotation.startsWith(cadsrConstraintPrefix)){
duplicateConstraint = true;
break;
}
}
if (!duplicateConstraint)
constraintAnnotations.add(cadsrConstraintAnnotation);
}
constraintAnnotations.addAll(userConstraintAnnotations);
//Handle special @Patterns scenario
// All annotations mentioning "Pattern" are pulled out and re-emitted inside a
// single @Patterns({...}) wrapper.
List<String> patternConstraintAnnotations=new ArrayList<String>();
for(String constraintAnnotation : constraintAnnotations){
if (constraintAnnotation.indexOf("Pattern")>0){
patternConstraintAnnotations.add(constraintAnnotation);
}
}
StringBuilder sb;
if (!patternConstraintAnnotations.isEmpty()){
sb = new StringBuilder();
constraintAnnotations.removeAll(patternConstraintAnnotations);
sb.append(patternConstraintAnnotations.remove(0));
for (String patternConstraintAnnotation:patternConstraintAnnotations){
sb.append(",").append(patternConstraintAnnotation);
}
constraintAnnotations.add("@Patterns({"+sb.toString()+"})");
}
// Emit each constraint on its own "\n\t"-prefixed line.
sb = new StringBuilder();
for(String constraintAnnotation: constraintAnnotations){
sb.append("\n\t").append(constraintAnnotation);
}
return sb.toString();
}
/**
 * Returns the XSD restriction (permissible) values for an attribute.
 * User-supplied values from the validator extension model take precedence;
 * caDSR-derived values are used only when the extension supplies none.
 */
public Collection<String> getXSDRestrictionValues(UMLClass klass,UMLAttribute attr){
	String fqcn = getFQCN(klass);
	ValidatorClass cadsrClass = vModel.getClass(fqcn);
	ValidatorClass extensionClass = vModelExtension.getClass(fqcn);
	ArrayList<String> permissibleValues = new ArrayList<String>();
	ValidatorAttribute extensionAttribute = extensionClass == null ? null : extensionClass.getAttribute(attr.getName());
	if (extensionAttribute != null)
		permissibleValues.addAll(extensionAttribute.getXSDRestrictionCollection());
	// Fall back to the caDSR model only when the user supplied no values.
	if (permissibleValues.isEmpty()){
		ValidatorAttribute cadsrAttribute = cadsrClass == null ? null : cadsrClass.getAttribute(attr.getName());
		if (cadsrAttribute != null)
			permissibleValues.addAll(cadsrAttribute.getXSDRestrictionCollection());
	}
	return permissibleValues;
}
/**
 * Collects the distinct import statements required by the Hibernate Validator
 * constraint annotations of the given class (caDSR model plus user extension).
 * When the Pattern import is present, the Patterns wrapper import is added as
 * well, matching the @Patterns container emitted for multiple patterns.
 */
private Collection<String> getHibernateValidatorConstraintImports(UMLClass klass){
	String fqcn = getFQCN(klass);
	ValidatorClass cadsrClass = vModel.getClass(fqcn);
	ValidatorClass extensionClass = vModelExtension.getClass(fqcn);
	Collection<String> constraintImports = new HashSet<String>();
	if (cadsrClass != null)
		constraintImports.addAll(cadsrClass.getConstraintImports());
	if (extensionClass != null)
		constraintImports.addAll(extensionClass.getConstraintImports());
	if (constraintImports.contains("org.hibernate.validator.Pattern"))
		constraintImports.add("org.hibernate.validator.Patterns");
	return constraintImports;
}
/**
 * Returns the element's NCI_GME_XML_NAMESPACE tag value, or null when absent.
 *
 * @throws GenerationException when the tag value cannot be read
 */
public String getNamespace(UMLTaggableElement te) throws GenerationException {
	try {
		return getTagValue(te,TV_NCI_GME_XML_NAMESPACE,null,0,1);
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME 'NCI_GME_XML_NAMESPACE' tag value for element", ge);
	}
}
/**
 * Resolves the GME namespace for a class: the class's own namespace tag when
 * present, otherwise the nearest namespace tag found walking up the package
 * tree; null (with an error logged) when none is found anywhere.
 */
public String getGMENamespace(UMLClass klass) throws GenerationException{
	try {
		String classNamespace = getNamespace(klass);
		if (classNamespace != null && classNamespace.length() > 0)
			return classNamespace;
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME namespace for: " + getFQEN(klass), ge);
	}
	String packageNamespace = getGMENamespace(klass.getPackage());
	if (packageNamespace != null && packageNamespace.length() > 0)
		return packageNamespace;
	log.error("GME Namespace name not found for: "+getFullPackageName(klass)+". Returning null");
	return null;
}
/**
 * Resolves the GME namespace for a package by walking up through its parents
 * until a non-empty namespace tag is found; null at the root.
 */
public String getGMENamespace(UMLPackage pkg) throws GenerationException{
	// Recursion terminates at the model root, whose parent is null.
	if (pkg==null)
		return null;
	log.debug("Getting Package Namespace for: " +pkg.getName());
	String gmeNamespace = getNamespace(pkg);
	if (gmeNamespace != null && gmeNamespace.length() > 0)
		return gmeNamespace;
	return getGMENamespace(pkg.getParent());
}
/**
 * Reports whether the element carries an NCI_GME_XML_NAMESPACE tag. Works by
 * requesting at most zero occurrences of the tag: the lookup throws exactly
 * when the tag is present, so the catch branch signals "tag found".
 */
public boolean hasGMEXMLNamespaceTag(UMLTaggableElement te){
	try {
		getTagValue(te,TV_NCI_GME_XML_NAMESPACE,null,0,0);
		return false;
	} catch (GenerationException e) {
		return true;
	}
}
/**
 * Returns the package-name portion (everything after the final '/') of the
 * element's NCI_GME_XML_NAMESPACE tag value, or null when the tag is absent.
 *
 * @throws GenerationException when the tag cannot be read or its value
 *         contains no '/' separator
 */
private String getNamespacePackageName(UMLTaggableElement te) throws GenerationException {
	String gmeNamespace;
	try {
		gmeNamespace = getTagValue(te,TV_NCI_GME_XML_NAMESPACE,null,0,1);
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME 'NCI_GME_XML_NAMESPACE' tag value for: " + getFQEN(te), ge);
	}
	if (gmeNamespace == null)
		return null;
	if (gmeNamespace.lastIndexOf('/') < 0)
		throw new GenerationException("Invalid GME Namespace found for:" + getFQEN(te)+": "+gmeNamespace);
	return gmeNamespace.substring(gmeNamespace.lastIndexOf('/')+1, gmeNamespace.length());
}
/**
 * Resolves the GME XML namespace URI prefix for the model by walking the
 * package path given by the LOGICAL_MODEL property and reading that package's
 * NCI_GME_XML_NAMESPACE tag value.
 *
 * @param model the UML model
 * @param basePkgLogicalModel dot-separated logical-model package path
 * @return the namespace prefix, '/'-terminated and with spaces replaced by
 *         '_'; null when the package carries no namespace tag
 * @throws GenerationException when the package path cannot be resolved or the
 *         tag value cannot be read
 */
public String getModelNamespace(UMLModel model, String basePkgLogicalModel) throws GenerationException {
	// Walk the dot-separated package path down from the model root.
	StringTokenizer tokenizer = new StringTokenizer(basePkgLogicalModel, ".");
	UMLPackage pkg = null;
	if (tokenizer.hasMoreTokens()) {
		pkg = model.getPackage(tokenizer.nextToken());
		while (pkg != null && tokenizer.hasMoreTokens()) {
			pkg = pkg.getPackage(tokenizer.nextToken());
		}
	}
	if (pkg == null) {
		// Bug fix: the original built this message from pkg.getName() while pkg
		// was known to be null, throwing NullPointerException instead of the
		// intended GenerationException. Use the requested path instead.
		throw new GenerationException("Error getting the Logical Model package for model path: " + basePkgLogicalModel
				+ ". Make sure the LOGICAL_MODEL property in codegen.properties file is valid.");
	}
	log.debug("* * * pkgName: " + pkg.getName());
	try {
		String modelNamespacePrefix = this.getNamespace(pkg);
		log.debug("* * * modelNamespacePrefix: " + modelNamespacePrefix);
		if (modelNamespacePrefix != null) {
			if (!modelNamespacePrefix.endsWith("/"))
				modelNamespacePrefix = modelNamespacePrefix + "/";
			return modelNamespacePrefix.replace(" ", "_");
		}
	} catch (GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME Namespace value for model: " + pkg.getName() + ge.getMessage());
	}
	return null;
}
/**
 * Returns the namespace portion (everything before the final '/') of the
 * package's NCI_GME_XML_NAMESPACE tag value, or null when the tag is absent.
 *
 * @throws GenerationException when the tag cannot be read or its value
 *         contains no '/' separator
 */
public String getNamespacePrefix(UMLPackage pkg) throws GenerationException {
	String gmeNamespace;
	try {
		gmeNamespace = getTagValue(pkg,TV_NCI_GME_XML_NAMESPACE,null,0,1);
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME 'NCI_GME_XML_NAMESPACE' tag value for UML package: " + getFullPackageName(pkg), ge);
	}
	if (gmeNamespace == null)
		return null;
	if (gmeNamespace.lastIndexOf('/') < 0)
		throw new GenerationException("Invalid GME Namespace found for UML package " + getFullPackageName(pkg)+": "+gmeNamespace);
	return gmeNamespace.substring(0, gmeNamespace.lastIndexOf('/'));
}
/**
 * Returns the class's NCI_GME_XML_ELEMENT tag value (the XML element name to
 * use for the class), or null when the tag is absent.
 */
public String getXMLClassName(UMLClass klass) throws GenerationException {
	try {
		return getTagValue(klass,TV_NCI_GME_XML_ELEMENT,null,0,1);
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME 'NCI_GME_XML_ELEMENT' tag value for klass: " + klass.getName(), ge);
	}
}
/**
 * Reports whether the element carries an NCI_GME_XML_ELEMENT tag. Requests at
 * most zero occurrences: the lookup throws exactly when the tag is present.
 */
public boolean hasGMEXMLClassTag(UMLTaggableElement te){
	try {
		getTagValue(te,TV_NCI_GME_XML_ELEMENT,null,0,0);
		return false;
	} catch (GenerationException e) {
		return true;
	}
}
/**
 * Returns the XML name for an attribute from its NCI_GME_XML_LOC_REF tag.
 * A leading '@' (the "render as XML attribute" marker) is stripped.
 */
public String getXMLAttributeName(UMLAttribute attr)throws GenerationException{
	try {
		String attributeName = getTagValue(attr,TV_NCI_GME_XML_LOC_REF,null,0,1);
		boolean hasAttributeMarker = attributeName != null && attributeName.length() > 0 && attributeName.startsWith("@");
		return hasAttributeMarker ? attributeName.substring(1) : attributeName;
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME 'NCI_GME_XML_LOC_REF' tag value for attribute: " + attr.getName(), ge);
	}
}
/**
 * Reports whether the attribute should be rendered as an XML element rather
 * than an XML attribute: true when a non-empty NCI_GME_XML_LOC_REF tag value
 * exists that does not start with the '@' attribute marker.
 */
public boolean generateXMLAttributeAsElement(UMLAttribute attr)throws GenerationException{
	try {
		String locRef = getTagValue(attr,TV_NCI_GME_XML_LOC_REF,null,0,1);
		return locRef != null && locRef.length() > 0 && !locRef.startsWith("@");
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME 'NCI_GME_XML_LOC_REF' tag value for attribute: " + attr.getName(), ge);
	}
}
/**
 * Reports whether the element carries an NCI_GME_XML_LOC_REF tag. Requests at
 * most zero occurrences: the lookup throws exactly when the tag is present.
 */
public boolean hasGMEXMLAttributeTag(UMLTaggableElement te){
	try {
		getTagValue(te,TV_NCI_GME_XML_LOC_REF,null,0,0);
		return false;
	} catch (GenerationException e) {
		return true;
	}
}
/**
 * Returns the GME location reference for an association end, resolved through
 * the end's owning association, or null when neither source nor target
 * loc-ref tag matches the given class name.
 */
public String getXMLLocRef(UMLAssociationEnd assocEnd, String klassName)throws GenerationException
{
	try {
		return getGmeLocRef(assocEnd.getOwningAssociation(),klassName);
	} catch(GenerationException ge) {
		log.error("ERROR: ", ge);
		throw new GenerationException("Error getting the GME 'NCI_GME_SOURCE_XML_LOC_REF' or 'NCI_GME_TARGET_XML_LOC_REF' tag value for association roleName: " + assocEnd.getRoleName(), ge);
	}
}
/**
 * Returns the loc-ref prefix (everything before the final '/') of whichever
 * of the association's source/target loc-ref tag values ends with
 * "/" + klassName; source is checked first, null when neither matches.
 */
private String getGmeLocRef(UMLAssociation assoc,String klassName) throws GenerationException
{
	String suffix = "/" + klassName;
	String sourceLocRef = getTagValue(assoc,TV_NCI_GME_SOURCE_XML_LOC_REF,null,0,1);
	if (sourceLocRef != null && sourceLocRef.endsWith(suffix))
		return sourceLocRef.substring(0, sourceLocRef.lastIndexOf('/'));
	String targetLocRef = getTagValue(assoc,TV_NCI_GME_TARGET_XML_LOC_REF,null,0,1);
	if (targetLocRef != null && targetLocRef.endsWith(suffix))
		return targetLocRef.substring(0, targetLocRef.lastIndexOf('/'));
	return null;
}
/** Returns the association's NCI_GME_SOURCE_XML_LOC_REF tag value, or null. */
public String getGmeSourceLocRef(UMLAssociation assoc) throws GenerationException
{
	return getTagValue(assoc,TV_NCI_GME_SOURCE_XML_LOC_REF,null,0,1);
}
/** Returns the association's NCI_GME_TARGET_XML_LOC_REF tag value, or null. */
public String getGmeTargetLocRef(UMLAssociation assoc) throws GenerationException
{
	return getTagValue(assoc,TV_NCI_GME_TARGET_XML_LOC_REF,null,0,1);
}
/**
 * Reports whether the element carries either loc-ref tag. Requests at most
 * zero occurrences of each: the lookup throws exactly when a tag is present.
 */
public boolean hasGMELocRefTag(UMLTaggableElement te){
	try {
		getTagValue(te,TV_NCI_GME_SOURCE_XML_LOC_REF,null,0,0);
		getTagValue(te,TV_NCI_GME_TARGET_XML_LOC_REF,null,0,0);
		return false;
	} catch (GenerationException e) {
		return true;
	}
}
/**
 * Reports whether the package contains at least one class that is included in
 * generation and is not stereotyped as a data-model "table".
 */
public boolean containsIncludedClass(UMLPackage pkg)
throws GenerationException {
	for (UMLClass klass : pkg.getClasses()) {
		// Inclusion is evaluated first, matching the original short-circuit order.
		if (isIncluded(klass) && !STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype())) {
			return true;
		}
	}
	return false;
}
/** Returns the configured XML namespace URI prefix. */
public String getNamespaceUriPrefix() {
	return namespaceUriPrefix;
}
/** Reports whether GME tag processing is enabled. */
public boolean isUseGMETags() {
	return useGMETags;
}
/** Reports whether JAXB annotation generation is enabled. */
public boolean isJaxbEnabled() {
	return isJaxbEnabled;
}
} | sdk-toolkit/software/modules/codegen/src/gov/nih/nci/codegen/util/TransformerUtils.java | package gov.nih.nci.codegen.util;
import gov.nih.nci.codegen.GenerationException;
import gov.nih.nci.codegen.validator.ValidatorAttribute;
import gov.nih.nci.codegen.validator.ValidatorClass;
import gov.nih.nci.codegen.validator.ValidatorModel;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAssociation;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAssociationEnd;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAttribute;
import gov.nih.nci.ncicb.xmiinout.domain.UMLClass;
import gov.nih.nci.ncicb.xmiinout.domain.UMLDatatype;
import gov.nih.nci.ncicb.xmiinout.domain.UMLDependency;
import gov.nih.nci.ncicb.xmiinout.domain.UMLGeneralization;
import gov.nih.nci.ncicb.xmiinout.domain.UMLInterface;
import gov.nih.nci.ncicb.xmiinout.domain.UMLModel;
import gov.nih.nci.ncicb.xmiinout.domain.UMLPackage;
import gov.nih.nci.ncicb.xmiinout.domain.UMLTaggableElement;
import gov.nih.nci.ncicb.xmiinout.domain.UMLTaggedValue;
import gov.nih.nci.ncicb.xmiinout.domain.bean.UMLAssociationEndBean;
import gov.nih.nci.ncicb.xmiinout.util.ModelUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.Vector;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.jdom.Element;
public class TransformerUtils
{
private static Logger log = Logger.getLogger(TransformerUtils.class);
private String BASE_PKG_LOGICAL_MODEL;
private String BASE_PKG_DATA_MODEL;
private String INCLUDE_PACKAGE;
private String EXCLUDE_PACKAGE;
private String EXCLUDE_NAME;
private String EXCLUDE_NAMESPACE;
private String IDENTITY_GENERATOR_TAG;
private Set<String> INCLUDE_PACKAGE_PATTERNS = new HashSet<String>();
private Set<String> EXCLUDE_PACKAGE_PATTERNS = new HashSet<String>();
private Set<String> EXCLUDE_CLASS_PATTERNS = new HashSet<String>();
private Set<String> EXCLUDE_NAMESPACE_PATTERNS = new HashSet<String>();
private String DATABASE_TYPE;
private Map<String,String> CASCADE_STYLES = new HashMap<String,String>();
private ValidatorModel vModel;
private ValidatorModel vModelExtension;
private String namespaceUriPrefix;
private boolean useGMETags = false;
private boolean isJaxbEnabled = false;
/**
* UMLModel from which the code is to be generated
*/
private UMLModel model;
private static final String TV_ID_ATTR_COLUMN = "id-attribute";
private static final String TV_MAPPED_ATTR_COLUMN = "mapped-attributes";
private static final String TV_ASSOC_COLUMN = "implements-association";
private static final String TV_INVERSE_ASSOC_COLUMN = "inverse-of";
private static final String TV_DISCR_COLUMN = "discriminator";
private static final String TV_CORRELATION_TABLE = "correlation-table";
private static final String TV_DOCUMENTATION = "documentation";
private static final String TV_DESCRIPTION = "description";
private static final String TV_LAZY_LOAD = "lazy-load";
private static final String TV_TYPE="type";
private static final String TV_MAPPED_COLLECTION_TABLE = "mapped-collection-table";
private static final String TV_MAPPED_ELEMENT_COLUMN = "mapped-element";
private static final String TV_CADSR_PUBLICID = "CADSR_ConceptualDomainPublicID";
private static final String TV_CADSR_VERSION = "CADSR_ConceptualDomainVersion";
private static final String TV_NCI_CASCADE_ASSOCIATION = "NCI_CASCADE_ASSOCIATION";
private static final String TV_NCI_EAGER_LOAD = "NCI_EAGER_LOAD";
public static final String TV_PK_GENERATOR = "NCI_GENERATOR.";
public static final String TV_PK_GENERATOR_PROPERTY = "NCI_GENERATOR_PROPERTY";
//Global Model Exchange (GME) Project Tag Value Constants; see: https://wiki.nci.nih.gov/display/caCORE/GME+Namespace
public static final String TV_NCI_GME_XML_NAMESPACE = "NCI_GME_XML_NAMESPACE"; //Used for projects, Packages, Classes
public static final String TV_NCI_GME_XML_ELEMENT = "NCI_GME_XML_ELEMENT"; //Used for Classes
public static final String TV_NCI_GME_XML_LOC_REF = "NCI_GME_XML_LOC_REF"; //Used for Attributes
public static final String TV_NCI_GME_SOURCE_XML_LOC_REF = "NCI_GME_SOURCE_XML_LOC_REF"; //Used for Associations
public static final String TV_NCI_GME_TARGET_XML_LOC_REF = "NCI_GME_TARGET_XML_LOC_REF"; //Used for Associations
private static final String STEREO_TYPE_TABLE = "table";
private static final String STEREO_TYPE_DATASOURCE_DEPENDENCY = "DataSource";
public static final String PK_GENERATOR_SYSTEMWIDE = "NCI_GENERATOR_SYSTEMWIDE.";
/**
 * Configures the transformer from the model-file and transformer properties:
 * base packages, include/exclude patterns, identity-generator and database
 * settings, cascade styles, and the validator models.
 *
 * @param umlModelFileProperties properties read from the UML model file
 * @param transformerProperties codegen transformer properties
 * @param cascadeStyles valid Hibernate cascade style names
 * @param vModel caDSR-derived validator model
 * @param vModelExtension user-supplied validator model extension
 * @param model the UML model being transformed
 */
public TransformerUtils(Properties umlModelFileProperties,Properties transformerProperties,List cascadeStyles, ValidatorModel vModel, ValidatorModel vModelExtension, UMLModel model) {
// Every property is optional; a missing value defaults to the empty string.
BASE_PKG_LOGICAL_MODEL = umlModelFileProperties.getProperty("Logical Model") == null ? "" :umlModelFileProperties.getProperty("Logical Model").trim();
BASE_PKG_DATA_MODEL = umlModelFileProperties.getProperty("Data Model")==null ? "" : umlModelFileProperties.getProperty("Data Model").trim();
EXCLUDE_PACKAGE = umlModelFileProperties.getProperty("Exclude Package")==null ? "" : umlModelFileProperties.getProperty("Exclude Package").trim();
INCLUDE_PACKAGE = umlModelFileProperties.getProperty("Include Package")==null ? "" : umlModelFileProperties.getProperty("Include Package").trim();
EXCLUDE_NAME = umlModelFileProperties.getProperty("Exclude Name")==null ? "" : umlModelFileProperties.getProperty("Exclude Name").trim();
EXCLUDE_NAMESPACE = umlModelFileProperties.getProperty("Exclude Namespace")==null ? "" : umlModelFileProperties.getProperty("Exclude Namespace").trim();
// Spaces in the configured prefix are replaced with underscores.
namespaceUriPrefix = transformerProperties.getProperty("namespaceUriPrefix")==null ? "" : transformerProperties.getProperty("namespaceUriPrefix").trim().replace(" ", "_");
useGMETags = transformerProperties.getProperty("useGMETags")==null ? false : Boolean.parseBoolean(transformerProperties.getProperty("useGMETags"))
;
isJaxbEnabled = transformerProperties.getProperty("isJaxbEnabled")==null ? false : Boolean.parseBoolean(transformerProperties.getProperty("isJaxbEnabled"));
this.model = model;
// A GME namespace tag on the model overrides the configured prefix.
if (useGMETags){
setModelNamespace(model,this.getBasePkgLogicalModel());
}
// The include/exclude properties are comma-separated lists of regex patterns
// (matched with Pattern.matches in the isIncluded methods).
for(String excludeToken:EXCLUDE_PACKAGE.split(","))
EXCLUDE_PACKAGE_PATTERNS.add(excludeToken.trim());
for(String includeToken:INCLUDE_PACKAGE.split(","))
INCLUDE_PACKAGE_PATTERNS.add(includeToken.trim());
for(String excludeToken:EXCLUDE_NAME.split(","))
EXCLUDE_CLASS_PATTERNS.add(excludeToken.trim());
for(String excludeToken:EXCLUDE_NAMESPACE.split(","))
EXCLUDE_NAMESPACE_PATTERNS.add(excludeToken.trim());
IDENTITY_GENERATOR_TAG = umlModelFileProperties.getProperty("Identity Generator Tag") == null ? "": umlModelFileProperties.getProperty("Identity Generator Tag").trim();
DATABASE_TYPE = umlModelFileProperties.getProperty("Database Type") == null ? "": umlModelFileProperties.getProperty("Database Type").trim();
// Cascade styles are stored as an identity map: style name -> style name.
for (Object cascadeStyle : cascadeStyles){
CASCADE_STYLES.put((String) cascadeStyle, (String)cascadeStyle);
}
this.vModel = vModel;
log.debug("ValidatorModel: " + vModel);
this.vModelExtension = vModelExtension;
log.debug("ValidatorModel Extension: " + vModelExtension);
}
/**
 * Overrides the configured namespaceUriPrefix with the model's GME namespace
 * tag value when one exists. Failures are logged and otherwise ignored so the
 * codegen.properties value stays in effect.
 */
private void setModelNamespace(UMLModel model, String basePkgLogicalModel){
	try {
		String gmePrefix = this.getModelNamespace(model, basePkgLogicalModel);
		if (gmePrefix != null)
			this.namespaceUriPrefix = gmePrefix;
	} catch (GenerationException e) {
		log.error("Exception caught trying to set GME model namespace URI Prefix: ", e);
	}
}
/** Returns the configured database type (the "Database Type" model property). */
public String getDatabaseType() {
	return DATABASE_TYPE;
}
/**
 * Reports whether the class is included in code generation, based on its
 * fully qualified name and the include/exclude patterns.
 */
public boolean isIncluded(UMLClass klass) throws GenerationException
{
	return isIncluded(getFQCN(klass));
}
/**
 * Reports whether the interface is included in code generation, based on its
 * fully qualified name and the include/exclude patterns.
 */
public boolean isIncluded(UMLInterface interfaze) throws GenerationException
{
	return isIncluded(getFQCN(interfaze));
}
/**
 * Reports whether a fully qualified class name is included in generation:
 * rejected when it matches any exclude-package or exclude-class pattern,
 * accepted when it matches an include-package pattern, rejected otherwise.
 */
public boolean isIncluded(String fqcn)
{
	log.debug("isIncluded(String fqcn) for fqcn: "+fqcn);
	for (String excludePkgPattern : EXCLUDE_PACKAGE_PATTERNS) {
		if (Pattern.matches(excludePkgPattern, fqcn))
			return false;
	}
	for (String excludeClassPattern : EXCLUDE_CLASS_PATTERNS) {
		if (Pattern.matches(excludeClassPattern, fqcn))
			return false;
	}
	for (String includePkgPattern : INCLUDE_PACKAGE_PATTERNS) {
		log.debug("includePkgPattern: "+includePkgPattern+"; fqcn: "+fqcn);
		if (Pattern.matches(includePkgPattern, fqcn))
			return true;
	}
	return false;
}
/**
 * Include/exclude check for a package.  Exclusion patterns win; otherwise the
 * package is treated as included.
 */
public boolean isIncluded(UMLPackage pkg) throws GenerationException
{
String fullPkgName = getFullPackageName(pkg);
log.debug("isIncluded(UMLPackage pkg) for fullPkgName: "+fullPkgName);
for(String excludePkgPattern: EXCLUDE_PACKAGE_PATTERNS)
if (Pattern.matches(excludePkgPattern, fullPkgName))
return false;
// NOTE(review): the loop below is dead code — the method returns true right
// after it whether or not an include pattern matched, unlike isIncluded(String)
// which returns false when nothing matches.  Confirm whether packages are
// really meant to be included by default.
for(String includePkgPattern: INCLUDE_PACKAGE_PATTERNS)
if (Pattern.matches(includePkgPattern, fullPkgName))
return true;
return true;
}
/**
 * True when the class's namespace (its GME namespace tag, or the default
 * prefix + full package name when no tag exists) is not matched by any
 * exclude-namespace pattern.
 */
public boolean isNamespaceIncluded(UMLClass klass, String defaultNamespacePrefix) throws GenerationException
{
    String pkgNamespace = null;
    try {
        pkgNamespace = getGMENamespace(klass);
    } catch (GenerationException ge) {
        log.error("ERROR: ", ge);
        throw new GenerationException("Error getting the GME Namespace tag value for: " + getFullPackageName(klass.getPackage()), ge);
    }
    if (pkgNamespace == null) {
        // No GME tag: build a default namespace from the prefix and package.
        pkgNamespace = defaultNamespacePrefix + getFullPackageName(klass);
    }
    log.debug("* * * * * pkgNamespace:" + pkgNamespace);
    for (String excludePkgNamespacePattern : EXCLUDE_NAMESPACE_PATTERNS) {
        if (Pattern.matches(excludePkgNamespacePattern, pkgNamespace)) {
            return false;
        }
    }
    return true;
}
/**
 * Returns a string of {@code count} tab characters, used to indent generated
 * output.  A null or non-positive count yields "".
 */
public String getEmptySpace(Integer count)
{
    // Guard: the old code NPE'd on a null count (unboxed every iteration).
    if (count == null || count <= 0) {
        return "";
    }
    int n = count;
    // StringBuilder avoids O(n^2) string concatenation in the loop.
    StringBuilder spaces = new StringBuilder(n);
    for (int i = 0; i < n; i++) {
        spaces.append('\t');
    }
    return spaces.toString();
}
/**
 * Fully qualified element name for a taggable element; only classes and
 * packages are supported.
 * @throws GenerationException when the element is neither
 */
public String getFQEN(UMLTaggableElement elt) throws GenerationException {
    if (elt instanceof UMLClass) {
        return getFQCN((UMLClass) elt);
    }
    if (elt instanceof UMLPackage) {
        return getFullPackageName((UMLPackage) elt);
    }
    throw new GenerationException("Error getting fully qualified element name. Supported taggable element types include UMLClass and UMLPackage; element is neither");
}
/** Fully qualified class name with the logical/data model base package stripped. */
public String getFQCN(UMLClass klass)
{
return removeBasePackage(ModelUtil.getFullName(klass));
}
/** Fully qualified interface name with the logical/data model base package stripped. */
public String getFQCN(UMLInterface interfaze)
{
return removeBasePackage(ModelUtil.getFullName(interfaze));
}
/**
 * Full package name of a class, interface, or package, with the model base
 * package stripped; "" for any other element kind.
 */
public String getFullPackageName(UMLTaggableElement te)
{
    String fullName;
    if (te instanceof UMLClass) {
        fullName = ModelUtil.getFullPackageName((UMLClass) te);
    } else if (te instanceof UMLInterface) {
        fullName = ModelUtil.getFullPackageName((UMLInterface) te);
    } else if (te instanceof UMLPackage) {
        fullName = ModelUtil.getFullPackageName((UMLPackage) te);
    } else {
        return "";
    }
    return removeBasePackage(fullName);
}
/** Strips whichever model base package (logical or data) prefixes the path. */
private String removeBasePackage(String path)
{
    String logicalPrefix = BASE_PKG_LOGICAL_MODEL + ".";
    if (path.startsWith(logicalPrefix)) {
        return path.substring(logicalPrefix.length());
    }
    String dataPrefix = BASE_PKG_DATA_MODEL + ".";
    if (path.startsWith(dataPrefix)) {
        return path.substring(dataPrefix.length());
    }
    return path;
}
/** Returns the configured logical-model base package name. */
public String getBasePkgLogicalModel(){
return BASE_PKG_LOGICAL_MODEL;
}
/**
 * Returns the single superclass of klass, or null when it has none.
 * @throws GenerationException when the model declares multiple superclasses
 *         (only single inheritance is supported by the generator)
 */
public UMLClass getSuperClass(UMLClass klass) throws GenerationException
{
    UMLClass[] parents = ModelUtil.getSuperclasses(klass);
    if (parents.length == 0) {
        log.debug("*** Getting superclass for class " + klass.getName() + ": " + null);
        return null;
    }
    if (parents.length > 1) {
        throw new GenerationException("Class can not have more than one super class");
    }
    log.debug("*** Getting superclass for class " + klass.getName() + ": " + parents[0].getName());
    return parents[0];
}
/**
 * Returns the "extends X" clause for the generated class, or "" when the
 * class has no superclass in the model.
 * @throws GenerationException if the class has more than one superclass
 */
public String getSuperClassString(UMLClass klass) throws GenerationException
{
UMLClass superClass = getSuperClass(klass);
if(superClass == null)
// NOTE(review): both branches below return "" — the isJaxbEnabled() split looks
// like a leftover; confirm whether JAXB mode was meant to emit a different base.
if (isJaxbEnabled()){
return "";
} else {
return "";
}
else
return "extends " + superClass.getName();
}
/**
 * Returns the super-interfaces of an interface, or null (not an empty array)
 * when it extends nothing.
 */
public UMLInterface[] getSuperInterface(UMLInterface interfaze) throws GenerationException
{
    UMLInterface[] parents = ModelUtil.getSuperInterfaces(interfaze);
    if (parents.length == 0) {
        log.debug("*** Getting superinterface for interface " + interfaze.getName() + ": " + null);
        return null;
    }
    log.debug("*** Getting superinterface for interface " + interfaze.getName() + ": " + parents[0].getName());
    return parents;
}
/**
 * Builds the "extends A, B" clause for a generated interface, or "" when it
 * has no super-interfaces.
 */
public String getSuperInterfaceString(UMLInterface interfaze) throws GenerationException
{
    UMLInterface[] superInterfaces = getSuperInterface(interfaze);
    if (superInterfaces == null) {
        return "";
    }
    // StringBuilder instead of += in a loop avoids repeated reallocation.
    StringBuilder sb = new StringBuilder("extends ");
    sb.append(superInterfaces[0].getName());
    for (int i = 1; i < superInterfaces.length; i++) {
        sb.append(", ").append(superInterfaces[i].getName());
    }
    return sb.toString();
}
/**
 * Returns the interfaces realized by a class, or null (not an empty array)
 * when it realizes none.
 */
public UMLInterface[] getInterfaces(UMLClass klass) throws GenerationException
{
    UMLInterface[] realized = ModelUtil.getInterfaces(klass);
    if (realized.length == 0) {
        log.debug("*** Getting interface for class " + klass.getName() + ": " + null);
        return null;
    }
    log.debug("*** Getting superclass for class " + klass.getName() + ": " + realized[0].getName());
    return realized;
}
/**
 * Returns a ", A, B" suffix listing the interfaces the class implements, or
 * "" when it implements none.  The leading ", " is intentional — callers
 * append the result to an existing clause.
 */
public String getInterfaceString(UMLClass klass) throws GenerationException
{
    UMLInterface[] interfaces = getInterfaces(klass);
    if (interfaces == null) {
        return "";
    }
    // StringBuilder instead of += in a loop avoids repeated reallocation.
    StringBuilder sb = new StringBuilder();
    for (UMLInterface interfaze : interfaces) {
        sb.append(", ").append(interfaze.getName());
    }
    return sb.toString();
}
/**
 * Builds the import lines a generated interface needs for super-interfaces
 * declared in other packages (de-duplicated via a Set).
 */
public String getInterfaceImports(UMLInterface interfaze) throws GenerationException
{
    Set<String> importList = new HashSet<String>();
    String pkgName = getFullPackageName(interfaze);
    for (UMLInterface superInterfaze : ModelUtil.getSuperInterfaces(interfaze)) {
        String superInterfacePkg = getFullPackageName(superInterfaze);
        if (!pkgName.equals(superInterfacePkg)) {
            importList.add(getFQCN(superInterfaze));
        }
    }
    StringBuilder sb = new StringBuilder();
    for (String importClass : importList) {
        sb.append("import ").append(importClass).append(";\n");
    }
    return sb.toString();
}
/**
 * Builds the block of Java import statements for a generated class:
 * superclass and realized interfaces from other packages,
 * java.util.Collection when an attribute or navigable to-many association
 * needs it, associated classes from other packages, and Hibernate Validator
 * constraint imports.
 * @param klass the logical-model class being generated
 * @return newline-separated "import x.y.Z;" lines (possibly empty)
 * @throws GenerationException if the class has more than one superclass
 */
public String getImports(UMLClass klass) throws GenerationException
{
StringBuilder sb = new StringBuilder();
Set<String> importList = new HashSet<String>();
UMLClass[] superClasses = ModelUtil.getSuperclasses(klass);
UMLInterface[] interfaces = ModelUtil.getInterfaces(klass);
if(superClasses.length>1)
throw new GenerationException("Class can not have more than one super classes");
String pkgName = getFullPackageName(klass);
// Superclass from another package must be imported.
if(superClasses.length == 1)
{
String superPkg = getFullPackageName(superClasses[0]);
if(!pkgName.equals(superPkg))
importList.add(getFQCN(superClasses[0]));
}
// Realized interfaces from other packages.
for (UMLInterface interfaze : interfaces) {
String interfacePkg = getFullPackageName(interfaze);
if (!pkgName.equals(interfacePkg))
importList.add(getFQCN(interfaze));
}
// One Collection import suffices, hence the break on first collection attribute.
for(UMLAttribute attr: klass.getAttributes())
{
if(getDataType(attr).startsWith("Collection") && !importList.contains("java.util.Collection"))
{
importList.add("java.util.Collection");
break;
}
}
// Associated classes in other packages; navigable to-many ends also need Collection.
for(UMLAssociation association: klass.getAssociations())
{
List<UMLAssociationEnd> assocEnds = association.getAssociationEnds();
UMLAssociationEnd otherEnd = getOtherEnd(klass,assocEnds);
String assocKlass = getFQCN ((UMLClass)otherEnd.getUMLElement());
if(!pkgName.equals(getFullPackageName ((UMLClass)otherEnd.getUMLElement())) && !importList.contains(assocKlass))
importList.add(assocKlass);
if(isAssociationEndMany(otherEnd) && otherEnd.isNavigable()&& !importList.contains("java.util.Collection"))
importList.add("java.util.Collection");
}
importList.addAll(getHibernateValidatorConstraintImports(klass));
for(String importClass:importList)
sb.append("import ").append(importClass).append(";\n");
return sb.toString();
}
/**
 * Maps a UML attribute's datatype to the Java type name used in generated
 * code.  Primitive-style names map to their wrapper types (int -> Integer,
 * boolean -> Boolean, ...), "collection&lt;x&gt;" names map to typed
 * Collection&lt;X&gt;, and date maps to java.util.Date.  An unrecognized name
 * is logged as an error and returned unchanged.
 */
public String getDataType(UMLAttribute attr)
{
UMLDatatype dataType = attr.getDatatype();
String name = dataType.getName();
// Class-typed attributes use the fully qualified (base-package-stripped) name.
if(dataType instanceof UMLClass)
name = getFQCN((UMLClass)dataType);
if(name.startsWith("java.lang."))
name = name.substring("java.lang.".length());
if("int".equalsIgnoreCase(name) || "integer".equalsIgnoreCase(name))
return "Integer";
if("double".equalsIgnoreCase(name))
return "Double";
if("float".equalsIgnoreCase(name))
return "Float";
if("long".equalsIgnoreCase(name))
return "Long";
if("string".equalsIgnoreCase(name))
return "String";
if("char".equalsIgnoreCase(name) || "character".equalsIgnoreCase(name))
return "Character";
if("boolean".equalsIgnoreCase(name) )
return "Boolean";
if("byte".equalsIgnoreCase(name) )
return "Byte";
if("byte[]".equalsIgnoreCase(name) )
return "byte[]";
if("short".equalsIgnoreCase(name) )
return "Short";
if("date".equalsIgnoreCase(name) || "java.util.date".equalsIgnoreCase(name))
return "java.util.Date";
if("collection<int>".equalsIgnoreCase(name) || "collection<integer>".equalsIgnoreCase(name))
return "Collection<Integer>";
if("collection<double>".equalsIgnoreCase(name))
return "Collection<Double>";
if("collection<float>".equalsIgnoreCase(name))
return "Collection<Float>";
if("collection<long>".equalsIgnoreCase(name))
return "Collection<Long>";
if("collection<string>".equalsIgnoreCase(name))
return "Collection<String>";
if("collection<boolean>".equalsIgnoreCase(name))
return "Collection<Boolean>";
if("collection<byte>".equalsIgnoreCase(name))
return "Collection<Byte>";
if("collection<short>".equalsIgnoreCase(name))
return "Collection<Short>";
if("collection<char>".equalsIgnoreCase(name) || "collection<character>".equalsIgnoreCase(name))
return "Collection<Character>";
// Fall-through: pass the (possibly class) name through unchanged.
log.error("Unknown data type = "+name);
return name;
}
/**
 * Collects the primary-key generator configuration for the table column mapped
 * to the class's id attribute.  When a generator-class tag is present, its
 * "name:value" property tags are parsed into the returned map; otherwise the
 * system-wide identity generator for the configured database type is used.
 * @throws GenerationException if the mapped column cannot be resolved
 */
public HashMap<String, String> getPKGeneratorTags(UMLClass table,String fqcn,UMLAttribute classIdAttr) throws GenerationException {
    HashMap<String, String> pkTags = new HashMap<String, String>();
    String pkgenClassKey = TV_PK_GENERATOR + DATABASE_TYPE;
    UMLAttribute tableIdAttribute = getMappedColumn(table, fqcn + "." + classIdAttr.getName());
    Collection<UMLTaggedValue> tableTaggedValues = tableIdAttribute.getTaggedValues();
    String pkGeneratorClass = getTagValue(tableTaggedValues, pkgenClassKey, 1);
    if (pkGeneratorClass != null && !("".equals(pkGeneratorClass))) {
        for (int i = 1; i <= tableTaggedValues.size(); i++) {
            String pkgenProp = TV_PK_GENERATOR_PROPERTY + i + "." + DATABASE_TYPE;
            String pkParam = getTagValue(tableTaggedValues, pkgenProp, 1);
            // Guard against a missing property tag (getTagValue can yield null)
            // and against a malformed entry with no ":value" part — the old
            // code threw NPE / NoSuchElementException here.
            if (pkParam == null) {
                continue;
            }
            StringTokenizer tokenizer = new StringTokenizer(pkParam, ":");
            if (tokenizer.countTokens() >= 2) {
                pkTags.put(tokenizer.nextToken(), tokenizer.nextToken());
            }
        }
        pkTags.put(pkgenClassKey, pkGeneratorClass);
    } else {
        // No explicit generator: fall back to the system-wide identity generator.
        pkTags.put(PK_GENERATOR_SYSTEMWIDE + DATABASE_TYPE, IDENTITY_GENERATOR_TAG);
    }
    return pkTags;
}
/**
 * Maps a UML attribute to the Hibernate type name used in the generated
 * mapping file.  A column tagged CLOB must map from a String attribute (to
 * Hibernate "text"); byte[] maps to the Spring blob user type; primitive-style
 * names map to Hibernate basic types.  Unrecognized names pass through.
 * @throws GenerationException if a CLOB column is mapped to a non-String attribute
 */
public String getHibernateDataType(UMLClass klass, UMLAttribute attr) throws GenerationException
{
log.debug("getHibernateDataType for klass: " + klass.getName() + ", attr: " + attr.getName());
String fqcn = getFQCN(klass);
UMLClass table = getTable(klass);
UMLAttribute col = getMappedColumn(table,fqcn+"."+attr.getName());
// The column's "type" tagged value decides CLOB handling.
Boolean isClob = "CLOB".equalsIgnoreCase(getTagValue(col.getTaggedValues(),TV_TYPE, 1));
UMLDatatype dataType = attr.getDatatype();
String name = dataType.getName();
if(dataType instanceof UMLClass)
name = getFQCN((UMLClass)dataType);
if(name.startsWith("java.lang."))
name = name.substring("java.lang.".length());
if(isClob && "string".equalsIgnoreCase(name))
return "text";
if(isClob && !"string".equalsIgnoreCase(name))
throw new GenerationException("Can not map CLOB to anything other than String");
if("byte[]".equalsIgnoreCase(name))
return "org.springframework.orm.hibernate3.support.BlobByteArrayType";
if("int".equalsIgnoreCase(name) || "integer".equalsIgnoreCase(name))
return "integer";
if("double".equalsIgnoreCase(name))
return "double";
if("float".equalsIgnoreCase(name))
return "float";
if("long".equalsIgnoreCase(name))
return "long";
if("string".equalsIgnoreCase(name))
return "string";
if("char".equalsIgnoreCase(name) || "character".equalsIgnoreCase(name))
return "character";
if("boolean".equalsIgnoreCase(name) )
return "boolean";
if("byte".equalsIgnoreCase(name) )
return "byte";
if("short".equalsIgnoreCase(name) )
return "short";
if("date".equalsIgnoreCase(name) || "java.util.date".equalsIgnoreCase(name))
return "java.util.Date";
log.info("Type = "+name);
return name;
}
/** JavaBean getter name ("getX") for an attribute. */
public String getGetterMethodName(UMLAttribute attr)
{
    String name = attr.getName();
    String capitalized = name.substring(0, 1).toUpperCase() + name.substring(1);
    return "get" + capitalized;
}
/** JavaBean setter name ("setX") for an attribute. */
public String getSetterMethodName(UMLAttribute attr)
{
    String name = attr.getName();
    String capitalized = name.substring(0, 1).toUpperCase() + name.substring(1);
    return "set" + capitalized;
}
/**
 * Of the association's two ends, returns the one anchored on klass.
 * @throws GenerationException when neither end belongs to klass
 */
public UMLAssociationEnd getThisEnd(UMLClass klass, List<UMLAssociationEnd>assocEnds) throws GenerationException
{
    UMLAssociationEnd first = assocEnds.get(0);
    UMLAssociationEnd second = assocEnds.get(1);
    if (first.getUMLElement().equals(klass)) {
        return first;
    }
    if (second.getUMLElement().equals(klass)) {
        return second;
    }
    throw new GenerationException("Could not figureout this end");
}
/**
 * Of the association's two ends, returns the one NOT anchored on klass.
 * @throws GenerationException when neither end belongs to klass
 */
public UMLAssociationEnd getOtherEnd(UMLClass klass, List<UMLAssociationEnd>assocEnds) throws GenerationException
{
    UMLAssociationEnd first = assocEnds.get(0);
    UMLAssociationEnd second = assocEnds.get(1);
    if (first.getUMLElement().equals(klass)) {
        return second;
    }
    if (second.getUMLElement().equals(klass)) {
        return first;
    }
    throw new GenerationException("Could not figureout other end" );
}
/** A negative multiplicity bound (modeling "*") marks a to-many association end. */
public Boolean isAssociationEndMany(UMLAssociationEnd assocEnd)
{
    return assocEnd.getHighMultiplicity() < 0 || assocEnd.getLowMultiplicity() < 0;
}
/** Delegates to the class-level implicit-parent check for the end's class. */
public Boolean isImplicitParent(UMLAssociationEnd assocEnd)
{
return isImplicitParent((UMLClass)assocEnd.getUMLElement());
}
/** JavaBean getter name ("getX") for an association end's role name. */
public String getGetterMethodName(UMLAssociationEnd assocEnd)
{
    String name = assocEnd.getRoleName();
    String capitalized = name.substring(0, 1).toUpperCase() + name.substring(1);
    return "get" + capitalized;
}
/** JavaBean setter name ("setX") for an association end's role name. */
public String getSetterMethodName(UMLAssociationEnd assocEnd)
{
    String name = assocEnd.getRoleName();
    String capitalized = name.substring(0, 1).toUpperCase() + name.substring(1);
    return "set" + capitalized;
}
/** True when both association ends are anchored on the same UML element. */
public Boolean isSelfAssociation(UMLAssociationEnd assocEnd1,UMLAssociationEnd assocEnd2)
{
return assocEnd1.getUMLElement().equals(assocEnd2.getUMLElement());
}
/**
 * Returns the getter method name ("getX") for the class's id attribute, or
 * null when no id attribute can be resolved.
 */
public String getClassIdGetterMthod(UMLClass klass) throws GenerationException
{
    String idAttrName = getClassIdAttrName(klass);
    if (idAttrName == null) return null;
    // Reuse the resolved name instead of walking the model a second time.
    return "get" + firstCharUpper(idAttrName);
}
/** Upper-cases the first character; null and empty strings pass through unchanged. */
private String firstCharUpper(String data)
{
    if (data == null || data.length() == 0) {
        return data;
    }
    return Character.toUpperCase(data.charAt(0)) + data.substring(1);
}
/**
 * Returns the name of the attribute mapped to the class's primary key, or
 * null when none can be resolved.
 */
public String getClassIdAttrName(UMLClass klass) throws GenerationException
{
    UMLAttribute idAttr = getClassIdAttr(klass);
    // Reuse the resolved attribute instead of resolving it twice.
    return (idAttr == null) ? null : idAttr.getName();
}
/**
 * Resolves the attribute acting as the class's primary-key identifier:
 * first an attribute tagged as the id column, then an attribute literally
 * named "id", then (recursively) the superclass's id attribute.  Returns
 * null when nothing matches.
 */
public UMLAttribute getClassIdAttr(UMLClass klass) throws GenerationException
{
String fqcn = getFQCN(klass);
UMLAttribute idAttr = getColumn(klass,TV_ID_ATTR_COLUMN, fqcn,true,0,1);
if(idAttr !=null) return idAttr;
String idAttrName = "id";
for(UMLAttribute attribute:klass.getAttributes())
if(idAttrName.equals(attribute.getName()))
return attribute;
// Walk generalizations where klass is the subtype and recurse upward.
for(UMLGeneralization gen: klass.getGeneralizations())
{
if(gen.getSubtype() == klass && gen.getSupertype() != klass)
{
UMLAttribute superId = getClassIdAttr((UMLClass)gen.getSupertype());
if(superId != null)
return superId;
}
}
return null;
//throw new GenerationException("No attribute found that maps to the primary key identifier for class : "+fqcn);
}
/** True when the attribute's mapped Java type is a Collection&lt;...&gt; variant. */
public Boolean isCollection(UMLClass klass, UMLAttribute attr ) throws GenerationException
{
    return getDataType(attr).startsWith("Collection");
}
/** True when the attribute carries a 'static' tagged value equal to "1". */
public boolean isStatic(UMLAttribute att){
    UMLTaggedValue tValue = att.getTaggedValue("static");
    if (tValue == null) {
        return false;
    }
    log.debug("UMLAttribute 'static' Tagged Value: " + tValue.getValue());
    return "1".equalsIgnoreCase(tValue.getValue());
}
/** True when the UML class carries the abstract modifier. */
public boolean isAbstract(UMLClass klass){
return klass.getAbstractModifier().isAbstract();
}
/** Returns the 'type' tagged value on the association end, or "" when absent. */
public String getType(UMLAssociationEnd assocEnd){
    UMLTaggedValue tValue = assocEnd.getTaggedValue("type");
    if (tValue == null) {
        return "";
    }
    log.debug("UMLAttribute Type Tagged Value: " + tValue.getValue());
    return tValue.getValue();
}
/**
 * Returns the opposite end of the association that owns assocEnd, or null
 * when no distinct end is found.
 */
public UMLAssociationEnd getOtherAssociationEnd(UMLAssociationEnd assocEnd) {
    for (UMLAssociationEnd candidate : assocEnd.getOwningAssociation().getAssociationEnds()) {
        // Identity (not equals) comparison: we want a different end object.
        if (candidate != assocEnd) {
            return candidate;
        }
    }
    return null;
}
/**
 * Upper multiplicity bound of an association end as a string; -1 (i.e. "*")
 * maps to "unbounded".
 */
public String getUpperBound(UMLAssociationEnd otherEnd) {
    int multiplicity = otherEnd.getHighMultiplicity();
    // String.valueOf replaces the deprecated new Integer(...) boxing and the
    // useless new String() initialization.
    return (multiplicity == -1) ? "unbounded" : String.valueOf(multiplicity);
}
/**
 * Lower multiplicity bound of an association end as a string; -1 maps to
 * "unbounded".
 */
public String getLowerBound(UMLAssociationEnd otherEnd) {
    int multiplicity = otherEnd.getLowMultiplicity();
    // String.valueOf replaces the deprecated new Integer(...) boxing and the
    // useless new String() initialization.
    return (multiplicity == -1) ? "unbounded" : String.valueOf(multiplicity);
}
/**
 * Returns the multiplicity display value for an association end: the raw
 * "multiplicity" XML attribute when present, otherwise a value derived from
 * the low/high bounds ("" when both negative, "low..high" when both set,
 * or the single non-negative bound).
 */
public String getMultiplicityValue(UMLAssociationEnd assocEnd){
// Reach into the JDom element because the bean API exposes only parsed bounds.
Element element = ((UMLAssociationEndBean)assocEnd).getJDomElement();
org.jdom.Attribute multAtt = element.getAttribute("multiplicity");
//log.debug("associationEnd: " + assocEnd.getRoleName() + "; multiplicity: " + multAtt.getValue());
if (multAtt!=null)
return multAtt.getValue();
int low = assocEnd.getLowMultiplicity();
int high = assocEnd.getHighMultiplicity();
if(low <0 && high<0)
return "";
if(low >=0 && high>=0)
return low+".."+high;
if(low<0)
return high+"";
return low+"";
}
/** Valid when the multiplicity value is non-empty and not an open ".." fragment. */
public boolean isMultiplicityValid(UMLAssociationEnd assocEnd){
    String multValue = getMultiplicityValue(assocEnd);
    if (multValue == null || "".equalsIgnoreCase(multValue)) {
        return false;
    }
    return !multValue.startsWith(".") && !multValue.endsWith(".");
}
/**
 * True when this end is to-many and the other end is to-one
 * (a many-to-one association from this class's point of view).
 * @param thisEnd the end anchored on the class being generated
 * @param otherEnd the opposite association end
 * @return true for a many-to-one association
 */
public boolean isMany2One(UMLAssociationEnd thisEnd, UMLAssociationEnd otherEnd) {
return isAssociationEndMany(thisEnd) && !isAssociationEndMany(otherEnd);
}
/**
 * True when this end is to-many, the other end is to-one, and the other
 * end's class is an implicit parent (no table mapping) — presumably mapped
 * with a Hibernate &lt;any&gt; element; confirm against the template usage.
 * @param thisEnd the end anchored on the class being generated
 * @param otherEnd the opposite association end
 * @return true for a to-one association targeting an implicit parent
 */
public boolean isAny(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
return isAssociationEndMany(thisEnd) && !isAssociationEndMany(otherEnd) && isImplicitParent(otherEnd);
}
/**
 * True when this end is to-one and the other end is to-many
 * (a one-to-many association from this class's point of view).
 * @param thisEnd the end anchored on the class being generated
 * @param otherEnd the opposite association end
 * @return true for a one-to-many association
 */
public boolean isOne2Many(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
return !isAssociationEndMany(thisEnd) && isAssociationEndMany(otherEnd);
}
/**
 * True when both ends are to-many (a many-to-many association).
 * @param thisEnd the end anchored on the class being generated
 * @param otherEnd the opposite association end
 * @return true for a many-to-many association
 */
public boolean isMany2Many(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
return isAssociationEndMany(thisEnd) && isAssociationEndMany(otherEnd);
}
/**
 * True when both ends are to-many and the other end's class is an implicit
 * parent (no table mapping).
 * @param thisEnd the end anchored on the class being generated
 * @param otherEnd the opposite association end
 * @return true for a to-many association targeting an implicit parent
 */
public boolean isMany2Any(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
return isAssociationEndMany(thisEnd) && isAssociationEndMany(otherEnd) && isImplicitParent(otherEnd);
}
/**
 * True when both ends are to-one (a one-to-one association).
 * @param thisEnd the end anchored on the class being generated
 * @param otherEnd the opposite association end
 * @return true for a one-to-one association
 */
public boolean isOne2One(UMLAssociationEnd thisEnd,UMLAssociationEnd otherEnd) {
return !isAssociationEndMany(thisEnd) && !isAssociationEndMany(otherEnd);
}
/** Convenience overload: association ends anchored on klass only (no inherited ends). */
public Collection getAssociationEnds(UMLClass klass) {
return getAssociationEnds(klass, false);
}
/**
 * Collects the association ends anchored on klass.  The includeInherited
 * branch is not implemented — when requested, only a debug message is
 * logged and the walk does not ascend to superclasses.
 * @param klass the class whose ends are collected
 * @param includeInherited NOT implemented (see TODO below)
 * @return the list of ends whose element is klass
 */
public Collection getAssociationEnds(UMLClass klass,
boolean includeInherited) {
log.debug("class = " + klass.getName() + ", includeInherited = "
+ includeInherited);
List<UMLAssociationEnd> assocEndsList = new ArrayList<UMLAssociationEnd>();
UMLClass superClass = klass;
while (superClass != null) {
Collection assocs = superClass.getAssociations();
log.debug( superClass.getName() + " association collection size(): " + assocs.size());
for (Iterator i = assocs.iterator(); i.hasNext();) {
UMLAssociation assoc = (UMLAssociation) i.next();
for (UMLAssociationEnd ae:assoc.getAssociationEnds()){
UMLAssociationEnd otherEnd = getOtherAssociationEnd(ae);
String id = ((UMLClass)(otherEnd.getUMLElement())).getName() + Constant.LEFT_BRACKET
+ getFQCN((UMLClass)(otherEnd.getUMLElement())) + Constant.RIGHT_BRACKET;
log.debug("id (otherEnd): " + id);
log.debug("superClass: " + superClass.getName());
// Keep only the ends anchored on the class currently being examined.
if ((UMLClass)ae.getUMLElement() == superClass) {
log.debug("adding association: " + id + " for class " + superClass.getName());
assocEndsList.add(ae);
}
}
}
if (includeInherited) {
// TODO :: Implement includeInherited
// Collection gens = superClass.getGeneralization();
// if (gens.size() > 0) {
// superClass = (Classifier) ((Generalization) gens.iterator()
// .next()).getParent();
// } else {
// superClass = null;
// }
log.debug("Need to implement includeInherited");
} else {
superClass = null;
}
}
return assocEndsList;
}
/**
 * Recursively groups included, non-&lt;&lt;table&gt;&gt; classes by full package
 * name into pkgColl.  Excluded packages are skipped but their sub-packages
 * are still visited.
 * @param nextLevelPackages the packages to walk at this level
 * @param pkgColl accumulator: package name -> classes in that package
 */
public void collectPackages(Collection<UMLPackage> nextLevelPackages, Hashtable<String, Collection<UMLClass>> pkgColl) throws GenerationException
{
for(UMLPackage pkg:nextLevelPackages){
if (isIncluded(pkg)){
String pkgName=getFullPackageName(pkg);
log.debug("including package: " + pkgName);
Collection<UMLClass> pkgClasses = pkg.getClasses();
if (pkgClasses != null && pkgClasses.size() > 0){
for (UMLClass klass:pkgClasses){
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass)) {
if(!pkgColl.containsKey(pkgName)) {
List<UMLClass> classes = new ArrayList<UMLClass>();
classes.add(klass);
pkgColl.put(pkgName, classes);
} else {
Collection<UMLClass> existingCollection = pkgColl.get(pkgName);
existingCollection.add(klass);
}
}
}
}
} else{
log.debug("excluding package: " + pkg.getName());
}
// Sub-packages are visited even when the parent package is excluded.
collectPackages(pkg.getPackages(), pkgColl);
}
}
/**
 * Groups included classes by namespace into pkgColl.  The key is the class's
 * GME namespace (extended with the package name when the tag ends with "/"
 * or does not already include it); classes whose namespace matches an
 * exclude pattern are skipped.
 * @param allClasses the classes to group
 * @param pkgColl accumulator: namespace -> classes
 * @param defaultNamespacePrefix prefix used when a class has no GME namespace tag
 */
public void collectPackages(Collection<UMLClass> allClasses, Hashtable<String, Collection<UMLClass>> pkgColl,String defaultNamespacePrefix)
throws GenerationException {
String pkgName=null;
String pkgNamespace=null;
for(UMLClass klass:allClasses){
// GME package name wins over the plain model package name.
pkgName = getGMEPackageName(klass);
if (pkgName == null)
pkgName=getFullPackageName(klass);
log.debug("processing klass: " + klass.getName() + " of package " + pkgName);
if (isNamespaceIncluded(klass,defaultNamespacePrefix)){
log.debug("including package: " + pkgName);
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass)) { //No longer using GME ClassName; e.g., no longer using isIncluded(pkgName+"."+getClassName(klass))) {
pkgNamespace=getGMENamespace(klass);
if (pkgNamespace !=null && (pkgNamespace.endsWith("/") || !pkgNamespace.endsWith(pkgName)))
pkgNamespace=pkgNamespace+pkgName;
log.debug("pkgNamespace: " + pkgNamespace);
if(!pkgColl.containsKey(pkgNamespace)) {
List<UMLClass> classes = new ArrayList<UMLClass>();
classes.add(klass);
pkgColl.put(pkgNamespace, classes);
} else {
Collection<UMLClass> existingCollection = pkgColl.get(pkgNamespace);
existingCollection.add(klass);
}
}
} else{
log.debug("excluding class: " +klass.getName()+" with package: " + pkgName);
}
}
}
/**
 * Resolves the GME package name for a class: first the class's own
 * namespace-package tag, then (recursively) its enclosing packages.
 * Returns null when no tag is found anywhere up the chain.
 */
public String getGMEPackageName(UMLClass klass) throws GenerationException{
String namespacePkgName = null;
try {
namespacePkgName = getNamespacePackageName(klass);
if (namespacePkgName!=null && namespacePkgName.length()>0)
return namespacePkgName;
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME package name for: " + getFQEN(klass), ge);
}
// Fall back to the package hierarchy.
namespacePkgName=getGMEPackageName(klass.getPackage());
if (namespacePkgName!=null && namespacePkgName.length()>0)
return namespacePkgName;
log.debug("GME Package name not found for: "+getFullPackageName(klass)+". Returning null");
return null;
}
/**
 * Walks up the package tree until a non-empty GME namespace package name is
 * found; null when the root is reached without one.
 */
public String getGMEPackageName(UMLPackage pkg) throws GenerationException{
    if (pkg == null) {
        return null;
    }
    log.debug("Getting Package Name for: " + pkg.getName());
    String namespacePkgName = getNamespacePackageName(pkg);
    if (namespacePkgName != null && namespacePkgName.length() > 0) {
        return namespacePkgName;
    }
    // Recurse toward the model root.
    return getGMEPackageName(pkg.getParent());
}
/**
 * Returns the GME XML element name for the class when one is modeled,
 * otherwise the plain UML class name.
 * @throws GenerationException if the GME class name tag cannot be read
 */
private String getClassName(UMLClass klass)throws GenerationException{
    try {
        String klassName = getXMLClassName(klass);
        if (klassName != null) {
            return klassName;
        }
    } catch(GenerationException ge) {
        log.error("ERROR: ", ge);
        // Preserve the original cause instead of discarding it (the sibling
        // handlers in this class pass the cause through).
        throw new GenerationException("Error getting the GME Class (XML Element) name for klass: " + getFQCN(klass), ge);
    }
    return klass.getName();
}
/**
 * Returns all included logical-model classes in the XMI file; &lt;&lt;table&gt;&gt;
 * classes and excluded packages/classes are filtered out.
 * @param model the UML model to scan
 * @throws GenerationException if the model cannot be traversed
 */
public Collection<UMLClass> getAllClasses(UMLModel model) throws GenerationException
{
    try {
        Collection<UMLClass> classes = new HashSet<UMLClass>();
        getAllClasses(model.getPackages(), classes);
        return classes;
    } catch (Exception e) {
        log.error("Unable to retrieve classes from model: ", e);
        throw new GenerationException("Unable to retrieve classes from model: ", e);
    }
}
/** Recurses into each package, accumulating included classes. */
private void getAllClasses(Collection<UMLPackage> pkgCollection,Collection<UMLClass> classes)throws GenerationException
{
    for (UMLPackage pkg : pkgCollection) {
        getAllClasses(pkg, classes);
    }
}
/** Adds included, non-&lt;&lt;table&gt;&gt; classes from this package, then recurses. */
private void getAllClasses(UMLPackage rootPkg,Collection<UMLClass> classes) throws GenerationException
{
    if (isIncluded(rootPkg)) {
        for (UMLClass klass : rootPkg.getClasses()) {
            boolean isTable = STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype());
            if (!isTable && isIncluded(klass)) {
                classes.add(klass);
            }
        }
    }
    // Sub-packages are walked regardless of the parent's inclusion.
    getAllClasses(rootPkg.getPackages(), classes);
}
/**
 * Returns all included interfaces in the XMI file; excluded packages and
 * interfaces are filtered out.
 * @param model the UML model to scan
 * @throws GenerationException if the model cannot be traversed
 */
public Collection<UMLInterface> getAllInterfaces(UMLModel model) throws GenerationException
{
    try {
        Collection<UMLInterface> interfaces = new HashSet<UMLInterface>();
        getAllInterfaces(model.getPackages(), interfaces);
        return interfaces;
    } catch (Exception e) {
        log.error("Unable to retrieve interfaces from model: ", e);
        throw new GenerationException("Unable to retrieve interfaces from model: ", e);
    }
}
/** Recurses into each package, accumulating included interfaces. */
private void getAllInterfaces(Collection<UMLPackage> pkgCollection,Collection<UMLInterface> interfaces)throws GenerationException
{
    for (UMLPackage pkg : pkgCollection) {
        getAllInterfaces(pkg, interfaces);
    }
}
/** Adds included, non-&lt;&lt;table&gt;&gt; interfaces from this package, then recurses. */
private void getAllInterfaces(UMLPackage rootPkg,Collection<UMLInterface> interfaces) throws GenerationException
{
    if (isIncluded(rootPkg)) {
        for (UMLInterface interfaze : rootPkg.getInterfaces()) {
            boolean isTable = STEREO_TYPE_TABLE.equalsIgnoreCase(interfaze.getStereotype());
            if (!isTable && isIncluded(interfaze)) {
                interfaces.add(interfaze);
            }
        }
    }
    getAllInterfaces(rootPkg.getPackages(), interfaces);
}
/**
 * Returns the classes that need a Hibernate mapping file: inheritance-root
 * classes plus the leaf classes directly under an implicit (unmapped) parent.
 */
public Collection<UMLClass> getAllHibernateClasses(UMLModel model) throws GenerationException
{
Collection<UMLClass> allHibernateClasses = getAllParentClasses(model);
allHibernateClasses.addAll(getAllImplicitParentLeafClasses(model));
return allHibernateClasses;
}
/**
 * Returns all included classes that are roots of their inheritance hierarchy
 * (no superclass and not an implicit parent); tables and excluded
 * packages/classes are filtered out.
 * @param model the UML model to scan
 * @return the root classes
 */
public Collection<UMLClass> getAllParentClasses(UMLModel model) throws GenerationException
{
Collection<UMLClass> classes = new ArrayList<UMLClass>();
getAllParentClasses(model.getPackages(),classes);
return classes;
}
/** Recurses into each package, accumulating inheritance-root classes. */
private void getAllParentClasses(Collection<UMLPackage> pkgCollection,Collection<UMLClass> classes) throws GenerationException
{
    for (UMLPackage pkg : pkgCollection) {
        getAllParentClasses(pkg, classes);
    }
}
/**
 * Adds included, non-&lt;&lt;table&gt;&gt; classes from this package that have no
 * superclass and are not implicit parents, then recurses into sub-packages.
 */
private void getAllParentClasses(UMLPackage rootPkg,Collection<UMLClass> classes)throws GenerationException
{
if(isIncluded(rootPkg))
{
for(UMLClass klass:rootPkg.getClasses())
{
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass) && ModelUtil.getSuperclasses(klass).length == 0 && !isImplicitParent(klass))
classes.add(klass);
}
}
getAllParentClasses(rootPkg.getPackages(),classes);
}
/**
 * Returns all included classes whose direct superclass is an implicit parent
 * (a parent with no table mapping) and which are not implicit parents
 * themselves; tables and excluded packages/classes are filtered out.
 * @param model the UML model to scan
 * @return the implicit-parent leaf classes
 */
public Collection<UMLClass> getAllImplicitParentLeafClasses(UMLModel model) throws GenerationException
{
Collection<UMLClass> classes = new ArrayList<UMLClass>();
getAllImplicitParentLeafClasses(model.getPackages(),classes);
return classes;
}
/** Recurses into each package, accumulating implicit-parent leaf classes. */
private void getAllImplicitParentLeafClasses(Collection<UMLPackage> pkgCollection,Collection<UMLClass> classes) throws GenerationException
{
    for (UMLPackage pkg : pkgCollection) {
        getAllImplicitParentLeafClasses(pkg, classes);
    }
}
/**
 * Adds included classes from this package whose direct superclass is an
 * implicit parent, then recurses into sub-packages.
 */
private void getAllImplicitParentLeafClasses(UMLPackage rootPkg,Collection<UMLClass> classes) throws GenerationException
{
if(isIncluded(rootPkg))
{
for(UMLClass klass:rootPkg.getClasses())
{
try {
if(!STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype()) && isIncluded(klass) && isImplicitParent(getSuperClass(klass)) && !isImplicitParent(klass))
classes.add(klass);
} catch(GenerationException e){
// Deliberate best-effort: a class whose superclass cannot be resolved
// (e.g. multiple superclasses) is simply skipped.
continue;
}
}
}
getAllImplicitParentLeafClasses(rootPkg.getPackages(),classes);
}
/**
 * Retrieves the data-model table linked to the class via a
 * &lt;&lt;DataSource&gt;&gt; Dependency to a &lt;&lt;table&gt;&gt; class.
 * Exactly one distinct table (by name) must be linked; otherwise a
 * GenerationException is thrown.
 *
 * @param klass the logical-model class
 * @return the mapped table class
 * @throws GenerationException when zero or multiple distinct tables are linked
 */
public UMLClass getTable(UMLClass klass) throws GenerationException
{
Set<UMLDependency> dependencies = klass.getDependencies();
// Map keyed by table name so duplicate links to the same table count once.
Map<String,UMLClass> clientMap = new HashMap<String,UMLClass>();
int count = 0;
UMLClass result = null;
for(UMLDependency dependency:dependencies)
{
UMLClass client = (UMLClass) dependency.getClient();
log.debug("getTable: klass: " + klass.getName() + "Client stereotype: " +client.getStereotype() + "; dependency stereotype: " + dependency.getStereotype());
if(STEREO_TYPE_TABLE.equalsIgnoreCase(client.getStereotype()) && STEREO_TYPE_DATASOURCE_DEPENDENCY.equalsIgnoreCase(dependency.getStereotype()))
{
log.debug("* * * client.getName(): " + client.getName());
clientMap.put(client.getName(), client);
result = client;
}
}
count = clientMap.size();
if(count!=1){
log.debug("getTable: klass: " +klass.getName()+"; count: " + count);
throw new GenerationException("No table found for : "+getFQCN(klass)+". Make sure the corresponding Data Model table (class) has a 'table' Stereotype assigned, and the Dependency between the Data Model table and Logical Model class has a 'DataSource' Stereotype assigned.");
}
return result;
}
/**
 * Determines whether the input class is an implicit superclass: it has
 * subclasses but no table mapping of its own.  Used by the code generator to
 * decide whether an implicit-inheritance Hibernate mapping is needed.
 * @param klass the class to test (null-safe: null yields false)
 * @return true when the class is a superclass without a table mapping
 */
public boolean isImplicitParent(UMLClass klass)
{
if (klass != null)
log.debug("isImplicitClass " + klass.getName()+": " + (isSuperclass(klass) && hasNoTableMapping(klass)));
return (isSuperclass(klass) && hasNoTableMapping(klass));
}
/**
 * Walks up the generalization chain and returns true when any ancestor of
 * klass is an implicit parent (a superclass with no table mapping).
 * A failure while resolving a superclass is logged and treated as "no".
 */
public boolean hasImplicitParent(UMLClass klass){
UMLClass superclass = klass;
do {
try {
superclass = getSuperClass(superclass);
if(isImplicitParent(superclass)){
return true;
}
} catch (GenerationException e) {
log.error("ERROR encountered checking if class " +klass.getName() + " has an implicit parent: ", e);
return false;
}
// Stop at the top of the chain (null) or at java.lang.Object.
} while (!(superclass==null) && !(superclass.getName().equalsIgnoreCase("java.lang.Object")));
return false;
}
/**
 * True when klass appears as the supertype in any of its generalizations,
 * i.e. at least one other class inherits from it.  Null-safe.
 */
private boolean isSuperclass(UMLClass klass)
{
    if (klass == null) {
        return false;
    }
    for (UMLGeneralization gen : klass.getGeneralizations()) {
        if (gen.getSupertype() instanceof UMLClass && ((UMLClass) gen.getSupertype()) == klass) {
            return true;
        }
    }
    return false;
}
/**
 * True when the class has no data-model table mapping.  Implemented by
 * probing getTable(klass) and interpreting its GenerationException as
 * "no mapping" (deliberate exception-as-signal — getTable throws when zero
 * or multiple tables are linked).
 * @param klass the class to test
 * @return true when no single table is mapped to the class
 */
private boolean hasNoTableMapping(UMLClass klass)
{
try {
getTable(klass);
} catch (GenerationException e){
return true;
}
return false;
}
/**
 * Scans the association's tag values to determine which JOIN (correlation)
 * table it uses; throws when the correlation-table tag is missing or names
 * an unknown table.
 * @param association the association carrying the correlation-table tag
 * @param model the model to resolve the table in
 * @param klass the class anchoring the lookup
 * @throws GenerationException when the table cannot be resolved
 */
public UMLClass findCorrelationTable(UMLAssociation association, UMLModel model, UMLClass klass) throws GenerationException
{
    // Delegate with throwException=true: a missing tag is an error here.
    return findCorrelationTable(association, model, klass, true);
}
/**
 * Looks up the correlation (join) table named by the association's
 * correlation-table tag.
 *
 * @param association the association carrying the correlation-table tag
 * @param model the model searched for the table class
 * @param klass the class owning this end of the association
 * @param throwException when false, a missing tag returns null instead of failing
 * @return the join-table class, or null when the tag is absent and throwException is false
 * @throws GenerationException when the tag is required but missing, or the named table does not exist
 */
public UMLClass findCorrelationTable(UMLAssociation association, UMLModel model, UMLClass klass, boolean throwException) throws GenerationException
{
    int minimumRequired = throwException ? 1 : 0;
    String tableName = getTagValue(klass, association, TV_CORRELATION_TABLE, null, minimumRequired, 1);
    if (!throwException && (tableName == null || tableName.length() == 0)) {
        return null;
    }
    UMLClass correlationTable = ModelUtil.findClass(model, BASE_PKG_DATA_MODEL + "." + tableName);
    if (correlationTable == null) {
        throw new GenerationException("No correlation table found named : \"" + tableName + "\"");
    }
    return correlationTable;
}
/**
 * Returns the name of the data-model column mapped to the given
 * fully-qualified attribute name (mapped-attribute-column tag).
 * Exactly one mapping is required.
 * @throws GenerationException when the mapping is absent or ambiguous
 */
public String getMappedColumnName(UMLClass table, String fullyQualifiedAttrName) throws GenerationException
{
return getColumnName(table,TV_MAPPED_ATTR_COLUMN,fullyQualifiedAttrName,false,1,1);
}
/**
 * Returns the data-model column attribute mapped to the given
 * fully-qualified attribute name (mapped-attribute-column tag).
 * Exactly one mapping is required.
 * @throws GenerationException when the mapping is absent or ambiguous
 */
public UMLAttribute getMappedColumn(UMLClass table, String fullyQualifiedAttrName) throws GenerationException
{
return getColumn(table,TV_MAPPED_ATTR_COLUMN,fullyQualifiedAttrName,false,1,1);
}
/**
 * Builds the caDSR annotation content for a taggable element by
 * concatenating its caDSR publicID and version tag values.
 *
 * @param tgElt the element whose caDSR tags are read
 * @return the concatenated 'key="value"' pairs, or null when neither
 *         caDSR tag is present
 */
public String getCaDSRAnnotationContent(UMLTaggableElement tgElt)
{
    String publicID = getTagValue(tgElt, TV_CADSR_PUBLICID);
    String version = getTagValue(tgElt, TV_CADSR_VERSION);
    if (publicID == null && version == null) {
        return null;
    }
    StringBuilder content = new StringBuilder();
    if (publicID != null) {
        content.append(TV_CADSR_PUBLICID).append("=\"").append(publicID).append("\"; ");
    }
    if (version != null) {
        content.append(TV_CADSR_VERSION).append("=\"").append(version).append("\"");
    }
    return content.toString();
}
/**
 * Resolves the column on 'table' that implements the association between
 * 'klass' and 'assocKlass'.  Three candidate tag lookups are tried:
 *   col1 - implements-association column keyed by klass + otherEnd role
 *   col2 - implements-association column keyed by assocKlass + thisEnd role
 *   col3 - inverse-of column keyed by assocKlass + thisEnd role
 * For a join (correlation) table the answer is col1 falling back to col3;
 * otherwise col1 falling back to col2.
 *
 * @param table the data-model table (or join table) to search
 * @param klass the class on this side of the association
 * @param otherEnd the association end pointing away from klass
 * @param assocKlass the class on the other side of the association
 * @param thisEnd the association end pointing back at klass
 * @param throwException when true, a fully unresolved column is a hard error
 * @param isJoin true when 'table' is a correlation (join) table
 * @return the resolved column name; may be null/"" when absent and throwException is false
 * @throws GenerationException when no column can be resolved and throwException is true
 */
public String findAssociatedColumn(UMLClass table,UMLClass klass, UMLAssociationEnd otherEnd, UMLClass assocKlass, UMLAssociationEnd thisEnd, Boolean throwException, Boolean isJoin) throws GenerationException
{
String col1 = getColumnName(table,TV_ASSOC_COLUMN,getFQCN(klass) +"."+ otherEnd.getRoleName(),false,0,1);
String col2 = getColumnName(table,TV_ASSOC_COLUMN,getFQCN(assocKlass) +"."+ thisEnd.getRoleName(),false,0,1);
String col3 = getColumnName(table,TV_INVERSE_ASSOC_COLUMN,getFQCN(assocKlass) +"."+ thisEnd.getRoleName(),false,0,1);
log.debug("***** col1: " + col1 + "; col2: " + col2 + "; col3: " + col3);
// Normalize "not found" ("" from getColumnName) to null for the checks below.
if("".equals(col1)) col1=null;
if("".equals(col2)) col2=null;
if("".equals(col3)) col3=null;
// Fail only when every candidate relevant for this mapping style is absent.
if((col1==null && col3==null && isJoin && throwException) || (col1==null && col2==null && !isJoin && throwException)){
log.debug("***** col1: " + col1 + "; col2: " + col2 + "; col3: " + col3);
log.debug("klass: " + klass.getName());
log.debug("assocKlass: " + assocKlass.getName());
log.debug("table: " + table.getName());
log.debug("isJoin: " + isJoin);
log.debug("otherEnd.getRoleName(): " +otherEnd.getRoleName());
log.debug("thisEnd.getRoleName(): " +thisEnd.getRoleName());
throw new GenerationException("Could not determine the column for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass) +". Check for missing implements-association/inverse-of/correlation-table tag(s), where appropriate");
}
// Historical cross-checks, intentionally disabled: multiple candidates are
// tolerated and resolved by the precedence below.
/*if(col1!=null && col2!=null && !col1.equals(col2))
throw new GenerationException("More than one column found for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass));
if(col1!=null && col3!=null && !col1.equals(col3))
throw new GenerationException("More than one column found for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass));
if(col2!=null && col3!=null && !col2.equals(col3))
throw new GenerationException("More than one column found for the association between "+getFQCN(klass)+" and "+getFQCN(assocKlass));
*/
if(isJoin)
{
return col1==null ? col3 : col1;
}
else
{
return col1==null ? col2 : col1;
}
/* if(col1!=null) return col1;
else if (col3!=null) return col3;
else return col2;
*/ }
/**
 * Convenience overload of findAssociatedColumn that treats an unresolved
 * column as a hard error (throwException = true).
 */
public String findAssociatedColumn(UMLClass table,UMLClass klass, UMLAssociationEnd otherEnd, UMLClass assocKlass, UMLAssociationEnd thisEnd, Boolean isJoin) throws GenerationException
{
return findAssociatedColumn(table,klass, otherEnd, assocKlass, thisEnd, true, isJoin);
}
/**
 * Returns the inverse-of column name declared on 'table' for
 * klass + thisEnd's role, or "" when none is declared (the tag is optional).
 */
public String findInverseColumnValue(UMLClass table,UMLClass klass, UMLAssociationEnd thisEnd) throws GenerationException
{
return getColumnName(table,TV_INVERSE_ASSOC_COLUMN,getFQCN(klass) +"."+ thisEnd.getRoleName(),false,0,1);
}
/**
 * Finds the discriminator column for the hierarchy containing
 * {@code klass}.  The column is declared on the table of the top-most
 * non-implicit ancestor (the hierarchy root).
 *
 * @param klass any class in the hierarchy
 * @return the discriminator column name, or "" when none is declared
 * @throws GenerationException if the root's table cannot be resolved
 */
public String findDiscriminatingColumnName(UMLClass klass) throws GenerationException
{
    UMLClass rootKlass = klass;
    UMLClass ancestor = klass;
    // Climb until we run out of superclasses or hit an implicit parent.
    while ((ancestor = getSuperClass(ancestor)) != null && !isImplicitParent(ancestor)) {
        rootKlass = ancestor;
    }
    UMLClass rootTable = getTable(rootKlass);
    return getColumnName(rootTable, TV_DISCR_COLUMN, getFQCN(rootKlass), false, 0, 1);
}
/**
 * Returns the discriminator value for the class; exactly one
 * discriminator tag occurrence is required.
 * @throws GenerationException when the tag is absent or duplicated
 */
public String getDiscriminatorValue(UMLClass klass) throws GenerationException
{
return getTagValue(klass,TV_DISCR_COLUMN,null, 1,1);
}
/**
 * Returns the discriminator value for a hierarchy root; here the tag is
 * optional (null when absent).
 * @throws GenerationException when the tag is duplicated
 */
public String getRootDiscriminatorValue(UMLClass klass) throws GenerationException
{
return getTagValue(klass,TV_DISCR_COLUMN,null,0,1);
}
/**
 * Returns the discriminator column on 'table' recorded for the implicit
 * association identified by klass + roleName; exactly one is required.
 */
public String getImplicitDiscriminatorColumn(UMLClass table, UMLClass klass, String roleName) throws GenerationException
{
log.debug("**** getImplicitDiscriminator: table: " + table.getName() +"; klass: " + klass.getName() +"; roleName: " + roleName);
return getColumnName(table,TV_DISCR_COLUMN,getFQCN(klass)+"."+roleName,false,1,1);
}
/**
 * Returns the id column on 'table' recorded for the implicit association
 * identified by klass + roleName; exactly one is required.
 */
public String getImplicitIdColumn(UMLClass table, UMLClass klass, String roleName) throws GenerationException
{
return getColumnName(table,TV_ASSOC_COLUMN,getFQCN(klass)+"."+roleName,false,1,1);
}
/**
 * Legacy guard for the deprecated lazy-load tag.  If the old association-level
 * tag is present this throws with migration instructions; otherwise
 * associations default to lazy (this method always returns true).
 * The current per-role check is isLazyLoad(UMLClass, String, UMLAssociation).
 */
public boolean isLazyLoad(UMLClass klass, UMLAssociation association) throws GenerationException
{
String temp = getTagValue(klass,association, TV_LAZY_LOAD,null, 0,1);
if (temp != null)
throw new GenerationException("Invalid Tag Value found: The '" + TV_LAZY_LOAD + "' Tag Value which is attached to the association link has been replaced with the '" + TV_NCI_EAGER_LOAD + "' Tag Value. Also, it's value must now conform to the following pattern: "+TV_NCI_EAGER_LOAD+"#<fully qualified class name>.<role name>. The value of the tag continues to be 'yes' or 'no'. Please update your model accordingly" );
return true;
}
/**
 * Returns the (single) value of the given tag on a taggable element.
 * A tag's value may hold several comma-separated entries; each entry counts
 * as one occurrence.  When 'value' is non-null only entries equal to it are
 * counted.
 *
 * @param elt the tagged element to search
 * @param key the tag name
 * @param value the specific value to match, or null to accept any value
 * @param minOccurrence fewer matches than this raises a GenerationException
 * @param maxOccurrence more matches than this raises a GenerationException
 * @return the last matching entry, or null when none and minOccurrence is 0
 * @throws GenerationException on occurrence-count violations
 */
private String getTagValue(UMLTaggableElement elt, String key, String value, int minOccurrence, int maxOccurrence) throws GenerationException
{
String result = null;
int count = 0;
for(UMLTaggedValue tv: elt.getTaggedValues())
{
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
// One tagged value may carry several comma-separated entries.
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = val;
}
else if(value.equals(val))
{
count++;
result = val;
}
}
}
}
// A required tag must exist AND be non-blank.
if(count < minOccurrence || (minOccurrence>0 && (result == null || result.trim().length() == 0))) throw new GenerationException("No value found for "+key+" tag in : "+getFQEN(elt));
if(count > maxOccurrence) throw new GenerationException("More than one value found for "+key+" tag in : "+getFQEN(elt));
return result;
}
/**
 * Returns the value of the first tagged value with the given name,
 * or null when the element carries no such tag.
 *
 * @param tgElt the tagged element to search
 * @param key the tag name
 * @return the first matching value, or null
 */
private String getTagValue(UMLTaggableElement tgElt, String key)
{
    for (UMLTaggedValue taggedValue : tgElt.getTaggedValues()) {
        if (key.equals(taggedValue.getName())) {
            return taggedValue.getValue();
        }
    }
    return null;
}
/**
 * Collects every value of the named tag on the element.
 *
 * @param tgElt the tagged element to scan
 * @param key the tag name to collect
 * @return the matching values, possibly empty, in declaration order
 */
private List<String> getTagValues(UMLTaggableElement tgElt, String key)
{
    List<String> values = new ArrayList<String>();
    for (UMLTaggedValue taggedValue : tgElt.getTaggedValues()) {
        if (key.equals(taggedValue.getName())) {
            log.debug(taggedValue.getName() + ": " + taggedValue.getValue());
            values.add(taggedValue.getValue());
        }
    }
    return values;
}
/**
 * Name-returning wrapper around getColumn: resolves the matching table
 * column and returns its name, or "" when no column matched (and
 * minOccurrence allowed zero matches).
 */
private String getColumnName(UMLClass klass, String key, String value, boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
    UMLAttribute column = getColumn(klass, key, value, isValuePrefix, minOccurrence, maxOccurrence);
    if (column == null) {
        return "";
    }
    return column.getName();
}
/**
 * Finds the (single) column of 'klass' whose tag 'key' contains 'value'
 * (exact or prefix match, any value when 'value' is null).  Counting
 * follows the same comma-separated convention as the tag lookups: every
 * matching entry of every matching tag counts as one occurrence.
 *
 * @return the matching column attribute, or null when none and minOccurrence is 0
 * @throws GenerationException on occurrence-count violations
 */
private UMLAttribute getColumn(UMLClass klass, String key, String value, boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
UMLAttribute result = null;
int count = 0;
for(UMLAttribute attr: klass.getAttributes())
{
for(UMLTaggedValue tv: attr.getTaggedValues())
{
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
// One tagged value may carry several comma-separated entries.
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = attr;
}
else if(isValuePrefix && val.startsWith(value))
{
count++;
result = attr;
}
else if(!isValuePrefix && val.equals(value))
{
count++;
result = attr;
}
}
}
}
}
if(count < minOccurrence) throw new GenerationException("No value of "+value+" found for "+key+" tag in class : "+getFQCN(klass));
if(count > maxOccurrence) throw new GenerationException("More than one values found for "+key+" tag in class : "+getFQCN(klass));
return result;
}
/**
 * Returns the (single) value of the given tag on an attribute.  Tag values
 * may be comma-separated; each entry counts as one occurrence.  When
 * 'value' is non-null only entries equal to it (or, when isValuePrefix is
 * true, starting with it) are counted.
 *
 * @return the last matching entry, or null when none and minOccurrence is 0
 * @throws GenerationException on occurrence-count violations
 */
private String getTagValue(UMLClass klass, UMLAttribute attribute, String key, String value, Boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
String result = null;
int count = 0;
for(UMLTaggedValue tv: attribute.getTaggedValues())
{
log.debug("Processing tv: " + tv.getName());
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
log.debug("Key equals tv. TV value is: " + tv.getValue());
// One tagged value may carry several comma-separated entries.
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = val;
}
else if(isValuePrefix && val.startsWith(value))
{
count++;
result = val;
}
else if(!isValuePrefix && val.equals(value))
{
count++;
result = val;
}
}
}
}
if(count < minOccurrence) throw new GenerationException("No value of "+value+" found for "+key+" tag in class : "+getFQCN(klass));
if(count > maxOccurrence) throw new GenerationException("More than one values found for "+key+" tag in class : "+getFQCN(klass));
return result;
}
/**
 * Returns the (single) value of the given tag on an association.  Tag
 * values may be comma-separated; each entry counts as one occurrence.
 * When 'value' is non-null only entries equal to it (or starting with it
 * when isValuePrefix is true) are counted.
 *
 * @return the last matching entry, or null when none and minOccurrence is 0
 * @throws GenerationException on occurrence-count violations
 */
private String getTagValue(UMLClass klass, UMLAssociation association, String key, String value, Boolean isValuePrefix, int minOccurrence, int maxOccurrence) throws GenerationException
{
// Resolve both end class names up front so error messages can identify the association.
List <UMLAssociationEnd>ends = association.getAssociationEnds();
UMLAssociationEnd thisEnd = getThisEnd(klass, ends);
UMLAssociationEnd otherEnd = getOtherEnd(klass, ends);
String thisClassName = getFQCN(((UMLClass)thisEnd.getUMLElement()));
String otherClassName = getFQCN(((UMLClass)otherEnd.getUMLElement()));
String result = null;
int count = 0;
for(UMLTaggedValue tv: association.getTaggedValues())
{
if (key.equals(tv.getName()))
{
String tvValue = tv.getValue();
// One tagged value may carry several comma-separated entries.
String[] tvValues = tvValue.split(",");
for(String val:tvValues)
{
if(value==null)
{
count++;
result = val;
}
else if(isValuePrefix && val.startsWith(value))
{
count++;
result = val;
}
else if(!isValuePrefix && val.equals(value))
{
count++;
result = val;
}
}
}
}
// A required tag must exist AND be non-blank.
if(count < minOccurrence || (minOccurrence >0 && (result == null || result.trim().length() == 0))) throw new GenerationException("No tag value of "+key+" found for the association between "+thisClassName +" and "+ otherClassName +":"+count+":"+result);
if(count > maxOccurrence) throw new GenerationException("More than the expected maximum number (" + maxOccurrence + ") of tag value occurrences for "+key+" found for the association between "+thisClassName +" and "+ otherClassName);
return result;
}
/**
 * Overload of the association tag lookup using exact value matching
 * (isValuePrefix = false).
 */
private String getTagValue(UMLClass klass, UMLAssociation association, String key, String value, int minOccurrence, int maxOccurrence) throws GenerationException
{
return getTagValue(klass, association, key, value,false, minOccurrence, maxOccurrence);
}
/**
 * Concatenates the values of a tag that may be split across numbered keys:
 * 'key', 'key2', 'key3', ... up to 'key' + maxOccurrence.  Presumably the
 * modeling tool splits long values across numbered tags — confirm against
 * the exporting tool.  Returns "" (never null) when no matching tag exists.
 */
public String getTagValue(Collection<UMLTaggedValue> tagValues, String key, int maxOccurrence) throws GenerationException
{
StringBuilder temp = new StringBuilder();
for(int i=0;i<maxOccurrence;i++)
{
// First segment uses the bare key; later segments are key2, key3, ...
String searchKey = i==0 ? key : key + (i+1);
for(UMLTaggedValue tv:tagValues)
{
if(searchKey.equals(tv.getName()))
{
temp.append(tv.getValue());
}
}
}
return temp.toString();
}
/**
 * Builds a javadoc block from an element's documentation/description tag
 * values.  The 'documentation' tag wins; when blank, 'description' is used.
 */
private String getJavaDocs(Collection<UMLTaggedValue> tagValues) throws GenerationException
{
    String documentation = getTagValue(tagValues, TV_DOCUMENTATION, 8);
    String description = getTagValue(tagValues, TV_DESCRIPTION, 8);
    String text = (documentation == null || documentation.trim().length() == 0)
            ? description : documentation;
    return "/**" + "\n\t* " + text + "\n\t**/";
}
/**
 * Builds the class-level javadoc for an interface from its tag values.
 */
public String getJavaDocs(UMLInterface interfaze) throws GenerationException
{
return getJavaDocs(interfaze.getTaggedValues());
}
/**
 * Builds the class-level javadoc for a class from its tag values.
 */
public String getJavaDocs(UMLClass klass) throws GenerationException
{
return getJavaDocs(klass.getTaggedValues());
}
/**
 * Builds the javadoc for an attribute from its tag values.
 */
public String getJavaDocs(UMLAttribute attr) throws GenerationException
{
return getJavaDocs(attr.getTaggedValues());
}
/**
 * Builds the javadoc for an association property; mentions the associated
 * class and notes when the property is a collection.
 */
public String getJavaDocs(UMLClass klass, UMLAssociation assoc) throws GenerationException
{
UMLAssociationEnd otherEnd = getOtherEnd(klass, assoc.getAssociationEnds());
StringBuilder doc = new StringBuilder();
doc.append("/**");
doc.append("\n * An associated "+getFQCN(((UMLClass)otherEnd.getUMLElement()))+" object");
if(isAssociationEndMany(otherEnd))
doc.append("'s collection ");
doc.append("\n **/\n");
return doc.toString();
}
/**
 * Shared builder for getter/setter javadoc blocks.
 *
 * @param summary the one-line method summary
 * @param returnName the name to document under @return, or null for setters
 */
private String buildAccessorJavaDoc(String summary, String returnName) {
    StringBuilder doc = new StringBuilder("/**");
    doc.append("\n * ").append(summary);
    if (returnName != null) {
        doc.append("\n * @return ").append(returnName);
    }
    doc.append("\n **/\n");
    return doc.toString();
}
/** Builds the javadoc for an attribute's getter. */
public String getGetterMethodJavaDocs(UMLAttribute attr) {
    return buildAccessorJavaDoc("Retrieves the value of the " + attr.getName() + " attribute", attr.getName());
}
/** Builds the javadoc for an attribute's setter. */
public String getSetterMethodJavaDocs(UMLAttribute attr) {
    return buildAccessorJavaDoc("Sets the value of " + attr.getName() + " attribute", null);
}
/** Builds the javadoc for an association property's getter. */
public String getGetterMethodJavaDocs(UMLClass klass, UMLAssociation assoc) throws GenerationException {
    UMLAssociationEnd otherEnd = getOtherEnd(klass, assoc.getAssociationEnds());
    return buildAccessorJavaDoc("Retrieves the value of the " + otherEnd.getRoleName() + " attribute", otherEnd.getRoleName());
}
/** Builds the javadoc for an association property's setter. */
public String getSetterMethodJavaDocs(UMLClass klass, UMLAssociation assoc) throws GenerationException {
    UMLAssociationEnd otherEnd = getOtherEnd(klass, assoc.getAssociationEnds());
    return buildAccessorJavaDoc("Sets the value of " + otherEnd.getRoleName() + " attribute", null);
}
/**
 * Reverses the order of the dot-separated segments of a package name,
 * e.g. "gov.nih.nci" becomes "nci.nih.gov".
 *
 * @param s the package name to reverse
 * @return the segments in reverse order joined by Constant.DOT; an input
 *         with no segments (e.g. "") is returned unchanged
 */
public String reversePackageName(String s) {
    StringTokenizer tokenizer = new StringTokenizer(s, ".");
    List<String> segments = new ArrayList<String>();
    while (tokenizer.hasMoreTokens()) {
        segments.add(tokenizer.nextToken());
    }
    // BUG FIX: the original called substring(0, -1) for tokenless input,
    // throwing StringIndexOutOfBoundsException; return the input unchanged.
    if (segments.isEmpty()) {
        return s;
    }
    StringBuilder reversed = new StringBuilder();
    for (int i = segments.size() - 1; i >= 0; i--) {
        reversed.append(segments.get(i));
        if (i > 0) {
            // NOTE(review): assumes Constant.DOT is the single-char "." the
            // original stripped via substring — confirm.
            reversed.append(Constant.DOT);
        }
    }
    return reversed.toString();
}
/**
 * Builds the comma-separated list of fully-qualified class names used as
 * the WSDD service value.
 *
 * @param classColl the classes to list; may be null or empty
 * @return "pkg.Class1,pkg.Class2,..." or "" when the collection is null/empty
 * @throws GenerationException if resolving a package name fails
 */
public String getWSDDServiceValue(Collection<UMLClass> classColl) throws GenerationException {
    // BUG FIX: the original called substring(0, -1) on an empty collection,
    // throwing StringIndexOutOfBoundsException; return "" instead.
    if (classColl == null || classColl.isEmpty()) {
        return "";
    }
    StringBuilder names = new StringBuilder();
    for (UMLClass klass : classColl) {
        if (names.length() > 0) {
            names.append(Constant.COMMA);
        }
        names.append(getFullPackageName(klass))
             .append(Constant.DOT)
             .append(klass.getName());
    }
    return names.toString();
}
/**
 * Resolves the table used to persist a mapped collection attribute, as
 * named by the attribute's mapped-collection-table tag.
 *
 * @param attr the collection attribute
 * @param model the model searched for the table class
 * @return the collection table class
 * @throws GenerationException when the table named by the tag does not exist
 */
public UMLClass findCollectionTable(UMLAttribute attr, UMLModel model) throws GenerationException
{
    String tableName = getTagValue(attr.getTaggedValues(), TV_MAPPED_COLLECTION_TABLE, 1);
    UMLClass collectionTable = ModelUtil.findClass(model, BASE_PKG_DATA_MODEL + "." + tableName);
    if (collectionTable == null) {
        throw new GenerationException("No collection table found named : \"" + tableName + "\"");
    }
    return collectionTable;
}
/**
 * Returns the key (foreign-key) column of a collection table mapped to
 * klass.attr; exactly one mapping is required.
 */
public String getCollectionKeyColumnName(UMLClass table,UMLClass klass, UMLAttribute attr) throws GenerationException
{
return getColumnName(table,TV_MAPPED_ATTR_COLUMN,getFQCN(klass) +"."+ attr.getName(),false,1,1);
}
/**
 * Returns the element (value) column of a collection table mapped to
 * klass.attr; exactly one mapping is required.
 */
public String getCollectionElementColumnName(UMLClass table,UMLClass klass, UMLAttribute attr) throws GenerationException
{
return getColumnName(table,TV_MAPPED_ELEMENT_COLUMN,getFQCN(klass) +"."+ attr.getName(),false,1,1);
}
/**
 * Maps the element type of a "Collection&lt;X&gt;" attribute to the matching
 * hibernate primitive type name.  Non-collection data types, and element
 * types with no primitive mapping, are returned unchanged (for collections,
 * the stripped element type is returned).
 */
public String getCollectionElementHibernateType(UMLClass klass, UMLAttribute attr) throws GenerationException
{
    String dataType = getDataType(attr);
    String prefix = "Collection<";
    if (!dataType.startsWith(prefix)) {
        return dataType;
    }
    // Strip "Collection<" and the trailing ">".
    String element = dataType.substring(prefix.length(), dataType.length() - 1);
    if ("int".equalsIgnoreCase(element) || "integer".equalsIgnoreCase(element)) return "integer";
    if ("double".equalsIgnoreCase(element)) return "double";
    if ("float".equalsIgnoreCase(element)) return "float";
    if ("long".equalsIgnoreCase(element)) return "long";
    if ("string".equalsIgnoreCase(element)) return "string";
    if ("char".equalsIgnoreCase(element) || "character".equalsIgnoreCase(element)) return "character";
    if ("boolean".equalsIgnoreCase(element)) return "boolean";
    if ("byte".equalsIgnoreCase(element)) return "byte";
    if ("short".equalsIgnoreCase(element)) return "short";
    return element;
}
/**
 * Builds the JAXB annotation for an attribute: collection attributes get
 * @XmlElementWrapper/@XmlElement (wrapper named after the attribute,
 * elements after the lower-cased element type), scalar attributes get a
 * plain @XmlAttribute.
 */
public String getJaxbXmlAttributeAnnotation(UMLClass klass, UMLAttribute attr){
String type = this.getDataType(attr);
String collectionType = "";
if (type.startsWith("Collection")){
// Extract X from "Collection<X>".
collectionType = type.substring(type.indexOf("<")+1,type.indexOf(">"));
StringBuffer sb = new StringBuffer(" @XmlElementWrapper(name=\"");
sb.append(attr.getName()).append("\", ");
sb.append("namespace=\"").append(this.getNamespaceUriPrefix() + this.getFullPackageName(klass)).append("\")");
sb.append(" @XmlElement(name=\"");
sb.append(collectionType.toLowerCase()).append("\", ");
sb.append("namespace=\"").append(this.getNamespaceUriPrefix() + this.getFullPackageName(klass)).append("\")");
log.debug("Collection Attribute @XmlElement annotation: "+sb.toString());
return sb.toString();
}
return " @XmlAttribute";
}
/**
 * Builds the @XmlType annotation for a class.  propOrder lists all
 * attributes followed by the role names of navigable association ends.
 */
public String getJaxbXmlTypeAnnotation(UMLClass klass){
StringBuffer sb = new StringBuffer("@XmlType(name = \"").append(klass.getName());
sb.append("\", propOrder = {");
int counter = 0;
int totalAttrCount = klass.getAttributes().size();
for(UMLAttribute attr:klass.getAttributes()){
counter++;
sb.append("\"").append(attr.getName()).append("\"");
if (counter < totalAttrCount){
sb.append(", ");
}
}
counter = 0;
int totalAssocCount = klass.getAssociations().size();
if ((totalAttrCount > 0) && (totalAssocCount > 0)){
sb.append(", ");
}
for(UMLAssociation assoc:klass.getAssociations()){
List<UMLAssociationEnd> assocEnds = assoc.getAssociationEnds();
try {
// UMLAssociationEnd thisEnd = this.getThisEnd(klass,assocEnds);
UMLAssociationEnd otherEnd = this.getOtherEnd(klass,assocEnds);
counter++;
// Only navigable ends become properties; the counter advances for
// non-navigable ends too, so a dangling ", " can remain (cleaned below).
if(otherEnd.isNavigable())
{
sb.append("\"").append(otherEnd.getRoleName()).append("\"");
if (counter < totalAssocCount){
sb.append(", ");
}
}
} catch (GenerationException e) {
log.error("Error generating XML Type Property order for association role name: "+assoc.getRoleName(),e);
}
}
// Clean up a dangling ", " (e.g. when the last association end was not
// navigable): if the second-to-last char is a comma, delete it.
char c = sb.charAt(sb.length()-2);
log.debug("Last propOrder char: " +c);
if ( c==',' ){
sb.deleteCharAt(sb.length()-2);
}
sb.append("})");
log.debug("@XMLType string for class " + klass.getName() + sb.toString() );
return sb.toString();
}
/**
 * Builds the @XmlSeeAlso annotation listing the class's non-implicit
 * subclasses and superclasses, or "" when it has neither.
 */
public String getJaxbXmlSeeAlsoAnnotation(UMLClass klass){
List<UMLClass> subClasses = getNonImplicitSubclasses(klass);
List<UMLClass> superClasses = getNonImplicitSuperclasses(klass);
StringBuffer sb = new StringBuffer();
boolean found = false;
if (!subClasses.isEmpty()){
int counter = 0;
int totalCount = subClasses.size();
for (UMLClass subKlass:subClasses){
counter++;
found = true;
sb.append(getFullPackageName(subKlass)+"."+subKlass.getName()+".class");
if (counter < totalCount){
sb.append(", ");
}
}
}
if (!superClasses.isEmpty()){
int counter = 0;
int totalCount = superClasses.size();
// Separate the superclass list from the subclass entries already emitted.
if(found)
sb.append(",");
for (UMLClass superKlass:superClasses){
counter++;
found = true;
sb.append(getFullPackageName(superKlass)+"."+superKlass.getName()+".class");
if (counter < totalCount){
sb.append(", ");
}
}
}
if(found)
{
StringBuffer sbreturn = new StringBuffer("@XmlSeeAlso({");
sbreturn.append(sb.toString());
sbreturn.append("})");
log.debug("@XMLSeeAlso string for class " + klass.getName() + sb.toString() );
return sbreturn.toString();
}
return "";
}
/**
 * Returns every non-implicit ancestor class of the given class.
 */
public List<UMLClass> getNonImplicitSuperclasses(UMLClass implicitKlass){
ArrayList<UMLClass> nonImplicitSuperclasses = new ArrayList<UMLClass>();
getNonImplicitSuperclasses(implicitKlass, nonImplicitSuperclasses);
return nonImplicitSuperclasses;
}
// Recursive helper: walks generalizations upward, accumulating distinct
// ancestors that are real superclasses.
// NOTE(review): assumes the generalization graph is acyclic — a cycle would
// recurse indefinitely.  Also performs an unchecked cast of getSupertype()
// to UMLClass; confirm supertypes are always classes here.
private void getNonImplicitSuperclasses(UMLClass klass, ArrayList<UMLClass> nonImplicitSuperclasses){
for(UMLGeneralization gen:klass.getGeneralizations()){
UMLClass superKlass = (UMLClass)gen.getSupertype();
if(superKlass!=klass && isSuperclass(superKlass)){
if(!nonImplicitSuperclasses.contains(superKlass)){
nonImplicitSuperclasses.add(superKlass);
}
}
if(superKlass!=klass)
getNonImplicitSuperclasses(superKlass, nonImplicitSuperclasses);
}
}
/**
 * Returns every non-implicit descendant class of the given class.
 */
public List<UMLClass> getNonImplicitSubclasses(UMLClass implicitKlass){
ArrayList<UMLClass> nonImplicitSubclasses = new ArrayList<UMLClass>();
getNonImplicitSubclasses(implicitKlass, nonImplicitSubclasses);
return nonImplicitSubclasses;
}
// Recursive helper: walks generalizations downward, accumulating subclasses
// that are not implicit parents.  Unlike the superclass variant this does
// not de-duplicate entries.
// NOTE(review): unchecked cast of getSubtype() to UMLClass; assumes an
// acyclic generalization graph.
private void getNonImplicitSubclasses(UMLClass klass, ArrayList<UMLClass> nonImplicitSubclasses){
for(UMLGeneralization gen:klass.getGeneralizations()){
UMLClass subKlass = (UMLClass)gen.getSubtype();
if(subKlass!=klass && !isImplicitParent(subKlass)){
nonImplicitSubclasses.add(subKlass);
}
if(subKlass!=klass)
getNonImplicitSubclasses(subKlass, nonImplicitSubclasses);
}
}
/**
 * Returns the cascade styles configured for the association end identified
 * by klass + roleName via the NCI cascade-association tag.  The first
 * matching tag value wins; its comma-separated styles are trimmed and
 * re-joined with commas.  Defaults to "none" when no tag is present.
 *
 * @param klass the class owning the association end
 * @param roleName the role name of the association end
 * @param association the association carrying the tag
 * @return comma-separated cascade styles, or "none"
 * @throws GenerationException declared for API compatibility
 */
public String findCascadeStyle(UMLClass klass, String roleName, UMLAssociation association) throws GenerationException
{
    for (String cascadeStyles : getTagValues(association, TV_NCI_CASCADE_ASSOCIATION + "#" + getFQCN(klass) + "." + roleName)) {
        String[] styles = cascadeStyles.split(",");
        StringBuilder joined = new StringBuilder();
        for (int i = 0; i < styles.length; i++) {
            if (i > 0) {
                joined.append(",");
            }
            joined.append(styles[i].trim());
        }
        // Only the first matching tag value is honored.
        return joined.toString();
    }
    return "none";
}
/**
 * Returns "false" when the association end's lower multiplicity is 0
 * (the end is optional), "true" otherwise.
 * NOTE(review): the method name suggests the inverse mapping — an optional
 * end usually implies a nullable FK; confirm the expected polarity against
 * the templates that consume this value.
 *
 * @param otherEnd the association end backing the foreign key
 * @return "false" if lower multiplicity is 0, otherwise "true"
 */
public String isFKAttributeNull(UMLAssociationEnd otherEnd) {
if (otherEnd.getLowMultiplicity() == 0) {
return "false";
}
return "true";
}
/**
 * Determines whether the association end identified by klass + roleName
 * should be lazily loaded.  Associations are lazy by default; an
 * eager-load tag value of "true"/"yes" switches them to eager.
 *
 * @param klass the class owning the association end
 * @param roleName the role name of the association end
 * @param association the association carrying the eager-load tag
 * @return false when an eager-load tag requests eager loading; true otherwise
 * @throws GenerationException declared for API compatibility
 */
public boolean isLazyLoad(UMLClass klass, String roleName, UMLAssociation association) throws GenerationException
{
    for (String eagerLoadValue : getTagValues(association, TV_NCI_EAGER_LOAD + "#" + getFQCN(klass) + "." + roleName)) {
        boolean eager = "true".equalsIgnoreCase(eagerLoadValue) || "yes".equalsIgnoreCase(eagerLoadValue);
        if (eager) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the map of cascade-style names supported by the code generator.
 * NOTE(review): this exposes the internal CASCADE_STYLES map directly, so
 * callers could mutate it — confirm whether an unmodifiable view is
 * warranted.
 *
 * @return the supported cascade styles
 */
public Map<String,String> getValidCascadeStyles(){
return CASCADE_STYLES;
}
/**
 * Scans the table's columns for any that carry an inverse-of
 * (TV_INVERSE_ASSOC_COLUMN) tag.
 *
 * @param klass the data-model table whose columns are scanned
 * @return the names of columns carrying the inverse-of tag
 *         (one entry per tag occurrence, as before)
 * @throws GenerationException declared for API compatibility
 */
public List<String> findInverseSettingColumns(UMLClass klass) throws GenerationException
{
    // FIX: return the parameterized List<String> instead of a raw List,
    // sparing callers an unchecked cast; erasure keeps this binary-compatible.
    List<String> inverseColumns = new ArrayList<String>();
    for (UMLAttribute column : klass.getAttributes()) {
        for (UMLTaggedValue taggedValue : column.getTaggedValues()) {
            if (TV_INVERSE_ASSOC_COLUMN.equals(taggedValue.getName())) {
                inverseColumns.add(column.getName());
            }
        }
    }
    return inverseColumns;
}
/**
 * Builds the class-level hibernate-validator constraint annotation lines
 * for the class: caDSR-derived constraints followed by user-extension
 * constraints, each indented and newline-terminated.  Returns "" when the
 * class has neither.
 */
public String getHibernateValidatorConstraints(UMLClass klass){
    StringBuilder annotations = new StringBuilder();
    ValidatorClass vClass = vModel.getClass(getFQCN(klass));
    ValidatorClass vClassExtension = vModelExtension.getClass(getFQCN(klass));
    if (vClass != null) {
        annotations.append("\t").append(vClass.getConstraintAnnotationString()).append("\n");
    }
    if (vClassExtension != null) {
        annotations.append("\t").append(vClassExtension.getConstraintAnnotationString()).append("\n");
    }
    return annotations.toString();
}
/**
 * Builds the attribute-level hibernate-validator constraint annotation
 * lines for klass.attr.  caDSR-derived constraints are merged with
 * user-extension constraints; on a clash (same annotation prefix) the user
 * constraint wins.  Multiple @Pattern annotations are wrapped into a
 * single @Patterns({...}) annotation.  Returns "" when there are none.
 */
public String getHibernateValidatorConstraints(UMLClass klass,UMLAttribute attr){
ValidatorClass vClass = vModel.getClass(getFQCN(klass));
ValidatorClass vClassExtension = vModelExtension.getClass(getFQCN(klass));
List<String> cadsrConstraintAnnotations=new ArrayList<String>();
List<String> userConstraintAnnotations=new ArrayList<String>();
ValidatorAttribute vAttr=null;
if (vClass != null)
vAttr=vClass.getAttribute(attr.getName());
if (vAttr!=null)
cadsrConstraintAnnotations.addAll(vAttr.getConstraintAnnotations());
ValidatorAttribute vAttrExtension=null;
if (vClassExtension != null)
vAttrExtension=vClassExtension.getAttribute(attr.getName());
if (vAttrExtension!=null)
userConstraintAnnotations.addAll(vAttrExtension.getConstraintAnnotations());
//remove duplicates - user constraints override caDSR constraints
List<String> constraintAnnotations=new ArrayList<String>();
for(String cadsrConstraintAnnotation : cadsrConstraintAnnotations){
// Compare by annotation name only (text before the '(' argument list).
String cadsrConstraintPrefix = cadsrConstraintAnnotation.indexOf("(") > 0 ? cadsrConstraintAnnotation.substring(0, cadsrConstraintAnnotation.indexOf("(")) : cadsrConstraintAnnotation;
boolean duplicateConstraint = false;
for(String userConstraintAnnotation : userConstraintAnnotations){
if (userConstraintAnnotation.startsWith(cadsrConstraintPrefix)){
duplicateConstraint = true;
break;
}
}
if (!duplicateConstraint)
constraintAnnotations.add(cadsrConstraintAnnotation);
}
constraintAnnotations.addAll(userConstraintAnnotations);
//Handle special @Patterns scenario
List<String> patternConstraintAnnotations=new ArrayList<String>();
for(String constraintAnnotation : constraintAnnotations){
if (constraintAnnotation.indexOf("Pattern")>0){
patternConstraintAnnotations.add(constraintAnnotation);
}
}
StringBuilder sb;
if (!patternConstraintAnnotations.isEmpty()){
sb = new StringBuilder();
// Replace the individual @Pattern entries with one wrapping @Patterns.
constraintAnnotations.removeAll(patternConstraintAnnotations);
sb.append(patternConstraintAnnotations.remove(0));
for (String patternConstraintAnnotation:patternConstraintAnnotations){
sb.append(",").append(patternConstraintAnnotation);
}
constraintAnnotations.add("@Patterns({"+sb.toString()+"})");
}
sb = new StringBuilder();
for(String constraintAnnotation: constraintAnnotations){
sb.append("\n\t").append(constraintAnnotation);
}
return sb.toString();
}
/**
 * Returns the permissible (enumerated) values used to build an XSD
 * restriction for klass.attr.  User-supplied values from the validator
 * extension file take precedence; caDSR-derived values are used only when
 * the user supplied none.
 */
public Collection<String> getXSDRestrictionValues(UMLClass klass,UMLAttribute attr){
    ValidatorClass vClass = vModel.getClass(getFQCN(klass));
    ValidatorClass vClassExtension = vModelExtension.getClass(getFQCN(klass));
    List<String> permissibleValues = new ArrayList<String>();
    // User extension values first.
    ValidatorAttribute extensionAttr = (vClassExtension == null) ? null : vClassExtension.getAttribute(attr.getName());
    if (extensionAttr != null) {
        permissibleValues.addAll(extensionAttr.getXSDRestrictionCollection());
    }
    // Fall back to caDSR-derived values only when the user supplied none.
    if (permissibleValues.isEmpty()) {
        ValidatorAttribute cadsrAttr = (vClass == null) ? null : vClass.getAttribute(attr.getName());
        if (cadsrAttr != null) {
            permissibleValues.addAll(cadsrAttr.getXSDRestrictionCollection());
        }
    }
    return permissibleValues;
}
/**
 * Collects the java imports needed by the class's hibernate-validator
 * constraint annotations (caDSR plus user extensions).  When @Pattern is
 * used, org.hibernate.validator.Patterns is added as well because multiple
 * patterns are emitted wrapped in @Patterns({...}).
 */
private Collection<String> getHibernateValidatorConstraintImports(UMLClass klass){
    Collection<String> constraintImports = new HashSet<String>();
    ValidatorClass vClass = vModel.getClass(getFQCN(klass));
    if (vClass != null) {
        constraintImports.addAll(vClass.getConstraintImports());
    }
    ValidatorClass vClassExtension = vModelExtension.getClass(getFQCN(klass));
    if (vClassExtension != null) {
        constraintImports.addAll(vClassExtension.getConstraintImports());
    }
    if (constraintImports.contains("org.hibernate.validator.Pattern")) {
        constraintImports.add("org.hibernate.validator.Patterns");
    }
    return constraintImports;
}
/**
 * Returns the element's GME XML namespace tag value, or null when the
 * 'NCI_GME_XML_NAMESPACE' tag is absent.
 *
 * @throws GenerationException when the tag occurs more than once or lookup fails
 */
public String getNamespace(UMLTaggableElement te) throws GenerationException {
    try {
        return getTagValue(te, TV_NCI_GME_XML_NAMESPACE, null, 0, 1);
    } catch (GenerationException ge) {
        log.error("ERROR: ", ge);
        throw new GenerationException("Error getting the GME 'NCI_GME_XML_NAMESPACE' tag value for element", ge);
    }
}
/**
 * Returns the GME namespace for the class: the class's own namespace tag
 * when present, otherwise the namespace inherited from its package chain.
 * Logs an error and returns null when no namespace is declared anywhere.
 */
public String getGMENamespace(UMLClass klass) throws GenerationException{
String gmeNamespace = null;
try {
gmeNamespace = getNamespace(klass);
if (gmeNamespace!=null && gmeNamespace.length()>0)
return gmeNamespace;
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME namespace for: " + getFQEN(klass), ge);
}
// No class-level namespace: inherit from the package hierarchy.
gmeNamespace=getGMENamespace(klass.getPackage());
if (gmeNamespace!=null && gmeNamespace.length()>0)
return gmeNamespace;
log.error("GME Namespace name not found for: "+getFullPackageName(klass)+". Returning null");
return null;
}
/**
 * Returns the GME namespace of the package, walking up the package chain
 * until a namespace tag is found; null when none exists anywhere above.
 */
public String getGMENamespace(UMLPackage pkg) throws GenerationException{
    if (pkg == null) {
        return null;
    }
    log.debug("Getting Package Namespace for: " + pkg.getName());
    String namespace = getNamespace(pkg);
    if (namespace != null && namespace.length() > 0) {
        return namespace;
    }
    return getGMENamespace(pkg.getParent());
}
/**
 * Reports whether the element carries a 'NCI_GME_XML_NAMESPACE' tag.
 * Implementation trick: getTagValue is invoked with maxOccurrence=0, so it
 * throws precisely when at least one occurrence exists — hence the catch
 * block returns true.
 */
public boolean hasGMEXMLNamespaceTag(UMLTaggableElement te){
try {
getTagValue(te,TV_NCI_GME_XML_NAMESPACE,null,0,0);
} catch (GenerationException e) {
return true;
}
return false;
}
/**
 * Extracts the last path segment of the element's GME namespace (the text
 * after the final '/'), used as the XML package name.  Returns null when
 * the namespace tag is absent.
 *
 * @throws GenerationException when the namespace contains no '/' or lookup fails
 */
private String getNamespacePackageName(UMLTaggableElement te) throws GenerationException {
String gmeNamespace = null;
try {
gmeNamespace = getTagValue(te,TV_NCI_GME_XML_NAMESPACE,null,0,1);
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME 'NCI_GME_XML_NAMESPACE' tag value for: " + getFQEN(te), ge);
}
// A namespace must look like a path; reject values without '/'.
if (gmeNamespace != null && gmeNamespace.lastIndexOf('/')<0)
throw new GenerationException("Invalid GME Namespace found for:" + getFQEN(te)+": "+gmeNamespace);
if (gmeNamespace!=null){
return gmeNamespace.substring(gmeNamespace.lastIndexOf('/')+1, gmeNamespace.length());
}
return null;
}
/**
 * Returns the model-level XML namespace: the GME namespace tag on the
 * logical-model base package (normalized to end with "/" and with spaces
 * replaced by underscores), or null when the package has no namespace tag.
 * Overrides the codegen.properties NAMESPACE_PREFIX when present.
 *
 * @param model the UML model
 * @param basePkgLogicalModel dot-separated logical-model base package ("a.b.c")
 * @return the normalized namespace, or null when no tag exists
 * @throws GenerationException when the package cannot be resolved or the lookup fails
 */
public String getModelNamespace(UMLModel model, String basePkgLogicalModel) throws GenerationException {
    // Resolve the UMLPackage corresponding to the dot-separated base package.
    StringTokenizer tokenizer = new StringTokenizer(basePkgLogicalModel, ".");
    UMLPackage pkg = null;
    if (tokenizer.hasMoreTokens()) {
        pkg = model.getPackage(tokenizer.nextToken());
        while (pkg != null && tokenizer.hasMoreTokens()) {
            pkg = pkg.getPackage(tokenizer.nextToken());
        }
    }
    if (pkg == null) {
        // BUG FIX: the original dereferenced pkg.getName() inside this branch,
        // throwing a NullPointerException instead of the intended error.
        throw new GenerationException("Error getting the Logical Model package for model: " + basePkgLogicalModel + ". Make sure the LOGICAL_MODEL property in codegen.properties file is valid.");
    }
    log.debug("* * * pkgName: " + pkg.getName());
    try {
        String modelNamespacePrefix = this.getNamespace(pkg);
        log.debug("* * * modelNamespacePrefix: " + modelNamespacePrefix);
        if (modelNamespacePrefix != null) {
            if (!modelNamespacePrefix.endsWith("/"))
                modelNamespacePrefix = modelNamespacePrefix + "/";
            return modelNamespacePrefix.replace(" ", "_");
        }
    } catch (GenerationException ge) {
        log.error("ERROR: ", ge);
        throw new GenerationException("Error getting the GME Namespace value for model: " + pkg.getName() + ge.getMessage());
    }
    return null;
}
/**
 * Returns everything before the final '/' of the package's GME namespace
 * (the namespace "prefix"), or null when the tag is absent.
 *
 * @throws GenerationException when the namespace contains no '/' or lookup fails
 */
public String getNamespacePrefix(UMLPackage pkg) throws GenerationException {
String gmeNamespace = null;
try {
gmeNamespace = getTagValue(pkg,TV_NCI_GME_XML_NAMESPACE,null,0,1);
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME 'NCI_GME_XML_NAMESPACE' tag value for UML package: " + getFullPackageName(pkg), ge);
}
// A namespace must look like a path; reject values without '/'.
if (gmeNamespace != null && gmeNamespace.lastIndexOf('/')<0)
throw new GenerationException("Invalid GME Namespace found for UML package " + getFullPackageName(pkg)+": "+gmeNamespace);
if (gmeNamespace!=null){
return gmeNamespace.substring(0,gmeNamespace.lastIndexOf('/'));
}
return null;
}
public String getXMLClassName(UMLClass klass) throws GenerationException {
try {
return getTagValue(klass,TV_NCI_GME_XML_ELEMENT,null,0,1);
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME 'NCI_GME_XML_ELEMENT' tag value for klass: " + klass.getName(), ge);
}
}
public boolean hasGMEXMLClassTag(UMLTaggableElement te){
try {
getTagValue(te,TV_NCI_GME_XML_ELEMENT,null,0,0);
} catch (GenerationException e) {
return true;
}
return false;
}
public String getXMLAttributeName(UMLAttribute attr)throws GenerationException{
try {
String attributeName = getTagValue(attr,TV_NCI_GME_XML_LOC_REF,null,0,1);
if (attributeName !=null && attributeName.length()>0 && (attributeName.startsWith("@")))
attributeName=attributeName.substring(1); //remove leading '@' character
return attributeName;
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME 'NCI_GME_XML_LOC_REF' tag value for attribute: " + attr.getName(), ge);
}
}
public boolean generateXMLAttributeAsElement(UMLAttribute attr)throws GenerationException{
try {
String attributeName = getTagValue(attr,TV_NCI_GME_XML_LOC_REF,null,0,1);
if (attributeName !=null && attributeName.length()>0 && !(attributeName.startsWith("@")))
return true;
return false;
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME 'NCI_GME_XML_LOC_REF' tag value for attribute: " + attr.getName(), ge);
}
}
public boolean hasGMEXMLAttributeTag(UMLTaggableElement te){
try {
getTagValue(te,TV_NCI_GME_XML_LOC_REF,null,0,0);
} catch (GenerationException e) {
return true;
}
return false;
}
public String getXMLLocRef(UMLAssociationEnd assocEnd, String klassName)throws GenerationException
{
try {
return getGmeLocRef(assocEnd.getOwningAssociation(),klassName);
} catch(GenerationException ge) {
log.error("ERROR: ", ge);
throw new GenerationException("Error getting the GME 'NCI_GME_SOURCE_XML_LOC_REF' or 'NCI_GME_TARGET_XML_LOC_REF' tag value for association roleName: " + assocEnd.getRoleName(), ge);
}
}
private String getGmeLocRef(UMLAssociation assoc,String klassName) throws GenerationException
{
String tv = getTagValue(assoc,TV_NCI_GME_SOURCE_XML_LOC_REF,null,0,1);
if (tv !=null && tv.endsWith("/"+klassName)){
return tv.substring(0, tv.lastIndexOf('/'));
}
tv = getTagValue(assoc,TV_NCI_GME_TARGET_XML_LOC_REF,null,0,1);
if (tv !=null && tv.endsWith("/"+klassName)){
return tv.substring(0, tv.lastIndexOf('/'));
}
return null;
}
public String getGmeSourceLocRef(UMLAssociation assoc) throws GenerationException
{
return getTagValue(assoc,TV_NCI_GME_SOURCE_XML_LOC_REF,null,0,1);
}
public String getGmeTargetLocRef(UMLAssociation assoc) throws GenerationException
{
return getTagValue(assoc,TV_NCI_GME_TARGET_XML_LOC_REF,null,0,1);
}
public boolean hasGMELocRefTag(UMLTaggableElement te){
try {
getTagValue(te,TV_NCI_GME_SOURCE_XML_LOC_REF,null,0,0);
getTagValue(te,TV_NCI_GME_TARGET_XML_LOC_REF,null,0,0);
} catch (GenerationException e) {
return true;
}
return false;
}
public boolean containsIncludedClass(UMLPackage pkg)
throws GenerationException {
for (UMLClass klass : pkg.getClasses()) {
if (isIncluded(klass) && !STEREO_TYPE_TABLE.equalsIgnoreCase(klass.getStereotype())){
return true;
}
}
return false;
}
public String getNamespaceUriPrefix() {
return namespaceUriPrefix;
}
public boolean isUseGMETags() {
return useGMETags;
}
public boolean isJaxbEnabled() {
return isJaxbEnabled;
}
} | JAXB implementation - Added getJaxbXmlRootElementAnnotation() method
SVN-Revision: 634
| sdk-toolkit/software/modules/codegen/src/gov/nih/nci/codegen/util/TransformerUtils.java | JAXB implementation - Added getJaxbXmlRootElementAnnotation() method | <ide><path>dk-toolkit/software/modules/codegen/src/gov/nih/nci/codegen/util/TransformerUtils.java
<ide> }
<ide>
<ide> /**
<del> * Determines whether the input class is a superclass
<add> * Determines whether the input class is missing a table mapping
<ide> * @param klass
<ide> * @return
<ide> */
<ide>
<ide> return " @XmlAttribute";
<ide> }
<add>
<add>
<add> public String getJaxbXmlRootElementAnnotation(UMLClass klass){
<add>
<add> // todo :: remove commented code
<add>// List<UMLClass> subClasses = getNonImplicitSubclasses(klass);
<add>// List<UMLClass> superClasses = getNonImplicitSuperclasses(klass);
<add>
<add> StringBuffer sb = new StringBuffer();
<add>
<add>
<add>// if (isSuperclass(klass) || !getNonImplicitSubclasses(klass).isEmpty() || !getNonImplicitSuperclasses(klass).isEmpty()){
<add>//
<add>// }
<add>
<add> //Default - use klass name as XML Root Element
<add> sb.append("@XmlRootElement(name=\"");
<add> sb.append(klass.getName()).append("\", ");
<add> sb.append("namespace=\"").append(this.getNamespaceUriPrefix() + this.getFullPackageName(klass)).append("\")");
<add>
<add> log.debug("@XmlRootElement annotation for class "+klass.getName()+": "+sb.toString());
<add>
<add> return sb.toString();
<add>
<add> }
<ide>
<ide> public String getJaxbXmlTypeAnnotation(UMLClass klass){
<ide>
<ide> for (UMLClass subKlass:subClasses){
<ide> counter++;
<ide> found = true;
<del> sb.append(getFullPackageName(subKlass)+"."+subKlass.getName()+".class");
<add> sb.append(subKlass.getName()+".class");
<ide> if (counter < totalCount){
<ide> sb.append(", ");
<ide> }
<ide> for (UMLClass superKlass:superClasses){
<ide> counter++;
<ide> found = true;
<del> sb.append(getFullPackageName(superKlass)+"."+superKlass.getName()+".class");
<add> sb.append(superKlass.getName()+".class");
<ide> if (counter < totalCount){
<ide> sb.append(", ");
<ide> } |
|
Java | mit | 118aff257518655dc40f5aa234b7fdd28671519c | 0 | spoluyan/book2speech | package pw.spn.book2speech.service;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.util.IOUtils;
import com.ivona.services.tts.IvonaSpeechCloudClient;
import com.ivona.services.tts.model.CreateSpeechRequest;
import com.ivona.services.tts.model.CreateSpeechResult;
import com.ivona.services.tts.model.Input;
import com.ivona.services.tts.model.ListVoicesRequest;
import com.ivona.services.tts.model.ListVoicesResult;
import com.ivona.services.tts.model.OutputFormat;
import com.ivona.services.tts.model.Parameters;
import com.ivona.services.tts.model.Voice;
import pw.spn.book2speech.model.TransformationOptions;
import pw.spn.book2speech.service.parser.InputFileParserFactory;
public class IvonaAPIClient {
private static final int TEXT_BLOCK_MAX_SIZE = 8192;
private final IvonaSpeechCloudClient cloudClient;
public IvonaAPIClient(String accessKey, String secretKey, String endpoint) {
cloudClient = new IvonaSpeechCloudClient(new BasicAWSCredentials(accessKey, secretKey));
cloudClient.setEndpoint(endpoint);
}
public void getVoiceList(Voice filter) {
ListVoicesRequest allVoicesRequest = new ListVoicesRequest();
allVoicesRequest.setVoice(filter);
ListVoicesResult allVoicesResult = cloudClient.listVoices(allVoicesRequest);
System.out.println("Available voices (" + allVoicesResult.getVoices().size() + "):");
allVoicesResult.getVoices().forEach(voice -> {
System.out.println("---------");
System.out.println("Language: " + voice.getLanguage());
System.out.println("Name: " + voice.getName());
System.out.println("Gender: " + voice.getGender());
});
}
public void transformTextToSpeech(TransformationOptions options) {
System.out.println("Transforming " + options.getInputFile() + " to speech.");
System.out.println("Output dir is " + options.getOutputDir());
File inputFile = new File(options.getInputFile());
File outputDir = new File(options.getOutputDir());
validateFiles(inputFile, outputDir);
List<String> content = readFile(inputFile, options.getEncoding());
int numbersInOutputFilesNames = calculateLengthOfNumber(content.size());
OutputFormat outputFormat = new OutputFormat();
outputFormat.setCodec(options.getCodec());
Voice voice = new Voice();
voice.setGender(options.getGender());
voice.setLanguage(options.getLanguage());
voice.setName(options.getName());
Parameters parameters = new Parameters();
parameters.setRate(options.getRate());
parameters.setVolume(options.getVolume());
AtomicInteger counter = new AtomicInteger(0);
IntStream.rangeClosed(1, content.size()).forEach(i -> {
String text = content.get(i - 1);
System.out.println("Processing file " + counter.incrementAndGet() + " of " + content.size());
CreateSpeechRequest createSpeechRequest = new CreateSpeechRequest();
createSpeechRequest.setGeneralProgressListener(progressEvent -> {
System.out.print("=");
});
Input input = new Input();
input.setData(text);
createSpeechRequest.setInput(input);
createSpeechRequest.setOutputFormat(outputFormat);
createSpeechRequest.setVoice(voice);
createSpeechRequest.setParameters(parameters);
CreateSpeechResult createSpeechResult = cloudClient.createSpeech(createSpeechRequest);
writeFile(outputDir, numbersInOutputFilesNames, i, options.getCodec(), createSpeechResult.getBody());
System.out.println(" 100%");
});
System.out.println("Done!");
}
private void validateFiles(File inputFile, File outputDir) {
if (!inputFile.exists()) {
System.err.println("Input file does not exists.");
System.exit(0);
}
if (!inputFile.isFile()) {
System.err.println("Input file is not a file.");
System.exit(0);
}
if (!outputDir.exists() || !outputDir.isDirectory()) {
boolean outputDirCreated = outputDir.mkdirs();
if (!outputDirCreated) {
System.err.println("Unable to create output dir.");
System.exit(0);
}
}
}
private List<String> readFile(File inputFile, String encoding) {
List<String> lines = InputFileParserFactory.getParser(inputFile).toPlainText(inputFile, encoding);
List<String> concated = new ArrayList<>();
IntStream.range(0, lines.size()).forEachOrdered(i -> {
String line = lines.get(i);
if (line.isEmpty()) {
return;
}
if (concated.size() == 0) {
concated.add(line);
return;
}
String newLine = concated.get(concated.size() - 1) + line;
int length = newLine.length();
if (length <= TEXT_BLOCK_MAX_SIZE) {
concated.remove(concated.size() - 1);
concated.add(newLine);
} else {
concated.add(line);
}
});
return concated;
}
private int calculateLengthOfNumber(int number) {
int result = 0;
for (int i = number; i > 0; i = i / 10) {
result++;
}
return result;
}
private void writeFile(File outputDir, int numbersInOutputFilesNames, int index, String codec, InputStream stream) {
String fileName = generateFileName(outputDir, numbersInOutputFilesNames, index, codec);
Path outputFile = new File(fileName).toPath();
try {
byte[] content = IOUtils.toByteArray(stream);
Files.write(outputFile, content);
} catch (IOException e) {
e.printStackTrace();
} finally {
IOUtils.closeQuietly(stream, null);
}
}
private String generateFileName(File outputDir, int numbersInOutputFilesNames, int index, String codec) {
int reservedNumbers = calculateLengthOfNumber(index);
StringBuilder name = new StringBuilder(outputDir.getAbsolutePath()).append(File.separatorChar);
for (int i = 0; i < numbersInOutputFilesNames - reservedNumbers; i++) {
name.append('0');
}
name.append(index).append('.').append(codec.toLowerCase());
return name.toString();
}
}
| src/main/java/pw/spn/book2speech/service/IvonaAPIClient.java | package pw.spn.book2speech.service;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.util.IOUtils;
import com.ivona.services.tts.IvonaSpeechCloudClient;
import com.ivona.services.tts.model.CreateSpeechRequest;
import com.ivona.services.tts.model.CreateSpeechResult;
import com.ivona.services.tts.model.Input;
import com.ivona.services.tts.model.ListVoicesRequest;
import com.ivona.services.tts.model.ListVoicesResult;
import com.ivona.services.tts.model.OutputFormat;
import com.ivona.services.tts.model.Parameters;
import com.ivona.services.tts.model.Voice;
import pw.spn.book2speech.model.Rate;
import pw.spn.book2speech.model.TransformationOptions;
import pw.spn.book2speech.service.parser.InputFileParserFactory;
public class IvonaAPIClient {
private static final int WORDS_PER_FILE = 900;
private final IvonaSpeechCloudClient cloudClient;
public IvonaAPIClient(String accessKey, String secretKey, String endpoint) {
cloudClient = new IvonaSpeechCloudClient(new BasicAWSCredentials(accessKey, secretKey));
cloudClient.setEndpoint(endpoint);
}
public void getVoiceList(Voice filter) {
ListVoicesRequest allVoicesRequest = new ListVoicesRequest();
allVoicesRequest.setVoice(filter);
ListVoicesResult allVoicesResult = cloudClient.listVoices(allVoicesRequest);
System.out.println("Available voices (" + allVoicesResult.getVoices().size() + "):");
allVoicesResult.getVoices().forEach(voice -> {
System.out.println("---------");
System.out.println("Language: " + voice.getLanguage());
System.out.println("Name: " + voice.getName());
System.out.println("Gender: " + voice.getGender());
});
}
public void transformTextToSpeech(TransformationOptions options) {
System.out.println("Transforming " + options.getInputFile() + " to speech.");
System.out.println("Output dir is " + options.getOutputDir());
File inputFile = new File(options.getInputFile());
File outputDir = new File(options.getOutputDir());
validateFiles(inputFile, outputDir);
List<String> content = readFile(inputFile, options.getRate(), options.getEncoding());
int numbersInOutputFilesNames = calculateLengthOfNumber(content.size());
OutputFormat outputFormat = new OutputFormat();
outputFormat.setCodec(options.getCodec());
Voice voice = new Voice();
voice.setGender(options.getGender());
voice.setLanguage(options.getLanguage());
voice.setName(options.getName());
Parameters parameters = new Parameters();
parameters.setRate(options.getRate());
parameters.setVolume(options.getVolume());
AtomicInteger counter = new AtomicInteger(0);
IntStream.rangeClosed(1, content.size()).forEach(i -> {
String text = content.get(i - 1);
System.out.println("Processing file " + counter.incrementAndGet() + " of " + content.size());
CreateSpeechRequest createSpeechRequest = new CreateSpeechRequest();
createSpeechRequest.setGeneralProgressListener(progressEvent -> {
System.out.print("=");
});
Input input = new Input();
input.setData(text);
createSpeechRequest.setInput(input);
createSpeechRequest.setOutputFormat(outputFormat);
createSpeechRequest.setVoice(voice);
createSpeechRequest.setParameters(parameters);
CreateSpeechResult createSpeechResult = cloudClient.createSpeech(createSpeechRequest);
writeFile(outputDir, numbersInOutputFilesNames, i, options.getCodec(), createSpeechResult.getBody());
System.out.println(" 100%");
});
System.out.println("Done!");
}
private void validateFiles(File inputFile, File outputDir) {
if (!inputFile.exists()) {
System.err.println("Input file does not exists.");
System.exit(0);
}
if (!inputFile.isFile()) {
System.err.println("Input file is not a file.");
System.exit(0);
}
if (!outputDir.exists() || !outputDir.isDirectory()) {
boolean outputDirCreated = outputDir.mkdirs();
if (!outputDirCreated) {
System.err.println("Unable to create output dir.");
System.exit(0);
}
}
}
private List<String> readFile(File inputFile, String rate, String encoding) {
int ratio = -2;
while (!Rate.values()[ratio + 2].getRate().equals(rate)) {
ratio++;
}
int wordsPerFile = WORDS_PER_FILE + ratio * WORDS_PER_FILE / (Rate.values().length + 1) * 2;
List<String> lines = InputFileParserFactory.getParser(inputFile).toPlainText(inputFile, encoding);
List<String> concated = new ArrayList<>();
IntStream.range(0, lines.size()).forEachOrdered(i -> {
String line = lines.get(i);
if (line.isEmpty()) {
return;
}
if (concated.size() == 0) {
concated.add(line);
return;
}
String newLine = concated.get(concated.size() - 1) + line;
int words = newLine.split("\\W+").length;
if (words <= wordsPerFile) {
concated.remove(concated.size() - 1);
concated.add(newLine);
} else {
concated.add(line);
}
});
return concated;
}
private int calculateLengthOfNumber(int number) {
int result = 0;
for (int i = number; i > 0; i = i / 10) {
result++;
}
return result;
}
private void writeFile(File outputDir, int numbersInOutputFilesNames, int index, String codec, InputStream stream) {
String fileName = generateFileName(outputDir, numbersInOutputFilesNames, index, codec);
Path outputFile = new File(fileName).toPath();
try {
byte[] content = IOUtils.toByteArray(stream);
Files.write(outputFile, content);
} catch (IOException e) {
e.printStackTrace();
} finally {
IOUtils.closeQuietly(stream, null);
}
}
private String generateFileName(File outputDir, int numbersInOutputFilesNames, int index, String codec) {
int reservedNumbers = calculateLengthOfNumber(index);
StringBuilder name = new StringBuilder(outputDir.getAbsolutePath()).append(File.separatorChar);
for (int i = 0; i < numbersInOutputFilesNames - reservedNumbers; i++) {
name.append('0');
}
name.append(index).append('.').append(codec.toLowerCase());
return name.toString();
}
}
| Change text limit calculation | src/main/java/pw/spn/book2speech/service/IvonaAPIClient.java | Change text limit calculation | <ide><path>rc/main/java/pw/spn/book2speech/service/IvonaAPIClient.java
<ide> import com.ivona.services.tts.model.Parameters;
<ide> import com.ivona.services.tts.model.Voice;
<ide>
<del>import pw.spn.book2speech.model.Rate;
<ide> import pw.spn.book2speech.model.TransformationOptions;
<ide> import pw.spn.book2speech.service.parser.InputFileParserFactory;
<ide>
<ide> public class IvonaAPIClient {
<del> private static final int WORDS_PER_FILE = 900;
<add> private static final int TEXT_BLOCK_MAX_SIZE = 8192;
<ide>
<ide> private final IvonaSpeechCloudClient cloudClient;
<ide>
<ide>
<ide> validateFiles(inputFile, outputDir);
<ide>
<del> List<String> content = readFile(inputFile, options.getRate(), options.getEncoding());
<add> List<String> content = readFile(inputFile, options.getEncoding());
<ide>
<ide> int numbersInOutputFilesNames = calculateLengthOfNumber(content.size());
<ide>
<ide> }
<ide> }
<ide>
<del> private List<String> readFile(File inputFile, String rate, String encoding) {
<del> int ratio = -2;
<del>
<del> while (!Rate.values()[ratio + 2].getRate().equals(rate)) {
<del> ratio++;
<del> }
<del>
<del> int wordsPerFile = WORDS_PER_FILE + ratio * WORDS_PER_FILE / (Rate.values().length + 1) * 2;
<del>
<add> private List<String> readFile(File inputFile, String encoding) {
<ide> List<String> lines = InputFileParserFactory.getParser(inputFile).toPlainText(inputFile, encoding);
<ide>
<ide> List<String> concated = new ArrayList<>();
<ide> return;
<ide> }
<ide> String newLine = concated.get(concated.size() - 1) + line;
<del> int words = newLine.split("\\W+").length;
<del> if (words <= wordsPerFile) {
<add> int length = newLine.length();
<add> if (length <= TEXT_BLOCK_MAX_SIZE) {
<ide> concated.remove(concated.size() - 1);
<ide> concated.add(newLine);
<ide> } else { |
|
Java | bsd-3-clause | 828f118f6cf6988b3ae5684cb7981d246fb0155b | 0 | cmorty/avrora,cmorty/avrora,cmorty/avrora | /**
* Copyright (c) 2004-2005, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the University of California, Los Angeles nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package avrora.sim.mcu;
import java.util.LinkedList;
import avrora.sim.Simulator;
import avrora.sim.clock.ClockDomain;
import avrora.sim.platform.Platform;
import avrora.sim.state.BooleanRegister;
import avrora.sim.state.BooleanView;
/**
* The <code>Microcontroller</code> interface corresponds to a hardware device that implements the AVR
* instruction set. This interface contains methods that get commonly needed information about the particular
* hardware device and and can load programs onto this virtual device.
*
* @author Ben L. Titzer
*/
public interface Microcontroller {
/**
* The <code>Pin</code> interface encapsulates the notion of a physical pin on the microcontroller chip.
* It is generally used in wiring up external devices to the microcontroller.
*
* @author Ben L. Titzer
*/
public interface Pin {
/**
* Listener which will be called if the value of a pin changes.
*/
public interface InputListener {
/**
* Called when the value of <code>input</code> was changed.
*
* @param input input which was affected
* @param newValue new value of the input
*/
void onInputChanged(Input input, boolean newValue);
}
/**
* The <code>Input</code> interface represents an input pin. When the pin is configured to be an input
* and the microcontroller attempts to read from this pin, the installed instance of this interface
* will be called.
*/
public interface Input {
/**
* The <code>read()</code> method is called by the simulator when the program attempts to read the
* level of the pin. The device can then compute and return the current level of the pin.
*
* @return true if the level of the pin is high; false otherwise
*/
public boolean read();
/**
* Registers a {@link PinChangeListener}.
*
* @param listener listener to register
*/
public void registerListener(InputListener listener);
/**
* Unregisters a {@link PinChangeListener} if found, or does nothing otherwise.
*
* @param listener listener to unregister.
*/
public void unregisterListener(InputListener listener);
}
/** ListenableInput which is implemented using a BooleanView */
public class ListenableBooleanViewInput extends ListenableInput implements BooleanView.ValueSetListener {
/***
* Value of this input
*/
private BooleanView level;
/**
* Creates this input pin and creates a new BooleanRegister as a value basis.
*/
public ListenableBooleanViewInput() {
this(new BooleanRegister());
}
/**
* Creates this input pin and uses an existing BooleanView
* @param view existing BooleanView to use
*/
public ListenableBooleanViewInput(BooleanView view) {
setLevelView(view);
}
/**
* Returns the view used as pin level
*/
public BooleanView getLevelView() {
return level;
}
/**
* Changes the view used for pin level
*/
public void setLevelView(BooleanView view) {
boolean hadLevel = (level != null);
if (hadLevel) { // unregister previously registered views
level.setValueSetListener(null);
}
level = view;
level.setValueSetListener(this);
if (hadLevel) {
notifyListeners(level.getValue()); // value might have changed -> re-test this!
}
}
/**
* Changes the level of this pin by modifying the underlying view.
*/
public void setLevel(boolean newLevel) {
level.setValue(newLevel);
}
/**
* Returns the level of this pin by reading the underlying view.
* @return
*/
public boolean getLevel() {
return level.getValue();
}
@Override
public boolean read() {
return level.getValue();
}
@Override
public void onValueSet(BooleanView view, boolean newValue) {
this.notifyListeners(newValue);
}
}
/** Abstract implementation of <code>Input</code> which supports InputListeners. */
public abstract class ListenableInput implements Input {
private LinkedList<InputListener> listeners;
private boolean oldValue;
/** Checks if a change in level has occured, and notifies all listeners. */
protected void update() {
boolean newValue = read();
if (oldValue != newValue) {
notifyListeners(newValue);
oldValue = newValue;
}
}
/** Notifies every listener about a changed value. */
protected void notifyListeners(boolean newValue) {
if (listeners != null) {
for (InputListener l : listeners) {
l.onInputChanged(this, newValue);
}
}
}
public void registerListener(InputListener listener) {
if (listeners == null) {
listeners = new LinkedList<InputListener>();
}
listeners.add(listener);
}
public void unregisterListener(InputListener listener) {
if (listeners != null) {
if (listeners.remove(listener) && listeners.isEmpty()) {
listeners = null;
}
}
}
}
/**
* The <code>Output</code> interface represents an output pin. When the pin is configured to be an
* output and the microcontroller attempts to wrote to this pin, the installed instance of this
* interface will be called.
*/
public interface Output {
/**
* The <code>write()</code> method is called by the simulator when the program writes a logical
* level to the pin. The device can then take the appropriate action.
*
* @param level a boolean representing the logical level of the write
*/
public void write(boolean level);
}
/**
* The <code>connect()</code> method will connect this pin to the specified input. Attempts by the
* microcontroller to read from this pin when it is configured as an input will then call this
* instance's <code>read()</code> method.
*
* @param i the <code>Input</code> instance to connect to
*/
public void connectInput(Input i);
/**
* The <code>connect()</code> method will connect this pin to the specified output. Attempts by the
* microcontroller to write to this pin when it is configured as an output will then call this
* instance's <code>write()</code> method.
*
* @param o the <code>Output</code> instance to connect to
*/
public void connectOutput(Output o);
/**
* The <code>setInput()</code> method sets the current level of the pin. This can be used <b>additionally</b>
* to <code>connectInput()</code> to trigger interrupts.
*
* @param level the <code>Intput</code>
*/
public void setInput(Boolean level);
}
/**
* The <code>getSimulator()</code> method gets a simulator instance that is capable of emulating this
* hardware device.
*
* @return a <code>Simulator</code> instance corresponding to this device
*/
public Simulator getSimulator();
/**
* The <code>getPlatform()</code> method gets a platform instance that contains this microcontroller.
*
* @return the platform instance containing this microcontroller, if it exists; null otherwise
*/
public Platform getPlatform();
/**
* The <code>setPlatform()</code> method sets the platform instance that contains this microcontroller.
* @param p the new platform for this microcontroller
*/
public void setPlatform(Platform p);
/**
* The <code>getPin()</code> method looks up the named pin and returns a reference to that pin. Names of
* pins should be UPPERCASE. The intended users of this method are external device implementors which
* connect their devices to the microcontroller through the pins.
*
* @param name the name of the pin; for example "PA0" or "OC1A"
* @return a reference to the <code>Pin</code> object corresponding to the named pin if it exists; null
* otherwise
*/
public Pin getPin(String name);
/**
* The <code>getPin()</code> method looks up the specified pin by its number and returns a reference to
* that pin. The intended users of this method are external device implementors which connect their
* devices to the microcontroller through the pins.
*
* @param num the pin number to look up
* @return a reference to the <code>Pin</code> object corresponding to the named pin if it exists; null
* otherwise
*/
public Pin getPin(int num);
/**
* The <code>sleep()</code> method puts the microcontroller into the sleep mode defined by its
* internal sleep configuration register. It may shutdown devices and disable some clocks. This
* method should only be called from within the interpreter.
*/
public void sleep();
/**
* The <code>wakeup()</code> method wakes the microcontroller from a sleep mode. It may resume
* devices, turn clocks back on, etc. This method is expected to return the number of cycles that
* is required for the microcontroller to wake completely from the sleep state it was in.
*
* @return cycles required to wake from the current sleep mode
*/
public int wakeup();
/**
* The <code>getClockDomain()</code> method returns the clock domain for this microcontroller. The clock
* domain contains all of the clocks attached to the microcontroller and platform, including the main clock.
* @return an instance of the <code>ClockDomain</code> class representing the clock domain for this
* microcontroller
*/
public ClockDomain getClockDomain();
/**
* The <code>getRegisterSet()</code> method returns the register set containing all of the IO registers
* for this microcontroller.
* @return a reference to the <code>RegisterSet</code> instance which stores all of the IO registers
* for this microcontroller.
*/
public RegisterSet getRegisterSet();
/**
* The <code>getProperties()</code> method gets an object that describes the microcontroller
* including the size of the RAM, EEPROM, flash, etc.
* @return an instance of the <code>MicrocontrollerProperties</code> class that contains all
* the relevant information about this microcontroller
*/
public MCUProperties getProperties();
}
| src/avrora/sim/mcu/Microcontroller.java | /**
* Copyright (c) 2004-2005, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the University of California, Los Angeles nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package avrora.sim.mcu;
import java.util.LinkedList;
import avrora.sim.Simulator;
import avrora.sim.clock.ClockDomain;
import avrora.sim.platform.Platform;
import avrora.sim.state.BooleanRegister;
import avrora.sim.state.BooleanView;
/**
* The <code>Microcontroller</code> interface corresponds to a hardware device that implements the AVR
* instruction set. This interface contains methods that get commonly needed information about the particular
* hardware device and and can load programs onto this virtual device.
*
* @author Ben L. Titzer
*/
public interface Microcontroller {
/**
* The <code>Pin</code> interface encapsulates the notion of a physical pin on the microcontroller chip.
* It is generally used in wiring up external devices to the microcontroller.
*
* @author Ben L. Titzer
*/
public interface Pin {
/**
* Listener which will be called if the value of a pin changes.
*/
public interface InputListener {
/**
* Called when the value of <code>input</code> was changed.
*
* @param input input which was affected
* @param newValue new value of the input
*/
void onInputChanged(Input input, boolean newValue);
}
/**
* The <code>Input</code> interface represents an input pin. When the pin is configured to be an input
* and the microcontroller attempts to read from this pin, the installed instance of this interface
* will be called.
*/
public interface Input {
/**
* The <code>read()</code> method is called by the simulator when the program attempts to read the
* level of the pin. The device can then compute and return the current level of the pin.
*
* @return true if the level of the pin is high; false otherwise
*/
public boolean read();
/**
* Registers a {@link PinChangeListener}.
*
* @param listener listener to register
*/
public void registerListener(InputListener listener);
/**
* Unregisters a {@link PinChangeListener} if found, or does nothing otherwise.
*
* @param listener listener to unregister.
*/
public void unregisterListener(InputListener listener);
}
/** ListenableInput which is implemented using a BooleanView */
public class ListenableBooleanViewInput extends ListenableInput implements BooleanView.ValueSetListener {
/***
* Value of this input
*/
private BooleanView level;
/**
* Creates this input pin and creates a new BooleanRegister as a value basis.
*/
public ListenableBooleanViewInput() {
this(new BooleanRegister());
}
/**
* Creates this input pin and uses an existing BooleanView
* @param view existing BooleanView to use
*/
public ListenableBooleanViewInput(BooleanView view) {
setLevelView(view);
}
/**
* Returns the view used as pin level
*/
public BooleanView getLevelView() {
return level;
}
/**
* Changes the view used for pin level
*/
public void setLevelView(BooleanView view) {
boolean hadLevel = (level != null);
if (hadLevel) { // unregister previously registered views
level.setValueSetListener(null);
}
level = view;
level.setValueSetListener(this);
if (hadLevel) {
notifyListeners(level.getValue()); // value might have changed -> re-test this!
}
}
/**
* Changes the level of this pin by modifying the underlying view.
*/
public void setLevel(boolean newLevel) {
level.setValue(newLevel);
}
/**
* Returns the level of this pin by reading the underlying view.
* @return
*/
public boolean getLevel() {
return level.getValue();
}
@Override
public boolean read() {
return level.getValue();
}
@Override
public void onValueSet(BooleanView view, boolean newValue) {
this.notifyListeners(newValue);
}
}
/** Abstract implementation of <code>Input</code> which supports InputListeners. */
public abstract class ListenableInput implements Input {
private LinkedList<InputListener> listeners;
private boolean oldValue;
/** Checks if a change in level has occured, and notifies all listeners. */
protected void update() {
boolean newValue = read();
if (oldValue != newValue) {
notifyListeners(newValue);
oldValue = newValue;
}
}
/** Notifies every listener about a changed value. */
protected void notifyListeners(boolean newValue) {
if (listeners != null) {
for (InputListener l : listeners) {
l.onInputChanged(this, newValue);
}
}
}
public void registerListener(InputListener listener) {
if (listeners == null) {
listeners = new LinkedList<InputListener>();
}
listeners.add(listener);
}
public void unregisterListener(InputListener listener) {
if (listeners != null) {
if (listeners.remove(listener) && listeners.isEmpty()) {
listeners = null;
}
}
}
}
/**
* The <code>Output</code> interface represents an output pin. When the pin is configured to be an
* output and the microcontroller attempts to wrote to this pin, the installed instance of this
* interface will be called.
*/
public interface Output {
/**
* The <code>write()</code> method is called by the simulator when the program writes a logical
* level to the pin. The device can then take the appropriate action.
*
* @param level a boolean representing the logical level of the write
*/
public void write(boolean level);
}
/**
* The <code>connect()</code> method will connect this pin to the specified input. Attempts by the
* microcontroller to read from this pin when it is configured as an input will then call this
* instance's <code>read()</code> method.
*
* @param i the <code>Input</code> instance to connect to
*/
public void connectInput(Input i);
/**
* The <code>connect()</code> method will connect this pin to the specified output. Attempts by the
* microcontroller to write to this pin when it is configured as an output will then call this
* instance's <code>write()</code> method.
*
* @param o the <code>Output</code> instance to connect to
*/
public void connectOutput(Output o);
}
/**
* The <code>getSimulator()</code> method gets a simulator instance that is capable of emulating this
* hardware device.
*
* @return a <code>Simulator</code> instance corresponding to this device
*/
public Simulator getSimulator();
/**
* The <code>getPlatform()</code> method gets a platform instance that contains this microcontroller.
*
* @return the platform instance containing this microcontroller, if it exists; null otherwise
*/
public Platform getPlatform();
/**
* The <code>setPlatform()</code> method sets the platform instance that contains this microcontroller.
* @param p the new platform for this microcontroller
*/
public void setPlatform(Platform p);
/**
* The <code>getPin()</code> method looks up the named pin and returns a reference to that pin. Names of
* pins should be UPPERCASE. The intended users of this method are external device implementors which
* connect their devices to the microcontroller through the pins.
*
* @param name the name of the pin; for example "PA0" or "OC1A"
* @return a reference to the <code>Pin</code> object corresponding to the named pin if it exists; null
* otherwise
*/
public Pin getPin(String name);
/**
* The <code>getPin()</code> method looks up the specified pin by its number and returns a reference to
* that pin. The intended users of this method are external device implementors which connect their
* devices to the microcontroller through the pins.
*
* @param num the pin number to look up
* @return a reference to the <code>Pin</code> object corresponding to the named pin if it exists; null
* otherwise
*/
public Pin getPin(int num);
/**
* The <code>sleep()</code> method puts the microcontroller into the sleep mode defined by its
* internal sleep configuration register. It may shutdown devices and disable some clocks. This
* method should only be called from within the interpreter.
*/
public void sleep();
/**
* The <code>wakeup()</code> method wakes the microcontroller from a sleep mode. It may resume
* devices, turn clocks back on, etc. This method is expected to return the number of cycles that
* is required for the microcontroller to wake completely from the sleep state it was in.
*
* @return cycles required to wake from the current sleep mode
*/
public int wakeup();
/**
* The <code>getClockDomain()</code> method returns the clock domain for this microcontroller. The clock
* domain contains all of the clocks attached to the microcontroller and platform, including the main clock.
* @return an instance of the <code>ClockDomain</code> class representing the clock domain for this
* microcontroller
*/
public ClockDomain getClockDomain();
/**
* The <code>getRegisterSet()</code> method returns the register set containing all of the IO registers
* for this microcontroller.
* @return a reference to the <code>RegisterSet</code> instance which stores all of the IO registers
* for this microcontroller.
*/
public RegisterSet getRegisterSet();
/**
* The <code>getProperties()</code> method gets an object that describes the microcontroller
* including the size of the RAM, EEPROM, flash, etc.
* @return an instance of the <code>MicrocontrollerProperties</code> class that contains all
* the relevant information about this microcontroller
*/
public MCUProperties getProperties();
}
| Suppoort pushing levels to pins
| src/avrora/sim/mcu/Microcontroller.java | Suppoort pushing levels to pins | <ide><path>rc/avrora/sim/mcu/Microcontroller.java
<ide> * @param o the <code>Output</code> instance to connect to
<ide> */
<ide> public void connectOutput(Output o);
<add>
<add> /**
<add> * The <code>setInput()</code> method sets the current level of the pin. This can be used <b>additionally</b>
<add> * to <code>connectInput()</code> to trigger interrupts.
<add> *
<add> * @param level the <code>Intput</code>
<add> */
<add> public void setInput(Boolean level);
<add>
<ide> }
<ide>
<ide> /** |
|
Java | lgpl-2.1 | 4d3a7b2592c70a4b5f95ecdcc9dd97244d47c3ed | 0 | despc/jcommune,Vitalij-Voronkoff/jcommune,CocoJumbo/jcommune,Relvl/jcommune,a-nigredo/jcommune,vps2/jcommune,jtalks-org/jcommune,0x0000-dot-ru/jcommune,illerax/jcommune,CocoJumbo/jcommune,0x0000-dot-ru/jcommune,mihnayan/jcommune,NCNecros/jcommune,mihnayan/jcommune,Noctrunal/jcommune,oatkachenko/jcommune,illerax/jcommune,SurfVaporizer/jcommune,Noctrunal/jcommune,oatkachenko/jcommune,Z00M/jcommune,CocoJumbo/jcommune,mihnayan/jcommune,Z00M/jcommune,Relvl/jcommune,Vitalij-Voronkoff/jcommune,jtalks-org/jcommune,illerax/jcommune,despc/jcommune,NCNecros/jcommune,SurfVaporizer/jcommune,Vitalij-Voronkoff/jcommune,jtalks-org/jcommune,Noctrunal/jcommune,a-nigredo/jcommune,oatkachenko/jcommune,despc/jcommune,SurfVaporizer/jcommune,Z00M/jcommune,shevarnadze/jcommune,shevarnadze/jcommune,a-nigredo/jcommune,vps2/jcommune,Relvl/jcommune,shevarnadze/jcommune,vps2/jcommune,NCNecros/jcommune | /**
* Copyright (C) 2011 JTalks.org Team
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jtalks.jcommune.web.exception;
import org.apache.commons.logging.Log;
import org.jtalks.jcommune.service.exceptions.NotFoundException;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.util.ReflectionUtils;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import sun.security.acl.PrincipalImpl;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Field;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
/**
* @author Vitaliy Kravchenko
*/
public class PrettyLogExceptionResolverTest {
private PrettyLogExceptionResolver prettyLogExceptionResolver;
@BeforeMethod
public void setUp() throws Exception {
prettyLogExceptionResolver = new PrettyLogExceptionResolver();
}
@Test
public void testLogExceptionWithIncomingNotFoundException() throws Exception {
Log mockLog = replaceLoggerWithMock(prettyLogExceptionResolver);
NotFoundException notFoundException = new NotFoundException("Entity not found");
prettyLogExceptionResolver.logException(notFoundException, mock(HttpServletRequest.class));
verify(mockLog).info("Entity not found");
}
@Test
public void testLogExceptionWithIncomingAccessDeniedException() throws Exception {
Log mockLog = replaceLoggerWithMock(prettyLogExceptionResolver);
AccessDeniedException accessDeniedException = new AccessDeniedException("Access denied");
MockHttpServletRequest request = new MockHttpServletRequest("POST", "/testing/url/42");
request.setServerName("testserver.com");
request.setServerPort(8080);
request.setUserPrincipal(new PrincipalImpl("test user"));
prettyLogExceptionResolver.logException(accessDeniedException, request);
verify(mockLog).info("Access was denied for user [test user] trying to POST http://testserver.com:8080/testing/url/42");
}
@Test
public void testLogExceptionWithoutNotFoundException() throws Exception {
Log mockLog = replaceLoggerWithMock(prettyLogExceptionResolver);
Exception exception = new Exception();
prettyLogExceptionResolver.logException(exception, mock(HttpServletRequest.class));
verify(mockLog, times(0)).info(anyString());
}
private Log replaceLoggerWithMock(PrettyLogExceptionResolver resolver) throws Exception {
Log mockLog = mock(Log.class);
Field loggerField = ReflectionUtils.findField(PrettyLogExceptionResolver.class, "logger");
loggerField.setAccessible(true);
loggerField.set(resolver, mockLog);
return mockLog;
}
}
| jcommune-view/jcommune-web-controller/src/test/java/org/jtalks/jcommune/web/exception/PrettyLogExceptionResolverTest.java | /**
* Copyright (C) 2011 JTalks.org Team
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jtalks.jcommune.web.exception;
import org.apache.commons.logging.Log;
import org.jtalks.jcommune.service.exceptions.NotFoundException;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.util.ReflectionUtils;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import sun.security.acl.PrincipalImpl;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Field;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
/**
* @author Vitaliy Kravchenko
*/
public class PrettyLogExceptionResolverTest {
private PrettyLogExceptionResolver prettyLogExceptionResolver;
@BeforeMethod
public void setUp() throws Exception {
prettyLogExceptionResolver = new PrettyLogExceptionResolver();
}
@Test
public void testLogExceptionWithIncomingNotFoundException() throws Exception {
Log mockLog = replaceLoggerWithMock(prettyLogExceptionResolver);
NotFoundException notFoundException = new NotFoundException("Entity not found");
prettyLogExceptionResolver.logException(notFoundException, mock(HttpServletRequest.class));
verify(mockLog).info("Entity not found");
}
@Test
public void testLogExceptionWithIncomingAccessDeniedException() throws Exception {
Log mockLog = replaceLoggerWithMock(prettyLogExceptionResolver);
AccessDeniedException accessDeniedException = new AccessDeniedException("Access denied");
HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getMethod()).thenReturn("POST");
when(request.getUserPrincipal()).thenReturn(new PrincipalImpl("test user"));
String url = "http://testserver.com/testing/url/42";
when(request.getRequestURL()).thenReturn(new StringBuffer(url));
prettyLogExceptionResolver.logException(accessDeniedException, request);
verify(mockLog).info("Access was denied for user [test user] trying to POST http://testserver.com/testing/url/42");
}
@Test
public void testLogExceptionWithoutNotFoundException() throws Exception {
Log mockLog = replaceLoggerWithMock(prettyLogExceptionResolver);
Exception exception = new Exception();
prettyLogExceptionResolver.logException(exception, mock(HttpServletRequest.class));
verify(mockLog, times(0)).info(anyString());
}
private Log replaceLoggerWithMock(PrettyLogExceptionResolver resolver) throws Exception {
Log mockLog = mock(Log.class);
Field loggerField = ReflectionUtils.findField(PrettyLogExceptionResolver.class, "logger");
loggerField.setAccessible(true);
loggerField.set(resolver, mockLog);
return mockLog;
}
}
| #JC-1212 Fixed unit test (to use MockHttpServletRequest)
| jcommune-view/jcommune-web-controller/src/test/java/org/jtalks/jcommune/web/exception/PrettyLogExceptionResolverTest.java | #JC-1212 Fixed unit test (to use MockHttpServletRequest) | <ide><path>commune-view/jcommune-web-controller/src/test/java/org/jtalks/jcommune/web/exception/PrettyLogExceptionResolverTest.java
<ide>
<ide> import org.apache.commons.logging.Log;
<ide> import org.jtalks.jcommune.service.exceptions.NotFoundException;
<add>import org.springframework.mock.web.MockHttpServletRequest;
<ide> import org.springframework.security.access.AccessDeniedException;
<ide> import org.springframework.util.ReflectionUtils;
<ide> import org.testng.annotations.BeforeMethod;
<ide> Log mockLog = replaceLoggerWithMock(prettyLogExceptionResolver);
<ide> AccessDeniedException accessDeniedException = new AccessDeniedException("Access denied");
<ide>
<del> HttpServletRequest request = mock(HttpServletRequest.class);
<del> when(request.getMethod()).thenReturn("POST");
<del> when(request.getUserPrincipal()).thenReturn(new PrincipalImpl("test user"));
<del> String url = "http://testserver.com/testing/url/42";
<del> when(request.getRequestURL()).thenReturn(new StringBuffer(url));
<add> MockHttpServletRequest request = new MockHttpServletRequest("POST", "/testing/url/42");
<add> request.setServerName("testserver.com");
<add> request.setServerPort(8080);
<add> request.setUserPrincipal(new PrincipalImpl("test user"));
<ide>
<ide> prettyLogExceptionResolver.logException(accessDeniedException, request);
<ide>
<del> verify(mockLog).info("Access was denied for user [test user] trying to POST http://testserver.com/testing/url/42");
<add> verify(mockLog).info("Access was denied for user [test user] trying to POST http://testserver.com:8080/testing/url/42");
<ide> }
<ide>
<ide> @Test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.