lang stringclasses 2 values | license stringclasses 13 values | stderr stringlengths 0 343 | commit stringlengths 40 40 | returncode int64 0 128 | repos stringlengths 6 87.7k | new_contents stringlengths 0 6.23M | new_file stringlengths 3 311 | old_contents stringlengths 0 6.23M | message stringlengths 6 9.1k | old_file stringlengths 3 311 | subject stringlengths 0 4k | git_diff stringlengths 0 6.31M |
---|---|---|---|---|---|---|---|---|---|---|---|---|
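The schema above describes one commit per row: source language, license, git loader diagnostics (`stderr`, `returncode`), the repository list, and the old/new file contents together with the commit message, subject, and diff. As a reading aid, a hypothetical Java record mirroring that schema is sketched below; only the column names come from the header, and the record itself is illustrative rather than part of the dataset.

```java
// Hypothetical typed holder for one row of the preview below.
// Field order follows the table header (lang ... git_diff); the record name
// and camelCase spellings are assumptions, not dataset content.
public record CommitRow(
        String lang,
        String license,
        String stderr,
        String commit,
        long returncode,
        String repos,
        String newContents,
        String newFile,
        String oldContents,
        String message,
        String oldFile,
        String subject,
        String gitDiff) {
}
```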
Java | mit | error: pathspec 'src/GeeksforGeeksPractice/_0083TicTacToeValidity.java' did not match any file(s) known to git
| f674235661b432e4d555a553916f30fe9ac5ccbd | 1 | darshanhs90/Java-InterviewPrep,darshanhs90/Java-Coding | package GeeksforGeeksPractice;
import java.util.Arrays;
/*
* Link : http://www.geeksforgeeks.org/validity-of-a-given-tic-tac-toe-board-configuration/
*/
public class _0083TicTacToeValidity {
public static void main(String[] args) {
char board[] = {'X', 'X', 'O',
'O', 'O', 'X',
'X', 'O', 'X'};
System.out.println(checkValidity(board));
}
static int winMatrix[][]=new int[][]{{0, 1, 2}, // Check first row.
{3, 4, 5}, // Check second Row
{6, 7, 8}, // Check third Row
{0, 3, 6}, // Check first column
{1, 4, 7}, // Check second Column
{2, 5, 8}, // Check third Column
{0, 4, 8}, // Check first Diagonal
{2, 4, 6}};
private static boolean checkValidity(char[] board) {
//getXcount
//getOcount
boolean xWin=checkWin(board,'X');
boolean oWin=checkWin(board,'O');
int xCount=0,oCount=0;
for (int i = 0; i < board.length; i++) {
if(board[i]=='X')
xCount++;
else if(board[i]=='O')
oCount++;
}
if(xCount==oCount||xCount==oCount+1)
{
if(oWin)
{
if(xWin){
return false;
}
if(xCount==oCount)
return true;
else
return false;
}
}
if(xWin && xCount!=oCount+1)
return false;
return true;
}
private static boolean checkWin(char[] board, char c) {
for (int i = 0; i < winMatrix.length; i++) {
if(board[winMatrix[i][0]]==c
&& board[winMatrix[i][1]]==c
&& board[winMatrix[i][2]]==c)
return true;
}
return false;
}
/*private static void printMatrix(int[][] s) {
for (int i = 0; i < s.length; i++) {
System.out.println(Arrays.toString(s[i]));
}
}
*/
}
| src/GeeksforGeeksPractice/_0083TicTacToeValidity.java | Tic Tac Toe validity completed | src/GeeksforGeeksPractice/_0083TicTacToeValidity.java | Tic Tac Toe validity completed | <ide><path>rc/GeeksforGeeksPractice/_0083TicTacToeValidity.java
<add>package GeeksforGeeksPractice;
<add>
<add>import java.util.Arrays;
<add>
<add>/*
<add> * Link : http://www.geeksforgeeks.org/validity-of-a-given-tic-tac-toe-board-configuration/
<add> */
<add>public class _0083TicTacToeValidity {
<add>
<add> public static void main(String[] args) {
<add> char board[] = {'X', 'X', 'O',
<add> 'O', 'O', 'X',
<add> 'X', 'O', 'X'};
<add> System.out.println(checkValidity(board));
<add> }
<add> static int winMatrix[][]=new int[][]{{0, 1, 2}, // Check first row.
<add> {3, 4, 5}, // Check second Row
<add> {6, 7, 8}, // Check third Row
<add> {0, 3, 6}, // Check first column
<add> {1, 4, 7}, // Check second Column
<add> {2, 5, 8}, // Check third Column
<add> {0, 4, 8}, // Check first Diagonal
<add> {2, 4, 6}};
<add>
<add>
<add> private static boolean checkValidity(char[] board) {
<add> //getXcount
<add> //getOcount
<add> boolean xWin=checkWin(board,'X');
<add> boolean oWin=checkWin(board,'O');
<add> int xCount=0,oCount=0;
<add> for (int i = 0; i < board.length; i++) {
<add> if(board[i]=='X')
<add> xCount++;
<add> else if(board[i]=='O')
<add> oCount++;
<add> }
<add>
<add> if(xCount==oCount||xCount==oCount+1)
<add> {
<add> if(oWin)
<add> {
<add> if(xWin){
<add> return false;
<add> }
<add> if(xCount==oCount)
<add> return true;
<add> else
<add> return false;
<add> }
<add> }
<add>
<add> if(xWin && xCount!=oCount+1)
<add> return false;
<add> return true;
<add> }
<add>
<add>
<add>
<add> private static boolean checkWin(char[] board, char c) {
<add> for (int i = 0; i < winMatrix.length; i++) {
<add> if(board[winMatrix[i][0]]==c
<add> && board[winMatrix[i][1]]==c
<add> && board[winMatrix[i][2]]==c)
<add> return true;
<add> }
<add> return false;
<add>
<add> }
<add>
<add>
<add>
<add> /*private static void printMatrix(int[][] s) {
<add> for (int i = 0; i < s.length; i++) {
<add> System.out.println(Arrays.toString(s[i]));
<add> }
<add>
<add> }
<add>*/
<add>
<add>} |
|
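The row above records the first commit of a tic-tac-toe board validator (its `stderr` and `returncode 1` fields just reflect that the file did not exist before this commit). The sketch below is a compact, self-contained restatement of the turn-order and win rules the committed method encodes, not a line-by-line port of it; the class name, helper names, and sample boards are illustrative assumptions.

```java
// Standalone sketch of the validity rules from the commit above: X moves
// first, both players cannot win, and a winner must have moved last.
public class TicTacToeValiditySketch {
    private static final int[][] WINS = {
        {0, 1, 2}, {3, 4, 5}, {6, 7, 8},   // rows
        {0, 3, 6}, {1, 4, 7}, {2, 5, 8},   // columns
        {0, 4, 8}, {2, 4, 6}               // diagonals
    };

    static boolean wins(char[] b, char c) {
        for (int[] w : WINS) {
            if (b[w[0]] == c && b[w[1]] == c && b[w[2]] == c) {
                return true;
            }
        }
        return false;
    }

    static boolean valid(char[] b) {
        int x = 0, o = 0;
        for (char c : b) {
            if (c == 'X') x++;
            else if (c == 'O') o++;
        }
        if (x != o && x != o + 1) return false;           // X always moves first
        boolean xWin = wins(b, 'X'), oWin = wins(b, 'O');
        if (xWin && oWin) return false;                   // both cannot win
        if (xWin && x != o + 1) return false;             // X wins => X moved last
        if (oWin && x != o) return false;                 // O wins => O moved last
        return true;
    }

    public static void main(String[] args) {
        char[] noWinner = {'X', 'X', 'O', 'O', 'O', 'X', 'X', 'O', 'X'};
        char[] bothWin  = {'X', 'X', 'X', 'O', 'O', 'O', 'X', 'O', 'X'};
        System.out.println(valid(noWinner)); // true: counts 5/4, no line of three
        System.out.println(valid(bothWin));  // false: X and O both complete a row
    }
}
```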
JavaScript | mit | d1013dff3f64d82288517564db1c9cc05056ac33 | 0 | lijanele/slush-y,joelcoxokc/slush-y,lijanele/slush-y,joelcoxokc/slush-y,joelcoxokc/slush-y,joelcoxokc/slush-y,joehannes-generation/slush-y,joehannes-generation/slush-y | ;(function(){
'use strict';
angular
.module('<%= moduleNames.slug %>')
.filter('<%= names.single.camel %>', <%= names.single.camel %>);
/* @inject */
function <%= names.single.camel %>() {
return filter;
//////////////
function filter(input) {
// <%= names.single.humanized %> directive logic
// ...
return '<%= names.single.camel %> filter: ' + input;
}
}
}).call(this); | generators/client/filter/templates/base/filters/_.filter.js | ;(function(){
'use strict';
angular
.module('<%= moduleNames.slug %>')
.filter('<%= names.single.camel %>', <%= names.single.camel %>);
/* @inject */
function <%= names.single.camel %>() {
return function(input) {
// <%= names.single.humanized %> directive logic
// ...
return '<%= names.single.camel %> filter: ' + input;
};
}
}).call(this); | Update: factory sub-generator:
Refactor Logic
| generators/client/filter/templates/base/filters/_.filter.js | Update: factory sub-generator: Refactor Logic | <ide><path>enerators/client/filter/templates/base/filters/_.filter.js
<ide>
<ide> /* @inject */
<ide> function <%= names.single.camel %>() {
<del> return function(input) {
<add>
<add> return filter;
<add>
<add> //////////////
<add>
<add> function filter(input) {
<ide> // <%= names.single.humanized %> directive logic
<ide> // ...
<ide>
<ide> return '<%= names.single.camel %> filter: ' + input;
<del> };
<add> }
<ide> }
<ide> }).call(this); |
|
Java | apache-2.0 | 92737e21adca31e9a2d4722048ae0028456b93d5 | 0 | cache2k/cache2k,cache2k/cache2k,cache2k/cache2k | package org.cache2k.expiry;
/*
* #%L
* cache2k API
* %%
* Copyright (C) 2000 - 2016 headissue GmbH, Munich
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
* Expiry time values that have a special meaning. Used for expressive return values in the
* customizations {@link org.cache2k.integration.ResiliencePolicy} and {@link ExpiryPolicy}
* as well as {@link org.cache2k.Cache#expireAt(Object, long)}.
*
* @author Jens Wilke
*/
public interface ExpiryTimeValues {
/**
* Don't change the expiry of the entry. This can be used for an update.
*/
long NEUTRAL = -1;
/**
* Value should not be cached. In a read through configuration the value will be
* reloaded, when it is requested again.
*/
long NO_CACHE = 0;
/**
* Identical to {@link #NO_CACHE}. More meaningful when used together with
* {@link org.cache2k.Cache#expireAt}. The value expires immediately. An immediate
* load is triggered if refreshAhead is enabled.
*/
long NOW = 0;
/**
* An immediate load is triggered if refreshAhead is enabled. If the refresh is not
* possible, for example because of no loader threads are available the value will expire.
*
* <p>After the load operation is completed, the entry is in a special area and not accessible
* by direct cache operations, meaning {@code containsKey} returns false. After an operation which
* triggers a load (e.g. {@code get} or {@code loadAll}, the entry is included in the cache.
*/
long REFRESH = 1;
/**
* Return value signalling to keep the value forever in the cache, switching off expiry.
* If the cache has a static expiry time configured, then this is used instead.
*/
long ETERNAL = Long.MAX_VALUE;
}
| api/src/main/java/org/cache2k/expiry/ExpiryTimeValues.java | package org.cache2k.expiry;
/*
* #%L
* cache2k API
* %%
* Copyright (C) 2000 - 2016 headissue GmbH, Munich
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
* Expiry time values that have a special meaning. Used for expressive return values in the
* customizations {@link org.cache2k.integration.ResiliencePolicy} and {@link ExpiryPolicy}
* as well as {@link org.cache2k.Cache#expireAt(Object, long)}.
*
* @author Jens Wilke
*/
public interface ExpiryTimeValues {
/**
* Don't change the expiry of the entry. This can be used for an update.
*/
long NEUTRAL = -1;
/**
* Value should not be cached. In a read through configuration the value will be
* reloaded, when it is requested again.
*/
long NO_CACHE = 0;
/**
* Identical value to {@link #NO_CACHE}, meaningful when used together with
* {@link org.cache2k.Cache#expireAt}. The value expires immediately. An immediate
* load is triggered if refreshAhead is enabled.
*/
long NOW = 0;
/**
* If refresh ahead is enabled, the value will be cached and visible. An immediate
* load is triggered if refreshAhead is enabled. If the refresh is not possible, for example because of
* no loader threads are available the value will expire.
*/
long REFRESH = 1;
/**
* Return value signalling to keep the value forever in the cache, switching off expiry.
* If the cache has a static expiry time configured, then this is used instead.
*/
long ETERNAL = Long.MAX_VALUE;
}
| improve refresh documentation
| api/src/main/java/org/cache2k/expiry/ExpiryTimeValues.java | improve refresh documentation | <ide><path>pi/src/main/java/org/cache2k/expiry/ExpiryTimeValues.java
<ide> long NO_CACHE = 0;
<ide>
<ide> /**
<del> * Identical value to {@link #NO_CACHE}, meaningful when used together with
<add> * Identical to {@link #NO_CACHE}. More meaningful when used together with
<ide> * {@link org.cache2k.Cache#expireAt}. The value expires immediately. An immediate
<ide> * load is triggered if refreshAhead is enabled.
<ide> */
<ide> long NOW = 0;
<ide>
<ide> /**
<del> * If refresh ahead is enabled, the value will be cached and visible. An immediate
<del> * load is triggered if refreshAhead is enabled. If the refresh is not possible, for example because of
<del> * no loader threads are available the value will expire.
<add> * An immediate load is triggered if refreshAhead is enabled. If the refresh is not
<add> * possible, for example because of no loader threads are available the value will expire.
<add> *
<add> * <p>After the load operation is completed, the entry is in a special area and not accessible
<add> * by direct cache operations, meaning {@code containsKey} returns false. After an operation which
<add> * triggers a load (e.g. {@code get} or {@code loadAll}, the entry is included in the cache.
<ide> */
<ide> long REFRESH = 1;
<ide> |
|
Java | apache-2.0 | 084bcec742b5ef4d77ab75d498ba62a94305096e | 0 | amaembo/streamex | /*
* Copyright 2015, 2019 StreamEx contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package one.util.streamex;
import java.util.Spliterator;
import java.util.Spliterators.AbstractDoubleSpliterator;
import java.util.Spliterators.AbstractIntSpliterator;
import java.util.Spliterators.AbstractLongSpliterator;
import java.util.Spliterators.AbstractSpliterator;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleConsumer;
import java.util.function.IntBinaryOperator;
import java.util.function.IntConsumer;
import java.util.function.LongBinaryOperator;
import java.util.function.LongConsumer;
import static one.util.streamex.Internals.CloneableSpliterator;
import static one.util.streamex.Internals.NONE;
import static one.util.streamex.Internals.none;
/**
* @author Tagir Valeev
*/
/* package */ abstract class PrefixOps<T, S extends Spliterator<T>> extends CloneableSpliterator<T, PrefixOps<T, S>> {
private static final int BUF_SIZE = 128;
protected S source;
protected int idx = 0;
PrefixOps(S source) {
this.source = source;
}
@Override
public long estimateSize() {
return source.estimateSize();
}
@Override
public int characteristics() {
return source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | SUBSIZED);
}
static final class OfRef<T> extends AbstractSpliterator<T> implements Consumer<T> {
private final BinaryOperator<T> op;
private final Spliterator<T> source;
private boolean started;
private T acc;
OfRef(Spliterator<T> source, BinaryOperator<T> op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(Consumer<? super T> action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(Consumer<? super T> action) {
source.forEachRemaining(next -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(T next) {
if (started) {
acc = op.apply(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfInt extends AbstractIntSpliterator implements IntConsumer {
private final IntBinaryOperator op;
private final Spliterator.OfInt source;
private boolean started;
private int acc;
OfInt(Spliterator.OfInt source, IntBinaryOperator op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | NONNULL));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(IntConsumer action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(IntConsumer action) {
source.forEachRemaining((int next) -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(int next) {
if (started) {
acc = op.applyAsInt(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfLong extends AbstractLongSpliterator implements LongConsumer {
private final LongBinaryOperator op;
private final Spliterator.OfLong source;
private boolean started;
private long acc;
OfLong(Spliterator.OfLong source, LongBinaryOperator op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | NONNULL));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(LongConsumer action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(LongConsumer action) {
source.forEachRemaining((long next) -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(long next) {
if (started) {
acc = op.applyAsLong(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfDouble extends AbstractDoubleSpliterator implements DoubleConsumer {
private final DoubleBinaryOperator op;
private final Spliterator.OfDouble source;
private boolean started;
private double acc;
OfDouble(Spliterator.OfDouble source, DoubleBinaryOperator op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | NONNULL));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(DoubleConsumer action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(DoubleConsumer action) {
source.forEachRemaining((double next) -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(double next) {
if (started) {
acc = op.applyAsDouble(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfUnordRef<T> extends PrefixOps<T, Spliterator<T>> implements Consumer<T> {
private final BinaryOperator<T> localOp;
private AtomicReference<T> accRef;
private T acc = none();
private final BinaryOperator<T> op;
OfUnordRef(Spliterator<T> source, BinaryOperator<T> op) {
super(source);
this.localOp = op;
this.op = (a, b) -> a == NONE ? b : op.apply(a, b);
}
@Override
public Spliterator<T> trySplit() {
if (acc != NONE) {
return null;
}
Spliterator<T> prefix = source.trySplit();
if (prefix == null) {
return null;
}
if (accRef == null) {
accRef = new AtomicReference<>(none());
}
OfUnordRef<T> pref = (OfUnordRef<T>) doClone();
pref.source = prefix;
return pref;
}
@Override
public boolean tryAdvance(Consumer<? super T> action) {
if (!source.tryAdvance(this)) {
return false;
}
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(Consumer<? super T> action) {
if (accRef == null) {
source.forEachRemaining(next -> action.accept(acc = op.apply(acc, next)));
} else {
@SuppressWarnings("unchecked")
T[] buf = (T[]) new Object[BUF_SIZE];
source.forEachRemaining(next -> {
if (idx == 0) {
buf[idx++] = next;
} else {
T prev = buf[idx - 1];
buf[idx++] = localOp.apply(prev, next);
if (idx == buf.length) {
drain(action, buf);
idx = 0;
}
}
});
if (idx > 0)
drain(action, buf);
}
}
private void drain(Consumer<? super T> action, T[] buf) {
T last = buf[idx - 1];
T acc = accRef.getAndAccumulate(last, op);
if (acc != NONE) {
for (int i = 0; i < idx; i++) {
action.accept(localOp.apply(buf[i], acc));
}
} else {
for (int i = 0; i < idx; i++) {
action.accept(buf[i]);
}
}
}
@Override
public void accept(T next) {
if (accRef == null) {
acc = op.apply(acc, next);
} else {
acc = accRef.accumulateAndGet(next, op);
}
}
}
static final class OfUnordInt extends PrefixOps<Integer, Spliterator.OfInt> implements IntConsumer, Spliterator.OfInt {
private final LongBinaryOperator op;
private final IntBinaryOperator localOp;
private boolean started;
private AtomicLong accRef;
private int acc;
OfUnordInt(Spliterator.OfInt source, IntBinaryOperator op) {
super(source);
this.localOp = op;
this.op = (a, b) -> a == Long.MAX_VALUE ? b : op.applyAsInt((int) a, (int) b);
}
@Override
public Spliterator.OfInt trySplit() {
if (started) {
return null;
}
Spliterator.OfInt prefix = source.trySplit();
if (prefix == null) {
return null;
}
if (accRef == null) {
accRef = new AtomicLong(Long.MAX_VALUE);
}
OfUnordInt pref = (OfUnordInt) doClone();
pref.source = prefix;
return pref;
}
@Override
public boolean tryAdvance(IntConsumer action) {
if (!source.tryAdvance(this)) {
return false;
}
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(IntConsumer action) {
if (accRef == null) {
source.forEachRemaining((IntConsumer) next -> {
if (started) {
acc = localOp.applyAsInt(acc, next);
} else {
acc = next;
started = true;
}
action.accept(acc);
});
} else {
int[] buf = new int[BUF_SIZE];
source.forEachRemaining((IntConsumer) next -> {
if (idx == 0) {
buf[idx++] = next;
} else {
int prev = buf[idx - 1];
buf[idx++] = localOp.applyAsInt(prev, next);
if (idx == buf.length) {
drain(action, buf);
idx = 0;
}
}
});
if (idx > 0)
drain(action, buf);
}
}
private void drain(IntConsumer action, int[] buf) {
int last = buf[idx - 1];
if (accRef.compareAndSet(Long.MAX_VALUE, last)) {
for (int i = 0; i < idx; i++) {
action.accept(buf[i]);
}
} else {
int acc = (int) accRef.getAndAccumulate(last, op);
for (int i = 0; i < idx; i++) {
action.accept(localOp.applyAsInt(buf[i], acc));
}
}
}
@Override
public void accept(int next) {
if (accRef == null) {
if (started) {
acc = localOp.applyAsInt(acc, next);
} else {
started = true;
acc = next;
}
} else {
acc = (int) accRef.accumulateAndGet(next, op);
}
}
}
static final class OfUnordLong extends PrefixOps<Long, Spliterator.OfLong> implements LongConsumer, Spliterator.OfLong {
private final LongBinaryOperator op;
private boolean started;
private MyAtomicLong accRef;
private long acc;
OfUnordLong(Spliterator.OfLong source, LongBinaryOperator op) {
super(source);
this.op = op;
}
private static final class MyAtomicLong extends AtomicLong {
private boolean init;
/**
* On the very first call sets the value to {@code x}
* @param x the initial value
* @return {@code true} if it was the very first call
*/
public synchronized boolean initialize(long x) {
if (!init) {
init = true;
set(x);
return true;
}
return false;
}
}
@Override
public Spliterator.OfLong trySplit() {
if (started) {
return null;
}
Spliterator.OfLong prefix = source.trySplit();
if (prefix == null) {
return null;
}
if (accRef == null) {
accRef = new MyAtomicLong();
}
OfUnordLong pref = (OfUnordLong) doClone();
pref.source = prefix;
return pref;
}
@Override
public boolean tryAdvance(LongConsumer action) {
if (!source.tryAdvance(this)) {
return false;
}
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(LongConsumer action) {
if (accRef == null) {
source.forEachRemaining((LongConsumer) next -> {
if (started) {
acc = op.applyAsLong(acc, next);
} else {
acc = next;
started = true;
}
action.accept(acc);
});
} else {
long[] buf = new long[BUF_SIZE];
source.forEachRemaining((LongConsumer) next -> {
if (idx == 0) {
buf[idx++] = next;
} else {
long prev = buf[idx - 1];
buf[idx++] = op.applyAsLong(prev, next);
if (idx == buf.length) {
drain(action, buf);
idx = 0;
}
}
});
if (idx > 0)
drain(action, buf);
}
}
private void drain(LongConsumer action, long[] buf) {
long last = buf[idx - 1];
boolean accRefJustInitialized = accRef.initialize(last);
if (accRefJustInitialized) {
for (int i = 0; i < idx; i++) {
action.accept(buf[i]);
}
} else {
long acc = accRef.getAndAccumulate(last, op);
for (int i = 0; i < idx; i++) {
action.accept(op.applyAsLong(buf[i], acc));
}
}
}
@Override
public void accept(long next) {
if (accRef == null) {
if (started) {
acc = op.applyAsLong(acc, next);
} else {
started = true;
acc = next;
}
} else {
boolean accRefJustInitialized = accRef.initialize(next);
if (!accRefJustInitialized) {
acc = accRef.accumulateAndGet(next, op);
} else {
acc = next;
}
}
}
}
}
| src/main/java/one/util/streamex/PrefixOps.java | /*
* Copyright 2015, 2019 StreamEx contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package one.util.streamex;
import java.util.Spliterator;
import java.util.Spliterators.AbstractDoubleSpliterator;
import java.util.Spliterators.AbstractIntSpliterator;
import java.util.Spliterators.AbstractLongSpliterator;
import java.util.Spliterators.AbstractSpliterator;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleConsumer;
import java.util.function.IntBinaryOperator;
import java.util.function.IntConsumer;
import java.util.function.LongBinaryOperator;
import java.util.function.LongConsumer;
import static one.util.streamex.Internals.CloneableSpliterator;
import static one.util.streamex.Internals.NONE;
import static one.util.streamex.Internals.none;
/**
* @author Tagir Valeev
*/
/* package */ abstract class PrefixOps<T, S extends Spliterator<T>> extends CloneableSpliterator<T, PrefixOps<T, S>> {
private static final int BUF_SIZE = 128;
protected S source;
protected int idx = 0;
PrefixOps(S source) {
this.source = source;
}
@Override
public long estimateSize() {
return source.estimateSize();
}
@Override
public int characteristics() {
return source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | SUBSIZED);
}
static final class OfRef<T> extends AbstractSpliterator<T> implements Consumer<T> {
private final BinaryOperator<T> op;
private final Spliterator<T> source;
private boolean started;
private T acc;
OfRef(Spliterator<T> source, BinaryOperator<T> op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(Consumer<? super T> action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(Consumer<? super T> action) {
source.forEachRemaining(next -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(T next) {
if (started) {
acc = op.apply(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfInt extends AbstractIntSpliterator implements IntConsumer {
private final IntBinaryOperator op;
private final Spliterator.OfInt source;
private boolean started;
private int acc;
OfInt(Spliterator.OfInt source, IntBinaryOperator op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | NONNULL));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(IntConsumer action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(IntConsumer action) {
source.forEachRemaining((int next) -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(int next) {
if (started) {
acc = op.applyAsInt(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfLong extends AbstractLongSpliterator implements LongConsumer {
private final LongBinaryOperator op;
private final Spliterator.OfLong source;
private boolean started;
private long acc;
OfLong(Spliterator.OfLong source, LongBinaryOperator op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | NONNULL));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(LongConsumer action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(LongConsumer action) {
source.forEachRemaining((long next) -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(long next) {
if (started) {
acc = op.applyAsLong(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfDouble extends AbstractDoubleSpliterator implements DoubleConsumer {
private final DoubleBinaryOperator op;
private final Spliterator.OfDouble source;
private boolean started;
private double acc;
OfDouble(Spliterator.OfDouble source, DoubleBinaryOperator op) {
super(source.estimateSize(), source.characteristics() & (ORDERED | IMMUTABLE | CONCURRENT | SIZED | NONNULL));
this.source = source;
this.op = op;
}
@Override
public boolean tryAdvance(DoubleConsumer action) {
if (!source.tryAdvance(this))
return false;
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(DoubleConsumer action) {
source.forEachRemaining((double next) -> {
this.accept(next);
action.accept(acc);
});
}
@Override
public void accept(double next) {
if (started) {
acc = op.applyAsDouble(acc, next);
} else {
started = true;
acc = next;
}
}
}
static final class OfUnordRef<T> extends PrefixOps<T, Spliterator<T>> implements Consumer<T> {
private final BinaryOperator<T> localOp;
private AtomicReference<T> accRef;
private T acc = none();
private final BinaryOperator<T> op;
OfUnordRef(Spliterator<T> source, BinaryOperator<T> op) {
super(source);
this.localOp = op;
this.op = (a, b) -> a == NONE ? b : op.apply(a, b);
}
@Override
public Spliterator<T> trySplit() {
if (acc != NONE) {
return null;
}
Spliterator<T> prefix = source.trySplit();
if (prefix == null) {
return null;
}
if (accRef == null) {
accRef = new AtomicReference<>(none());
}
OfUnordRef<T> pref = (OfUnordRef<T>) doClone();
pref.source = prefix;
return pref;
}
@Override
public boolean tryAdvance(Consumer<? super T> action) {
if (!source.tryAdvance(this)) {
return false;
}
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(Consumer<? super T> action) {
if (accRef == null) {
source.forEachRemaining(next -> action.accept(acc = op.apply(acc, next)));
} else {
@SuppressWarnings("unchecked")
T[] buf = (T[]) new Object[BUF_SIZE];
source.forEachRemaining(next -> {
if (idx == 0) {
buf[idx++] = next;
} else {
T prev = buf[idx - 1];
buf[idx++] = localOp.apply(prev, next);
if (idx == buf.length) {
drain(action, buf);
idx = 0;
}
}
});
if (idx > 0)
drain(action, buf);
}
}
private void drain(Consumer<? super T> action, T[] buf) {
T last = buf[idx - 1];
T acc = accRef.getAndAccumulate(last, op);
if (acc != NONE) {
for (int i = 0; i < idx; i++) {
action.accept(localOp.apply(buf[i], acc));
}
} else {
for (int i = 0; i < idx; i++) {
action.accept(buf[i]);
}
}
}
@Override
public void accept(T next) {
if (accRef == null) {
acc = op.apply(acc, next);
} else {
acc = accRef.accumulateAndGet(next, op);
}
}
}
static final class OfUnordInt extends PrefixOps<Integer, Spliterator.OfInt> implements IntConsumer, Spliterator.OfInt {
private final IntBinaryOperator op;
private boolean started;
private MyAtomicInteger accRef;
private int acc;
OfUnordInt(Spliterator.OfInt source, IntBinaryOperator op) {
super(source);
this.op = op;
}
private static final class MyAtomicInteger extends AtomicInteger {
private boolean init;
/**
* On the very first call sets the value to {@code x}
* @param x the initial value
* @return {@code true} if it was the very first call
*/
public synchronized boolean initialize(int x) {
if (!init) {
init = true;
set(x);
return true;
}
return false;
}
}
@Override
public Spliterator.OfInt trySplit() {
if (started) {
return null;
}
Spliterator.OfInt prefix = source.trySplit();
if (prefix == null) {
return null;
}
if (accRef == null) {
accRef = new MyAtomicInteger();
}
OfUnordInt pref = (OfUnordInt) doClone();
pref.source = prefix;
return pref;
}
@Override
public boolean tryAdvance(IntConsumer action) {
if (!source.tryAdvance(this)) {
return false;
}
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(IntConsumer action) {
if (accRef == null) {
source.forEachRemaining((IntConsumer) next -> {
if (started) {
acc = op.applyAsInt(acc, next);
} else {
acc = next;
started = true;
}
action.accept(acc);
});
} else {
int[] buf = new int[BUF_SIZE];
source.forEachRemaining((IntConsumer) next -> {
if (idx == 0) {
buf[idx++] = next;
} else {
int prev = buf[idx - 1];
buf[idx++] = op.applyAsInt(prev, next);
if (idx == buf.length) {
drain(action, buf);
idx = 0;
}
}
});
if (idx > 0)
drain(action, buf);
}
}
private void drain(IntConsumer action, int[] buf) {
int last = buf[idx - 1];
boolean accRefJustInitialized = accRef.initialize(last);
if (accRefJustInitialized) {
for (int i = 0; i < idx; i++) {
action.accept(buf[i]);
}
} else {
int acc = accRef.getAndAccumulate(last, op);
for (int i = 0; i < idx; i++) {
action.accept(op.applyAsInt(buf[i], acc));
}
}
}
@Override
public void accept(int next) {
if (accRef == null) {
if (started) {
acc = op.applyAsInt(acc, next);
} else {
started = true;
acc = next;
}
} else {
boolean accRefJustInitialized = accRef.initialize(next);
if (!accRefJustInitialized) {
acc = accRef.accumulateAndGet(next, op);
} else {
acc = next;
}
}
}
}
static final class OfUnordLong extends PrefixOps<Long, Spliterator.OfLong> implements LongConsumer, Spliterator.OfLong {
private final LongBinaryOperator op;
private boolean started;
private MyAtomicLong accRef;
private long acc;
OfUnordLong(Spliterator.OfLong source, LongBinaryOperator op) {
super(source);
this.op = op;
}
private static final class MyAtomicLong extends AtomicLong {
private boolean init;
/**
* On the very first call sets the value to {@code x}
* @param x the initial value
* @return {@code true} if it was the very first call
*/
public synchronized boolean initialize(long x) {
if (!init) {
init = true;
set(x);
return true;
}
return false;
}
}
@Override
public Spliterator.OfLong trySplit() {
if (started) {
return null;
}
Spliterator.OfLong prefix = source.trySplit();
if (prefix == null) {
return null;
}
if (accRef == null) {
accRef = new MyAtomicLong();
}
OfUnordLong pref = (OfUnordLong) doClone();
pref.source = prefix;
return pref;
}
@Override
public boolean tryAdvance(LongConsumer action) {
if (!source.tryAdvance(this)) {
return false;
}
action.accept(acc);
return true;
}
@Override
public void forEachRemaining(LongConsumer action) {
if (accRef == null) {
source.forEachRemaining((LongConsumer) next -> {
if (started) {
acc = op.applyAsLong(acc, next);
} else {
acc = next;
started = true;
}
action.accept(acc);
});
} else {
long[] buf = new long[BUF_SIZE];
source.forEachRemaining((LongConsumer) next -> {
if (idx == 0) {
buf[idx++] = next;
} else {
long prev = buf[idx - 1];
buf[idx++] = op.applyAsLong(prev, next);
if (idx == buf.length) {
drain(action, buf);
idx = 0;
}
}
});
if (idx > 0)
drain(action, buf);
}
}
private void drain(LongConsumer action, long[] buf) {
long last = buf[idx - 1];
boolean accRefJustInitialized = accRef.initialize(last);
if (accRefJustInitialized) {
for (int i = 0; i < idx; i++) {
action.accept(buf[i]);
}
} else {
long acc = accRef.getAndAccumulate(last, op);
for (int i = 0; i < idx; i++) {
action.accept(op.applyAsLong(buf[i], acc));
}
}
}
@Override
public void accept(long next) {
if (accRef == null) {
if (started) {
acc = op.applyAsLong(acc, next);
} else {
started = true;
acc = next;
}
} else {
boolean accRefJustInitialized = accRef.initialize(next);
if (!accRefJustInitialized) {
acc = accRef.accumulateAndGet(next, op);
} else {
acc = next;
}
}
}
}
}
| Further performance improvement for unordered parallel IntStreams
 | src/main/java/one/util/streamex/PrefixOps.java | Further performance improvement for unordered parallel IntStreams | <ide><path>rc/main/java/one/util/streamex/PrefixOps.java
<ide> import java.util.Spliterators.AbstractIntSpliterator;
<ide> import java.util.Spliterators.AbstractLongSpliterator;
<ide> import java.util.Spliterators.AbstractSpliterator;
<del>import java.util.concurrent.atomic.AtomicInteger;
<ide> import java.util.concurrent.atomic.AtomicLong;
<ide> import java.util.concurrent.atomic.AtomicReference;
<ide> import java.util.function.BinaryOperator;
<ide> }
<ide>
<ide> static final class OfUnordInt extends PrefixOps<Integer, Spliterator.OfInt> implements IntConsumer, Spliterator.OfInt {
<del> private final IntBinaryOperator op;
<add> private final LongBinaryOperator op;
<add> private final IntBinaryOperator localOp;
<ide> private boolean started;
<del> private MyAtomicInteger accRef;
<add> private AtomicLong accRef;
<ide> private int acc;
<ide>
<ide> OfUnordInt(Spliterator.OfInt source, IntBinaryOperator op) {
<ide> super(source);
<del> this.op = op;
<del> }
<del>
<del> private static final class MyAtomicInteger extends AtomicInteger {
<del> private boolean init;
<del>
<del> /**
<del> * On the very first call sets the value to {@code x}
<del> * @param x the initial value
<del> * @return {@code true} if it was the very first call
<del> */
<del> public synchronized boolean initialize(int x) {
<del> if (!init) {
<del> init = true;
<del> set(x);
<del> return true;
<del> }
<del> return false;
<del> }
<add> this.localOp = op;
<add> this.op = (a, b) -> a == Long.MAX_VALUE ? b : op.applyAsInt((int) a, (int) b);
<ide> }
<ide>
<ide> @Override
<ide> return null;
<ide> }
<ide> if (accRef == null) {
<del> accRef = new MyAtomicInteger();
<add> accRef = new AtomicLong(Long.MAX_VALUE);
<ide> }
<ide> OfUnordInt pref = (OfUnordInt) doClone();
<ide> pref.source = prefix;
<ide> if (accRef == null) {
<ide> source.forEachRemaining((IntConsumer) next -> {
<ide> if (started) {
<del> acc = op.applyAsInt(acc, next);
<add> acc = localOp.applyAsInt(acc, next);
<ide> } else {
<ide> acc = next;
<ide> started = true;
<ide> buf[idx++] = next;
<ide> } else {
<ide> int prev = buf[idx - 1];
<del> buf[idx++] = op.applyAsInt(prev, next);
<add> buf[idx++] = localOp.applyAsInt(prev, next);
<ide> if (idx == buf.length) {
<ide> drain(action, buf);
<ide> idx = 0;
<ide>
<ide> private void drain(IntConsumer action, int[] buf) {
<ide> int last = buf[idx - 1];
<del> boolean accRefJustInitialized = accRef.initialize(last);
<del> if (accRefJustInitialized) {
<add> if (accRef.compareAndSet(Long.MAX_VALUE, last)) {
<ide> for (int i = 0; i < idx; i++) {
<ide> action.accept(buf[i]);
<ide> }
<ide> } else {
<del> int acc = accRef.getAndAccumulate(last, op);
<add> int acc = (int) accRef.getAndAccumulate(last, op);
<ide> for (int i = 0; i < idx; i++) {
<del> action.accept(op.applyAsInt(buf[i], acc));
<add> action.accept(localOp.applyAsInt(buf[i], acc));
<ide> }
<ide> }
<ide> }
<ide> public void accept(int next) {
<ide> if (accRef == null) {
<ide> if (started) {
<del> acc = op.applyAsInt(acc, next);
<add> acc = localOp.applyAsInt(acc, next);
<ide> } else {
<ide> started = true;
<ide> acc = next;
<ide> }
<ide> } else {
<del> boolean accRefJustInitialized = accRef.initialize(next);
<del> if (!accRefJustInitialized) {
<del> acc = accRef.accumulateAndGet(next, op);
<del> } else {
<del> acc = next;
<del> }
<add> acc = (int) accRef.accumulateAndGet(next, op);
<ide> }
<ide> }
<ide> } |
|
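The streamex row above replaces a synchronized `MyAtomicInteger` wrapper with a single `AtomicLong` that carries the running `int` accumulator, using `Long.MAX_VALUE` as the "not yet initialized" sentinel (see `OfUnordInt` in the diff). Below is a rough usage sketch of the `prefix` operation that exercises this path; the printed values are inferred from the documented running-fold semantics, not stored in the dataset.

```java
import one.util.streamex.IntStreamEx;

// Sequential and unordered-parallel prefix sums; the second pipeline is the
// one served by the reworked OfUnordInt spliterator.
public class PrefixSketch {
    public static void main(String[] args) {
        // Running sums of 1..5: [1, 3, 6, 10, 15]
        System.out.println(
                IntStreamEx.rangeClosed(1, 5).prefix(Integer::sum).boxed().toList());

        // Unordered parallel variant: same multiset of partial sums, possibly
        // in a different order, computed through the shared AtomicLong.
        System.out.println(
                IntStreamEx.rangeClosed(1, 5)
                        .parallel()
                        .unordered()
                        .prefix(Integer::sum)
                        .boxed()
                        .toList());
    }
}
```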
Java | apache-2.0 | f2d799ae01b5303c755443b509095f12f1ba367b | 0 | Distrotech/buck,stuhood/buck,rmaz/buck,vine/buck,siddhartharay007/buck,raviagarwal7/buck,stuhood/buck,mikekap/buck,sdwilsh/buck,neonichu/buck,OkBuilds/buck,shybovycha/buck,liuyang-li/buck,janicduplessis/buck,MarkRunWu/buck,liuyang-li/buck,ilya-klyuchnikov/buck,darkforestzero/buck,justinmuller/buck,shs96c/buck,darkforestzero/buck,brettwooldridge/buck,Distrotech/buck,darkforestzero/buck,shs96c/buck,clonetwin26/buck,justinmuller/buck,dushmis/buck,sdwilsh/buck,hgl888/buck,zhuxiaohao/buck,zhan-xiong/buck,LegNeato/buck,bocon13/buck,zpao/buck,sdwilsh/buck,justinmuller/buck,vine/buck,vschs007/buck,janicduplessis/buck,MarkRunWu/buck,lukw00/buck,facebook/buck,illicitonion/buck,luiseduardohdbackup/buck,Heart2009/buck,stuhood/buck,davido/buck,rhencke/buck,Addepar/buck,JoelMarcey/buck,clonetwin26/buck,zpao/buck,pwz3n0/buck,neonichu/buck,clonetwin26/buck,JoelMarcey/buck,romanoid/buck,rhencke/buck,zhuxiaohao/buck,vschs007/buck,shs96c/buck,raviagarwal7/buck,rhencke/buck,sdwilsh/buck,vschs007/buck,k21/buck,Dominator008/buck,Learn-Android-app/buck,Dominator008/buck,mnuessler/buck,dsyang/buck,daedric/buck,grumpyjames/buck,zhan-xiong/buck,grumpyjames/buck,shybovycha/buck,mogers/buck,kageiit/buck,ilya-klyuchnikov/buck,1yvT0s/buck,Addepar/buck,ilya-klyuchnikov/buck,Addepar/buck,luiseduardohdbackup/buck,1yvT0s/buck,Heart2009/buck,MarkRunWu/buck,zhan-xiong/buck,pwz3n0/buck,brettwooldridge/buck,liuyang-li/buck,k21/buck,hgl888/buck,nguyentruongtho/buck,tgummerer/buck,rmaz/buck,lukw00/buck,Dominator008/buck,dsyang/buck,clonetwin26/buck,rowillia/buck,zhan-xiong/buck,zhuxiaohao/buck,darkforestzero/buck,Addepar/buck,daedric/buck,artiya4u/buck,bocon13/buck,Addepar/buck,pwz3n0/buck,k21/buck,k21/buck,artiya4u/buck,robbertvanginkel/buck,liuyang-li/buck,rowillia/buck,grumpyjames/buck,siddhartharay007/buck,dpursehouse/buck,kageiit/buck,dsyang/buck,daedric/buck,Heart2009/buck,rowillia/buck,justinmuller/buck,MarkRunWu/buck,shybovycha/buck,luiseduardohdbackup/buck,dushmis/buck,daedric/buck,liuyang-li/buck,justinmuller/buck,mikekap/buck,Dominator008/buck,darkforestzero/buck,zhan-xiong/buck,SeleniumHQ/buck,justinmuller/buck,mikekap/buck,LegNeato/buck,mogers/buck,mikekap/buck,janicduplessis/buck,vschs007/buck,marcinkwiatkowski/buck,zhan-xiong/buck,vschs007/buck,LegNeato/buck,MarkRunWu/buck,SeleniumHQ/buck,JoelMarcey/buck,rmaz/buck,luiseduardohdbackup/buck,mikekap/buck,clonetwin26/buck,zhan-xiong/buck,sdwilsh/buck,mogers/buck,Dominator008/buck,mnuessler/buck,mnuessler/buck,shs96c/buck,Learn-Android-app/buck,rmaz/buck,pwz3n0/buck,mogers/buck,OkBuilds/buck,lukw00/buck,rhencke/buck,LegNeato/buck,mikekap/buck,darkforestzero/buck,Distrotech/buck,shs96c/buck,illicitonion/buck,hgl888/buck,nguyentruongtho/buck,artiya4u/buck,luiseduardohdbackup/buck,daedric/buck,liuyang-li/buck,k21/buck,LegNeato/buck,shybovycha/buck,nguyentruongtho/buck,mikekap/buck,LegNeato/buck,1yvT0s/buck,shybovycha/buck,robbertvanginkel/buck,rmaz/buck,kageiit/buck,hgl888/buck,bocon13/buck,facebook/buck,Dominator008/buck,1yvT0s/buck,bocon13/buck,illicitonion/buck,artiya4u/buck,dushmis/buck,raviagarwal7/buck,rhencke/buck,bocon13/buck,JoelMarcey/buck,OkBuilds/buck,SeleniumHQ/buck,mnuessler/buck,mogers/buck,1yvT0s/buck,shs96c/buck,neonichu/buck,Addepar/buck,mnuessler/buck,neonichu/buck,tgummerer/buck,lukw00/buck,MarkRunWu/buck,mogers/buck,artiya4u/buck,sdwilsh/buck,rhencke/buck,kageiit/buck,liuyang-li/buck,shybovycha/buck,robbertvanginkel/buck,raviagarwal7/buck,Addepar/buck,rowillia/buck,luiseduardohdbackup/b
uck,vine/buck,JoelMarcey/buck,k21/buck,k21/buck,clonetwin26/buck,daedric/buck,janicduplessis/buck,rmaz/buck,romanoid/buck,clonetwin26/buck,Learn-Android-app/buck,SeleniumHQ/buck,pwz3n0/buck,SeleniumHQ/buck,Learn-Android-app/buck,k21/buck,justinmuller/buck,daedric/buck,dushmis/buck,tgummerer/buck,davido/buck,siddhartharay007/buck,brettwooldridge/buck,Learn-Android-app/buck,mnuessler/buck,romanoid/buck,vine/buck,rmaz/buck,neonichu/buck,Addepar/buck,illicitonion/buck,zhan-xiong/buck,siddhartharay007/buck,bocon13/buck,tgummerer/buck,brettwooldridge/buck,facebook/buck,stuhood/buck,marcinkwiatkowski/buck,zhuxiaohao/buck,sdwilsh/buck,JoelMarcey/buck,artiya4u/buck,stuhood/buck,robbertvanginkel/buck,Learn-Android-app/buck,marcinkwiatkowski/buck,dushmis/buck,stuhood/buck,darkforestzero/buck,dpursehouse/buck,davido/buck,shs96c/buck,pwz3n0/buck,marcinkwiatkowski/buck,davido/buck,zhuxiaohao/buck,shs96c/buck,LegNeato/buck,raviagarwal7/buck,justinmuller/buck,Distrotech/buck,vine/buck,zhan-xiong/buck,shs96c/buck,dpursehouse/buck,justinmuller/buck,OkBuilds/buck,Distrotech/buck,luiseduardohdbackup/buck,Dominator008/buck,rmaz/buck,dsyang/buck,clonetwin26/buck,nguyentruongtho/buck,1yvT0s/buck,dsyang/buck,mikekap/buck,janicduplessis/buck,kageiit/buck,k21/buck,mnuessler/buck,robbertvanginkel/buck,darkforestzero/buck,illicitonion/buck,OkBuilds/buck,mikekap/buck,romanoid/buck,dsyang/buck,davido/buck,artiya4u/buck,grumpyjames/buck,bocon13/buck,dsyang/buck,zhuxiaohao/buck,sdwilsh/buck,OkBuilds/buck,nguyentruongtho/buck,clonetwin26/buck,dpursehouse/buck,SeleniumHQ/buck,rowillia/buck,mikekap/buck,rmaz/buck,tgummerer/buck,nguyentruongtho/buck,sdwilsh/buck,vine/buck,mogers/buck,zhuxiaohao/buck,siddhartharay007/buck,facebook/buck,neonichu/buck,davido/buck,raviagarwal7/buck,janicduplessis/buck,Learn-Android-app/buck,robbertvanginkel/buck,rhencke/buck,tgummerer/buck,pwz3n0/buck,tgummerer/buck,OkBuilds/buck,rmaz/buck,brettwooldridge/buck,brettwooldridge/buck,brettwooldridge/buck,zpao/buck,Heart2009/buck,Dominator008/buck,mikekap/buck,tgummerer/buck,nguyentruongtho/buck,vine/buck,OkBuilds/buck,ilya-klyuchnikov/buck,sdwilsh/buck,zpao/buck,JoelMarcey/buck,shybovycha/buck,SeleniumHQ/buck,zhuxiaohao/buck,vine/buck,romanoid/buck,rowillia/buck,Addepar/buck,Distrotech/buck,zhan-xiong/buck,lukw00/buck,Distrotech/buck,darkforestzero/buck,JoelMarcey/buck,facebook/buck,JoelMarcey/buck,artiya4u/buck,grumpyjames/buck,mogers/buck,illicitonion/buck,kageiit/buck,liuyang-li/buck,dushmis/buck,neonichu/buck,rowillia/buck,daedric/buck,janicduplessis/buck,lukw00/buck,romanoid/buck,SeleniumHQ/buck,lukw00/buck,mogers/buck,rmaz/buck,Distrotech/buck,vine/buck,stuhood/buck,shs96c/buck,grumpyjames/buck,grumpyjames/buck,luiseduardohdbackup/buck,brettwooldridge/buck,shs96c/buck,JoelMarcey/buck,robbertvanginkel/buck,dpursehouse/buck,raviagarwal7/buck,romanoid/buck,robbertvanginkel/buck,OkBuilds/buck,LegNeato/buck,hgl888/buck,clonetwin26/buck,dushmis/buck,LegNeato/buck,Heart2009/buck,zpao/buck,daedric/buck,raviagarwal7/buck,lukw00/buck,siddhartharay007/buck,robbertvanginkel/buck,sdwilsh/buck,ilya-klyuchnikov/buck,Dominator008/buck,JoelMarcey/buck,hgl888/buck,Addepar/buck,MarkRunWu/buck,vschs007/buck,davido/buck,dsyang/buck,zpao/buck,Heart2009/buck,Dominator008/buck,raviagarwal7/buck,robbertvanginkel/buck,grumpyjames/buck,marcinkwiatkowski/buck,davido/buck,Learn-Android-app/buck,illicitonion/buck,dpursehouse/buck,rmaz/buck,vschs007/buck,justinmuller/buck,grumpyjames/buck,illicitonion/buck,daedric/buck,vschs007/buck,Dominator008/buck,siddhartharay007/buck,d
avido/buck,dushmis/buck,neonichu/buck,tgummerer/buck,shybovycha/buck,bocon13/buck,janicduplessis/buck,raviagarwal7/buck,k21/buck,bocon13/buck,daedric/buck,dsyang/buck,romanoid/buck,clonetwin26/buck,sdwilsh/buck,stuhood/buck,mnuessler/buck,tgummerer/buck,marcinkwiatkowski/buck,justinmuller/buck,Heart2009/buck,lukw00/buck,illicitonion/buck,pwz3n0/buck,grumpyjames/buck,ilya-klyuchnikov/buck,davido/buck,romanoid/buck,brettwooldridge/buck,SeleniumHQ/buck,OkBuilds/buck,Learn-Android-app/buck,liuyang-li/buck,davido/buck,brettwooldridge/buck,SeleniumHQ/buck,raviagarwal7/buck,justinmuller/buck,vine/buck,ilya-klyuchnikov/buck,bocon13/buck,vschs007/buck,pwz3n0/buck,ilya-klyuchnikov/buck,shs96c/buck,marcinkwiatkowski/buck,vschs007/buck,shs96c/buck,daedric/buck,dpursehouse/buck,robbertvanginkel/buck,ilya-klyuchnikov/buck,darkforestzero/buck,vschs007/buck,shybovycha/buck,robbertvanginkel/buck,romanoid/buck,facebook/buck,hgl888/buck,rowillia/buck,ilya-klyuchnikov/buck,illicitonion/buck,zhuxiaohao/buck,marcinkwiatkowski/buck,1yvT0s/buck,robbertvanginkel/buck,raviagarwal7/buck,mogers/buck,clonetwin26/buck,zhan-xiong/buck,artiya4u/buck,hgl888/buck,1yvT0s/buck,SeleniumHQ/buck,OkBuilds/buck,vine/buck,Learn-Android-app/buck,vschs007/buck,mnuessler/buck,k21/buck,lukw00/buck,LegNeato/buck,raviagarwal7/buck,romanoid/buck,darkforestzero/buck,Distrotech/buck,luiseduardohdbackup/buck,marcinkwiatkowski/buck,zpao/buck,kageiit/buck,siddhartharay007/buck,LegNeato/buck,marcinkwiatkowski/buck,JoelMarcey/buck,dushmis/buck,marcinkwiatkowski/buck,illicitonion/buck,grumpyjames/buck,hgl888/buck,shybovycha/buck,dsyang/buck,justinmuller/buck,Distrotech/buck,dsyang/buck,LegNeato/buck,rhencke/buck,k21/buck,LegNeato/buck,mikekap/buck,janicduplessis/buck,zhan-xiong/buck,artiya4u/buck,JoelMarcey/buck,dushmis/buck,lukw00/buck,bocon13/buck,pwz3n0/buck,mogers/buck,marcinkwiatkowski/buck,Addepar/buck,stuhood/buck,SeleniumHQ/buck,Distrotech/buck,stuhood/buck,1yvT0s/buck,liuyang-li/buck,dpursehouse/buck,sdwilsh/buck,shybovycha/buck,illicitonion/buck,darkforestzero/buck,ilya-klyuchnikov/buck,ilya-klyuchnikov/buck,shybovycha/buck,artiya4u/buck,rmaz/buck,davido/buck,dsyang/buck,rhencke/buck,OkBuilds/buck,janicduplessis/buck,illicitonion/buck,SeleniumHQ/buck,darkforestzero/buck,Heart2009/buck,rowillia/buck,janicduplessis/buck,janicduplessis/buck,zhan-xiong/buck,Addepar/buck,Dominator008/buck,rowillia/buck,shybovycha/buck,dsyang/buck,bocon13/buck,k21/buck,romanoid/buck,rhencke/buck,clonetwin26/buck,neonichu/buck,grumpyjames/buck,brettwooldridge/buck,OkBuilds/buck,tgummerer/buck,MarkRunWu/buck,davido/buck,stuhood/buck,marcinkwiatkowski/buck,daedric/buck,rowillia/buck,facebook/buck,vschs007/buck,rhencke/buck,ilya-klyuchnikov/buck,Addepar/buck,Learn-Android-app/buck,brettwooldridge/buck,pwz3n0/buck,rowillia/buck,liuyang-li/buck,Heart2009/buck,siddhartharay007/buck,brettwooldridge/buck,romanoid/buck | /*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple.xcode.xcodeproj;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
/**
* File types used in Apple targets.
*/
public final class FileTypes {
// Utility class. Do not instantiate.
private FileTypes() { }
/**
* Map of file extension to Apple UTI (Uniform Type Identifier).
*/
public static final ImmutableMap<String, String> FILE_EXTENSION_TO_UTI =
ImmutableMap.<String, String>builder()
.put("a", "archive.ar")
.put("app", "wrapper.application")
.put("appex", "wrapper.app-extension")
.put("bdic", "file")
.put("bin", "archive.macbinary")
.put("bmp", "image.bmp")
.put("bundle", "wrapper.cfbundle")
.put("c", "sourcecode.c.c")
.put("cc", "sourcecode.cpp.cpp")
.put("cpp", "sourcecode.cpp.cpp")
.put("css", "text.css")
.put("cxx", "sourcecode.cpp.cpp")
.put("d", "sourcecode.dtrace")
.put("dart", "sourcecode")
.put("dylib", "compiled.mach-o.dylib")
.put("exp", "sourcecode.exports")
.put("framework", "wrapper.framework")
.put("fsh", "sourcecode.glsl")
.put("gyp", "sourcecode")
.put("gypi", "text")
.put("h", "sourcecode.c.h")
.put("hxx", "sourcecode.cpp.h")
.put("icns", "image.icns")
.put("java", "sourcecode.java")
.put("jar", "archive.jar")
.put("jpeg", "image.jpeg")
.put("jpg", "image.jpeg")
.put("js", "sourcecode.javascript")
.put("json", "text.json")
.put("m", "sourcecode.c.objc")
.put("mm", "sourcecode.cpp.objcpp")
.put("nib", "wrapper.nib")
.put("o", "compiled.mach-o.objfile")
.put("octest", "wrapper.cfbundle")
.put("pdf", "image.pdf")
.put("pl", "text.script.perl")
.put("plist", "text.plist.xml")
.put("pm", "text.script.perl")
.put("png", "image.png")
.put("proto", "text")
.put("py", "text.script.python")
.put("r", "sourcecode.rez")
.put("rez", "sourcecode.rez")
.put("rtf", "text.rtf")
.put("s", "sourcecode.asm")
.put("storyboard", "file.storyboard")
.put("strings", "text.plist.strings")
.put("stringsdict", "text.plist.xml")
.put("tif", "image.tiff")
.put("tiff", "image.tiff")
.put("tcc", "sourcecode.cpp.cpp")
.put("ttf", "file")
.put("vsh", "sourcecode.glsl")
.put("xcassets", "folder.assetcatalog")
.put("xcconfig", "text.xcconfig")
.put("xcodeproj", "wrapper.pb-project")
.put("xcdatamodel", "wrapper.xcdatamodel")
.put("xcdatamodeld", "wrapper.xcdatamodeld")
.put("xctest", "wrapper.cfbundle")
.put("xib", "file.xib")
.put("y", "sourcecode.yacc")
.put("zip", "archive.zip")
.build();
/**
* Set of UTIs which only work as "lastKnownFileType" and not "explicitFileType"
* in a PBXFileReference.
*
* Yes, really. Because Xcode.
*/
public static final ImmutableSet<String> EXPLICIT_FILE_TYPE_BROKEN_UTIS =
ImmutableSet.of("file.xib");
/**
* Multimap of Apple UTI (Uniform Type Identifier) to file extension(s).
*/
public static final ImmutableMultimap<String, String> UTI_TO_FILE_EXTENSIONS;
static {
// Invert the map of (file extension -> UTI) pairs to
// (UTI -> [file extension 1, ...]) pairs.
ImmutableMultimap.Builder<String, String> builder = ImmutableMultimap.builder();
for (ImmutableMap.Entry<String, String> entry : FILE_EXTENSION_TO_UTI.entrySet()) {
builder.put(entry.getValue(), entry.getKey());
}
UTI_TO_FILE_EXTENSIONS = builder.build();
}
}
| src/com/facebook/buck/apple/xcode/xcodeproj/FileTypes.java | /*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple.xcode.xcodeproj;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
/**
* File types used in Apple targets.
*/
public final class FileTypes {
// Utility class. Do not instantiate.
private FileTypes() { }
/**
* Map of file extension to Apple UTI (Uniform Type Identifier).
*/
public static final ImmutableMap<String, String> FILE_EXTENSION_TO_UTI =
ImmutableMap.<String, String>builder()
.put("a", "archive.ar")
.put("app", "wrapper.application")
.put("appex", "wrapper.app-extension")
.put("bdic", "file")
.put("bin", "archive.macbinary")
.put("bmp", "image.bmp")
.put("bundle", "wrapper.cfbundle")
.put("c", "sourcecode.c.c")
.put("cc", "sourcecode.cpp.cpp")
.put("cpp", "sourcecode.cpp.cpp")
.put("css", "text.css")
.put("cxx", "sourcecode.cpp.cpp")
.put("d", "sourcecode.dtrace")
.put("dart", "sourcecode")
.put("dylib", "compiled.mach-o.dylib")
.put("exp", "sourcecode.exports")
.put("framework", "wrapper.framework")
.put("fsh", "sourcecode.glsl")
.put("gyp", "sourcecode")
.put("gypi", "text")
.put("h", "sourcecode.c.h")
.put("hxx", "sourcecode.cpp.h")
.put("icns", "image.icns")
.put("java", "sourcecode.java")
.put("jar", "archive.jar")
.put("jpeg", "image.jpeg")
.put("jpg", "image.jpeg")
.put("js", "sourcecode.javascript")
.put("json", "text.json")
.put("m", "sourcecode.c.objc")
.put("mm", "sourcecode.cpp.objcpp")
.put("nib", "wrapper.nib")
.put("o", "compiled.mach-o.objfile")
.put("octest", "wrapper.cfbundle")
.put("pdf", "image.pdf")
.put("pl", "text.script.perl")
.put("plist", "text.plist.xml")
.put("pm", "text.script.perl")
.put("png", "image.png")
.put("proto", "text")
.put("py", "text.script.python")
.put("r", "sourcecode.rez")
.put("rez", "sourcecode.rez")
.put("rtf", "text.rtf")
.put("s", "sourcecode.asm")
.put("storyboard", "file.storyboard")
.put("strings", "text.plist.strings")
.put("tif", "image.tiff")
.put("tiff", "image.tiff")
.put("tcc", "sourcecode.cpp.cpp")
.put("ttf", "file")
.put("vsh", "sourcecode.glsl")
.put("xcassets", "folder.assetcatalog")
.put("xcconfig", "text.xcconfig")
.put("xcodeproj", "wrapper.pb-project")
.put("xcdatamodel", "wrapper.xcdatamodel")
.put("xcdatamodeld", "wrapper.xcdatamodeld")
.put("xctest", "wrapper.cfbundle")
.put("xib", "file.xib")
.put("y", "sourcecode.yacc")
.put("zip", "archive.zip")
.build();
/**
* Set of UTIs which only work as "lastKnownFileType" and not "explicitFileType"
* in a PBXFileReference.
*
* Yes, really. Because Xcode.
*/
public static final ImmutableSet<String> EXPLICIT_FILE_TYPE_BROKEN_UTIS =
ImmutableSet.of("file.xib");
/**
* Multimap of Apple UTI (Uniform Type Identifier) to file extension(s).
*/
public static final ImmutableMultimap<String, String> UTI_TO_FILE_EXTENSIONS;
static {
// Invert the map of (file extension -> UTI) pairs to
// (UTI -> [file extension 1, ...]) pairs.
ImmutableMultimap.Builder<String, String> builder = ImmutableMultimap.builder();
for (ImmutableMap.Entry<String, String> entry : FILE_EXTENSION_TO_UTI.entrySet()) {
builder.put(entry.getValue(), entry.getKey());
}
UTI_TO_FILE_EXTENSIONS = builder.build();
}
}
| Add stringsdict file type for Xcode project generation
Summary: A user asked us to add this new file type.
Test Plan: `ant java-test -Dtest.class=FileTypesTest`
| src/com/facebook/buck/apple/xcode/xcodeproj/FileTypes.java | Add stringsdict file type for Xcode project generation | <ide><path>rc/com/facebook/buck/apple/xcode/xcodeproj/FileTypes.java
<ide> .put("s", "sourcecode.asm")
<ide> .put("storyboard", "file.storyboard")
<ide> .put("strings", "text.plist.strings")
<add> .put("stringsdict", "text.plist.xml")
<ide> .put("tif", "image.tiff")
<ide> .put("tiff", "image.tiff")
<ide> .put("tcc", "sourcecode.cpp.cpp") |
|
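The buck row above adds a single `stringsdict` to `text.plist.xml` entry; because `UTI_TO_FILE_EXTENSIONS` is built by inverting `FILE_EXTENSION_TO_UTI`, the new extension is also reachable by UTI. A tiny lookup sketch follows; it assumes the buck sources and Guava are on the classpath, and the printed values follow from the map contents shown in the diff.

```java
import com.facebook.buck.apple.xcode.xcodeproj.FileTypes;

// Forward and inverse lookups around the entry added by the commit above.
public class FileTypesLookupSketch {
    public static void main(String[] args) {
        // Extension -> UTI, straight from FILE_EXTENSION_TO_UTI:
        System.out.println(FileTypes.FILE_EXTENSION_TO_UTI.get("stringsdict"));
        // prints: text.plist.xml

        // UTI -> extensions via the inverted multimap; "plist" was already
        // mapped to the same UTI, so both extensions come back:
        System.out.println(FileTypes.UTI_TO_FILE_EXTENSIONS.get("text.plist.xml"));
        // prints: [plist, stringsdict]
    }
}
```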
Java | apache-2.0 | 3a028a2a94d6a18d920e80f076ddbad2d27f1ee5 | 0 | Hipparchus-Math/hipparchus,sdinot/hipparchus,apache/commons-math,apache/commons-math,sdinot/hipparchus,apache/commons-math,apache/commons-math,Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus,sdinot/hipparchus,sdinot/hipparchus,Hipparchus-Math/hipparchus | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math.stat.descriptive.moment;
import java.io.Serializable;
import org.apache.commons.math.MathRuntimeException;
import org.apache.commons.math.stat.descriptive.AbstractUnivariateStatistic;
/**
* <p>Computes the semivariance of a set of values with respect to a given cutoff value.
* We define the <i>downside semivariance</i> of a set of values <code>x</code>
* against the <i>cutoff value</i> <code>cutoff</code> to be <br/>
* <code>Σ (x[i] - target)<sup>2</sup> / df</code> <br/>
* where the sum is taken over all <code>i</code> such that <code>x[i] < cutoff</code>
* and <code>df</code> is the length of <code>x</code> (non-bias-corrected) or
* one less than this number (bias corrected). The <i>upside semivariance</i>
* is defined similarly, with the sum taken over values of <code>x</code> that
* exceed the cutoff value.</p>
*
* <p>The cutoff value defaults to the mean, bias correction defaults to <code>true</code>
* and the "variance direction" (upside or downside) defaults to downside. The variance direction
* and bias correction may be set using property setters or their values can provided as
* parameters to {@link #evaluate(double[], double, Direction, boolean, int, int)}.</p>
*
* <p>If the input array is null, <code>evaluate</code> methods throw
* <code>IllegalArgumentException.</code> If the array has length 1, <code>0</code>
* is returned, regardless of the value of the <code>cutoff.</code>
*
* <p><strong>Note that this class is not intended to be threadsafe.</strong> If
* multiple threads access an instance of this class concurrently, and one or
* more of these threads invoke property setters, external synchronization must
* be provided to ensure correct results.</p>
*
* @version $Revision$ $Date$
* @since 2.1
*/
public class SemiVariance extends AbstractUnivariateStatistic implements Serializable {
/**
* The UPSIDE Direction is used to specify that the observations above the
* cutoff point will be used to calculate SemiVariance.
*/
public static final Direction UPSIDE_VARIANCE = Direction.UPSIDE;
/**
* The DOWNSIDE Direction is used to specify that the observations below
* the cutoff point will be used to calculate SemiVariance
*/
public static final Direction DOWNSIDE_VARIANCE = Direction.DOWNSIDE;
/** Serializable version identifier */
private static final long serialVersionUID = -2653430366886024994L;
/**
* Determines whether or not bias correction is applied when computing the
* value of the statisic. True means that bias is corrected.
*/
private boolean biasCorrected = true;
/**
* Determines whether to calculate downside or upside SemiVariance.
*/
private Direction varianceDirection = Direction.DOWNSIDE;
/**
* Constructs a SemiVariance with default (true) <code>biasCorrected</code>
* property and default (Downside) <code>varianceDirection</code> property.
*/
public SemiVariance() {
}
/**
* Constructs a SemiVariance with the specified <code>biasCorrected</code>
* property and default (Downside) <code>varianceDirection</code> property.
*
* @param biasCorrected setting for bias correction - true means
* bias will be corrected and is equivalent to using the argumentless
* constructor
*/
public SemiVariance(final boolean biasCorrected) {
this.biasCorrected = biasCorrected;
}
/**
* Constructs a SemiVariance with the specified <code>Direction</code> property
* and default (true) <code>biasCorrected</code> property
*
* @param direction setting for the direction of the SemiVariance
* to calculate
*/
public SemiVariance(final Direction direction) {
this.varianceDirection = direction;
}
/**
* Constructs a SemiVariance with the specified <code>isBiasCorrected</code>
* property and the specified <code>Direction</code> property.
*
* @param corrected setting for bias correction - true means
* bias will be corrected and is equivalent to using the argumentless
* constructor
*
* @param direction setting for the direction of the SemiVariance
* to calculate
*/
public SemiVariance(final boolean corrected, final Direction direction) {
this.biasCorrected = corrected;
this.varianceDirection = direction;
}
/**
* Copy constructor, creates a new {@code SemiVariance} identical
* to the {@code original}
*
* @param original the {@code SemiVariance} instance to copy
*/
public SemiVariance(final SemiVariance original) {
copy(original, this);
}
/**
* {@inheritDoc}
*/
@Override
public SemiVariance copy() {
SemiVariance result = new SemiVariance();
copy(this, result);
return result;
}
/**
* Copies source to dest.
* <p>Neither source nor dest can be null.</p>
*
* @param source SemiVariance to copy
* @param dest SemiVariance to copy to
* @throws NullPointerException if either source or dest is null
*/
public static void copy(final SemiVariance source, SemiVariance dest) {
dest.biasCorrected = source.biasCorrected;
dest.varianceDirection = source.varianceDirection;
}
/**
* This method calculates {@link SemiVariance} for the entire array against the mean, using
* instance properties varianceDirection and biasCorrection.
*
* @param values the input array
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*
*/
@Override
public double evaluate(final double[] values) {
if (values == null) {
throw MathRuntimeException.createIllegalArgumentException("input values array is null");
}
return evaluate(values, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the mean, using
* instance properties varianceDirection and biasCorrection.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param start index of the first array element to include
* @param length the number of elements to include
* @return the SemiVariance
* @throws IllegalArgumentException if the parameters are not valid
*
*/
@Override
public double evaluate(final double[] values, final int start, final int length) {
double m = (new Mean()).evaluate(values, start, length);
return evaluate(values, m, varianceDirection, biasCorrected, 0, values.length);
}
/**
* This method calculates {@link SemiVariance} for the entire array against the mean, using
* the current value of the biasCorrection instance property.
*
* @param values the input array
* @param direction the {@link Direction} of the semivariance
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*
*/
public double evaluate(final double[] values, Direction direction) {
double m = (new Mean()).evaluate(values);
return evaluate (values, m, direction, biasCorrected, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the cutoff, using
* instance properties variancDirection and biasCorrection.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param cutoff the reference point
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*/
public double evaluate(final double[] values, final double cutoff) {
return evaluate(values, cutoff, varianceDirection, biasCorrected, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the cutoff in the
* given direction, using the current value of the biasCorrection instance property.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param cutoff the reference point
* @param direction the {@link Direction} of the semivariance
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*/
public double evaluate(final double[] values, final double cutoff, final Direction direction) {
return evaluate(values, cutoff, direction, biasCorrected, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the cutoff
* in the given direction with the provided bias correction.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param cutoff the reference point
* @param direction the {@link Direction} of the semivariance
* @param corrected the BiasCorrection flag
* @param start index of the first array element to include
* @param length the number of elements to include
* @return the SemiVariance
* @throws IllegalArgumentException if the parameters are not valid
*
*/
public double evaluate (final double[] values, final double cutoff, final Direction direction,
final boolean corrected, final int start, final int length) {
test(values, start, length);
if (values.length == 0) {
return Double.NaN;
} else {
if (values.length == 1) {
return 0.0;
} else {
final boolean booleanDirection = direction.getDirection();
double dev = 0.0;
double sumsq = 0.0;
for (int i = start; i < length; i++) {
if ((values[i] > cutoff) == booleanDirection) {
dev = values[i] - cutoff;
sumsq += dev * dev;
}
}
if (corrected) {
return sumsq / (length - 1.0);
} else {
return sumsq / length;
}
}
}
}
/**
* Returns true iff biasCorrected property is set to true.
*
* @return the value of biasCorrected.
*/
public boolean isBiasCorrected() {
return biasCorrected;
}
/**
* Sets the biasCorrected property.
*
* @param biasCorrected new biasCorrected property value
*/
public void setBiasCorrected(boolean biasCorrected) {
this.biasCorrected = biasCorrected;
}
/**
* Returns the varianceDirection property.
*
* @return the varianceDirection
*/
public Direction getVarianceDirection () {
return varianceDirection;
}
/**
* Sets the variance direction
*
* @param varianceDirection the direction of the semivariance
*/
public void setVarianceDirection(Direction varianceDirection) {
this.varianceDirection = varianceDirection;
}
/**
* The direction of the semivariance - either upside or downside. The direction
* is represented by boolean, with true corresponding to UPSIDE semivariance.
*/
public enum Direction {
/**
* The UPSIDE Direction is used to specify that the observations above the
* cutoff point will be used to calculate SemiVariance
*/
UPSIDE (true),
/**
* The DOWNSIDE Direction is used to specify that the observations below
* the cutoff point will be used to calculate SemiVariance
*/
DOWNSIDE (false);
/**
* boolean value UPSIDE <-> true
*/
private boolean direction;
/**
* Create a Direction with the given value.
*
* @param b boolean value representing the Direction. True corresponds to UPSIDE.
*/
Direction (boolean b) {
direction = b;
}
/**
* Returns the value of this Direction. True corresponds to UPSIDE.
*
* @return true if direction is UPSIDE; false otherwise
*/
boolean getDirection () {
return direction;
}
}
}
| src/main/java/org/apache/commons/math/stat/descriptive/moment/SemiVariance.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math.stat.descriptive.moment;
import java.io.Serializable;
import org.apache.commons.math.MathRuntimeException;
import org.apache.commons.math.stat.descriptive.AbstractUnivariateStatistic;
/**
* <p>Computes the semivariance of a set of values with respect to a given cutoff value.
* We define the <i>downside semivariance</i> of a set of values <code>x</code>
* against the <i>cutoff value</i> <code>cutoff</code> to be <br/>
* <code>Σ (x[i] - target)<sup>2</sup> / df</code> <br/>
* where the sum is taken over all <code>i</code> such that <code>x[i] < cutoff</code>
* and <code>df</code> is the length of <code>x</code> (non-bias-corrected) or
* one less than this number (bias corrected). The <i>upside semivariance</i>
* is defined similarly, with the sum taken over values of <code>x</code> that
* exceed the cutoff value.</p>
*
* <p>The cutoff value defaults to the mean, bias correction defaults to <code>true</code>
* and the "variance direction" (upside or downside) defaults to downside. The variance direction
* and bias correction may be set using property setters or their values can provided as
* parameters to {@link #evaluate(double[], double, Direction, boolean, int, int)}.</p>
*
* <p>If the input array is null, <code>evaluate</code> methods throw
* <code>IllegalArgumentException.</code> If the array has length 1, <code>0</code>
* is returned, regardless of the value of the <code>cutoff.</code>
*
* <p><strong>Note that this class is not intended to be threadsafe.</strong> If
* multiple threads access an instance of this class concurrently, and one or
* more of these threads invoke property setters, external synchronization must
* be provided to ensure correct results.</p>
*
* @version $Revision$ $Date$
* @since 2.1
*/
public class SemiVariance extends AbstractUnivariateStatistic implements Serializable {
/** Serializable version identifier */
private static final long serialVersionUID = -2653430366886024994L;
/**
* Determines whether or not bias correction is applied when computing the
* value of the statisic. True means that bias is corrected.
*/
private boolean biasCorrected = true;
/**
* Determines whether to calculate downside or upside SemiVariance.
*/
private Direction varianceDirection = Direction.DOWNSIDE;
/**
* The UPSIDE Direction is used to specify that the observations above the
* cutoff point will be used to calculate SemiVariance.
*/
public static final Direction UPSIDE_VARIANCE = Direction.UPSIDE;
/**
* The DOWNSIDE Direction is used to specify that the observations below
* the cutoff point will be used to calculate SemiVariance
*/
public static final Direction DOWNSIDE_VARIANCE = Direction.DOWNSIDE;
/**
* Constructs a SemiVariance with default (true) <code>biasCorrected</code>
* property and default (Downside) <code>varianceDirection</code> property.
*/
public SemiVariance() {
}
/**
* Constructs a SemiVariance with the specified <code>biasCorrected</code>
* property and default (Downside) <code>varianceDirection</code> property.
*
* @param biasCorrected setting for bias correction - true means
* bias will be corrected and is equivalent to using the argumentless
* constructor
*/
public SemiVariance(final boolean biasCorrected) {
this.biasCorrected = biasCorrected;
}
/**
* Constructs a SemiVariance with the specified <code>Direction</code> property
* and default (true) <code>biasCorrected</code> property
*
* @param direction setting for the direction of the SemiVariance
* to calculate
*/
public SemiVariance(final Direction direction) {
this.varianceDirection = direction;
}
/**
* Constructs a SemiVariance with the specified <code>isBiasCorrected</code>
* property and the specified <code>Direction</code> property.
*
* @param corrected setting for bias correction - true means
* bias will be corrected and is equivalent to using the argumentless
* constructor
*
* @param direction setting for the direction of the SemiVariance
* to calculate
*/
public SemiVariance(final boolean corrected, final Direction direction) {
this.biasCorrected = corrected;
this.varianceDirection = direction;
}
/**
* Copy constructor, creates a new {@code SemiVariance} identical
* to the {@code original}
*
* @param original the {@code SemiVariance} instance to copy
*/
public SemiVariance(final SemiVariance original) {
copy(original, this);
}
/**
* {@inheritDoc}
*/
@Override
public SemiVariance copy() {
SemiVariance result = new SemiVariance();
copy(this, result);
return result;
}
/**
* Copies source to dest.
* <p>Neither source nor dest can be null.</p>
*
* @param source SemiVariance to copy
* @param dest SemiVariance to copy to
* @throws NullPointerException if either source or dest is null
*/
public static void copy(final SemiVariance source, SemiVariance dest) {
dest.biasCorrected = source.biasCorrected;
dest.varianceDirection = source.varianceDirection;
}
/**
* This method calculates {@link SemiVariance} for the entire array against the mean, using
* instance properties varianceDirection and biasCorrection.
*
* @param values the input array
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*
*/
@Override
public double evaluate(final double[] values) {
if (values == null) {
throw MathRuntimeException.createIllegalArgumentException("input values array is null");
}
return evaluate(values, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the mean, using
* instance properties varianceDirection and biasCorrection.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param start index of the first array element to include
* @param length the number of elements to include
* @return the SemiVariance
* @throws IllegalArgumentException if the parameters are not valid
*
*/
@Override
public double evaluate(final double[] values, final int start, final int length) {
double m = (new Mean()).evaluate(values, start, length);
return evaluate(values, m, varianceDirection, biasCorrected, 0, values.length);
}
/**
* This method calculates {@link SemiVariance} for the entire array against the mean, using
* the current value of the biasCorrection instance property.
*
* @param values the input array
* @param direction the {@link Direction} of the semivariance
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*
*/
public double evaluate(final double[] values, Direction direction) {
double m = (new Mean()).evaluate(values);
return evaluate (values, m, direction, biasCorrected, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the cutoff, using
* instance properties variancDirection and biasCorrection.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param cutoff the reference point
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*/
public double evaluate(final double[] values, final double cutoff) {
return evaluate(values, cutoff, varianceDirection, biasCorrected, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the cutoff in the
* given direction, using the current value of the biasCorrection instance property.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param cutoff the reference point
* @param direction the {@link Direction} of the semivariance
* @return the SemiVariance
* @throws IllegalArgumentException if values is null
*/
public double evaluate(final double[] values, final double cutoff, final Direction direction) {
return evaluate(values, cutoff, direction, biasCorrected, 0, values.length);
}
/**
* <p>Returns the {@link SemiVariance} of the designated values against the cutoff
* in the given direction with the provided bias correction.</p>
*
* <p>Returns <code>NaN</code> if the array is empty and throws
* <code>IllegalArgumentException</code> if the array is null.</p>
*
* @param values the input array
* @param cutoff the reference point
* @param direction the {@link Direction} of the semivariance
* @param corrected the BiasCorrection flag
* @param start index of the first array element to include
* @param length the number of elements to include
* @return the SemiVariance
* @throws IllegalArgumentException if the parameters are not valid
*
*/
public double evaluate (final double[] values, final double cutoff, final Direction direction,
final boolean corrected, final int start, final int length) {
test(values, start, length);
if (values.length == 0) {
return Double.NaN;
} else {
if (values.length == 1) {
return 0.0;
} else {
final boolean booleanDirection = direction.getDirection();
double dev = 0.0;
double sumsq = 0.0;
for (int i = start; i < length; i++) {
if ((values[i] > cutoff) == booleanDirection) {
dev = values[i] - cutoff;
sumsq += dev * dev;
}
}
if (corrected) {
return sumsq / (length - 1.0);
} else {
return sumsq / length;
}
}
}
}
/**
* Returns true iff biasCorrected property is set to true.
*
* @return the value of biasCorrected.
*/
public boolean isBiasCorrected() {
return biasCorrected;
}
/**
* Sets the biasCorrected property.
*
* @param biasCorrected new biasCorrected property value
*/
public void setBiasCorrected(boolean biasCorrected) {
this.biasCorrected = biasCorrected;
}
/**
* Returns the varianceDirection property.
*
* @return the varianceDirection
*/
public Direction getVarianceDirection () {
return varianceDirection;
}
/**
* Sets the variance direction
*
* @param varianceDirection the direction of the semivariance
*/
public void setVarianceDirection(Direction varianceDirection) {
this.varianceDirection = varianceDirection;
}
/**
* The direction of the semivariance - either upside or downside. The direction
* is represented by boolean, with true corresponding to UPSIDE semivariance.
*/
public enum Direction {
/**
* The UPSIDE Direction is used to specify that the observations above the
* cutoff point will be used to calculate SemiVariance
*/
UPSIDE (true),
/**
* The DOWNSIDE Direction is used to specify that the observations below
* the cutoff point will be used to calculate SemiVariance
*/
DOWNSIDE (false);
/**
* boolean value UPSIDE <-> true
*/
private boolean direction;
/**
* Create a Direction with the given value.
*
* @param b boolean value representing the Direction. True corresponds to UPSIDE.
*/
Direction (boolean b) {
direction = b;
}
/**
* Returns the value of this Direction. True corresponds to UPSIDE.
*
* @return true if direction is UPSIDE; false otherwise
*/
boolean getDirection () {
return direction;
}
}
}
| fixed variables declaration order to match checkstyle rules
git-svn-id: 80d496c472b8b763a5e941dba212da9bf48aeceb@917275 13f79535-47bb-0310-9956-ffa450edef68
| src/main/java/org/apache/commons/math/stat/descriptive/moment/SemiVariance.java | fixed variables declaration order to match checkstyle rules | <ide><path>rc/main/java/org/apache/commons/math/stat/descriptive/moment/SemiVariance.java
<ide>
<ide> public class SemiVariance extends AbstractUnivariateStatistic implements Serializable {
<ide>
<add> /**
<add> * The UPSIDE Direction is used to specify that the observations above the
<add> * cutoff point will be used to calculate SemiVariance.
<add> */
<add> public static final Direction UPSIDE_VARIANCE = Direction.UPSIDE;
<add>
<add> /**
<add> * The DOWNSIDE Direction is used to specify that the observations below
<add> * the cutoff point will be used to calculate SemiVariance
<add> */
<add> public static final Direction DOWNSIDE_VARIANCE = Direction.DOWNSIDE;
<add>
<ide> /** Serializable version identifier */
<ide> private static final long serialVersionUID = -2653430366886024994L;
<ide>
<ide> * Determines whether to calculate downside or upside SemiVariance.
<ide> */
<ide> private Direction varianceDirection = Direction.DOWNSIDE;
<del>
<del> /**
<del> * The UPSIDE Direction is used to specify that the observations above the
<del> * cutoff point will be used to calculate SemiVariance.
<del> */
<del> public static final Direction UPSIDE_VARIANCE = Direction.UPSIDE;
<del>
<del> /**
<del> * The DOWNSIDE Direction is used to specify that the observations below
<del> * the cutoff point will be used to calculate SemiVariance
<del> */
<del> public static final Direction DOWNSIDE_VARIANCE = Direction.DOWNSIDE;
<ide>
<ide> /**
<ide> * Constructs a SemiVariance with default (true) <code>biasCorrected</code> |
|
Java | lgpl-2.1 | 79cde8f9eb1a7d78651cd94625e5ca0da78868fd | 0 | threerings/narya,threerings/narya,threerings/narya,threerings/narya,threerings/narya | //
// $Id: AutoFringer.java,v 1.4 2002/04/06 03:52:28 ray Exp $
package com.threerings.miso.tile;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.util.Random;
import java.util.HashMap;
import com.threerings.media.Log;
import com.threerings.media.tile.NoSuchTileException;
import com.threerings.media.tile.NoSuchTileSetException;
import com.threerings.media.tile.Tile;
import com.threerings.media.tile.TileLayer;
import com.threerings.media.tile.TileManager;
import com.threerings.media.tile.TileSet;
import com.threerings.media.tile.TileUtil;
import com.threerings.media.tile.UniformTileSet;
import com.threerings.media.util.ImageUtil;
import com.threerings.miso.scene.MisoSceneModel;
/**
* Automatically fringes a scene according to the rules in the
* FringeConfiguration.
*/
public class AutoFringer
{
/**
* Construct an AutoFringer
*/
public AutoFringer (FringeConfiguration fringeconf, TileManager tmgr)
{
_fringeconf = fringeconf;
_tmgr = tmgr;
}
/**
* Automatically fringe the entire scene.
*/
public void fringe (MisoSceneModel scene, TileLayer fringelayer)
{
fringe(scene, fringelayer,
new Rectangle(0, 0, scene.width, scene.height));
}
/**
* Automatically generate fringe information for the specified
* rectangular region <strong>and the tiles they influence</strong>
* and insert into the fringe TileLayer.
*/
public void fringe (MisoSceneModel scene, TileLayer fringelayer,
Rectangle r)
{
// create a hash to cache our masks
HashMap maskcache = new HashMap();
int lastrow = Math.min(r.y + r.height + 1, scene.height);
int lastcol = Math.min(r.x + r.width + 1, scene.width);
for (int row = Math.max(r.y - 1, 0); row < lastrow; row++) {
for (int col = Math.max(r.x - 1, 0); col < lastcol; col++) {
fringelayer.setTile(col, row,
getFringeTile(scene, row, col, maskcache));
}
}
// and then we throw maskcache out...
}
/**
* Compute and return the fringe Tile to be inserted at the specified
* location.
*/
protected Tile getFringeTile (MisoSceneModel scene, int row, int col,
HashMap masks)
{
int hei = scene.height;
int wid = scene.width;
// get the tileset id of the base tile we are considering
int underset = scene.baseTileIds[row * wid + col] >> 16;
// hold the tileset of the base tile that will fringe on us
// (initially set to bogus value)
int overset = -1;
// the pieces of fringe that are turned on by influencing tiles
int fringebits = 0;
// walk through our influence tiles
for (int y=Math.max(0, row - 1); y < Math.min(hei, row + 2); y++) {
for (int x=Math.max(0, col - 1); x < Math.min(wid, col + 2); x++) {
// we sensibly do not consider ourselves
if ((x == col) && (y == row)) {
continue;
}
int baseset = scene.baseTileIds[y * wid + x] >> 16;
// if this set does not fringe on us, move on to the next
if (!_fringeconf.fringesOn(baseset, underset)) {
continue;
}
if (overset == -1) {
// if this is the first fringer we've seen
// remember that fact
overset = baseset;
} else if (overset != baseset) {
// oh no! two different fringes want to fringe this
// tile. We don't support that, so instead: no fringe!
Log.debug("Two different base tilesets affect fringe " +
"and so we fail with no fringe [x=" + col +
", y=" + row + "].");
return null;
}
// now turn on the appropriate fringebits
fringebits |= FLAGMATRIX[y - row + 1][x - col + 1];
}
}
// now we've looked at all the influencing tiles
// look up the appropriate fringe index according to which bits
// are turned on
int index = _fringeconf.getTileIndexFromFringeBits(fringebits);
if (index == -1) {
// our fringes do not specify a tile to use in this case.
return null;
}
try {
return getTile(overset, index, masks);
} catch (NoSuchTileException nste) {
Log.warning("Autofringer couldn't find a needed tile.");
return null;
} catch (NoSuchTileSetException nstse) {
Log.warning("Autofringer couldn't find a needed tileset.");
return null;
}
}
protected Tile getTile (int baseset, int index, HashMap masks)
throws NoSuchTileException, NoSuchTileSetException
{
FringeConfiguration.FringeTileSetRecord tsr =
_fringeconf.getRandomFringe(baseset, rando);
int fringeset = tsr.fringe_tsid;
if (!tsr.mask) {
// oh good, this is easy.
return _tmgr.getTile(fringeset, index);
}
// otherwise, it's a mask.. look for it in the cache..
Long maskkey = new Long(
(((long) baseset) << 32) + (fringeset << 16) + index);
Tile t = (Tile) masks.get(maskkey);
if (t == null) {
t = new FringeTile(ImageUtil.composeMaskedImage(
(BufferedImage) _tmgr.getTile(fringeset, index).getImage(),
(BufferedImage) _tmgr.getTile(baseset, 0).getImage()));
masks.put(maskkey, t);
Log.debug("created cached fringe image");
}
return t;
}
// fringe bits
// see docs/miso/fringebits.png
//
private static final int NORTH = 1 << 0;
private static final int NORTHEAST = 1 << 1;
private static final int EAST = 1 << 2;
private static final int SOUTHEAST = 1 << 3;
private static final int SOUTH = 1 << 4;
private static final int SOUTHWEST = 1 << 5;
private static final int WEST = 1 << 6;
private static final int NORTHWEST = 1 << 7;
// A matrix mapping adjacent tiles to which fringe bits
// they affect.
// (x and y are offset by +1, since we can't have -1 as an array index)
// again, see docs/miso/fringebits.png
//
private static final int[][] FLAGMATRIX = {
{ NORTHEAST, (NORTHEAST | EAST | SOUTHEAST), SOUTHEAST },
{ (NORTHWEST | NORTH | NORTHEAST), 0, (SOUTHEAST | SOUTH | SOUTHWEST) },
{ NORTHWEST, (NORTHWEST | WEST | SOUTHWEST), SOUTHWEST }
};
/** Our tile manager. */
protected static TileManager _tmgr;
/** Our fringe configuration. */
protected static FringeConfiguration _fringeconf;
/** Our random # generator. */
// this may change.. or we may seed it before we do any scene
// with a number deterministicly generated from that scene
protected static Random rando = new Random();
}
| src/java/com/threerings/miso/tile/AutoFringer.java | //
// $Id: AutoFringer.java,v 1.3 2002/04/06 03:43:24 ray Exp $
package com.threerings.miso.tile;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.util.Random;
import java.util.HashMap;
import com.threerings.media.Log;
import com.threerings.media.tile.NoSuchTileException;
import com.threerings.media.tile.NoSuchTileSetException;
import com.threerings.media.tile.Tile;
import com.threerings.media.tile.TileLayer;
import com.threerings.media.tile.TileManager;
import com.threerings.media.tile.TileSet;
import com.threerings.media.tile.TileUtil;
import com.threerings.media.tile.UniformTileSet;
import com.threerings.media.util.ImageUtil;
import com.threerings.miso.scene.MisoSceneModel;
/**
* Automatically fringes a scene according to the rules in the
* FringeConfiguration.
*/
public class AutoFringer
{
/**
* Construct an AutoFringer
*/
public AutoFringer (FringeConfiguration fringeconf, TileManager tmgr)
{
_fringeconf = fringeconf;
_tmgr = tmgr;
}
/**
* Automatically fringe the entire scene.
*/
public void fringe (MisoSceneModel scene, TileLayer fringelayer)
{
fringe(scene, fringelayer,
new Rectangle(0, 0, scene.width, scene.height));
}
/**
* Automatically generate fringe information for the specified rectangular
* region AND THE TILES THEY INFLUENCE and insert into the fringe TileLayer.
*/
public void fringe (MisoSceneModel scene, TileLayer fringelayer,
Rectangle r)
// int startx, int starty, int width, int height)
{
// create a hash to cache our masks
HashMap maskcache = new HashMap();
int lastrow = Math.min(r.y + r.height + 1, scene.height);
int lastcol = Math.min(r.x + r.width + 1, scene.width);
for (int row = Math.max(r.y - 1, 0); row < lastrow; row++) {
for (int col = Math.max(r.x - 1, 0); col < lastcol; col++) {
fringelayer.setTile(col, row,
getFringeTile(scene, row, col, maskcache));
}
}
// and then we throw maskcache out...
}
/**
* The actual computation of fringe for a specified location.
*
* @return a fully qualified fringe tileid for the specified location
* (0 for no fringe)
*/
protected Tile getFringeTile (MisoSceneModel scene, int row, int col,
HashMap masks)
{
int hei = scene.height;
int wid = scene.width;
// get the tileset id of the base tile we are considering
int underset = scene.baseTileIds[row * wid + col] >> 16;
// hold the tileset of the base tile that will fringe on us
// (initially set to bogus value)
int overset = -1;
// the pieces of fringe that are turned on by influencing tiles
int fringebits = 0;
// walk through our influence tiles
for (int y=Math.max(0, row - 1); y < Math.min(hei, row + 2); y++) {
for (int x=Math.max(0, col - 1); x < Math.min(wid, col + 2); x++) {
// we sensibly do not consider ourselves
if ((x == col) && (y == row)) {
continue;
}
int baseset = scene.baseTileIds[y * wid + x] >> 16;
// if this set does not fringe on us, move on to the next
if (!_fringeconf.fringesOn(baseset, underset)) {
continue;
}
if (overset == -1) {
// if this is the first fringer we've seen
// remember that fact
overset = baseset;
} else if (overset != baseset) {
// oh no! two different fringes want to fringe this
// tile. We don't support that, so instead: no fringe!
Log.debug("Two different base tilesets affect fringe " +
"and so we fail with no fringe [x=" + col +
", y=" + row + "].");
return null;
}
// now turn on the appropriate fringebits
fringebits |= FLAGMATRIX[y - row + 1][x - col + 1];
}
}
// now we've looked at all the influencing tiles
// look up the appropriate fringe index according to which bits
// are turned on
int index = _fringeconf.getTileIndexFromFringeBits(fringebits);
if (index == -1) {
// our fringes do not specify a tile to use in this case.
return null;
}
try {
return getTile(overset, index, masks);
} catch (NoSuchTileException nste) {
Log.warning("Autofringer couldn't find a needed tile.");
return null;
} catch (NoSuchTileSetException nstse) {
Log.warning("Autofringer couldn't find a needed tileset.");
return null;
}
}
protected Tile getTile (int baseset, int index, HashMap masks)
throws NoSuchTileException, NoSuchTileSetException
{
FringeConfiguration.FringeTileSetRecord tsr =
_fringeconf.getRandomFringe(baseset, rando);
int fringeset = tsr.fringe_tsid;
if (!tsr.mask) {
// oh good, this is easy.
return _tmgr.getTile(fringeset, index);
}
// otherwise, it's a mask.. look for it in the cache..
Long maskkey = new Long(
(((long) baseset) << 32) + (fringeset << 16) + index);
Tile t = (Tile) masks.get(maskkey);
if (t == null) {
t = new FringeTile(ImageUtil.composeMaskedImage(
(BufferedImage) _tmgr.getTile(fringeset, index).getImage(),
(BufferedImage) _tmgr.getTile(baseset, 0).getImage()));
masks.put(maskkey, t);
Log.debug("created cached fringe image");
}
return t;
}
// fringe bits
// see docs/miso/fringebits.png
//
private static final int NORTH = 1 << 0;
private static final int NORTHEAST = 1 << 1;
private static final int EAST = 1 << 2;
private static final int SOUTHEAST = 1 << 3;
private static final int SOUTH = 1 << 4;
private static final int SOUTHWEST = 1 << 5;
private static final int WEST = 1 << 6;
private static final int NORTHWEST = 1 << 7;
// A matrix mapping adjacent tiles to which fringe bits
// they affect.
// (x and y are offset by +1, since we can't have -1 as an array index)
// again, see docs/miso/fringebits.png
//
private static final int[][] FLAGMATRIX = {
{ NORTHEAST, (NORTHEAST | EAST | SOUTHEAST), SOUTHEAST },
{ (NORTHWEST | NORTH | NORTHEAST), 0, (SOUTHEAST | SOUTH | SOUTHWEST) },
{ NORTHWEST, (NORTHWEST | WEST | SOUTHWEST), SOUTHWEST }
};
/** Our tile manager. */
protected static TileManager _tmgr;
/** Our fringe configuration. */
protected static FringeConfiguration _fringeconf;
/** Our random # generator. */
// this may change.. or we may seed it before we do any scene
// with a number deterministicly generated from that scene
protected static Random rando = new Random();
}
| cleanup
git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@1206 542714f4-19e9-0310-aa3c-eee0fc999fb1
| src/java/com/threerings/miso/tile/AutoFringer.java | cleanup | <ide><path>rc/java/com/threerings/miso/tile/AutoFringer.java
<ide> //
<del>// $Id: AutoFringer.java,v 1.3 2002/04/06 03:43:24 ray Exp $
<add>// $Id: AutoFringer.java,v 1.4 2002/04/06 03:52:28 ray Exp $
<ide>
<ide> package com.threerings.miso.tile;
<ide>
<ide> }
<ide>
<ide> /**
<del> * Automatically generate fringe information for the specified rectangular
<del> * region AND THE TILES THEY INFLUENCE and insert into the fringe TileLayer.
<add> * Automatically generate fringe information for the specified
<add> * rectangular region <strong>and the tiles they influence</strong>
<add> * and insert into the fringe TileLayer.
<ide> */
<ide> public void fringe (MisoSceneModel scene, TileLayer fringelayer,
<ide> Rectangle r)
<del> // int startx, int starty, int width, int height)
<ide> {
<ide> // create a hash to cache our masks
<ide> HashMap maskcache = new HashMap();
<ide> }
<ide>
<ide> /**
<del> * The actual computation of fringe for a specified location.
<del> *
<del> * @return a fully qualified fringe tileid for the specified location
<del> * (0 for no fringe)
<add> * Compute and return the fringe Tile to be inserted at the specified
<add> * location.
<ide> */
<ide> protected Tile getFringeTile (MisoSceneModel scene, int row, int col,
<ide> HashMap masks) |
|
JavaScript | mit | 9a95621520c7b65fc826b70bd1e25fab6def91dd | 0 | ioBroker/ioBroker.hm-rpc,ioBroker/ioBroker.hm-rpc,ioBroker/ioBroker.hm-rpc,ioBroker/ioBroker.hm-rpc | /*
* Copyright (c) 2014-2019 bluefox <[email protected]>
*
* Copyright (c) 2014 hobbyquaker
*
* The MIT License (MIT)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/* jshint -W097 */
/* jshint strict: false */
/*jslint node: true */
'use strict';
const utils = require('@iobroker/adapter-core'); // Get common adapter utils
const adapterName = require('./package.json').name.split('.').pop();
const images = require('./lib/images');
const crypto = require('./lib/crypto'); // Provides encrypt and decrypt
let connected = false;
const displays = {};
let adapter;
const FORBIDDEN_CHARS = /[\][*,;'"`<>\\\s?]/g;
// msgBuffer = [{line: line2, icon: icon2}, {line: line3, icon: icon3}, {line: '', icon: ''}];
// Icons:
// 0x80 AUS
// 0x81 EIN
// 0x82 OFFEN
// 0x83 geschlossen
// 0x84 fehler
// 0x85 alles ok
// 0x86 information
// 0x87 neue nachricht
// 0x88 servicemeldung
// Tonfolgen
// 0xC0 AUS
// 0xC1 LANG LANG
// 0xC2 LANG KURZ
// 0xC3 LANG KURZ KURZ
// 0xC4 KURZ
// 0xC5 KURZ KURZ
// 0xC6 LANG
// 0xC7
// 0xC9
// 0xCA
// Signale
// 0xF0 AUS
// 0xF1 Rotes Blitzen
// 0xF2 Grünes Blitzen
// 0xF3 Orangenes Blitzen
function number2hex(num) {
if (typeof num === 'number') {
num = num.toString(16).toUpperCase();
if (num.length < 2) num = '0' + num;
num = '0x' + num;
}
return num;
}
function combineEPaperCommand(lines, signal, ton, repeats, offset) {
signal = number2hex(signal || '0xF0');
ton = number2hex(ton || '0xC0');
const substitutions = {
'A': '0x41',
'B': '0x42',
'C': '0x43',
'D': '0x44',
'E': '0x45',
'F': '0x46',
'G': '0x47',
'H': '0x48',
'I': '0x49',
'J': '0x4A',
'K': '0x4B',
'L': '0x4C',
'M': '0x4D',
'N': '0x4E',
'O': '0x4F',
'P': '0x50',
'Q': '0x51',
'R': '0x52',
'S': '0x53',
'T': '0x54',
'U': '0x55',
'V': '0x56',
'W': '0x57',
'X': '0x58',
'Y': '0x59',
'Z': '0x5A',
'a': '0x61',
'b': '0x62',
'c': '0x63',
'd': '0x64',
'e': '0x65',
'f': '0x66',
'g': '0x67',
'h': '0x68',
'i': '0x69',
'j': '0x6A',
'k': '0x6B',
'l': '0x6C',
'm': '0x6D',
'n': '0x6E',
'o': '0x6F',
'p': '0x70',
'q': '0x71',
'r': '0x72',
's': '0x73',
't': '0x74',
'u': '0x75',
'v': '0x76',
'w': '0x77',
'x': '0x78',
'y': '0x79',
'z': '0x7A',
'0': '0x30',
'1': '0x31',
'2': '0x32',
'3': '0x33',
'4': '0x34',
'5': '0x35',
'6': '0x36',
'7': '0x37',
'8': '0x38',
'9': '0x39',
' ': '0x20',
'!': '0x21',
'"': '0x22',
'%': '0x25',
'&': '0x26',
'=': '0x27',
'(': '0x28',
')': '0x29',
'*': '0x2A',
'+': '0x2B',
',': '0x2C',
'-': '0x2D',
'.': '0x2E',
'/': '0x2F',
'Ä': '0x5B',
'Ö': '0x23',
'Ü': '0x24',
'ä': '0x7B',
'ö': '0x7C',
'ü': '0x7D',
'ß': '0x5F',
':': '0x3A',
';': '0x3B',
'@': '0x40',
'>': '0x3E'
};
offset = 10;
repeats = 1;
let command = '0x02,0x0A';
for (const li of lines) {
const line = li.line;
const icon = li.icon;
if (line || icon) {
command = command + ',0x12';
let i;
if ((line.substring(0, 2) === '0x') && (line.length === 4)) {
command = command + ',' + line;
i = 12;
} else {
i = 0;
}
while ((i < line.length) && (i < 12)) {
command += ',' + substitutions[line[i]] || '0x2A';
i++;
}
if (icon) {
command += ',0x13,' + number2hex(icon);
}
}
command = command + ',0x0A';
}
command = command + ',0x14,' + ton + ',0x1C,';
if (repeats < 1) {
command = command + '0xDF,0x1D,';
} else {
if (repeats < 11) {
command = command + '0xD' + (repeats - 1) + ',0x1D,';
} else {
if (repeats === 11) {
command = command + '0xDA,0x1D,';
} else {
if (repeats === 12) {
command = command + '0xDB,0x1D,';
} else {
if (repeats === 13) {
command = command + '0xDC,0x1D,';
} else {
if (repeats === 14) {
command = command + '0xDD,0x1D,';
} else {
command = command + '0xDE,0x1D,';
}
}
}
}
}
}
if (offset <= 10) {
command = command + '0xE0,0x16,';
} else {
if (offset <= 100) {
command = command + '0xE' + (offset - 1 / 10) + ',0x16,';
} else {
if (offset <= 110) {
command = command + '0xEA,0x16,';
} else {
if (offset <= 120) {
command = command + '0xEB,0x16,';
} else {
if (offset <= 130) {
command = command + '0xEC,0x16,';
} else {
if (offset <= 140) {
command = command + '0xED,0x16,';
} else {
if (offset <= 150) {
command = command + '0xEE,0x16,';
} else {
command = command + '0xEF,0x16,';
}
}
}
}
}
}
}
command = command + signal + ',0x03';
return command;
}
function controlEPaper(id, data) {
const tmp = id.split('.');
tmp[3] = '3';
tmp[4] = 'SUBMIT';
const val = combineEPaperCommand(data.lines, data.signal || '0xF0', data.tone || '0xC0');
try {
if (rpcClient && connected) {
rpcClient.methodCall('setValue', [tmp[2] + ':' + tmp[3], tmp[4], val], err => {
if (err) {
adapter.log.error(adapter.config.type + 'rpc -> setValue ' + JSON.stringify([tmp[3], tmp[4], val]));
adapter.log.error(err);
}
});
} else {
adapter.log.warn('Cannot setValue "' + id + '", because not connected.');
}
} catch (err) {
adapter.log.error('Cannot call setValue: :' + err);
}
}
function readSignals(id) {
displays[id] = null;
const data = {
lines: [{}, {}, {}],
signal: '0xF0',
tone: '0xC0'
};
const promises = [];
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE2', (err, state) => {
data.lines[0].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON2', (err, state) => {
data.lines[0].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE3', (err, state) => {
data.lines[1].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON3', (err, state) => {
data.lines[1].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE4', (err, state) => {
data.lines[2].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON4', (err, state) => {
data.lines[2].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_SIGNAL', (err, state) => {
data.signal = state ? state.val || '0xF0' : '0xF0';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_TONE', (err, state) => {
data.tone = state ? state.val || '0xC0' : '0xC0';
resolve();
});
}));
Promise.all(promises).then(() => controlEPaper(id, data));
} // endReadSignals
function readSettings(id) {
displays[id] = null;
const data = {
lines: [{}, {}, {}],
signal: '0xF0',
tone: '0xC0'
};
const promises = [];
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE2', (err, state) => {
data.lines[0].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON2', (err, state) => {
data.lines[0].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE3', (err, state) => {
data.lines[1].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON3', (err, state) => {
data.lines[1].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE4', (err, state) => {
data.lines[2].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON4', (err, state) => {
data.lines[2].icon = state ? state.val || '' : '';
resolve();
});
}));
Promise.all(promises).then(() => controlEPaper(id, data));
} // endReadSettings
// the adapter object
function startAdapter(options) {
options = options || {};
Object.assign(options, {
name: adapterName,
ready: () => {
adapter.subscribeStates('*');
main();
},
stateChange: (id, state) => {
if (state && state.ack !== true) {
const tmp = id.split('.');
let val;
if (id === adapter.namespace + '.updated' || /_ALARM$/.test(id)) return;
adapter.log.debug(adapter.config.type + 'rpc -> setValue ' + tmp[3] + ' ' + tmp[4] + ': ' + state.val);
if (!dpTypes[id]) {
adapter.log.error(adapter.config.type + 'rpc -> setValue: no dpType for ' + id + '!');
return;
}
if (dpTypes[id].UNIT === '%' && dpTypes[id].MIN !== undefined) {
state.val = (state.val / 100) * (dpTypes[id].MAX - dpTypes[id].MIN) + dpTypes[id].MIN;
state.val = Math.round(state.val * 1000) / 1000;
} else if (dpTypes[id].UNIT === '100%') {
state.val = state.val / 100;
}
const type = dpTypes[id].TYPE;
if (type === 'EPAPER_LINE' || type === 'EPAPER_ICON') {
const _id = tmp[0] + '.' + tmp[1] + '.' + tmp[2];
if (displays[_id] && displays[_id].timer) {
clearTimeout(displays[_id].timer);
if (displays[_id].withTone) {
displays[_id] = {timer: setTimeout(readSignals, 300, _id), withTone: true};
return;
}
}
displays[_id] = {timer: setTimeout(readSettings, 300, _id), withTone: false};
return;
} else if (type === 'EPAPER_SIGNAL' || type === 'EPAPER_TONE') {
const _id = tmp[0] + '.' + tmp[1] + '.' + tmp[2];
if (displays[_id] && displays[_id].timer) {
clearTimeout(displays[_id].timer);
}
displays[_id] = {timer: setTimeout(readSignals, 300, _id), withTone: true};
return;
} else {
switch (type) {
case 'BOOL':
val = (state.val === 'false' || state.val === '0') ? false : !!state.val;
break;
case 'FLOAT':
val = {explicitDouble: state.val};
break;
default:
val = state.val;
}
}
adapter.log.debug('setValue ' + JSON.stringify([tmp[2] + ':' + tmp[3], tmp[4], val]) + ' ' + type);
try {
if (rpcClient && connected) {
rpcClient.methodCall('setValue', [tmp[2] + ':' + tmp[3], tmp[4], val], (err/*, data*/) => {
if (err) {
adapter.log.error(adapter.config.type + 'rpc -> setValue ' + JSON.stringify([tmp[3], tmp[4], state.val]) + ' ' + type);
adapter.log.error(err);
}
});
} else {
adapter.log.warn('Cannot setValue "' + id + '", because not connected.');
}
} catch (err) {
adapter.log.error('Cannot call setValue: :' + err);
}
}
},
// Add messagebox Function for ioBroker.occ
message: obj => {
adapter.log.debug('[MSSG] Received: ' + JSON.stringify(obj));
if (obj.command === 'stopInstance') {
if (rpcServer && rpcServer.server) {
try {
rpcServer.server.close(() => {
console.log('server closed.');
rpcServer.server.unref();
});
} catch (e) {
//
}
}
if (rpcClient && rpcClient.socket) {
try {
rpcClient.socket.destroy();
} catch (e) {
//
}
}
// force close
setTimeout(() => adapter.terminate ? adapter.terminate() : process.exit(), 3000);
} else if (obj.message.params === undefined || obj.message.params === null) {
try {
if (rpcClient && connected) {
rpcClient.methodCall(obj.command, [obj.message.ID, obj.message.paramType], (err, data) => {
if (obj.callback) adapter.sendTo(obj.from, obj.command, {
result: data,
error: err
}, obj.callback);
});
} else {
adapter.log.warn('Cannot send "' + obj.command + '" "' + obj.message.ID + '": because not connected');
if (obj.callback) adapter.sendTo(obj.from, obj.command, {error: 'not connected'}, obj.callback);
}
} catch (err) {
adapter.log.error('Cannot call ' + obj.command + ': ' + err);
adapter.sendTo(obj.from, obj.command, {error: err}, obj.callback);
}
} else {
try {
if (rpcClient && connected) {
rpcClient.methodCall(obj.command, [obj.message.ID, obj.message.paramType, obj.message.params], (err, data) => {
if (obj.callback) adapter.sendTo(obj.from, obj.command, {
result: data,
error: err
}, obj.callback);
});
} else {
adapter.log.warn('Cannot send "' + obj.command + '" "' + obj.message.ID + '": because not connected');
if (obj.callback) adapter.sendTo(obj.from, obj.command, {error: 'not connected'}, obj.callback);
}
} catch (err) {
adapter.log.error('Cannot call ' + obj.command + ': ' + err);
adapter.sendTo(obj.from, obj.command, {error: err}, obj.callback);
}
}
},
unload: callback => {
try {
if (eventInterval) {
clearInterval(eventInterval);
eventInterval = null;
}
if (connInterval) {
clearInterval(connInterval);
connInterval = null;
}
if (connTimeout) {
clearTimeout(connTimeout);
connTimeout = null;
}
if (adapter.config && rpcClient) {
adapter.log.info(adapter.config.type + 'rpc -> ' + adapter.config.homematicAddress + ':' + adapter.config.homematicPort + homematicPath + ' init ' + JSON.stringify([daemonURL, '']));
try {
rpcClient.methodCall('init', [daemonURL, ''], (/*err, data*/) => {
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
}
if (callback) callback();
callback = null;
});
} catch (err) {
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
}
adapter.log.error('Cannot call init: [' + daemonURL + ', ""]' + err);
if (callback) callback();
callback = null;
}
} else {
if (callback) callback();
callback = null;
}
} catch (e) {
if (adapter && adapter.log) {
adapter.log.error('Unload error: ' + e);
} else {
console.log(e);
}
if (callback) callback();
callback = null;
}
}
});
adapter = new utils.Adapter(options);
return adapter;
}
let rpc;
let rpcClient;
let rpcServer;
const metaValues = {};
let metaRoles = {};
const dpTypes = {};
let lastEvent = 0;
let eventInterval;
let connInterval;
let connTimeout;
let daemonURL = '';
let daemonProto = '';
let homematicPath;
function main() {
homematicPath = adapter.config.daemon === 'virtual-devices' ? '/groups/' : '/';
adapter.config.reconnectInterval = parseInt(adapter.config.reconnectInterval, 10) || 30;
if (adapter.config.reconnectInterval < 10) {
adapter.log.error('Reconnect interval is less than 10 seconds. Set reconnect interval to 10 seconds.');
adapter.config.reconnectInterval = 10;
}
adapter.config.checkInitInterval = parseInt(adapter.config.checkInitInterval, 10);
if (adapter.config.checkInitInterval < 10) {
adapter.log.error('Check init interval is less than 10 seconds. Set init interval to 10 seconds.');
adapter.config.checkInitInterval = 10;
}
adapter.setState('info.connection', false, true);
if (adapter.config.type === 'bin') {
rpc = require('binrpc');
daemonProto = 'xmlrpc_bin://';
} else {
rpc = require('homematic-xmlrpc');
adapter.config.type = 'xml';
daemonProto = adapter.config.useHttps ? 'https://' : 'http://';
}
// Load VALUE paramsetDescriptions (needed to create state objects)
adapter.objects.getObjectView('hm-rpc', 'paramsetDescription', {
startkey: 'hm-rpc.meta.VALUES',
endkey: 'hm-rpc.meta.VALUES.\u9999'
}, function handleValueParamSetDescriptions(err, doc) {
if (err) adapter.log.error('getObjectView hm-rpc: ' + err);
if (doc && doc.rows) {
for (const row of doc.rows) {
const channel = row.id.slice(19);
metaValues[channel] = row.value.native;
}
}
// Load common.role assignments
adapter.getForeignObject('hm-rpc.meta.roles', (err, res) => {
if (err) adapter.log.error('hm-rpc.meta.roles: ' + err);
if (res) metaRoles = res.native;
// Start Adapter
if (adapter.config) initRpcServer();
});
});
adapter.objects.getObjectView('system', 'state', {
startkey: adapter.namespace,
endkey: adapter.namespace + '\u9999'
}, function handleStateViews(err, res) {
if (!err && res.rows) {
for (const row of res.rows) {
if (row.id === adapter.namespace + '.updated') continue;
if (!row.value.native) {
adapter.log.warn('State ' + row.id + ' does not have native.');
dpTypes[row.id] = {UNIT: '', TYPE: ''};
} else {
dpTypes[row.id] = {
UNIT: row.value.native.UNIT,
TYPE: row.value.native.TYPE,
MIN: row.value.native.MIN,
MAX: row.value.native.MAX
};
if (typeof dpTypes[row.id].MIN === 'number') {
dpTypes[row.id].MIN = parseFloat(dpTypes[row.id].MIN);
dpTypes[row.id].MAX = parseFloat(dpTypes[row.id].MAX);
if (dpTypes[row.id].UNIT === '100%') {
dpTypes[row.id].UNIT = '%';
}
if (dpTypes[row.id].MAX === 99) {
dpTypes[row.id].MAX = 100;
} else if (dpTypes[row.id].MAX === 1.005) {
dpTypes[row.id].MAX = 1;
}
} // endIf
}
}
}
});
}
function sendInit() {
try {
if (rpcClient && (rpcClient.connected === undefined || rpcClient.connected)) {
adapter.log.debug(adapter.config.type + 'rpc -> ' + adapter.config.homematicAddress + ':' + adapter.config.homematicPort + homematicPath + ' init ' + JSON.stringify([daemonURL, adapter.namespace]));
rpcClient.methodCall('init', [daemonURL, adapter.namespace], (err/*, data*/) => {
if (!err) {
if (adapter.config.daemon === 'CUxD') {
getCuxDevices(function handleCuxDevices(err2) {
if (!err2) {
updateConnection();
} else {
adapter.log.error('getCuxDevices error: ' + err2);
}
});
} else {
updateConnection();
}
} else {
adapter.log.error('init error: ' + err);
}
});
}
} catch (err) {
adapter.log.error('Init not possible, going to stop: ' + err);
adapter.stop();
}
}
function sendPing() {
if (rpcClient) {
adapter.log.debug('Send PING...');
try {
rpcClient.methodCall('ping', [adapter.namespace], (err/*, data*/) => {
if (!err) {
adapter.log.debug('PING ok');
} else {
adapter.log.error('Ping error: ' + err);
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
connect();
}
}
});
} catch (err) {
adapter.log.error('Cannot call ping [' + adapter.namespace + ']: ' + err);
}
} else {
adapter.warn('Called PING, but client does not exist');
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
connect();
}
}
}
function initRpcServer() {
adapter.config.homematicPort = parseInt(adapter.config.homematicPort, 10);
adapter.config.port = parseInt(adapter.config.port, 10);
adapter.config.useHttps = adapter.config.useHttps || false;
//adapterPort was introduced in v1.0.1. If not set yet then try 2000
const adapterPort = parseInt(adapter.config.port || adapter.config.homematicPort, 10) || 2000;
const callbackAddress = adapter.config.callbackAddress || adapter.config.adapterAddress;
adapter.getPort(adapterPort, port => {
daemonURL = daemonProto + callbackAddress + ':' + port;
rpcServer = rpc.createServer({
host: adapter.config.adapterAddress,
port: port
});
adapter.log.info(adapter.config.type + 'rpc server is trying to listen on ' + adapter.config.adapterAddress + ':' + port);
adapter.log.info(adapter.config.type + 'rpc client is trying to connect to ' + adapter.config.homematicAddress + ':' + adapter.config.homematicPort + homematicPath + ' with ' + JSON.stringify([daemonURL, adapter.namespace]));
connect(true);
rpcServer.on('NotFound', (method, params) => adapter.log.warn(adapter.config.type + 'rpc <- undefined method ' + method + ' ' + JSON.stringify(params).slice(0, 80)));
rpcServer.on('system.multicall', (method, params, callback) => {
updateConnection();
const response = [];
for (const param of params[0]) {
if (methods[param.methodName]) {
adapter.log.debug(`${adapter.config.type} multicall <${param.methodName}>: ${param.params}`);
response.push(methods[param.methodName](null, param.params));
} else {
response.push('');
}
}
callback(null, response);
});
rpcServer.on('system.listMethods', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on system.listMethods: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- system.listMethods ' + JSON.stringify(params));
callback(null, ['event', 'deleteDevices', 'listDevices', 'newDevices', 'system.listMethods', 'system.multicall', 'setReadyConfig']);
});
rpcServer.on('event', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on event: ' + err);
}
updateConnection();
try {
callback(null, methods.event(err, params));
} catch (err) {
adapter.log.error('Cannot respond to event: ' + err);
}
});
rpcServer.on('newDevices', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on newDevices: ' + err);
}
const newDevices = params[1];
adapter.log.info(adapter.config.type + 'rpc <- newDevices ' + newDevices.length);
// for an HmIP adapter we have to filter out the devices that
// are already present if forceReInit is not set
if (adapter.config.forceReInit === false && adapter.config.daemon === 'HMIP') {
adapter.objects.getObjectView('hm-rpc', 'listDevices', {
startkey: 'hm-rpc.' + adapter.instance + '.',
endkey: 'hm-rpc.' + adapter.instance + '.\u9999'
}, (err, doc) => {
if (doc && doc.rows) {
for (const row of doc.rows) {
if (row.id === adapter.namespace + '.updated') continue;
// let's get the device description
const val = row.value;
if (typeof val.ADDRESS === 'undefined') continue;
// let's find the current device in the newDevices array;
// if it no longer exists there, we can delete it
let index = -1;
for (let j = 0; j < newDevices.length; j++) {
if (newDevices[j].ADDRESS === val.ADDRESS && newDevices[j].VERSION === val.VERSION) {
index = j;
break;
}
}
// if index is -1, then newDevices no longer contains the
// device with address val.ADDRESS, so we can delete it
if (index === -1) {
if (val.ADDRESS && !adapter.config.dontDelete) {
if (val.ADDRESS.indexOf(':') !== -1) {
const address = val.ADDRESS.replace(':', '.').replace(FORBIDDEN_CHARS, '_');
const parts = address.split('.');
adapter.deleteChannel(parts[parts.length - 2], parts[parts.length - 1]);
adapter.log.info('obsolete channel ' + address + ' ' + JSON.stringify(address) + ' deleted');
} else {
adapter.deleteDevice(val.ADDRESS);
adapter.log.info('obsolete device ' + val.ADDRESS + ' deleted');
}
}
} else {
// we can remove the item at index because it is already registered
// to ioBroker
newDevices.splice(index, 1);
}
}
}
adapter.log.info('new HmIP devices/channels after filter: ' + newDevices.length);
createDevices(newDevices, callback);
});
} else {
createDevices(newDevices, callback);
}
});
rpcServer.on('listDevices', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on listDevices: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- listDevices ' + JSON.stringify(params));
adapter.objects.getObjectView('hm-rpc', 'listDevices', {
startkey: 'hm-rpc.' + adapter.instance + '.',
endkey: 'hm-rpc.' + adapter.instance + '.\u9999'
}, (err, doc) => {
const response = [];
// we only fill the response if this isn't a force reinit and
// if the adapter instance is not bothering with HmIP (which seems to work slightly differently in terms of XML-RPC)
if (!adapter.config.forceReInit && adapter.config.daemon !== 'HMIP' && doc && doc.rows) {
for (let i = 0; i < doc.rows.length; i++) {
if (doc.rows[i].id === adapter.namespace + '.updated') continue;
const val = doc.rows[i].value;
/*if (val.PARENT_TYPE) {
channelParams[val.ADDRESS] = val.PARENT_TYPE + '.' + val.TYPE + '.' + val.VERSION;
}*/
if (val.ADDRESS) response.push({ADDRESS: val.ADDRESS, VERSION: val.VERSION});
}
}
adapter.log.info(adapter.config.type + 'rpc -> ' + response.length + ' devices');
//log.info(JSON.stringify(response));
try {
for (let r = response.length - 1; r >= 0; r--) {
if (!response[r].ADDRESS) {
adapter.log.warn(adapter.config.type + 'rpc -> found empty entry at position ' + r + ' !');
response.splice(r, 1);
}
}
callback(null, response);
} catch (err) {
adapter.log.error('Cannot respond to listDevices: ' + err);
require('fs').writeFileSync(__dirname + '/problem.json', JSON.stringify(response));
}
});
});
rpcServer.on('deleteDevices', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on deleteDevices: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- deleteDevices ' + params[1].length);
for (let i = 0; i < params[1].length; i++) {
if (params[1][i].indexOf(':') !== -1) {
params[1][i] = params[1][i].replace(':', '.').replace(FORBIDDEN_CHARS, '_');
adapter.log.info('channel ' + params[1][i] + ' ' + JSON.stringify(params[1][i]) + ' deleted');
const parts = params[1][i].split('.');
adapter.deleteChannel(parts[parts.length - 2], parts[parts.length - 1]);
} else {
adapter.log.info('device ' + params[1][i] + ' deleted');
adapter.deleteDevice(params[1][i]);
}
}
try {
callback(null, '');
} catch (err) {
adapter.log.error('Cannot respond to deleteDevices: ' + err);
}
});
rpcServer.on('setReadyConfig', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on setReadyConfig: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- setReadyConfig ' + JSON.stringify(params));
try {
callback(null, '');
} catch (err) {
adapter.log.error('Cannot respond to setReadyConfig: ' + err);
}
});
});
}
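// Handler table for RPC calls dispatched from system.multicall. 'event'
// converts a raw device value into an ioBroker state, rescaling percentage
// datapoints to 0..100. Worked example (values assumed): a datapoint with
// UNIT '%', MIN 0 and MAX 1 reporting 0.5 yields (0.5 - 0) / (1 - 0) * 100 = 50.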
const methods = {
event: function (err, params) {
adapter.log.debug(adapter.config.type + 'rpc <- event ' + JSON.stringify(params));
let val;
// CUxD ignores all prefixes!!
if (params[0] === 'CUxD' || params[0].indexOf(adapter.name) === -1) {
params[0] = adapter.namespace;
}
const channel = params[1].replace(':', '.').replace(FORBIDDEN_CHARS, '_');
const name = params[0] + '.' + channel + '.' + params[2];
if (dpTypes[name]) {
if (dpTypes[name].MIN !== undefined && dpTypes[name].UNIT === '%') {
val = ((parseFloat(params[3]) - dpTypes[name].MIN) / (dpTypes[name].MAX - dpTypes[name].MIN)) * 100;
val = Math.round(val * 100) / 100;
} else if (dpTypes[name].UNIT === '100%' || (dpTypes[name].UNIT === '%' && dpTypes[name].MAX === 1)) {
val = params[3] * 100;
} else {
val = params[3];
}
} else {
val = params[3];
}
adapter.log.debug(name + ' ==> UNIT: "' + (dpTypes[name] ? dpTypes[name].UNIT : 'none') + '" (min: ' + (dpTypes[name] ? dpTypes[name].MIN : 'none') + ', max: ' + (dpTypes[name] ? dpTypes[name].MAX : 'none') + ') From "' + params[3] + '" => "' + val + '"');
adapter.setState(channel + '.' + params[2], {val: val, ack: true});
return '';
}
};
const queueValueParamsets = [];
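// Channels whose VALUES paramset still has to be resolved are collected in
// queueValueParamsets and drained one by one in getValueParamsets().
// addParamsetObjects() maps one paramset description to ioBroker state
// objects; sketch for a hypothetical entry
//   LEVEL: {TYPE: 'FLOAT', MIN: 0, MAX: 1, UNIT: '100%', OPERATIONS: 7}
// -> common.type 'number', read and write true (OPERATIONS bits 1 and 2),
//    unit '%' and common.max scaled to 100.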
function addParamsetObjects(channel, paramset, callback) {
const promises = [];
for (const key in paramset) {
if (!paramset.hasOwnProperty(key)) continue;
const commonType = {
ACTION: 'boolean',
BOOL: 'boolean',
FLOAT: 'number',
ENUM: 'number',
INTEGER: 'number',
STRING: 'string',
EPAPER_LINE: 'string',
EPAPER_ICON: 'string',
EPAPER_TONE: 'string'
};
const obj = {
type: 'state',
common: {
def: paramset[key].DEFAULT,
type: commonType[paramset[key].TYPE] || paramset[key].TYPE || '',
read: !!(paramset[key].OPERATIONS & 1),
write: !!(paramset[key].OPERATIONS & 2)
},
native: paramset[key]
};
if (obj.common.type === 'number') {
obj.common.min = paramset[key].MIN;
obj.common.max = paramset[key].MAX;
if (paramset[key].TYPE === 'ENUM') {
obj.common.states = {};
for (let i = 0; i < paramset[key].VALUE_LIST.length; i++) {
obj.common.states[i] = paramset[key].VALUE_LIST[i];
}
}
if (paramset[key].SPECIAL) {
if (!obj.common.states) obj.common.states = {};
for (let i = 0; i < paramset[key].SPECIAL.length; i++) {
obj.common.states[paramset[key].SPECIAL[i].VALUE] = paramset[key].SPECIAL[i].ID;
}
}
}
if (paramset[key].STATES) {
obj.common.states = paramset[key].STATES;
}
if (paramset[key].UNIT === '100%') {
obj.common.unit = '%';
obj.common.max = 100 * paramset[key].MAX;
} else if (paramset[key].UNIT !== '') {
obj.common.unit = paramset[key].UNIT;
if (obj.common.unit === '�C' || obj.common.unit === '°C') {
obj.common.unit = '°C';
} else if (obj.common.unit === '�F' || obj.common.unit === '°F') {
obj.common.unit = '°F';
}
}
if (metaRoles.dpCONTROL && metaRoles.dpCONTROL[obj.native.CONTROL]) {
obj.common.role = metaRoles.dpCONTROL[obj.native.CONTROL];
} else if (metaRoles.chTYPE_dpNAME && metaRoles.chTYPE_dpNAME[channel.native.TYPE + '.' + key]) {
obj.common.role = metaRoles.chTYPE_dpNAME[channel.native.TYPE + '.' + key];
} else if (metaRoles.dpNAME && metaRoles.dpNAME[key]) {
obj.common.role = metaRoles.dpNAME[key];
}
if (obj.common.role === 'state' && obj.common.write) {
obj.common.role = 'switch';
}
if (obj.common.role === 'level.color.hue') {
obj.common.max = 200;
}
if (obj.common.role === 'value.rssi') {
obj.common.unit = 'dBm';
}
if (obj.common.role === 'value.voltage') {
obj.common.unit = 'V';
}
if (paramset[key].OPERATIONS & 8) {
obj.common.role = 'indicator.service';
}
// specify which value is LOCK
if (obj.native.CONTROL === 'LOCK.STATE') {
obj.native.LOCK_VALUE = false;
obj.common.role = 'switch.lock';
} else if (obj.native.CONTROL === 'DOOR_SENSOR.STATE') {
obj.common.role = 'value.window';
}
if (typeof obj.common.role !== 'string' && typeof obj.common.role !== 'undefined') {
throw new Error('Unexpected typeof obj.common.role: ' + typeof obj.common.role);
}
const dpID = adapter.namespace + '.' + channel._id + '.' + key;
dpTypes[dpID] = {
UNIT: paramset[key].UNIT,
TYPE: paramset[key].TYPE,
MIN: paramset[key].MIN,
MAX: paramset[key].MAX
};
if (typeof dpTypes[dpID].MIN === 'number') {
dpTypes[dpID].MIN = parseFloat(dpTypes[dpID].MIN);
dpTypes[dpID].MAX = parseFloat(dpTypes[dpID].MAX);
// Humidity is reported with MAX 99, which is wrong; normalize it to 100.
if (dpTypes[dpID].MAX === 99) {
dpTypes[dpID].MAX = 100;
}
if (dpTypes[dpID].UNIT === '100%') {
dpTypes[dpID].UNIT = '%';
}
}
if (key === 'LEVEL' && paramset.WORKING) {
obj.common.workingID = 'WORKING';
}
promises.push(new Promise(resolve => {
adapter.extendObject(channel._id + '.' + key, obj, (err, res) => {
if (!err) {
adapter.log.debug('object ' + res.id + ' extended');
} else {
adapter.log.error('object ' + (res ? res.id : '?') + ' extend ' + err);
}
resolve();
});
}));
} // endFor
Promise.all(promises).then(() => callback());
} // endAddParamsetObjects
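// Resolve each queued channel's VALUES paramset: first from the in-memory
// cache, then from the stored hm-rpc.meta.* objects, and as a last resort
// via a getParamsetDescription RPC call. When the queue is empty, hm-rega is
// notified through the 'updated' state and a pending forceReInit is reset.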
function getValueParamsets() {
if (queueValueParamsets.length === 0) {
// Inform hm-rega about new devices
adapter.setState('updated', true, false);
// Reset the forceReInit flag once the full re-initialization has run
if (adapter.config.forceReInit) {
adapter.extendForeignObject('system.adapter.' + adapter.namespace, {native: {forceReInit: false}});
}
return;
}
const obj = queueValueParamsets.pop();
const cid = `${obj.native.PARENT_TYPE}.${obj.native.TYPE}.${obj.native.VERSION}`;
adapter.log.debug(`getValueParamsets ${cid}`);
// if meta values are cached for the e-paper display, we extend these cached meta values with the e-paper states
if (obj.native && obj.native.PARENT_TYPE === 'HM-Dis-EP-WM55' && obj.native.TYPE === 'MAINTENANCE') {
addEPaperToMeta();
}
if (metaValues[cid]) {
adapter.log.debug('paramset cache hit');
addParamsetObjects(obj, metaValues[cid], () => setImmediate(getValueParamsets));
} else {
const key = `hm-rpc.meta.VALUES.${cid}`;
adapter.getForeignObject(key, (err, res) => {
if (res && res.native) {
adapter.log.debug(`${key} found`);
metaValues[cid] = res.native;
if (obj.native && obj.native.PARENT_TYPE === 'HM-Dis-EP-WM55' && obj.native.TYPE === 'MAINTENANCE') {
addEPaperToMeta();
}
addParamsetObjects(obj, metaValues[cid], () => setImmediate(getValueParamsets));
} else {
adapter.log.info(adapter.config.type + 'rpc -> getParamsetDescription ' + JSON.stringify([obj.native.ADDRESS, 'VALUES']));
try {
rpcClient.methodCall('getParamsetDescription', [obj.native.ADDRESS, 'VALUES'], (err, res) => {
if (err) {
adapter.log.error(`Error on getParamsetDescription: ${err}`);
} else {
metaValues[cid] = res;
if (obj.native && obj.native.PARENT_TYPE === 'HM-Dis-EP-WM55' && obj.native.TYPE === 'MAINTENANCE') {
addEPaperToMeta();
}
const paramset = {
'type': 'meta',
'meta': {
adapter: 'hm-rpc',
type: 'paramsetDescription'
},
'common': {},
'native': metaValues[cid]
};
if (res) {
// if not empty
for (const attr in res) {
if (res.hasOwnProperty(attr)) {
adapter.log.warn(`Send this info to developer: "_id": "${key}"`);
adapter.log.warn(`Send this info to developer: ${JSON.stringify(paramset)}`);
break;
}
}
}
adapter.setForeignObject(key, paramset, () => {
addParamsetObjects(obj, metaValues[cid], () => {
setImmediate(getValueParamsets);
});
});
}
});
} catch (err) {
adapter.log.error(`Cannot call getParamsetDescription: ${err}`);
}
}
});
}
}
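// The HM-Dis-EP-WM55 e-paper display needs pseudo datapoints (EPAPER_LINE2..4,
// EPAPER_ICON2..4, EPAPER_SIGNAL, EPAPER_TONE) on its MAINTENANCE channel
// that the CCU does not report itself; inject them into the cached meta
// values for the known firmware versions 9..12.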
function addEPaperToMeta() {
// Check all versions from 9 to 12
for (let i = 9; i < 13; i++) {
const id = `HM-Dis-EP-WM55.MAINTENANCE.${i}`;
if (!metaValues[id] || !metaValues[id].EPAPER_LINE2) {
// Add the EPAPER States to the Maintenance channel if they are non-existent
metaValues[id] = metaValues[id] || {};
adapter.log.debug(`[EPAPER] Add E-Paper to Meta on ${JSON.stringify(metaValues[id])}`);
const obj = metaValues[id];
obj.EPAPER_LINE2 = {
TYPE: 'EPAPER_LINE',
ID: 'LINE2',
OPERATIONS: 2
};
obj.EPAPER_ICON2 = {
TYPE: 'EPAPER_ICON',
ID: 'ICON2',
STATES: {
'': 'Empty',
'0x80': 'OFF',
'0x81': 'ON',
'0x82': 'Opened',
'0x83': 'Closed',
'0x84': 'error',
'0x85': 'All OK',
'0x86': 'Information',
'0x87': 'New message',
'0x88': 'Service message'
},
OPERATIONS: 2
};
obj.EPAPER_LINE3 = {
TYPE: 'EPAPER_LINE',
ID: 'LINE3',
OPERATIONS: 2
};
obj.EPAPER_ICON3 = {
TYPE: 'EPAPER_ICON',
ID: 'ICON3',
STATES: {
'': 'Empty',
'0x80': 'OFF',
'0x81': 'ON',
'0x82': 'Opened',
'0x83': 'Closed',
'0x84': 'error',
'0x85': 'All OK',
'0x86': 'Information',
'0x87': 'New message',
'0x88': 'Service message'
},
OPERATIONS: 2
};
obj.EPAPER_LINE4 = {
TYPE: 'EPAPER_LINE',
ID: 'LINE4',
OPERATIONS: 2
};
obj.EPAPER_ICON4 = {
TYPE: 'EPAPER_ICON',
ID: 'ICON4',
STATES: {
'': 'Empty',
'0x80': 'OFF',
'0x81': 'ON',
'0x82': 'Opened',
'0x83': 'Closed',
'0x84': 'error',
'0x85': 'All OK',
'0x86': 'Information',
'0x87': 'New message',
'0x88': 'Service message'
},
OPERATIONS: 2
};
obj.EPAPER_SIGNAL = {
TYPE: 'EPAPER_SIGNAL',
ID: 'EPAPER_SIGNAL',
STATES: {
'0xF0': 'OFF',
'0xF1': 'Red blink',
'0xF2': 'Green blink',
'0xF3': 'Orange blink'
},
OPERATIONS: 2
};
obj.EPAPER_TONE = {
TYPE: 'EPAPER_TONE',
ID: 'EPAPER_TONE',
STATES: {
'0xC0': 'Off',
'0xC1': 'Long Long',
'0xC2': 'Long Short',
'0xC3': 'Long Short Short',
'0xC4': 'Short',
'0xC5': 'Short Short',
'0xC6': 'Long',
'0xC7': '7',
'0xC9': '9',
'0xCA': 'A'
},
OPERATIONS: 2
};
}
}
}
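// Create ioBroker objects for all entries reported by the daemon: entries
// with a PARENT become channels (queued afterwards for paramset resolution),
// top-level entries become devices with a type-specific icon.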
function createDevices(deviceArr, callback) {
const objs = [];
for (const device of deviceArr) {
let type;
let role;
let icon;
if (device.PARENT) {
type = 'channel';
role = (metaRoles.chTYPE && metaRoles.chTYPE[device.TYPE]) || undefined;
} else {
type = 'device';
if (!images[device.TYPE]) {
adapter.log.warn('No image for "' + device.TYPE + '" found.');
}
icon = images[device.TYPE] ? ('/icons/' + images[device.TYPE]) : '';
}
const obj = {
_id: device.ADDRESS.replace(':', '.').replace(FORBIDDEN_CHARS, '_'),
type: type,
common: {
// FIXME strange bug - the LEVEL and WORKING datapoints of dimmers carry the name of the first dimmer device?!?
name: device.ADDRESS,
role: role
},
native: device
};
if (icon) obj.common.icon = icon;
const dpID = `${adapter.namespace}.${obj._id}`;
dpTypes[dpID] = {
UNIT: device.UNIT,
TYPE: device.TYPE,
MAX: device.MAX,
MIN: device.MIN,
role: role
};
if (typeof dpTypes[dpID].MIN === 'number') {
dpTypes[dpID].MIN = parseFloat(dpTypes[dpID].MIN);
dpTypes[dpID].MAX = parseFloat(dpTypes[dpID].MAX);
// Humidity is reported with MAX 99, which is wrong; normalize it to 100.
if (dpTypes[dpID].MAX === 99) {
dpTypes[dpID].MAX = 100;
}
// Sometimes the unit is '100%', sometimes '%'; both mean the same
if (dpTypes[dpID].UNIT === '100%') {
dpTypes[dpID].UNIT = '%';
}
}
objs.push(obj);
}
function queue() {
if (objs.length) {
const obj = objs.pop();
if (metaRoles.dvTYPE && obj.native && metaRoles.dvTYPE[obj.native.PARENT_TYPE]) {
obj.common.role = metaRoles.dvTYPE[obj.native.PARENT_TYPE];
}
adapter.setObject(obj._id, obj, (err, res) => {
if (!err) {
adapter.log.debug('object ' + res.id + ' created');
} else {
adapter.log.error('object ' + (res ? res.id : '?') + ' error on creation: ' + err);
}
setImmediate(queue);
});
if (obj.type === 'channel') {
queueValueParamsets.push(obj);
}
} else {
getValueParamsets();
callback(null, '');
}
}
queue();
}
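// CUxD counterpart to the 'newDevices' callback: actively request the device
// list and, unless forceReInit is set, drop ioBroker objects for devices
// CUxD no longer reports before creating the remaining new ones.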
function getCuxDevices(callback) {
if (rpcClient) {
// request devices from CUxD
try {
rpcClient.methodCall('listDevices', [], (err, newDevices) => {
if (err) {
adapter.log.error('Error on listDevices: ' + err);
return;
}
adapter.log.info(adapter.config.type + 'rpc -> listDevices ' + newDevices.length);
if (adapter.config.forceReInit === false) {
adapter.objects.getObjectView('hm-rpc', 'listDevices', {
startkey: 'hm-rpc.' + adapter.instance + '.',
endkey: 'hm-rpc.' + adapter.instance + '.\u9999'
}, (err, doc) => {
if (doc && doc.rows) {
for (const row of doc.rows) {
if (row.id === adapter.namespace + '.updated') continue;
// let's get the device description
const val = row.value;
if (typeof val.ADDRESS === 'undefined') continue;
// let's find the current device in the newDevices array;
// if it no longer exists there, we can delete it
let index = -1;
for (let j = 0; j < newDevices.length; j++) {
if (newDevices[j].ADDRESS === val.ADDRESS && newDevices[j].VERSION === val.VERSION) {
index = j;
break;
}
}
// if index is -1, then newDevices no longer contains the
// device with address val.ADDRESS, so we can delete it
if (index === -1) {
if (val.ADDRESS && !adapter.config.dontDelete) {
if (val.ADDRESS.indexOf(':') !== -1) {
const address = val.ADDRESS.replace(':', '.').replace(FORBIDDEN_CHARS, '_');
const parts = address.split('.');
adapter.deleteChannel(parts[parts.length - 2], parts[parts.length - 1]);
adapter.log.info('obsolete channel ' + address + ' ' + JSON.stringify(address) + ' deleted');
} else {
adapter.deleteDevice(val.ADDRESS);
adapter.log.info('obsolete device ' + val.ADDRESS + ' deleted');
}
}
} else {
// we can remove the item at index because it is already registered
// to ioBroker
newDevices.splice(index, 1);
}
}
}
adapter.log.info('new CUxD devices/channels after filter: ' + newDevices.length);
createDevices(newDevices, callback);
});
} else {
createDevices(newDevices, callback);
}
});
} catch (err) {
adapter.log.error('Cannot call listDevices: ' + err);
}
} else {
callback && callback();
}
}
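// Called on every successful daemon interaction: remember the time, flag the
// instance as connected and (re)start the keep-alive timer, which fires at
// half the checkInitInterval. Example (default of 10 s assumed): keepAlive
// runs every 5 s.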
function updateConnection() {
lastEvent = new Date().getTime();
if (!connected) {
adapter.log.info('Connected');
connected = true;
adapter.setState('info.connection', true, true);
}
if (connInterval) {
adapter.log.debug('clear connecting interval');
clearInterval(connInterval);
connInterval = null;
}
if (connTimeout) {
adapter.log.debug('clear connecting timeout');
clearTimeout(connTimeout);
connTimeout = null;
}
// Virtual Devices API does not support PING
if (!eventInterval && adapter.config.daemon !== 'virtual-devices') {
adapter.log.debug('start ping interval');
eventInterval = setInterval(keepAlive, adapter.config.checkInitInterval * 1000 / 2);
}
}
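// Create the RPC client lazily: a plain XML-RPC/BIN-RPC client, or, when
// useHttps is set, a secure client whose basic-auth credentials are decrypted
// with the system secret. sendInit() is then retried on an interval until the
// daemon answers.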
function connect(isFirst) {
if (!rpcClient && !adapter.config.useHttps) {
rpcClient = rpc.createClient({
host: adapter.config.homematicAddress,
port: adapter.config.homematicPort,
path: homematicPath,
reconnectTimeout: adapter.config.reconnectInterval * 1000
});
// Only needed here, where a bin-rpc client can occur; bin-rpc cannot use HTTPS
if (rpcClient.on) {
rpcClient.on('error', err => {
adapter.log.error(`Socket error: ${err}`);
});
} // endIf
// if bin-rpc
/* if (rpcClient.on) {
rpcClient.on('connect', function (err) {
sendInit();
});
rpcClient.on('close', function () {
adapter.log.debug('Socket closed.');
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
}
if (eventInterval) {
adapter.log.debug('clear ping interval');
clearInterval(eventInterval);
eventInterval = null;
}
// clear queue
if (rpcClient.queue) {
while (rpcClient.queue.length) {
rpcClient.queue.pop();
}
rpcClient.pending = false;
}
if (!connTimeout) {
connTimeout = setTimeout(connect, adapter.config.reconnectInterval * 1000);
}
});
}*/
} else if (!rpcClient) {
adapter.getForeignObject('system.config', (err, obj) => {
let password;
let username;
if (obj && obj.native && obj.native.secret) {
password = crypto.decrypt(obj.native.secret, adapter.config.password);
username = crypto.decrypt(obj.native.secret, adapter.config.username);
} else {
password = crypto.decrypt('Zgfr56gFe87jJOM', adapter.config.password);
username = crypto.decrypt('Zgfr56gFe87jJOM', adapter.config.username);
} // endElse
rpcClient = rpc.createSecureClient({
host: adapter.config.homematicAddress,
port: adapter.config.homematicPort,
path: homematicPath,
reconnectTimeout: adapter.config.reconnectInterval * 1000,
basic_auth: {user: username, pass: password},
rejectUnauthorized: false
});
});
} // endElseIf
connTimeout = null;
adapter.log.debug('Connect...');
if (eventInterval) {
adapter.log.debug('clear ping interval');
clearInterval(eventInterval);
eventInterval = null;
}
if (isFirst) sendInit();
// Periodically try to reconnect
if (!connInterval) {
adapter.log.debug('start connecting interval');
connInterval = setInterval(() => sendInit(), adapter.config.reconnectInterval * 1000);
}
}
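// Runs at half the checkInitInterval: if no event arrived within one full
// interval the connection is re-initialized, otherwise the daemon is pinged.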
function keepAlive() {
adapter.log.debug('[KEEPALIVE] Check if connection is alive');
if (connInterval) {
clearInterval(connInterval);
connInterval = null;
}
const _now = Date.now();
// Check last event time. If timeout => send init again
if (!lastEvent || (_now - lastEvent) >= adapter.config.checkInitInterval * 1000) {
adapter.log.debug('[KEEPALIVE] Connection timed out, initializing new connection');
connect();
} else {
sendPing();
}
} // endKeepAlive
// If started as allInOne/compact mode => return function to create instance
if (module && module.parent) {
module.exports = startAdapter;
} else {
// or start the instance directly
startAdapter();
} // endElse
| hm-rpc.js | /*
* Copyright (c) 2014-2019 bluefox <[email protected]>
*
* Copyright (c) 2014 hobbyquaker
*
* The MIT License (MIT)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/* jshint -W097 */
/* jshint strict: false */
/*jslint node: true */
'use strict';
const utils = require('@iobroker/adapter-core'); // Get common adapter utils
const adapterName = require('./package.json').name.split('.').pop();
const images = require('./lib/images');
const crypto = require('./lib/crypto'); // Provides encrypt and decrypt
let connected = false;
const displays = {};
let adapter;
const FORBIDDEN_CHARS = /[\][*,;'"`<>\\\s?]/g;
// msgBuffer = [{line: line2, icon: icon2}, {line: line3, icon: icon3}, {line: '', icon: ''}];
// Icons:
// 0x80 OFF
// 0x81 ON
// 0x82 open
// 0x83 closed
// 0x84 error
// 0x85 all OK
// 0x86 information
// 0x87 new message
// 0x88 service message
// Tone sequences
// 0xC0 OFF
// 0xC1 LONG LONG
// 0xC2 LONG SHORT
// 0xC3 LONG SHORT SHORT
// 0xC4 SHORT
// 0xC5 SHORT SHORT
// 0xC6 LONG
// 0xC7
// 0xC9
// 0xCA
// Signals
// 0xF0 OFF
// 0xF1 red blinking
// 0xF2 green blinking
// 0xF3 orange blinking
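// Normalize a numeric icon/tone/signal code to a '0x..' hex string; string
// input passes through unchanged. Example: number2hex(10) -> '0x0A'.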
function number2hex(num) {
if (typeof num === 'number') {
num = num.toString(16).toUpperCase();
if (num.length < 2) num = '0' + num;
num = '0x' + num;
}
return num;
}
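// Build the raw command string for the HM-Dis-EP-WM55 display. Framing as
// implemented below: '0x02,0x0A' opens the message, every line is prefixed
// with 0x12 (plus '0x13,<icon>' if an icon is set) and closed with 0x0A,
// followed by '0x14,<tone>,0x1C,<repeat code>,0x1D,<offset code>,0x16,'
// and finally '<signal>,0x03'. Example (defaults assumed: repeats 1,
// offset 10) for one line 'Hi' with icon 0x86:
// '0x02,0x0A,0x12,0x48,0x69,0x13,0x86,0x0A,0x14,0xC0,0x1C,0xD0,0x1D,0xE0,0x16,0xF0,0x03'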
function combineEPaperCommand(lines, signal, ton, repeats, offset) {
signal = number2hex(signal || '0xF0');
ton = number2hex(ton || '0xC0');
const substitutions = {
'A': '0x41',
'B': '0x42',
'C': '0x43',
'D': '0x44',
'E': '0x45',
'F': '0x46',
'G': '0x47',
'H': '0x48',
'I': '0x49',
'J': '0x4A',
'K': '0x4B',
'L': '0x4C',
'M': '0x4D',
'N': '0x4E',
'O': '0x4F',
'P': '0x50',
'Q': '0x51',
'R': '0x52',
'S': '0x53',
'T': '0x54',
'U': '0x55',
'V': '0x56',
'W': '0x57',
'X': '0x58',
'Y': '0x59',
'Z': '0x5A',
'a': '0x61',
'b': '0x62',
'c': '0x63',
'd': '0x64',
'e': '0x65',
'f': '0x66',
'g': '0x67',
'h': '0x68',
'i': '0x69',
'j': '0x6A',
'k': '0x6B',
'l': '0x6C',
'm': '0x6D',
'n': '0x6E',
'o': '0x6F',
'p': '0x70',
'q': '0x71',
'r': '0x72',
's': '0x73',
't': '0x74',
'u': '0x75',
'v': '0x76',
'w': '0x77',
'x': '0x78',
'y': '0x79',
'z': '0x7A',
'0': '0x30',
'1': '0x31',
'2': '0x32',
'3': '0x33',
'4': '0x34',
'5': '0x35',
'6': '0x36',
'7': '0x37',
'8': '0x38',
'9': '0x39',
' ': '0x20',
'!': '0x21',
'"': '0x22',
'%': '0x25',
'&': '0x26',
'=': '0x27',
'(': '0x28',
')': '0x29',
'*': '0x2A',
'+': '0x2B',
',': '0x2C',
'-': '0x2D',
'.': '0x2E',
'/': '0x2F',
'Ä': '0x5B',
'Ö': '0x23',
'Ü': '0x24',
'ä': '0x7B',
'ö': '0x7C',
'ü': '0x7D',
'ß': '0x5F',
':': '0x3A',
';': '0x3B',
'@': '0x40',
'>': '0x3E'
};
// Use the arguments as optional parameters with sane defaults; earlier code
// overwrote them unconditionally, which made repeats/offset dead parameters.
offset = offset || 10;
repeats = repeats || 1;
let command = '0x02,0x0A';
for (const li of lines) {
const line = li.line || ''; // guard against icon-only entries
const icon = li.icon;
if (line || icon) {
command = command + ',0x12';
let i;
if ((line.substring(0, 2) === '0x') && (line.length === 4)) {
command = command + ',' + line;
i = 12;
} else {
i = 0;
}
while ((i < line.length) && (i < 12)) {
// Parentheses are required so '||' applies to the table lookup, not to the
// whole concatenation; unknown characters fall back to '*' (0x2A).
command += ',' + (substitutions[line[i]] || '0x2A');
i++;
}
if (icon) {
command += ',0x13,' + number2hex(icon);
}
}
command = command + ',0x0A';
}
command = command + ',0x14,' + ton + ',0x1C,';
if (repeats < 1) {
    command = command + '0xDF,0x1D,';
} else if (repeats < 11) {
    command = command + '0xD' + (repeats - 1) + ',0x1D,';
} else if (repeats === 11) {
    command = command + '0xDA,0x1D,';
} else if (repeats === 12) {
    command = command + '0xDB,0x1D,';
} else if (repeats === 13) {
    command = command + '0xDC,0x1D,';
} else if (repeats === 14) {
    command = command + '0xDD,0x1D,';
} else {
    command = command + '0xDE,0x1D,';
}
if (offset <= 10) {
    command = command + '0xE0,0x16,';
} else if (offset <= 100) {
    // Map the offset decade to a hex digit (0xE1..0xE9). Assumed intent:
    // the original expression '0xE' + (offset - 1 / 10) had an
    // operator-precedence bug and produced a broken code.
    command = command + '0xE' + (Math.ceil(offset / 10) - 1) + ',0x16,';
} else if (offset <= 110) {
    command = command + '0xEA,0x16,';
} else if (offset <= 120) {
    command = command + '0xEB,0x16,';
} else if (offset <= 130) {
    command = command + '0xEC,0x16,';
} else if (offset <= 140) {
    command = command + '0xED,0x16,';
} else if (offset <= 150) {
    command = command + '0xEE,0x16,';
} else {
    command = command + '0xEF,0x16,';
}
command = command + signal + ',0x03';
return command;
}
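// Send the combined e-paper command to the device: the SUBMIT datapoint sits
// on channel 3, so the channel part of the triggering id is replaced before
// calling setValue.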
function controlEPaper(id, data) {
const tmp = id.split('.');
tmp[3] = '3';
tmp[4] = 'SUBMIT';
const val = combineEPaperCommand(data.lines, data.signal || '0xF0', data.tone || '0xC0');
try {
if (rpcClient && connected) {
rpcClient.methodCall('setValue', [tmp[2] + ':' + tmp[3], tmp[4], val], err => {
if (err) {
adapter.log.error(adapter.config.type + 'rpc -> setValue ' + JSON.stringify([tmp[3], tmp[4], val]));
adapter.log.error(err);
}
});
} else {
adapter.log.warn('Cannot setValue "' + id + '", because not connected.');
}
} catch (err) {
adapter.log.error('Cannot call setValue: ' + err);
}
}
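// Gather the current EPAPER_* line, icon, signal and tone states of a display
// (channel 0) and push the resulting command to the device.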
function readSignals(id) {
displays[id] = null;
const data = {
lines: [{}, {}, {}],
signal: '0xF0',
tone: '0xC0'
};
const promises = [];
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE2', (err, state) => {
data.lines[0].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON2', (err, state) => {
data.lines[0].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE3', (err, state) => {
data.lines[1].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON3', (err, state) => {
data.lines[1].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE4', (err, state) => {
data.lines[2].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON4', (err, state) => {
data.lines[2].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_SIGNAL', (err, state) => {
data.signal = state ? state.val || '0xF0' : '0xF0';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_TONE', (err, state) => {
data.tone = state ? state.val || '0xC0' : '0xC0';
resolve();
});
}));
Promise.all(promises).then(() => controlEPaper(id, data));
} // endReadSignals
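// Like readSignals(), but without reading EPAPER_SIGNAL and EPAPER_TONE, so
// the display is refreshed with the default signal '0xF0' and tone '0xC0'.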
function readSettings(id) {
displays[id] = null;
const data = {
lines: [{}, {}, {}],
signal: '0xF0',
tone: '0xC0'
};
const promises = [];
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE2', (err, state) => {
data.lines[0].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON2', (err, state) => {
data.lines[0].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE3', (err, state) => {
data.lines[1].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON3', (err, state) => {
data.lines[1].icon = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_LINE4', (err, state) => {
data.lines[2].line = state ? state.val || '' : '';
resolve();
});
}));
promises.push(new Promise(resolve => {
adapter.getForeignState(id + '.0.EPAPER_ICON4', (err, state) => {
data.lines[2].icon = state ? state.val || '' : '';
resolve();
});
}));
Promise.all(promises).then(() => controlEPaper(id, data));
} // endReadSettings
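// Build the adapter instance: subscribe to all own states on 'ready', forward
// unacknowledged state changes (ack !== true) to the daemon as setValue calls
// (with debounced special handling for e-paper datapoints), answer messagebox
// requests and deregister at the daemon on unload.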
// the adapter object
function startAdapter(options) {
options = options || {};
Object.assign(options, {
name: adapterName,
ready: () => {
adapter.subscribeStates('*');
main();
},
stateChange: (id, state) => {
if (state && state.ack !== true) {
const tmp = id.split('.');
let val;
if (id === adapter.namespace + '.updated' || /_ALARM$/.test(id)) return;
adapter.log.debug(adapter.config.type + 'rpc -> setValue ' + tmp[3] + ' ' + tmp[4] + ': ' + state.val);
if (!dpTypes[id]) {
adapter.log.error(adapter.config.type + 'rpc -> setValue: no dpType for ' + id + '!');
return;
}
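// Scale UI percent values back to the device's native range; this is the
// inverse of the scaling done in methods.event. Example (values assumed):
// MIN 0, MAX 1, UNIT '%' and a UI value of 50 give (50 / 100) * (1 - 0) + 0 = 0.5.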
if (dpTypes[id].UNIT === '%' && dpTypes[id].MIN !== undefined) {
state.val = (state.val / 100) * (dpTypes[id].MAX - dpTypes[id].MIN) + dpTypes[id].MIN;
state.val = Math.round(state.val * 1000) / 1000;
} else if (dpTypes[id].UNIT === '100%') {
state.val = state.val / 100;
}
const type = dpTypes[id].TYPE;
if (type === 'EPAPER_LINE' || type === 'EPAPER_ICON') {
const _id = tmp[0] + '.' + tmp[1] + '.' + tmp[2];
if (displays[_id] && displays[_id].timer) {
clearTimeout(displays[_id].timer);
if (displays[_id].withTone) {
displays[_id] = {timer: setTimeout(readSignals, 300, _id), withTone: true};
return;
}
}
displays[_id] = {timer: setTimeout(readSettings, 300, _id), withTone: false};
return;
} else if (type === 'EPAPER_SIGNAL' || type === 'EPAPER_TONE') {
const _id = tmp[0] + '.' + tmp[1] + '.' + tmp[2];
if (displays[_id] && displays[_id].timer) {
clearTimeout(displays[_id].timer);
}
displays[_id] = {timer: setTimeout(readSignals, 300, _id), withTone: true};
return;
} else {
switch (type) {
case 'BOOL':
val = (state.val === 'false' || state.val === '0') ? false : !!state.val;
break;
case 'FLOAT':
val = {explicitDouble: state.val};
break;
default:
val = state.val;
}
}
adapter.log.debug('setValue ' + JSON.stringify([tmp[2] + ':' + tmp[3], tmp[4], val]) + ' ' + type);
try {
if (rpcClient && connected) {
rpcClient.methodCall('setValue', [tmp[2] + ':' + tmp[3], tmp[4], val], (err/*, data*/) => {
if (err) {
adapter.log.error(adapter.config.type + 'rpc -> setValue ' + JSON.stringify([tmp[3], tmp[4], state.val]) + ' ' + type);
adapter.log.error(err);
}
});
} else {
adapter.log.warn('Cannot setValue "' + id + '", because not connected.');
}
} catch (err) {
adapter.log.error('Cannot call setValue: ' + err);
}
}
},
// Add messagebox Function for ioBroker.occ
message: obj => {
adapter.log.debug('[MSSG] Received: ' + JSON.stringify(obj));
if (obj.command === 'stopInstance') {
if (rpcServer && rpcServer.server) {
try {
rpcServer.server.close(() => {
console.log('server closed.');
rpcServer.server.unref();
});
} catch (e) {
//
}
}
if (rpcClient && rpcClient.socket) {
try {
rpcClient.socket.destroy();
} catch (e) {
//
}
}
// force close
setTimeout(() => adapter.terminate ? adapter.terminate() : process.exit(), 3000);
} else if (obj.message.params === undefined || obj.message.params === null) {
try {
if (rpcClient && connected) {
rpcClient.methodCall(obj.command, [obj.message.ID, obj.message.paramType], (err, data) => {
if (obj.callback) adapter.sendTo(obj.from, obj.command, {
result: data,
error: err
}, obj.callback);
});
} else {
adapter.log.warn('Cannot send "' + obj.command + '" "' + obj.message.ID + '": not connected');
if (obj.callback) adapter.sendTo(obj.from, obj.command, {error: 'not connected'}, obj.callback);
}
} catch (err) {
adapter.log.error('Cannot call ' + obj.command + ': ' + err);
adapter.sendTo(obj.from, obj.command, {error: err}, obj.callback);
}
} else {
try {
if (rpcClient && connected) {
rpcClient.methodCall(obj.command, [obj.message.ID, obj.message.paramType, obj.message.params], (err, data) => {
if (obj.callback) adapter.sendTo(obj.from, obj.command, {
result: data,
error: err
}, obj.callback);
});
} else {
adapter.log.warn('Cannot send "' + obj.command + '" "' + obj.message.ID + '": not connected');
if (obj.callback) adapter.sendTo(obj.from, obj.command, {error: 'not connected'}, obj.callback);
}
} catch (err) {
adapter.log.error('Cannot call ' + obj.command + ': ' + err);
adapter.sendTo(obj.from, obj.command, {error: err}, obj.callback);
}
}
},
unload: callback => {
try {
if (eventInterval) {
clearInterval(eventInterval);
eventInterval = null;
}
if (connInterval) {
clearInterval(connInterval);
connInterval = null;
}
if (connTimeout) {
clearTimeout(connTimeout);
connTimeout = null;
}
if (adapter.config && rpcClient) {
adapter.log.info(adapter.config.type + 'rpc -> ' + adapter.config.homematicAddress + ':' + adapter.config.homematicPort + homematicPath + ' init ' + JSON.stringify([daemonURL, '']));
try {
rpcClient.methodCall('init', [daemonURL, ''], (/*err, data*/) => {
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
}
if (callback) callback();
callback = null;
});
} catch (err) {
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
}
adapter.log.error('Cannot call init [' + daemonURL + ', ""]: ' + err);
if (callback) callback();
callback = null;
}
} else {
if (callback) callback();
callback = null;
}
} catch (e) {
if (adapter && adapter.log) {
adapter.log.error('Unload error: ' + e);
} else {
console.log(e);
}
if (callback) callback();
callback = null;
}
}
});
adapter = new utils.Adapter(options);
return adapter;
}
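// Module-level RPC state: the chosen rpc implementation (bin or xml), client
// and server handles, cached paramset meta data and per-datapoint type info
// used for value scaling, plus the reconnect and keep-alive bookkeeping.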
let rpc;
let rpcClient;
let rpcServer;
const metaValues = {};
let metaRoles = {};
const dpTypes = {};
let lastEvent = 0;
let eventInterval;
let connInterval;
let connTimeout;
let daemonURL = '';
let daemonProto = '';
let homematicPath;
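// Entry point, called from 'ready': sanitize the configured intervals, pick
// the RPC flavour, preload paramset descriptions and role mappings from the
// object DB and then start the RPC server.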
function main() {
homematicPath = adapter.config.daemon === 'virtual-devices' ? '/groups/' : '/';
adapter.config.reconnectInterval = parseInt(adapter.config.reconnectInterval, 10) || 30;
if (adapter.config.reconnectInterval < 10) {
adapter.log.error('Reconnect interval is less than 10 seconds. Set reconnect interval to 10 seconds.');
adapter.config.reconnectInterval = 10;
}
adapter.config.checkInitInterval = parseInt(adapter.config.checkInitInterval, 10);
if (adapter.config.checkInitInterval < 10) {
adapter.log.error('Check init interval is less than 10 seconds. Set init interval to 10 seconds.');
adapter.config.checkInitInterval = 10;
}
adapter.setState('info.connection', false, true);
if (adapter.config.type === 'bin') {
rpc = require('binrpc');
daemonProto = 'xmlrpc_bin://';
} else {
rpc = require('homematic-xmlrpc');
adapter.config.type = 'xml';
daemonProto = adapter.config.useHttps ? 'https://' : 'http://';
}
// Load VALUE paramsetDescriptions (needed to create state objects)
adapter.objects.getObjectView('hm-rpc', 'paramsetDescription', {
startkey: 'hm-rpc.meta.VALUES',
endkey: 'hm-rpc.meta.VALUES.\u9999'
}, function handleValueParamSetDescriptions(err, doc) {
if (err) adapter.log.error('getObjectView hm-rpc: ' + err);
if (doc && doc.rows) {
for (const row of doc.rows) {
const channel = row.id.slice(19);
metaValues[channel] = row.value.native;
}
}
// Load common.role assignments
adapter.getForeignObject('hm-rpc.meta.roles', (err, res) => {
if (err) adapter.log.error('hm-rpc.meta.roles: ' + err);
if (res) metaRoles = res.native;
// Start Adapter
if (adapter.config) initRpcServer();
});
});
adapter.objects.getObjectView('system', 'state', {
startkey: adapter.namespace,
endkey: adapter.namespace + '\u9999'
}, function handleStateViews(err, res) {
if (!err && res && res.rows) {
for (const row of res.rows) {
if (row.id === adapter.namespace + '.updated') continue;
if (!row.value.native) {
adapter.log.warn('State ' + row.id + ' has no native part.');
dpTypes[row.id] = {UNIT: '', TYPE: ''};
} else {
dpTypes[row.id] = {
UNIT: row.value.native.UNIT,
TYPE: row.value.native.TYPE,
MIN: row.value.native.MIN,
MAX: row.value.native.MAX
};
if (typeof dpTypes[row.id].MIN === 'number') {
dpTypes[row.id].MIN = parseFloat(dpTypes[row.id].MIN);
dpTypes[row.id].MAX = parseFloat(dpTypes[row.id].MAX);
if (dpTypes[row.id].UNIT === '100%') {
dpTypes[row.id].UNIT = '%';
}
if (dpTypes[row.id].MAX === 99) {
dpTypes[row.id].MAX = 100;
} else if (dpTypes[row.id].MAX === 1.005) {
dpTypes[row.id].MAX = 1;
}
} // endIf
}
}
}
});
}
function sendInit() {
try {
if (rpcClient && (rpcClient.connected === undefined || rpcClient.connected)) {
adapter.log.debug(adapter.config.type + 'rpc -> ' + adapter.config.homematicAddress + ':' + adapter.config.homematicPort + homematicPath + ' init ' + JSON.stringify([daemonURL, adapter.namespace]));
rpcClient.methodCall('init', [daemonURL, adapter.namespace], (err/*, data*/) => {
if (!err) {
if (adapter.config.daemon === 'CUxD') {
getCuxDevices(function handleCuxDevices(err2) {
if (!err2) {
updateConnection();
} else {
adapter.log.error('getCuxDevices error: ' + err2);
}
});
} else {
updateConnection();
}
} else {
adapter.log.error('init error: ' + err);
}
});
}
} catch (err) {
adapter.log.error('Init not possible, going to stop: ' + err);
adapter.stop();
}
}
function sendPing() {
if (rpcClient) {
adapter.log.debug('Send PING...');
try {
rpcClient.methodCall('ping', [adapter.namespace], (err/*, data*/) => {
if (!err) {
adapter.log.debug('PING ok');
} else {
adapter.log.error('Ping error: ' + err);
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
connect();
}
}
});
} catch (err) {
adapter.log.error('Cannot call ping [' + adapter.namespace + ']: ' + err);
}
} else {
adapter.log.warn('Called PING, but client does not exist');
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
connect();
}
}
}
function initRpcServer() {
adapter.config.homematicPort = parseInt(adapter.config.homematicPort, 10);
adapter.config.port = parseInt(adapter.config.port, 10);
adapter.config.useHttps = adapter.config.useHttps || false;
//adapterPort was introduced in v1.0.1. If not set yet then try 2000
const adapterPort = parseInt(adapter.config.port || adapter.config.homematicPort, 10) || 2000;
const callbackAddress = adapter.config.callbackAddress || adapter.config.adapterAddress;
adapter.getPort(adapterPort, port => {
daemonURL = daemonProto + callbackAddress + ':' + port;
rpcServer = rpc.createServer({
host: adapter.config.adapterAddress,
port: port
});
adapter.log.info(adapter.config.type + 'rpc server is trying to listen on ' + adapter.config.adapterAddress + ':' + port);
adapter.log.info(adapter.config.type + 'rpc client is trying to connect to ' + adapter.config.homematicAddress + ':' + adapter.config.homematicPort + homematicPath + ' with ' + JSON.stringify([daemonURL, adapter.namespace]));
connect(true);
rpcServer.on('NotFound', (method, params) => adapter.log.warn(adapter.config.type + 'rpc <- undefined method ' + method + ' ' + JSON.stringify(params).slice(0, 80)));
rpcServer.on('system.multicall', (method, params, callback) => {
updateConnection();
const response = [];
for (const param of params[0]) {
if (methods[param.methodName]) {
adapter.log.debug(`${adapter.config.type} multicall <${param.methodName}>: ${param.params}`);
response.push(methods[param.methodName](null, param.params));
} else {
response.push('');
}
}
callback(null, response);
});
rpcServer.on('system.listMethods', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on system.listMethods: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- system.listMethods ' + JSON.stringify(params));
callback(null, ['event', 'deleteDevices', 'listDevices', 'newDevices', 'system.listMethods', 'system.multicall', 'setReadyConfig']);
});
rpcServer.on('event', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on event: ' + err);
}
updateConnection();
try {
callback(null, methods.event(err, params));
} catch (err) {
adapter.log.error('Cannot respond to event: ' + err);
}
});
rpcServer.on('newDevices', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on newDevices: ' + err);
}
const newDevices = params[1];
adapter.log.info(adapter.config.type + 'rpc <- newDevices ' + newDevices.length);
// for an HmIP adapter we have to filter out the devices that
// are already present if forceReInit is not set
if (adapter.config.forceReInit === false && adapter.config.daemon === 'HMIP') {
adapter.objects.getObjectView('hm-rpc', 'listDevices', {
startkey: 'hm-rpc.' + adapter.instance + '.',
endkey: 'hm-rpc.' + adapter.instance + '.\u9999'
}, (err, doc) => {
if (doc && doc.rows) {
for (const row of doc.rows) {
if (row.id === adapter.namespace + '.updated') continue;
// let's get the device description
const val = row.value;
if (typeof val.ADDRESS === 'undefined') continue;
// let's find the current device in the newDevices array;
// if it no longer exists there, we can delete it
let index = -1;
for (let j = 0; j < newDevices.length; j++) {
if (newDevices[j].ADDRESS === val.ADDRESS && newDevices[j].VERSION === val.VERSION) {
index = j;
break;
}
}
// if index is -1, then newDevices no longer contains the
// device with address val.ADDRESS, so we can delete it
if (index === -1) {
if (val.ADDRESS && !adapter.config.dontDelete) {
if (val.ADDRESS.indexOf(':') !== -1) {
const address = val.ADDRESS.replace(':', '.').replace(FORBIDDEN_CHARS, '_');
const parts = address.split('.');
adapter.deleteChannel(parts[parts.length - 2], parts[parts.length - 1]);
adapter.log.info('obsolete channel ' + address + ' ' + JSON.stringify(address) + ' deleted');
} else {
adapter.deleteDevice(val.ADDRESS);
adapter.log.info('obsolete device ' + val.ADDRESS + ' deleted');
}
}
} else {
// we can remove the item at index because it is already registered
// to ioBroker
newDevices.splice(index, 1);
}
}
}
adapter.log.info('new HmIP devices/channels after filter: ' + newDevices.length);
createDevices(newDevices, callback);
});
} else {
createDevices(newDevices, callback);
}
});
rpcServer.on('listDevices', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on listDevices: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- listDevices ' + JSON.stringify(params));
adapter.objects.getObjectView('hm-rpc', 'listDevices', {
startkey: 'hm-rpc.' + adapter.instance + '.',
endkey: 'hm-rpc.' + adapter.instance + '.\u9999'
}, (err, doc) => {
const response = [];
// we only fill the response if this isn't a force reinit and
// if the adapter instance is not bothering with HmIP (which seems to work slightly differently in terms of XML-RPC)
if (!adapter.config.forceReInit && adapter.config.daemon !== 'HMIP' && doc && doc.rows) {
for (let i = 0; i < doc.rows.length; i++) {
if (doc.rows[i].id === adapter.namespace + '.updated') continue;
const val = doc.rows[i].value;
/*if (val.PARENT_TYPE) {
channelParams[val.ADDRESS] = val.PARENT_TYPE + '.' + val.TYPE + '.' + val.VERSION;
}*/
if (val.ADDRESS) response.push({ADDRESS: val.ADDRESS, VERSION: val.VERSION});
}
}
adapter.log.info(adapter.config.type + 'rpc -> ' + response.length + ' devices');
//log.info(JSON.stringify(response));
try {
for (let r = response.length - 1; r >= 0; r--) {
if (!response[r].ADDRESS) {
adapter.log.warn(adapter.config.type + 'rpc -> found empty entry at position ' + r + ' !');
response.splice(r, 1);
}
}
callback(null, response);
} catch (err) {
adapter.log.error('Cannot respond to listDevices: ' + err);
require('fs').writeFileSync(__dirname + '/problem.json', JSON.stringify(response));
}
});
});
rpcServer.on('deleteDevices', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on deleteDevices: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- deleteDevices ' + params[1].length);
for (let i = 0; i < params[1].length; i++) {
if (params[1][i].indexOf(':') !== -1) {
params[1][i] = params[1][i].replace(':', '.').replace(FORBIDDEN_CHARS, '_');
adapter.log.info('channel ' + params[1][i] + ' ' + JSON.stringify(params[1][i]) + ' deleted');
const parts = params[1][i].split('.');
adapter.deleteChannel(parts[parts.length - 2], parts[parts.length - 1]);
} else {
adapter.log.info('device ' + params[1][i] + ' deleted');
adapter.deleteDevice(params[1][i]);
}
}
try {
callback(null, '');
} catch (err) {
adapter.log.error('Cannot respond to deleteDevices: ' + err);
}
});
rpcServer.on('setReadyConfig', (err, params, callback) => {
if (err) {
adapter.log.warn('Error on setReadyConfig: ' + err);
}
adapter.log.info(adapter.config.type + 'rpc <- setReadyConfig ' + JSON.stringify(params));
try {
callback(null, '');
} catch (err) {
adapter.log.error('Cannot respond to setReadyConfig: ' + err);
}
});
});
}
const methods = {
event: function (err, params) {
adapter.log.debug(adapter.config.type + 'rpc <- event ' + JSON.stringify(params));
let val;
// CUxD ignores all prefixes!!
if (params[0] === 'CUxD' || params[0].indexOf(adapter.name) === -1) {
params[0] = adapter.namespace;
}
const channel = params[1].replace(':', '.').replace(FORBIDDEN_CHARS, '_');
const name = params[0] + '.' + channel + '.' + params[2];
if (dpTypes[name]) {
if (dpTypes[name].MIN !== undefined && dpTypes[name].UNIT === '%') {
val = ((parseFloat(params[3]) - dpTypes[name].MIN) / (dpTypes[name].MAX - dpTypes[name].MIN)) * 100;
val = Math.round(val * 100) / 100;
} else if (dpTypes[name].UNIT === '100%' || (dpTypes[name].UNIT === '%' && dpTypes[name].MAX === 1)) {
val = params[3] * 100;
} else {
val = params[3];
}
} else {
val = params[3];
}
adapter.log.debug(name + ' ==> UNIT: "' + (dpTypes[name] ? dpTypes[name].UNIT : 'none') + '" (min: ' + (dpTypes[name] ? dpTypes[name].MIN : 'none') + ', max: ' + (dpTypes[name] ? dpTypes[name].MAX : 'none') + ') From "' + params[3] + '" => "' + val + '"');
adapter.setState(channel + '.' + params[2], {val: val, ack: true});
return '';
}
};
const queueValueParamsets = [];
function addParamsetObjects(channel, paramset, callback) {
const channelChildren = [];
const promises = [];
for (const key in paramset) {
if (!paramset.hasOwnProperty(key)) continue;
channelChildren.push(channel._id + '.' + key);
const commonType = {
ACTION: 'boolean',
BOOL: 'boolean',
FLOAT: 'number',
ENUM: 'number',
INTEGER: 'number',
STRING: 'string',
EPAPER_LINE: 'string',
EPAPER_ICON: 'string',
EPAPER_TONE: 'string'
};
const obj = {
type: 'state',
common: {
def: paramset[key].DEFAULT,
type: commonType[paramset[key].TYPE] || paramset[key].TYPE || '',
read: !!(paramset[key].OPERATIONS & 1),
write: !!(paramset[key].OPERATIONS & 2)
},
native: paramset[key]
};
if (obj.common.type === 'number') {
obj.common.min = paramset[key].MIN;
obj.common.max = paramset[key].MAX;
if (paramset[key].TYPE === 'ENUM') {
obj.common.states = {};
for (let i = 0; i < paramset[key].VALUE_LIST.length; i++) {
obj.common.states[i] = paramset[key].VALUE_LIST[i];
}
}
if (paramset[key].SPECIAL) {
if (!obj.common.states) obj.common.states = {};
for (let i = 0; i < paramset[key].SPECIAL.length; i++) {
obj.common.states[paramset[key].SPECIAL[i].VALUE] = paramset[key].SPECIAL[i].ID;
}
}
}
if (paramset[key].STATES) {
obj.common.states = paramset[key].STATES;
}
if (paramset[key].UNIT === '100%') {
obj.common.unit = '%';
obj.common.max = 100 * paramset[key].MAX;
} else if (paramset[key].UNIT !== '') {
obj.common.unit = paramset[key].UNIT;
if (obj.common.unit === '�C' || obj.common.unit === '°C') {
obj.common.unit = '°C';
} else if (obj.common.unit === '�F' || obj.common.unit === '°F') {
obj.common.unit = '°F';
}
}
if (metaRoles.dpCONTROL && metaRoles.dpCONTROL[obj.native.CONTROL]) {
obj.common.role = metaRoles.dpCONTROL[obj.native.CONTROL];
} else if (metaRoles.chTYPE_dpNAME && metaRoles.chTYPE_dpNAME[channel.native.TYPE + '.' + key]) {
obj.common.role = metaRoles.chTYPE_dpNAME[channel.native.TYPE + '.' + key];
} else if (metaRoles.dpNAME && metaRoles.dpNAME[key]) {
obj.common.role = metaRoles.dpNAME[key];
}
if (obj.common.role === 'state' && obj.common.write) {
obj.common.role = 'switch';
}
if (obj.common.role === 'level.color.hue') {
obj.common.max = 200;
}
if (obj.common.role === 'value.rssi') {
obj.common.unit = 'dBm';
}
if (obj.common.role === 'value.voltage') {
obj.common.unit = 'V';
}
if (paramset[key].OPERATIONS & 8) {
obj.common.role = 'indicator.service';
}
// specify which value is LOCK
if (obj.native.CONTROL === 'LOCK.STATE') {
obj.native.LOCK_VALUE = false;
obj.common.role = 'switch.lock';
} else if (obj.native.CONTROL === 'DOOR_SENSOR.STATE') {
obj.common.role = 'value.window';
}
if (typeof obj.common.role !== 'string' && typeof obj.common.role !== 'undefined') {
throw new Error('Unexpected typeof obj.common.role: ' + typeof obj.common.role);
}
const dpID = adapter.namespace + '.' + channel._id + '.' + key;
dpTypes[dpID] = {
UNIT: paramset[key].UNIT,
TYPE: paramset[key].TYPE,
MIN: paramset[key].MIN,
MAX: paramset[key].MAX
};
if (typeof dpTypes[dpID].MIN === 'number') {
dpTypes[dpID].MIN = parseFloat(dpTypes[dpID].MIN);
dpTypes[dpID].MAX = parseFloat(dpTypes[dpID].MAX);
// Humidity is reported with MAX 99, which is wrong; normalize it to 100.
if (dpTypes[dpID].MAX === 99) {
dpTypes[dpID].MAX = 100;
}
if (dpTypes[dpID].UNIT === '100%') {
dpTypes[dpID].UNIT = '%';
}
}
if (key === 'LEVEL' && paramset.WORKING) {
obj.common.workingID = 'WORKING';
}
promises.push(new Promise(resolve => {
adapter.extendObject(channel._id + '.' + key, obj, (err, res) => {
if (!err) {
adapter.log.debug('object ' + res.id + ' extended');
} else {
adapter.log.error('object ' + (res ? res.id : '?') + ' extend ' + err);
}
resolve();
});
}));
} // endFor
Promise.all(promises).then(() => callback());
} // endAddParamsetObjects
function getValueParamsets() {
if (queueValueParamsets.length === 0) {
// Inform hm-rega about new devices
adapter.setState('updated', true, false);
// Reset the forceReInit flag once the full re-initialization has run
if (adapter.config.forceReInit) {
adapter.extendForeignObject('system.adapter.' + adapter.namespace, {native: {forceReInit: false}});
}
return;
}
const obj = queueValueParamsets.pop();
const cid = `${obj.native.PARENT_TYPE}.${obj.native.TYPE}.${obj.native.VERSION}`;
adapter.log.debug(`getValueParamsets ${cid}`);
// if meta values are cached for the e-paper display, we extend these cached meta values with the e-paper states
if (obj.native && obj.native.PARENT_TYPE === 'HM-Dis-EP-WM55' && obj.native.TYPE === 'MAINTENANCE') {
addEPaperToMeta();
}
if (metaValues[cid]) {
adapter.log.debug('paramset cache hit');
addParamsetObjects(obj, metaValues[cid], () => setImmediate(getValueParamsets));
} else {
const key = `hm-rpc.meta.VALUES.${cid}`;
adapter.getForeignObject(key, (err, res) => {
if (res && res.native) {
adapter.log.debug(`${key} found`);
metaValues[cid] = res.native;
if (obj.native && obj.native.PARENT_TYPE === 'HM-Dis-EP-WM55' && obj.native.TYPE === 'MAINTENANCE') {
addEPaperToMeta();
}
addParamsetObjects(obj, metaValues[cid], () => setImmediate(getValueParamsets));
} else {
adapter.log.info(adapter.config.type + 'rpc -> getParamsetDescription ' + JSON.stringify([obj.native.ADDRESS, 'VALUES']));
try {
rpcClient.methodCall('getParamsetDescription', [obj.native.ADDRESS, 'VALUES'], (err, res) => {
if (err) {
adapter.log.error(`Error on getParamsetDescription: ${err}`);
} else {
metaValues[cid] = res;
if (obj.native && obj.native.PARENT_TYPE === 'HM-Dis-EP-WM55' && obj.native.TYPE === 'MAINTENANCE') {
addEPaperToMeta();
}
const paramset = {
'type': 'meta',
'meta': {
adapter: 'hm-rpc',
type: 'paramsetDescription'
},
'common': {},
'native': metaValues[cid]
};
if (res) {
// if not empty
for (const attr in res) {
if (res.hasOwnProperty(attr)) {
adapter.log.warn(`Send this info to developer: "_id": "${key}"`);
adapter.log.warn(`Send this info to developer: ${JSON.stringify(paramset)}`);
break;
}
}
}
adapter.setForeignObject(key, paramset, () => {
addParamsetObjects(obj, metaValues[cid], () => {
setImmediate(getValueParamsets);
});
});
}
});
} catch (err) {
adapter.log.error(`Cannot call getParamsetDescription: ${err}`);
}
}
});
}
}
function addEPaperToMeta() {
// Check all versions from 9 to 12
for (let i = 9; i < 13; i++) {
const id = `HM-Dis-EP-WM55.MAINTENANCE.${i}`;
if (!metaValues[id] || !metaValues[id].EPAPER_LINE2) {
// Add the EPAPER States to the Maintenance channel if they are non-existent
metaValues[id] = metaValues[id] || {};
adapter.log.debug(`[EPAPER] Add E-Paper to Meta on ${JSON.stringify(metaValues[id])}`);
const obj = metaValues[id];
obj.EPAPER_LINE2 = {
TYPE: 'EPAPER_LINE',
ID: 'LINE2',
OPERATIONS: 2
};
obj.EPAPER_ICON2 = {
TYPE: 'EPAPER_ICON',
ID: 'ICON2',
STATES: {
'': 'Empty',
'0x80': 'OFF',
'0x81': 'ON',
'0x82': 'Opened',
'0x83': 'Closed',
'0x84': 'error',
'0x85': 'All OK',
'0x86': 'Information',
'0x87': 'New message',
'0x88': 'Service message'
},
OPERATIONS: 2
};
obj.EPAPER_LINE3 = {
TYPE: 'EPAPER_LINE',
ID: 'LINE3',
OPERATIONS: 2
};
obj.EPAPER_ICON3 = {
TYPE: 'EPAPER_ICON',
ID: 'ICON3',
STATES: {
'': 'Empty',
'0x80': 'OFF',
'0x81': 'ON',
'0x82': 'Opened',
'0x83': 'Closed',
'0x84': 'error',
'0x85': 'All OK',
'0x86': 'Information',
'0x87': 'New message',
'0x88': 'Service message'
},
OPERATIONS: 2
};
obj.EPAPER_LINE4 = {
TYPE: 'EPAPER_LINE',
ID: 'LINE4',
OPERATIONS: 2
};
obj.EPAPER_ICON4 = {
TYPE: 'EPAPER_ICON',
ID: 'ICON4',
STATES: {
'': 'Empty',
'0x80': 'OFF',
'0x81': 'ON',
'0x82': 'Opened',
'0x83': 'Closed',
'0x84': 'error',
'0x85': 'All OK',
'0x86': 'Information',
'0x87': 'New message',
'0x88': 'Service message'
},
OPERATIONS: 2
};
obj.EPAPER_SIGNAL = {
TYPE: 'EPAPER_SIGNAL',
ID: 'EPAPER_SIGNAL',
STATES: {
'0xF0': 'OFF',
'0xF1': 'Red blink',
'0xF2': 'Green blink',
'0xF3': 'Orange blink'
},
OPERATIONS: 2
};
obj.EPAPER_TONE = {
TYPE: 'EPAPER_TONE',
ID: 'EPAPER_TONE',
STATES: {
'0xC0': 'Off',
'0xC1': 'Long Long',
'0xC2': 'Long Short',
'0xC3': 'Long Short Short',
'0xC4': 'Short',
'0xC5': 'Short Short',
'0xC6': 'Long',
'0xC7': '7',
'0xC9': '9',
'0xCA': 'A'
},
OPERATIONS: 2
};
}
}
}
function createDevices(deviceArr, callback) {
const objs = [];
for (const device of deviceArr) {
let type;
let role;
let icon;
if (device.PARENT) {
type = 'channel';
role = metaRoles.chTYPE && metaRoles.chTYPE[device.TYPE] ? metaRoles.chTYPE && metaRoles.chTYPE[device.TYPE] : undefined;
} else {
type = 'device';
if (!images[device.TYPE]) {
adapter.log.warn('No image for "' + device.TYPE + '" found.');
}
icon = images[device.TYPE] ? ('/icons/' + images[device.TYPE]) : '';
}
const obj = {
_id: device.ADDRESS.replace(':', '.').replace(FORBIDDEN_CHARS, '_'),
type: type,
common: {
// FIXME strange bug - LEVEL and WORKING datapoint of Dimmers have name of first dimmer device?!?
name: device.ADDRESS,
role: role
},
native: device
};
if (icon) obj.common.icon = icon;
const dpID = `${adapter.namespace}.${obj._id}`;
dpTypes[dpID] = {
UNIT: device.UNIT,
TYPE: device.TYPE,
MAX: device.MAX,
MIN: device.MIN,
role: role
};
if (typeof dpTypes[dpID].MIN === 'number') {
dpTypes[dpID].MIN = parseFloat(dpTypes[dpID].MIN);
dpTypes[dpID].MAX = parseFloat(dpTypes[dpID].MAX);
// Humidity is from 0 to 99. It is wrong.
if (dpTypes[dpID].MAX === 99) {
dpTypes[dpID].MAX = 100;
}
// Soemtimes unit is 100%, sometimes % it's the same
if (dpTypes[dpID].UNIT === '100%') {
dpTypes[dpID].UNIT = '%';
}
}
objs.push(obj);
}
function queue() {
if (objs.length) {
const obj = objs.pop();
if (metaRoles.dvTYPE && obj.native && metaRoles.dvTYPE[obj.native.PARENT_TYPE]) {
obj.common.role = metaRoles.dvTYPE[obj.native.PARENT_TYPE];
}
adapter.setObject(obj._id, obj, (err, res) => {
if (!err) {
adapter.log.debug('object ' + res.id + ' created');
} else {
adapter.log.error('object ' + (res ? res.id : '?') + ' error on creation: ' + err);
}
setImmediate(queue);
});
if (obj.type === 'channel') {
queueValueParamsets.push(obj);
}
} else {
getValueParamsets();
callback(null, '');
}
}
queue();
}
function getCuxDevices(callback) {
if (rpcClient) {
// request devices from CUxD
try {
rpcClient.methodCall('listDevices', [], (err, newDevices) => {
if (err) {
adapter.log.error('Error on listDevices: ' + err);
return;
}
adapter.log.info(adapter.config.type + 'rpc -> listDevices ' + newDevices.length);
if (adapter.config.forceReInit === false) {
adapter.objects.getObjectView('hm-rpc', 'listDevices', {
startkey: 'hm-rpc.' + adapter.instance + '.',
endkey: 'hm-rpc.' + adapter.instance + '.\u9999'
}, (err, doc) => {
if (doc && doc.rows) {
for (const row of doc.rows) {
if (row.id === adapter.namespace + '.updated') continue;
// lets get the device description
const val = row.value;
if (typeof val.ADDRESS === 'undefined') continue;
// lets find the current device in the newDevices array
// and if it doesn't exist we can delete it
let index = -1;
for (let j = 0; j < newDevices.length; j++) {
if (newDevices[j].ADDRESS === val.ADDRESS && newDevices[j].VERSION === val.VERSION) {
index = j;
break;
}
}
// if index is -1 than the newDevices doesn't have the
// device with address val.ADDRESS anymore, thus we can delete it
if (index === -1) {
if (val.ADDRESS && !adapter.config.dontDelete) {
if (val.ADDRESS.indexOf(':') !== -1) {
const address = val.ADDRESS.replace(':', '.').replace(FORBIDDEN_CHARS, '_');
const parts = address.split('.');
adapter.deleteChannel(parts[parts.length - 2], parts[parts.length - 1]);
adapter.log.info('obsolete channel ' + address + ' ' + JSON.stringify(address) + ' deleted');
} else {
adapter.deleteDevice(val.ADDRESS);
adapter.log.info('obsolete device ' + val.ADDRESS + ' deleted');
}
}
} else {
// we can remove the item at index because it is already registered
// to ioBroker
newDevices.splice(index, 1);
}
}
}
adapter.log.info('new CUxD devices/channels after filter: ' + newDevices.length);
createDevices(newDevices, callback);
});
} else {
createDevices(newDevices, callback);
}
});
} catch (err) {
adapter.log.error('Cannot call listDevices: ' + err);
}
} else {
callback && callback();
}
}
function updateConnection() {
lastEvent = new Date().getTime();
if (!connected) {
adapter.log.info('Connected');
connected = true;
adapter.setState('info.connection', true, true);
}
if (connInterval) {
adapter.log.debug('clear connecting interval');
clearInterval(connInterval);
connInterval = null;
}
if (connTimeout) {
adapter.log.debug('clear connecting timeout');
clearTimeout(connTimeout);
connTimeout = null;
}
// Virtual Devices API does not support PING
if (!eventInterval && adapter.config.daemon !== 'virtual-devices') {
adapter.log.debug('start ping interval');
eventInterval = setInterval(keepAlive, adapter.config.checkInitInterval * 1000 / 2);
}
}
function connect(isFirst) {
if (!rpcClient && !adapter.config.useHttps) {
rpcClient = rpc.createClient({
host: adapter.config.homematicAddress,
port: adapter.config.homematicPort,
path: homematicPath,
reconnectTimeout: adapter.config.reconnectInterval * 1000
});
// If we have bin-rpc, only need it here because bin-rpc cant have https
if (rpcClient.on) {
rpcClient.on('error', err => {
adapter.log.error(`Socket error: ${err}`);
});
} // endIf
// if bin-rpc
/* if (rpcClient.on) {
rpcClient.on('connect', function (err) {
sendInit();
});
rpcClient.on('close', function () {
adapter.log.debug('Socket closed.');
if (connected) {
adapter.log.info('Disconnected');
connected = false;
adapter.setState('info.connection', false, true);
}
if (eventInterval) {
adapter.log.debug('clear ping interval');
clearInterval(eventInterval);
eventInterval = null;
}
// clear queue
if (rpcClient.queue) {
while (rpcClient.queue.length) {
rpcClient.queue.pop();
}
rpcClient.pending = false;
}
if (!connTimeout) {
connTimeout = setTimeout(connect, adapter.config.reconnectInterval * 1000);
}
});
}*/
} else if (!rpcClient) {
adapter.getForeignObject('system.config', (err, obj) => {
let password;
let username;
if (obj && obj.native && obj.native.secret) {
password = crypto.decrypt(obj.native.secret, adapter.config.password);
username = crypto.decrypt(obj.native.secret, adapter.config.username);
} else {
password = crypto.decrypt('Zgfr56gFe87jJOM', adapter.config.password);
username = crypto.decrypt('Zgfr56gFe87jJOM', adapter.config.username);
} // endElse
rpcClient = rpc.createSecureClient({
host: adapter.config.homematicAddress,
port: adapter.config.homematicPort,
path: homematicPath,
reconnectTimeout: adapter.config.reconnectInterval * 1000,
basic_auth: {user: username, pass: password},
rejectUnauthorized: false
});
});
} // endElseIf
connTimeout = null;
adapter.log.debug('Connect...');
if (eventInterval) {
adapter.log.debug('clear ping interval');
clearInterval(eventInterval);
eventInterval = null;
}
if (isFirst) sendInit();
// Periodically try to reconnect
if (!connInterval) {
adapter.log.debug('start connecting interval');
connInterval = setInterval(() => sendInit(), adapter.config.reconnectInterval * 1000);
}
}
function keepAlive() {
adapter.log.debug('[KEEPALIVE] Check if connection is alive');
if (connInterval) {
clearInterval(connInterval);
connInterval = null;
}
const _now = Date.now();
// Check last event time. If timeout => send init again
if (!lastEvent || (_now - lastEvent) >= adapter.config.checkInitInterval * 1000) {
adapter.log.debug('[KEEPALIVE] Connection timed out, initializing new connection');
connect();
} else {
sendPing();
}
} // endKeepAlive
// If started as allInOne/compact mode => return function to create instance
if (module && module.parent) {
module.exports = startAdapter;
} else {
// or start the instance directly
startAdapter();
} // endElse
| rm legacy code
| hm-rpc.js | rm legacy code | <ide><path>m-rpc.js
<ide> const queueValueParamsets = [];
<ide>
<ide> function addParamsetObjects(channel, paramset, callback) {
<del> const channelChildren = [];
<ide> const promises = [];
<ide>
<ide> for (const key in paramset) {
<ide> if (!paramset.hasOwnProperty(key)) continue;
<del> channelChildren.push(channel._id + '.' + key);
<ide> const commonType = {
<ide> ACTION: 'boolean',
<ide> BOOL: 'boolean', |
|
JavaScript | mit | d87ae5179038ff0c0c9787129459c54a39742185 | 0 | senchalabs/CmdPackages,senchalabs/CmdPackages,senchalabs/CmdPackages,senchalabs/CmdPackages | /**
* The main colorful square for selecting color shades by dragging around the
* little circle.
*/
Ext.define('Ext.ux.colorpick.ColorMap', {
extend : 'Ext.container.Container',
alias : 'widget.colorpickercolormap',
controller : 'colorpickercolormapcontroller',
requires: [
'Ext.ux.colorpick.ColorMapController'
],
cls : 'x-colorpicker-colormap',
// This is the drag "circle"; note it's 1x1 in size to allow full
// travel around the color map; the inner div has the bigger image
items: [{
xtype : 'component',
cls : 'x-colorpicker-colormap-draghandle-container',
itemId : 'dragHandle',
width : 1,
height : 1,
draggable : true,
html: '<div class="x-colorpicker-colormap-draghandle"></div>'
}],
listeners : {
boxready : {
single : true,
fn : 'onFirstBoxReady',
scope : 'controller'
},
colorbindingchanged: {
fn : 'onColorBindingChanged',
scope : 'controller'
},
huebindingchanged: {
fn : 'onHueBindingChanged',
scope : 'controller'
}
},
afterRender: function () {
var me = this,
src = me.mapGradientUrl,
el = me.el;
me.callParent();
if (!src) {
// We do this trick to allow the Sass to calculate resource image path for
// our package and pick up the proper image URL here.
src = el.getStyle('background-image');
src = src.substring(4, src.length - 1); // strip off outer "url(...)"
// In IE8 this path will have quotes around it
if (src.indexOf('"') === 0) {
src = src.substring(1, src.length-1);
}
// Then remember it on our prototype for any subsequent instances.
Ext.ux.colorpick.ColorMap.prototype.mapGradientUrl = src;
}
// Now clear that style because it will conflict with the background-color
el.setStyle('background-image', 'none');
// Create the image with transparent PNG with black and white gradient shades;
// it blends with the background color (which changes with hue selection). This
// must be an IMG in order to properly stretch to fit.
el = me.layout.getElementTarget(); // the el for items and html
el.createChild({
tag: 'img',
cls: 'x-colorpicker-colormap-blender',
src: src
});
},
// Called via data binding whenever selectedColor changes; fires "colorbindingchanged"
setPosition: function(data) {
var me = this,
dragHandle = me.down('#dragHandle');
// Too early in the render cycle? Skip event
if (!dragHandle.dd || !dragHandle.dd.constrain) {
return;
}
// User actively dragging? Skip event
if (typeof dragHandle.dd.dragEnded !== 'undefined' && !dragHandle.dd.dragEnded) {
return;
}
me.fireEvent('colorbindingchanged', data);
},
// Called via data binding whenever selectedColor.h changes; fires "huebindingchanged" event
setHue: function(hue) {
var me = this;
// Too early in the render cycle? Skip event
if (!me.getEl()) {
return;
}
me.fireEvent('huebindingchanged', hue);
}
});
| packages/ext-ux-colorpick/src/ColorMap.js | /**
* The main colorful square for selecting color shades by dragging around the
* little circle.
*/
Ext.define('Ext.ux.colorpick.ColorMap', {
extend : 'Ext.container.Container',
alias : 'widget.colorpickercolormap',
controller : 'colorpickercolormapcontroller',
requires: [
'Ext.ux.colorpick.ColorMapController'
],
cls : 'x-colorpicker-colormap',
// This is the drag "circle"; note it's 1x1 in size to allow full
// travel around the color map; the inner div has the bigger image
items: [{
xtype : 'component',
cls : 'x-colorpicker-colormap-draghandle-container',
itemId : 'dragHandle',
width : 1,
height : 1,
draggable : true,
html: '<div class="x-colorpicker-colormap-draghandle"></div>'
}],
listeners : {
boxready : {
single : true,
fn : 'onFirstBoxReady',
scope : 'controller'
},
colorbindingchanged: {
fn : 'onColorBindingChanged',
scope : 'controller'
},
huebindingchanged: {
fn : 'onHueBindingChanged',
scope : 'controller'
}
},
afterRender: function () {
var me = this,
src = me.mapGradientUrl,
el = me.el;
me.callParent();
if (!src) {
// We do this trick to allow the Sass to calculate resource image path for
// our package and pick up the proper image URL here.
src = el.getStyle('background-image');
src = src.substring(4, src.length - 1); // strip off outer "url(...)"
// Then remember it on our prototype for any subsequent instances.
Ext.ux.colorpick.ColorMap.prototype.mapGradientUrl = src;
}
// Now clear that style because it will conflict with the background-color
el.setStyle('background-image', 'none');
// Create the image with transparent PNG with black and white gradient shades;
// it blends with the background color (which changes with hue selection). This
// must be an IMG in order to properly stretch to fit.
el = me.layout.getElementTarget(); // the el for items and html
el.createChild({
tag: 'img',
cls: 'x-colorpicker-colormap-blender',
src: src
});
},
// Called via data binding whenever selectedColor changes; fires "colorbindingchanged"
setPosition: function(data) {
var me = this,
dragHandle = me.down('#dragHandle');
// Too early in the render cycle? Skip event
if (!dragHandle.dd || !dragHandle.dd.constrain) {
return;
}
// User actively dragging? Skip event
if (typeof dragHandle.dd.dragEnded !== 'undefined' && !dragHandle.dd.dragEnded) {
return;
}
me.fireEvent('colorbindingchanged', data);
},
// Called via data binding whenever selectedColor.h changes; fires "huebindingchanged" event
setHue: function(hue) {
var me = this;
// Too early in the render cycle? Skip event
if (!me.getEl()) {
return;
}
me.fireEvent('huebindingchanged', hue);
}
});
| IE8 fix for ColorMap background image not showing
| packages/ext-ux-colorpick/src/ColorMap.js | IE8 fix for ColorMap background image not showing | <ide><path>ackages/ext-ux-colorpick/src/ColorMap.js
<ide> el = me.el;
<ide>
<ide> me.callParent();
<del>
<add>
<ide> if (!src) {
<ide> // We do this trick to allow the Sass to calculate resource image path for
<ide> // our package and pick up the proper image URL here.
<ide> src = el.getStyle('background-image');
<ide> src = src.substring(4, src.length - 1); // strip off outer "url(...)"
<add>
<add> // In IE8 this path will have quotes around it
<add> if (src.indexOf('"') === 0) {
<add> src = src.substring(1, src.length-1);
<add> }
<ide>
<ide> // Then remember it on our prototype for any subsequent instances.
<ide> Ext.ux.colorpick.ColorMap.prototype.mapGradientUrl = src; |
|
Java | apache-2.0 | 5c425dca2fc95ed718d13513de7a7319e20c5261 | 0 | alanfgates/hive,alanfgates/hive,sankarh/hive,vineetgarg02/hive,alanfgates/hive,lirui-apache/hive,vineetgarg02/hive,alanfgates/hive,jcamachor/hive,nishantmonu51/hive,sankarh/hive,jcamachor/hive,vineetgarg02/hive,lirui-apache/hive,lirui-apache/hive,alanfgates/hive,lirui-apache/hive,vineetgarg02/hive,b-slim/hive,jcamachor/hive,sankarh/hive,b-slim/hive,nishantmonu51/hive,b-slim/hive,anishek/hive,alanfgates/hive,jcamachor/hive,b-slim/hive,b-slim/hive,lirui-apache/hive,sankarh/hive,anishek/hive,sankarh/hive,jcamachor/hive,jcamachor/hive,vineetgarg02/hive,anishek/hive,nishantmonu51/hive,vineetgarg02/hive,lirui-apache/hive,nishantmonu51/hive,nishantmonu51/hive,anishek/hive,nishantmonu51/hive,sankarh/hive,vineetgarg02/hive,anishek/hive,lirui-apache/hive,lirui-apache/hive,sankarh/hive,b-slim/hive,alanfgates/hive,nishantmonu51/hive,anishek/hive,vineetgarg02/hive,nishantmonu51/hive,jcamachor/hive,anishek/hive,jcamachor/hive,nishantmonu51/hive,b-slim/hive,lirui-apache/hive,sankarh/hive,anishek/hive,vineetgarg02/hive,sankarh/hive,alanfgates/hive,jcamachor/hive,b-slim/hive,alanfgates/hive,anishek/hive,b-slim/hive | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.metadata;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import static org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
import static org.apache.hadoop.hive.ql.io.AcidUtils.getFullTableName;
import static org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.makeBinaryPredicate;
import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.sql.SQLIntegrityConstraintViolationException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.jdo.JDODataStoreException;
import com.google.common.collect.ImmutableList;
import org.apache.calcite.plan.RelOptMaterialization;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelVisitor;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rex.RexBuilder;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hive.common.*;
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
import org.apache.hadoop.hive.common.log.InPlaceUpdate;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.io.HdfsUtils;
import org.apache.hadoop.hive.metastore.HiveMetaException;
import org.apache.hadoop.hive.metastore.HiveMetaHook;
import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.PartitionDropOptions;
import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient;
import org.apache.hadoop.hive.metastore.SynchronizedMetaStoreClient;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.ReplChangeManager;
import org.apache.hadoop.hive.metastore.api.*;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.FunctionTask;
import org.apache.hadoop.hive.ql.exec.FunctionUtils;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.AcidUtils.TableSnapshot;
import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveAugmentMaterializationRule;
import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.DropPartitionDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.session.CreateTableAutomaticGrant;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.TxnIdUtils;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class has functions that implement meta data/DDL operations using calls
* to the metastore.
* It has a metastore client instance it uses to communicate with the metastore.
*
* It is a thread local variable, and the instances is accessed using static
* get methods in this class.
*/
@SuppressWarnings({"deprecation", "rawtypes"})
public class Hive {
static final private Logger LOG = LoggerFactory.getLogger("hive.ql.metadata.Hive");
private HiveConf conf = null;
private IMetaStoreClient metaStoreClient;
private SynchronizedMetaStoreClient syncMetaStoreClient;
private UserGroupInformation owner;
private boolean isAllowClose = true;
// metastore calls timing information
private final ConcurrentHashMap<String, Long> metaCallTimeMap = new ConcurrentHashMap<>();
// Static class to store thread local Hive object.
private static class ThreadLocalHive extends ThreadLocal<Hive> {
@Override
protected Hive initialValue() {
return null;
}
@Override
public synchronized void set(Hive hiveObj) {
Hive currentHive = this.get();
if (currentHive != hiveObj) {
// Remove/close current thread-local Hive object before overwriting with new Hive object.
remove();
super.set(hiveObj);
}
}
@Override
public synchronized void remove() {
Hive currentHive = this.get();
if (currentHive != null) {
// Close the metastore connections before removing it from thread local hiveDB.
currentHive.close(false);
super.remove();
}
}
}
private static ThreadLocalHive hiveDB = new ThreadLocalHive();
// Note that while this is an improvement over static initialization, it is still not,
// technically, valid, cause nothing prevents us from connecting to several metastores in
// the same process. This will still only get the functions from the first metastore.
private final static AtomicInteger didRegisterAllFuncs = new AtomicInteger(0);
private final static int REG_FUNCS_NO = 0, REG_FUNCS_DONE = 2, REG_FUNCS_PENDING = 1;
// register all permanent functions. need improvement
private void registerAllFunctionsOnce() throws HiveException {
boolean breakLoop = false;
while (!breakLoop) {
int val = didRegisterAllFuncs.get();
switch (val) {
case REG_FUNCS_NO: {
if (didRegisterAllFuncs.compareAndSet(val, REG_FUNCS_PENDING)) {
breakLoop = true;
break;
}
continue;
}
case REG_FUNCS_PENDING: {
synchronized (didRegisterAllFuncs) {
try {
didRegisterAllFuncs.wait(100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
continue;
}
case REG_FUNCS_DONE: return;
default: throw new AssertionError(val);
}
}
try {
reloadFunctions();
didRegisterAllFuncs.compareAndSet(REG_FUNCS_PENDING, REG_FUNCS_DONE);
} catch (Exception e) {
LOG.warn("Failed to register all functions.", e);
didRegisterAllFuncs.compareAndSet(REG_FUNCS_PENDING, REG_FUNCS_NO);
throw new HiveException(e);
} finally {
synchronized (didRegisterAllFuncs) {
didRegisterAllFuncs.notifyAll();
}
}
}
public void reloadFunctions() throws HiveException {
HashSet<String> registryFunctions = new HashSet<String>(
FunctionRegistry.getFunctionNames(".+\\..+"));
for (Function function : getAllFunctions()) {
String functionName = function.getFunctionName();
try {
LOG.info("Registering function " + functionName + " " + function.getClassName());
String qualFunc = FunctionUtils.qualifyFunctionName(functionName, function.getDbName());
FunctionRegistry.registerPermanentFunction(qualFunc, function.getClassName(), false,
FunctionTask.toFunctionResource(function.getResourceUris()));
registryFunctions.remove(qualFunc);
} catch (Exception e) {
LOG.warn("Failed to register persistent function " +
functionName + ":" + function.getClassName() + ". Ignore and continue.");
}
}
// unregister functions from local system registry that are not in getAllFunctions()
for (String functionName : registryFunctions) {
try {
FunctionRegistry.unregisterPermanentFunction(functionName);
} catch (Exception e) {
LOG.warn("Failed to unregister persistent function " +
functionName + "on reload. Ignore and continue.");
}
}
}
public static Hive get(Configuration c, Class<?> clazz) throws HiveException {
return get(c instanceof HiveConf ? (HiveConf)c : new HiveConf(c, clazz));
}
/**
* Gets hive object for the current thread. If one is not initialized then a
* new one is created If the new configuration is different in metadata conf
* vars, or the owner will be different then a new one is created.
*
* @param c
* new Hive Configuration
* @return Hive object for current thread
* @throws HiveException
*
*/
public static Hive get(HiveConf c) throws HiveException {
return getInternal(c, false, false, true);
}
/**
* Same as {@link #get(HiveConf)}, except that it checks only the object identity of existing
* MS client, assuming the relevant settings would be unchanged within the same conf object.
*/
public static Hive getWithFastCheck(HiveConf c) throws HiveException {
return getWithFastCheck(c, true);
}
/**
* Same as {@link #get(HiveConf)}, except that it checks only the object identity of existing
* MS client, assuming the relevant settings would be unchanged within the same conf object.
*/
public static Hive getWithFastCheck(HiveConf c, boolean doRegisterAllFns) throws HiveException {
return getInternal(c, false, true, doRegisterAllFns);
}
private static Hive getInternal(HiveConf c, boolean needsRefresh, boolean isFastCheck,
boolean doRegisterAllFns) throws HiveException {
Hive db = hiveDB.get();
if (db == null || !db.isCurrentUserOwner() || needsRefresh
|| (c != null && !isCompatible(db, c, isFastCheck))) {
if (db != null) {
LOG.debug("Creating new db. db = " + db + ", needsRefresh = " + needsRefresh +
", db.isCurrentUserOwner = " + db.isCurrentUserOwner());
closeCurrent();
}
db = create(c, doRegisterAllFns);
}
if (c != null) {
db.conf = c;
}
return db;
}
private static Hive create(HiveConf c, boolean doRegisterAllFns) throws HiveException {
if (c == null) {
c = createHiveConf();
}
c.set("fs.scheme.class", "dfs");
Hive newdb = new Hive(c, doRegisterAllFns);
hiveDB.set(newdb);
return newdb;
}
private static HiveConf createHiveConf() {
SessionState session = SessionState.get();
return (session == null) ? new HiveConf(Hive.class) : session.getConf();
}
private static boolean isCompatible(Hive db, HiveConf c, boolean isFastCheck) {
if (isFastCheck) {
return (db.metaStoreClient == null || db.metaStoreClient.isSameConfObj(c))
&& (db.syncMetaStoreClient == null || db.syncMetaStoreClient.isSameConfObj(c));
} else {
return (db.metaStoreClient == null || db.metaStoreClient.isCompatibleWith(c))
&& (db.syncMetaStoreClient == null || db.syncMetaStoreClient.isCompatibleWith(c));
}
}
private boolean isCurrentUserOwner() throws HiveException {
try {
return owner == null || owner.equals(UserGroupInformation.getCurrentUser());
} catch(IOException e) {
throw new HiveException("Error getting current user: " + e.getMessage(), e);
}
}
public static Hive getThreadLocal() {
return hiveDB.get();
}
public static Hive get() throws HiveException {
return get(true);
}
public static Hive get(boolean doRegisterAllFns) throws HiveException {
return getInternal(null, false, false, doRegisterAllFns);
}
/**
* get a connection to metastore. see get(HiveConf) function for comments
*
* @param c
* new conf
* @param needsRefresh
* if true then creates a new one
* @return The connection to the metastore
* @throws HiveException
*/
public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException {
return getInternal(c, needsRefresh, false, true);
}
public static void set(Hive hive) {
hiveDB.set(hive);
}
public static void closeCurrent() {
hiveDB.remove();
}
/**
* Hive
*
* @param c
*
*/
private Hive(HiveConf c, boolean doRegisterAllFns) throws HiveException {
conf = c;
if (doRegisterAllFns) {
registerAllFunctionsOnce();
}
}
/**
* GC is attempting to destroy the object.
* No one references this Hive anymore, so HMS connection from this Hive object can be closed.
* @throws Throwable
*/
@Override
protected void finalize() throws Throwable {
close(true);
super.finalize();
}
/**
* Marks if the given Hive object is allowed to close metastore connections.
* @param allowClose
*/
public void setAllowClose(boolean allowClose) {
isAllowClose = allowClose;
}
/**
* Gets the allowClose flag which determines if it is allowed to close metastore connections.
* @return allowClose flag
*/
public boolean allowClose() {
return isAllowClose;
}
/**
* Closes the connection to metastore for the calling thread if allow to close.
* @param forceClose - Override the isAllowClose flag to forcefully close the MS connections.
*/
public void close(boolean forceClose) {
if (allowClose() || forceClose) {
LOG.debug("Closing current thread's connection to Hive Metastore.");
if (metaStoreClient != null) {
metaStoreClient.close();
metaStoreClient = null;
}
// syncMetaStoreClient is wrapped on metaStoreClient. So, it is enough to close it once.
syncMetaStoreClient = null;
if (owner != null) {
owner = null;
}
}
}
/**
* Create a database
* @param db
* @param ifNotExist if true, will ignore AlreadyExistsException exception
* @throws AlreadyExistsException
* @throws HiveException
*/
public void createDatabase(Database db, boolean ifNotExist)
throws AlreadyExistsException, HiveException {
try {
getMSC().createDatabase(db);
} catch (AlreadyExistsException e) {
if (!ifNotExist) {
throw e;
}
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Create a Database. Raise an error if a database with the same name already exists.
* @param db
* @throws AlreadyExistsException
* @throws HiveException
*/
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
createDatabase(db, false);
}
/**
* Drop a database.
* @param name
* @throws NoSuchObjectException
* @throws HiveException
* @see org.apache.hadoop.hive.metastore.HiveMetaStoreClient#dropDatabase(java.lang.String)
*/
public void dropDatabase(String name) throws HiveException, NoSuchObjectException {
dropDatabase(name, true, false, false);
}
/**
* Drop a database
* @param name
* @param deleteData
* @param ignoreUnknownDb if true, will ignore NoSuchObjectException
* @throws HiveException
* @throws NoSuchObjectException
*/
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
throws HiveException, NoSuchObjectException {
dropDatabase(name, deleteData, ignoreUnknownDb, false);
}
/**
* Drop a database
* @param name
* @param deleteData
* @param ignoreUnknownDb if true, will ignore NoSuchObjectException
* @param cascade if true, delete all tables on the DB if exists. Otherwise, the query
* will fail if table still exists.
* @throws HiveException
* @throws NoSuchObjectException
*/
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
throws HiveException, NoSuchObjectException {
try {
getMSC().dropDatabase(name, deleteData, ignoreUnknownDb, cascade);
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Creates a table metadata and the directory for the table data
*
* @param tableName
* name of the table
* @param columns
* list of fields of the table
* @param partCols
* partition keys of the table
* @param fileInputFormat
* Class of the input format of the table data file
* @param fileOutputFormat
* Class of the output format of the table data file
* @throws HiveException
* thrown if the args are invalid or if the metadata or the data
* directory couldn't be created
*/
public void createTable(String tableName, List<String> columns,
List<String> partCols, Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat) throws HiveException {
this.createTable(tableName, columns, partCols, fileInputFormat,
fileOutputFormat, -1, null);
}
/**
* Creates a table metadata and the directory for the table data
*
* @param tableName
* name of the table
* @param columns
* list of fields of the table
* @param partCols
* partition keys of the table
* @param fileInputFormat
* Class of the input format of the table data file
* @param fileOutputFormat
* Class of the output format of the table data file
* @param bucketCount
* number of buckets that each partition (or the table itself) should
* be divided into
* @throws HiveException
* thrown if the args are invalid or if the metadata or the data
* directory couldn't be created
*/
public void createTable(String tableName, List<String> columns,
List<String> partCols, Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols)
throws HiveException {
createTable(tableName, columns, partCols, fileInputFormat, fileOutputFormat, bucketCount,
bucketCols, null);
}
/**
* Create a table metadata and the directory for the table data
* @param tableName table name
* @param columns list of fields of the table
* @param partCols partition keys of the table
* @param fileInputFormat Class of the input format of the table data file
* @param fileOutputFormat Class of the output format of the table data file
* @param bucketCount number of buckets that each partition (or the table itself) should be
* divided into
* @param bucketCols Bucket columns
* @param parameters Parameters for the table
* @throws HiveException
*/
public void createTable(String tableName, List<String> columns, List<String> partCols,
Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols,
Map<String, String> parameters) throws HiveException {
if (columns == null) {
throw new HiveException("columns not specified for table " + tableName);
}
Table tbl = newTable(tableName);
tbl.setInputFormatClass(fileInputFormat.getName());
tbl.setOutputFormatClass(fileOutputFormat.getName());
for (String col : columns) {
FieldSchema field = new FieldSchema(col, STRING_TYPE_NAME, "default");
tbl.getCols().add(field);
}
if (partCols != null) {
for (String partCol : partCols) {
FieldSchema part = new FieldSchema();
part.setName(partCol);
part.setType(STRING_TYPE_NAME); // default partition key
tbl.getPartCols().add(part);
}
}
tbl.setSerializationLib(LazySimpleSerDe.class.getName());
tbl.setNumBuckets(bucketCount);
tbl.setBucketCols(bucketCols);
if (parameters != null) {
tbl.setParameters(parameters);
}
createTable(tbl);
}
public void alterTable(Table newTbl, boolean cascade, EnvironmentContext environmentContext,
boolean transactional) throws HiveException {
alterTable(newTbl.getCatName(), newTbl.getDbName(),
newTbl.getTableName(), newTbl, cascade, environmentContext, transactional);
}
/**
* Updates the existing table metadata with the new metadata.
*
* @param fullyQlfdTblName
* name of the existing table
* @param newTbl
* new name of the table. could be the old name
* @param transactional
* Need to generate and save a table snapshot into the metastore?
* @throws HiveException
*/
public void alterTable(String fullyQlfdTblName, Table newTbl, EnvironmentContext environmentContext,
boolean transactional)
throws HiveException {
String[] names = Utilities.getDbTableName(fullyQlfdTblName);
alterTable(null, names[0], names[1], newTbl, false, environmentContext, transactional);
}
public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional)
throws HiveException {
String[] names = Utilities.getDbTableName(fullyQlfdTblName);
alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional);
}
public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional, long writeId)
throws HiveException {
String[] names = Utilities.getDbTableName(fullyQlfdTblName);
alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional,
writeId);
}
public void alterTable(String catName, String dbName, String tblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional) throws HiveException {
alterTable(catName, dbName, tblName, newTbl, cascade, environmentContext, transactional, 0);
}
public void alterTable(String catName, String dbName, String tblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional, long replWriteId)
throws HiveException {
if (catName == null) {
catName = getDefaultCatalog(conf);
}
try {
// Remove the DDL_TIME so it gets refreshed
if (newTbl.getParameters() != null) {
newTbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
newTbl.checkValidity(conf);
if (environmentContext == null) {
environmentContext = new EnvironmentContext();
}
if (cascade) {
environmentContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE);
}
// Take a table snapshot and set it to newTbl.
AcidUtils.TableSnapshot tableSnapshot = null;
if (transactional) {
if (replWriteId > 0) {
// We need a valid writeId list for a transactional table modification. During
// replication we do not have a valid writeId list which was used to modify the table
// on the source. But we know for sure that the writeId associated with it was valid
// then (otherwise modification would have failed on the source). So use a valid
// transaction list with only that writeId.
ValidWriteIdList writeIds = new ValidReaderWriteIdList(TableName.getDbTable(dbName, tblName),
new long[0], new BitSet(),
replWriteId);
tableSnapshot = new TableSnapshot(replWriteId, writeIds.writeToString());
} else {
// Make sure we pass in the names, so we can get the correct snapshot for rename table.
tableSnapshot = AcidUtils.getTableSnapshot(conf, newTbl, dbName, tblName, true);
}
if (tableSnapshot != null) {
newTbl.getTTable().setWriteId(tableSnapshot.getWriteId());
} else {
LOG.warn("Cannot get a table snapshot for " + tblName);
}
}
// Why is alter_partitions synchronized while this isn't?
getMSC().alter_table(
catName, dbName, tblName, newTbl.getTTable(), environmentContext,
tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList());
} catch (MetaException e) {
throw new HiveException("Unable to alter table. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to alter table. " + e.getMessage(), e);
}
}
public void updateCreationMetadata(String dbName, String tableName, CreationMetadata cm)
throws HiveException {
try {
getMSC().updateCreationMetadata(dbName, tableName, cm);
} catch (TException e) {
throw new HiveException("Unable to update creation metadata " + e.getMessage(), e);
}
}
/**
* Updates the existing partition metadata with the new metadata.
*
* @param tblName
* name of the existing table
* @param newPart
* new partition
* @throws InvalidOperationException
* if the changes in metadata is not acceptable
* @throws HiveException
*/
@Deprecated
public void alterPartition(String tblName, Partition newPart,
EnvironmentContext environmentContext, boolean transactional)
throws InvalidOperationException, HiveException {
String[] names = Utilities.getDbTableName(tblName);
alterPartition(null, names[0], names[1], newPart, environmentContext, transactional);
}
/**
* Updates the existing partition metadata with the new metadata.
*
* @param dbName
* name of the exiting table's database
* @param tblName
* name of the existing table
* @param newPart
* new partition
* @param environmentContext
* environment context for the method
* @param transactional
* indicates this call is for transaction stats
* @throws InvalidOperationException
* if the changes in metadata is not acceptable
* @throws HiveException
*/
public void alterPartition(String catName, String dbName, String tblName, Partition newPart,
EnvironmentContext environmentContext, boolean transactional)
throws InvalidOperationException, HiveException {
try {
if (catName == null) {
catName = getDefaultCatalog(conf);
}
validatePartition(newPart);
String location = newPart.getLocation();
if (location != null) {
location = Utilities.getQualifiedPath(conf, new Path(location));
newPart.setLocation(location);
}
if (environmentContext == null) {
environmentContext = new EnvironmentContext();
}
AcidUtils.TableSnapshot tableSnapshot = null;
if (transactional) {
tableSnapshot = AcidUtils.getTableSnapshot(conf, newPart.getTable(), true);
if (tableSnapshot != null) {
newPart.getTPartition().setWriteId(tableSnapshot.getWriteId());
} else {
LOG.warn("Cannot get a table snapshot for " + tblName);
}
}
getSynchronizedMSC().alter_partition(catName,
dbName, tblName, newPart.getTPartition(), environmentContext,
tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList());
} catch (MetaException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
}
}
private void validatePartition(Partition newPart) throws HiveException {
// Remove the DDL time so that it gets refreshed
if (newPart.getParameters() != null) {
newPart.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
newPart.checkValidity();
}
/**
* Updates the existing table metadata with the new metadata.
*
* @param tblName
* name of the existing table
* @param newParts
* new partitions
* @param transactional
* Need to generate and save a table snapshot into the metastore?
* @throws InvalidOperationException
* if the changes in metadata is not acceptable
* @throws HiveException
*/
public void alterPartitions(String tblName, List<Partition> newParts,
EnvironmentContext environmentContext, boolean transactional)
throws InvalidOperationException, HiveException {
String[] names = Utilities.getDbTableName(tblName);
List<org.apache.hadoop.hive.metastore.api.Partition> newTParts =
new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();
try {
AcidUtils.TableSnapshot tableSnapshot = null;
if (transactional) {
tableSnapshot = AcidUtils.getTableSnapshot(conf, newParts.get(0).getTable(), true);
}
// Remove the DDL time so that it gets refreshed
for (Partition tmpPart: newParts) {
if (tmpPart.getParameters() != null) {
tmpPart.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
String location = tmpPart.getLocation();
if (location != null) {
location = Utilities.getQualifiedPath(conf, new Path(location));
tmpPart.setLocation(location);
}
newTParts.add(tmpPart.getTPartition());
}
getMSC().alter_partitions(names[0], names[1], newTParts, environmentContext,
tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null,
tableSnapshot != null ? tableSnapshot.getWriteId() : -1);
} catch (MetaException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
}
}
/**
* Rename a old partition to new partition
*
* @param tbl
* existing table
* @param oldPartSpec
* spec of old partition
* @param newPart
* new partition
* @throws HiveException
*/
public void renamePartition(Table tbl, Map<String, String> oldPartSpec, Partition newPart,
long replWriteId)
throws HiveException {
try {
Map<String, String> newPartSpec = newPart.getSpec();
if (oldPartSpec.keySet().size() != tbl.getPartCols().size()
|| newPartSpec.keySet().size() != tbl.getPartCols().size()) {
throw new HiveException("Unable to rename partition to the same name: number of partition cols don't match. ");
}
if (!oldPartSpec.keySet().equals(newPartSpec.keySet())){
throw new HiveException("Unable to rename partition to the same name: old and new partition cols don't match. ");
}
List<String> pvals = new ArrayList<String>();
for (FieldSchema field : tbl.getPartCols()) {
String val = oldPartSpec.get(field.getName());
if (val == null || val.length() == 0) {
throw new HiveException("get partition: Value for key "
+ field.getName() + " is null or empty");
} else if (val != null){
pvals.add(val);
}
}
String validWriteIds = null;
if (AcidUtils.isTransactionalTable(tbl)) {
TableSnapshot tableSnapshot;
if (replWriteId > 0) {
// We need a valid writeId list for a transactional table modification. During
// replication we do not have a valid writeId list which was used to modify the table
// on the source. But we know for sure that the writeId associated with it was valid
// then (otherwise modification would have failed on the source). So use a valid
// transaction list with only that writeId.
ValidWriteIdList writeIds = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(),
tbl.getTableName()), new long[0], new BitSet(), replWriteId);
tableSnapshot = new TableSnapshot(replWriteId, writeIds.writeToString());
} else {
// Set table snapshot to api.Table to make it persistent.
tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
}
if (tableSnapshot != null) {
newPart.getTPartition().setWriteId(tableSnapshot.getWriteId());
validWriteIds = tableSnapshot.getValidWriteIdList();
}
}
getMSC().renamePartition(tbl.getCatName(), tbl.getDbName(), tbl.getTableName(), pvals,
newPart.getTPartition(), validWriteIds);
} catch (InvalidOperationException e){
throw new HiveException("Unable to rename partition. " + e.getMessage(), e);
} catch (MetaException e) {
throw new HiveException("Unable to rename partition. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to rename partition. " + e.getMessage(), e);
}
}
// TODO: this whole path won't work with catalogs
public void alterDatabase(String dbName, Database db)
throws HiveException {
try {
getMSC().alterDatabase(dbName, db);
} catch (MetaException e) {
throw new HiveException("Unable to alter database " + dbName + ". " + e.getMessage(), e);
} catch (NoSuchObjectException e) {
throw new HiveException("Database " + dbName + " does not exists.", e);
} catch (TException e) {
throw new HiveException("Unable to alter database " + dbName + ". " + e.getMessage(), e);
}
}
/**
* Creates the table with the give objects
*
* @param tbl
* a table object
* @throws HiveException
*/
public void createTable(Table tbl) throws HiveException {
createTable(tbl, false);
}
// TODO: from here down dozens of methods do not support catalog. I got tired marking them.
/**
* Creates the table with the given objects. It takes additional arguments for
* primary keys and foreign keys associated with the table.
*
* @param tbl
* a table object
* @param ifNotExists
* if true, ignore AlreadyExistsException
* @param primaryKeys
* primary key columns associated with the table
* @param foreignKeys
* foreign key columns associated with the table
* @param uniqueConstraints
* UNIQUE constraints associated with the table
* @param notNullConstraints
* NOT NULL constraints associated with the table
* @param defaultConstraints
* DEFAULT constraints associated with the table
* @param checkConstraints
* CHECK constraints associated with the table
* @throws HiveException
*/
public void createTable(Table tbl, boolean ifNotExists,
List<SQLPrimaryKey> primaryKeys,
List<SQLForeignKey> foreignKeys,
List<SQLUniqueConstraint> uniqueConstraints,
List<SQLNotNullConstraint> notNullConstraints,
List<SQLDefaultConstraint> defaultConstraints,
List<SQLCheckConstraint> checkConstraints)
throws HiveException {
try {
if (tbl.getDbName() == null || "".equals(tbl.getDbName().trim())) {
tbl.setDbName(SessionState.get().getCurrentDatabase());
}
if (tbl.getCols().size() == 0 || tbl.getSd().getColsSize() == 0) {
tbl.setFields(HiveMetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(),
tbl.getDeserializer()));
}
tbl.checkValidity(conf);
if (tbl.getParameters() != null) {
tbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
org.apache.hadoop.hive.metastore.api.Table tTbl = tbl.getTTable();
PrincipalPrivilegeSet principalPrivs = new PrincipalPrivilegeSet();
SessionState ss = SessionState.get();
if (ss != null) {
CreateTableAutomaticGrant grants = ss.getCreateTableGrants();
if (grants != null) {
principalPrivs.setUserPrivileges(grants.getUserGrants());
principalPrivs.setGroupPrivileges(grants.getGroupGrants());
principalPrivs.setRolePrivileges(grants.getRoleGrants());
tTbl.setPrivileges(principalPrivs);
}
}
// Set table snapshot to api.Table to make it persistent. A transactional table being
// replicated may have a valid write Id copied from the source. Use that instead of
// crafting one on the replica.
if (tTbl.getWriteId() <= 0) {
TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
if (tableSnapshot != null) {
tbl.getTTable().setWriteId(tableSnapshot.getWriteId());
}
}
if (primaryKeys == null && foreignKeys == null
&& uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null
&& checkConstraints == null) {
getMSC().createTable(tTbl);
} else {
getMSC().createTableWithConstraints(tTbl, primaryKeys, foreignKeys,
uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
}
} catch (AlreadyExistsException e) {
if (!ifNotExists) {
throw new HiveException(e);
}
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
createTable(tbl, ifNotExists, null, null, null, null,
null, null);
}
public static List<FieldSchema> getFieldsFromDeserializerForMsStorage(
Table tbl, Deserializer deserializer) throws SerDeException, MetaException {
List<FieldSchema> schema = HiveMetaStoreUtils.getFieldsFromDeserializer(
tbl.getTableName(), deserializer);
for (FieldSchema field : schema) {
field.setType(MetaStoreUtils.TYPE_FROM_DESERIALIZER);
}
return schema;
}
/**
* Drops table along with the data in it. If the table doesn't exist then it
* is a no-op. If ifPurge option is specified it is passed to the
* hdfs command that removes table data from warehouse to make it skip trash.
*
* @param tableName
* table to drop
* @param ifPurge
* completely purge the table (skipping trash) while removing data from warehouse
* @throws HiveException
* thrown if the drop fails
*/
public void dropTable(String tableName, boolean ifPurge) throws HiveException {
String[] names = Utilities.getDbTableName(tableName);
dropTable(names[0], names[1], true, true, ifPurge);
}
/**
* Drops table along with the data in it. If the table doesn't exist then it
* is a no-op
*
* @param tableName
* table to drop
* @throws HiveException
* thrown if the drop fails
*/
public void dropTable(String tableName) throws HiveException {
dropTable(tableName, false);
}
/**
* Drops table along with the data in it. If the table doesn't exist then it
* is a no-op
*
* @param dbName
* database where the table lives
* @param tableName
* table to drop
* @throws HiveException
* thrown if the drop fails
*/
public void dropTable(String dbName, String tableName) throws HiveException {
dropTable(dbName, tableName, true, true, false);
}
/**
* Drops the table.
*
* @param dbName
* @param tableName
* @param deleteData
* deletes the underlying data along with metadata
* @param ignoreUnknownTab
* an exception is thrown if this is false and the table doesn't exist
* @throws HiveException
*/
public void dropTable(String dbName, String tableName, boolean deleteData,
boolean ignoreUnknownTab) throws HiveException {
dropTable(dbName, tableName, deleteData, ignoreUnknownTab, false);
}
/**
* Drops the table.
*
* @param dbName
* @param tableName
* @param deleteData
* deletes the underlying data along with metadata
* @param ignoreUnknownTab
* an exception is thrown if this is false and the table doesn't exist
* @param ifPurge
* completely purge the table skipping trash while removing data from warehouse
* @throws HiveException
*/
public void dropTable(String dbName, String tableName, boolean deleteData,
boolean ignoreUnknownTab, boolean ifPurge) throws HiveException {
try {
getMSC().dropTable(dbName, tableName, deleteData, ignoreUnknownTab, ifPurge);
} catch (NoSuchObjectException e) {
if (!ignoreUnknownTab) {
throw new HiveException(e);
}
} catch (MetaException e) {
int idx = ExceptionUtils.indexOfType(e, SQLIntegrityConstraintViolationException.class);
if (idx != -1 && ExceptionUtils.getThrowables(e)[idx].getMessage().contains("MV_TABLES_USED")) {
throw new HiveException("Cannot drop table since it is used by at least one materialized view definition. " +
"Please drop any materialized view that uses the table before dropping it", e);
}
throw new HiveException(e);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Truncates the table/partition as per specifications. Just trash the data files
*
* @param dbDotTableName
* name of the table
* @throws HiveException
*/
public void truncateTable(String dbDotTableName, Map<String, String> partSpec, Long writeId) throws HiveException {
try {
Table table = getTable(dbDotTableName, true);
AcidUtils.TableSnapshot snapshot = null;
if (AcidUtils.isTransactionalTable(table)) {
if (writeId <= 0) {
snapshot = AcidUtils.getTableSnapshot(conf, table, true);
} else {
String fullTableName = getFullTableName(table.getDbName(), table.getTableName());
ValidWriteIdList writeIdList = getMSC().getValidWriteIds(fullTableName, writeId);
snapshot = new TableSnapshot(writeId, writeIdList.writeToString());
}
}
// TODO: APIs with catalog names
List<String> partNames = ((null == partSpec)
? null : getPartitionNames(table.getDbName(), table.getTableName(), partSpec, (short) -1));
if (snapshot == null) {
getMSC().truncateTable(table.getDbName(), table.getTableName(), partNames);
} else {
getMSC().truncateTable(table.getDbName(), table.getTableName(), partNames,
snapshot.getValidWriteIdList(), snapshot.getWriteId());
}
} catch (Exception e) {
throw new HiveException(e);
}
}
public HiveConf getConf() {
return (conf);
}
/**
* Returns metadata for the table named tableName
* @param tableName the name of the table
* @return the table metadata
* @throws HiveException if there's an internal error or if the
* table doesn't exist
*/
public Table getTable(final String tableName) throws HiveException {
return this.getTable(tableName, true);
}
/**
* Returns metadata for the table named tableName
* @param tableName the name of the table
 * @param throwException controls whether an exception is thrown or null is returned
* @return the table metadata
* @throws HiveException if there's an internal error or if the
* table doesn't exist
*/
public Table getTable(final String tableName, boolean throwException) throws HiveException {
String[] names = Utilities.getDbTableName(tableName);
return this.getTable(names[0], names[1], throwException);
}
/**
* Returns metadata of the table
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
* @return the table
* @exception HiveException
* if there's an internal error or if the table doesn't exist
*/
public Table getTable(final String dbName, final String tableName) throws HiveException {
// TODO: catalog... etc everywhere
if (tableName.contains(".")) {
String[] names = Utilities.getDbTableName(tableName);
return this.getTable(names[0], names[1], true);
} else {
return this.getTable(dbName, tableName, true);
}
}
/**
* Returns metadata of the table
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
 * @param throwException
 *          controls whether an exception is thrown or null is returned
 * @return the table, or null if throwException is false and the table does not exist
* @throws HiveException
*/
public Table getTable(final String dbName, final String tableName,
boolean throwException) throws HiveException {
return this.getTable(dbName, tableName, throwException, false);
}
/**
* Returns metadata of the table
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
 * @param throwException
 *          controls whether an exception is thrown or null is returned
 * @param checkTransactional
 *          checks whether the table metadata stats are valid for (i.e. compliant
 *          with the snapshot isolation of) the current transaction.
 * @return the table, or null if throwException is false and the table does not exist
* @throws HiveException
*/
public Table getTable(final String dbName, final String tableName, boolean throwException,
boolean checkTransactional) throws HiveException {
return getTable(dbName, tableName, throwException, checkTransactional, false);
}
/**
* Returns metadata of the table.
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
 * @param throwException
 *          controls whether an exception is thrown or null is returned
 * @param checkTransactional
 *          checks whether the table metadata stats are valid for (i.e. compliant
 *          with the snapshot isolation of) the current transaction.
* @param getColumnStats
* get column statistics if available
 * @return the table, or null if throwException is false and the table does not exist
* @throws HiveException
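 *
 * <p>Illustrative usage sketch (names are placeholders): fetch a table
 * without throwing when it is missing, and request column statistics:
 * <pre>{@code
 * Table t = Hive.get().getTable("mydb", "sales", false, false, true);
 * if (t == null) {
 *   // the table does not exist
 * }
 * }</pre>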
*/
public Table getTable(final String dbName, final String tableName, boolean throwException,
boolean checkTransactional, boolean getColumnStats) throws HiveException {
if (tableName == null || tableName.equals("")) {
throw new HiveException("Table name cannot be null or empty");
}
// Get the table from metastore
org.apache.hadoop.hive.metastore.api.Table tTable = null;
try {
// Note: this is currently called w/true from StatsOptimizer only.
if (checkTransactional) {
ValidWriteIdList validWriteIdList = null;
long txnId = SessionState.get().getTxnMgr() != null ?
SessionState.get().getTxnMgr().getCurrentTxnId() : 0;
if (txnId > 0) {
validWriteIdList = AcidUtils.getTableValidWriteIdListWithTxnList(conf,
dbName, tableName);
}
tTable = getMSC().getTable(getDefaultCatalog(conf), dbName, tableName,
validWriteIdList != null ? validWriteIdList.toString() : null, getColumnStats);
} else {
tTable = getMSC().getTable(dbName, tableName, getColumnStats);
}
} catch (NoSuchObjectException e) {
if (throwException) {
throw new InvalidTableException(tableName);
}
return null;
} catch (Exception e) {
throw new HiveException("Unable to fetch table " + tableName + ". " + e.getMessage(), e);
}
// For non-views, we need to do some extra fixes
if (!TableType.VIRTUAL_VIEW.toString().equals(tTable.getTableType())) {
// Fix the non-printable chars
Map<String, String> parameters = tTable.getSd().getParameters();
String sf = parameters!=null?parameters.get(SERIALIZATION_FORMAT) : null;
if (sf != null) {
char[] b = sf.toCharArray();
if ((b.length == 1) && (b[0] < 10)) { // ^A, ^B, ^C, ^D, \t
parameters.put(SERIALIZATION_FORMAT, Integer.toString(b[0]));
}
}
// Use LazySimpleSerDe for MetadataTypedColumnsetSerDe.
// NOTE: LazySimpleSerDe does not support tables with a single column of
// type "array<string>". This happens when the table is created using an
// earlier version of Hive.
if (org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class
.getName().equals(
tTable.getSd().getSerdeInfo().getSerializationLib())
&& tTable.getSd().getColsSize() > 0
&& tTable.getSd().getCols().get(0).getType().indexOf('<') == -1) {
tTable.getSd().getSerdeInfo().setSerializationLib(
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
}
}
return new Table(tTable);
}
/**
* Get all table names for the current database.
* @return List of table names
* @throws HiveException
*/
public List<String> getAllTables() throws HiveException {
return getTablesByType(SessionState.get().getCurrentDatabase(), null, null);
}
/**
* Get all table names for the specified database.
* @param dbName
* @return List of table names
* @throws HiveException
*/
public List<String> getAllTables(String dbName) throws HiveException {
return getTablesByType(dbName, ".*", null);
}
/**
* Get all tables for the specified database.
* @param dbName
* @return List of all tables
* @throws HiveException
*/
public List<Table> getAllTableObjects(String dbName) throws HiveException {
return getTableObjects(dbName, ".*", null);
}
/**
* Get tables for the specified database that match the provided regex pattern and table type.
* @param dbName
* @param pattern
* @param tableType
* @return List of table objects
* @throws HiveException
*/
public List<Table> getTableObjectsByType(String dbName, String pattern, TableType tableType) throws HiveException {
return getTableObjects(dbName, pattern, tableType);
}
/**
* Get all materialized view names for the specified database.
* @param dbName
* @return List of materialized view table names
* @throws HiveException
*/
public List<String> getAllMaterializedViews(String dbName) throws HiveException {
return getTablesByType(dbName, ".*", TableType.MATERIALIZED_VIEW);
}
/**
* Get all materialized views for the specified database.
* @param dbName
* @return List of materialized view table objects
* @throws HiveException
*/
public List<Table> getAllMaterializedViewObjects(String dbName) throws HiveException {
return getTableObjects(dbName, ".*", TableType.MATERIALIZED_VIEW);
}
/**
* Get materialized views for the specified database that match the provided regex pattern.
* @param dbName
* @param pattern
* @return List of materialized view table objects
* @throws HiveException
*/
public List<Table> getMaterializedViewObjectsByPattern(String dbName, String pattern) throws HiveException {
return getTableObjects(dbName, pattern, TableType.MATERIALIZED_VIEW);
}
private List<Table> getTableObjects(String dbName, String pattern, TableType tableType) throws HiveException {
try {
return Lists.transform(getMSC().getTableObjectsByName(dbName, getTablesByType(dbName, pattern, tableType)),
new com.google.common.base.Function<org.apache.hadoop.hive.metastore.api.Table, Table>() {
@Override
public Table apply(org.apache.hadoop.hive.metastore.api.Table table) {
return new Table(table);
}
}
);
} catch (Exception e) {
throw new HiveException(e);
}
}
private List<Table> getTableObjects(String dbName, List<String> tableNames) throws HiveException {
try {
return Lists.transform(getMSC().getTableObjectsByName(dbName, tableNames),
new com.google.common.base.Function<org.apache.hadoop.hive.metastore.api.Table, Table>() {
@Override
public Table apply(org.apache.hadoop.hive.metastore.api.Table table) {
return new Table(table);
}
}
);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Returns all existing tables from default database which match the given
* pattern. The matching occurs as per Java regular expressions
*
* @param tablePattern
 *          java regex pattern
* @return list of table names
* @throws HiveException
*/
public List<String> getTablesByPattern(String tablePattern) throws HiveException {
return getTablesByType(SessionState.get().getCurrentDatabase(),
tablePattern, null);
}
/**
* Returns all existing tables from the specified database which match the given
* pattern. The matching occurs as per Java regular expressions.
* @param dbName
* @param tablePattern
* @return list of table names
* @throws HiveException
*/
public List<String> getTablesByPattern(String dbName, String tablePattern) throws HiveException {
return getTablesByType(dbName, tablePattern, null);
}
/**
* Returns all existing tables from the given database which match the given
* pattern. The matching occurs as per Java regular expressions
*
* @param database
* the database name
* @param tablePattern
 *          java regex pattern
* @return list of table names
* @throws HiveException
*/
public List<String> getTablesForDb(String database, String tablePattern)
throws HiveException {
return getTablesByType(database, tablePattern, null);
}
/**
* Returns all existing tables of a type (VIRTUAL_VIEW|EXTERNAL_TABLE|MANAGED_TABLE) from the specified
* database which match the given pattern. The matching occurs as per Java regular expressions.
 * @param dbName Database name to find the tables in. If null, uses the current database in this session.
 * @param pattern A pattern to match the table names against. If null, returns all names from this DB.
 * @param type The type of tables to return. VIRTUAL_VIEW for views. If null, returns all tables and views.
* @return list of table names that match the pattern.
* @throws HiveException
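 *
 * <p>Illustrative usage sketch (names are placeholders): list all
 * materialized views in "mydb" whose names end in "_mv":
 * <pre>{@code
 * List<String> mvs = Hive.get().getTablesByType("mydb", ".*_mv",
 *     TableType.MATERIALIZED_VIEW);
 * }</pre>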
*/
public List<String> getTablesByType(String dbName, String pattern, TableType type)
throws HiveException {
if (dbName == null) {
dbName = SessionState.get().getCurrentDatabase();
}
try {
if (type != null) {
if (pattern != null) {
return getMSC().getTables(dbName, pattern, type);
} else {
return getMSC().getTables(dbName, ".*", type);
}
} else {
if (pattern != null) {
return getMSC().getTables(dbName, pattern);
} else {
return getMSC().getTables(dbName, ".*");
}
}
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get the materialized views that have been enabled for rewriting from the
* metastore. If the materialized view is in the cache, we do not need to
* parse it to generate a logical plan for the rewriting. Instead, we
* return the version present in the cache. Further, information provided
* by the invalidation cache is useful to know whether a materialized view
* can be used for rewriting or not.
*
* @return the list of materialized views available for rewriting
* @throws HiveException
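 *
 * <p>Illustrative usage sketch (table names are placeholders); the txn
 * manager is assumed to come from the current session:
 * <pre>{@code
 * List<String> tablesUsed = Arrays.asList("mydb.t1", "mydb.t2");
 * List<RelOptMaterialization> mvs = Hive.get().getAllValidMaterializedViews(
 *     tablesUsed, false, SessionState.get().getTxnMgr());
 * }</pre>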
*/
public List<RelOptMaterialization> getAllValidMaterializedViews(List<String> tablesUsed, boolean forceMVContentsUpToDate,
HiveTxnManager txnMgr) throws HiveException {
// Final result
List<RelOptMaterialization> result = new ArrayList<>();
try {
// From metastore (for security)
List<Table> materializedViews = getAllMaterializedViewObjectsForRewriting();
if (materializedViews.isEmpty()) {
// Bail out: empty list
return result;
}
result.addAll(getValidMaterializedViews(materializedViews,
tablesUsed, forceMVContentsUpToDate, txnMgr));
return result;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<RelOptMaterialization> getValidMaterializedView(String dbName, String materializedViewName,
List<String> tablesUsed, boolean forceMVContentsUpToDate, HiveTxnManager txnMgr) throws HiveException {
return getValidMaterializedViews(ImmutableList.of(getTable(dbName, materializedViewName)),
tablesUsed, forceMVContentsUpToDate, txnMgr);
}
private List<RelOptMaterialization> getValidMaterializedViews(List<Table> materializedViewTables,
List<String> tablesUsed, boolean forceMVContentsUpToDate, HiveTxnManager txnMgr) throws HiveException {
final String validTxnsList = conf.get(ValidTxnList.VALID_TXNS_KEY);
final ValidTxnWriteIdList currentTxnWriteIds = txnMgr.getValidWriteIds(tablesUsed, validTxnsList);
final boolean tryIncrementalRewriting =
HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_INCREMENTAL);
final boolean tryIncrementalRebuild =
HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REBUILD_INCREMENTAL);
final long defaultTimeWindow =
HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW,
TimeUnit.MILLISECONDS);
try {
// Final result
List<RelOptMaterialization> result = new ArrayList<>();
for (Table materializedViewTable : materializedViewTables) {
final Boolean outdated = isOutdatedMaterializedView(materializedViewTable, currentTxnWriteIds,
defaultTimeWindow, tablesUsed, forceMVContentsUpToDate);
if (outdated == null) {
continue;
}
final CreationMetadata creationMetadata = materializedViewTable.getCreationMetadata();
if (outdated) {
// The MV is outdated, see whether we should consider it for rewriting or not
boolean ignore = false;
if (forceMVContentsUpToDate && !tryIncrementalRebuild) {
// We will not try partial rewriting for rebuild if incremental rebuild is disabled
ignore = true;
} else if (!forceMVContentsUpToDate && !tryIncrementalRewriting) {
// We will not try partial rewriting for non-rebuild if incremental rewriting is disabled
ignore = true;
} else {
// Obtain additional information if we should try incremental rewriting / rebuild
// We will not try partial rewriting if there were update/delete operations on source tables
Materialization invalidationInfo = getMSC().getMaterializationInvalidationInfo(
creationMetadata, conf.get(ValidTxnList.VALID_TXNS_KEY));
ignore = invalidationInfo == null || invalidationInfo.isSourceTablesUpdateDeleteModified();
}
if (ignore) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as its contents are outdated");
continue;
}
}
// It passed the test, load
RelOptMaterialization materialization =
HiveMaterializedViewsRegistry.get().getRewritingMaterializedView(
materializedViewTable.getDbName(), materializedViewTable.getTableName());
if (materialization != null) {
RelNode viewScan = materialization.tableRel;
RelOptHiveTable cachedMaterializedViewTable;
if (viewScan instanceof Project) {
// There is a Project on top (due to nullability)
cachedMaterializedViewTable = (RelOptHiveTable) viewScan.getInput(0).getTable();
} else {
cachedMaterializedViewTable = (RelOptHiveTable) viewScan.getTable();
}
if (cachedMaterializedViewTable.getHiveTableMD().getCreateTime() ==
materializedViewTable.getCreateTime()) {
// It is in the cache and up to date
if (outdated) {
// We will rewrite it to include the filters on transaction list
// so we can produce partial rewritings
materialization = augmentMaterializationWithTimeInformation(
materialization, validTxnsList, new ValidTxnWriteIdList(
creationMetadata.getValidTxnList()));
}
result.add(materialization);
continue;
}
}
// It was not present in the cache (maybe because it was added by another HS2)
// or it is not up to date.
if (HiveMaterializedViewsRegistry.get().isInitialized()) {
// But the registry was fully initialized, thus we need to add it
if (LOG.isDebugEnabled()) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" was not in the cache");
}
materialization = HiveMaterializedViewsRegistry.get().createMaterializedView(
conf, materializedViewTable);
if (materialization != null) {
if (outdated) {
// We will rewrite it to include the filters on transaction list
// so we can produce partial rewritings
materialization = augmentMaterializationWithTimeInformation(
materialization, validTxnsList, new ValidTxnWriteIdList(
creationMetadata.getValidTxnList()));
}
result.add(materialization);
}
} else {
// Otherwise the registry has not been initialized, skip for the time being
if (LOG.isWarnEnabled()) {
LOG.info("Materialized view " + materializedViewTable.getFullyQualifiedName() + " was skipped "
+ "because cache has not been loaded yet");
}
}
}
return result;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Utility method that returns whether a materialized view is outdated (true), not outdated
 * (false), or whether it cannot be determined (null). The latter case may happen e.g. when the
* materialized view definition uses external tables.
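 *
 * <p>Callers are expected to handle all three outcomes; a minimal sketch
 * (arguments are assumed to be in scope):
 * <pre>{@code
 * Boolean outdated = isOutdatedMaterializedView(mvTable, currentTxnWriteIds,
 *     defaultTimeWindow, tablesUsed, false);
 * if (outdated == null) {
 *   // cannot be determined: skip this materialized view
 * } else if (outdated) {
 *   // stale contents: only usable for partial/incremental rewriting
 * } else {
 *   // up to date: usable as is
 * }
 * }</pre>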
*/
public static Boolean isOutdatedMaterializedView(Table materializedViewTable, final ValidTxnWriteIdList currentTxnWriteIds,
long defaultTimeWindow, List<String> tablesUsed, boolean forceMVContentsUpToDate) {
// Check if materialization defined its own invalidation time window
String timeWindowString = materializedViewTable.getProperty(MATERIALIZED_VIEW_REWRITING_TIME_WINDOW);
long timeWindow = org.apache.commons.lang.StringUtils.isEmpty(timeWindowString) ? defaultTimeWindow :
HiveConf.toTime(timeWindowString,
HiveConf.getDefaultTimeUnit(HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW),
TimeUnit.MILLISECONDS);
CreationMetadata creationMetadata = materializedViewTable.getCreationMetadata();
boolean outdated = false;
if (timeWindow < 0L) {
// We only consider the materialized view to be outdated if forceOutdated = true, i.e.,
// if it is a rebuild. Otherwise, it passed the test and we use it as it is.
outdated = forceMVContentsUpToDate;
} else {
// Check whether the materialized view is invalidated
if (forceMVContentsUpToDate || timeWindow == 0L || creationMetadata.getMaterializationTime() < System.currentTimeMillis() - timeWindow) {
if (currentTxnWriteIds == null) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as we could not obtain current txn ids");
return null;
}
if (creationMetadata.getValidTxnList() == null ||
creationMetadata.getValidTxnList().isEmpty()) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as we could not obtain materialization txn ids");
return null;
}
boolean ignore = false;
ValidTxnWriteIdList mvTxnWriteIds = new ValidTxnWriteIdList(
creationMetadata.getValidTxnList());
for (String qName : tablesUsed) {
// Note. If the materialized view does not contain a table that is contained in the query,
// we do not need to check whether that specific table is outdated or not. If a rewriting
// is produced in those cases, it is because that additional table is joined with the
// existing tables with an append-columns only join, i.e., PK-FK + not null.
if (!creationMetadata.getTablesUsed().contains(qName)) {
continue;
}
ValidWriteIdList tableCurrentWriteIds = currentTxnWriteIds.getTableValidWriteIdList(qName);
if (tableCurrentWriteIds == null) {
// Uses non-transactional table, cannot be considered
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as it is outdated and cannot be considered for " +
" rewriting because it uses non-transactional table " + qName);
ignore = true;
break;
}
ValidWriteIdList tableWriteIds = mvTxnWriteIds.getTableValidWriteIdList(qName);
if (tableWriteIds == null) {
// This should not happen, but we ignore for safety
LOG.warn("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as details about txn ids for table " + qName +
" could not be found in " + mvTxnWriteIds);
ignore = true;
break;
}
if (!outdated && !TxnIdUtils.checkEquivalentWriteIds(tableCurrentWriteIds, tableWriteIds)) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" contents are outdated");
outdated = true;
}
}
if (ignore) {
return null;
}
}
}
return outdated;
}
/**
* Method to enrich the materialization query contained in the input with
* its invalidation.
*/
private static RelOptMaterialization augmentMaterializationWithTimeInformation(
RelOptMaterialization materialization, String validTxnsList,
ValidTxnWriteIdList materializationTxnList) throws LockException {
// Extract tables used by the query which will in turn be used to generate
// the corresponding txn write ids
List<String> tablesUsed = new ArrayList<>();
new RelVisitor() {
@Override
public void visit(RelNode node, int ordinal, RelNode parent) {
if (node instanceof TableScan) {
TableScan ts = (TableScan) node;
tablesUsed.add(((RelOptHiveTable) ts.getTable()).getHiveTableMD().getFullyQualifiedName());
}
super.visit(node, ordinal, parent);
}
}.go(materialization.queryRel);
ValidTxnWriteIdList currentTxnList =
SessionState.get().getTxnMgr().getValidWriteIds(tablesUsed, validTxnsList);
// Augment
final RexBuilder rexBuilder = materialization.queryRel.getCluster().getRexBuilder();
final HepProgramBuilder augmentMaterializationProgram = new HepProgramBuilder()
.addRuleInstance(new HiveAugmentMaterializationRule(rexBuilder, currentTxnList, materializationTxnList));
final HepPlanner augmentMaterializationPlanner = new HepPlanner(
augmentMaterializationProgram.build());
augmentMaterializationPlanner.setRoot(materialization.queryRel);
final RelNode modifiedQueryRel = augmentMaterializationPlanner.findBestExp();
return new RelOptMaterialization(materialization.tableRel, modifiedQueryRel,
null, materialization.qualifiedTableName);
}
public List<Table> getAllMaterializedViewObjectsForRewriting() throws HiveException {
try {
return Lists.transform(getMSC().getAllMaterializedViewObjectsForRewriting(),
new com.google.common.base.Function<org.apache.hadoop.hive.metastore.api.Table, Table>() {
@Override
public Table apply(org.apache.hadoop.hive.metastore.api.Table table) {
return new Table(table);
}
}
);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get materialized views for the specified database that have enabled rewriting.
* @param dbName
* @return List of materialized view table objects
* @throws HiveException
*/
private List<String> getMaterializedViewsForRewriting(String dbName) throws HiveException {
try {
return getMSC().getMaterializedViewsForRewriting(dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all existing database names.
*
* @return List of database names.
* @throws HiveException
*/
public List<String> getAllDatabases() throws HiveException {
try {
return getMSC().getAllDatabases();
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all existing databases that match the given
* pattern. The matching occurs as per Java regular expressions
*
* @param databasePattern
 *          java regex pattern
* @return list of database names
* @throws HiveException
*/
public List<String> getDatabasesByPattern(String databasePattern) throws HiveException {
try {
return getMSC().getDatabases(databasePattern);
} catch (Exception e) {
throw new HiveException(e);
}
}
public boolean grantPrivileges(PrivilegeBag privileges)
throws HiveException {
try {
return getMSC().grant_privileges(privileges);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* @param privileges
* a bag of privileges
* @return true on success
* @throws HiveException
*/
public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
throws HiveException {
try {
return getMSC().revoke_privileges(privileges, grantOption);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Query metadata to see if a database with the given name already exists.
*
* @param dbName
* @return true if a database with the given name already exists, false if
* does not exist.
* @throws HiveException
*/
public boolean databaseExists(String dbName) throws HiveException {
return getDatabase(dbName) != null;
}
/**
* Get the database by name.
* @param dbName the name of the database.
* @return a Database object if this database exists, null otherwise.
* @throws HiveException
*/
public Database getDatabase(String dbName) throws HiveException {
try {
return getMSC().getDatabase(dbName);
} catch (NoSuchObjectException e) {
return null;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get the database by name.
* @param catName catalog name
* @param dbName the name of the database.
* @return a Database object if this database exists, null otherwise.
* @throws HiveException
*/
public Database getDatabase(String catName, String dbName) throws HiveException {
try {
return getMSC().getDatabase(catName, dbName);
} catch (NoSuchObjectException e) {
return null;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get the Database object for current database
* @return a Database object if this database exists, null otherwise.
* @throws HiveException
*/
public Database getDatabaseCurrent() throws HiveException {
String currentDb = SessionState.get().getCurrentDatabase();
return getDatabase(currentDb);
}
private TableSnapshot getTableSnapshot(Table tbl, Long writeId) throws LockException {
TableSnapshot tableSnapshot = null;
if ((writeId != null) && (writeId > 0)) {
ValidWriteIdList writeIds = AcidUtils.getTableValidWriteIdListWithTxnList(
conf, tbl.getDbName(), tbl.getTableName());
tableSnapshot = new TableSnapshot(writeId, writeIds.writeToString());
} else {
// Make sure we pass in the names, so we can get the correct snapshot for rename table.
tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, tbl.getDbName(), tbl.getTableName(),
true);
}
return tableSnapshot;
}
/**
 * Load a directory into a Hive Table Partition. Alters the existing content of
 * the partition with the contents of loadPath. If the partition does not
 * exist, it is created. Files in loadPath are moved into Hive, but the
 * directory itself is not removed.
*
* @param loadPath
* Directory containing files to load into Table
* @param tbl
* name of table to be loaded.
* @param partSpec
* defines which partition needs to be loaded
* @param loadFileType
* if REPLACE_ALL - replace files in the table,
* otherwise add files to table (KEEP_EXISTING, OVERWRITE_EXISTING)
 * @param inheritTableSpecs if true, on [re]creating the partition, take the
 *          location/inputformat/outputformat/serde details from table spec
 * @param inheritLocation
 *          if true, the partition path is generated from the table location
 * @param isSkewedStoreAsSubdir
 *          if true, skewed data is stored as sub-directories
* @param isSrcLocal
* If the source directory is LOCAL
* @param isAcidIUDoperation
* true if this is an ACID operation Insert/Update/Delete operation
* @param resetStatistics
* if true, reset the statistics. If false, do not reset statistics.
* @param writeId write ID allocated for the current load operation
* @param stmtId statement ID of the current load statement
* @param isInsertOverwrite
* @return Partition object being loaded with data
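 *
 * <p>Illustrative usage sketch (paths and names are placeholders): loading
 * staged files into one partition of a non-transactional table, replacing
 * its previous contents (writeId may be null in that case):
 * <pre>{@code
 * Hive db = Hive.get();
 * Table tbl = db.getTable("mydb", "sales");
 * Map<String, String> partSpec = new HashMap<>();
 * partSpec.put("ds", "2020-01-01");
 * db.loadPartition(new Path("/tmp/staging"), tbl, partSpec,
 *     LoadFileType.REPLACE_ALL, true, false, false, false, false,
 *     false, null, 0, false);
 * }</pre>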
*/
public Partition loadPartition(Path loadPath, Table tbl, Map<String, String> partSpec,
LoadFileType loadFileType, boolean inheritTableSpecs,
boolean inheritLocation,
boolean isSkewedStoreAsSubdir,
boolean isSrcLocal, boolean isAcidIUDoperation,
boolean resetStatistics, Long writeId,
int stmtId, boolean isInsertOverwrite) throws HiveException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin("MoveTask", PerfLogger.LOAD_PARTITION);
// Get the partition object if it already exists
Partition oldPart = getPartition(tbl, partSpec, false);
boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
// If config is set, table is not temporary and partition being inserted exists, capture
// the list of files added. For not yet existing partitions (insert overwrite to new partition
// or dynamic partition inserts), the add partition event will capture the list of files added.
List<Path> newFiles = Collections.synchronizedList(new ArrayList<>());
Partition newTPart = loadPartitionInternal(loadPath, tbl, partSpec, oldPart,
loadFileType, inheritTableSpecs,
inheritLocation, isSkewedStoreAsSubdir, isSrcLocal, isAcidIUDoperation,
resetStatistics, writeId, stmtId, isInsertOverwrite, isTxnTable, newFiles);
AcidUtils.TableSnapshot tableSnapshot = isTxnTable ? getTableSnapshot(tbl, writeId) : null;
if (tableSnapshot != null) {
newTPart.getTPartition().setWriteId(tableSnapshot.getWriteId());
}
if (oldPart == null) {
addPartitionToMetastore(newTPart, resetStatistics, tbl, tableSnapshot);
// For acid table, add the acid_write event with file list at the time of load itself. But
// it should be done after partition is created.
if (isTxnTable && (null != newFiles)) {
addWriteNotificationLog(tbl, partSpec, newFiles, writeId);
}
} else {
try {
setStatsPropAndAlterPartition(resetStatistics, tbl, newTPart, tableSnapshot);
} catch (TException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.LOAD_PARTITION);
return newTPart;
}
/**
 * Move all the files from loadPath into Hive. If the partition
 * does not exist, it is created. Files in loadPath are moved into Hive, but the
 * directory itself is not removed.
*
* @param loadPath
* Directory containing files to load into Table
* @param tbl
* name of table to be loaded.
* @param partSpec
* defines which partition needs to be loaded
* @param oldPart
* already existing partition object, can be null
* @param loadFileType
* if REPLACE_ALL - replace files in the table,
* otherwise add files to table (KEEP_EXISTING, OVERWRITE_EXISTING)
* @param inheritTableSpecs if true, on [re]creating the partition, take the
* location/inputformat/outputformat/serde details from table spec
 * @param inheritLocation
 *          if true, the partition path is generated from the table location
 * @param isSkewedStoreAsSubdir
 *          if true, skewed data is stored as sub-directories
* @param isSrcLocal
* If the source directory is LOCAL
* @param isAcidIUDoperation
* true if this is an ACID operation Insert/Update/Delete operation
* @param resetStatistics
* if true, reset the statistics. Do not reset statistics if false.
* @param writeId
* write ID allocated for the current load operation
* @param stmtId
* statement ID of the current load statement
* @param isInsertOverwrite
* @param isTxnTable
*
* @return Partition object being loaded with data
* @throws HiveException
*/
private Partition loadPartitionInternal(Path loadPath, Table tbl, Map<String, String> partSpec,
Partition oldPart, LoadFileType loadFileType, boolean inheritTableSpecs,
boolean inheritLocation, boolean isSkewedStoreAsSubdir,
boolean isSrcLocal, boolean isAcidIUDoperation, boolean resetStatistics,
Long writeId, int stmtId, boolean isInsertOverwrite,
boolean isTxnTable, List<Path> newFiles) throws HiveException {
Path tblDataLocationPath = tbl.getDataLocation();
boolean isMmTableWrite = AcidUtils.isInsertOnlyTable(tbl.getParameters());
assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
try {
PerfLogger perfLogger = SessionState.getPerfLogger();
/*
 * Move files before creating the partition since downstream processes
 * check for the existence of the partition in metadata before accessing the data.
 * If the partition is created before data is moved, downstream waiting
 * processes might move forward with partial data.
 */
Path oldPartPath = (oldPart != null) ? oldPart.getDataLocation() : null;
Path newPartPath = null;
if (inheritLocation) {
newPartPath = genPartPathFromTable(tbl, partSpec, tblDataLocationPath);
if(oldPart != null) {
/*
* If we are moving the partition across filesystem boundaries
* inherit from the table properties. Otherwise (same filesystem) use the
* original partition location.
*
* See: HIVE-1707 and HIVE-2117 for background
*/
FileSystem oldPartPathFS = oldPartPath.getFileSystem(getConf());
FileSystem loadPathFS = loadPath.getFileSystem(getConf());
if (FileUtils.equalsFileSystem(oldPartPathFS,loadPathFS)) {
newPartPath = oldPartPath;
}
}
} else {
newPartPath = oldPartPath == null
? genPartPathFromTable(tbl, partSpec, tblDataLocationPath) : oldPartPath;
}
perfLogger.PerfLogBegin("MoveTask", PerfLogger.FILE_MOVES);
// Note: the stats for ACID tables do not have any coordination with either Hive ACID logic
// like txn commits, time outs, etc.; nor the lower level sync in metastore pertaining
// to ACID updates. So they are not themselves ACID.
// Note: this assumes both paths are qualified; which they are, currently.
if (((isMmTableWrite || isFullAcidTable) && loadPath.equals(newPartPath)) ||
(loadFileType == LoadFileType.IGNORE)) {
// MM insert query, move itself is a no-op.
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("not moving " + loadPath + " to " + newPartPath + " (MM)");
}
assert !isAcidIUDoperation;
if (newFiles != null) {
listFilesCreatedByQuery(loadPath, writeId, stmtId, isMmTableWrite ? isInsertOverwrite : false, newFiles);
}
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("maybe deleting stuff from " + oldPartPath
+ " (new " + newPartPath + ") for replace");
}
} else {
// Either a non-MM query, or a load into MM table from an external source.
Path destPath = newPartPath;
if (isMmTableWrite) {
assert !isAcidIUDoperation;
// We will load into MM directory, and hide previous directories if needed.
destPath = new Path(destPath, isInsertOverwrite
? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
}
if (!isAcidIUDoperation && isFullAcidTable) {
destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
}
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("moving " + loadPath + " to " + destPath);
}
boolean isManaged = tbl.getTableType() == TableType.MANAGED_TABLE;
// TODO: why is "&& !isAcidIUDoperation" needed here?
if (!isTxnTable && ((loadFileType == LoadFileType.REPLACE_ALL) || (oldPart == null && !isAcidIUDoperation))) {
//for fullAcid tables we don't delete files for commands with OVERWRITE - we create a new
// base_x. (there is Insert Overwrite and Load Data Overwrite)
boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
boolean needRecycle = !tbl.isTemporary()
&& ReplChangeManager.isSourceOfReplication(Hive.get().getDatabase(tbl.getDbName()));
replaceFiles(tbl.getPath(), loadPath, destPath, oldPartPath, getConf(), isSrcLocal,
isAutoPurge, newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, needRecycle, isManaged, isInsertOverwrite);
} else {
FileSystem fs = destPath.getFileSystem(conf);
copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
(loadFileType == LoadFileType.OVERWRITE_EXISTING), newFiles,
tbl.getNumBuckets() > 0, isFullAcidTable, isManaged);
}
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.FILE_MOVES);
Partition newTPart = oldPart != null ? oldPart : new Partition(tbl, partSpec, newPartPath);
alterPartitionSpecInMemory(tbl, partSpec, newTPart.getTPartition(), inheritTableSpecs, newPartPath.toString());
validatePartition(newTPart);
// If config is set, table is not temporary and partition being inserted exists, capture
// the list of files added. For not yet existing partitions (insert overwrite to new partition
// or dynamic partition inserts), the add partition event will capture the list of files added.
// Generate an insert event only if inserting into an existing partition
// When inserting into a new partition, the add partition event takes care of insert event
if ((null != oldPart) && (null != newFiles)) {
if (isTxnTable) {
addWriteNotificationLog(tbl, partSpec, newFiles, writeId);
} else {
fireInsertEvent(tbl, partSpec, (loadFileType == LoadFileType.REPLACE_ALL), newFiles);
}
} else {
LOG.debug("No new files were created, and is not a replace, or we're inserting into a "
+ "partition that does not exist yet. Skipping generating INSERT event.");
}
// column stats will be inaccurate
if (resetStatistics) {
StatsSetupConst.clearColumnStatsState(newTPart.getParameters());
}
// construct list bucketing location mappings if the partition data is stored as skewed sub-directories
if (isSkewedStoreAsSubdir) {
org.apache.hadoop.hive.metastore.api.Partition newCreatedTpart = newTPart.getTPartition();
SkewedInfo skewedInfo = newCreatedTpart.getSd().getSkewedInfo();
/* Construct list bucketing location mappings from sub-directory name. */
Map<List<String>, String> skewedColValueLocationMaps = constructListBucketingLocationMap(
newPartPath, skewedInfo);
/* Add list bucketing location mappings. */
skewedInfo.setSkewedColValueLocationMaps(skewedColValueLocationMaps);
newCreatedTpart.getSd().setSkewedInfo(skewedInfo);
}
if (!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
StatsSetupConst.setBasicStatsState(newTPart.getParameters(), StatsSetupConst.FALSE);
}
if (oldPart == null) {
newTPart.getTPartition().setParameters(new HashMap<String,String>());
if (this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
StatsSetupConst.setStatsStateForCreateTable(newTPart.getParameters(),
MetaStoreUtils.getColumnNames(tbl.getCols()), StatsSetupConst.TRUE);
}
// Note: we are creating a brand new partition, so this is going to be valid for ACID.
List<FileStatus> filesForStats = null;
if (isTxnTable) {
filesForStats = AcidUtils.getAcidFilesForStats(
newTPart.getTable(), newPartPath, conf, null);
} else {
filesForStats = HiveStatsUtils.getFileStatusRecurse(
newPartPath, -1, newPartPath.getFileSystem(conf));
}
if (filesForStats != null) {
MetaStoreServerUtils.populateQuickStats(filesForStats, newTPart.getParameters());
} else {
// The ACID state is probably absent. Warning is logged in the get method.
MetaStoreServerUtils.clearQuickStats(newTPart.getParameters());
}
}
return newTPart;
} catch (IOException | MetaException | InvalidOperationException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
private void addPartitionToMetastore(Partition newTPart, boolean resetStatistics,
Table tbl, TableSnapshot tableSnapshot) throws HiveException{
try {
LOG.debug("Adding new partition " + newTPart.getSpec());
getSynchronizedMSC().add_partition(newTPart.getTPartition());
} catch (AlreadyExistsException aee) {
// When multiple users concurrently issue insert statements on the same partition, a
// side effect is that some queries may not see the partition at the time they're issued,
// but will realize the partition is actually there when trying to add it
// to the metastore, and thus get an AlreadyExistsException because some earlier query just
// created it (race condition).
// For example, imagine such a table is created:
// create table T (name char(50)) partitioned by (ds string);
// and the following two queries are launched at the same time, from different sessions:
// insert into table T partition (ds) values ('Bob', 'today'); -- creates the partition 'today'
// insert into table T partition (ds) values ('Joe', 'today'); -- will fail with AlreadyExistsException
// In that case, we want to retry with alterPartition.
LOG.debug("Caught AlreadyExistsException, trying to alter partition instead");
try {
setStatsPropAndAlterPartition(resetStatistics, tbl, newTPart, tableSnapshot);
} catch (TException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
} catch (Exception e) {
try {
final FileSystem newPathFileSystem = newTPart.getPartitionPath().getFileSystem(this.getConf());
boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
final FileStatus status = newPathFileSystem.getFileStatus(newTPart.getPartitionPath());
Hive.trashFiles(newPathFileSystem, new FileStatus[]{status}, this.getConf(), isAutoPurge);
} catch (IOException io) {
LOG.error("Could not delete partition directory contents after failed partition creation: ", io);
}
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
private void addPartitionsToMetastore(List<Partition> partitions,
boolean resetStatistics, Table tbl,
List<AcidUtils.TableSnapshot> tableSnapshots)
throws HiveException {
try {
if (partitions.isEmpty() || tableSnapshots.isEmpty()) {
return;
}
if (LOG.isDebugEnabled()) {
StringBuilder debugMsg = new StringBuilder("Adding new partitions ");
partitions.forEach(partition -> debugMsg.append(partition.getSpec() + " "));
LOG.debug(debugMsg.toString());
}
getSynchronizedMSC().add_partitions(partitions.stream().map(Partition::getTPartition)
.collect(Collectors.toList()));
} catch(AlreadyExistsException aee) {
// When multiple users concurrently issue insert statements on the same partition, a
// side effect is that some queries may not see the partition at the time they're issued,
// but will realize the partition is actually there when trying to add it
// to the metastore, and thus get an AlreadyExistsException because some earlier query just
// created it (race condition).
// For example, imagine such a table is created:
// create table T (name char(50)) partitioned by (ds string);
// and the following two queries are launched at the same time, from different sessions:
// insert into table T partition (ds) values ('Bob', 'today'); -- creates the partition 'today'
// insert into table T partition (ds) values ('Joe', 'today'); -- will fail with AlreadyExistsException
// In that case, we want to retry with alterPartition.
LOG.debug("Caught AlreadyExistsException, trying to add partitions one by one.");
assert partitions.size() == tableSnapshots.size();
for (int i = 0; i < partitions.size(); i++) {
addPartitionToMetastore(partitions.get(i), resetStatistics, tbl,
tableSnapshots.get(i));
}
} catch (Exception e) {
try {
for (Partition partition : partitions) {
final FileSystem newPathFileSystem = partition.getPartitionPath().getFileSystem(this.getConf());
boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
final FileStatus status = newPathFileSystem.getFileStatus(partition.getPartitionPath());
Hive.trashFiles(newPathFileSystem, new FileStatus[]{status}, this.getConf(), isAutoPurge);
}
} catch (IOException io) {
LOG.error("Could not delete partition directory contents after failed partition creation: ", io);
}
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
private static Path genPartPathFromTable(Table tbl, Map<String, String> partSpec,
Path tblDataLocationPath) throws MetaException {
Path partPath = new Path(tbl.getDataLocation(), Warehouse.makePartPath(partSpec));
return new Path(tblDataLocationPath.toUri().getScheme(),
tblDataLocationPath.toUri().getAuthority(), partPath.toUri().getPath());
}
/**
* Load Data commands for fullAcid tables write to base_x (if there is overwrite clause) or
* delta_x_x directory - same as any other Acid write. This method modifies the destPath to add
* this path component.
* @param writeId - write id of the operated table from current transaction (in which this operation is running)
* @param stmtId - see {@link DbTxnManager#getStmtIdAndIncrement()}
* @return appropriately modified path
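 *
 * <p>For example, assuming writeId = 7 and stmtId = 3 and the usual
 * zero-padded Acid directory naming, the returned path gains one of:
 * <pre>
 *   REPLACE_ALL   -> destPath/base_0000007
 *   KEEP_EXISTING -> destPath/delta_0000007_0000007_0003
 * </pre>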
*/
private Path fixFullAcidPathForLoadData(LoadFileType loadFileType, Path destPath, long writeId, int stmtId, Table tbl) throws HiveException {
switch (loadFileType) {
case REPLACE_ALL:
destPath = new Path(destPath, AcidUtils.baseDir(writeId));
break;
case KEEP_EXISTING:
destPath = new Path(destPath, AcidUtils.deltaSubdir(writeId, writeId, stmtId));
break;
case OVERWRITE_EXISTING:
//should not happen here - this is for replication
default:
throw new IllegalArgumentException("Unexpected " + LoadFileType.class.getName() + " " + loadFileType);
}
try {
FileSystem fs = tbl.getDataLocation().getFileSystem(SessionState.getSessionConf());
if(!FileUtils.mkdir(fs, destPath, conf)) {
LOG.warn(destPath + " already exists; expected to create it");
}
} catch (IOException e) {
throw new HiveException("load: error while creating " + destPath + ";loadFileType=" + loadFileType, e);
}
return destPath;
}
private boolean areEventsForDmlNeeded(Table tbl, Partition oldPart) {
// For Acid IUD, add partition is a metadata-only operation, so we need to record the newly
// added files in the TXN_WRITE_NOTIFICATION_LOG table.
return conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML) && !tbl.isTemporary() &&
((null != oldPart) || AcidUtils.isTransactionalTable(tbl));
}
public static void listFilesInsideAcidDirectory(Path acidDir, FileSystem srcFs, List<Path> newFiles)
throws IOException {
// list out all the files/directory in the path
FileStatus[] acidFiles;
acidFiles = srcFs.listStatus(acidDir);
if (acidFiles == null) {
LOG.debug("No files added by this query in: " + acidDir);
return;
}
LOG.debug("Listing files under " + acidDir);
for (FileStatus acidFile : acidFiles) {
// need to list out only files, ignore folders.
if (!acidFile.isDirectory()) {
newFiles.add(acidFile.getPath());
} else {
listFilesInsideAcidDirectory(acidFile.getPath(), srcFs, newFiles);
}
}
}
private void listFilesCreatedByQuery(Path loadPath, long writeId, int stmtId,
boolean isInsertOverwrite, List<Path> newFiles) throws HiveException {
Path acidDir = new Path(loadPath, AcidUtils.baseOrDeltaSubdir(isInsertOverwrite, writeId, writeId, stmtId));
try {
FileSystem srcFs = loadPath.getFileSystem(conf);
if (srcFs.exists(acidDir) && srcFs.isDirectory(acidDir)){
// list out all the files in the path
listFilesInsideAcidDirectory(acidDir, srcFs, newFiles);
} else {
LOG.info("Directory does not exist: " + acidDir);
}
} catch (IOException e) {
LOG.error("Error listing files", e);
throw new HiveException(e);
}
}
private void setStatsPropAndAlterPartition(boolean resetStatistics, Table tbl,
Partition newTPart, TableSnapshot tableSnapshot) throws TException {
EnvironmentContext ec = new EnvironmentContext();
if (!resetStatistics) {
ec.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
}
LOG.debug("Altering existing partition " + newTPart.getSpec());
getSynchronizedMSC().alter_partition(tbl.getCatName(),
tbl.getDbName(), tbl.getTableName(), newTPart.getTPartition(), ec,
tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList());
}
private void setStatsPropAndAlterPartitions(boolean resetStatistics, Table tbl,
List<Partition> partitions,
AcidUtils.TableSnapshot tableSnapshot)
throws TException {
if (partitions.isEmpty()) {
return;
}
EnvironmentContext ec = new EnvironmentContext();
if (!resetStatistics) {
ec.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
}
if (LOG.isDebugEnabled()) {
StringBuilder sb = new StringBuilder("Altering existing partitions ");
partitions.forEach(p -> sb.append(p.getSpec()));
LOG.debug(sb.toString());
}
String validWriteIdList = null;
long writeId = 0L;
if (tableSnapshot != null) {
validWriteIdList = tableSnapshot.getValidWriteIdList();
writeId = tableSnapshot.getWriteId();
}
getSynchronizedMSC().alter_partitions(tbl.getCatName(), tbl.getDbName(), tbl.getTableName(),
partitions.stream().map(Partition::getTPartition).collect(Collectors.toList()),
ec, validWriteIdList, writeId);
}
/**
* Walk through sub-directory tree to construct list bucketing location map.
*
* @param fSta
* @param fSys
* @param skewedColValueLocationMaps
* @param newPartPath
* @param skewedInfo
* @throws IOException
*/
private void walkDirTree(FileStatus fSta, FileSystem fSys,
Map<List<String>, String> skewedColValueLocationMaps, Path newPartPath, SkewedInfo skewedInfo)
throws IOException {
/* Base Case. It's leaf. */
if (!fSta.isDir()) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Processing LB leaf " + fSta.getPath());
}
/* construct one location map if not exists. */
constructOneLBLocationMap(fSta, skewedColValueLocationMaps, newPartPath, skewedInfo);
return;
}
/* dfs. */
FileStatus[] children = fSys.listStatus(fSta.getPath(), FileUtils.HIDDEN_FILES_PATH_FILTER);
if (children != null) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Processing LB dir " + fSta.getPath());
}
for (FileStatus child : children) {
walkDirTree(child, fSys, skewedColValueLocationMaps, newPartPath, skewedInfo);
}
}
}
/**
* Construct a list bucketing location map
* @param fSta
* @param skewedColValueLocationMaps
* @param newPartPath
* @param skewedInfo
*/
private void constructOneLBLocationMap(FileStatus fSta,
Map<List<String>, String> skewedColValueLocationMaps,
Path newPartPath, SkewedInfo skewedInfo) {
Path lbdPath = fSta.getPath().getParent();
List<String> skewedValue = new ArrayList<String>();
String lbDirName = FileUtils.unescapePathName(lbdPath.toString());
String partDirName = FileUtils.unescapePathName(newPartPath.toString());
String lbDirSuffix = lbDirName.replace(partDirName, ""); // TODO: should it rather do a prefix?
if (lbDirSuffix.startsWith(Path.SEPARATOR)) {
lbDirSuffix = lbDirSuffix.substring(1);
}
String[] dirNames = lbDirSuffix.split(Path.SEPARATOR);
int keysFound = 0, dirsToTake = 0;
int colCount = skewedInfo.getSkewedColNames().size();
while (dirsToTake < dirNames.length && keysFound < colCount) {
String dirName = dirNames[dirsToTake++];
// Construct skewed-value to location map except default directory.
// why? query logic knows the default-dir structure and doesn't need to get it from the map
if (dirName.equalsIgnoreCase(ListBucketingPrunerUtils.HIVE_LIST_BUCKETING_DEFAULT_DIR_NAME)) {
++keysFound;
} else {
String[] kv = dirName.split("=");
if (kv.length == 2) {
skewedValue.add(kv[1]);
++keysFound;
} else {
// TODO: we should really probably throw. Keep the existing logic for now.
LOG.warn("Skipping unknown directory " + dirName
+ " when expecting LB keys or default directory (from " + lbDirName + ")");
}
}
}
for (int i = 0; i < (dirNames.length - dirsToTake); ++i) {
lbdPath = lbdPath.getParent();
}
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Saving LB location " + lbdPath + " based on "
+ colCount + " keys and " + fSta.getPath());
}
if ((skewedValue.size() > 0) && (skewedValue.size() == colCount)
&& !skewedColValueLocationMaps.containsKey(skewedValue)) {
skewedColValueLocationMaps.put(skewedValue, lbdPath.toString());
}
}
/**
 * Construct a list bucketing location map from the given partition path.
 *
 * @param newPartPath
 * @param skewedInfo
 * @return mapping from skewed column values to the directory that stores them
* @throws IOException
* @throws FileNotFoundException
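 *
 * <p>For example, given skewed columns (col1, col2) and a layout such as
 * {@code newPartPath/col1=a/col2=b/<files>}, the resulting map would contain
 * an entry {@code [a, b] -> newPartPath/col1=a/col2=b}; the list bucketing
 * default directory is intentionally excluded from the map.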
*/
private Map<List<String>, String> constructListBucketingLocationMap(Path newPartPath,
SkewedInfo skewedInfo) throws IOException, FileNotFoundException {
Map<List<String>, String> skewedColValueLocationMaps = new HashMap<List<String>, String>();
FileSystem fSys = newPartPath.getFileSystem(conf);
walkDirTree(fSys.getFileStatus(newPartPath),
fSys, skewedColValueLocationMaps, newPartPath, skewedInfo);
return skewedColValueLocationMaps;
}
/**
* Get the valid partitions from the path
* @param numDP number of dynamic partitions
* @param loadPath
* @return Set of valid partitions
* @throws HiveException
*/
private Set<Path> getValidPartitionsInPath(
int numDP, int numLB, Path loadPath, Long writeId, int stmtId,
boolean isMmTable, boolean isInsertOverwrite) throws HiveException {
Set<Path> validPartitions = new HashSet<Path>();
try {
FileSystem fs = loadPath.getFileSystem(conf);
if (!isMmTable) {
List<FileStatus> leafStatus = HiveStatsUtils.getFileStatusRecurse(loadPath, numDP, fs);
// Check for empty partitions
for (FileStatus s : leafStatus) {
if (!s.isDirectory()) {
throw new HiveException("partition " + s.getPath() + " is not a directory!");
}
Path dpPath = s.getPath();
validPartitions.add(dpPath);
}
} else {
// The non-MM path only finds new partitions, as it is looking at the temp path.
// To produce the same effect, we will find all the partitions affected by this txn ID.
// Note: we ignore the statement ID here, because it's currently irrelevant for MoveTask
// where this is used; we always want to load everything; also the only case where
// we have multiple statements anyway is union.
Utilities.FILE_OP_LOGGER.trace(
"Looking for dynamic partitions in {} ({} levels)", loadPath, numDP);
Path[] leafStatus = Utilities.getMmDirectoryCandidates(
fs, loadPath, numDP, null, writeId, -1, conf, isInsertOverwrite);
for (Path p : leafStatus) {
Path dpPath = p.getParent(); // Skip the MM directory that we have found.
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Found DP " + dpPath);
}
validPartitions.add(dpPath);
}
}
} catch (IOException e) {
throw new HiveException(e);
}
int partsToLoad = validPartitions.size();
if (partsToLoad == 0) {
LOG.warn("No partition is generated by dynamic partitioning");
}
if (partsToLoad > conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS)) {
throw new HiveException("Number of dynamic partitions created is " + partsToLoad
+ ", which is more than "
+ conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS)
+". To solve this try to set " + HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS.varname
+ " to at least " + partsToLoad + '.');
}
return validPartitions;
}
/**
* Given a source directory name of the load path, load all dynamically generated partitions
* into the specified table and return a list of strings that represent the dynamic partition
* paths.
* @param loadPath
* @param tableName
* @param partSpec
* @param loadFileType
 * @param numDP number of dynamic partitions
 * @param numLB number of list bucketing (skewed) levels
 * @param isAcid true if this is an ACID operation
 * @param writeId writeId, can be 0 unless isAcid == true
 * @param stmtId statement ID of the current load statement
 * @param resetStatistics if true, reset statistics. Do not reset statistics otherwise.
* @return partition map details (PartitionSpec and Partition)
* @throws HiveException
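 *
 * <p>Illustrative usage sketch (names are placeholders): loading two levels
 * of dynamic partitions from a staging directory into a non-ACID table:
 * <pre>{@code
 * Map<String, String> partSpec = new LinkedHashMap<>();
 * partSpec.put("country", null); // dynamic
 * partSpec.put("ds", null);      // dynamic
 * Map<Map<String, String>, Partition> loaded = Hive.get().loadDynamicPartitions(
 *     new Path("/tmp/staging"), "mydb.sales", partSpec, LoadFileType.REPLACE_ALL,
 *     2, 0, false, 0L, 0, false, AcidUtils.Operation.NOT_ACID, false);
 * }</pre>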
*/
public Map<Map<String, String>, Partition> loadDynamicPartitions(final Path loadPath,
final String tableName, final Map<String, String> partSpec, final LoadFileType loadFileType,
final int numDP, final int numLB, final boolean isAcid, final long writeId, final int stmtId,
final boolean resetStatistics, final AcidUtils.Operation operation,
boolean isInsertOverwrite) throws HiveException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin("MoveTask", PerfLogger.LOAD_DYNAMIC_PARTITIONS);
// Get all valid partition paths and existing partitions for them (if any)
final Table tbl = getTable(tableName);
final Set<Path> validPartitions = getValidPartitionsInPath(numDP, numLB, loadPath, writeId, stmtId,
AcidUtils.isInsertOnlyTable(tbl.getParameters()), isInsertOverwrite);
final int partsToLoad = validPartitions.size();
final AtomicInteger partitionsLoaded = new AtomicInteger(0);
final boolean inPlaceEligible = conf.getLong("fs.trash.interval", 0) <= 0
&& InPlaceUpdate.canRenderInPlace(conf) && !SessionState.getConsole().getIsSilent();
final PrintStream ps = (inPlaceEligible) ? SessionState.getConsole().getInfoStream() : null;
final SessionState parentSession = SessionState.get();
List<Callable<Partition>> tasks = Lists.newLinkedList();
final class PartitionDetails {
Map<String, String> fullSpec;
Partition partition;
List<Path> newFiles;
boolean hasOldPartition = false;
AcidUtils.TableSnapshot tableSnapshot;
}
Map<Path, PartitionDetails> partitionDetailsMap =
Collections.synchronizedMap(new LinkedHashMap<>());
// calculate full path spec for each valid partition path
validPartitions.forEach(partPath -> {
Map<String, String> fullPartSpec = Maps.newLinkedHashMap(partSpec);
if (!Warehouse.makeSpecFromName(fullPartSpec, partPath, new HashSet<>(partSpec.keySet()))) {
Utilities.FILE_OP_LOGGER.warn("Ignoring invalid DP directory " + partPath);
} else {
PartitionDetails details = new PartitionDetails();
details.fullSpec = fullPartSpec;
partitionDetailsMap.put(partPath, details);
}
});
// fetch all the partitions matching the part spec using the partition iterable
// this way the maximum batch size configuration parameter is considered
PartitionIterable partitionIterable = new PartitionIterable(Hive.get(), tbl, partSpec,
conf.getInt(MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX.getVarname(), 300));
Iterator<Partition> iterator = partitionIterable.iterator();
// Match valid partition path to partitions
while (iterator.hasNext()) {
Partition partition = iterator.next();
partitionDetailsMap.entrySet().stream()
.filter(entry -> entry.getValue().fullSpec.equals(partition.getSpec()))
.findAny().ifPresent(entry -> {
entry.getValue().partition = partition;
entry.getValue().hasOldPartition = true;
});
}
boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
AcidUtils.TableSnapshot tableSnapshot = isTxnTable ? getTableSnapshot(tbl, writeId) : null;
for (Entry<Path, PartitionDetails> entry : partitionDetailsMap.entrySet()) {
tasks.add(() -> {
PartitionDetails partitionDetails = entry.getValue();
Map<String, String> fullPartSpec = partitionDetails.fullSpec;
try {
SessionState.setCurrentSessionState(parentSession);
LOG.info("New loading path = " + entry.getKey() + " withPartSpec " + fullPartSpec);
List<Path> newFiles = Lists.newArrayList();
Partition oldPartition = partitionDetails.partition;
// load the partition
Partition partition = loadPartitionInternal(entry.getKey(), tbl,
fullPartSpec, oldPartition, loadFileType, true, false, numLB > 0, false, isAcid,
resetStatistics, writeId, stmtId, isInsertOverwrite, isTxnTable, newFiles);
          if (tableSnapshot != null) {
            partition.getTPartition().setWriteId(tableSnapshot.getWriteId());
          }
          partitionDetails.tableSnapshot = tableSnapshot;
          // If the partition already existed before the load, there is no need to add it
          // again to the metastore, so only record newly created partitions here.
          if (oldPartition == null) {
            partitionDetails.newFiles = newFiles;
            partitionDetails.partition = partition;
          }
if (inPlaceEligible) {
synchronized (ps) {
InPlaceUpdate.rePositionCursor(ps);
partitionsLoaded.incrementAndGet();
InPlaceUpdate.reprintLine(ps, "Loaded : " + partitionsLoaded.get() + "/"
+ partsToLoad + " partitions.");
}
}
return partition;
} catch (Exception e) {
LOG.error("Exception when loading partition with parameters "
+ " partPath=" + entry.getKey() + ", "
+ " table=" + tbl.getTableName() + ", "
+ " partSpec=" + fullPartSpec + ", "
+ " loadFileType=" + loadFileType.toString() + ", "
+ " listBucketingLevel=" + numLB + ", "
+ " isAcid=" + isAcid + ", "
+ " resetStatistics=" + resetStatistics, e);
throw e;
}
});
}
int poolSize = conf.getInt(ConfVars.HIVE_LOAD_DYNAMIC_PARTITIONS_THREAD_COUNT.varname, 1);
ExecutorService executor = Executors.newFixedThreadPool(poolSize,
            new ThreadFactoryBuilder().setDaemon(true).setNameFormat("load-dynamic-partitions-%d").build());
List<Future<Partition>> futures = Lists.newLinkedList();
Map<Map<String, String>, Partition> result = Maps.newLinkedHashMap();
try {
futures = executor.invokeAll(tasks);
LOG.debug("Number of partitionsToAdd to be added is " + futures.size());
for (Future<Partition> future : futures) {
Partition partition = future.get();
result.put(partition.getSpec(), partition);
}
// add new partitions in batch
addPartitionsToMetastore(
partitionDetailsMap.entrySet()
.stream()
.filter(entry -> !entry.getValue().hasOldPartition)
.map(entry -> entry.getValue().partition)
.collect(Collectors.toList()),
resetStatistics,
tbl,
partitionDetailsMap.entrySet()
.stream()
.filter(entry -> !entry.getValue().hasOldPartition)
.map(entry -> entry.getValue().tableSnapshot)
.collect(Collectors.toList()));
      // For an ACID table, add the acid_write event with the file list at load time itself,
      // but only after the partition is created.
for (Entry<Path, PartitionDetails> entry : partitionDetailsMap.entrySet()) {
PartitionDetails partitionDetails = entry.getValue();
if (isTxnTable && partitionDetails.newFiles != null) {
addWriteNotificationLog(tbl, partitionDetails.fullSpec, partitionDetails.newFiles, writeId);
}
}
setStatsPropAndAlterPartitions(resetStatistics, tbl,
partitionDetailsMap.entrySet().stream()
.filter(entry -> entry.getValue().hasOldPartition)
.map(entry -> entry.getValue().partition)
.collect(Collectors.toList()), tableSnapshot);
    } catch (InterruptedException | ExecutionException e) {
      throw new HiveException("Exception when loading " + validPartitions.size()
          + " partitions in table " + tbl.getTableName()
          + " with loadPath=" + loadPath, e);
} catch (TException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
} catch (Exception e) {
      StringBuilder logMsg = new StringBuilder();
      logMsg.append("Exception when loading partitions with parameters ");
logMsg.append("partPaths=");
validPartitions.forEach(path -> logMsg.append(path + ", "));
logMsg.append("table=" + tbl.getTableName() + ", ").
append("partSpec=" + partSpec + ", ").
append("loadFileType=" + loadFileType.toString() + ", ").
append("listBucketingLevel=" + numLB + ", ").
append("isAcid=" + isAcid + ", ").
append("resetStatistics=" + resetStatistics);
LOG.error(logMsg.toString(), e);
throw e;
} finally {
LOG.debug("Cancelling " + futures.size() + " dynamic loading tasks");
executor.shutdownNow();
}
try {
if (isAcid) {
List<String> partNames =
result.values().stream().map(Partition::getName).collect(Collectors.toList());
getMSC().addDynamicPartitions(parentSession.getTxnMgr().getCurrentTxnId(), writeId,
tbl.getDbName(), tbl.getTableName(), partNames,
AcidUtils.toDataOperationType(operation));
}
LOG.info("Loaded " + result.size() + "partitionsToAdd");
perfLogger.PerfLogEnd("MoveTask", PerfLogger.LOAD_DYNAMIC_PARTITIONS);
return result;
} catch (TException te) {
LOG.error(StringUtils.stringifyException(te));
throw new HiveException("Exception updating metastore for acid table "
+ tableName + " with partitions " + result.values(), te);
}
}
/**
   * Load a directory into a Hive table. The files in loadPath are moved into
   * Hive, altering the table's existing content; the source directory itself
   * is not removed. If the table does not exist, an exception is thrown.
*
* @param loadPath
* Directory containing files to load into Table
* @param tableName
* name of table to be loaded.
* @param loadFileType
* if REPLACE_ALL - replace files in the table,
* otherwise add files to table (KEEP_EXISTING, OVERWRITE_EXISTING)
* @param isSrcLocal
* If the source directory is LOCAL
* @param isSkewedStoreAsSubdir
* if list bucketing enabled
* @param isAcidIUDoperation true if this is an ACID based Insert [overwrite]/update/delete
* @param resetStatistics should reset statistics as part of move.
* @param writeId write ID allocated for the current load operation
* @param stmtId statement ID of the current load statement
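   *
   * <p>A minimal usage sketch (illustrative only; the path, table name and
   * write ID below are hypothetical, and the relevant imports are assumed):</p>
   * <pre>{@code
   * Hive db = Hive.get(conf);
   * db.loadTable(new Path("/tmp/staging/t1"), "default.t1",
   *     LoadFileType.REPLACE_ALL, false, false, false, false, 1L, 0, false);
   * }</pre>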
*/
public void loadTable(Path loadPath, String tableName, LoadFileType loadFileType, boolean isSrcLocal,
boolean isSkewedStoreAsSubdir, boolean isAcidIUDoperation, boolean resetStatistics,
Long writeId, int stmtId, boolean isInsertOverwrite) throws HiveException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin("MoveTask", PerfLogger.LOAD_TABLE);
List<Path> newFiles = null;
Table tbl = getTable(tableName);
assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
boolean isMmTable = AcidUtils.isInsertOnlyTable(tbl);
boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
if (conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML) && !tbl.isTemporary()) {
newFiles = Collections.synchronizedList(new ArrayList<Path>());
}
    // Note: this assumes both paths are qualified, which they currently are.
if (((isMmTable || isFullAcidTable) && loadPath.equals(tbl.getPath())) || (loadFileType == LoadFileType.IGNORE)) {
/**
* some operations on Transactional tables (e.g. Import) write directly to the final location
* and avoid the 'move' operation. Since MoveTask does other things, setting 'loadPath' to be
* the table/partition path indicates that the 'file move' part of MoveTask is not needed.
*/
if (Utilities.FILE_OP_LOGGER.isDebugEnabled()) {
Utilities.FILE_OP_LOGGER.debug(
"not moving " + loadPath + " to " + tbl.getPath() + " (MM)");
}
//new files list is required only for event notification.
if (newFiles != null) {
        listFilesCreatedByQuery(loadPath, writeId, stmtId, isMmTable && isInsertOverwrite, newFiles);
}
} else {
// Either a non-MM query, or a load into MM table from an external source.
Path tblPath = tbl.getPath();
Path destPath = tblPath;
if (isMmTable) {
assert !isAcidIUDoperation;
// We will load into MM directory, and hide previous directories if needed.
destPath = new Path(destPath, isInsertOverwrite
? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
}
if (!isAcidIUDoperation && isFullAcidTable) {
destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
}
Utilities.FILE_OP_LOGGER.debug("moving " + loadPath + " to " + tblPath
+ " (replace = " + loadFileType + ")");
perfLogger.PerfLogBegin("MoveTask", PerfLogger.FILE_MOVES);
boolean isManaged = tbl.getTableType() == TableType.MANAGED_TABLE;
if (loadFileType == LoadFileType.REPLACE_ALL && !isTxnTable) {
        // For full ACID tables we don't want to delete any files even for OVERWRITE; see HIVE-14988/HIVE-17361.
boolean isAutopurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
boolean needRecycle = !tbl.isTemporary()
&& ReplChangeManager.isSourceOfReplication(Hive.get().getDatabase(tbl.getDbName()));
replaceFiles(tblPath, loadPath, destPath, tblPath, conf, isSrcLocal, isAutopurge,
newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, needRecycle, isManaged, isInsertOverwrite);
} else {
try {
FileSystem fs = tbl.getDataLocation().getFileSystem(conf);
copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
loadFileType == LoadFileType.OVERWRITE_EXISTING, newFiles,
tbl.getNumBuckets() > 0, isFullAcidTable, isManaged);
} catch (IOException e) {
throw new HiveException("addFiles: filesystem error in check phase", e);
}
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.FILE_MOVES);
}
if (!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
LOG.debug("setting table statistics false for " + tbl.getDbName() + "." + tbl.getTableName());
StatsSetupConst.setBasicStatsState(tbl.getParameters(), StatsSetupConst.FALSE);
}
//column stats will be inaccurate
if (resetStatistics) {
LOG.debug("Clearing table statistics for " + tbl.getDbName() + "." + tbl.getTableName());
StatsSetupConst.clearColumnStatsState(tbl.getParameters());
}
try {
if (isSkewedStoreAsSubdir) {
SkewedInfo skewedInfo = tbl.getSkewedInfo();
// Construct list bucketing location mappings from sub-directory name.
Map<List<String>, String> skewedColValueLocationMaps = constructListBucketingLocationMap(
tbl.getPath(), skewedInfo);
// Add list bucketing location mappings.
skewedInfo.setSkewedColValueLocationMaps(skewedColValueLocationMaps);
}
} catch (IOException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
EnvironmentContext environmentContext = null;
if (!resetStatistics) {
environmentContext = new EnvironmentContext();
environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
}
alterTable(tbl.getCatName(), tbl.getDbName(), tbl.getTableName(), tbl, false, environmentContext,
true, ((writeId == null) ? 0 : writeId));
if (AcidUtils.isTransactionalTable(tbl)) {
addWriteNotificationLog(tbl, null, newFiles, writeId);
} else {
fireInsertEvent(tbl, null, (loadFileType == LoadFileType.REPLACE_ALL), newFiles);
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.LOAD_TABLE);
}
/**
* Creates a partition.
*
* @param tbl
* table for which partition needs to be created
* @param partSpec
* partition keys and their values
* @return created partition object
* @throws HiveException
* if table doesn't exist or partition already exists
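   *
   * <p>A minimal sketch (illustrative; {@code db} is a {@code Hive} instance
   * and the table and partition values are hypothetical):</p>
   * <pre>{@code
   * Table t = db.getTable("default.sales");       // partitioned by ds
   * Map<String, String> spec = new HashMap<>();
   * spec.put("ds", "2020-01-01");
   * Partition p = db.createPartition(t, spec);
   * }</pre>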
*/
@VisibleForTesting
public Partition createPartition(Table tbl, Map<String, String> partSpec) throws HiveException {
try {
org.apache.hadoop.hive.metastore.api.Partition part =
Partition.createMetaPartitionObject(tbl, partSpec, null);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
part.setWriteId(tableSnapshot != null ? tableSnapshot.getWriteId() : 0);
return new Partition(tbl, getMSC().add_partition(part));
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public List<Partition> createPartitions(AddPartitionDesc addPartitionDesc) throws HiveException {
// TODO: catalog name everywhere in this method
Table tbl = getTable(addPartitionDesc.getDbName(), addPartitionDesc.getTableName());
int size = addPartitionDesc.getPartitionCount();
List<org.apache.hadoop.hive.metastore.api.Partition> in =
new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>(size);
long writeId;
String validWriteIdList;
// In case of replication, get the writeId from the source and use valid write Id list
// for replication.
if (addPartitionDesc.getReplicationSpec().isInReplicationScope() &&
addPartitionDesc.getPartition(0).getWriteId() > 0) {
writeId = addPartitionDesc.getPartition(0).getWriteId();
// We need a valid writeId list for a transactional change. During replication we do not
// have a valid writeId list which was used for this on the source. But we know for sure
// that the writeId associated with it was valid then (otherwise the change would have
// failed on the source). So use a valid transaction list with only that writeId.
validWriteIdList = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(),
tbl.getTableName()),
new long[0], new BitSet(), writeId).writeToString();
} else {
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
if (tableSnapshot != null && tableSnapshot.getWriteId() > 0) {
writeId = tableSnapshot.getWriteId();
validWriteIdList = tableSnapshot.getValidWriteIdList();
} else {
writeId = -1;
validWriteIdList = null;
}
}
for (int i = 0; i < size; ++i) {
org.apache.hadoop.hive.metastore.api.Partition tmpPart =
convertAddSpecToMetaPartition(tbl, addPartitionDesc.getPartition(i), conf);
if (tmpPart != null && writeId > 0) {
tmpPart.setWriteId(writeId);
}
in.add(tmpPart);
}
List<Partition> out = new ArrayList<Partition>();
try {
if (!addPartitionDesc.getReplicationSpec().isInReplicationScope()){
// TODO: normally, the result is not necessary; might make sense to pass false
for (org.apache.hadoop.hive.metastore.api.Partition outPart
: getMSC().add_partitions(in, addPartitionDesc.isIfNotExists(), true)) {
out.add(new Partition(tbl, outPart));
}
} else {
      // For replication add-ptns, we need to follow an insert-if-not-exists, alter-if-exists scenario.
// TODO : ideally, we should push this mechanism to the metastore, because, otherwise, we have
// no choice but to iterate over the partitions here.
List<org.apache.hadoop.hive.metastore.api.Partition> partsToAdd = new ArrayList<>();
List<org.apache.hadoop.hive.metastore.api.Partition> partsToAlter = new ArrayList<>();
List<String> part_names = new ArrayList<>();
for (org.apache.hadoop.hive.metastore.api.Partition p: in){
part_names.add(Warehouse.makePartName(tbl.getPartitionKeys(), p.getValues()));
try {
org.apache.hadoop.hive.metastore.api.Partition ptn =
getMSC().getPartition(addPartitionDesc.getDbName(), addPartitionDesc.getTableName(), p.getValues());
if (addPartitionDesc.getReplicationSpec().allowReplacementInto(ptn.getParameters())){
ReplicationSpec.copyLastReplId(ptn.getParameters(), p.getParameters());
partsToAlter.add(p);
} // else ptn already exists, but we do nothing with it.
} catch (NoSuchObjectException nsoe){
// if the object does not exist, we want to add it.
partsToAdd.add(p);
}
}
for (org.apache.hadoop.hive.metastore.api.Partition outPart
: getMSC().add_partitions(partsToAdd, addPartitionDesc.isIfNotExists(), true)) {
out.add(new Partition(tbl, outPart));
}
EnvironmentContext ec = new EnvironmentContext();
      // In case of replication, statistics are obtained from the source, so do not update them
      // on the replica.
ec.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
getMSC().alter_partitions(addPartitionDesc.getDbName(), addPartitionDesc.getTableName(),
partsToAlter, ec, validWriteIdList, writeId);
      for (org.apache.hadoop.hive.metastore.api.Partition outPart :
          getMSC().getPartitionsByNames(addPartitionDesc.getDbName(), addPartitionDesc.getTableName(), part_names)) {
        out.add(new Partition(tbl, outPart));
}
}
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return out;
}
public static org.apache.hadoop.hive.metastore.api.Partition convertAddSpecToMetaPartition(
Table tbl, AddPartitionDesc.OnePartitionDesc addSpec, final HiveConf conf) throws HiveException {
Path location = addSpec.getLocation() != null
? new Path(tbl.getPath(), addSpec.getLocation()) : null;
if (location != null) {
      // Ensure that it is a fully qualified path (in most cases it will be, since tbl.getPath() is fully qualified)
location = new Path(Utilities.getQualifiedPath(conf, location));
}
org.apache.hadoop.hive.metastore.api.Partition part =
Partition.createMetaPartitionObject(tbl, addSpec.getPartSpec(), location);
if (addSpec.getPartParams() != null) {
part.setParameters(addSpec.getPartParams());
}
if (addSpec.getInputFormat() != null) {
part.getSd().setInputFormat(addSpec.getInputFormat());
}
if (addSpec.getOutputFormat() != null) {
part.getSd().setOutputFormat(addSpec.getOutputFormat());
}
if (addSpec.getNumBuckets() != -1) {
part.getSd().setNumBuckets(addSpec.getNumBuckets());
}
if (addSpec.getCols() != null) {
part.getSd().setCols(addSpec.getCols());
}
if (addSpec.getSerializationLib() != null) {
part.getSd().getSerdeInfo().setSerializationLib(addSpec.getSerializationLib());
}
if (addSpec.getSerdeParams() != null) {
part.getSd().getSerdeInfo().setParameters(addSpec.getSerdeParams());
}
if (addSpec.getBucketCols() != null) {
part.getSd().setBucketCols(addSpec.getBucketCols());
}
if (addSpec.getSortCols() != null) {
part.getSd().setSortCols(addSpec.getSortCols());
}
if (addSpec.getColStats() != null) {
part.setColStats(addSpec.getColStats());
// Statistics will have an associated write Id for a transactional table. We need it to
// update column statistics.
part.setWriteId(addSpec.getWriteId());
}
return part;
}
public Partition getPartition(Table tbl, Map<String, String> partSpec,
boolean forceCreate) throws HiveException {
return getPartition(tbl, partSpec, forceCreate, null, true);
}
/**
* Returns partition metadata
*
* @param tbl
* the partition's table
* @param partSpec
* partition keys and values
   * @param forceCreate
   *          if true and the partition does not exist then the partition is
   *          created
* @param partPath the path where the partition data is located
* @param inheritTableSpecs whether to copy over the table specs for if/of/serde
* @return result partition object or null if there is no partition
* @throws HiveException
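   *
   * <p>A minimal sketch of the force-create path (illustrative; the spec is
   * hypothetical and the partition is assumed not to exist yet):</p>
   * <pre>{@code
   * Map<String, String> spec = new HashMap<>();
   * spec.put("ds", "2020-01-01");
   * Partition p = db.getPartition(tbl, spec, true); // creates it if absent
   * }</pre>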
*/
public Partition getPartition(Table tbl, Map<String, String> partSpec,
boolean forceCreate, String partPath, boolean inheritTableSpecs) throws HiveException {
tbl.validatePartColumnNames(partSpec, true);
List<String> pvals = new ArrayList<String>();
for (FieldSchema field : tbl.getPartCols()) {
String val = partSpec.get(field.getName());
// enable dynamic partitioning
if ((val == null && !HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING))
|| (val != null && val.length() == 0)) {
throw new HiveException("get partition: Value for key "
+ field.getName() + " is null or empty");
} else if (val != null){
pvals.add(val);
}
}
org.apache.hadoop.hive.metastore.api.Partition tpart = null;
try {
tpart = getSynchronizedMSC().getPartitionWithAuthInfo(tbl.getDbName(),
tbl.getTableName(), pvals, getUserName(), getGroupNames());
} catch (NoSuchObjectException nsoe) {
// this means no partition exists for the given partition
// key value pairs - thrift cannot handle null return values, hence
// getPartition() throws NoSuchObjectException to indicate null partition
tpart = null;
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
try {
if (forceCreate) {
if (tpart == null) {
LOG.debug("creating partition for table " + tbl.getTableName()
+ " with partition spec : " + partSpec);
try {
tpart = getSynchronizedMSC().appendPartition(tbl.getDbName(), tbl.getTableName(), pvals);
} catch (AlreadyExistsException aee) {
LOG.debug("Caught already exists exception, trying to alter partition instead");
tpart = getSynchronizedMSC().getPartitionWithAuthInfo(tbl.getDbName(),
tbl.getTableName(), pvals, getUserName(), getGroupNames());
alterPartitionSpec(tbl, partSpec, tpart, inheritTableSpecs, partPath);
} catch (Exception e) {
if (CheckJDOException.isJDODataStoreException(e)) {
            // Using the utility method above, so that JDODataStoreException doesn't
            // have to be referenced here. This helps avoid adding a JDO dependency
            // for HCatalog client uses.
LOG.debug("Caught JDO exception, trying to alter partition instead");
tpart = getSynchronizedMSC().getPartitionWithAuthInfo(tbl.getDbName(),
tbl.getTableName(), pvals, getUserName(), getGroupNames());
if (tpart == null) {
// This means the exception was caused by something other than a race condition
// in creating the partition, since the partition still doesn't exist.
throw e;
}
alterPartitionSpec(tbl, partSpec, tpart, inheritTableSpecs, partPath);
} else {
throw e;
}
}
}
else {
alterPartitionSpec(tbl, partSpec, tpart, inheritTableSpecs, partPath);
fireInsertEvent(tbl, partSpec, true, null);
}
}
if (tpart == null) {
return null;
}
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return new Partition(tbl, tpart);
}
private void alterPartitionSpec(Table tbl,
Map<String, String> partSpec,
org.apache.hadoop.hive.metastore.api.Partition tpart,
boolean inheritTableSpecs,
String partPath) throws HiveException, InvalidOperationException {
alterPartitionSpecInMemory(tbl, partSpec, tpart, inheritTableSpecs, partPath);
String fullName = tbl.getTableName();
if (!org.apache.commons.lang.StringUtils.isEmpty(tbl.getDbName())) {
fullName = tbl.getFullyQualifiedName();
}
alterPartition(tbl.getCatalogName(), tbl.getDbName(), tbl.getTableName(),
new Partition(tbl, tpart), null, true);
}
private void alterPartitionSpecInMemory(Table tbl,
Map<String, String> partSpec,
org.apache.hadoop.hive.metastore.api.Partition tpart,
boolean inheritTableSpecs,
String partPath) throws HiveException, InvalidOperationException {
LOG.debug("altering partition for table " + tbl.getTableName() + " with partition spec : "
+ partSpec);
if (inheritTableSpecs) {
tpart.getSd().setOutputFormat(tbl.getTTable().getSd().getOutputFormat());
tpart.getSd().setInputFormat(tbl.getTTable().getSd().getInputFormat());
tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib());
tpart.getSd().getSerdeInfo().setParameters(
tbl.getTTable().getSd().getSerdeInfo().getParameters());
tpart.getSd().setBucketCols(tbl.getBucketCols());
tpart.getSd().setNumBuckets(tbl.getNumBuckets());
tpart.getSd().setSortCols(tbl.getSortCols());
}
if (partPath == null || partPath.trim().equals("")) {
throw new HiveException("new partition path should not be null or empty.");
}
tpart.getSd().setLocation(partPath);
}
public void addWriteNotificationLog(Table tbl, Map<String, String> partitionSpec,
List<Path> newFiles, Long writeId) throws HiveException {
if (!conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML)) {
LOG.debug("write notification log is ignored as dml event logging is disabled");
return;
}
if (tbl.isTemporary()) {
LOG.debug("write notification log is ignored as " + tbl.getTableName() + " is temporary : " + writeId);
return;
}
if (newFiles == null || newFiles.isEmpty()) {
LOG.debug("write notification log is ignored as file list is empty");
return;
}
LOG.debug("adding write notification log for operation " + writeId + " table " + tbl.getCompleteName() +
"partition " + partitionSpec + " list of files " + newFiles);
try {
Long txnId = SessionState.get().getTxnMgr().getCurrentTxnId();
List<String> partitionVals = null;
if (partitionSpec != null && !partitionSpec.isEmpty()) {
partitionVals = new ArrayList<>();
for (FieldSchema fs : tbl.getPartitionKeys()) {
partitionVals.add(partitionSpec.get(fs.getName()));
}
}
addWriteNotificationLog(conf, tbl, partitionVals, txnId, writeId, newFiles);
} catch (IOException | TException e) {
throw new HiveException(e);
}
}
public static void addWriteNotificationLog(HiveConf conf, Table tbl, List<String> partitionVals,
Long txnId, Long writeId, List<Path> newFiles)
throws IOException, HiveException, TException {
FileSystem fileSystem = tbl.getDataLocation().getFileSystem(conf);
InsertEventRequestData insertData = new InsertEventRequestData();
insertData.setReplace(true);
WriteNotificationLogRequest rqst = new WriteNotificationLogRequest(txnId, writeId,
tbl.getDbName(), tbl.getTableName(), insertData);
addInsertFileInformation(newFiles, fileSystem, insertData);
rqst.setPartitionVals(partitionVals);
get(conf).getSynchronizedMSC().addWriteNotificationLog(rqst);
}
private void fireInsertEvent(Table tbl, Map<String, String> partitionSpec, boolean replace, List<Path> newFiles)
throws HiveException {
if (conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML)) {
LOG.debug("Firing dml insert event");
if (tbl.isTemporary()) {
LOG.debug("Not firing dml insert event as " + tbl.getTableName() + " is temporary");
return;
}
try {
FileSystem fileSystem = tbl.getDataLocation().getFileSystem(conf);
FireEventRequestData data = new FireEventRequestData();
InsertEventRequestData insertData = new InsertEventRequestData();
insertData.setReplace(replace);
data.setInsertData(insertData);
if (newFiles != null && !newFiles.isEmpty()) {
addInsertFileInformation(newFiles, fileSystem, insertData);
} else {
insertData.setFilesAdded(new ArrayList<String>());
}
FireEventRequest rqst = new FireEventRequest(true, data);
rqst.setDbName(tbl.getDbName());
rqst.setTableName(tbl.getTableName());
if (partitionSpec != null && partitionSpec.size() > 0) {
List<String> partVals = new ArrayList<String>(partitionSpec.size());
for (FieldSchema fs : tbl.getPartitionKeys()) {
partVals.add(partitionSpec.get(fs.getName()));
}
rqst.setPartitionVals(partVals);
}
getSynchronizedMSC().fireListenerEvent(rqst);
} catch (IOException | TException e) {
throw new HiveException(e);
}
}
}
private static void addInsertFileInformation(List<Path> newFiles, FileSystem fileSystem,
InsertEventRequestData insertData) throws IOException {
LinkedList<Path> directories = null;
for (Path p : newFiles) {
if (fileSystem.isDirectory(p)) {
if (directories == null) {
directories = new LinkedList<>();
}
directories.add(p);
continue;
}
addInsertNonDirectoryInformation(p, fileSystem, insertData);
}
if (directories == null) {
return;
}
    // We don't expect nesting in most cases, and not a lot of it when present; UNION and
    // list bucketing are examples with one, or a few, levels respectively.
while (!directories.isEmpty()) {
Path dir = directories.poll();
FileStatus[] contents = fileSystem.listStatus(dir);
if (contents == null) {
continue;
}
for (FileStatus status : contents) {
if (status.isDirectory()) {
directories.add(status.getPath());
continue;
}
addInsertNonDirectoryInformation(status.getPath(), fileSystem, insertData);
}
}
}
private static void addInsertNonDirectoryInformation(Path p, FileSystem fileSystem,
InsertEventRequestData insertData) throws IOException {
insertData.addToFilesAdded(p.toString());
FileChecksum cksum = fileSystem.getFileChecksum(p);
String acidDirPath = AcidUtils.getFirstLevelAcidDirPath(p.getParent(), fileSystem);
// File checksum is not implemented for local filesystem (RawLocalFileSystem)
if (cksum != null) {
String checksumString =
StringUtils.byteToHexString(cksum.getBytes(), 0, cksum.getLength());
insertData.addToFilesAddedChecksum(checksumString);
} else {
// Add an empty checksum string for filesystems that don't generate one
insertData.addToFilesAddedChecksum("");
}
// acid dir will be present only for acid write operations.
if (acidDirPath != null) {
insertData.addToSubDirectoryList(acidDirPath);
}
}
public boolean dropPartition(String tblName, List<String> part_vals, boolean deleteData)
throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return dropPartition(names[0], names[1], part_vals, deleteData);
}
public boolean dropPartition(String db_name, String tbl_name,
List<String> part_vals, boolean deleteData) throws HiveException {
return dropPartition(db_name, tbl_name, part_vals,
PartitionDropOptions.instance().deleteData(deleteData));
}
public boolean dropPartition(String dbName, String tableName, List<String> partVals, PartitionDropOptions options)
throws HiveException {
try {
return getMSC().dropPartition(dbName, tableName, partVals, options);
} catch (NoSuchObjectException e) {
throw new HiveException("Partition or table doesn't exist.", e);
} catch (Exception e) {
throw new HiveException(e.getMessage(), e);
}
}
/**
   * Drop the partitions of the table that correspond to the given partition
   * directory names.
   *
   * @param table table whose partitions are to be dropped
   * @param partDirNames partition directory names that need to be dropped
   * @param deleteData whether data should be deleted from the file system
* @param ifExists check for existence before attempting delete
*
* @return list of partition objects that were deleted
*
* @throws HiveException
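   *
   * <p>A minimal sketch (illustrative; the directory names are hypothetical
   * and use the {@code key=value} layout this method expects):</p>
   * <pre>{@code
   * List<String> dirs = Arrays.asList("ds=2020-01-01", "ds=2020-01-02");
   * List<Partition> dropped = db.dropPartitions(tbl, dirs, true, true);
   * }</pre>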
*/
public List<Partition> dropPartitions(Table table, List<String>partDirNames,
boolean deleteData, boolean ifExists) throws HiveException {
// partitions to be dropped in this batch
List<DropPartitionDesc.PartSpec> partSpecs = new ArrayList<>(partDirNames.size());
// parts of the partition
String[] parts = null;
// Expression splits of each part of the partition
String[] partExprParts = null;
// Column Types of all partitioned columns. Used for generating partition specification
Map<String, String> colTypes = new HashMap<String, String>();
for (FieldSchema fs : table.getPartitionKeys()) {
colTypes.put(fs.getName(), fs.getType());
}
    // Index used to identify each partition spec in partSpecs
    int partSpecKey = 0;
for (String partDir : partDirNames) {
// The expression to identify the partition to be dropped
ExprNodeGenericFuncDesc expr = null;
// Split by "/" to identify partition parts
parts = partDir.split("/");
// Loop through the partitions and form the expression
for (String part : parts) {
// Split the partition predicate to identify column and value
partExprParts = part.split("=");
        // Only two elements expected in partExprParts: partition column name and partition value
assert partExprParts.length == 2;
// Partition Column
String partCol = partExprParts[0];
// Column Type
PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(colTypes.get(partCol));
// Form the expression node corresponding to column
ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, partCol, null, true);
// Build the expression based on the partition predicate
ExprNodeGenericFuncDesc op =
makeBinaryPredicate("=", column, new ExprNodeConstantDesc(pti, partExprParts[1]));
        // The multiple parts of the partition predicate are joined using AND
expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
}
// Add the expression to partition specification
partSpecs.add(new DropPartitionDesc.PartSpec(expr, partSpecKey));
      // Increment partSpecKey to get a new key for the next partition spec
++partSpecKey;
}
String[] names = Utilities.getDbTableName(table.getFullyQualifiedName());
return dropPartitions(names[0], names[1], partSpecs, deleteData, ifExists);
}
public List<Partition> dropPartitions(String tblName, List<DropPartitionDesc.PartSpec> partSpecs,
boolean deleteData, boolean ifExists) throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return dropPartitions(names[0], names[1], partSpecs, deleteData, ifExists);
}
public List<Partition> dropPartitions(String dbName, String tblName,
List<DropPartitionDesc.PartSpec> partSpecs, boolean deleteData,
boolean ifExists) throws HiveException {
return dropPartitions(dbName, tblName, partSpecs,
PartitionDropOptions.instance()
.deleteData(deleteData)
.ifExists(ifExists));
}
public List<Partition> dropPartitions(String tblName, List<DropPartitionDesc.PartSpec> partSpecs,
PartitionDropOptions dropOptions) throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return dropPartitions(names[0], names[1], partSpecs, dropOptions);
}
public List<Partition> dropPartitions(String dbName, String tblName,
List<DropPartitionDesc.PartSpec> partSpecs, PartitionDropOptions dropOptions) throws HiveException {
try {
Table tbl = getTable(dbName, tblName);
List<org.apache.hadoop.hive.metastore.utils.ObjectPair<Integer, byte[]>> partExprs =
new ArrayList<>(partSpecs.size());
for (DropPartitionDesc.PartSpec partSpec : partSpecs) {
partExprs.add(new org.apache.hadoop.hive.metastore.utils.ObjectPair<>(partSpec.getPrefixLength(),
SerializationUtilities.serializeExpressionToKryo(partSpec.getPartSpec())));
}
List<org.apache.hadoop.hive.metastore.api.Partition> tParts = getMSC().dropPartitions(
dbName, tblName, partExprs, dropOptions);
return convertFromMetastore(tbl, tParts);
} catch (NoSuchObjectException e) {
throw new HiveException("Partition or table doesn't exist.", e);
} catch (Exception e) {
throw new HiveException(e.getMessage(), e);
}
}
public List<String> getPartitionNames(String tblName, short max) throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return getPartitionNames(names[0], names[1], max);
}
public List<String> getPartitionNames(String dbName, String tblName, short max)
throws HiveException {
List<String> names = null;
try {
names = getMSC().listPartitionNames(dbName, tblName, max);
} catch (NoSuchObjectException nsoe) {
// this means no partition exists for the given dbName and tblName
// key value pairs - thrift cannot handle null return values, hence
// listPartitionNames() throws NoSuchObjectException to indicate null partitions
return Lists.newArrayList();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return names;
}
public List<String> getPartitionNames(String dbName, String tblName,
Map<String, String> partSpec, short max) throws HiveException {
List<String> names = null;
Table t = getTable(dbName, tblName);
List<String> pvals = MetaStoreUtils.getPvals(t.getPartCols(), partSpec);
try {
names = getMSC().listPartitionNames(dbName, tblName, pvals, max);
} catch (NoSuchObjectException nsoe) {
// this means no partition exists for the given partition spec
// key value pairs - thrift cannot handle null return values, hence
// listPartitionNames() throws NoSuchObjectException to indicate null partitions
return Lists.newArrayList();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return names;
}
/**
   * Get all the partitions that the table has.
   *
   * @param tbl
   *          table for which partitions are needed
* @return list of partition objects
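   *
   * <p>Note that for an unpartitioned table this returns a single
   * pseudo-partition wrapping the table itself:</p>
   * <pre>{@code
   * List<Partition> parts = db.getPartitions(tbl); // size 1 if unpartitioned
   * }</pre>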
*/
public List<Partition> getPartitions(Table tbl) throws HiveException {
if (tbl.isPartitioned()) {
List<org.apache.hadoop.hive.metastore.api.Partition> tParts;
try {
tParts = getMSC().listPartitionsWithAuthInfo(tbl.getDbName(), tbl.getTableName(),
(short) -1, getUserName(), getGroupNames());
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
List<Partition> parts = new ArrayList<Partition>(tParts.size());
for (org.apache.hadoop.hive.metastore.api.Partition tpart : tParts) {
parts.add(new Partition(tbl, tpart));
}
return parts;
} else {
Partition part = new Partition(tbl);
ArrayList<Partition> parts = new ArrayList<Partition>(1);
parts.add(part);
return parts;
}
}
/**
* Get all the partitions; unlike {@link #getPartitions(Table)}, does not include auth.
* @param tbl table for which partitions are needed
* @return list of partition objects
*/
public Set<Partition> getAllPartitionsOf(Table tbl) throws HiveException {
if (!tbl.isPartitioned()) {
return Sets.newHashSet(new Partition(tbl));
}
List<org.apache.hadoop.hive.metastore.api.Partition> tParts;
try {
tParts = getMSC().listPartitions(tbl.getDbName(), tbl.getTableName(), (short)-1);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
Set<Partition> parts = new LinkedHashSet<Partition>(tParts.size());
for (org.apache.hadoop.hive.metastore.api.Partition tpart : tParts) {
parts.add(new Partition(tbl, tpart));
}
return parts;
}
/**
   * Get all the partitions of the table that match the given partial
   * specification. Partition columns whose value can be anything should be
   * specified as an empty string.
   *
   * @param tbl
   *          table for which partitions are needed. Must be partitioned.
* @param limit number of partitions to return
* @return list of partition objects
* @throws HiveException
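   *
   * <p>A minimal sketch for a table partitioned by {@code (ds, hr)}, where an
   * empty string acts as a wildcard for {@code hr} (illustrative; values are
   * hypothetical):</p>
   * <pre>{@code
   * Map<String, String> partial = new LinkedHashMap<>();
   * partial.put("ds", "2020-01-01");
   * partial.put("hr", "");                         // match any hour
   * List<Partition> parts = db.getPartitions(tbl, partial, (short) -1);
   * }</pre>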
*/
public List<Partition> getPartitions(Table tbl, Map<String, String> partialPartSpec,
short limit)
throws HiveException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<String> partialPvals = MetaStoreUtils.getPvals(tbl.getPartCols(), partialPartSpec);
List<org.apache.hadoop.hive.metastore.api.Partition> partitions = null;
try {
partitions = getMSC().listPartitionsWithAuthInfo(tbl.getDbName(), tbl.getTableName(),
partialPvals, limit, getUserName(), getGroupNames());
} catch (Exception e) {
throw new HiveException(e);
}
List<Partition> qlPartitions = new ArrayList<Partition>();
for (org.apache.hadoop.hive.metastore.api.Partition p : partitions) {
qlPartitions.add( new Partition(tbl, p));
}
return qlPartitions;
}
/**
   * Get all the partitions of the table that match the given partial
   * specification. Partition columns whose value can be anything should be
   * specified as an empty string.
   *
   * @param tbl
   *          table for which partitions are needed. Must be partitioned.
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitions(Table tbl, Map<String, String> partialPartSpec)
throws HiveException {
return getPartitions(tbl, partialPartSpec, (short)-1);
}
/**
   * Get all the partitions of the table that match the given partial
   * specification. Partition columns whose value can be anything should be
   * specified as an empty string.
   *
   * @param tbl
   *          table for which partitions are needed. Must be partitioned.
   * @param partialPartSpec
   *          partial partition specification (some partition values may be empty)
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitionsByNames(Table tbl,
Map<String, String> partialPartSpec)
throws HiveException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<String> names = getPartitionNames(tbl.getDbName(), tbl.getTableName(),
partialPartSpec, (short)-1);
List<Partition> partitions = getPartitionsByNames(tbl, names);
return partitions;
}
/**
   * Get all partitions of the table that match the given list of partition names.
   *
   * @param tbl
   *          table for which partitions are needed. Must be partitioned.
* @param partNames
* list of partition names
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitionsByNames(Table tbl, List<String> partNames)
throws HiveException {
return getPartitionsByNames(tbl, partNames, false);
}
/**
   * Get all partitions of the table that match the given list of partition names.
   *
   * @param tbl
   *          table for which partitions are needed. Must be partitioned.
* @param partNames
* list of partition names
* @param getColStats
* if true, Partition object includes column statistics for that partition.
* @return list of partition objects
* @throws HiveException
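   *
   * <p>A minimal sketch (illustrative; the names are hypothetical and use the
   * metastore's {@code key=value} partition name format):</p>
   * <pre>{@code
   * List<String> names = Arrays.asList("ds=2020-01-01", "ds=2020-01-02");
   * List<Partition> parts = db.getPartitionsByNames(tbl, names, false);
   * }</pre>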
*/
public List<Partition> getPartitionsByNames(Table tbl, List<String> partNames, boolean getColStats)
throws HiveException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<Partition> partitions = new ArrayList<Partition>(partNames.size());
int batchSize = HiveConf.getIntVar(conf, HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
// TODO: might want to increase the default batch size. 1024 is viable; MS gets OOM if too high.
int nParts = partNames.size();
int nBatches = nParts / batchSize;
try {
for (int i = 0; i < nBatches; ++i) {
List<org.apache.hadoop.hive.metastore.api.Partition> tParts =
getMSC().getPartitionsByNames(tbl.getDbName(), tbl.getTableName(),
partNames.subList(i*batchSize, (i+1)*batchSize), getColStats);
if (tParts != null) {
for (org.apache.hadoop.hive.metastore.api.Partition tpart: tParts) {
partitions.add(new Partition(tbl, tpart));
}
}
}
if (nParts > nBatches * batchSize) {
List<org.apache.hadoop.hive.metastore.api.Partition> tParts =
getMSC().getPartitionsByNames(tbl.getDbName(), tbl.getTableName(),
partNames.subList(nBatches*batchSize, nParts), getColStats);
if (tParts != null) {
for (org.apache.hadoop.hive.metastore.api.Partition tpart: tParts) {
partitions.add(new Partition(tbl, tpart));
}
}
}
} catch (Exception e) {
throw new HiveException(e);
}
return partitions;
}
/**
* Get a list of Partitions by filter.
* @param tbl The table containing the partitions.
   * @param filter A string representing partition predicates.
* @return a list of partitions satisfying the partition predicates.
* @throws HiveException
* @throws MetaException
* @throws NoSuchObjectException
* @throws TException
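   *
   * <p>A minimal sketch (illustrative; the filter string is hypothetical and
   * uses the metastore filter syntax over partition columns):</p>
   * <pre>{@code
   * List<Partition> parts = db.getPartitionsByFilter(tbl,
   *     "ds >= '2020-01-01' and ds < '2020-02-01'");
   * }</pre>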
*/
public List<Partition> getPartitionsByFilter(Table tbl, String filter)
throws HiveException, MetaException, NoSuchObjectException, TException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<org.apache.hadoop.hive.metastore.api.Partition> tParts = getMSC().listPartitionsByFilter(
tbl.getDbName(), tbl.getTableName(), filter, (short)-1);
return convertFromMetastore(tbl, tParts);
}
private static List<Partition> convertFromMetastore(Table tbl,
List<org.apache.hadoop.hive.metastore.api.Partition> partitions) throws HiveException {
if (partitions == null) {
return new ArrayList<Partition>();
}
List<Partition> results = new ArrayList<Partition>(partitions.size());
for (org.apache.hadoop.hive.metastore.api.Partition tPart : partitions) {
results.add(new Partition(tbl, tPart));
}
return results;
}
/**
* Get a list of Partitions by expr.
* @param tbl The table containing the partitions.
* @param expr A serialized expression for partition predicates.
* @param conf Hive config.
* @param result the resulting list of partitions
* @return whether the resulting list contains partitions which may or may not match the expr
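   *
   * <p>A minimal sketch that builds a {@code ds = '2020-01-01'} predicate the
   * same way {@link #dropPartitions(Table, List, boolean, boolean)} does
   * (illustrative; the column name and value are hypothetical):</p>
   * <pre>{@code
   * PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo("string");
   * ExprNodeColumnDesc col = new ExprNodeColumnDesc(pti, "ds", null, true);
   * ExprNodeGenericFuncDesc expr = makeBinaryPredicate("=", col,
   *     new ExprNodeConstantDesc(pti, "2020-01-01"));
   * List<Partition> result = new ArrayList<>();
   * boolean hasUnknown = db.getPartitionsByExpr(tbl, expr, conf, result);
   * }</pre>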
*/
public boolean getPartitionsByExpr(Table tbl, ExprNodeGenericFuncDesc expr, HiveConf conf,
List<Partition> result) throws HiveException, TException {
assert result != null;
byte[] exprBytes = SerializationUtilities.serializeExpressionToKryo(expr);
String defaultPartitionName = HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME);
List<org.apache.hadoop.hive.metastore.api.Partition> msParts =
new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();
boolean hasUnknownParts = getMSC().listPartitionsByExpr(tbl.getDbName(),
tbl.getTableName(), exprBytes, defaultPartitionName, (short)-1, msParts);
result.addAll(convertFromMetastore(tbl, msParts));
return hasUnknownParts;
}
/**
* Get a number of Partitions by filter.
* @param tbl The table containing the partitions.
   * @param filter A string representing partition predicates.
* @return the number of partitions satisfying the partition predicates.
* @throws HiveException
* @throws MetaException
* @throws NoSuchObjectException
* @throws TException
*/
public int getNumPartitionsByFilter(Table tbl, String filter)
throws HiveException, MetaException, NoSuchObjectException, TException {
if (!tbl.isPartitioned()) {
throw new HiveException("Partition spec should only be supplied for a " +
"partitioned table");
}
int numParts = getMSC().getNumPartitionsByFilter(
tbl.getDbName(), tbl.getTableName(), filter);
return numParts;
}
public void validatePartitionNameCharacters(List<String> partVals) throws HiveException {
try {
getMSC().validatePartitionNameCharacters(partVals);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void createRole(String roleName, String ownerName)
throws HiveException {
try {
getMSC().create_role(new Role(roleName, -1, ownerName));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropRole(String roleName) throws HiveException {
try {
getMSC().drop_role(roleName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all existing role names.
*
* @return List of role names.
* @throws HiveException
*/
public List<String> getAllRoleNames() throws HiveException {
try {
return getMSC().listRoleNames();
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<RolePrincipalGrant> getRoleGrantInfoForPrincipal(String principalName, PrincipalType principalType) throws HiveException {
try {
GetRoleGrantsForPrincipalRequest req = new GetRoleGrantsForPrincipalRequest(principalName, principalType);
GetRoleGrantsForPrincipalResponse resp = getMSC().get_role_grants_for_principal(req);
return resp.getPrincipalGrants();
} catch (Exception e) {
throw new HiveException(e);
}
}
public boolean grantRole(String roleName, String userName,
PrincipalType principalType, String grantor, PrincipalType grantorType,
boolean grantOption) throws HiveException {
try {
return getMSC().grant_role(roleName, userName, principalType, grantor,
grantorType, grantOption);
} catch (Exception e) {
throw new HiveException(e);
}
}
public boolean revokeRole(String roleName, String userName,
PrincipalType principalType, boolean grantOption) throws HiveException {
try {
return getMSC().revoke_role(roleName, userName, principalType, grantOption);
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<Role> listRoles(String userName, PrincipalType principalType)
throws HiveException {
try {
return getMSC().list_roles(userName, principalType);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* @param objectType
* hive object type
* @param db_name
* database name
* @param table_name
* table name
* @param part_values
* partition values
* @param column_name
* column name
* @param user_name
* user name
* @param group_names
* group names
* @return the privilege set
* @throws HiveException
*/
public PrincipalPrivilegeSet get_privilege_set(HiveObjectType objectType,
String db_name, String table_name, List<String> part_values,
String column_name, String user_name, List<String> group_names)
throws HiveException {
try {
HiveObjectRef hiveObj = new HiveObjectRef(objectType, db_name,
table_name, part_values, column_name);
return getMSC().get_privilege_set(hiveObj, user_name, group_names);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* @param objectType
* hive object type
* @param principalName
* @param principalType
* @param dbName
* @param tableName
* @param partValues
* @param columnName
* @return list of privileges
* @throws HiveException
*/
public List<HiveObjectPrivilege> showPrivilegeGrant(
HiveObjectType objectType, String principalName,
PrincipalType principalType, String dbName, String tableName,
List<String> partValues, String columnName) throws HiveException {
try {
HiveObjectRef hiveObj = new HiveObjectRef(objectType, dbName, tableName,
partValues, columnName);
return getMSC().list_privileges(principalName, principalType, hiveObj);
} catch (Exception e) {
throw new HiveException(e);
}
}
private static void copyFiles(final HiveConf conf, final FileSystem destFs,
FileStatus[] srcs, final FileSystem srcFs, final Path destf,
final boolean isSrcLocal, boolean isOverwrite,
final List<Path> newFiles, boolean acidRename, boolean isManaged) throws HiveException {
final HdfsUtils.HadoopFileStatus fullDestStatus;
try {
fullDestStatus = new HdfsUtils.HadoopFileStatus(conf, destFs, destf);
} catch (IOException e1) {
throw new HiveException(e1);
}
if (!fullDestStatus.getFileStatus().isDirectory()) {
throw new HiveException(destf + " is not a directory.");
}
final List<Future<ObjectPair<Path, Path>>> futures = new LinkedList<>();
final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ?
Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Move-Thread-%d").build()) : null;
// For ACID non-bucketed case, the filenames have to be in the format consistent with INSERT/UPDATE/DELETE Ops,
// i.e, like 000000_0, 000001_0_copy_1, 000002_0.gz etc.
// The extension is only maintained for files which are compressed.
int taskId = 0;
// Sort the files
Arrays.sort(srcs);
String configuredOwner = HiveConf.getVar(conf, ConfVars.HIVE_LOAD_DATA_OWNER);
for (FileStatus src : srcs) {
FileStatus[] files;
if (src.isDirectory()) {
try {
files = srcFs.listStatus(src.getPath(), FileUtils.HIDDEN_FILES_PATH_FILTER);
} catch (IOException e) {
if (null != pool) {
pool.shutdownNow();
}
throw new HiveException(e);
}
} else {
files = new FileStatus[] {src};
}
final SessionState parentSession = SessionState.get();
// Sort the files
Arrays.sort(files);
for (final FileStatus srcFile : files) {
final Path srcP = srcFile.getPath();
final boolean needToCopy = needToCopy(srcP, destf, srcFs, destFs, configuredOwner, isManaged);
final boolean isRenameAllowed = !needToCopy && !isSrcLocal;
final String msg = "Unable to move source " + srcP + " to destination " + destf;
      // If we do a rename for a non-local file, we will be transferring the original
      // file permissions from source to the destination. Else, in the mvFile() case where we
      // copy from source to destination, we will inherit the destination's parent group ownership.
if (null == pool) {
try {
Path destPath = mvFile(conf, srcFs, srcP, destFs, destf, isSrcLocal, isOverwrite, isRenameAllowed,
acidRename ? taskId++ : -1);
if (null != newFiles) {
newFiles.add(destPath);
}
} catch (Exception e) {
throw getHiveException(e, msg, "Failed to move: {}");
}
} else {
        // Anonymous classes can only capture final (or effectively final) values, so make a final copy of taskId.
final int finalTaskId = acidRename ? taskId++ : -1;
futures.add(pool.submit(new Callable<ObjectPair<Path, Path>>() {
@Override
public ObjectPair<Path, Path> call() throws HiveException {
SessionState.setCurrentSessionState(parentSession);
try {
Path destPath =
mvFile(conf, srcFs, srcP, destFs, destf, isSrcLocal, isOverwrite, isRenameAllowed, finalTaskId);
if (null != newFiles) {
newFiles.add(destPath);
}
return ObjectPair.create(srcP, destPath);
} catch (Exception e) {
throw getHiveException(e, msg);
}
}
}));
}
}
}
if (null != pool) {
pool.shutdown();
for (Future<ObjectPair<Path, Path>> future : futures) {
try {
ObjectPair<Path, Path> pair = future.get();
LOG.debug("Moved src: {}, to dest: {}", pair.getFirst().toString(), pair.getSecond().toString());
} catch (Exception e) {
throw handlePoolException(pool, e);
}
}
}
}
private static boolean isSubDir(Path srcf, Path destf, FileSystem srcFs, FileSystem destFs, boolean isSrcLocal) {
if (srcf == null) {
LOG.debug("The source path is null for isSubDir method.");
return false;
}
String fullF1 = getQualifiedPathWithoutSchemeAndAuthority(srcf, srcFs).toString() + Path.SEPARATOR;
String fullF2 = getQualifiedPathWithoutSchemeAndAuthority(destf, destFs).toString() + Path.SEPARATOR;
boolean isInTest = HiveConf.getBoolVar(srcFs.getConf(), ConfVars.HIVE_IN_TEST);
    // In tests, the data warehouse is based on the local file system.
LOG.debug("The source path is " + fullF1 + " and the destination path is " + fullF2);
if (isInTest) {
return fullF1.startsWith(fullF2);
}
    // If the URI schemes differ, return false.
String schemaSrcf = srcf.toUri().getScheme();
String schemaDestf = destf.toUri().getScheme();
    // If schemaDestf is null, the destination resolves against the default file system,
    // which is not the local one the source is on.
if (schemaDestf == null && isSrcLocal) {
LOG.debug("The source file is in the local while the dest not.");
return false;
}
    // If both schemes are provided, they should be the same.
    if (schemaSrcf != null && schemaDestf != null && !schemaSrcf.equals(schemaDestf)) {
      LOG.debug("The source path's scheme is " + schemaSrcf +
          " and the destination path's scheme is " + schemaDestf + ".");
return false;
}
LOG.debug("The source path is " + fullF1 + " and the destination path is " + fullF2);
return fullF1.startsWith(fullF2);
}
private static Path getQualifiedPathWithoutSchemeAndAuthority(Path srcf, FileSystem fs) {
Path currentWorkingDir = fs.getWorkingDirectory();
Path path = srcf.makeQualified(srcf.toUri(), currentWorkingDir);
return ShimLoader.getHadoopShims().getPathWithoutSchemeAndAuthority(path);
}
private static String getPathName(int taskId) {
return Utilities.replaceTaskId("000000", taskId) + "_0";
}
/**
* <p>
* Moves a file from one {@link Path} to another. If {@code isRenameAllowed} is true then the
   * {@link FileSystem#rename(Path, Path)} method is used to move the file. If it is false then the data is copied; if
* {@code isSrcLocal} is true then the {@link FileSystem#copyFromLocalFile(Path, Path)} method is used, else
* {@link FileUtils#copy(FileSystem, Path, FileSystem, Path, boolean, boolean, HiveConf)} is used.
* </p>
*
* <p>
* If the destination file already exists, then {@code _copy_[counter]} is appended to the file name, where counter
* is an integer starting from 1.
* </p>
*
* @param conf the {@link HiveConf} to use if copying data
* @param sourceFs the {@link FileSystem} where the source file exists
* @param sourcePath the {@link Path} to move
* @param destFs the {@link FileSystem} to move the file to
* @param destDirPath the {@link Path} to move the file to
* @param isSrcLocal if the source file is on the local filesystem
* @param isOverwrite if true, then overwrite destination file if exist else make a duplicate copy
* @param isRenameAllowed true if the data should be renamed and not copied, false otherwise
*
* @return the {@link Path} the source file was moved to
*
* @throws IOException if there was an issue moving the file
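   *
   * <p>For example (illustrative), repeated non-overwrite moves of
   * {@code 000000_0.gz} into the same directory yield
   * {@code 000000_0.gz}, {@code 000000_0_copy_1.gz},
   * {@code 000000_0_copy_2.gz}, and so on.</p>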
*/
private static Path mvFile(HiveConf conf, FileSystem sourceFs, Path sourcePath, FileSystem destFs, Path destDirPath,
boolean isSrcLocal, boolean isOverwrite, boolean isRenameAllowed,
int taskId) throws IOException {
    // Strip off the file type, if any, so we don't make:
    // 000000_0.gz -> 000000_0.gz_copy_1
final String fullname = sourcePath.getName();
final String name;
if (taskId == -1) { // non-acid
name = FilenameUtils.getBaseName(sourcePath.getName());
} else { // acid
name = getPathName(taskId);
}
final String type = FilenameUtils.getExtension(sourcePath.getName());
    // In case of ACID, the file is ORC, so the extension is not relevant and should not be inherited.
Path destFilePath = new Path(destDirPath, taskId == -1 ? fullname : name);
    /*
     * The loop below may perform badly when the destination file already exists and has too many
     * _copy_ files as well. A desired approach was to call listFiles() and get a complete list of
     * files from the destination, and check whether the file exists on that list. However,
     * millions of files could live in the destination directory, and in concurrent situations
     * this can cause OOM problems.
     *
     * I'll leave the loop below for now until a better approach is found.
     */
for (int counter = 1; destFs.exists(destFilePath); counter++) {
if (isOverwrite) {
destFs.delete(destFilePath, false);
break;
}
destFilePath = new Path(destDirPath, name + (Utilities.COPY_KEYWORD + counter) +
((taskId == -1 && !type.isEmpty()) ? "." + type : ""));
}
if (isRenameAllowed) {
destFs.rename(sourcePath, destFilePath);
} else if (isSrcLocal) {
destFs.copyFromLocalFile(sourcePath, destFilePath);
} else {
if (!FileUtils.copy(sourceFs, sourcePath, destFs, destFilePath,
false, // delete source
false, // overwrite destination
conf)) {
LOG.error("Copy failed for source: " + sourcePath + " to destination: " + destFilePath);
throw new IOException("File copy failed.");
}
// Source file delete may fail because of permission issue as executing user might not
// have permission to delete the files in the source path. Ignore this failure.
try {
if (!sourceFs.delete(sourcePath, true)) {
LOG.warn("Delete source failed for source: " + sourcePath + " during copy to destination: " + destFilePath);
}
} catch (Exception e) {
LOG.warn("Delete source failed for source: " + sourcePath + " during copy to destination: " + destFilePath, e);
}
}
return destFilePath;
}
// Clears the dest dir when src is sub-dir of dest.
public static void clearDestForSubDirSrc(final HiveConf conf, Path dest,
Path src, boolean isSrcLocal) throws IOException {
FileSystem destFS = dest.getFileSystem(conf);
FileSystem srcFS = src.getFileSystem(conf);
if (isSubDir(src, dest, srcFS, destFS, isSrcLocal)) {
final Path fullSrcPath = getQualifiedPathWithoutSchemeAndAuthority(src, srcFS);
final Path fullDestPath = getQualifiedPathWithoutSchemeAndAuthority(dest, destFS);
if (fullSrcPath.equals(fullDestPath)) {
return;
}
Path parent = fullSrcPath;
while (!parent.getParent().equals(fullDestPath)) {
parent = parent.getParent();
}
FileStatus[] existingFiles = destFS.listStatus(
dest, FileUtils.HIDDEN_FILES_PATH_FILTER);
for (FileStatus fileStatus : existingFiles) {
if (!fileStatus.getPath().getName().equals(parent.getName())) {
destFS.delete(fileStatus.getPath(), true);
}
}
}
}
  // List the new files in the destination path which were copied from the source.
public static void listNewFilesRecursively(final FileSystem destFs, Path dest,
List<Path> newFiles) throws HiveException {
try {
for (FileStatus fileStatus : destFs.listStatus(dest, FileUtils.HIDDEN_FILES_PATH_FILTER)) {
if (fileStatus.isDirectory()) {
// If it is a sub-directory, then recursively list the files.
listNewFilesRecursively(destFs, fileStatus.getPath(), newFiles);
} else {
newFiles.add(fileStatus.getPath());
}
}
} catch (IOException e) {
LOG.error("Failed to get source file statuses", e);
throw new HiveException(e.getMessage(), e);
}
}
/**
* Recycles the files recursively from the input path to the cmroot directory either by copying or moving it.
*
* @param dataPath Path of the data files to be recycled to cmroot
   * @param isPurge
   *          when set to true, files that need to be recycled are not moved to the trash
*/
public void recycleDirToCmPath(Path dataPath, boolean isPurge) throws HiveException {
try {
CmRecycleRequest request = new CmRecycleRequest(dataPath.toString(), isPurge);
getSynchronizedMSC().recycleDirToCmPath(request);
} catch (Exception e) {
throw new HiveException(e);
}
}
private static void deleteAndRename(FileSystem destFs, Path destFile, FileStatus srcStatus, Path destPath)
throws IOException {
if (destFs.exists(destFile)) {
      // rename cannot overwrite a non-empty destination directory, so delete the destination before renaming.
      destFs.delete(destFile, true);
      LOG.info("Deleted destination file " + destFile.toUri());
}
if(!destFs.rename(srcStatus.getPath(), destFile)) {
throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest:"
+ destPath + " returned false");
}
}
  // It is assumed that the parent directory of destf already exists when this
  // method is called. When replace is true, this method works a little differently
  // from the mv command: if destf is a directory, it replaces destf instead of moving
  // under it; in this case, the replaced destf still preserves the original destf's permissions.
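  // Illustrative call (hypothetical paths): moveFile(conf, new Path("/tmp/.hive-staging/-ext-10000"),
  //     new Path("/warehouse/t"), /* replace */ true, /* isSrcLocal */ false, /* isManaged */ false)
  // would replace the contents of /warehouse/t with the staged files.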
public static boolean moveFile(final HiveConf conf, Path srcf, final Path destf, boolean replace,
boolean isSrcLocal, boolean isManaged) throws HiveException {
final FileSystem srcFs, destFs;
try {
destFs = destf.getFileSystem(conf);
} catch (IOException e) {
LOG.error("Failed to get dest fs", e);
throw new HiveException(e.getMessage(), e);
}
try {
srcFs = srcf.getFileSystem(conf);
} catch (IOException e) {
LOG.error("Failed to get src fs", e);
throw new HiveException(e.getMessage(), e);
}
HdfsUtils.HadoopFileStatus destStatus = null;
String configuredOwner = HiveConf.getVar(conf, ConfVars.HIVE_LOAD_DATA_OWNER);
// If source path is a subdirectory of the destination path (or the other way around):
// ex: INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
// where the staging directory is a subdirectory of the destination directory
// (1) Do not delete the dest dir before doing the move operation.
// (2) It is assumed that subdir and dir are in same encryption zone.
    // (3) Move individual files from src dir to dest dir.
boolean srcIsSubDirOfDest = isSubDir(srcf, destf, srcFs, destFs, isSrcLocal),
destIsSubDirOfSrc = isSubDir(destf, srcf, destFs, srcFs, false);
final String msg = "Unable to move source " + srcf + " to destination " + destf;
try {
if (replace) {
try{
destStatus = new HdfsUtils.HadoopFileStatus(conf, destFs, destf);
          //if destf is an existing directory:
          //if replace is true, delete followed by rename (mv) is equivalent to replace
          //if replace is false, rename (mv) actually moves the src under the dest dir
          //if destf is an existing file, rename is actually a replace, and we do not need
          //to delete the file first
if (replace && !srcIsSubDirOfDest) {
destFs.delete(destf, true);
LOG.debug("The path " + destf.toString() + " is deleted");
}
} catch (FileNotFoundException ignore) {
}
}
final HdfsUtils.HadoopFileStatus desiredStatus = destStatus;
final SessionState parentSession = SessionState.get();
if (isSrcLocal) {
// For local src file, copy to hdfs
destFs.copyFromLocalFile(srcf, destf);
return true;
} else {
if (needToCopy(srcf, destf, srcFs, destFs, configuredOwner, isManaged)) {
          // Copy if across file systems, encryption zones, or when ownership requires it.
          LOG.debug("Copying source " + srcf + " to " + destf
              + " because the file systems, encryption zones, or ownership differ.");
return FileUtils.copy(srcf.getFileSystem(conf), srcf, destf.getFileSystem(conf), destf,
true, // delete source
replace, // overwrite destination
conf);
} else {
if (srcIsSubDirOfDest || destIsSubDirOfSrc) {
FileStatus[] srcs = destFs.listStatus(srcf, FileUtils.HIDDEN_FILES_PATH_FILTER);
List<Future<Void>> futures = new LinkedList<>();
final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ?
Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Move-Thread-%d").build()) : null;
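          // A null pool means HIVE_MOVE_FILES_THREAD_COUNT is configured to 0; in that case the
          // files are moved serially on the current thread.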
if (destIsSubDirOfSrc && !destFs.exists(destf)) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Creating " + destf);
}
destFs.mkdirs(destf);
}
/* Move files one by one because source is a subdirectory of destination */
for (final FileStatus srcStatus : srcs) {
final Path destFile = new Path(destf, srcStatus.getPath().getName());
final String poolMsg =
"Unable to move source " + srcStatus.getPath() + " to destination " + destFile;
if (null == pool) {
deleteAndRename(destFs, destFile, srcStatus, destf);
} else {
futures.add(pool.submit(new Callable<Void>() {
@Override
public Void call() throws HiveException {
SessionState.setCurrentSessionState(parentSession);
try {
deleteAndRename(destFs, destFile, srcStatus, destf);
} catch (Exception e) {
throw getHiveException(e, poolMsg);
}
return null;
}
}));
}
}
if (null != pool) {
pool.shutdown();
for (Future<Void> future : futures) {
try {
future.get();
} catch (Exception e) {
throw handlePoolException(pool, e);
}
}
}
return true;
        } else {
          return destFs.rename(srcf, destf);
        }
}
}
} catch (Exception e) {
throw getHiveException(e, msg);
}
}
static private HiveException getHiveException(Exception e, String msg) {
return getHiveException(e, msg, null);
}
static private HiveException handlePoolException(ExecutorService pool, Exception e) {
HiveException he = null;
if (e instanceof HiveException) {
he = (HiveException) e;
if (he.getCanonicalErrorMsg() != ErrorMsg.GENERIC_ERROR) {
if (he.getCanonicalErrorMsg() == ErrorMsg.UNRESOLVED_RT_EXCEPTION) {
LOG.error("Failed to move: {}", he.getMessage());
} else {
LOG.error("Failed to move: {}", he.getRemoteErrorMsg());
}
}
} else {
LOG.error("Failed to move: {}", e.getMessage());
he = new HiveException(e.getCause());
}
pool.shutdownNow();
return he;
}
static private HiveException getHiveException(Exception e, String msg, String logMsg) {
// The message from remote exception includes the entire stack. The error thrown from
// hive based on the remote exception needs only the first line.
String hiveErrMsg = null;
if (e.getMessage() != null) {
hiveErrMsg = String.format("%s%s%s", msg, ": ",
Splitter.on(System.getProperty("line.separator")).split(e.getMessage()).iterator()
.next());
} else {
hiveErrMsg = msg;
}
ErrorMsg errorMsg = ErrorMsg.getErrorMsg(e);
if (logMsg != null) {
LOG.info(String.format(logMsg, e.getMessage()));
}
if (errorMsg != ErrorMsg.UNRESOLVED_RT_EXCEPTION) {
return new HiveException(e, e.getMessage(), errorMsg, hiveErrMsg);
} else {
return new HiveException(msg, e);
}
}
  /**
   * If moving across different FileSystems or different encryption zones, a file copy is needed
   * instead of a rename.
   * TODO: consider whether this is also needed for different file authorities.
   * @throws HiveException
   */
static private boolean needToCopy(Path srcf, Path destf, FileSystem srcFs,
FileSystem destFs, String configuredOwner, boolean isManaged) throws HiveException {
//Check if different FileSystems
if (!FileUtils.equalsFileSystem(srcFs, destFs)) {
return true;
}
if (isManaged && !configuredOwner.isEmpty() && srcFs instanceof DistributedFileSystem) {
      // For managed tables with a configured owner on HDFS, ownership and write permissions
      // determine whether a rename is safe or a copy (performed as the Hive user) is needed.
      // Get the running owner.
FileStatus srcs;
try {
srcs = srcFs.getFileStatus(srcf);
String runningUser = UserGroupInformation.getLoginUser().getShortUserName();
boolean isOwned = FileUtils.isOwnerOfFileHierarchy(srcFs, srcs, configuredOwner, false);
if (configuredOwner.equals(runningUser)) {
// Check if owner has write permission, else it will have to copy
if (!(isOwned &&
FileUtils.isActionPermittedForFileHierarchy(
srcFs, srcs, configuredOwner, FsAction.WRITE, false))) {
return true;
}
} else {
// If the configured owner does not own the file, throw
if (!isOwned) {
throw new HiveException("Load Data failed for " + srcf + " as the file is not owned by "
+ configuredOwner + " and load data is also not ran as " + configuredOwner);
} else {
return true;
}
}
      } catch (IOException e) {
        throw new HiveException("Could not fetch FileStatus for source file " + srcf, e);
      } catch (HiveException e) {
        throw e;
      } catch (Exception e) {
        throw new HiveException("Failed in looking up permissions on file " + srcf, e);
      }
}
//Check if different encryption zones
HadoopShims.HdfsEncryptionShim srcHdfsEncryptionShim = SessionState.get().getHdfsEncryptionShim(srcFs);
HadoopShims.HdfsEncryptionShim destHdfsEncryptionShim = SessionState.get().getHdfsEncryptionShim(destFs);
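    // HDFS rejects renames across encryption zone boundaries, so force a copy whenever either
    // endpoint is encrypted and the two paths are not in the same encryption zone.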
try {
return srcHdfsEncryptionShim != null
&& destHdfsEncryptionShim != null
&& (srcHdfsEncryptionShim.isPathEncrypted(srcf) || destHdfsEncryptionShim.isPathEncrypted(destf))
&& !srcHdfsEncryptionShim.arePathsOnSameEncryptionZone(srcf, destf, destHdfsEncryptionShim);
} catch (IOException e) {
throw new HiveException(e);
}
}
  /**
   * Copy files. This handles building the mapping for buckets and such between the source and
   * destination.
   * @param conf Configuration object
   * @param srcf source directory, if bucketed should contain bucket files
   * @param destf directory to move files into
   * @param fs Filesystem
   * @param isSrcLocal true if source is on local file system
   * @param isAcidIUD true if this is an ACID based Insert/Update/Delete
   * @param isOverwrite if true, overwrite if the destination file exists, else add a duplicate copy
   * @param newFiles if this is non-null, a list of files that were created as a result of this
   *                 move will be returned.
   * @param isBucketed true if the table is bucketed
   * @param isFullAcidTable true if the table is a full ACID table
   * @param isManaged if table is managed.
   * @throws HiveException
   */
static protected void copyFiles(HiveConf conf, Path srcf, Path destf, FileSystem fs,
boolean isSrcLocal, boolean isAcidIUD,
boolean isOverwrite, List<Path> newFiles, boolean isBucketed,
boolean isFullAcidTable, boolean isManaged) throws HiveException {
try {
// create the destination if it does not exist
if (!fs.exists(destf)) {
FileUtils.mkdir(fs, destf, conf);
}
} catch (IOException e) {
      throw new HiveException(
          "copyFiles: error while checking/creating destination directory",
          e);
}
FileStatus[] srcs;
FileSystem srcFs;
try {
srcFs = srcf.getFileSystem(conf);
srcs = srcFs.globStatus(srcf);
} catch (IOException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException("addFiles: filesystem error in check phase. " + e.getMessage(), e);
}
if (srcs == null) {
LOG.info("No sources specified to move: " + srcf);
return;
}
// If we're moving files around for an ACID write then the rules and paths are all different.
// You can blame this on Owen.
if (isAcidIUD) {
moveAcidFiles(srcFs, srcs, destf, newFiles);
} else {
      // For the ACID non-bucketed case, the filenames have to be in a format consistent with INSERT/UPDATE/DELETE ops,
      // i.e., like 000000_0, 000001_0_copy_1, 000002_0.gz etc.
      // The extension is only maintained for files which are compressed.
copyFiles(conf, fs, srcs, srcFs, destf, isSrcLocal, isOverwrite,
newFiles, isFullAcidTable && !isBucketed, isManaged);
}
}
public static void moveAcidFiles(FileSystem fs, FileStatus[] stats, Path dst,
List<Path> newFiles) throws HiveException {
// The layout for ACID files is table|partname/base|delta|delete_delta/bucket
    // We will always only be writing delta files (except IOW, which writes base_X/).
// In the buckets created by FileSinkOperator
// it will look like original_bucket/delta|delete_delta/bucket
// (e.g. .../-ext-10004/000000_0/delta_0000014_0000014_0000/bucket_00000). So we need to
// move that into the above structure. For the first mover there will be no delta directory,
// so we can move the whole directory.
// For everyone else we will need to just move the buckets under the existing delta
// directory.
Set<Path> createdDeltaDirs = new HashSet<Path>();
// Open the original path we've been given and find the list of original buckets
for (FileStatus stat : stats) {
Path srcPath = stat.getPath();
LOG.debug("Acid move Looking for original buckets in " + srcPath);
FileStatus[] origBucketStats = null;
try {
origBucketStats = fs.listStatus(srcPath, AcidUtils.originalBucketFilter);
if(origBucketStats == null || origBucketStats.length == 0) {
/**
             check if we are dealing with data with a non-standard layout, for example a write
             produced by an (optimized) Union All query
which looks like
└── -ext-10000
├── HIVE_UNION_SUBDIR_1
│ └── 000000_0
│ └── delta_0000019_0000019_0001
│ ├── _orc_acid_version
│ └── bucket_00000
├── HIVE_UNION_SUBDIR_2
│ └── 000000_0
│ └── delta_0000019_0000019_0002
│ ├── _orc_acid_version
│ └── bucket_00000
The assumption is that we either have all data in subdirs or root of srcPath
but not both.
For Union case, we expect delta dirs to have unique names which is assured by
{@link org.apache.hadoop.hive.ql.optimizer.QueryPlanPostProcessor}
*/
FileStatus[] unionSubdirs = fs.globStatus(new Path(srcPath,
AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "[0-9]*"));
List<FileStatus> buckets = new ArrayList<>();
for(FileStatus unionSubdir : unionSubdirs) {
Collections.addAll(buckets,
fs.listStatus(unionSubdir.getPath(), AcidUtils.originalBucketFilter));
}
origBucketStats = buckets.toArray(new FileStatus[buckets.size()]);
}
} catch (IOException e) {
String msg = "Unable to look for bucket files in src path " + srcPath.toUri().toString();
LOG.error(msg);
throw new HiveException(msg, e);
}
LOG.debug("Acid move found " + origBucketStats.length + " original buckets");
for (FileStatus origBucketStat : origBucketStats) {
Path origBucketPath = origBucketStat.getPath();
moveAcidFiles(AcidUtils.DELTA_PREFIX, AcidUtils.deltaFileFilter,
fs, dst, origBucketPath, createdDeltaDirs, newFiles);
moveAcidFiles(AcidUtils.DELETE_DELTA_PREFIX, AcidUtils.deleteEventDeltaDirFilter,
fs, dst,origBucketPath, createdDeltaDirs, newFiles);
moveAcidFiles(AcidUtils.BASE_PREFIX, AcidUtils.baseFileFilter,//for Insert Overwrite
fs, dst, origBucketPath, createdDeltaDirs, newFiles);
}
}
}
private static void moveAcidFiles(String deltaFileType, PathFilter pathFilter, FileSystem fs,
Path dst, Path origBucketPath, Set<Path> createdDeltaDirs,
List<Path> newFiles) throws HiveException {
LOG.debug("Acid move looking for " + deltaFileType + " files in bucket " + origBucketPath);
FileStatus[] deltaStats = null;
try {
deltaStats = fs.listStatus(origBucketPath, pathFilter);
} catch (IOException e) {
throw new HiveException("Unable to look for " + deltaFileType + " files in original bucket " +
origBucketPath.toUri().toString(), e);
}
LOG.debug("Acid move found " + deltaStats.length + " " + deltaFileType + " files");
for (FileStatus deltaStat : deltaStats) {
Path deltaPath = deltaStat.getPath();
      // Create the delta directory. Don't worry if it already exists,
      // as that likely means another task got to it first. Then move each of the buckets.
      // It would be more efficient to try to move the delta with its buckets, but that is
      // harder to make race-condition proof.
Path deltaDest = new Path(dst, deltaPath.getName());
try {
if (!createdDeltaDirs.contains(deltaDest)) {
try {
if(fs.mkdirs(deltaDest)) {
fs.rename(AcidUtils.OrcAcidVersion.getVersionFilePath(deltaStat.getPath()),
AcidUtils.OrcAcidVersion.getVersionFilePath(deltaDest));
}
createdDeltaDirs.add(deltaDest);
} catch (IOException swallowIt) {
// Don't worry about this, as it likely just means it's already been created.
LOG.info("Unable to create " + deltaFileType + " directory " + deltaDest +
", assuming it already exists: " + swallowIt.getMessage());
}
}
FileStatus[] bucketStats = fs.listStatus(deltaPath, AcidUtils.bucketFileFilter);
LOG.debug("Acid move found " + bucketStats.length + " bucket files");
for (FileStatus bucketStat : bucketStats) {
Path bucketSrc = bucketStat.getPath();
Path bucketDest = new Path(deltaDest, bucketSrc.getName());
final String msg = "Unable to move source " + bucketSrc + " to destination " +
bucketDest;
LOG.info("Moving bucket " + bucketSrc.toUri().toString() + " to " +
bucketDest.toUri().toString());
try {
fs.rename(bucketSrc, bucketDest);
if (newFiles != null) {
newFiles.add(bucketDest);
}
} catch (Exception e) {
throw getHiveException(e, msg);
}
}
} catch (IOException e) {
throw new HiveException("Error moving acid files " + e.getMessage(), e);
}
}
}
/**
   * Replaces files in the partition with the new data set specified by srcf. Works
   * by renaming the directory of srcf to the destination file.
   * srcf, destf, and tmppath should reside in the same DFS, but the oldPath can be in a
   * different DFS.
*
* @param tablePath path of the table. Used to identify permission inheritance.
* @param srcf
* Source directory to be renamed to tmppath. It should be a
* leaf directory where the final data files reside. However it
* could potentially contain subdirectories as well.
* @param destf
* The directory where the final data needs to go
* @param oldPath
   *          The old data location that needs to be cleaned up. Most of the time it will be the
   *          same as destf, unless it is across FileSystem boundaries.
* @param purge
   *          When set to true, files which need to be deleted are not moved to Trash
* @param isSrcLocal
* If the source directory is LOCAL
* @param newFiles
* Output the list of new files replaced in the destination path
* @param isManaged
* If the table is managed.
*/
private void replaceFiles(Path tablePath, Path srcf, Path destf, Path oldPath, HiveConf conf,
boolean isSrcLocal, boolean purge, List<Path> newFiles, PathFilter deletePathFilter,
boolean isNeedRecycle, boolean isManaged, boolean isInsertOverwrite) throws HiveException {
try {
FileSystem destFs = destf.getFileSystem(conf);
// check if srcf contains nested sub-directories
FileStatus[] srcs;
FileSystem srcFs;
try {
srcFs = srcf.getFileSystem(conf);
srcs = srcFs.globStatus(srcf);
} catch (IOException e) {
throw new HiveException("Getting globStatus " + srcf.toString(), e);
}
// the extra check is required to make ALTER TABLE ... CONCATENATE work
if (oldPath != null && (srcs != null || isInsertOverwrite)) {
deleteOldPathForReplace(destf, oldPath, conf, purge, deletePathFilter, isNeedRecycle);
}
if (srcs == null) {
LOG.info("No sources specified to move: " + srcf);
return;
}
      // First call FileUtils.mkdir to make sure the destf directory exists; if it does not,
      // it is created.
boolean destfExist = FileUtils.mkdir(destFs, destf, conf);
if(!destfExist) {
throw new IOException("Directory " + destf.toString()
+ " does not exist and could not be created.");
}
      // Two cases:
      // 1. srcs has only a src directory; when renaming the src directory to destf, we may also
      //    need to copy/move each file under the source directory, to avoid deleting the
      //    destination directory if it is the root of an HDFS encryption zone.
      // 2. srcs must be a list of files -- ensured by LoadSemanticAnalyzer.
      // In both cases, we move the files under destf.
if (srcs.length == 1 && srcs[0].isDirectory()) {
if (!moveFile(conf, srcs[0].getPath(), destf, true, isSrcLocal, isManaged)) {
throw new IOException("Error moving: " + srcf + " into: " + destf);
}
// Add file paths of the files that will be moved to the destination if the caller needs it
if (null != newFiles) {
listNewFilesRecursively(destFs, destf, newFiles);
}
} else {
final Map<Future<Boolean>, Path> moveFutures = Maps.newLinkedHashMapWithExpectedSize(srcs.length);
final int moveFilesThreadCount = HiveConf.getIntVar(conf, ConfVars.HIVE_MOVE_FILES_THREAD_COUNT);
final ExecutorService pool = moveFilesThreadCount > 0
? Executors.newFixedThreadPool(
moveFilesThreadCount,
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Replace-Thread-%d").build())
: MoreExecutors.newDirectExecutorService();
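        // A thread count of 0 degrades to a direct executor, i.e. each move runs inline
        // on the calling thread instead of in a background pool.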
final SessionState parentSession = SessionState.get();
      // it is either a file or a glob
for (FileStatus src : srcs) {
Path destFile = new Path(destf, src.getPath().getName());
moveFutures.put(
pool.submit(
new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
SessionState.setCurrentSessionState(parentSession);
return moveFile(
conf, src.getPath(), destFile, true, isSrcLocal, isManaged);
}
}),
destFile);
}
pool.shutdown();
for (Map.Entry<Future<Boolean>, Path> moveFuture : moveFutures.entrySet()) {
boolean moveFailed;
try {
moveFailed = !moveFuture.getKey().get();
} catch (InterruptedException | ExecutionException e) {
pool.shutdownNow();
if (e.getCause() instanceof IOException) {
throw (IOException) e.getCause();
}
if (e.getCause() instanceof HiveException) {
throw (HiveException) e.getCause();
}
throw handlePoolException(pool, e);
}
if (moveFailed) {
throw new IOException("Error moving: " + srcf + " into: " + destf);
}
// Add file paths of the files that will be moved to the destination if the caller needs it
if (null != newFiles) {
newFiles.add(moveFuture.getValue());
}
}
}
} catch (IOException e) {
throw new HiveException(e.getMessage(), e);
}
}
private void deleteOldPathForReplace(Path destPath, Path oldPath, HiveConf conf, boolean purge,
PathFilter pathFilter, boolean isNeedRecycle) throws HiveException {
Utilities.FILE_OP_LOGGER.debug("Deleting old paths for replace in " + destPath
+ " and old path " + oldPath);
boolean isOldPathUnderDestf = false;
try {
FileSystem oldFs = oldPath.getFileSystem(conf);
FileSystem destFs = destPath.getFileSystem(conf);
      // If oldPath is destf or a subdirectory of it, it should definitely be deleted; otherwise its
      // existing content might result in incorrect (extra) data.
      // It is not clear why HIVE-8750 changed this to not delete oldPath when it is
      // not destf or one of its subdirectories.
isOldPathUnderDestf = isSubDir(oldPath, destPath, oldFs, destFs, false);
if (isOldPathUnderDestf) {
cleanUpOneDirectoryForReplace(oldPath, oldFs, pathFilter, conf, purge, isNeedRecycle);
}
} catch (IOException e) {
if (isOldPathUnderDestf) {
// if oldPath is a subdir of destf but it could not be cleaned
throw new HiveException("Directory " + oldPath.toString()
+ " could not be cleaned up.", e);
} else {
//swallow the exception since it won't affect the final result
LOG.warn("Directory " + oldPath.toString() + " cannot be cleaned: " + e, e);
}
}
}
public void cleanUpOneDirectoryForReplace(Path path, FileSystem fs,
PathFilter pathFilter, HiveConf conf, boolean purge, boolean isNeedRecycle) throws IOException, HiveException {
if (isNeedRecycle && conf.getBoolVar(HiveConf.ConfVars.REPLCMENABLED)) {
recycleDirToCmPath(path, purge);
}
FileStatus[] statuses = fs.listStatus(path, pathFilter);
if (statuses == null || statuses.length == 0) {
return;
}
    if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
      StringBuilder sb = new StringBuilder("Deleting files under ").append(path).append(" for replace: ");
      for (FileStatus file : statuses) {
        sb.append(file.getPath().getName()).append(", ");
      }
      Utilities.FILE_OP_LOGGER.trace(sb.toString());
    }
if (!trashFiles(fs, statuses, conf, purge)) {
throw new HiveException("Old path " + path + " has not been cleaned up.");
}
}
  /**
   * Trashes or deletes all files under a directory. Leaves the directory as is.
   * @param fs FileSystem to use
   * @param statuses fileStatuses of files to be deleted
   * @param conf hive configuration
   * @param purge when true, skip the Trash and delete the files permanently
   * @return true if deletion successful
   * @throws IOException
   */
public static boolean trashFiles(final FileSystem fs, final FileStatus[] statuses,
final Configuration conf, final boolean purge)
throws IOException {
boolean result = true;
if (statuses == null || statuses.length == 0) {
return false;
}
final List<Future<Boolean>> futures = new LinkedList<>();
final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ?
Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Delete-Thread-%d").build()) : null;
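    // As in moveFile, a null pool means the configured thread count is 0 and deletions
    // happen serially on the calling thread.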
final SessionState parentSession = SessionState.get();
for (final FileStatus status : statuses) {
if (null == pool) {
result &= FileUtils.moveToTrash(fs, status.getPath(), conf, purge);
} else {
futures.add(pool.submit(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
SessionState.setCurrentSessionState(parentSession);
return FileUtils.moveToTrash(fs, status.getPath(), conf, purge);
}
}));
}
}
if (null != pool) {
pool.shutdown();
for (Future<Boolean> future : futures) {
try {
result &= future.get();
} catch (InterruptedException | ExecutionException e) {
LOG.error("Failed to delete: ",e);
pool.shutdownNow();
throw new IOException(e);
}
}
}
return result;
}
public static boolean isHadoop1() {
return ShimLoader.getMajorVersion().startsWith("0.20");
}
public List<Partition> exchangeTablePartitions(Map<String, String> partitionSpecs,
String sourceDb, String sourceTable, String destDb,
String destinationTableName) throws HiveException {
try {
List<org.apache.hadoop.hive.metastore.api.Partition> partitions =
getMSC().exchange_partitions(partitionSpecs, sourceDb, sourceTable, destDb,
destinationTableName);
return convertFromMetastore(getTable(destDb, destinationTableName), partitions);
} catch (Exception ex) {
LOG.error(StringUtils.stringifyException(ex));
throw new HiveException(ex);
}
}
  /**
   * Creates a metastore client. Currently it creates only a JDBC-based client, as
   * file-based store support has been removed.
   *
   * @return a Meta Store Client
   * @throws MetaException
   *           if a working client can't be created
   */
private IMetaStoreClient createMetaStoreClient(boolean allowEmbedded) throws MetaException {
HiveMetaHookLoader hookLoader = new HiveMetaHookLoader() {
@Override
public HiveMetaHook getHook(
org.apache.hadoop.hive.metastore.api.Table tbl)
throws MetaException {
HiveStorageHandler storageHandler = createStorageHandler(tbl);
return storageHandler == null ? null : storageHandler.getMetaHook();
}
};
if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) {
return new SessionHiveMetaStoreClient(conf, hookLoader, allowEmbedded);
} else {
return RetryingMetaStoreClient.getProxy(conf, hookLoader, metaCallTimeMap,
SessionHiveMetaStoreClient.class.getName(), allowEmbedded);
}
}
@Nullable
private HiveStorageHandler createStorageHandler(org.apache.hadoop.hive.metastore.api.Table tbl) throws MetaException {
try {
if (tbl == null) {
return null;
}
HiveStorageHandler storageHandler =
HiveUtils.getStorageHandler(conf, tbl.getParameters().get(META_TABLE_STORAGE));
return storageHandler;
} catch (HiveException ex) {
LOG.error(StringUtils.stringifyException(ex));
throw new MetaException(
"Failed to load storage handler: " + ex.getMessage());
}
}
public static class SchemaException extends MetaException {
private static final long serialVersionUID = 1L;
public SchemaException(String message) {
super(message);
}
}
/**
* @return synchronized metastore client
* @throws MetaException
*/
@LimitedPrivate(value = {"Hive"})
@Unstable
public synchronized SynchronizedMetaStoreClient getSynchronizedMSC() throws MetaException {
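    // Lazily create and cache a synchronized wrapper around this thread's metastore client.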
if (syncMetaStoreClient == null) {
syncMetaStoreClient = new SynchronizedMetaStoreClient(getMSC(true, false));
}
return syncMetaStoreClient;
}
/**
* @return the metastore client for the current thread
* @throws MetaException
*/
@LimitedPrivate(value = {"Hive"})
@Unstable
public synchronized IMetaStoreClient getMSC() throws MetaException {
return getMSC(true, false);
}
/**
* @return the metastore client for the current thread
* @throws MetaException
*/
@LimitedPrivate(value = {"Hive"})
@Unstable
public synchronized IMetaStoreClient getMSC(
boolean allowEmbedded, boolean forceCreate) throws MetaException {
if (metaStoreClient == null || forceCreate) {
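      // (Re)create the client when none is cached or when the caller explicitly forces it.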
try {
owner = UserGroupInformation.getCurrentUser();
} catch(IOException e) {
String msg = "Error getting current user: " + e.getMessage();
LOG.error(msg, e);
throw new MetaException(msg + "\n" + StringUtils.stringifyException(e));
}
try {
metaStoreClient = createMetaStoreClient(allowEmbedded);
} catch (RuntimeException ex) {
Throwable t = ex.getCause();
while (t != null) {
if (t instanceof JDODataStoreException && t.getMessage() != null
&& t.getMessage().contains("autoCreate")) {
LOG.error("Cannot initialize metastore due to autoCreate error", t);
// DataNucleus wants us to auto-create, but we shall do no such thing.
throw new SchemaException("Hive metastore database is not initialized. Please use "
+ "schematool (e.g. ./schematool -initSchema -dbType ...) to create the schema. If "
+ "needed, don't forget to include the option to auto-create the underlying database"
+ " in your JDBC connection string (e.g. ?createDatabaseIfNotExist=true for mysql)");
}
t = t.getCause();
}
throw ex;
}
String metaStoreUris = conf.getVar(HiveConf.ConfVars.METASTOREURIS);
if (!org.apache.commons.lang3.StringUtils.isEmpty(metaStoreUris)) {
// get a synchronized wrapper if the meta store is remote.
metaStoreClient = HiveMetaStoreClient.newSynchronizedClient(metaStoreClient);
}
}
return metaStoreClient;
}
private static String getUserName() {
return SessionState.getUserFromAuthenticator();
}
private List<String> getGroupNames() {
SessionState ss = SessionState.get();
if (ss != null && ss.getAuthenticator() != null) {
return ss.getAuthenticator().getGroupNames();
}
return null;
}
public static List<FieldSchema> getFieldsFromDeserializer(String name,
Deserializer serde) throws HiveException {
try {
return HiveMetaStoreUtils.getFieldsFromDeserializer(name, serde);
} catch (SerDeException e) {
throw new HiveException("Error in getting fields from serde. "
+ e.getMessage(), e);
} catch (MetaException e) {
throw new HiveException("Error in getting fields from serde."
+ e.getMessage(), e);
}
}
public boolean setPartitionColumnStatistics(
SetPartitionsStatsRequest request) throws HiveException {
try {
ColumnStatistics colStat = request.getColStats().get(0);
ColumnStatisticsDesc statsDesc = colStat.getStatsDesc();
// In case of replication, the request already has valid writeId and valid transaction id
// list obtained from the source. Just use it.
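      // Otherwise, obtain a fresh table snapshot so the stats are tagged with the current
      // write ID and valid write-ID list.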
if (request.getWriteId() <= 0 || request.getValidWriteIdList() == null) {
Table tbl = getTable(statsDesc.getDbName(), statsDesc.getTableName());
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
request.setValidWriteIdList(tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null);
request.setWriteId(tableSnapshot != null ? tableSnapshot.getWriteId() : 0);
}
return getMSC().setPartitionColumnStatistics(request);
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public List<ColumnStatisticsObj> getTableColumnStatistics(
String dbName, String tableName, List<String> colNames, boolean checkTransactional)
throws HiveException {
List<ColumnStatisticsObj> retv = null;
try {
if (checkTransactional) {
Table tbl = getTable(dbName, tableName);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
retv = getMSC().getTableColumnStatistics(dbName, tableName, colNames,
tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null);
} else {
retv = getMSC().getTableColumnStatistics(dbName, tableName, colNames);
}
return retv;
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(
String dbName, String tableName, List<String> partNames, List<String> colNames,
boolean checkTransactional)
throws HiveException {
String writeIdList = null;
try {
if (checkTransactional) {
Table tbl = getTable(dbName, tableName);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
writeIdList = tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null;
}
return getMSC().getPartitionColumnStatistics(
dbName, tableName, partNames, colNames, writeIdList);
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public AggrStats getAggrColStatsFor(String dbName, String tblName,
List<String> colNames, List<String> partName, boolean checkTransactional) {
String writeIdList = null;
try {
if (checkTransactional) {
Table tbl = getTable(dbName, tblName);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
writeIdList = tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null;
}
return getMSC().getAggrColStatsFor(dbName, tblName, colNames, partName, writeIdList);
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
return new AggrStats(new ArrayList<ColumnStatisticsObj>(),0);
}
}
public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
throws HiveException {
try {
return getMSC().deleteTableColumnStatistics(dbName, tableName, colName);
} catch(Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public boolean deletePartitionColumnStatistics(String dbName, String tableName, String partName,
String colName) throws HiveException {
try {
return getMSC().deletePartitionColumnStatistics(dbName, tableName, partName, colName);
} catch(Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public Table newTable(String tableName) throws HiveException {
String[] names = Utilities.getDbTableName(tableName);
return new Table(names[0], names[1]);
}
public String getDelegationToken(String owner, String renewer)
throws HiveException{
try {
return getMSC().getDelegationToken(owner, renewer);
} catch(Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void cancelDelegationToken(String tokenStrForm)
throws HiveException {
try {
getMSC().cancelDelegationToken(tokenStrForm);
} catch(Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
/**
* @deprecated use {@link #compact2(String, String, String, String, Map)}
*/
@Deprecated
public void compact(String dbname, String tableName, String partName, String compactType,
Map<String, String> tblproperties) throws HiveException {
compact2(dbname, tableName, partName, compactType, tblproperties);
}
/**
* Enqueue a compaction request. Only 1 compaction for a given resource (db/table/partSpec) can
* be scheduled/running at any given time.
* @param dbname name of the database, if null default will be used.
* @param tableName name of the table, cannot be null
* @param partName name of the partition, if null table will be compacted (valid only for
* non-partitioned tables).
* @param compactType major or minor
* @param tblproperties the list of tblproperties to overwrite for this compaction
   * @return id of the new request, or the id of an already existing request for the specified resource
* @throws HiveException
*/
public CompactionResponse compact2(String dbname, String tableName, String partName, String compactType,
Map<String, String> tblproperties)
throws HiveException {
try {
CompactionType cr = null;
if ("major".equalsIgnoreCase(compactType)) {
cr = CompactionType.MAJOR;
} else if ("minor".equalsIgnoreCase(compactType)) {
cr = CompactionType.MINOR;
} else {
throw new RuntimeException("Unknown compaction type " + compactType);
}
return getMSC().compact2(dbname, tableName, partName, cr, tblproperties);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public ShowCompactResponse showCompactions() throws HiveException {
try {
return getMSC().showCompactions();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public GetOpenTxnsInfoResponse showTransactions() throws HiveException {
try {
return getMSC().showTxns();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void abortTransactions(List<Long> txnids) throws HiveException {
try {
getMSC().abortTxns(txnids);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void createFunction(Function func) throws HiveException {
try {
getMSC().createFunction(func);
} catch (TException te) {
throw new HiveException(te);
}
}
public void alterFunction(String dbName, String funcName, Function newFunction)
throws HiveException {
try {
getMSC().alterFunction(dbName, funcName, newFunction);
} catch (TException te) {
throw new HiveException(te);
}
}
public void dropFunction(String dbName, String funcName)
throws HiveException {
try {
getMSC().dropFunction(dbName, funcName);
} catch (TException te) {
throw new HiveException(te);
}
}
public Function getFunction(String dbName, String funcName) throws HiveException {
try {
return getMSC().getFunction(dbName, funcName);
} catch (TException te) {
throw new HiveException(te);
}
}
public List<Function> getAllFunctions() throws HiveException {
try {
List<Function> functions = getMSC().getAllFunctions().getFunctions();
return functions == null ? new ArrayList<Function>() : functions;
} catch (TException te) {
throw new HiveException(te);
}
}
public List<String> getFunctions(String dbName, String pattern) throws HiveException {
try {
return getMSC().getFunctions(dbName, pattern);
} catch (TException te) {
throw new HiveException(te);
}
}
public void setMetaConf(String propName, String propValue) throws HiveException {
try {
getMSC().setMetaConf(propName, propValue);
} catch (TException te) {
throw new HiveException(te);
}
}
public String getMetaConf(String propName) throws HiveException {
try {
return getMSC().getMetaConf(propName);
} catch (TException te) {
throw new HiveException(te);
}
}
public void clearMetaCallTiming() {
metaCallTimeMap.clear();
}
public ImmutableMap<String, Long> dumpAndClearMetaCallTiming(String phase) {
boolean phaseInfoLogged = false;
if (LOG.isDebugEnabled()) {
phaseInfoLogged = logDumpPhase(phase);
LOG.debug("Total time spent in each metastore function (ms): " + metaCallTimeMap);
}
if (LOG.isInfoEnabled()) {
      // print information at INFO level about calls that took a long time
for (Entry<String, Long> callTime : metaCallTimeMap.entrySet()) {
// dump information if call took more than 1 sec (1000ms)
if (callTime.getValue() > 1000) {
if (!phaseInfoLogged) {
phaseInfoLogged = logDumpPhase(phase);
}
LOG.info("Total time spent in this metastore function was greater than 1000ms : "
+ callTime);
}
}
}
ImmutableMap<String, Long> result = ImmutableMap.copyOf(metaCallTimeMap);
metaCallTimeMap.clear();
return result;
}
private boolean logDumpPhase(String phase) {
LOG.info("Dumping metastore api call timing information for : " + phase + " phase");
return true;
}
public Iterable<Map.Entry<Long, ByteBuffer>> getFileMetadata(
List<Long> fileIds) throws HiveException {
try {
return getMSC().getFileMetadata(fileIds);
} catch (TException e) {
throw new HiveException(e);
}
}
public Iterable<Map.Entry<Long, MetadataPpdResult>> getFileMetadataByExpr(
List<Long> fileIds, ByteBuffer sarg, boolean doGetFooters) throws HiveException {
try {
return getMSC().getFileMetadataBySarg(fileIds, sarg, doGetFooters);
} catch (TException e) {
throw new HiveException(e);
}
}
public void clearFileMetadata(List<Long> fileIds) throws HiveException {
try {
getMSC().clearFileMetadata(fileIds);
} catch (TException e) {
throw new HiveException(e);
}
}
public void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) throws HiveException {
try {
getMSC().putFileMetadata(fileIds, metadata);
} catch (TException e) {
throw new HiveException(e);
}
}
public void cacheFileMetadata(
String dbName, String tableName, String partName, boolean allParts) throws HiveException {
try {
boolean willCache = getMSC().cacheFileMetadata(dbName, tableName, partName, allParts);
if (!willCache) {
throw new HiveException(
"Caching file metadata is not supported by metastore or for this file format");
}
} catch (TException e) {
throw new HiveException(e);
}
}
public void dropConstraint(String dbName, String tableName, String constraintName)
throws HiveException, NoSuchObjectException {
try {
getMSC().dropConstraint(dbName, tableName, constraintName);
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLPrimaryKey> getPrimaryKeyList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getPrimaryKeys(new PrimaryKeysRequest(dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLForeignKey> getForeignKeyList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getForeignKeys(new ForeignKeysRequest(null, null, dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLUniqueConstraint> getUniqueConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getUniqueConstraints(new UniqueConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLNotNullConstraint> getNotNullConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getNotNullConstraints(new NotNullConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLDefaultConstraint> getDefaultConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getDefaultConstraints(new DefaultConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLCheckConstraint> getCheckConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getCheckConstraints(new CheckConstraintsRequest(getDefaultCatalog(conf),
dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all primary key columns associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Primary Key associated with the table.
* @throws HiveException
*/
public PrimaryKeyInfo getPrimaryKeys(String dbName, String tblName) throws HiveException {
return getPrimaryKeys(dbName, tblName, false);
}
/**
* Get primary key columns associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Primary Key associated with the table.
* @throws HiveException
*/
public PrimaryKeyInfo getReliablePrimaryKeys(String dbName, String tblName) throws HiveException {
return getPrimaryKeys(dbName, tblName, true);
}
private PrimaryKeyInfo getPrimaryKeys(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLPrimaryKey> primaryKeys = getMSC().getPrimaryKeys(new PrimaryKeysRequest(dbName, tblName));
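      // RELY constraints are the ones the optimizer is allowed to assume hold; filter out the rest.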
if (onlyReliable && primaryKeys != null && !primaryKeys.isEmpty()) {
primaryKeys = primaryKeys.stream()
.filter(pk -> pk.isRely_cstr())
.collect(Collectors.toList());
}
return new PrimaryKeyInfo(primaryKeys, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all foreign keys associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Foreign keys associated with the table.
* @throws HiveException
*/
public ForeignKeyInfo getForeignKeys(String dbName, String tblName) throws HiveException {
return getForeignKeys(dbName, tblName, false);
}
/**
* Get foreign keys associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Foreign keys associated with the table.
* @throws HiveException
*/
public ForeignKeyInfo getReliableForeignKeys(String dbName, String tblName) throws HiveException {
return getForeignKeys(dbName, tblName, true);
}
private ForeignKeyInfo getForeignKeys(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLForeignKey> foreignKeys = getMSC().getForeignKeys(new ForeignKeysRequest(null, null, dbName, tblName));
if (onlyReliable && foreignKeys != null && !foreignKeys.isEmpty()) {
foreignKeys = foreignKeys.stream()
.filter(fk -> fk.isRely_cstr())
.collect(Collectors.toList());
}
return new ForeignKeyInfo(foreignKeys, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all unique constraints associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Unique constraints associated with the table.
* @throws HiveException
*/
public UniqueConstraint getUniqueConstraints(String dbName, String tblName) throws HiveException {
return getUniqueConstraints(dbName, tblName, false);
}
/**
* Get unique constraints associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Unique constraints associated with the table.
* @throws HiveException
*/
public UniqueConstraint getReliableUniqueConstraints(String dbName, String tblName) throws HiveException {
return getUniqueConstraints(dbName, tblName, true);
}
private UniqueConstraint getUniqueConstraints(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLUniqueConstraint> uniqueConstraints = getMSC().getUniqueConstraints(
new UniqueConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (onlyReliable && uniqueConstraints != null && !uniqueConstraints.isEmpty()) {
uniqueConstraints = uniqueConstraints.stream()
.filter(uk -> uk.isRely_cstr())
.collect(Collectors.toList());
}
return new UniqueConstraint(uniqueConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all not null constraints associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Not null constraints associated with the table.
* @throws HiveException
*/
public NotNullConstraint getNotNullConstraints(String dbName, String tblName) throws HiveException {
return getNotNullConstraints(dbName, tblName, false);
}
/**
* Get not null constraints associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Not null constraints associated with the table.
* @throws HiveException
*/
public NotNullConstraint getReliableNotNullConstraints(String dbName, String tblName) throws HiveException {
return getNotNullConstraints(dbName, tblName, true);
}
/**
* Get not null constraints associated with the table that are enabled/enforced.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Not null constraints associated with the table.
* @throws HiveException
*/
public NotNullConstraint getEnabledNotNullConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLNotNullConstraint> notNullConstraints = getMSC().getNotNullConstraints(
new NotNullConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (notNullConstraints != null && !notNullConstraints.isEmpty()) {
notNullConstraints = notNullConstraints.stream()
.filter(nnc -> nnc.isEnable_cstr())
.collect(Collectors.toList());
}
return new NotNullConstraint(notNullConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get CHECK constraints associated with the table that are enabled
*
* @param dbName Database Name
* @param tblName Table Name
* @return CHECK constraints associated with the table.
* @throws HiveException
*/
public CheckConstraint getEnabledCheckConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLCheckConstraint> checkConstraints = getMSC().getCheckConstraints(
new CheckConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (checkConstraints != null && !checkConstraints.isEmpty()) {
checkConstraints = checkConstraints.stream()
.filter(nnc -> nnc.isEnable_cstr())
.collect(Collectors.toList());
}
return new CheckConstraint(checkConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get Default constraints associated with the table that are enabled
*
* @param dbName Database Name
* @param tblName Table Name
* @return Default constraints associated with the table.
* @throws HiveException
*/
public DefaultConstraint getEnabledDefaultConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLDefaultConstraint> defaultConstraints = getMSC().getDefaultConstraints(
new DefaultConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (defaultConstraints != null && !defaultConstraints.isEmpty()) {
defaultConstraints = defaultConstraints.stream()
.filter(nnc -> nnc.isEnable_cstr())
.collect(Collectors.toList());
}
return new DefaultConstraint(defaultConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
private NotNullConstraint getNotNullConstraints(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLNotNullConstraint> notNullConstraints = getMSC().getNotNullConstraints(
new NotNullConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (onlyReliable && notNullConstraints != null && !notNullConstraints.isEmpty()) {
notNullConstraints = notNullConstraints.stream()
.filter(nnc -> nnc.isRely_cstr())
.collect(Collectors.toList());
}
return new NotNullConstraint(notNullConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
public DefaultConstraint getDefaultConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLDefaultConstraint> defaultConstraints = getMSC().getDefaultConstraints(
new DefaultConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (defaultConstraints != null && !defaultConstraints.isEmpty()) {
defaultConstraints = defaultConstraints.stream()
.collect(Collectors.toList());
}
return new DefaultConstraint(defaultConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
public CheckConstraint getCheckConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLCheckConstraint> checkConstraints = getMSC().getCheckConstraints(
new CheckConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (checkConstraints != null && !checkConstraints.isEmpty()) {
checkConstraints = checkConstraints.stream()
.collect(Collectors.toList());
}
return new CheckConstraint(checkConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addPrimaryKey(List<SQLPrimaryKey> primaryKeyCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addPrimaryKey(primaryKeyCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addForeignKey(List<SQLForeignKey> foreignKeyCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addForeignKey(foreignKeyCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addUniqueConstraint(List<SQLUniqueConstraint> uniqueConstraintCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addUniqueConstraint(uniqueConstraintCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addNotNullConstraint(List<SQLNotNullConstraint> notNullConstraintCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addNotNullConstraint(notNullConstraintCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addDefaultConstraint(List<SQLDefaultConstraint> defaultConstraints)
throws HiveException, NoSuchObjectException {
try {
getMSC().addDefaultConstraint(defaultConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addCheckConstraint(List<SQLCheckConstraint> checkConstraints)
throws HiveException, NoSuchObjectException {
try {
getMSC().addCheckConstraint(checkConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createResourcePlan(WMResourcePlan resourcePlan, String copyFromName, boolean ifNotExists)
throws HiveException {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (resourcePlan.isSetNs() && !ns.equals(resourcePlan.getNs())) {
throw new HiveException("Cannot create a plan in a different NS; was "
+ resourcePlan.getNs() + ", configured " + ns);
}
resourcePlan.setNs(ns);
try {
getMSC().createResourcePlan(resourcePlan, copyFromName);
} catch (AlreadyExistsException e) {
if (!ifNotExists) {
throw new HiveException(e, ErrorMsg.RESOURCE_PLAN_ALREADY_EXISTS, resourcePlan.getName());
}
} catch (Exception e) {
throw new HiveException(e);
}
}
public WMFullResourcePlan getResourcePlan(String rpName) throws HiveException {
try {
return getMSC().getResourcePlan(rpName, conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (NoSuchObjectException e) {
return null;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<WMResourcePlan> getAllResourcePlans() throws HiveException {
try {
return getMSC().getAllResourcePlans(conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropResourcePlan(String rpName, boolean ifExists) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
getMSC().dropResourcePlan(rpName, ns);
} catch (NoSuchObjectException e) {
if (!ifExists) {
throw new HiveException(e, ErrorMsg.RESOURCE_PLAN_NOT_EXISTS, rpName);
}
} catch (Exception e) {
throw new HiveException(e);
}
}
public WMFullResourcePlan alterResourcePlan(String rpName, WMNullableResourcePlan resourcePlan,
boolean canActivateDisabled, boolean isForceDeactivate, boolean isReplace) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (resourcePlan.isSetNs() && !ns.equals(resourcePlan.getNs())) {
throw new HiveException("Cannot modify a plan in a different NS; was "
+ resourcePlan.getNs() + ", configured " + ns);
}
resourcePlan.setNs(ns);
return getMSC().alterResourcePlan(rpName, ns, resourcePlan, canActivateDisabled,
isForceDeactivate, isReplace);
} catch (Exception e) {
throw new HiveException(e);
}
}
public WMFullResourcePlan getActiveResourcePlan() throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
return getMSC().getActiveResourcePlan(ns);
} catch (Exception e) {
throw new HiveException(e);
}
}
public WMValidateResourcePlanResponse validateResourcePlan(String rpName) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
return getMSC().validateResourcePlan(rpName, ns);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createWMTrigger(WMTrigger trigger) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (trigger.isSetNs() && !ns.equals(trigger.getNs())) {
throw new HiveException("Cannot create a trigger in a different NS; was "
+ trigger.getNs() + ", configured " + ns);
}
trigger.setNs(ns);
getMSC().createWMTrigger(trigger);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void alterWMTrigger(WMTrigger trigger) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (trigger.isSetNs() && !ns.equals(trigger.getNs())) {
throw new HiveException("Cannot modify a trigger in a different NS; was "
+ trigger.getNs() + ", configured " + ns);
}
trigger.setNs(ns);
getMSC().alterWMTrigger(trigger);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropWMTrigger(String rpName, String triggerName) throws HiveException {
try {
getMSC().dropWMTrigger(rpName, triggerName, conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createWMPool(WMPool pool) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (pool.isSetNs() && !ns.equals(pool.getNs())) {
throw new HiveException("Cannot create a pool in a different NS; was "
+ pool.getNs() + ", configured " + ns);
}
pool.setNs(ns);
getMSC().createWMPool(pool);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void alterWMPool(WMNullablePool pool, String poolPath) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (pool.isSetNs() && !ns.equals(pool.getNs())) {
throw new HiveException("Cannot modify a pool in a different NS; was "
+ pool.getNs() + ", configured " + ns);
}
pool.setNs(ns);
getMSC().alterWMPool(pool, poolPath);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropWMPool(String resourcePlanName, String poolPath) throws HiveException {
try {
getMSC().dropWMPool(resourcePlanName, poolPath,
conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createOrUpdateWMMapping(WMMapping mapping, boolean isUpdate)
throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (mapping.isSetNs() && !ns.equals(mapping.getNs())) {
throw new HiveException("Cannot create a mapping in a different NS; was "
+ mapping.getNs() + ", configured " + ns);
}
mapping.setNs(ns);
getMSC().createOrUpdateWMMapping(mapping, isUpdate);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropWMMapping(WMMapping mapping) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (mapping.isSetNs() && !ns.equals(mapping.getNs())) {
throw new HiveException("Cannot modify a mapping in a different NS; was "
+ mapping.getNs() + ", configured " + ns);
}
mapping.setNs(ns);
getMSC().dropWMMapping(mapping);
} catch (Exception e) {
throw new HiveException(e);
}
}
// TODO: eh
public void createOrDropTriggerToPoolMapping(String resourcePlanName, String triggerName,
String poolPath, boolean shouldDrop) throws HiveException {
try {
getMSC().createOrDropTriggerToPoolMapping(resourcePlanName, triggerName, poolPath,
shouldDrop, conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
@Nullable
public StorageHandlerInfo getStorageHandlerInfo(Table table)
throws HiveException {
try {
HiveStorageHandler storageHandler = createStorageHandler(table.getTTable());
return storageHandler == null ? null : storageHandler.getStorageHandlerInfo(table.getTTable());
} catch (Exception e) {
throw new HiveException(e);
}
}
}
| ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.metadata;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import static org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
import static org.apache.hadoop.hive.ql.io.AcidUtils.getFullTableName;
import static org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.makeBinaryPredicate;
import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.sql.SQLIntegrityConstraintViolationException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.jdo.JDODataStoreException;
import com.google.common.collect.ImmutableList;
import org.apache.calcite.plan.RelOptMaterialization;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelVisitor;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rex.RexBuilder;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hive.common.*;
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
import org.apache.hadoop.hive.common.log.InPlaceUpdate;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.io.HdfsUtils;
import org.apache.hadoop.hive.metastore.HiveMetaException;
import org.apache.hadoop.hive.metastore.HiveMetaHook;
import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.PartitionDropOptions;
import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient;
import org.apache.hadoop.hive.metastore.SynchronizedMetaStoreClient;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.ReplChangeManager;
import org.apache.hadoop.hive.metastore.api.*;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.FunctionTask;
import org.apache.hadoop.hive.ql.exec.FunctionUtils;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.AcidUtils.TableSnapshot;
import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveAugmentMaterializationRule;
import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.DropPartitionDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.session.CreateTableAutomaticGrant;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.TxnIdUtils;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class has functions that implement meta data/DDL operations using calls
* to the metastore.
* It has a metastore client instance it uses to communicate with the metastore.
*
 * It is stored in a thread-local variable, and instances are accessed using
 * the static get methods in this class.
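 *
 * A minimal, hypothetical usage sketch (the table name is an example):
 * <pre>{@code
 *   Hive db = Hive.get(new HiveConf(Hive.class)); // thread-local instance
 *   Table t = db.getTable("default", "src");
 *   Hive.closeCurrent(); // closes this thread's metastore connection
 * }</pre>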
*/
@SuppressWarnings({"deprecation", "rawtypes"})
public class Hive {
  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.metadata.Hive");
private HiveConf conf = null;
private IMetaStoreClient metaStoreClient;
private SynchronizedMetaStoreClient syncMetaStoreClient;
private UserGroupInformation owner;
private boolean isAllowClose = true;
// metastore calls timing information
private final ConcurrentHashMap<String, Long> metaCallTimeMap = new ConcurrentHashMap<>();
// Static class to store thread local Hive object.
private static class ThreadLocalHive extends ThreadLocal<Hive> {
@Override
protected Hive initialValue() {
return null;
}
@Override
public synchronized void set(Hive hiveObj) {
Hive currentHive = this.get();
if (currentHive != hiveObj) {
// Remove/close current thread-local Hive object before overwriting with new Hive object.
remove();
super.set(hiveObj);
}
}
@Override
public synchronized void remove() {
Hive currentHive = this.get();
if (currentHive != null) {
// Close the metastore connections before removing it from thread local hiveDB.
currentHive.close(false);
super.remove();
}
}
}
private static ThreadLocalHive hiveDB = new ThreadLocalHive();
  // Note that while this is an improvement over static initialization, it is still not,
  // technically, valid, because nothing prevents us from connecting to several metastores in
  // the same process. This will still only get the functions from the first metastore.
private final static AtomicInteger didRegisterAllFuncs = new AtomicInteger(0);
private final static int REG_FUNCS_NO = 0, REG_FUNCS_DONE = 2, REG_FUNCS_PENDING = 1;
  // Registers all permanent functions once per process. The three states above form a
  // simple handshake: the thread that wins the NO -> PENDING CAS loads the functions;
  // other threads wait until the state becomes DONE (or NO again on failure).
private void registerAllFunctionsOnce() throws HiveException {
boolean breakLoop = false;
while (!breakLoop) {
int val = didRegisterAllFuncs.get();
switch (val) {
case REG_FUNCS_NO: {
if (didRegisterAllFuncs.compareAndSet(val, REG_FUNCS_PENDING)) {
breakLoop = true;
break;
}
continue;
}
case REG_FUNCS_PENDING: {
synchronized (didRegisterAllFuncs) {
try {
didRegisterAllFuncs.wait(100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
continue;
}
case REG_FUNCS_DONE: return;
default: throw new AssertionError(val);
}
}
try {
reloadFunctions();
didRegisterAllFuncs.compareAndSet(REG_FUNCS_PENDING, REG_FUNCS_DONE);
} catch (Exception e) {
LOG.warn("Failed to register all functions.", e);
didRegisterAllFuncs.compareAndSet(REG_FUNCS_PENDING, REG_FUNCS_NO);
throw new HiveException(e);
} finally {
synchronized (didRegisterAllFuncs) {
didRegisterAllFuncs.notifyAll();
}
}
}
public void reloadFunctions() throws HiveException {
HashSet<String> registryFunctions = new HashSet<String>(
FunctionRegistry.getFunctionNames(".+\\..+"));
for (Function function : getAllFunctions()) {
String functionName = function.getFunctionName();
try {
LOG.info("Registering function " + functionName + " " + function.getClassName());
String qualFunc = FunctionUtils.qualifyFunctionName(functionName, function.getDbName());
FunctionRegistry.registerPermanentFunction(qualFunc, function.getClassName(), false,
FunctionTask.toFunctionResource(function.getResourceUris()));
registryFunctions.remove(qualFunc);
} catch (Exception e) {
LOG.warn("Failed to register persistent function " +
functionName + ":" + function.getClassName() + ". Ignore and continue.");
}
}
// unregister functions from local system registry that are not in getAllFunctions()
for (String functionName : registryFunctions) {
try {
FunctionRegistry.unregisterPermanentFunction(functionName);
} catch (Exception e) {
        LOG.warn("Failed to unregister persistent function " +
            functionName + " on reload. Ignore and continue.");
}
}
}
public static Hive get(Configuration c, Class<?> clazz) throws HiveException {
return get(c instanceof HiveConf ? (HiveConf)c : new HiveConf(c, clazz));
}
/**
   * Gets the Hive object for the current thread. If one is not initialized, a
   * new one is created. A new one is also created if the new configuration
   * differs in metadata conf vars or if the owner has changed.
*
* @param c
* new Hive Configuration
* @return Hive object for current thread
* @throws HiveException
*
*/
public static Hive get(HiveConf c) throws HiveException {
return getInternal(c, false, false, true);
}
/**
* Same as {@link #get(HiveConf)}, except that it checks only the object identity of existing
* MS client, assuming the relevant settings would be unchanged within the same conf object.
*/
public static Hive getWithFastCheck(HiveConf c) throws HiveException {
return getWithFastCheck(c, true);
}
/**
* Same as {@link #get(HiveConf)}, except that it checks only the object identity of existing
* MS client, assuming the relevant settings would be unchanged within the same conf object.
*/
public static Hive getWithFastCheck(HiveConf c, boolean doRegisterAllFns) throws HiveException {
return getInternal(c, false, true, doRegisterAllFns);
}
private static Hive getInternal(HiveConf c, boolean needsRefresh, boolean isFastCheck,
boolean doRegisterAllFns) throws HiveException {
Hive db = hiveDB.get();
if (db == null || !db.isCurrentUserOwner() || needsRefresh
|| (c != null && !isCompatible(db, c, isFastCheck))) {
if (db != null) {
LOG.debug("Creating new db. db = " + db + ", needsRefresh = " + needsRefresh +
", db.isCurrentUserOwner = " + db.isCurrentUserOwner());
closeCurrent();
}
db = create(c, doRegisterAllFns);
}
if (c != null) {
db.conf = c;
}
return db;
}
private static Hive create(HiveConf c, boolean doRegisterAllFns) throws HiveException {
if (c == null) {
c = createHiveConf();
}
c.set("fs.scheme.class", "dfs");
Hive newdb = new Hive(c, doRegisterAllFns);
hiveDB.set(newdb);
return newdb;
}
private static HiveConf createHiveConf() {
SessionState session = SessionState.get();
return (session == null) ? new HiveConf(Hive.class) : session.getConf();
}
private static boolean isCompatible(Hive db, HiveConf c, boolean isFastCheck) {
if (isFastCheck) {
return (db.metaStoreClient == null || db.metaStoreClient.isSameConfObj(c))
&& (db.syncMetaStoreClient == null || db.syncMetaStoreClient.isSameConfObj(c));
} else {
return (db.metaStoreClient == null || db.metaStoreClient.isCompatibleWith(c))
&& (db.syncMetaStoreClient == null || db.syncMetaStoreClient.isCompatibleWith(c));
}
}
private boolean isCurrentUserOwner() throws HiveException {
try {
return owner == null || owner.equals(UserGroupInformation.getCurrentUser());
} catch(IOException e) {
throw new HiveException("Error getting current user: " + e.getMessage(), e);
}
}
public static Hive getThreadLocal() {
return hiveDB.get();
}
public static Hive get() throws HiveException {
return get(true);
}
public static Hive get(boolean doRegisterAllFns) throws HiveException {
return getInternal(null, false, false, doRegisterAllFns);
}
/**
   * Gets a connection to the metastore; see {@link #get(HiveConf)} for details.
   *
   * @param c
   *          new conf
   * @param needsRefresh
   *          if true, a new connection is created
* @return The connection to the metastore
* @throws HiveException
*/
public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException {
return getInternal(c, needsRefresh, false, true);
}
public static void set(Hive hive) {
hiveDB.set(hive);
}
public static void closeCurrent() {
hiveDB.remove();
}
/**
   * Private constructor; instances are obtained through the static get methods.
   *
   * @param c the configuration to use
   *
*/
private Hive(HiveConf c, boolean doRegisterAllFns) throws HiveException {
conf = c;
if (doRegisterAllFns) {
registerAllFunctionsOnce();
}
}
/**
* GC is attempting to destroy the object.
   * No one references this Hive anymore, so the HMS connection held by this object can be closed.
* @throws Throwable
*/
@Override
protected void finalize() throws Throwable {
close(true);
super.finalize();
}
/**
* Marks if the given Hive object is allowed to close metastore connections.
* @param allowClose
*/
public void setAllowClose(boolean allowClose) {
isAllowClose = allowClose;
}
/**
* Gets the allowClose flag which determines if it is allowed to close metastore connections.
* @return allowClose flag
*/
public boolean allowClose() {
return isAllowClose;
}
/**
   * Closes the connection to the metastore for the calling thread, if allowed to close.
* @param forceClose - Override the isAllowClose flag to forcefully close the MS connections.
*/
public void close(boolean forceClose) {
if (allowClose() || forceClose) {
LOG.debug("Closing current thread's connection to Hive Metastore.");
if (metaStoreClient != null) {
metaStoreClient.close();
metaStoreClient = null;
}
// syncMetaStoreClient is wrapped on metaStoreClient. So, it is enough to close it once.
syncMetaStoreClient = null;
if (owner != null) {
owner = null;
}
}
}
/**
* Create a database
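   *
   * A hypothetical sketch (the database fields are examples):
   * <pre>{@code
   *   Database db = new Database("analytics", "demo database", null, null);
   *   hive.createDatabase(db, true); // no-op if it already exists
   * }</pre>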
* @param db
   * @param ifNotExist if true, ignore AlreadyExistsException
* @throws AlreadyExistsException
* @throws HiveException
*/
public void createDatabase(Database db, boolean ifNotExist)
throws AlreadyExistsException, HiveException {
try {
getMSC().createDatabase(db);
} catch (AlreadyExistsException e) {
if (!ifNotExist) {
throw e;
}
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Create a Database. Raise an error if a database with the same name already exists.
* @param db
* @throws AlreadyExistsException
* @throws HiveException
*/
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
createDatabase(db, false);
}
/**
* Drop a database.
* @param name
* @throws NoSuchObjectException
* @throws HiveException
* @see org.apache.hadoop.hive.metastore.HiveMetaStoreClient#dropDatabase(java.lang.String)
*/
public void dropDatabase(String name) throws HiveException, NoSuchObjectException {
dropDatabase(name, true, false, false);
}
/**
* Drop a database
* @param name
* @param deleteData
* @param ignoreUnknownDb if true, will ignore NoSuchObjectException
* @throws HiveException
* @throws NoSuchObjectException
*/
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
throws HiveException, NoSuchObjectException {
dropDatabase(name, deleteData, ignoreUnknownDb, false);
}
/**
* Drop a database
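   *
   * A hypothetical sketch: drop {@code tmp_db} together with its tables,
   * ignoring the call if the database is absent.
   * <pre>{@code
   *   hive.dropDatabase("tmp_db", true, true, true);
   * }</pre>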
* @param name
* @param deleteData
* @param ignoreUnknownDb if true, will ignore NoSuchObjectException
   * @param cascade if true, delete the tables in the database as well. Otherwise, the
   *                call will fail if any table still exists.
* @throws HiveException
* @throws NoSuchObjectException
*/
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
throws HiveException, NoSuchObjectException {
try {
getMSC().dropDatabase(name, deleteData, ignoreUnknownDb, cascade);
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
   * Creates the table metadata and the directory for the table data
*
* @param tableName
* name of the table
* @param columns
* list of fields of the table
* @param partCols
* partition keys of the table
* @param fileInputFormat
* Class of the input format of the table data file
* @param fileOutputFormat
* Class of the output format of the table data file
* @throws HiveException
* thrown if the args are invalid or if the metadata or the data
* directory couldn't be created
*/
public void createTable(String tableName, List<String> columns,
List<String> partCols, Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat) throws HiveException {
this.createTable(tableName, columns, partCols, fileInputFormat,
fileOutputFormat, -1, null);
}
/**
   * Creates the table metadata and the directory for the table data
*
* @param tableName
* name of the table
* @param columns
* list of fields of the table
* @param partCols
* partition keys of the table
* @param fileInputFormat
* Class of the input format of the table data file
* @param fileOutputFormat
* Class of the output format of the table data file
* @param bucketCount
* number of buckets that each partition (or the table itself) should
* be divided into
* @throws HiveException
* thrown if the args are invalid or if the metadata or the data
* directory couldn't be created
*/
public void createTable(String tableName, List<String> columns,
List<String> partCols, Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols)
throws HiveException {
createTable(tableName, columns, partCols, fileInputFormat, fileOutputFormat, bucketCount,
bucketCols, null);
}
/**
   * Creates the table metadata and the directory for the table data
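   *
   * A minimal, hypothetical sketch (the names and formats are examples):
   * <pre>{@code
   *   hive.createTable("web_logs",
   *       Arrays.asList("ip", "url"),    // string columns
   *       Arrays.asList("ds"),           // partition key
   *       org.apache.hadoop.mapred.TextInputFormat.class,
   *       org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat.class,
   *       -1, null, null);               // no bucketing, no extra parameters
   * }</pre>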
* @param tableName table name
* @param columns list of fields of the table
* @param partCols partition keys of the table
* @param fileInputFormat Class of the input format of the table data file
* @param fileOutputFormat Class of the output format of the table data file
* @param bucketCount number of buckets that each partition (or the table itself) should be
* divided into
* @param bucketCols Bucket columns
* @param parameters Parameters for the table
* @throws HiveException
*/
public void createTable(String tableName, List<String> columns, List<String> partCols,
Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols,
Map<String, String> parameters) throws HiveException {
if (columns == null) {
throw new HiveException("columns not specified for table " + tableName);
}
Table tbl = newTable(tableName);
tbl.setInputFormatClass(fileInputFormat.getName());
tbl.setOutputFormatClass(fileOutputFormat.getName());
for (String col : columns) {
FieldSchema field = new FieldSchema(col, STRING_TYPE_NAME, "default");
tbl.getCols().add(field);
}
if (partCols != null) {
for (String partCol : partCols) {
FieldSchema part = new FieldSchema();
part.setName(partCol);
part.setType(STRING_TYPE_NAME); // default partition key
tbl.getPartCols().add(part);
}
}
tbl.setSerializationLib(LazySimpleSerDe.class.getName());
tbl.setNumBuckets(bucketCount);
tbl.setBucketCols(bucketCols);
if (parameters != null) {
tbl.setParameters(parameters);
}
createTable(tbl);
}
public void alterTable(Table newTbl, boolean cascade, EnvironmentContext environmentContext,
boolean transactional) throws HiveException {
alterTable(newTbl.getCatName(), newTbl.getDbName(),
newTbl.getTableName(), newTbl, cascade, environmentContext, transactional);
}
/**
* Updates the existing table metadata with the new metadata.
*
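   * A hypothetical sketch (the table name and property key are examples):
   * <pre>{@code
   *   Table t = hive.getTable("db.tbl");
   *   t.setProperty("note", "updated");
   *   hive.alterTable("db.tbl", t, null, true);
   * }</pre>
   *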
   * @param fullyQlfdTblName
   *          name of the existing table
   * @param newTbl
   *          new table object; its name may be the old name
   * @param transactional
   *          whether to generate and save a table snapshot into the metastore
* @throws HiveException
*/
public void alterTable(String fullyQlfdTblName, Table newTbl, EnvironmentContext environmentContext,
boolean transactional)
throws HiveException {
String[] names = Utilities.getDbTableName(fullyQlfdTblName);
alterTable(null, names[0], names[1], newTbl, false, environmentContext, transactional);
}
public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional)
throws HiveException {
String[] names = Utilities.getDbTableName(fullyQlfdTblName);
alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional);
}
public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional, long writeId)
throws HiveException {
String[] names = Utilities.getDbTableName(fullyQlfdTblName);
alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional,
writeId);
}
public void alterTable(String catName, String dbName, String tblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional) throws HiveException {
alterTable(catName, dbName, tblName, newTbl, cascade, environmentContext, transactional, 0);
}
public void alterTable(String catName, String dbName, String tblName, Table newTbl, boolean cascade,
EnvironmentContext environmentContext, boolean transactional, long replWriteId)
throws HiveException {
if (catName == null) {
catName = getDefaultCatalog(conf);
}
try {
// Remove the DDL_TIME so it gets refreshed
if (newTbl.getParameters() != null) {
newTbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
newTbl.checkValidity(conf);
if (environmentContext == null) {
environmentContext = new EnvironmentContext();
}
if (cascade) {
environmentContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE);
}
// Take a table snapshot and set it to newTbl.
AcidUtils.TableSnapshot tableSnapshot = null;
if (transactional) {
if (replWriteId > 0) {
// We need a valid writeId list for a transactional table modification. During
// replication we do not have a valid writeId list which was used to modify the table
// on the source. But we know for sure that the writeId associated with it was valid
// then (otherwise modification would have failed on the source). So use a valid
// transaction list with only that writeId.
ValidWriteIdList writeIds = new ValidReaderWriteIdList(TableName.getDbTable(dbName, tblName),
new long[0], new BitSet(),
replWriteId);
tableSnapshot = new TableSnapshot(replWriteId, writeIds.writeToString());
} else {
// Make sure we pass in the names, so we can get the correct snapshot for rename table.
tableSnapshot = AcidUtils.getTableSnapshot(conf, newTbl, dbName, tblName, true);
}
if (tableSnapshot != null) {
newTbl.getTTable().setWriteId(tableSnapshot.getWriteId());
} else {
LOG.warn("Cannot get a table snapshot for " + tblName);
}
}
// Why is alter_partitions synchronized while this isn't?
getMSC().alter_table(
catName, dbName, tblName, newTbl.getTTable(), environmentContext,
tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList());
} catch (MetaException e) {
throw new HiveException("Unable to alter table. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to alter table. " + e.getMessage(), e);
}
}
public void updateCreationMetadata(String dbName, String tableName, CreationMetadata cm)
throws HiveException {
try {
getMSC().updateCreationMetadata(dbName, tableName, cm);
} catch (TException e) {
throw new HiveException("Unable to update creation metadata " + e.getMessage(), e);
}
}
/**
* Updates the existing partition metadata with the new metadata.
*
* @param tblName
* name of the existing table
* @param newPart
* new partition
* @throws InvalidOperationException
   *           if the changes in metadata are not acceptable
* @throws HiveException
*/
@Deprecated
public void alterPartition(String tblName, Partition newPart,
EnvironmentContext environmentContext, boolean transactional)
throws InvalidOperationException, HiveException {
String[] names = Utilities.getDbTableName(tblName);
alterPartition(null, names[0], names[1], newPart, environmentContext, transactional);
}
/**
* Updates the existing partition metadata with the new metadata.
*
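   * A hypothetical sketch (the lookup and names are examples):
   * <pre>{@code
   *   Partition p = hive.getPartition(tbl, partSpec, false);
   *   p.setLocation(newLocation);
   *   hive.alterPartition(null, "db", "tbl", p, null, true);
   * }</pre>
   *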
* @param dbName
   *          name of the existing table's database
* @param tblName
* name of the existing table
* @param newPart
* new partition
* @param environmentContext
* environment context for the method
* @param transactional
* indicates this call is for transaction stats
* @throws InvalidOperationException
   *           if the changes in metadata are not acceptable
* @throws HiveException
*/
public void alterPartition(String catName, String dbName, String tblName, Partition newPart,
EnvironmentContext environmentContext, boolean transactional)
throws InvalidOperationException, HiveException {
try {
if (catName == null) {
catName = getDefaultCatalog(conf);
}
validatePartition(newPart);
String location = newPart.getLocation();
if (location != null) {
location = Utilities.getQualifiedPath(conf, new Path(location));
newPart.setLocation(location);
}
if (environmentContext == null) {
environmentContext = new EnvironmentContext();
}
AcidUtils.TableSnapshot tableSnapshot = null;
if (transactional) {
tableSnapshot = AcidUtils.getTableSnapshot(conf, newPart.getTable(), true);
if (tableSnapshot != null) {
newPart.getTPartition().setWriteId(tableSnapshot.getWriteId());
} else {
LOG.warn("Cannot get a table snapshot for " + tblName);
}
}
getSynchronizedMSC().alter_partition(catName,
dbName, tblName, newPart.getTPartition(), environmentContext,
tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList());
} catch (MetaException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
}
}
private void validatePartition(Partition newPart) throws HiveException {
// Remove the DDL time so that it gets refreshed
if (newPart.getParameters() != null) {
newPart.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
newPart.checkValidity();
}
/**
   * Updates the existing partitions' metadata with the new metadata.
*
* @param tblName
* name of the existing table
* @param newParts
* new partitions
* @param transactional
   *          whether to generate and save a table snapshot into the metastore
* @throws InvalidOperationException
   *           if the changes in metadata are not acceptable
* @throws HiveException
*/
public void alterPartitions(String tblName, List<Partition> newParts,
EnvironmentContext environmentContext, boolean transactional)
throws InvalidOperationException, HiveException {
String[] names = Utilities.getDbTableName(tblName);
List<org.apache.hadoop.hive.metastore.api.Partition> newTParts =
new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();
try {
AcidUtils.TableSnapshot tableSnapshot = null;
if (transactional) {
tableSnapshot = AcidUtils.getTableSnapshot(conf, newParts.get(0).getTable(), true);
}
// Remove the DDL time so that it gets refreshed
for (Partition tmpPart: newParts) {
if (tmpPart.getParameters() != null) {
tmpPart.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
String location = tmpPart.getLocation();
if (location != null) {
location = Utilities.getQualifiedPath(conf, new Path(location));
tmpPart.setLocation(location);
}
newTParts.add(tmpPart.getTPartition());
}
getMSC().alter_partitions(names[0], names[1], newTParts, environmentContext,
tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null,
tableSnapshot != null ? tableSnapshot.getWriteId() : -1);
} catch (MetaException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to alter partition. " + e.getMessage(), e);
}
}
/**
   * Renames an old partition to a new partition.
*
* @param tbl
* existing table
* @param oldPartSpec
* spec of old partition
* @param newPart
* new partition
* @throws HiveException
*/
public void renamePartition(Table tbl, Map<String, String> oldPartSpec, Partition newPart,
long replWriteId)
throws HiveException {
try {
Map<String, String> newPartSpec = newPart.getSpec();
if (oldPartSpec.keySet().size() != tbl.getPartCols().size()
|| newPartSpec.keySet().size() != tbl.getPartCols().size()) {
        throw new HiveException("Unable to rename partition: the number of partition columns doesn't match.");
}
if (!oldPartSpec.keySet().equals(newPartSpec.keySet())){
        throw new HiveException("Unable to rename partition: the old and new partition columns don't match.");
}
List<String> pvals = new ArrayList<String>();
for (FieldSchema field : tbl.getPartCols()) {
String val = oldPartSpec.get(field.getName());
if (val == null || val.length() == 0) {
throw new HiveException("get partition: Value for key "
+ field.getName() + " is null or empty");
        } else {
pvals.add(val);
}
}
String validWriteIds = null;
if (AcidUtils.isTransactionalTable(tbl)) {
TableSnapshot tableSnapshot;
if (replWriteId > 0) {
// We need a valid writeId list for a transactional table modification. During
// replication we do not have a valid writeId list which was used to modify the table
// on the source. But we know for sure that the writeId associated with it was valid
// then (otherwise modification would have failed on the source). So use a valid
// transaction list with only that writeId.
ValidWriteIdList writeIds = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(),
tbl.getTableName()), new long[0], new BitSet(), replWriteId);
tableSnapshot = new TableSnapshot(replWriteId, writeIds.writeToString());
} else {
// Set table snapshot to api.Table to make it persistent.
tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
}
if (tableSnapshot != null) {
newPart.getTPartition().setWriteId(tableSnapshot.getWriteId());
validWriteIds = tableSnapshot.getValidWriteIdList();
}
}
getMSC().renamePartition(tbl.getCatName(), tbl.getDbName(), tbl.getTableName(), pvals,
newPart.getTPartition(), validWriteIds);
} catch (InvalidOperationException e){
throw new HiveException("Unable to rename partition. " + e.getMessage(), e);
} catch (MetaException e) {
throw new HiveException("Unable to rename partition. " + e.getMessage(), e);
} catch (TException e) {
throw new HiveException("Unable to rename partition. " + e.getMessage(), e);
}
}
// TODO: this whole path won't work with catalogs
public void alterDatabase(String dbName, Database db)
throws HiveException {
try {
getMSC().alterDatabase(dbName, db);
} catch (MetaException e) {
throw new HiveException("Unable to alter database " + dbName + ". " + e.getMessage(), e);
} catch (NoSuchObjectException e) {
      throw new HiveException("Database " + dbName + " does not exist.", e);
} catch (TException e) {
throw new HiveException("Unable to alter database " + dbName + ". " + e.getMessage(), e);
}
}
/**
   * Creates the table from the given object.
*
* @param tbl
* a table object
* @throws HiveException
*/
public void createTable(Table tbl) throws HiveException {
createTable(tbl, false);
}
// TODO: from here down dozens of methods do not support catalog. I got tired marking them.
/**
* Creates the table with the given objects. It takes additional arguments for
* primary keys and foreign keys associated with the table.
*
* @param tbl
* a table object
* @param ifNotExists
* if true, ignore AlreadyExistsException
* @param primaryKeys
* primary key columns associated with the table
* @param foreignKeys
* foreign key columns associated with the table
* @param uniqueConstraints
* UNIQUE constraints associated with the table
* @param notNullConstraints
* NOT NULL constraints associated with the table
* @param defaultConstraints
* DEFAULT constraints associated with the table
* @param checkConstraints
* CHECK constraints associated with the table
* @throws HiveException
*/
public void createTable(Table tbl, boolean ifNotExists,
List<SQLPrimaryKey> primaryKeys,
List<SQLForeignKey> foreignKeys,
List<SQLUniqueConstraint> uniqueConstraints,
List<SQLNotNullConstraint> notNullConstraints,
List<SQLDefaultConstraint> defaultConstraints,
List<SQLCheckConstraint> checkConstraints)
throws HiveException {
try {
if (tbl.getDbName() == null || "".equals(tbl.getDbName().trim())) {
tbl.setDbName(SessionState.get().getCurrentDatabase());
}
if (tbl.getCols().size() == 0 || tbl.getSd().getColsSize() == 0) {
tbl.setFields(HiveMetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(),
tbl.getDeserializer()));
}
tbl.checkValidity(conf);
if (tbl.getParameters() != null) {
tbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
}
org.apache.hadoop.hive.metastore.api.Table tTbl = tbl.getTTable();
PrincipalPrivilegeSet principalPrivs = new PrincipalPrivilegeSet();
SessionState ss = SessionState.get();
if (ss != null) {
CreateTableAutomaticGrant grants = ss.getCreateTableGrants();
if (grants != null) {
principalPrivs.setUserPrivileges(grants.getUserGrants());
principalPrivs.setGroupPrivileges(grants.getGroupGrants());
principalPrivs.setRolePrivileges(grants.getRoleGrants());
tTbl.setPrivileges(principalPrivs);
}
}
// Set table snapshot to api.Table to make it persistent. A transactional table being
// replicated may have a valid write Id copied from the source. Use that instead of
// crafting one on the replica.
if (tTbl.getWriteId() <= 0) {
TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
if (tableSnapshot != null) {
tbl.getTTable().setWriteId(tableSnapshot.getWriteId());
}
}
if (primaryKeys == null && foreignKeys == null
&& uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null
&& checkConstraints == null) {
getMSC().createTable(tTbl);
} else {
getMSC().createTableWithConstraints(tTbl, primaryKeys, foreignKeys,
uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
}
} catch (AlreadyExistsException e) {
if (!ifNotExists) {
throw new HiveException(e);
}
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
createTable(tbl, ifNotExists, null, null, null, null,
null, null);
}
public static List<FieldSchema> getFieldsFromDeserializerForMsStorage(
Table tbl, Deserializer deserializer) throws SerDeException, MetaException {
List<FieldSchema> schema = HiveMetaStoreUtils.getFieldsFromDeserializer(
tbl.getTableName(), deserializer);
for (FieldSchema field : schema) {
field.setType(MetaStoreUtils.TYPE_FROM_DESERIALIZER);
}
return schema;
}
/**
   * Drops table along with the data in it. If the table doesn't exist then it
   * is a no-op. If the ifPurge option is specified, the data is removed from
   * the warehouse bypassing the trash.
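   *
   * A hypothetical sketch: purge {@code db.tbl}, skipping the trash.
   * <pre>{@code
   *   hive.dropTable("db.tbl", true);
   * }</pre>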
*
* @param tableName
* table to drop
* @param ifPurge
* completely purge the table (skipping trash) while removing data from warehouse
* @throws HiveException
* thrown if the drop fails
*/
public void dropTable(String tableName, boolean ifPurge) throws HiveException {
String[] names = Utilities.getDbTableName(tableName);
dropTable(names[0], names[1], true, true, ifPurge);
}
/**
* Drops table along with the data in it. If the table doesn't exist then it
* is a no-op
*
* @param tableName
* table to drop
* @throws HiveException
* thrown if the drop fails
*/
public void dropTable(String tableName) throws HiveException {
dropTable(tableName, false);
}
/**
* Drops table along with the data in it. If the table doesn't exist then it
* is a no-op
*
* @param dbName
* database where the table lives
* @param tableName
* table to drop
* @throws HiveException
* thrown if the drop fails
*/
public void dropTable(String dbName, String tableName) throws HiveException {
dropTable(dbName, tableName, true, true, false);
}
/**
* Drops the table.
*
* @param dbName
* @param tableName
* @param deleteData
* deletes the underlying data along with metadata
* @param ignoreUnknownTab
* an exception is thrown if this is false and the table doesn't exist
* @throws HiveException
*/
public void dropTable(String dbName, String tableName, boolean deleteData,
boolean ignoreUnknownTab) throws HiveException {
dropTable(dbName, tableName, deleteData, ignoreUnknownTab, false);
}
/**
* Drops the table.
*
* @param dbName
* @param tableName
* @param deleteData
* deletes the underlying data along with metadata
* @param ignoreUnknownTab
* an exception is thrown if this is false and the table doesn't exist
* @param ifPurge
* completely purge the table skipping trash while removing data from warehouse
* @throws HiveException
*/
public void dropTable(String dbName, String tableName, boolean deleteData,
boolean ignoreUnknownTab, boolean ifPurge) throws HiveException {
try {
getMSC().dropTable(dbName, tableName, deleteData, ignoreUnknownTab, ifPurge);
} catch (NoSuchObjectException e) {
if (!ignoreUnknownTab) {
throw new HiveException(e);
}
} catch (MetaException e) {
int idx = ExceptionUtils.indexOfType(e, SQLIntegrityConstraintViolationException.class);
if (idx != -1 && ExceptionUtils.getThrowables(e)[idx].getMessage().contains("MV_TABLES_USED")) {
throw new HiveException("Cannot drop table since it is used by at least one materialized view definition. " +
"Please drop any materialized view that uses the table before dropping it", e);
}
throw new HiveException(e);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
   * Truncates the table/partition as per the specification; the data files are simply trashed.
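   *
   * A hypothetical sketch: truncate a single partition of a table.
   * <pre>{@code
   *   hive.truncateTable("db.tbl", ImmutableMap.of("ds", "2020-01-01"), 0L);
   * }</pre>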
*
* @param dbDotTableName
* name of the table
* @throws HiveException
*/
public void truncateTable(String dbDotTableName, Map<String, String> partSpec, Long writeId) throws HiveException {
try {
Table table = getTable(dbDotTableName, true);
AcidUtils.TableSnapshot snapshot = null;
if (AcidUtils.isTransactionalTable(table)) {
if (writeId <= 0) {
snapshot = AcidUtils.getTableSnapshot(conf, table, true);
} else {
String fullTableName = getFullTableName(table.getDbName(), table.getTableName());
ValidWriteIdList writeIdList = getMSC().getValidWriteIds(fullTableName, writeId);
snapshot = new TableSnapshot(writeId, writeIdList.writeToString());
}
}
// TODO: APIs with catalog names
List<String> partNames = ((null == partSpec)
? null : getPartitionNames(table.getDbName(), table.getTableName(), partSpec, (short) -1));
if (snapshot == null) {
getMSC().truncateTable(table.getDbName(), table.getTableName(), partNames);
} else {
getMSC().truncateTable(table.getDbName(), table.getTableName(), partNames,
snapshot.getValidWriteIdList(), snapshot.getWriteId());
}
} catch (Exception e) {
throw new HiveException(e);
}
}
public HiveConf getConf() {
    return conf;
}
/**
* Returns metadata for the table named tableName
* @param tableName the name of the table
* @return the table metadata
* @throws HiveException if there's an internal error or if the
* table doesn't exist
*/
public Table getTable(final String tableName) throws HiveException {
return this.getTable(tableName, true);
}
/**
* Returns metadata for the table named tableName
* @param tableName the name of the table
   * @param throwException controls whether an exception is thrown or a null is returned
* @return the table metadata
* @throws HiveException if there's an internal error or if the
* table doesn't exist
*/
public Table getTable(final String tableName, boolean throwException) throws HiveException {
String[] names = Utilities.getDbTableName(tableName);
return this.getTable(names[0], names[1], throwException);
}
/**
* Returns metadata of the table
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
* @return the table
* @exception HiveException
* if there's an internal error or if the table doesn't exist
*/
public Table getTable(final String dbName, final String tableName) throws HiveException {
// TODO: catalog... etc everywhere
if (tableName.contains(".")) {
String[] names = Utilities.getDbTableName(tableName);
return this.getTable(names[0], names[1], true);
} else {
return this.getTable(dbName, tableName, true);
}
}
/**
* Returns metadata of the table
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
* @param throwException
   *          controls whether an exception is thrown or a null is returned
* @return the table or if throwException is false a null value.
* @throws HiveException
*/
public Table getTable(final String dbName, final String tableName,
boolean throwException) throws HiveException {
return this.getTable(dbName, tableName, throwException, false);
}
/**
* Returns metadata of the table
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
* @param throwException
   *          controls whether an exception is thrown or a null is returned
   * @param checkTransactional
   *          checks whether the table's metadata stats are valid for (i.e.,
   *          compliant with the snapshot isolation of) the current transaction.
* @return the table or if throwException is false a null value.
* @throws HiveException
*/
public Table getTable(final String dbName, final String tableName, boolean throwException,
boolean checkTransactional) throws HiveException {
return getTable(dbName, tableName, throwException, checkTransactional, false);
}
/**
* Returns metadata of the table.
*
* @param dbName
* the name of the database
* @param tableName
* the name of the table
* @param throwException
   *          controls whether an exception is thrown or a null is returned
   * @param checkTransactional
   *          checks whether the table's metadata stats are valid for (i.e.,
   *          compliant with the snapshot isolation of) the current transaction.
* @param getColumnStats
* get column statistics if available
* @return the table or if throwException is false a null value.
* @throws HiveException
*/
public Table getTable(final String dbName, final String tableName, boolean throwException,
boolean checkTransactional, boolean getColumnStats) throws HiveException {
    if (tableName == null || tableName.isEmpty()) {
      throw new HiveException("Table name cannot be null or empty");
    }
// Get the table from metastore
org.apache.hadoop.hive.metastore.api.Table tTable = null;
try {
// Note: this is currently called w/true from StatsOptimizer only.
if (checkTransactional) {
ValidWriteIdList validWriteIdList = null;
long txnId = SessionState.get().getTxnMgr() != null ?
SessionState.get().getTxnMgr().getCurrentTxnId() : 0;
if (txnId > 0) {
validWriteIdList = AcidUtils.getTableValidWriteIdListWithTxnList(conf,
dbName, tableName);
}
tTable = getMSC().getTable(getDefaultCatalog(conf), dbName, tableName,
validWriteIdList != null ? validWriteIdList.toString() : null, getColumnStats);
} else {
tTable = getMSC().getTable(dbName, tableName, getColumnStats);
}
} catch (NoSuchObjectException e) {
if (throwException) {
throw new InvalidTableException(tableName);
}
return null;
} catch (Exception e) {
throw new HiveException("Unable to fetch table " + tableName + ". " + e.getMessage(), e);
}
// For non-views, we need to do some extra fixes
if (!TableType.VIRTUAL_VIEW.toString().equals(tTable.getTableType())) {
// Fix the non-printable chars
Map<String, String> parameters = tTable.getSd().getParameters();
      String sf = parameters != null ? parameters.get(SERIALIZATION_FORMAT) : null;
if (sf != null) {
char[] b = sf.toCharArray();
if ((b.length == 1) && (b[0] < 10)) { // ^A, ^B, ^C, ^D, \t
parameters.put(SERIALIZATION_FORMAT, Integer.toString(b[0]));
}
}
      // Use LazySimpleSerDe for MetadataTypedColumnsetSerDe.
      // NOTE: LazySimpleSerDe does not support tables with a single column
      // of type "array<string>". This happens when the table is created using
      // an earlier version of Hive.
if (org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class
.getName().equals(
tTable.getSd().getSerdeInfo().getSerializationLib())
&& tTable.getSd().getColsSize() > 0
&& tTable.getSd().getCols().get(0).getType().indexOf('<') == -1) {
tTable.getSd().getSerdeInfo().setSerializationLib(
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
}
}
return new Table(tTable);
}
/**
* Get all table names for the current database.
* @return List of table names
* @throws HiveException
*/
public List<String> getAllTables() throws HiveException {
return getTablesByType(SessionState.get().getCurrentDatabase(), null, null);
}
/**
* Get all table names for the specified database.
* @param dbName
* @return List of table names
* @throws HiveException
*/
public List<String> getAllTables(String dbName) throws HiveException {
return getTablesByType(dbName, ".*", null);
}
/**
* Get all tables for the specified database.
* @param dbName
* @return List of all tables
* @throws HiveException
*/
public List<Table> getAllTableObjects(String dbName) throws HiveException {
return getTableObjects(dbName, ".*", null);
}
/**
* Get tables for the specified database that match the provided regex pattern and table type.
* @param dbName
* @param pattern
* @param tableType
* @return List of table objects
* @throws HiveException
*/
public List<Table> getTableObjectsByType(String dbName, String pattern, TableType tableType) throws HiveException {
return getTableObjects(dbName, pattern, tableType);
}
/**
* Get all materialized view names for the specified database.
* @param dbName
* @return List of materialized view table names
* @throws HiveException
*/
public List<String> getAllMaterializedViews(String dbName) throws HiveException {
return getTablesByType(dbName, ".*", TableType.MATERIALIZED_VIEW);
}
/**
* Get all materialized views for the specified database.
* @param dbName
* @return List of materialized view table objects
* @throws HiveException
*/
public List<Table> getAllMaterializedViewObjects(String dbName) throws HiveException {
return getTableObjects(dbName, ".*", TableType.MATERIALIZED_VIEW);
}
/**
* Get materialized views for the specified database that match the provided regex pattern.
* @param dbName
* @param pattern
* @return List of materialized view table objects
* @throws HiveException
*/
public List<Table> getMaterializedViewObjectsByPattern(String dbName, String pattern) throws HiveException {
return getTableObjects(dbName, pattern, TableType.MATERIALIZED_VIEW);
}
private List<Table> getTableObjects(String dbName, String pattern, TableType tableType) throws HiveException {
try {
return Lists.transform(getMSC().getTableObjectsByName(dbName, getTablesByType(dbName, pattern, tableType)),
new com.google.common.base.Function<org.apache.hadoop.hive.metastore.api.Table, Table>() {
@Override
public Table apply(org.apache.hadoop.hive.metastore.api.Table table) {
return new Table(table);
}
}
);
} catch (Exception e) {
throw new HiveException(e);
}
}
private List<Table> getTableObjects(String dbName, List<String> tableNames) throws HiveException {
try {
return Lists.transform(getMSC().getTableObjectsByName(dbName, tableNames),
new com.google.common.base.Function<org.apache.hadoop.hive.metastore.api.Table, Table>() {
@Override
public Table apply(org.apache.hadoop.hive.metastore.api.Table table) {
return new Table(table);
}
}
);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Returns all existing tables from default database which match the given
* pattern. The matching occurs as per Java regular expressions
*
* @param tablePattern
   *          Java regex pattern
* @return list of table names
* @throws HiveException
*/
public List<String> getTablesByPattern(String tablePattern) throws HiveException {
return getTablesByType(SessionState.get().getCurrentDatabase(),
tablePattern, null);
}
/**
* Returns all existing tables from the specified database which match the given
* pattern. The matching occurs as per Java regular expressions.
* @param dbName
* @param tablePattern
* @return list of table names
* @throws HiveException
*/
public List<String> getTablesByPattern(String dbName, String tablePattern) throws HiveException {
return getTablesByType(dbName, tablePattern, null);
}
/**
* Returns all existing tables from the given database which match the given
* pattern. The matching occurs as per Java regular expressions
*
* @param database
* the database name
* @param tablePattern
   *          Java regex pattern
* @return list of table names
* @throws HiveException
*/
public List<String> getTablesForDb(String database, String tablePattern)
throws HiveException {
return getTablesByType(database, tablePattern, null);
}
/**
* Returns all existing tables of a type (VIRTUAL_VIEW|EXTERNAL_TABLE|MANAGED_TABLE) from the specified
* database which match the given pattern. The matching occurs as per Java regular expressions.
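   *
   * A hypothetical sketch: list external tables in {@code sales} whose names start with {@code ext_}.
   * <pre>{@code
   *   List<String> names = hive.getTablesByType("sales", "ext_.*", TableType.EXTERNAL_TABLE);
   * }</pre>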
   * @param dbName Database name to find the tables in. If null, uses the current database in this session.
   * @param pattern A pattern to match the table names against. If null, returns all names from this DB.
   * @param type The type of tables to return. VIRTUAL_VIEW for views. If null, returns all tables and views.
* @return list of table names that match the pattern.
* @throws HiveException
*/
public List<String> getTablesByType(String dbName, String pattern, TableType type)
throws HiveException {
if (dbName == null) {
dbName = SessionState.get().getCurrentDatabase();
}
try {
if (type != null) {
if (pattern != null) {
return getMSC().getTables(dbName, pattern, type);
} else {
return getMSC().getTables(dbName, ".*", type);
}
} else {
if (pattern != null) {
return getMSC().getTables(dbName, pattern);
} else {
return getMSC().getTables(dbName, ".*");
}
}
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get the materialized views that have been enabled for rewriting from the
* metastore. If the materialized view is in the cache, we do not need to
* parse it to generate a logical plan for the rewriting. Instead, we
* return the version present in the cache. Further, information provided
* by the invalidation cache is useful to know whether a materialized view
* can be used for rewriting or not.
*
* @return the list of materialized views available for rewriting
* @throws HiveException
*/
public List<RelOptMaterialization> getAllValidMaterializedViews(List<String> tablesUsed, boolean forceMVContentsUpToDate,
HiveTxnManager txnMgr) throws HiveException {
// Final result
List<RelOptMaterialization> result = new ArrayList<>();
try {
// From metastore (for security)
List<Table> materializedViews = getAllMaterializedViewObjectsForRewriting();
if (materializedViews.isEmpty()) {
// Bail out: empty list
return result;
}
result.addAll(getValidMaterializedViews(materializedViews,
tablesUsed, forceMVContentsUpToDate, txnMgr));
return result;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<RelOptMaterialization> getValidMaterializedView(String dbName, String materializedViewName,
List<String> tablesUsed, boolean forceMVContentsUpToDate, HiveTxnManager txnMgr) throws HiveException {
return getValidMaterializedViews(ImmutableList.of(getTable(dbName, materializedViewName)),
tablesUsed, forceMVContentsUpToDate, txnMgr);
}
private List<RelOptMaterialization> getValidMaterializedViews(List<Table> materializedViewTables,
List<String> tablesUsed, boolean forceMVContentsUpToDate, HiveTxnManager txnMgr) throws HiveException {
final String validTxnsList = conf.get(ValidTxnList.VALID_TXNS_KEY);
final ValidTxnWriteIdList currentTxnWriteIds = txnMgr.getValidWriteIds(tablesUsed, validTxnsList);
final boolean tryIncrementalRewriting =
HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_INCREMENTAL);
final boolean tryIncrementalRebuild =
HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REBUILD_INCREMENTAL);
final long defaultTimeWindow =
HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW,
TimeUnit.MILLISECONDS);
try {
// Final result
List<RelOptMaterialization> result = new ArrayList<>();
for (Table materializedViewTable : materializedViewTables) {
final Boolean outdated = isOutdatedMaterializedView(materializedViewTable, currentTxnWriteIds,
defaultTimeWindow, tablesUsed, forceMVContentsUpToDate);
if (outdated == null) {
continue;
}
final CreationMetadata creationMetadata = materializedViewTable.getCreationMetadata();
if (outdated) {
// The MV is outdated, see whether we should consider it for rewriting or not
boolean ignore = false;
if (forceMVContentsUpToDate && !tryIncrementalRebuild) {
// We will not try partial rewriting for rebuild if incremental rebuild is disabled
ignore = true;
} else if (!forceMVContentsUpToDate && !tryIncrementalRewriting) {
// We will not try partial rewriting for non-rebuild if incremental rewriting is disabled
ignore = true;
} else {
// Obtain additional information if we should try incremental rewriting / rebuild
// We will not try partial rewriting if there were update/delete operations on source tables
Materialization invalidationInfo = getMSC().getMaterializationInvalidationInfo(
creationMetadata, conf.get(ValidTxnList.VALID_TXNS_KEY));
ignore = invalidationInfo == null || invalidationInfo.isSourceTablesUpdateDeleteModified();
}
if (ignore) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as its contents are outdated");
continue;
}
}
// It passed the test, load
RelOptMaterialization materialization =
HiveMaterializedViewsRegistry.get().getRewritingMaterializedView(
materializedViewTable.getDbName(), materializedViewTable.getTableName());
if (materialization != null) {
RelNode viewScan = materialization.tableRel;
RelOptHiveTable cachedMaterializedViewTable;
if (viewScan instanceof Project) {
// There is a Project on top (due to nullability)
cachedMaterializedViewTable = (RelOptHiveTable) viewScan.getInput(0).getTable();
} else {
cachedMaterializedViewTable = (RelOptHiveTable) viewScan.getTable();
}
if (cachedMaterializedViewTable.getHiveTableMD().getCreateTime() ==
materializedViewTable.getCreateTime()) {
// It is in the cache and up to date
if (outdated) {
// We will rewrite it to include the filters on transaction list
// so we can produce partial rewritings
materialization = augmentMaterializationWithTimeInformation(
materialization, validTxnsList, new ValidTxnWriteIdList(
creationMetadata.getValidTxnList()));
}
result.add(materialization);
continue;
}
}
// It was not present in the cache (maybe because it was added by another HS2)
// or it is not up to date.
if (HiveMaterializedViewsRegistry.get().isInitialized()) {
// But the registry was fully initialized, thus we need to add it
if (LOG.isDebugEnabled()) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" was not in the cache");
}
materialization = HiveMaterializedViewsRegistry.get().createMaterializedView(
conf, materializedViewTable);
if (materialization != null) {
if (outdated) {
// We will rewrite it to include the filters on transaction list
// so we can produce partial rewritings
materialization = augmentMaterializationWithTimeInformation(
materialization, validTxnsList, new ValidTxnWriteIdList(
creationMetadata.getValidTxnList()));
}
result.add(materialization);
}
} else {
// Otherwise the registry has not been initialized, skip for the time being
          if (LOG.isInfoEnabled()) {
            LOG.info("Materialized view " + materializedViewTable.getFullyQualifiedName() + " was skipped "
                + "because the cache has not been loaded yet");
}
}
}
return result;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Utility method that returns whether a materialized view is outdated (true), not outdated
 * (false), or it cannot be determined (null). The latter case may happen, e.g., when the
* materialized view definition uses external tables.
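 * The result is used as a tri-state: callers skip the materialized view entirely
 * when {@code null} is returned, and only augment it with transaction-based
 * filters when {@code true} is returned.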
*/
public static Boolean isOutdatedMaterializedView(Table materializedViewTable, final ValidTxnWriteIdList currentTxnWriteIds,
long defaultTimeWindow, List<String> tablesUsed, boolean forceMVContentsUpToDate) {
// Check if materialization defined its own invalidation time window
String timeWindowString = materializedViewTable.getProperty(MATERIALIZED_VIEW_REWRITING_TIME_WINDOW);
long timeWindow = org.apache.commons.lang.StringUtils.isEmpty(timeWindowString) ? defaultTimeWindow :
HiveConf.toTime(timeWindowString,
HiveConf.getDefaultTimeUnit(HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW),
TimeUnit.MILLISECONDS);
CreationMetadata creationMetadata = materializedViewTable.getCreationMetadata();
boolean outdated = false;
if (timeWindow < 0L) {
      // We only consider the materialized view to be outdated if forceMVContentsUpToDate = true,
      // i.e., if it is a rebuild. Otherwise, it passed the test and we use it as it is.
outdated = forceMVContentsUpToDate;
} else {
// Check whether the materialized view is invalidated
if (forceMVContentsUpToDate || timeWindow == 0L || creationMetadata.getMaterializationTime() < System.currentTimeMillis() - timeWindow) {
if (currentTxnWriteIds == null) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as we could not obtain current txn ids");
return null;
}
if (creationMetadata.getValidTxnList() == null ||
creationMetadata.getValidTxnList().isEmpty()) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as we could not obtain materialization txn ids");
return null;
}
boolean ignore = false;
ValidTxnWriteIdList mvTxnWriteIds = new ValidTxnWriteIdList(
creationMetadata.getValidTxnList());
for (String qName : tablesUsed) {
// Note. If the materialized view does not contain a table that is contained in the query,
// we do not need to check whether that specific table is outdated or not. If a rewriting
// is produced in those cases, it is because that additional table is joined with the
// existing tables with an append-columns only join, i.e., PK-FK + not null.
if (!creationMetadata.getTablesUsed().contains(qName)) {
continue;
}
ValidWriteIdList tableCurrentWriteIds = currentTxnWriteIds.getTableValidWriteIdList(qName);
if (tableCurrentWriteIds == null) {
// Uses non-transactional table, cannot be considered
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as it is outdated and cannot be considered for " +
" rewriting because it uses non-transactional table " + qName);
ignore = true;
break;
}
ValidWriteIdList tableWriteIds = mvTxnWriteIds.getTableValidWriteIdList(qName);
if (tableWriteIds == null) {
// This should not happen, but we ignore for safety
LOG.warn("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" ignored for rewriting as details about txn ids for table " + qName +
" could not be found in " + mvTxnWriteIds);
ignore = true;
break;
}
if (!outdated && !TxnIdUtils.checkEquivalentWriteIds(tableCurrentWriteIds, tableWriteIds)) {
LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
" contents are outdated");
outdated = true;
}
}
if (ignore) {
return null;
}
}
}
return outdated;
}
/**
* Method to enrich the materialization query contained in the input with
* its invalidation.
*/
private static RelOptMaterialization augmentMaterializationWithTimeInformation(
RelOptMaterialization materialization, String validTxnsList,
ValidTxnWriteIdList materializationTxnList) throws LockException {
// Extract tables used by the query which will in turn be used to generate
// the corresponding txn write ids
List<String> tablesUsed = new ArrayList<>();
new RelVisitor() {
@Override
public void visit(RelNode node, int ordinal, RelNode parent) {
if (node instanceof TableScan) {
TableScan ts = (TableScan) node;
tablesUsed.add(((RelOptHiveTable) ts.getTable()).getHiveTableMD().getFullyQualifiedName());
}
super.visit(node, ordinal, parent);
}
}.go(materialization.queryRel);
ValidTxnWriteIdList currentTxnList =
SessionState.get().getTxnMgr().getValidWriteIds(tablesUsed, validTxnsList);
// Augment
final RexBuilder rexBuilder = materialization.queryRel.getCluster().getRexBuilder();
final HepProgramBuilder augmentMaterializationProgram = new HepProgramBuilder()
.addRuleInstance(new HiveAugmentMaterializationRule(rexBuilder, currentTxnList, materializationTxnList));
final HepPlanner augmentMaterializationPlanner = new HepPlanner(
augmentMaterializationProgram.build());
augmentMaterializationPlanner.setRoot(materialization.queryRel);
final RelNode modifiedQueryRel = augmentMaterializationPlanner.findBestExp();
return new RelOptMaterialization(materialization.tableRel, modifiedQueryRel,
null, materialization.qualifiedTableName);
}
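  /**
   * Get all materialized view table objects that have rewriting enabled.
   *
   * @return List of materialized view table objects
   * @throws HiveException
   */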
public List<Table> getAllMaterializedViewObjectsForRewriting() throws HiveException {
try {
      return Lists.transform(getMSC().getAllMaterializedViewObjectsForRewriting(), Table::new);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
 * Get the names of materialized views in the specified database that have rewriting enabled.
 * @param dbName database name
 * @return List of materialized view names
* @throws HiveException
*/
private List<String> getMaterializedViewsForRewriting(String dbName) throws HiveException {
try {
return getMSC().getMaterializedViewsForRewriting(dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all existing database names.
*
* @return List of database names.
* @throws HiveException
*/
public List<String> getAllDatabases() throws HiveException {
try {
return getMSC().getAllDatabases();
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all existing databases that match the given
 * pattern. The matching occurs as per Java regular expressions.
*
* @param databasePattern
* java re pattern
* @return list of database names
* @throws HiveException
*/
public List<String> getDatabasesByPattern(String databasePattern) throws HiveException {
try {
return getMSC().getDatabases(databasePattern);
} catch (Exception e) {
throw new HiveException(e);
}
}
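  /**
   * @param privileges
   *          a bag of privileges
   * @return true on success
   * @throws HiveException
   */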
public boolean grantPrivileges(PrivilegeBag privileges)
throws HiveException {
try {
return getMSC().grant_privileges(privileges);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
 * @param privileges
 *          a bag of privileges
 * @param grantOption
 *          if true, revoke the grant option for these privileges
* @return true on success
* @throws HiveException
*/
public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
throws HiveException {
try {
return getMSC().revoke_privileges(privileges, grantOption);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Query metadata to see if a database with the given name already exists.
*
* @param dbName
 * @return true if a database with the given name already exists, false if it
 *         does not exist.
* @throws HiveException
*/
public boolean databaseExists(String dbName) throws HiveException {
return getDatabase(dbName) != null;
}
/**
* Get the database by name.
* @param dbName the name of the database.
* @return a Database object if this database exists, null otherwise.
* @throws HiveException
*/
public Database getDatabase(String dbName) throws HiveException {
try {
return getMSC().getDatabase(dbName);
} catch (NoSuchObjectException e) {
return null;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get the database by name.
* @param catName catalog name
* @param dbName the name of the database.
* @return a Database object if this database exists, null otherwise.
* @throws HiveException
*/
public Database getDatabase(String catName, String dbName) throws HiveException {
try {
return getMSC().getDatabase(catName, dbName);
} catch (NoSuchObjectException e) {
return null;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
 * Get the Database object for the current database.
* @return a Database object if this database exists, null otherwise.
* @throws HiveException
*/
public Database getDatabaseCurrent() throws HiveException {
String currentDb = SessionState.get().getCurrentDatabase();
return getDatabase(currentDb);
}
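  /**
   * Build the ACID snapshot for a table. If a positive write ID is supplied, the
   * snapshot is built from that write ID and the table's valid write ID list;
   * otherwise it is obtained from AcidUtils, passing the table names explicitly
   * so that renames resolve to the correct snapshot.
   */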
private TableSnapshot getTableSnapshot(Table tbl, Long writeId) throws LockException {
TableSnapshot tableSnapshot = null;
if ((writeId != null) && (writeId > 0)) {
ValidWriteIdList writeIds = AcidUtils.getTableValidWriteIdListWithTxnList(
conf, tbl.getDbName(), tbl.getTableName());
tableSnapshot = new TableSnapshot(writeId, writeIds.writeToString());
} else {
// Make sure we pass in the names, so we can get the correct snapshot for rename table.
tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, tbl.getDbName(), tbl.getTableName(),
true);
}
return tableSnapshot;
}
/**
 * Load a directory into a Hive table partition.
 *  - Alters the existing content of the partition with the contents of loadPath.
 *  - If the partition does not exist, it is created.
 *  - Files in loadPath are moved into Hive, but the directory itself is not removed.
*
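 * <p>Illustrative call only (a sketch; {@code db} is assumed to be this client
 * and {@code tbl}, {@code writeId} and {@code stmtId} to be in scope):
 * <pre>{@code
 * Map<String, String> spec = new LinkedHashMap<>();
 * spec.put("ds", "2020-01-01");
 * Partition p = db.loadPartition(new Path("/tmp/staging"), tbl, spec,
 *     LoadFileType.REPLACE_ALL, true, false, false, false, false, false,
 *     writeId, stmtId, false);
 * }</pre>
 *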
* @param loadPath
* Directory containing files to load into Table
* @param tbl
* name of table to be loaded.
* @param partSpec
* defines which partition needs to be loaded
* @param loadFileType
* if REPLACE_ALL - replace files in the table,
* otherwise add files to table (KEEP_EXISTING, OVERWRITE_EXISTING)
 * @param inheritTableSpecs if true, on [re]creating the partition, take the
 *          location/inputformat/outputformat/serde details from table spec
 * @param inheritLocation
 *          if true, the partition location is derived from the table location
 * @param isSkewedStoreAsSubdir
 *          if true, skewed values are stored in sub-directories
 * @param isSrcLocal
 *          If the source directory is LOCAL
 * @param isAcidIUDoperation
 *          true if this is an ACID operation Insert/Update/Delete operation
 * @param resetStatistics
 *          if true, reset the statistics. If false, do not reset statistics.
 * @param writeId write ID allocated for the current load operation
 * @param stmtId statement ID of the current load statement
 * @param isInsertOverwrite if true, this load is for an INSERT OVERWRITE statement
 * @return Partition object being loaded with data
 */
public Partition loadPartition(Path loadPath, Table tbl, Map<String, String> partSpec,
LoadFileType loadFileType, boolean inheritTableSpecs,
boolean inheritLocation,
boolean isSkewedStoreAsSubdir,
boolean isSrcLocal, boolean isAcidIUDoperation,
boolean resetStatistics, Long writeId,
int stmtId, boolean isInsertOverwrite) throws HiveException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin("MoveTask", PerfLogger.LOAD_PARTITION);
// Get the partition object if it already exists
Partition oldPart = getPartition(tbl, partSpec, false);
boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
// If config is set, table is not temporary and partition being inserted exists, capture
// the list of files added. For not yet existing partitions (insert overwrite to new partition
// or dynamic partition inserts), the add partition event will capture the list of files added.
List<Path> newFiles = Collections.synchronizedList(new ArrayList<>());
Partition newTPart = loadPartitionInternal(loadPath, tbl, partSpec, oldPart,
loadFileType, inheritTableSpecs,
inheritLocation, isSkewedStoreAsSubdir, isSrcLocal, isAcidIUDoperation,
resetStatistics, writeId, stmtId, isInsertOverwrite, isTxnTable, newFiles);
AcidUtils.TableSnapshot tableSnapshot = isTxnTable ? getTableSnapshot(tbl, writeId) : null;
if (tableSnapshot != null) {
newTPart.getTPartition().setWriteId(tableSnapshot.getWriteId());
}
if (oldPart == null) {
addPartitionToMetastore(newTPart, resetStatistics, tbl, tableSnapshot);
// For acid table, add the acid_write event with file list at the time of load itself. But
// it should be done after partition is created.
if (isTxnTable && (null != newFiles)) {
addWriteNotificationLog(tbl, partSpec, newFiles, writeId);
}
} else {
try {
setStatsPropAndAlterPartition(resetStatistics, tbl, newTPart, tableSnapshot);
} catch (TException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.LOAD_PARTITION);
return newTPart;
}
/**
* Move all the files from loadPath into Hive. If the partition
* does not exist - one is created - files in loadPath are moved into Hive. But the
* directory itself is not removed.
*
* @param loadPath
* Directory containing files to load into Table
* @param tbl
* name of table to be loaded.
* @param partSpec
* defines which partition needs to be loaded
* @param oldPart
* already existing partition object, can be null
* @param loadFileType
* if REPLACE_ALL - replace files in the table,
* otherwise add files to table (KEEP_EXISTING, OVERWRITE_EXISTING)
* @param inheritTableSpecs if true, on [re]creating the partition, take the
* location/inputformat/outputformat/serde details from table spec
* @param inheritLocation
* if true, partition path is generated from table
* @param isSkewedStoreAsSubdir
* if true, skewed is stored as sub-directory
* @param isSrcLocal
* If the source directory is LOCAL
* @param isAcidIUDoperation
* true if this is an ACID operation Insert/Update/Delete operation
* @param resetStatistics
* if true, reset the statistics. Do not reset statistics if false.
* @param writeId
* write ID allocated for the current load operation
* @param stmtId
* statement ID of the current load statement
* @param isInsertOverwrite
* @param isTxnTable
*
* @return Partition object being loaded with data
* @throws HiveException
*/
private Partition loadPartitionInternal(Path loadPath, Table tbl, Map<String, String> partSpec,
Partition oldPart, LoadFileType loadFileType, boolean inheritTableSpecs,
boolean inheritLocation, boolean isSkewedStoreAsSubdir,
boolean isSrcLocal, boolean isAcidIUDoperation, boolean resetStatistics,
Long writeId, int stmtId, boolean isInsertOverwrite,
boolean isTxnTable, List<Path> newFiles) throws HiveException {
Path tblDataLocationPath = tbl.getDataLocation();
boolean isMmTableWrite = AcidUtils.isInsertOnlyTable(tbl.getParameters());
assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
try {
PerfLogger perfLogger = SessionState.getPerfLogger();
/**
       * Move files before creating the partition since downstream processes
* check for existence of partition in metadata before accessing the data.
* If partition is created before data is moved, downstream waiting
* processes might move forward with partial data
*/
Path oldPartPath = (oldPart != null) ? oldPart.getDataLocation() : null;
Path newPartPath = null;
if (inheritLocation) {
newPartPath = genPartPathFromTable(tbl, partSpec, tblDataLocationPath);
if(oldPart != null) {
/*
* If we are moving the partition across filesystem boundaries
* inherit from the table properties. Otherwise (same filesystem) use the
* original partition location.
*
* See: HIVE-1707 and HIVE-2117 for background
*/
FileSystem oldPartPathFS = oldPartPath.getFileSystem(getConf());
FileSystem loadPathFS = loadPath.getFileSystem(getConf());
if (FileUtils.equalsFileSystem(oldPartPathFS,loadPathFS)) {
newPartPath = oldPartPath;
}
}
} else {
newPartPath = oldPartPath == null
? genPartPathFromTable(tbl, partSpec, tblDataLocationPath) : oldPartPath;
}
perfLogger.PerfLogBegin("MoveTask", PerfLogger.FILE_MOVES);
      // Note: the stats for ACID tables do not have any coordination with either the Hive ACID
      // logic (txn commits, timeouts, etc.) or the lower-level sync in the metastore pertaining
      // to ACID updates. So they are not themselves ACID.
// Note: this assumes both paths are qualified; which they are, currently.
if (((isMmTableWrite || isFullAcidTable) && loadPath.equals(newPartPath)) ||
(loadFileType == LoadFileType.IGNORE)) {
// MM insert query, move itself is a no-op.
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("not moving " + loadPath + " to " + newPartPath + " (MM)");
}
assert !isAcidIUDoperation;
if (newFiles != null) {
          listFilesCreatedByQuery(loadPath, writeId, stmtId, isMmTableWrite && isInsertOverwrite, newFiles);
}
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("maybe deleting stuff from " + oldPartPath
+ " (new " + newPartPath + ") for replace");
}
} else {
// Either a non-MM query, or a load into MM table from an external source.
Path destPath = newPartPath;
if (isMmTableWrite) {
assert !isAcidIUDoperation;
// We will load into MM directory, and hide previous directories if needed.
destPath = new Path(destPath, isInsertOverwrite
? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
}
if (!isAcidIUDoperation && isFullAcidTable) {
destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
}
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("moving " + loadPath + " to " + destPath);
}
boolean isManaged = tbl.getTableType() == TableType.MANAGED_TABLE;
// TODO: why is "&& !isAcidIUDoperation" needed here?
if (!isTxnTable && ((loadFileType == LoadFileType.REPLACE_ALL) || (oldPart == null && !isAcidIUDoperation))) {
//for fullAcid tables we don't delete files for commands with OVERWRITE - we create a new
// base_x. (there is Insert Overwrite and Load Data Overwrite)
boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
boolean needRecycle = !tbl.isTemporary()
&& ReplChangeManager.isSourceOfReplication(Hive.get().getDatabase(tbl.getDbName()));
replaceFiles(tbl.getPath(), loadPath, destPath, oldPartPath, getConf(), isSrcLocal,
isAutoPurge, newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, needRecycle, isManaged, isInsertOverwrite);
} else {
FileSystem fs = destPath.getFileSystem(conf);
copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
(loadFileType == LoadFileType.OVERWRITE_EXISTING), newFiles,
tbl.getNumBuckets() > 0, isFullAcidTable, isManaged);
}
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.FILE_MOVES);
Partition newTPart = oldPart != null ? oldPart : new Partition(tbl, partSpec, newPartPath);
alterPartitionSpecInMemory(tbl, partSpec, newTPart.getTPartition(), inheritTableSpecs, newPartPath.toString());
validatePartition(newTPart);
// If config is set, table is not temporary and partition being inserted exists, capture
// the list of files added. For not yet existing partitions (insert overwrite to new partition
// or dynamic partition inserts), the add partition event will capture the list of files added.
// Generate an insert event only if inserting into an existing partition
// When inserting into a new partition, the add partition event takes care of insert event
if ((null != oldPart) && (null != newFiles)) {
if (isTxnTable) {
addWriteNotificationLog(tbl, partSpec, newFiles, writeId);
} else {
fireInsertEvent(tbl, partSpec, (loadFileType == LoadFileType.REPLACE_ALL), newFiles);
}
} else {
LOG.debug("No new files were created, and is not a replace, or we're inserting into a "
+ "partition that does not exist yet. Skipping generating INSERT event.");
}
// column stats will be inaccurate
if (resetStatistics) {
StatsSetupConst.clearColumnStatsState(newTPart.getParameters());
}
// recreate the partition if it existed before
if (isSkewedStoreAsSubdir) {
org.apache.hadoop.hive.metastore.api.Partition newCreatedTpart = newTPart.getTPartition();
SkewedInfo skewedInfo = newCreatedTpart.getSd().getSkewedInfo();
/* Construct list bucketing location mappings from sub-directory name. */
Map<List<String>, String> skewedColValueLocationMaps = constructListBucketingLocationMap(
newPartPath, skewedInfo);
/* Add list bucketing location mappings. */
skewedInfo.setSkewedColValueLocationMaps(skewedColValueLocationMaps);
newCreatedTpart.getSd().setSkewedInfo(skewedInfo);
}
if (!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
StatsSetupConst.setBasicStatsState(newTPart.getParameters(), StatsSetupConst.FALSE);
}
if (oldPart == null) {
newTPart.getTPartition().setParameters(new HashMap<String,String>());
if (this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
StatsSetupConst.setStatsStateForCreateTable(newTPart.getParameters(),
MetaStoreUtils.getColumnNames(tbl.getCols()), StatsSetupConst.TRUE);
}
      // Note: we are creating a brand new partition, so this is going to be valid for ACID.
List<FileStatus> filesForStats = null;
if (isTxnTable) {
filesForStats = AcidUtils.getAcidFilesForStats(
newTPart.getTable(), newPartPath, conf, null);
} else {
filesForStats = HiveStatsUtils.getFileStatusRecurse(
newPartPath, -1, newPartPath.getFileSystem(conf));
}
if (filesForStats != null) {
MetaStoreServerUtils.populateQuickStats(filesForStats, newTPart.getParameters());
} else {
// The ACID state is probably absent. Warning is logged in the get method.
MetaStoreServerUtils.clearQuickStats(newTPart.getParameters());
}
}
return newTPart;
} catch (IOException | MetaException | InvalidOperationException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
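  /**
   * Add a single new partition to the metastore. If another session created the
   * same partition concurrently, fall back to altering it instead; on any other
   * failure, best-effort delete the partition directory contents before
   * rethrowing.
   */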
private void addPartitionToMetastore(Partition newTPart, boolean resetStatistics,
Table tbl, TableSnapshot tableSnapshot) throws HiveException{
try {
LOG.debug("Adding new partition " + newTPart.getSpec());
getSynchronizedMSC().add_partition(newTPart.getTPartition());
} catch (AlreadyExistsException aee) {
      // Multiple users concurrently issuing insert statements on the same partition has
      // the side effect that some queries may not see a partition at the time they are issued,
      // but will realize the partition is actually there when trying to add it
      // to the metastore, and thus get an AlreadyExistsException because some earlier query
      // just created it (race condition).
// For example, imagine such a table is created:
// create table T (name char(50)) partitioned by (ds string);
// and the following two queries are launched at the same time, from different sessions:
// insert into table T partition (ds) values ('Bob', 'today'); -- creates the partition 'today'
// insert into table T partition (ds) values ('Joe', 'today'); -- will fail with AlreadyExistsException
// In that case, we want to retry with alterPartition.
LOG.debug("Caught AlreadyExistsException, trying to alter partition instead");
try {
setStatsPropAndAlterPartition(resetStatistics, tbl, newTPart, tableSnapshot);
} catch (TException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
} catch (Exception e) {
try {
final FileSystem newPathFileSystem = newTPart.getPartitionPath().getFileSystem(this.getConf());
boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
final FileStatus status = newPathFileSystem.getFileStatus(newTPart.getPartitionPath());
Hive.trashFiles(newPathFileSystem, new FileStatus[]{status}, this.getConf(), isAutoPurge);
} catch (IOException io) {
LOG.error("Could not delete partition directory contents after failed partition creation: ", io);
}
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
private void addPartitionsToMetastore(List<Partition> partitions,
boolean resetStatistics, Table tbl,
List<AcidUtils.TableSnapshot> tableSnapshots)
throws HiveException {
try {
if (partitions.isEmpty() || tableSnapshots.isEmpty()) {
return;
}
if (LOG.isDebugEnabled()) {
        StringBuilder debugMsg = new StringBuilder("Adding new partitions ");
partitions.forEach(partition -> debugMsg.append(partition.getSpec() + " "));
LOG.debug(debugMsg.toString());
}
getSynchronizedMSC().add_partitions(partitions.stream().map(Partition::getTPartition)
.collect(Collectors.toList()));
} catch(AlreadyExistsException aee) {
      // Multiple users concurrently issuing insert statements on the same partition has
      // the side effect that some queries may not see a partition at the time they are issued,
      // but will realize the partition is actually there when trying to add it
      // to the metastore, and thus get an AlreadyExistsException because some earlier query
      // just created it (race condition).
// For example, imagine such a table is created:
// create table T (name char(50)) partitioned by (ds string);
// and the following two queries are launched at the same time, from different sessions:
// insert into table T partition (ds) values ('Bob', 'today'); -- creates the partition 'today'
// insert into table T partition (ds) values ('Joe', 'today'); -- will fail with AlreadyExistsException
// In that case, we want to retry with alterPartition.
LOG.debug("Caught AlreadyExistsException, trying to add partitions one by one.");
assert partitions.size() == tableSnapshots.size();
for (int i = 0; i < partitions.size(); i++) {
addPartitionToMetastore(partitions.get(i), resetStatistics, tbl,
tableSnapshots.get(i));
}
} catch (Exception e) {
try {
for (Partition partition : partitions) {
final FileSystem newPathFileSystem = partition.getPartitionPath().getFileSystem(this.getConf());
boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
final FileStatus status = newPathFileSystem.getFileStatus(partition.getPartitionPath());
Hive.trashFiles(newPathFileSystem, new FileStatus[]{status}, this.getConf(), isAutoPurge);
}
} catch (IOException io) {
LOG.error("Could not delete partition directory contents after failed partition creation: ", io);
}
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
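  /**
   * Generate the default partition path under the table location, qualified with
   * the scheme and authority of the table's data location.
   */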
private static Path genPartPathFromTable(Table tbl, Map<String, String> partSpec,
Path tblDataLocationPath) throws MetaException {
Path partPath = new Path(tbl.getDataLocation(), Warehouse.makePartPath(partSpec));
return new Path(tblDataLocationPath.toUri().getScheme(),
tblDataLocationPath.toUri().getAuthority(), partPath.toUri().getPath());
}
/**
* Load Data commands for fullAcid tables write to base_x (if there is overwrite clause) or
* delta_x_x directory - same as any other Acid write. This method modifies the destPath to add
* this path component.
* @param writeId - write id of the operated table from current transaction (in which this operation is running)
* @param stmtId - see {@link DbTxnManager#getStmtIdAndIncrement()}
* @return appropriately modified path
*/
private Path fixFullAcidPathForLoadData(LoadFileType loadFileType, Path destPath, long writeId, int stmtId, Table tbl) throws HiveException {
switch (loadFileType) {
case REPLACE_ALL:
destPath = new Path(destPath, AcidUtils.baseDir(writeId));
break;
case KEEP_EXISTING:
destPath = new Path(destPath, AcidUtils.deltaSubdir(writeId, writeId, stmtId));
break;
case OVERWRITE_EXISTING:
//should not happen here - this is for replication
default:
throw new IllegalArgumentException("Unexpected " + LoadFileType.class.getName() + " " + loadFileType);
}
try {
FileSystem fs = tbl.getDataLocation().getFileSystem(SessionState.getSessionConf());
if(!FileUtils.mkdir(fs, destPath, conf)) {
        LOG.warn(destPath + " already exists; expected to create it");
}
} catch (IOException e) {
throw new HiveException("load: error while creating " + destPath + ";loadFileType=" + loadFileType, e);
}
return destPath;
}
private boolean areEventsForDmlNeeded(Table tbl, Partition oldPart) {
    // For ACID IUD, adding a partition is a metadata-only operation, so the information about
    // the newly added files needs to be recorded in the TXN_WRITE_NOTIFICATION_LOG table.
return conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML) && !tbl.isTemporary() &&
((null != oldPart) || AcidUtils.isTransactionalTable(tbl));
}
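  /**
   * Recursively collect all files (ignoring directories) under the given ACID
   * directory into {@code newFiles}.
   */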
public static void listFilesInsideAcidDirectory(Path acidDir, FileSystem srcFs, List<Path> newFiles)
throws IOException {
// list out all the files/directory in the path
FileStatus[] acidFiles;
acidFiles = srcFs.listStatus(acidDir);
if (acidFiles == null) {
LOG.debug("No files added by this query in: " + acidDir);
return;
}
LOG.debug("Listing files under " + acidDir);
for (FileStatus acidFile : acidFiles) {
// need to list out only files, ignore folders.
if (!acidFile.isDirectory()) {
newFiles.add(acidFile.getPath());
} else {
listFilesInsideAcidDirectory(acidFile.getPath(), srcFs, newFiles);
}
}
}
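  /**
   * Collect the files written by the current statement, i.e. the contents of the
   * base/delta directory under the load path that corresponds to the given write
   * id and statement id.
   */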
private void listFilesCreatedByQuery(Path loadPath, long writeId, int stmtId,
boolean isInsertOverwrite, List<Path> newFiles) throws HiveException {
Path acidDir = new Path(loadPath, AcidUtils.baseOrDeltaSubdir(isInsertOverwrite, writeId, writeId, stmtId));
try {
FileSystem srcFs = loadPath.getFileSystem(conf);
      if (srcFs.exists(acidDir) && srcFs.isDirectory(acidDir)) {
        // list out all the files in the path
        listFilesInsideAcidDirectory(acidDir, srcFs, newFiles);
      } else {
        LOG.info("Directory does not exist: " + acidDir);
      }
    } catch (IOException e) {
      LOG.error("Error listing files", e);
      throw new HiveException(e);
    }
}
private void setStatsPropAndAlterPartition(boolean resetStatistics, Table tbl,
Partition newTPart, TableSnapshot tableSnapshot) throws TException {
EnvironmentContext ec = new EnvironmentContext();
if (!resetStatistics) {
ec.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
}
LOG.debug("Altering existing partition " + newTPart.getSpec());
getSynchronizedMSC().alter_partition(tbl.getCatName(),
tbl.getDbName(), tbl.getTableName(), newTPart.getTPartition(), new EnvironmentContext(),
tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList());
}
private void setStatsPropAndAlterPartitions(boolean resetStatistics, Table tbl,
List<Partition> partitions,
AcidUtils.TableSnapshot tableSnapshot)
throws TException {
if (partitions.isEmpty()) {
return;
}
EnvironmentContext ec = new EnvironmentContext();
if (!resetStatistics) {
ec.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
}
if (LOG.isDebugEnabled()) {
StringBuilder sb = new StringBuilder("Altering existing partitions ");
partitions.forEach(p -> sb.append(p.getSpec()));
LOG.debug(sb.toString());
}
String validWriteIdList = null;
long writeId = 0L;
if (tableSnapshot != null) {
validWriteIdList = tableSnapshot.getValidWriteIdList();
writeId = tableSnapshot.getWriteId();
}
getSynchronizedMSC().alter_partitions(tbl.getCatName(), tbl.getDbName(), tbl.getTableName(),
partitions.stream().map(Partition::getTPartition).collect(Collectors.toList()),
ec, validWriteIdList, writeId);
}
/**
* Walk through sub-directory tree to construct list bucketing location map.
*
* @param fSta
* @param fSys
* @param skewedColValueLocationMaps
* @param newPartPath
* @param skewedInfo
* @throws IOException
*/
private void walkDirTree(FileStatus fSta, FileSystem fSys,
Map<List<String>, String> skewedColValueLocationMaps, Path newPartPath, SkewedInfo skewedInfo)
throws IOException {
/* Base Case. It's leaf. */
    if (!fSta.isDirectory()) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Processing LB leaf " + fSta.getPath());
}
/* construct one location map if not exists. */
constructOneLBLocationMap(fSta, skewedColValueLocationMaps, newPartPath, skewedInfo);
return;
}
/* dfs. */
FileStatus[] children = fSys.listStatus(fSta.getPath(), FileUtils.HIDDEN_FILES_PATH_FILTER);
if (children != null) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Processing LB dir " + fSta.getPath());
}
for (FileStatus child : children) {
walkDirTree(child, fSys, skewedColValueLocationMaps, newPartPath, skewedInfo);
}
}
}
/**
* Construct a list bucketing location map
* @param fSta
* @param skewedColValueLocationMaps
* @param newPartPath
* @param skewedInfo
*/
private void constructOneLBLocationMap(FileStatus fSta,
Map<List<String>, String> skewedColValueLocationMaps,
Path newPartPath, SkewedInfo skewedInfo) {
Path lbdPath = fSta.getPath().getParent();
List<String> skewedValue = new ArrayList<String>();
String lbDirName = FileUtils.unescapePathName(lbdPath.toString());
String partDirName = FileUtils.unescapePathName(newPartPath.toString());
String lbDirSuffix = lbDirName.replace(partDirName, ""); // TODO: should it rather do a prefix?
if (lbDirSuffix.startsWith(Path.SEPARATOR)) {
lbDirSuffix = lbDirSuffix.substring(1);
}
String[] dirNames = lbDirSuffix.split(Path.SEPARATOR);
int keysFound = 0, dirsToTake = 0;
int colCount = skewedInfo.getSkewedColNames().size();
while (dirsToTake < dirNames.length && keysFound < colCount) {
String dirName = dirNames[dirsToTake++];
// Construct skewed-value to location map except default directory.
      // why? the query logic knows the default-dir structure and doesn't need to get it from the map
if (dirName.equalsIgnoreCase(ListBucketingPrunerUtils.HIVE_LIST_BUCKETING_DEFAULT_DIR_NAME)) {
++keysFound;
} else {
String[] kv = dirName.split("=");
if (kv.length == 2) {
skewedValue.add(kv[1]);
++keysFound;
} else {
// TODO: we should really probably throw. Keep the existing logic for now.
LOG.warn("Skipping unknown directory " + dirName
+ " when expecting LB keys or default directory (from " + lbDirName + ")");
}
}
}
for (int i = 0; i < (dirNames.length - dirsToTake); ++i) {
lbdPath = lbdPath.getParent();
}
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Saving LB location " + lbdPath + " based on "
+ colCount + " keys and " + fSta.getPath());
}
if ((skewedValue.size() > 0) && (skewedValue.size() == colCount)
&& !skewedColValueLocationMaps.containsKey(skewedValue)) {
skewedColValueLocationMaps.put(skewedValue, lbdPath.toString());
}
}
/**
* Construct location map from path
*
* @param newPartPath
* @param skewedInfo
* @return
* @throws IOException
* @throws FileNotFoundException
*/
private Map<List<String>, String> constructListBucketingLocationMap(Path newPartPath,
SkewedInfo skewedInfo) throws IOException, FileNotFoundException {
Map<List<String>, String> skewedColValueLocationMaps = new HashMap<List<String>, String>();
FileSystem fSys = newPartPath.getFileSystem(conf);
walkDirTree(fSys.getFileStatus(newPartPath),
fSys, skewedColValueLocationMaps, newPartPath, skewedInfo);
return skewedColValueLocationMaps;
}
/**
* Get the valid partitions from the path
 * @param numDP number of dynamic partition columns
 * @param numLB number of list bucketing columns
 * @param loadPath
 * @return Set of valid partition paths
* @throws HiveException
*/
private Set<Path> getValidPartitionsInPath(
int numDP, int numLB, Path loadPath, Long writeId, int stmtId,
boolean isMmTable, boolean isInsertOverwrite) throws HiveException {
Set<Path> validPartitions = new HashSet<Path>();
try {
FileSystem fs = loadPath.getFileSystem(conf);
if (!isMmTable) {
List<FileStatus> leafStatus = HiveStatsUtils.getFileStatusRecurse(loadPath, numDP, fs);
// Check for empty partitions
for (FileStatus s : leafStatus) {
if (!s.isDirectory()) {
throw new HiveException("partition " + s.getPath() + " is not a directory!");
}
Path dpPath = s.getPath();
validPartitions.add(dpPath);
}
} else {
// The non-MM path only finds new partitions, as it is looking at the temp path.
// To produce the same effect, we will find all the partitions affected by this txn ID.
// Note: we ignore the statement ID here, because it's currently irrelevant for MoveTask
// where this is used; we always want to load everything; also the only case where
// we have multiple statements anyway is union.
Utilities.FILE_OP_LOGGER.trace(
"Looking for dynamic partitions in {} ({} levels)", loadPath, numDP);
Path[] leafStatus = Utilities.getMmDirectoryCandidates(
fs, loadPath, numDP, null, writeId, -1, conf, isInsertOverwrite);
for (Path p : leafStatus) {
Path dpPath = p.getParent(); // Skip the MM directory that we have found.
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Found DP " + dpPath);
}
validPartitions.add(dpPath);
}
}
} catch (IOException e) {
throw new HiveException(e);
}
int partsToLoad = validPartitions.size();
if (partsToLoad == 0) {
LOG.warn("No partition is generated by dynamic partitioning");
}
if (partsToLoad > conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS)) {
throw new HiveException("Number of dynamic partitions created is " + partsToLoad
+ ", which is more than "
+ conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS)
+". To solve this try to set " + HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS.varname
+ " to at least " + partsToLoad + '.');
}
return validPartitions;
}
/**
 * Given a source directory name of the load path, load all dynamically generated partitions
 * into the specified table and return a map from each full partition spec to the
 * corresponding Partition object.
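 * <p>Illustrative call only (a sketch; {@code db} and {@code stagingPath} are
 * assumed, loading two dynamic partition columns of a non-ACID table):
 * <pre>{@code
 * Map<Map<String, String>, Partition> loaded = db.loadDynamicPartitions(
 *     stagingPath, "default.sales", Collections.emptyMap(),
 *     LoadFileType.KEEP_EXISTING, 2, 0, false, 0L, 0, false,
 *     AcidUtils.Operation.NOT_ACID, false);
 * }</pre>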
* @param loadPath
* @param tableName
* @param partSpec
* @param loadFileType
 * @param numDP number of dynamic partitions
 * @param numLB number of list bucketing columns
 * @param isAcid true if this is an ACID operation
 * @param writeId writeId, can be 0 unless isAcid == true
 * @param stmtId statement ID of the current load statement
 * @param resetStatistics if true, reset statistics. Do not reset statistics otherwise.
 * @param operation ACID operation type
 * @param isInsertOverwrite if true, this load is for an INSERT OVERWRITE statement
* @return partition map details (PartitionSpec and Partition)
* @throws HiveException
*/
public Map<Map<String, String>, Partition> loadDynamicPartitions(final Path loadPath,
final String tableName, final Map<String, String> partSpec, final LoadFileType loadFileType,
final int numDP, final int numLB, final boolean isAcid, final long writeId, final int stmtId,
final boolean resetStatistics, final AcidUtils.Operation operation,
boolean isInsertOverwrite) throws HiveException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin("MoveTask", PerfLogger.LOAD_DYNAMIC_PARTITIONS);
// Get all valid partition paths and existing partitions for them (if any)
final Table tbl = getTable(tableName);
final Set<Path> validPartitions = getValidPartitionsInPath(numDP, numLB, loadPath, writeId, stmtId,
AcidUtils.isInsertOnlyTable(tbl.getParameters()), isInsertOverwrite);
final int partsToLoad = validPartitions.size();
final AtomicInteger partitionsLoaded = new AtomicInteger(0);
final boolean inPlaceEligible = conf.getLong("fs.trash.interval", 0) <= 0
&& InPlaceUpdate.canRenderInPlace(conf) && !SessionState.getConsole().getIsSilent();
final PrintStream ps = (inPlaceEligible) ? SessionState.getConsole().getInfoStream() : null;
final SessionState parentSession = SessionState.get();
List<Callable<Partition>> tasks = Lists.newLinkedList();
final class PartitionDetails {
Map<String, String> fullSpec;
Partition partition;
List<Path> newFiles;
boolean hasOldPartition = false;
AcidUtils.TableSnapshot tableSnapshot;
}
Map<Path, PartitionDetails> partitionDetailsMap =
Collections.synchronizedMap(new LinkedHashMap<>());
// calculate full path spec for each valid partition path
validPartitions.forEach(partPath -> {
Map<String, String> fullPartSpec = Maps.newLinkedHashMap(partSpec);
if (!Warehouse.makeSpecFromName(fullPartSpec, partPath, new HashSet<>(partSpec.keySet()))) {
Utilities.FILE_OP_LOGGER.warn("Ignoring invalid DP directory " + partPath);
} else {
PartitionDetails details = new PartitionDetails();
details.fullSpec = fullPartSpec;
partitionDetailsMap.put(partPath, details);
}
});
// fetch all the partitions matching the part spec using the partition iterable
// this way the maximum batch size configuration parameter is considered
PartitionIterable partitionIterable = new PartitionIterable(Hive.get(), tbl, partSpec,
conf.getInt(MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX.getVarname(), 300));
Iterator<Partition> iterator = partitionIterable.iterator();
// Match valid partition path to partitions
while (iterator.hasNext()) {
Partition partition = iterator.next();
partitionDetailsMap.entrySet().stream()
.filter(entry -> entry.getValue().fullSpec.equals(partition.getSpec()))
.findAny().ifPresent(entry -> {
entry.getValue().partition = partition;
entry.getValue().hasOldPartition = true;
});
}
boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
AcidUtils.TableSnapshot tableSnapshot = isTxnTable ? getTableSnapshot(tbl, writeId) : null;
for (Entry<Path, PartitionDetails> entry : partitionDetailsMap.entrySet()) {
tasks.add(() -> {
PartitionDetails partitionDetails = entry.getValue();
Map<String, String> fullPartSpec = partitionDetails.fullSpec;
try {
SessionState.setCurrentSessionState(parentSession);
LOG.info("New loading path = " + entry.getKey() + " withPartSpec " + fullPartSpec);
List<Path> newFiles = Lists.newArrayList();
Partition oldPartition = partitionDetails.partition;
// load the partition
Partition partition = loadPartitionInternal(entry.getKey(), tbl,
fullPartSpec, oldPartition, loadFileType, true, false, numLB > 0, false, isAcid,
resetStatistics, writeId, stmtId, isInsertOverwrite, isTxnTable, newFiles);
// if the partition already existed before the loading, no need to add it again to the
// metastore
if (tableSnapshot != null) {
partition.getTPartition().setWriteId(tableSnapshot.getWriteId());
}
partitionDetails.tableSnapshot = tableSnapshot;
if (oldPartition == null) {
partitionDetails.newFiles = newFiles;
partitionDetails.partition = partition;
}
if (inPlaceEligible) {
synchronized (ps) {
InPlaceUpdate.rePositionCursor(ps);
partitionsLoaded.incrementAndGet();
InPlaceUpdate.reprintLine(ps, "Loaded : " + partitionsLoaded.get() + "/"
+ partsToLoad + " partitions.");
}
}
return partition;
} catch (Exception e) {
LOG.error("Exception when loading partition with parameters "
+ " partPath=" + entry.getKey() + ", "
+ " table=" + tbl.getTableName() + ", "
+ " partSpec=" + fullPartSpec + ", "
+ " loadFileType=" + loadFileType.toString() + ", "
+ " listBucketingLevel=" + numLB + ", "
+ " isAcid=" + isAcid + ", "
+ " resetStatistics=" + resetStatistics, e);
throw e;
}
});
}
int poolSize = conf.getInt(ConfVars.HIVE_LOAD_DYNAMIC_PARTITIONS_THREAD_COUNT.varname, 1);
ExecutorService executor = Executors.newFixedThreadPool(poolSize,
        new ThreadFactoryBuilder().setDaemon(true).setNameFormat("load-dynamic-partitions-%d").build());
List<Future<Partition>> futures = Lists.newLinkedList();
Map<Map<String, String>, Partition> result = Maps.newLinkedHashMap();
try {
futures = executor.invokeAll(tasks);
LOG.debug("Number of partitionsToAdd to be added is " + futures.size());
for (Future<Partition> future : futures) {
Partition partition = future.get();
result.put(partition.getSpec(), partition);
}
// add new partitions in batch
addPartitionsToMetastore(
partitionDetailsMap.entrySet()
.stream()
.filter(entry -> !entry.getValue().hasOldPartition)
.map(entry -> entry.getValue().partition)
.collect(Collectors.toList()),
resetStatistics,
tbl,
partitionDetailsMap.entrySet()
.stream()
.filter(entry -> !entry.getValue().hasOldPartition)
.map(entry -> entry.getValue().tableSnapshot)
.collect(Collectors.toList()));
// For acid table, add the acid_write event with file list at the time of load itself. But
// it should be done after partition is created.
for (Entry<Path, PartitionDetails> entry : partitionDetailsMap.entrySet()) {
PartitionDetails partitionDetails = entry.getValue();
if (isTxnTable && partitionDetails.newFiles != null) {
addWriteNotificationLog(tbl, partitionDetails.fullSpec, partitionDetails.newFiles, writeId);
}
}
setStatsPropAndAlterPartitions(resetStatistics, tbl,
partitionDetailsMap.entrySet().stream()
.filter(entry -> entry.getValue().hasOldPartition)
.map(entry -> entry.getValue().partition)
.collect(Collectors.toList()), tableSnapshot);
} catch (InterruptedException | ExecutionException e) {
throw new HiveException("Exception when loading " + validPartitions.size()
+ " in table " + tbl.getTableName()
+ " with loadPath=" + loadPath);
} catch (TException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
} catch (Exception e) {
      StringBuilder logMsg = new StringBuilder();
      logMsg.append("Exception when loading partitions with parameters ");
logMsg.append("partPaths=");
validPartitions.forEach(path -> logMsg.append(path + ", "));
logMsg.append("table=" + tbl.getTableName() + ", ").
append("partSpec=" + partSpec + ", ").
append("loadFileType=" + loadFileType.toString() + ", ").
append("listBucketingLevel=" + numLB + ", ").
append("isAcid=" + isAcid + ", ").
append("resetStatistics=" + resetStatistics);
LOG.error(logMsg.toString(), e);
throw e;
} finally {
LOG.debug("Cancelling " + futures.size() + " dynamic loading tasks");
executor.shutdownNow();
}
try {
if (isAcid) {
List<String> partNames =
result.values().stream().map(Partition::getName).collect(Collectors.toList());
getMSC().addDynamicPartitions(parentSession.getTxnMgr().getCurrentTxnId(), writeId,
tbl.getDbName(), tbl.getTableName(), partNames,
AcidUtils.toDataOperationType(operation));
}
LOG.info("Loaded " + result.size() + "partitionsToAdd");
perfLogger.PerfLogEnd("MoveTask", PerfLogger.LOAD_DYNAMIC_PARTITIONS);
return result;
} catch (TException te) {
LOG.error(StringUtils.stringifyException(te));
throw new HiveException("Exception updating metastore for acid table "
+ tableName + " with partitions " + result.values(), te);
}
}
/**
 * Load a directory into a Hive table.
 *  - Alters the existing content of the table with the contents of loadPath.
 *  - If the table does not exist, an exception is thrown.
 *  - Files in loadPath are moved into Hive, but the directory itself is not removed.
*
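 * <p>Illustrative call only (a sketch; {@code db}, {@code writeId} and
 * {@code stmtId} are assumed):
 * <pre>{@code
 * db.loadTable(new Path("/tmp/staging"), "default.t", LoadFileType.REPLACE_ALL,
 *     false, false, false, false, writeId, stmtId, false);
 * }</pre>
 *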
* @param loadPath
* Directory containing files to load into Table
* @param tableName
* name of table to be loaded.
* @param loadFileType
* if REPLACE_ALL - replace files in the table,
* otherwise add files to table (KEEP_EXISTING, OVERWRITE_EXISTING)
* @param isSrcLocal
* If the source directory is LOCAL
* @param isSkewedStoreAsSubdir
* if list bucketing enabled
* @param isAcidIUDoperation true if this is an ACID based Insert [overwrite]/update/delete
* @param resetStatistics should reset statistics as part of move.
* @param writeId write ID allocated for the current load operation
 * @param stmtId statement ID of the current load statement
 * @param isInsertOverwrite if true, this load is for an INSERT OVERWRITE statement
*/
public void loadTable(Path loadPath, String tableName, LoadFileType loadFileType, boolean isSrcLocal,
boolean isSkewedStoreAsSubdir, boolean isAcidIUDoperation, boolean resetStatistics,
Long writeId, int stmtId, boolean isInsertOverwrite) throws HiveException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin("MoveTask", PerfLogger.LOAD_TABLE);
List<Path> newFiles = null;
Table tbl = getTable(tableName);
assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
boolean isMmTable = AcidUtils.isInsertOnlyTable(tbl);
boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
if (conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML) && !tbl.isTemporary()) {
newFiles = Collections.synchronizedList(new ArrayList<Path>());
}
// Note: this assumes both paths are qualified; which they are, currently.
if (((isMmTable || isFullAcidTable) && loadPath.equals(tbl.getPath())) || (loadFileType == LoadFileType.IGNORE)) {
/**
* some operations on Transactional tables (e.g. Import) write directly to the final location
* and avoid the 'move' operation. Since MoveTask does other things, setting 'loadPath' to be
* the table/partition path indicates that the 'file move' part of MoveTask is not needed.
*/
if (Utilities.FILE_OP_LOGGER.isDebugEnabled()) {
Utilities.FILE_OP_LOGGER.debug(
"not moving " + loadPath + " to " + tbl.getPath() + " (MM)");
}
//new files list is required only for event notification.
if (newFiles != null) {
        listFilesCreatedByQuery(loadPath, writeId, stmtId, isMmTable && isInsertOverwrite, newFiles);
}
} else {
// Either a non-MM query, or a load into MM table from an external source.
Path tblPath = tbl.getPath();
Path destPath = tblPath;
if (isMmTable) {
assert !isAcidIUDoperation;
// We will load into MM directory, and hide previous directories if needed.
destPath = new Path(destPath, isInsertOverwrite
? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
}
if (!isAcidIUDoperation && isFullAcidTable) {
destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
}
Utilities.FILE_OP_LOGGER.debug("moving " + loadPath + " to " + tblPath
+ " (replace = " + loadFileType + ")");
perfLogger.PerfLogBegin("MoveTask", PerfLogger.FILE_MOVES);
boolean isManaged = tbl.getTableType() == TableType.MANAGED_TABLE;
if (loadFileType == LoadFileType.REPLACE_ALL && !isTxnTable) {
//for fullAcid we don't want to delete any files even for OVERWRITE see HIVE-14988/HIVE-17361
boolean isAutopurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
boolean needRecycle = !tbl.isTemporary()
&& ReplChangeManager.isSourceOfReplication(Hive.get().getDatabase(tbl.getDbName()));
replaceFiles(tblPath, loadPath, destPath, tblPath, conf, isSrcLocal, isAutopurge,
newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, needRecycle, isManaged, isInsertOverwrite);
} else {
try {
FileSystem fs = tbl.getDataLocation().getFileSystem(conf);
copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
loadFileType == LoadFileType.OVERWRITE_EXISTING, newFiles,
tbl.getNumBuckets() > 0, isFullAcidTable, isManaged);
} catch (IOException e) {
throw new HiveException("addFiles: filesystem error in check phase", e);
}
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.FILE_MOVES);
}
if (!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
LOG.debug("setting table statistics false for " + tbl.getDbName() + "." + tbl.getTableName());
StatsSetupConst.setBasicStatsState(tbl.getParameters(), StatsSetupConst.FALSE);
}
//column stats will be inaccurate
if (resetStatistics) {
LOG.debug("Clearing table statistics for " + tbl.getDbName() + "." + tbl.getTableName());
StatsSetupConst.clearColumnStatsState(tbl.getParameters());
}
try {
if (isSkewedStoreAsSubdir) {
SkewedInfo skewedInfo = tbl.getSkewedInfo();
// Construct list bucketing location mappings from sub-directory name.
Map<List<String>, String> skewedColValueLocationMaps = constructListBucketingLocationMap(
tbl.getPath(), skewedInfo);
// Add list bucketing location mappings.
skewedInfo.setSkewedColValueLocationMaps(skewedColValueLocationMaps);
}
} catch (IOException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
EnvironmentContext environmentContext = null;
if (!resetStatistics) {
environmentContext = new EnvironmentContext();
environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
}
alterTable(tbl.getCatName(), tbl.getDbName(), tbl.getTableName(), tbl, false, environmentContext,
true, ((writeId == null) ? 0 : writeId));
if (AcidUtils.isTransactionalTable(tbl)) {
addWriteNotificationLog(tbl, null, newFiles, writeId);
} else {
fireInsertEvent(tbl, null, (loadFileType == LoadFileType.REPLACE_ALL), newFiles);
}
perfLogger.PerfLogEnd("MoveTask", PerfLogger.LOAD_TABLE);
}
/**
* Creates a partition.
*
* @param tbl
* table for which partition needs to be created
* @param partSpec
* partition keys and their values
* @return created partition object
* @throws HiveException
* if table doesn't exist or partition already exists
*/
@VisibleForTesting
public Partition createPartition(Table tbl, Map<String, String> partSpec) throws HiveException {
try {
org.apache.hadoop.hive.metastore.api.Partition part =
Partition.createMetaPartitionObject(tbl, partSpec, null);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
part.setWriteId(tableSnapshot != null ? tableSnapshot.getWriteId() : 0);
return new Partition(tbl, getMSC().add_partition(part));
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
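  /**
   * Add all partitions described by the given descriptor. Outside of replication
   * scope the partitions are added in one batch; within replication scope an
   * insert-if-not-exists, alter-if-exists strategy is followed, honoring the
   * replacement policy of the replication spec.
   */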
public List<Partition> createPartitions(AddPartitionDesc addPartitionDesc) throws HiveException {
// TODO: catalog name everywhere in this method
Table tbl = getTable(addPartitionDesc.getDbName(), addPartitionDesc.getTableName());
int size = addPartitionDesc.getPartitionCount();
List<org.apache.hadoop.hive.metastore.api.Partition> in =
new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>(size);
long writeId;
String validWriteIdList;
// In case of replication, get the writeId from the source and use valid write Id list
// for replication.
if (addPartitionDesc.getReplicationSpec().isInReplicationScope() &&
addPartitionDesc.getPartition(0).getWriteId() > 0) {
writeId = addPartitionDesc.getPartition(0).getWriteId();
// We need a valid writeId list for a transactional change. During replication we do not
// have a valid writeId list which was used for this on the source. But we know for sure
// that the writeId associated with it was valid then (otherwise the change would have
// failed on the source). So use a valid transaction list with only that writeId.
validWriteIdList = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(),
tbl.getTableName()),
new long[0], new BitSet(), writeId).writeToString();
} else {
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
if (tableSnapshot != null && tableSnapshot.getWriteId() > 0) {
writeId = tableSnapshot.getWriteId();
validWriteIdList = tableSnapshot.getValidWriteIdList();
} else {
writeId = -1;
validWriteIdList = null;
}
}
for (int i = 0; i < size; ++i) {
org.apache.hadoop.hive.metastore.api.Partition tmpPart =
convertAddSpecToMetaPartition(tbl, addPartitionDesc.getPartition(i), conf);
if (tmpPart != null && writeId > 0) {
tmpPart.setWriteId(writeId);
}
in.add(tmpPart);
}
List<Partition> out = new ArrayList<Partition>();
try {
if (!addPartitionDesc.getReplicationSpec().isInReplicationScope()){
// TODO: normally, the result is not necessary; might make sense to pass false
for (org.apache.hadoop.hive.metastore.api.Partition outPart
: getMSC().add_partitions(in, addPartitionDesc.isIfNotExists(), true)) {
out.add(new Partition(tbl, outPart));
}
} else {
        // For replication add-ptns, we need to follow an insert-if-not-exists, alter-if-exists scenario.
// TODO : ideally, we should push this mechanism to the metastore, because, otherwise, we have
// no choice but to iterate over the partitions here.
List<org.apache.hadoop.hive.metastore.api.Partition> partsToAdd = new ArrayList<>();
List<org.apache.hadoop.hive.metastore.api.Partition> partsToAlter = new ArrayList<>();
List<String> part_names = new ArrayList<>();
for (org.apache.hadoop.hive.metastore.api.Partition p: in){
part_names.add(Warehouse.makePartName(tbl.getPartitionKeys(), p.getValues()));
try {
org.apache.hadoop.hive.metastore.api.Partition ptn =
getMSC().getPartition(addPartitionDesc.getDbName(), addPartitionDesc.getTableName(), p.getValues());
if (addPartitionDesc.getReplicationSpec().allowReplacementInto(ptn.getParameters())){
ReplicationSpec.copyLastReplId(ptn.getParameters(), p.getParameters());
partsToAlter.add(p);
} // else ptn already exists, but we do nothing with it.
} catch (NoSuchObjectException nsoe){
// if the object does not exist, we want to add it.
partsToAdd.add(p);
}
}
for (org.apache.hadoop.hive.metastore.api.Partition outPart
: getMSC().add_partitions(partsToAdd, addPartitionDesc.isIfNotExists(), true)) {
out.add(new Partition(tbl, outPart));
}
EnvironmentContext ec = new EnvironmentContext();
        // In case of replication, statistics are obtained from the source, so do not update
        // them on the replica.
ec.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
getMSC().alter_partitions(addPartitionDesc.getDbName(), addPartitionDesc.getTableName(),
partsToAlter, ec, validWriteIdList, writeId);
        for (org.apache.hadoop.hive.metastore.api.Partition outPart :
            getMSC().getPartitionsByNames(addPartitionDesc.getDbName(), addPartitionDesc.getTableName(), part_names)) {
          out.add(new Partition(tbl, outPart));
}
}
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return out;
}
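  /**
   * Convert a single partition descriptor into a metastore partition object,
   * qualifying its location against the table path when a relative location is
   * given, and copying over any storage descriptor overrides from the spec.
   */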
public static org.apache.hadoop.hive.metastore.api.Partition convertAddSpecToMetaPartition(
Table tbl, AddPartitionDesc.OnePartitionDesc addSpec, final HiveConf conf) throws HiveException {
Path location = addSpec.getLocation() != null
? new Path(tbl.getPath(), addSpec.getLocation()) : null;
if (location != null) {
      // Ensure that it is a fully qualified path (in most cases it will be, since tbl.getPath() is fully qualified)
location = new Path(Utilities.getQualifiedPath(conf, location));
}
org.apache.hadoop.hive.metastore.api.Partition part =
Partition.createMetaPartitionObject(tbl, addSpec.getPartSpec(), location);
if (addSpec.getPartParams() != null) {
part.setParameters(addSpec.getPartParams());
}
if (addSpec.getInputFormat() != null) {
part.getSd().setInputFormat(addSpec.getInputFormat());
}
if (addSpec.getOutputFormat() != null) {
part.getSd().setOutputFormat(addSpec.getOutputFormat());
}
if (addSpec.getNumBuckets() != -1) {
part.getSd().setNumBuckets(addSpec.getNumBuckets());
}
if (addSpec.getCols() != null) {
part.getSd().setCols(addSpec.getCols());
}
if (addSpec.getSerializationLib() != null) {
part.getSd().getSerdeInfo().setSerializationLib(addSpec.getSerializationLib());
}
if (addSpec.getSerdeParams() != null) {
part.getSd().getSerdeInfo().setParameters(addSpec.getSerdeParams());
}
if (addSpec.getBucketCols() != null) {
part.getSd().setBucketCols(addSpec.getBucketCols());
}
if (addSpec.getSortCols() != null) {
part.getSd().setSortCols(addSpec.getSortCols());
}
if (addSpec.getColStats() != null) {
part.setColStats(addSpec.getColStats());
// Statistics will have an associated write Id for a transactional table. We need it to
// update column statistics.
part.setWriteId(addSpec.getWriteId());
}
return part;
}
public Partition getPartition(Table tbl, Map<String, String> partSpec,
boolean forceCreate) throws HiveException {
return getPartition(tbl, partSpec, forceCreate, null, true);
}
/**
* Returns partition metadata
*
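   * <p>A minimal usage sketch (illustrative; assumes a {@code Hive} instance {@code db} and a
   * table partitioned by {@code ds}):
   * <pre>{@code
   *   Map<String, String> spec = new HashMap<>();
   *   spec.put("ds", "2008-04-08");
   *   Partition p = db.getPartition(tbl, spec, false, null, true);
   * }</pre>
   *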
* @param tbl
* the partition's table
* @param partSpec
* partition keys and values
* @param forceCreate
* if this is true and partition doesn't exist then a partition is
* created
* @param partPath the path where the partition data is located
   * @param inheritTableSpecs whether to copy over the table specs (input format, output format and serde)
* @return result partition object or null if there is no partition
* @throws HiveException
*/
public Partition getPartition(Table tbl, Map<String, String> partSpec,
boolean forceCreate, String partPath, boolean inheritTableSpecs) throws HiveException {
tbl.validatePartColumnNames(partSpec, true);
List<String> pvals = new ArrayList<String>();
for (FieldSchema field : tbl.getPartCols()) {
String val = partSpec.get(field.getName());
// enable dynamic partitioning
if ((val == null && !HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING))
|| (val != null && val.length() == 0)) {
throw new HiveException("get partition: Value for key "
+ field.getName() + " is null or empty");
} else if (val != null){
pvals.add(val);
}
}
org.apache.hadoop.hive.metastore.api.Partition tpart = null;
try {
tpart = getSynchronizedMSC().getPartitionWithAuthInfo(tbl.getDbName(),
tbl.getTableName(), pvals, getUserName(), getGroupNames());
} catch (NoSuchObjectException nsoe) {
// this means no partition exists for the given partition
// key value pairs - thrift cannot handle null return values, hence
// getPartition() throws NoSuchObjectException to indicate null partition
tpart = null;
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
try {
if (forceCreate) {
if (tpart == null) {
LOG.debug("creating partition for table " + tbl.getTableName()
+ " with partition spec : " + partSpec);
try {
tpart = getSynchronizedMSC().appendPartition(tbl.getDbName(), tbl.getTableName(), pvals);
} catch (AlreadyExistsException aee) {
LOG.debug("Caught already exists exception, trying to alter partition instead");
tpart = getSynchronizedMSC().getPartitionWithAuthInfo(tbl.getDbName(),
tbl.getTableName(), pvals, getUserName(), getGroupNames());
alterPartitionSpec(tbl, partSpec, tpart, inheritTableSpecs, partPath);
} catch (Exception e) {
if (CheckJDOException.isJDODataStoreException(e)) {
// Using utility method above, so that JDODataStoreException doesn't
// have to be used here. This helps avoid adding jdo dependency for
// hcatalog client uses
LOG.debug("Caught JDO exception, trying to alter partition instead");
tpart = getSynchronizedMSC().getPartitionWithAuthInfo(tbl.getDbName(),
tbl.getTableName(), pvals, getUserName(), getGroupNames());
if (tpart == null) {
// This means the exception was caused by something other than a race condition
// in creating the partition, since the partition still doesn't exist.
throw e;
}
alterPartitionSpec(tbl, partSpec, tpart, inheritTableSpecs, partPath);
} else {
throw e;
}
}
}
else {
alterPartitionSpec(tbl, partSpec, tpart, inheritTableSpecs, partPath);
fireInsertEvent(tbl, partSpec, true, null);
}
}
if (tpart == null) {
return null;
}
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return new Partition(tbl, tpart);
}
private void alterPartitionSpec(Table tbl,
Map<String, String> partSpec,
org.apache.hadoop.hive.metastore.api.Partition tpart,
boolean inheritTableSpecs,
String partPath) throws HiveException, InvalidOperationException {
alterPartitionSpecInMemory(tbl, partSpec, tpart, inheritTableSpecs, partPath);
String fullName = tbl.getTableName();
if (!org.apache.commons.lang.StringUtils.isEmpty(tbl.getDbName())) {
fullName = tbl.getFullyQualifiedName();
}
alterPartition(tbl.getCatalogName(), tbl.getDbName(), tbl.getTableName(),
new Partition(tbl, tpart), null, true);
}
private void alterPartitionSpecInMemory(Table tbl,
Map<String, String> partSpec,
org.apache.hadoop.hive.metastore.api.Partition tpart,
boolean inheritTableSpecs,
String partPath) throws HiveException, InvalidOperationException {
LOG.debug("altering partition for table " + tbl.getTableName() + " with partition spec : "
+ partSpec);
if (inheritTableSpecs) {
tpart.getSd().setOutputFormat(tbl.getTTable().getSd().getOutputFormat());
tpart.getSd().setInputFormat(tbl.getTTable().getSd().getInputFormat());
tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib());
tpart.getSd().getSerdeInfo().setParameters(
tbl.getTTable().getSd().getSerdeInfo().getParameters());
tpart.getSd().setBucketCols(tbl.getBucketCols());
tpart.getSd().setNumBuckets(tbl.getNumBuckets());
tpart.getSd().setSortCols(tbl.getSortCols());
}
if (partPath == null || partPath.trim().equals("")) {
throw new HiveException("new partition path should not be null or empty.");
}
tpart.getSd().setLocation(partPath);
}
public void addWriteNotificationLog(Table tbl, Map<String, String> partitionSpec,
List<Path> newFiles, Long writeId) throws HiveException {
if (!conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML)) {
LOG.debug("write notification log is ignored as dml event logging is disabled");
return;
}
if (tbl.isTemporary()) {
LOG.debug("write notification log is ignored as " + tbl.getTableName() + " is temporary : " + writeId);
return;
}
if (newFiles == null || newFiles.isEmpty()) {
LOG.debug("write notification log is ignored as file list is empty");
return;
}
LOG.debug("adding write notification log for operation " + writeId + " table " + tbl.getCompleteName() +
"partition " + partitionSpec + " list of files " + newFiles);
try {
Long txnId = SessionState.get().getTxnMgr().getCurrentTxnId();
List<String> partitionVals = null;
if (partitionSpec != null && !partitionSpec.isEmpty()) {
partitionVals = new ArrayList<>();
for (FieldSchema fs : tbl.getPartitionKeys()) {
partitionVals.add(partitionSpec.get(fs.getName()));
}
}
addWriteNotificationLog(conf, tbl, partitionVals, txnId, writeId, newFiles);
} catch (IOException | TException e) {
throw new HiveException(e);
}
}
public static void addWriteNotificationLog(HiveConf conf, Table tbl, List<String> partitionVals,
Long txnId, Long writeId, List<Path> newFiles)
throws IOException, HiveException, TException {
FileSystem fileSystem = tbl.getDataLocation().getFileSystem(conf);
InsertEventRequestData insertData = new InsertEventRequestData();
insertData.setReplace(true);
WriteNotificationLogRequest rqst = new WriteNotificationLogRequest(txnId, writeId,
tbl.getDbName(), tbl.getTableName(), insertData);
addInsertFileInformation(newFiles, fileSystem, insertData);
rqst.setPartitionVals(partitionVals);
get(conf).getSynchronizedMSC().addWriteNotificationLog(rqst);
}
private void fireInsertEvent(Table tbl, Map<String, String> partitionSpec, boolean replace, List<Path> newFiles)
throws HiveException {
if (conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML)) {
LOG.debug("Firing dml insert event");
if (tbl.isTemporary()) {
LOG.debug("Not firing dml insert event as " + tbl.getTableName() + " is temporary");
return;
}
try {
FileSystem fileSystem = tbl.getDataLocation().getFileSystem(conf);
FireEventRequestData data = new FireEventRequestData();
InsertEventRequestData insertData = new InsertEventRequestData();
insertData.setReplace(replace);
data.setInsertData(insertData);
if (newFiles != null && !newFiles.isEmpty()) {
addInsertFileInformation(newFiles, fileSystem, insertData);
} else {
insertData.setFilesAdded(new ArrayList<String>());
}
FireEventRequest rqst = new FireEventRequest(true, data);
rqst.setDbName(tbl.getDbName());
rqst.setTableName(tbl.getTableName());
if (partitionSpec != null && partitionSpec.size() > 0) {
List<String> partVals = new ArrayList<String>(partitionSpec.size());
for (FieldSchema fs : tbl.getPartitionKeys()) {
partVals.add(partitionSpec.get(fs.getName()));
}
rqst.setPartitionVals(partVals);
}
getSynchronizedMSC().fireListenerEvent(rqst);
} catch (IOException | TException e) {
throw new HiveException(e);
}
}
}
private static void addInsertFileInformation(List<Path> newFiles, FileSystem fileSystem,
InsertEventRequestData insertData) throws IOException {
LinkedList<Path> directories = null;
for (Path p : newFiles) {
if (fileSystem.isDirectory(p)) {
if (directories == null) {
directories = new LinkedList<>();
}
directories.add(p);
continue;
}
addInsertNonDirectoryInformation(p, fileSystem, insertData);
}
if (directories == null) {
return;
}
// We don't expect any nesting in most cases, or a lot of it if it is present; union and LB
// are some examples where we would have 1, or few, levels respectively.
while (!directories.isEmpty()) {
Path dir = directories.poll();
FileStatus[] contents = fileSystem.listStatus(dir);
if (contents == null) {
continue;
}
for (FileStatus status : contents) {
if (status.isDirectory()) {
directories.add(status.getPath());
continue;
}
addInsertNonDirectoryInformation(status.getPath(), fileSystem, insertData);
}
}
}
private static void addInsertNonDirectoryInformation(Path p, FileSystem fileSystem,
InsertEventRequestData insertData) throws IOException {
insertData.addToFilesAdded(p.toString());
FileChecksum cksum = fileSystem.getFileChecksum(p);
String acidDirPath = AcidUtils.getFirstLevelAcidDirPath(p.getParent(), fileSystem);
// File checksum is not implemented for local filesystem (RawLocalFileSystem)
if (cksum != null) {
String checksumString =
StringUtils.byteToHexString(cksum.getBytes(), 0, cksum.getLength());
insertData.addToFilesAddedChecksum(checksumString);
} else {
// Add an empty checksum string for filesystems that don't generate one
insertData.addToFilesAddedChecksum("");
}
// acid dir will be present only for acid write operations.
if (acidDirPath != null) {
insertData.addToSubDirectoryList(acidDirPath);
}
}
public boolean dropPartition(String tblName, List<String> part_vals, boolean deleteData)
throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return dropPartition(names[0], names[1], part_vals, deleteData);
}
public boolean dropPartition(String db_name, String tbl_name,
List<String> part_vals, boolean deleteData) throws HiveException {
return dropPartition(db_name, tbl_name, part_vals,
PartitionDropOptions.instance().deleteData(deleteData));
}
public boolean dropPartition(String dbName, String tableName, List<String> partVals, PartitionDropOptions options)
throws HiveException {
try {
return getMSC().dropPartition(dbName, tableName, partVals, options);
} catch (NoSuchObjectException e) {
throw new HiveException("Partition or table doesn't exist.", e);
} catch (Exception e) {
throw new HiveException(e.getMessage(), e);
}
}
/**
* drop the partitions specified as directory names associated with the table.
*
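   * <p>A minimal usage sketch (illustrative; assumes a {@code Hive} instance {@code db} and a
   * table partitioned by {@code ds} and {@code hr}):
   * <pre>{@code
   *   List<String> dirs = Arrays.asList("ds=2008-04-08/hr=11", "ds=2008-04-08/hr=12");
   *   List<Partition> dropped = db.dropPartitions(table, dirs, true, true);
   * }</pre>
   *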
* @param table object for which partition is needed
* @param partDirNames partition directories that need to be dropped
* @param deleteData whether data should be deleted from file system
* @param ifExists check for existence before attempting delete
*
* @return list of partition objects that were deleted
*
* @throws HiveException
*/
public List<Partition> dropPartitions(Table table, List<String>partDirNames,
boolean deleteData, boolean ifExists) throws HiveException {
// partitions to be dropped in this batch
List<DropPartitionDesc.PartSpec> partSpecs = new ArrayList<>(partDirNames.size());
// parts of the partition
String[] parts = null;
// Expression splits of each part of the partition
String[] partExprParts = null;
// Column Types of all partitioned columns. Used for generating partition specification
Map<String, String> colTypes = new HashMap<String, String>();
for (FieldSchema fs : table.getPartitionKeys()) {
colTypes.put(fs.getName(), fs.getType());
}
// Key to be used to save the partition to be dropped in partSpecs
int partSpecKey = 0;
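    // For example (illustrative values): a partDir of "ds=2008-04-08/hr=12" becomes the
    // expression (ds = '2008-04-08') and (hr = '12')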
for (String partDir : partDirNames) {
// The expression to identify the partition to be dropped
ExprNodeGenericFuncDesc expr = null;
// Split by "/" to identify partition parts
parts = partDir.split("/");
// Loop through the partitions and form the expression
for (String part : parts) {
// Split the partition predicate to identify column and value
partExprParts = part.split("=");
        // Only two elements are expected in partExprParts: partition column name and partition value
assert partExprParts.length == 2;
// Partition Column
String partCol = partExprParts[0];
// Column Type
PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(colTypes.get(partCol));
// Form the expression node corresponding to column
ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, partCol, null, true);
// Build the expression based on the partition predicate
ExprNodeGenericFuncDesc op =
makeBinaryPredicate("=", column, new ExprNodeConstantDesc(pti, partExprParts[1]));
        // the multiple parts of the partition predicate are joined using AND
expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
}
// Add the expression to partition specification
partSpecs.add(new DropPartitionDesc.PartSpec(expr, partSpecKey));
// Increment dropKey to get a new key for hash map
++partSpecKey;
}
String[] names = Utilities.getDbTableName(table.getFullyQualifiedName());
return dropPartitions(names[0], names[1], partSpecs, deleteData, ifExists);
}
public List<Partition> dropPartitions(String tblName, List<DropPartitionDesc.PartSpec> partSpecs,
boolean deleteData, boolean ifExists) throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return dropPartitions(names[0], names[1], partSpecs, deleteData, ifExists);
}
public List<Partition> dropPartitions(String dbName, String tblName,
List<DropPartitionDesc.PartSpec> partSpecs, boolean deleteData,
boolean ifExists) throws HiveException {
return dropPartitions(dbName, tblName, partSpecs,
PartitionDropOptions.instance()
.deleteData(deleteData)
.ifExists(ifExists));
}
public List<Partition> dropPartitions(String tblName, List<DropPartitionDesc.PartSpec> partSpecs,
PartitionDropOptions dropOptions) throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return dropPartitions(names[0], names[1], partSpecs, dropOptions);
}
public List<Partition> dropPartitions(String dbName, String tblName,
List<DropPartitionDesc.PartSpec> partSpecs, PartitionDropOptions dropOptions) throws HiveException {
try {
Table tbl = getTable(dbName, tblName);
List<org.apache.hadoop.hive.metastore.utils.ObjectPair<Integer, byte[]>> partExprs =
new ArrayList<>(partSpecs.size());
for (DropPartitionDesc.PartSpec partSpec : partSpecs) {
partExprs.add(new org.apache.hadoop.hive.metastore.utils.ObjectPair<>(partSpec.getPrefixLength(),
SerializationUtilities.serializeExpressionToKryo(partSpec.getPartSpec())));
}
List<org.apache.hadoop.hive.metastore.api.Partition> tParts = getMSC().dropPartitions(
dbName, tblName, partExprs, dropOptions);
return convertFromMetastore(tbl, tParts);
} catch (NoSuchObjectException e) {
throw new HiveException("Partition or table doesn't exist.", e);
} catch (Exception e) {
throw new HiveException(e.getMessage(), e);
}
}
public List<String> getPartitionNames(String tblName, short max) throws HiveException {
String[] names = Utilities.getDbTableName(tblName);
return getPartitionNames(names[0], names[1], max);
}
public List<String> getPartitionNames(String dbName, String tblName, short max)
throws HiveException {
List<String> names = null;
try {
names = getMSC().listPartitionNames(dbName, tblName, max);
} catch (NoSuchObjectException nsoe) {
// this means no partition exists for the given dbName and tblName
// key value pairs - thrift cannot handle null return values, hence
// listPartitionNames() throws NoSuchObjectException to indicate null partitions
return Lists.newArrayList();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return names;
}
public List<String> getPartitionNames(String dbName, String tblName,
Map<String, String> partSpec, short max) throws HiveException {
List<String> names = null;
Table t = getTable(dbName, tblName);
List<String> pvals = MetaStoreUtils.getPvals(t.getPartCols(), partSpec);
try {
names = getMSC().listPartitionNames(dbName, tblName, pvals, max);
} catch (NoSuchObjectException nsoe) {
// this means no partition exists for the given partition spec
// key value pairs - thrift cannot handle null return values, hence
// listPartitionNames() throws NoSuchObjectException to indicate null partitions
return Lists.newArrayList();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
return names;
}
/**
* get all the partitions that the table has
*
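   * <p>A minimal usage sketch (illustrative; assumes a {@code Hive} instance {@code db}):
   * <pre>{@code
   *   for (Partition p : db.getPartitions(tbl)) {
   *     System.out.println(p.getName());
   *   }
   * }</pre>
   *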
* @param tbl
* object for which partition is needed
* @return list of partition objects
*/
public List<Partition> getPartitions(Table tbl) throws HiveException {
if (tbl.isPartitioned()) {
List<org.apache.hadoop.hive.metastore.api.Partition> tParts;
try {
tParts = getMSC().listPartitionsWithAuthInfo(tbl.getDbName(), tbl.getTableName(),
(short) -1, getUserName(), getGroupNames());
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
List<Partition> parts = new ArrayList<Partition>(tParts.size());
for (org.apache.hadoop.hive.metastore.api.Partition tpart : tParts) {
parts.add(new Partition(tbl, tpart));
}
return parts;
} else {
Partition part = new Partition(tbl);
ArrayList<Partition> parts = new ArrayList<Partition>(1);
parts.add(part);
return parts;
}
}
/**
* Get all the partitions; unlike {@link #getPartitions(Table)}, does not include auth.
* @param tbl table for which partitions are needed
* @return list of partition objects
*/
public Set<Partition> getAllPartitionsOf(Table tbl) throws HiveException {
if (!tbl.isPartitioned()) {
return Sets.newHashSet(new Partition(tbl));
}
List<org.apache.hadoop.hive.metastore.api.Partition> tParts;
try {
tParts = getMSC().listPartitions(tbl.getDbName(), tbl.getTableName(), (short)-1);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
Set<Partition> parts = new LinkedHashSet<Partition>(tParts.size());
for (org.apache.hadoop.hive.metastore.api.Partition tpart : tParts) {
parts.add(new Partition(tbl, tpart));
}
return parts;
}
/**
   * get all the partitions of the table that match the given partial
   * specification. Partition columns whose value can be anything should be
   * specified as an empty string.
*
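   * <p>A minimal usage sketch (illustrative; assumes a table partitioned by {@code ds} and
   * {@code hr}, where the empty string for {@code hr} matches any value):
   * <pre>{@code
   *   Map<String, String> partial = new HashMap<>();
   *   partial.put("ds", "2008-04-08");
   *   partial.put("hr", "");
   *   List<Partition> parts = db.getPartitions(tbl, partial, (short) -1);
   * }</pre>
   *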
* @param tbl
* object for which partition is needed. Must be partitioned.
* @param limit number of partitions to return
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitions(Table tbl, Map<String, String> partialPartSpec,
short limit)
throws HiveException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<String> partialPvals = MetaStoreUtils.getPvals(tbl.getPartCols(), partialPartSpec);
List<org.apache.hadoop.hive.metastore.api.Partition> partitions = null;
try {
partitions = getMSC().listPartitionsWithAuthInfo(tbl.getDbName(), tbl.getTableName(),
partialPvals, limit, getUserName(), getGroupNames());
} catch (Exception e) {
throw new HiveException(e);
}
List<Partition> qlPartitions = new ArrayList<Partition>();
for (org.apache.hadoop.hive.metastore.api.Partition p : partitions) {
qlPartitions.add( new Partition(tbl, p));
}
return qlPartitions;
}
/**
   * get all the partitions of the table that match the given partial
   * specification. Partition columns whose value can be anything should be
   * specified as an empty string.
*
* @param tbl
* object for which partition is needed. Must be partitioned.
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitions(Table tbl, Map<String, String> partialPartSpec)
throws HiveException {
return getPartitions(tbl, partialPartSpec, (short)-1);
}
/**
   * get all the partitions of the table that match the given partial
   * specification. Partition columns whose value can be anything should be
   * specified as an empty string.
*
* @param tbl
* object for which partition is needed. Must be partitioned.
* @param partialPartSpec
   *          partial partition specification (some partition values can be empty).
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitionsByNames(Table tbl,
Map<String, String> partialPartSpec)
throws HiveException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<String> names = getPartitionNames(tbl.getDbName(), tbl.getTableName(),
partialPartSpec, (short)-1);
List<Partition> partitions = getPartitionsByNames(tbl, names);
return partitions;
}
/**
* Get all partitions of the table that matches the list of given partition names.
*
* @param tbl
* object for which partition is needed. Must be partitioned.
* @param partNames
* list of partition names
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitionsByNames(Table tbl, List<String> partNames)
throws HiveException {
return getPartitionsByNames(tbl, partNames, false);
}
/**
* Get all partitions of the table that matches the list of given partition names.
*
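   * <p>A minimal usage sketch (illustrative; assumes metastore-style
   * {@code key=value/key=value} partition names):
   * <pre>{@code
   *   List<String> names = Arrays.asList("ds=2008-04-08/hr=11", "ds=2008-04-08/hr=12");
   *   List<Partition> parts = db.getPartitionsByNames(tbl, names, false);
   * }</pre>
   *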
* @param tbl
* object for which partition is needed. Must be partitioned.
* @param partNames
* list of partition names
* @param getColStats
* if true, Partition object includes column statistics for that partition.
* @return list of partition objects
* @throws HiveException
*/
public List<Partition> getPartitionsByNames(Table tbl, List<String> partNames, boolean getColStats)
throws HiveException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<Partition> partitions = new ArrayList<Partition>(partNames.size());
int batchSize = HiveConf.getIntVar(conf, HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
// TODO: might want to increase the default batch size. 1024 is viable; MS gets OOM if too high.
int nParts = partNames.size();
int nBatches = nParts / batchSize;
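    // e.g. with 2500 names and a batch size of 1000, the loop below fetches two full
    // batches, and the remaining 500 names are fetched by the tail request after it.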
try {
for (int i = 0; i < nBatches; ++i) {
List<org.apache.hadoop.hive.metastore.api.Partition> tParts =
getMSC().getPartitionsByNames(tbl.getDbName(), tbl.getTableName(),
partNames.subList(i*batchSize, (i+1)*batchSize), getColStats);
if (tParts != null) {
for (org.apache.hadoop.hive.metastore.api.Partition tpart: tParts) {
partitions.add(new Partition(tbl, tpart));
}
}
}
if (nParts > nBatches * batchSize) {
List<org.apache.hadoop.hive.metastore.api.Partition> tParts =
getMSC().getPartitionsByNames(tbl.getDbName(), tbl.getTableName(),
partNames.subList(nBatches*batchSize, nParts), getColStats);
if (tParts != null) {
for (org.apache.hadoop.hive.metastore.api.Partition tpart: tParts) {
partitions.add(new Partition(tbl, tpart));
}
}
}
} catch (Exception e) {
throw new HiveException(e);
}
return partitions;
}
/**
* Get a list of Partitions by filter.
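   * <p>Example filter (illustrative, for a table partitioned by {@code ds} and {@code hr}):
   * {@code "ds = '2008-04-08' and hr >= '12'"}.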
* @param tbl The table containing the partitions.
   * @param filter A string representing partition predicates.
* @return a list of partitions satisfying the partition predicates.
* @throws HiveException
* @throws MetaException
* @throws NoSuchObjectException
* @throws TException
*/
public List<Partition> getPartitionsByFilter(Table tbl, String filter)
throws HiveException, MetaException, NoSuchObjectException, TException {
if (!tbl.isPartitioned()) {
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}
List<org.apache.hadoop.hive.metastore.api.Partition> tParts = getMSC().listPartitionsByFilter(
tbl.getDbName(), tbl.getTableName(), filter, (short)-1);
return convertFromMetastore(tbl, tParts);
}
private static List<Partition> convertFromMetastore(Table tbl,
List<org.apache.hadoop.hive.metastore.api.Partition> partitions) throws HiveException {
if (partitions == null) {
return new ArrayList<Partition>();
}
List<Partition> results = new ArrayList<Partition>(partitions.size());
for (org.apache.hadoop.hive.metastore.api.Partition tPart : partitions) {
results.add(new Partition(tbl, tPart));
}
return results;
}
/**
* Get a list of Partitions by expr.
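   * <p>A minimal usage sketch (illustrative; assumes a {@code Hive} instance {@code db} and a
   * string-typed partition column {@code ds}, mirroring the expression-building pattern used
   * elsewhere in this class):
   * <pre>{@code
   *   ExprNodeGenericFuncDesc expr = makeBinaryPredicate("=",
   *       new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "ds", null, true),
   *       new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "2008-04-08"));
   *   List<Partition> parts = new ArrayList<>();
   *   boolean hasUnknown = db.getPartitionsByExpr(tbl, expr, conf, parts);
   * }</pre>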
* @param tbl The table containing the partitions.
* @param expr A serialized expression for partition predicates.
* @param conf Hive config.
* @param result the resulting list of partitions
* @return whether the resulting list contains partitions which may or may not match the expr
*/
public boolean getPartitionsByExpr(Table tbl, ExprNodeGenericFuncDesc expr, HiveConf conf,
List<Partition> result) throws HiveException, TException {
assert result != null;
byte[] exprBytes = SerializationUtilities.serializeExpressionToKryo(expr);
String defaultPartitionName = HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME);
List<org.apache.hadoop.hive.metastore.api.Partition> msParts =
new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();
boolean hasUnknownParts = getMSC().listPartitionsByExpr(tbl.getDbName(),
tbl.getTableName(), exprBytes, defaultPartitionName, (short)-1, msParts);
result.addAll(convertFromMetastore(tbl, msParts));
return hasUnknownParts;
}
/**
* Get a number of Partitions by filter.
* @param tbl The table containing the partitions.
   * @param filter A string representing partition predicates.
* @return the number of partitions satisfying the partition predicates.
* @throws HiveException
* @throws MetaException
* @throws NoSuchObjectException
* @throws TException
*/
public int getNumPartitionsByFilter(Table tbl, String filter)
throws HiveException, MetaException, NoSuchObjectException, TException {
if (!tbl.isPartitioned()) {
throw new HiveException("Partition spec should only be supplied for a " +
"partitioned table");
}
int numParts = getMSC().getNumPartitionsByFilter(
tbl.getDbName(), tbl.getTableName(), filter);
return numParts;
}
public void validatePartitionNameCharacters(List<String> partVals) throws HiveException {
try {
getMSC().validatePartitionNameCharacters(partVals);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void createRole(String roleName, String ownerName)
throws HiveException {
try {
getMSC().create_role(new Role(roleName, -1, ownerName));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropRole(String roleName) throws HiveException {
try {
getMSC().drop_role(roleName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all existing role names.
*
* @return List of role names.
* @throws HiveException
*/
public List<String> getAllRoleNames() throws HiveException {
try {
return getMSC().listRoleNames();
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<RolePrincipalGrant> getRoleGrantInfoForPrincipal(String principalName, PrincipalType principalType) throws HiveException {
try {
GetRoleGrantsForPrincipalRequest req = new GetRoleGrantsForPrincipalRequest(principalName, principalType);
GetRoleGrantsForPrincipalResponse resp = getMSC().get_role_grants_for_principal(req);
return resp.getPrincipalGrants();
} catch (Exception e) {
throw new HiveException(e);
}
}
public boolean grantRole(String roleName, String userName,
PrincipalType principalType, String grantor, PrincipalType grantorType,
boolean grantOption) throws HiveException {
try {
return getMSC().grant_role(roleName, userName, principalType, grantor,
grantorType, grantOption);
} catch (Exception e) {
throw new HiveException(e);
}
}
public boolean revokeRole(String roleName, String userName,
PrincipalType principalType, boolean grantOption) throws HiveException {
try {
return getMSC().revoke_role(roleName, userName, principalType, grantOption);
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<Role> listRoles(String userName, PrincipalType principalType)
throws HiveException {
try {
return getMSC().list_roles(userName, principalType);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* @param objectType
* hive object type
* @param db_name
* database name
* @param table_name
* table name
* @param part_values
* partition values
* @param column_name
* column name
* @param user_name
* user name
* @param group_names
* group names
* @return the privilege set
* @throws HiveException
*/
public PrincipalPrivilegeSet get_privilege_set(HiveObjectType objectType,
String db_name, String table_name, List<String> part_values,
String column_name, String user_name, List<String> group_names)
throws HiveException {
try {
HiveObjectRef hiveObj = new HiveObjectRef(objectType, db_name,
table_name, part_values, column_name);
return getMSC().get_privilege_set(hiveObj, user_name, group_names);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* @param objectType
* hive object type
* @param principalName
* @param principalType
* @param dbName
* @param tableName
* @param partValues
* @param columnName
* @return list of privileges
* @throws HiveException
*/
public List<HiveObjectPrivilege> showPrivilegeGrant(
HiveObjectType objectType, String principalName,
PrincipalType principalType, String dbName, String tableName,
List<String> partValues, String columnName) throws HiveException {
try {
HiveObjectRef hiveObj = new HiveObjectRef(objectType, dbName, tableName,
partValues, columnName);
return getMSC().list_privileges(principalName, principalType, hiveObj);
} catch (Exception e) {
throw new HiveException(e);
}
}
private static void copyFiles(final HiveConf conf, final FileSystem destFs,
FileStatus[] srcs, final FileSystem srcFs, final Path destf,
final boolean isSrcLocal, boolean isOverwrite,
final List<Path> newFiles, boolean acidRename, boolean isManaged) throws HiveException {
final HdfsUtils.HadoopFileStatus fullDestStatus;
try {
fullDestStatus = new HdfsUtils.HadoopFileStatus(conf, destFs, destf);
} catch (IOException e1) {
throw new HiveException(e1);
}
if (!fullDestStatus.getFileStatus().isDirectory()) {
throw new HiveException(destf + " is not a directory.");
}
final List<Future<ObjectPair<Path, Path>>> futures = new LinkedList<>();
final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ?
Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Move-Thread-%d").build()) : null;
// For ACID non-bucketed case, the filenames have to be in the format consistent with INSERT/UPDATE/DELETE Ops,
    // i.e., like 000000_0, 000001_0_copy_1, 000002_0.gz etc.
// The extension is only maintained for files which are compressed.
int taskId = 0;
// Sort the files
Arrays.sort(srcs);
String configuredOwner = HiveConf.getVar(conf, ConfVars.HIVE_LOAD_DATA_OWNER);
for (FileStatus src : srcs) {
FileStatus[] files;
if (src.isDirectory()) {
try {
files = srcFs.listStatus(src.getPath(), FileUtils.HIDDEN_FILES_PATH_FILTER);
} catch (IOException e) {
if (null != pool) {
pool.shutdownNow();
}
throw new HiveException(e);
}
} else {
files = new FileStatus[] {src};
}
final SessionState parentSession = SessionState.get();
// Sort the files
Arrays.sort(files);
for (final FileStatus srcFile : files) {
final Path srcP = srcFile.getPath();
final boolean needToCopy = needToCopy(srcP, destf, srcFs, destFs, configuredOwner, isManaged);
final boolean isRenameAllowed = !needToCopy && !isSrcLocal;
final String msg = "Unable to move source " + srcP + " to destination " + destf;
        // If we do a rename for a non-local file, we will be transferring the original
        // file permissions from source to the destination. Else, in the mvFile() case where we
        // copy from source to destination, we will inherit the destination's parent group ownership.
if (null == pool) {
try {
Path destPath = mvFile(conf, srcFs, srcP, destFs, destf, isSrcLocal, isOverwrite, isRenameAllowed,
acidRename ? taskId++ : -1);
if (null != newFiles) {
newFiles.add(destPath);
}
} catch (Exception e) {
throw getHiveException(e, msg, "Failed to move: {}");
}
} else {
          // Anonymous classes can only capture final (or effectively final) values, so make a final copy of taskId.
final int finalTaskId = acidRename ? taskId++ : -1;
futures.add(pool.submit(new Callable<ObjectPair<Path, Path>>() {
@Override
public ObjectPair<Path, Path> call() throws HiveException {
SessionState.setCurrentSessionState(parentSession);
try {
Path destPath =
mvFile(conf, srcFs, srcP, destFs, destf, isSrcLocal, isOverwrite, isRenameAllowed, finalTaskId);
if (null != newFiles) {
newFiles.add(destPath);
}
return ObjectPair.create(srcP, destPath);
} catch (Exception e) {
throw getHiveException(e, msg);
}
}
}));
}
}
}
if (null != pool) {
pool.shutdown();
for (Future<ObjectPair<Path, Path>> future : futures) {
try {
ObjectPair<Path, Path> pair = future.get();
LOG.debug("Moved src: {}, to dest: {}", pair.getFirst().toString(), pair.getSecond().toString());
} catch (Exception e) {
throw handlePoolException(pool, e);
}
}
}
}
private static boolean isSubDir(Path srcf, Path destf, FileSystem srcFs, FileSystem destFs, boolean isSrcLocal) {
if (srcf == null) {
LOG.debug("The source path is null for isSubDir method.");
return false;
}
String fullF1 = getQualifiedPathWithoutSchemeAndAuthority(srcf, srcFs).toString() + Path.SEPARATOR;
String fullF2 = getQualifiedPathWithoutSchemeAndAuthority(destf, destFs).toString() + Path.SEPARATOR;
boolean isInTest = HiveConf.getBoolVar(srcFs.getConf(), ConfVars.HIVE_IN_TEST);
    // In test automation, the data warehouse is based on the local file system.
LOG.debug("The source path is " + fullF1 + " and the destination path is " + fullF2);
if (isInTest) {
return fullF1.startsWith(fullF2);
}
    // if the URI schemes differ, return false
String schemaSrcf = srcf.toUri().getScheme();
String schemaDestf = destf.toUri().getScheme();
    // a null destination scheme means destf resolves against the default FS; if the source is
    // local, treat them as different file systems
    if (schemaDestf == null && isSrcLocal) {
      LOG.debug("The source file is local while the destination is not.");
return false;
}
    // If both schemes are provided, they should be the same.
    if (schemaSrcf != null && schemaDestf != null && !schemaSrcf.equals(schemaDestf)) {
      LOG.debug("The source path's scheme is " + schemaSrcf +
          " and the destination path's scheme is " + schemaDestf + ".");
return false;
}
LOG.debug("The source path is " + fullF1 + " and the destination path is " + fullF2);
return fullF1.startsWith(fullF2);
}
private static Path getQualifiedPathWithoutSchemeAndAuthority(Path srcf, FileSystem fs) {
Path currentWorkingDir = fs.getWorkingDirectory();
Path path = srcf.makeQualified(srcf.toUri(), currentWorkingDir);
return ShimLoader.getHadoopShims().getPathWithoutSchemeAndAuthority(path);
}
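  // Builds an MR-style task file name; e.g. a taskId of 7 yields "000007_0".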
private static String getPathName(int taskId) {
return Utilities.replaceTaskId("000000", taskId) + "_0";
}
/**
* <p>
* Moves a file from one {@link Path} to another. If {@code isRenameAllowed} is true then the
   * {@link FileSystem#rename(Path, Path)} method is used to move the file. If it is false then the data is copied; if
* {@code isSrcLocal} is true then the {@link FileSystem#copyFromLocalFile(Path, Path)} method is used, else
* {@link FileUtils#copy(FileSystem, Path, FileSystem, Path, boolean, boolean, HiveConf)} is used.
* </p>
*
* <p>
* If the destination file already exists, then {@code _copy_[counter]} is appended to the file name, where counter
* is an integer starting from 1.
* </p>
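   *
   * <p>
   * For example (illustrative): moving {@code 000000_0} into a directory that already contains
   * {@code 000000_0} produces {@code 000000_0_copy_1}; a compressed {@code 000000_0.gz}
   * produces {@code 000000_0_copy_1.gz}.
   * </p>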
*
* @param conf the {@link HiveConf} to use if copying data
* @param sourceFs the {@link FileSystem} where the source file exists
* @param sourcePath the {@link Path} to move
* @param destFs the {@link FileSystem} to move the file to
* @param destDirPath the {@link Path} to move the file to
* @param isSrcLocal if the source file is on the local filesystem
   * @param isOverwrite if true, then overwrite the destination file if it exists, else make a duplicate copy
* @param isRenameAllowed true if the data should be renamed and not copied, false otherwise
*
* @return the {@link Path} the source file was moved to
*
* @throws IOException if there was an issue moving the file
*/
private static Path mvFile(HiveConf conf, FileSystem sourceFs, Path sourcePath, FileSystem destFs, Path destDirPath,
boolean isSrcLocal, boolean isOverwrite, boolean isRenameAllowed,
int taskId) throws IOException {
    // Strip off the file type, if any, so we don't make:
// 000000_0.gz -> 000000_0.gz_copy_1
final String fullname = sourcePath.getName();
final String name;
if (taskId == -1) { // non-acid
name = FilenameUtils.getBaseName(sourcePath.getName());
} else { // acid
name = getPathName(taskId);
}
final String type = FilenameUtils.getExtension(sourcePath.getName());
    // In case of ACID, the file is ORC, so the extension is not relevant and should not be inherited.
Path destFilePath = new Path(destDirPath, taskId == -1 ? fullname : name);
/*
     * The below loop may perform badly when the destination file already exists and has too many
     * _copy_ files as well. A better approach would be to call listFiles() and get a complete list
     * of files from the destination, and check whether the file exists on that list. However,
     * millions of files could live in the destination directory, and in concurrent situations
     * this can cause OOM problems.
     *
     * I'll leave the below loop for now until a better approach is found.
*/
for (int counter = 1; destFs.exists(destFilePath); counter++) {
if (isOverwrite) {
destFs.delete(destFilePath, false);
break;
}
destFilePath = new Path(destDirPath, name + (Utilities.COPY_KEYWORD + counter) +
((taskId == -1 && !type.isEmpty()) ? "." + type : ""));
}
if (isRenameAllowed) {
destFs.rename(sourcePath, destFilePath);
} else if (isSrcLocal) {
destFs.copyFromLocalFile(sourcePath, destFilePath);
} else {
if (!FileUtils.copy(sourceFs, sourcePath, destFs, destFilePath,
false, // delete source
false, // overwrite destination
conf)) {
LOG.error("Copy failed for source: " + sourcePath + " to destination: " + destFilePath);
throw new IOException("File copy failed.");
}
// Source file delete may fail because of permission issue as executing user might not
// have permission to delete the files in the source path. Ignore this failure.
try {
if (!sourceFs.delete(sourcePath, true)) {
LOG.warn("Delete source failed for source: " + sourcePath + " during copy to destination: " + destFilePath);
}
} catch (Exception e) {
LOG.warn("Delete source failed for source: " + sourcePath + " during copy to destination: " + destFilePath, e);
}
}
return destFilePath;
}
// Clears the dest dir when src is sub-dir of dest.
public static void clearDestForSubDirSrc(final HiveConf conf, Path dest,
Path src, boolean isSrcLocal) throws IOException {
FileSystem destFS = dest.getFileSystem(conf);
FileSystem srcFS = src.getFileSystem(conf);
if (isSubDir(src, dest, srcFS, destFS, isSrcLocal)) {
final Path fullSrcPath = getQualifiedPathWithoutSchemeAndAuthority(src, srcFS);
final Path fullDestPath = getQualifiedPathWithoutSchemeAndAuthority(dest, destFS);
if (fullSrcPath.equals(fullDestPath)) {
return;
}
Path parent = fullSrcPath;
while (!parent.getParent().equals(fullDestPath)) {
parent = parent.getParent();
}
FileStatus[] existingFiles = destFS.listStatus(
dest, FileUtils.HIDDEN_FILES_PATH_FILTER);
for (FileStatus fileStatus : existingFiles) {
if (!fileStatus.getPath().getName().equals(parent.getName())) {
destFS.delete(fileStatus.getPath(), true);
}
}
}
}
// List the new files in destination path which gets copied from source.
public static void listNewFilesRecursively(final FileSystem destFs, Path dest,
List<Path> newFiles) throws HiveException {
try {
for (FileStatus fileStatus : destFs.listStatus(dest, FileUtils.HIDDEN_FILES_PATH_FILTER)) {
if (fileStatus.isDirectory()) {
// If it is a sub-directory, then recursively list the files.
listNewFilesRecursively(destFs, fileStatus.getPath(), newFiles);
} else {
newFiles.add(fileStatus.getPath());
}
}
} catch (IOException e) {
LOG.error("Failed to get source file statuses", e);
throw new HiveException(e.getMessage(), e);
}
}
/**
* Recycles the files recursively from the input path to the cmroot directory either by copying or moving it.
*
* @param dataPath Path of the data files to be recycled to cmroot
* @param isPurge
* When set to true files which needs to be recycled are not moved to Trash
*/
public void recycleDirToCmPath(Path dataPath, boolean isPurge) throws HiveException {
try {
CmRecycleRequest request = new CmRecycleRequest(dataPath.toString(), isPurge);
getSynchronizedMSC().recycleDirToCmPath(request);
} catch (Exception e) {
throw new HiveException(e);
}
}
  // It is assumed that the parent directory of destf already exists when this
  // method is called. When replace is true, this method works a little differently
  // from the mv command: if destf is a directory, it replaces destf instead of moving under
  // it. In this case, the replaced destf still preserves the original destf's permissions.
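  // For example (illustrative paths): with replace == true, moving /tmp/staging onto an existing
  // directory /warehouse/t replaces the contents of /warehouse/t with those of /tmp/staging,
  // rather than creating /warehouse/t/staging.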
public static boolean moveFile(final HiveConf conf, Path srcf, final Path destf, boolean replace,
boolean isSrcLocal, boolean isManaged) throws HiveException {
final FileSystem srcFs, destFs;
try {
destFs = destf.getFileSystem(conf);
} catch (IOException e) {
LOG.error("Failed to get dest fs", e);
throw new HiveException(e.getMessage(), e);
}
try {
srcFs = srcf.getFileSystem(conf);
} catch (IOException e) {
LOG.error("Failed to get src fs", e);
throw new HiveException(e.getMessage(), e);
}
HdfsUtils.HadoopFileStatus destStatus = null;
String configuredOwner = HiveConf.getVar(conf, ConfVars.HIVE_LOAD_DATA_OWNER);
// If source path is a subdirectory of the destination path (or the other way around):
// ex: INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
// where the staging directory is a subdirectory of the destination directory
// (1) Do not delete the dest dir before doing the move operation.
// (2) It is assumed that subdir and dir are in same encryption zone.
    // (3) Move individual files from src dir to dest dir.
boolean srcIsSubDirOfDest = isSubDir(srcf, destf, srcFs, destFs, isSrcLocal),
destIsSubDirOfSrc = isSubDir(destf, srcf, destFs, srcFs, false);
final String msg = "Unable to move source " + srcf + " to destination " + destf;
try {
if (replace) {
try{
destStatus = new HdfsUtils.HadoopFileStatus(conf, destFs, destf);
          //if destf is an existing directory:
          //if replace is true, a delete followed by a rename (mv) is equivalent to replace
          //if replace is false, a rename (mv) actually moves the src under the dest dir
          //if destf is an existing file, the rename is actually a replace, and there is no need
          // to delete the file first
if (replace && !srcIsSubDirOfDest) {
destFs.delete(destf, true);
LOG.debug("The path " + destf.toString() + " is deleted");
}
} catch (FileNotFoundException ignore) {
}
}
final HdfsUtils.HadoopFileStatus desiredStatus = destStatus;
final SessionState parentSession = SessionState.get();
if (isSrcLocal) {
// For local src file, copy to hdfs
destFs.copyFromLocalFile(srcf, destf);
return true;
} else {
if (needToCopy(srcf, destf, srcFs, destFs, configuredOwner, isManaged)) {
//copy if across file system or encryption zones.
LOG.debug("Copying source " + srcf + " to " + destf + " because HDFS encryption zones are different.");
return FileUtils.copy(srcf.getFileSystem(conf), srcf, destf.getFileSystem(conf), destf,
true, // delete source
replace, // overwrite destination
conf);
} else {
if (srcIsSubDirOfDest || destIsSubDirOfSrc) {
FileStatus[] srcs = destFs.listStatus(srcf, FileUtils.HIDDEN_FILES_PATH_FILTER);
List<Future<Void>> futures = new LinkedList<>();
final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ?
Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Move-Thread-%d").build()) : null;
if (destIsSubDirOfSrc && !destFs.exists(destf)) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("Creating " + destf);
}
destFs.mkdirs(destf);
}
          /* Move files one by one because one of the paths is a subdirectory of the other */
for (final FileStatus srcStatus : srcs) {
final Path destFile = new Path(destf, srcStatus.getPath().getName());
final String poolMsg =
"Unable to move source " + srcStatus.getPath() + " to destination " + destFile;
if (null == pool) {
boolean success = false;
if (destFs instanceof DistributedFileSystem) {
((DistributedFileSystem)destFs).rename(srcStatus.getPath(), destFile, Options.Rename.OVERWRITE);
success = true;
} else {
destFs.delete(destFile, false);
success = destFs.rename(srcStatus.getPath(), destFile);
}
if(!success) {
throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest:"
+ destf + " returned false");
}
} else {
futures.add(pool.submit(new Callable<Void>() {
@Override
public Void call() throws HiveException {
SessionState.setCurrentSessionState(parentSession);
try {
boolean success = false;
if (destFs instanceof DistributedFileSystem) {
((DistributedFileSystem)destFs).rename(srcStatus.getPath(), destFile, Options.Rename.OVERWRITE);
success = true;
} else {
destFs.delete(destFile, false);
success = destFs.rename(srcStatus.getPath(), destFile);
}
if (!success) {
throw new IOException(
"rename for src path: " + srcStatus.getPath() + " to dest path:"
+ destFile + " returned false");
}
} catch (Exception e) {
throw getHiveException(e, poolMsg);
}
return null;
}
}));
}
}
if (null != pool) {
pool.shutdown();
for (Future<Void> future : futures) {
try {
future.get();
} catch (Exception e) {
throw handlePoolException(pool, e);
}
}
}
return true;
        } else {
          return destFs.rename(srcf, destf);
        }
}
}
} catch (Exception e) {
throw getHiveException(e, msg);
}
}
static private HiveException getHiveException(Exception e, String msg) {
return getHiveException(e, msg, null);
}
static private HiveException handlePoolException(ExecutorService pool, Exception e) {
HiveException he = null;
if (e instanceof HiveException) {
he = (HiveException) e;
if (he.getCanonicalErrorMsg() != ErrorMsg.GENERIC_ERROR) {
if (he.getCanonicalErrorMsg() == ErrorMsg.UNRESOLVED_RT_EXCEPTION) {
LOG.error("Failed to move: {}", he.getMessage());
} else {
LOG.error("Failed to move: {}", he.getRemoteErrorMsg());
}
}
} else {
LOG.error("Failed to move: {}", e.getMessage());
he = new HiveException(e.getCause());
}
pool.shutdownNow();
return he;
}
static private HiveException getHiveException(Exception e, String msg, String logMsg) {
    // The message from a remote exception includes the entire stack. The error thrown from
    // Hive based on the remote exception needs only the first line.
String hiveErrMsg = null;
if (e.getMessage() != null) {
hiveErrMsg = String.format("%s%s%s", msg, ": ",
Splitter.on(System.getProperty("line.separator")).split(e.getMessage()).iterator()
.next());
} else {
hiveErrMsg = msg;
}
ErrorMsg errorMsg = ErrorMsg.getErrorMsg(e);
if (logMsg != null) {
LOG.info(String.format(logMsg, e.getMessage()));
}
if (errorMsg != ErrorMsg.UNRESOLVED_RT_EXCEPTION) {
return new HiveException(e, e.getMessage(), errorMsg, hiveErrMsg);
} else {
return new HiveException(msg, e);
}
}
/**
   * If moving across different FileSystems or different encryption zones, we need to do a file copy instead of a rename.
   * TODO- consider if we need to do this for different file authorities.
* @throws HiveException
*/
static private boolean needToCopy(Path srcf, Path destf, FileSystem srcFs,
FileSystem destFs, String configuredOwner, boolean isManaged) throws HiveException {
//Check if different FileSystems
if (!FileUtils.equalsFileSystem(srcFs, destFs)) {
return true;
}
if (isManaged && !configuredOwner.isEmpty() && srcFs instanceof DistributedFileSystem) {
// Need some extra checks
// Get the running owner
FileStatus srcs;
try {
srcs = srcFs.getFileStatus(srcf);
String runningUser = UserGroupInformation.getLoginUser().getShortUserName();
boolean isOwned = FileUtils.isOwnerOfFileHierarchy(srcFs, srcs, configuredOwner, false);
if (configuredOwner.equals(runningUser)) {
// Check if owner has write permission, else it will have to copy
if (!(isOwned &&
FileUtils.isActionPermittedForFileHierarchy(
srcFs, srcs, configuredOwner, FsAction.WRITE, false))) {
return true;
}
} else {
// If the configured owner does not own the file, throw
if (!isOwned) {
throw new HiveException("Load Data failed for " + srcf + " as the file is not owned by "
+ configuredOwner + " and load data is also not ran as " + configuredOwner);
} else {
return true;
}
}
      } catch (IOException e) {
        throw new HiveException("Could not fetch FileStatus for source file " + srcf, e);
      } catch (HiveException e) {
        throw e;
      } catch (Exception e) {
        throw new HiveException("Failed in looking up permissions on file " + srcf, e);
}
}
//Check if different encryption zones
HadoopShims.HdfsEncryptionShim srcHdfsEncryptionShim = SessionState.get().getHdfsEncryptionShim(srcFs);
HadoopShims.HdfsEncryptionShim destHdfsEncryptionShim = SessionState.get().getHdfsEncryptionShim(destFs);
try {
return srcHdfsEncryptionShim != null
&& destHdfsEncryptionShim != null
&& (srcHdfsEncryptionShim.isPathEncrypted(srcf) || destHdfsEncryptionShim.isPathEncrypted(destf))
&& !srcHdfsEncryptionShim.arePathsOnSameEncryptionZone(srcf, destf, destHdfsEncryptionShim);
} catch (IOException e) {
throw new HiveException(e);
}
}
/**
* Copy files. This handles building the mapping for buckets and such between the source and
* destination
* @param conf Configuration object
* @param srcf source directory, if bucketed should contain bucket files
* @param destf directory to move files into
* @param fs Filesystem
* @param isSrcLocal true if source is on local file system
* @param isAcidIUD true if this is an ACID based Insert/Update/Delete
   * @param isOverwrite if true, then overwrite the destination file if it exists, else add a duplicate copy
* @param newFiles if this is non-null, a list of files that were created as a result of this
* move will be returned.
* @param isManaged if table is managed.
* @throws HiveException
*/
static protected void copyFiles(HiveConf conf, Path srcf, Path destf, FileSystem fs,
boolean isSrcLocal, boolean isAcidIUD,
boolean isOverwrite, List<Path> newFiles, boolean isBucketed,
boolean isFullAcidTable, boolean isManaged) throws HiveException {
try {
// create the destination if it does not exist
if (!fs.exists(destf)) {
FileUtils.mkdir(fs, destf, conf);
}
} catch (IOException e) {
throw new HiveException(
"copyFiles: error while checking/creating destination directory!!!",
e);
}
FileStatus[] srcs;
FileSystem srcFs;
try {
srcFs = srcf.getFileSystem(conf);
srcs = srcFs.globStatus(srcf);
} catch (IOException e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException("addFiles: filesystem error in check phase. " + e.getMessage(), e);
}
if (srcs == null) {
LOG.info("No sources specified to move: " + srcf);
return;
// srcs = new FileStatus[0]; Why is this needed?
}
// If we're moving files around for an ACID write then the rules and paths are all different.
// You can blame this on Owen.
if (isAcidIUD) {
moveAcidFiles(srcFs, srcs, destf, newFiles);
} else {
// For ACID non-bucketed case, the filenames have to be in the format consistent with INSERT/UPDATE/DELETE Ops,
      // i.e., like 000000_0, 000001_0_copy_1, 000002_0.gz etc.
// The extension is only maintained for files which are compressed.
copyFiles(conf, fs, srcs, srcFs, destf, isSrcLocal, isOverwrite,
newFiles, isFullAcidTable && !isBucketed, isManaged);
}
}
public static void moveAcidFiles(FileSystem fs, FileStatus[] stats, Path dst,
List<Path> newFiles) throws HiveException {
// The layout for ACID files is table|partname/base|delta|delete_delta/bucket
// We will always only be writing delta files ( except IOW which writes base_X/ ).
// In the buckets created by FileSinkOperator
// it will look like original_bucket/delta|delete_delta/bucket
// (e.g. .../-ext-10004/000000_0/delta_0000014_0000014_0000/bucket_00000). So we need to
// move that into the above structure. For the first mover there will be no delta directory,
// so we can move the whole directory.
// For everyone else we will need to just move the buckets under the existing delta
// directory.
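    // For example (illustrative paths): a bucket file written to
    //   .../-ext-10004/000000_0/delta_0000014_0000014_0000/bucket_00000
    // ends up at
    //   <dst>/delta_0000014_0000014_0000/bucket_00000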
Set<Path> createdDeltaDirs = new HashSet<Path>();
// Open the original path we've been given and find the list of original buckets
for (FileStatus stat : stats) {
Path srcPath = stat.getPath();
LOG.debug("Acid move Looking for original buckets in " + srcPath);
FileStatus[] origBucketStats = null;
try {
origBucketStats = fs.listStatus(srcPath, AcidUtils.originalBucketFilter);
if(origBucketStats == null || origBucketStats.length == 0) {
/**
             check if we are dealing with data with a non-standard layout, for example a write
             produced by an (optimized) Union All query,
which looks like
└── -ext-10000
├── HIVE_UNION_SUBDIR_1
│ └── 000000_0
│ └── delta_0000019_0000019_0001
│ ├── _orc_acid_version
│ └── bucket_00000
├── HIVE_UNION_SUBDIR_2
│ └── 000000_0
│ └── delta_0000019_0000019_0002
│ ├── _orc_acid_version
│ └── bucket_00000
             The assumption is that we either have all data in subdirs or in the root of srcPath,
             but not both.
For Union case, we expect delta dirs to have unique names which is assured by
{@link org.apache.hadoop.hive.ql.optimizer.QueryPlanPostProcessor}
*/
FileStatus[] unionSubdirs = fs.globStatus(new Path(srcPath,
AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "[0-9]*"));
List<FileStatus> buckets = new ArrayList<>();
for(FileStatus unionSubdir : unionSubdirs) {
Collections.addAll(buckets,
fs.listStatus(unionSubdir.getPath(), AcidUtils.originalBucketFilter));
}
origBucketStats = buckets.toArray(new FileStatus[buckets.size()]);
}
} catch (IOException e) {
String msg = "Unable to look for bucket files in src path " + srcPath.toUri().toString();
LOG.error(msg);
throw new HiveException(msg, e);
}
LOG.debug("Acid move found " + origBucketStats.length + " original buckets");
for (FileStatus origBucketStat : origBucketStats) {
Path origBucketPath = origBucketStat.getPath();
moveAcidFiles(AcidUtils.DELTA_PREFIX, AcidUtils.deltaFileFilter,
fs, dst, origBucketPath, createdDeltaDirs, newFiles);
moveAcidFiles(AcidUtils.DELETE_DELTA_PREFIX, AcidUtils.deleteEventDeltaDirFilter,
fs, dst,origBucketPath, createdDeltaDirs, newFiles);
moveAcidFiles(AcidUtils.BASE_PREFIX, AcidUtils.baseFileFilter,//for Insert Overwrite
fs, dst, origBucketPath, createdDeltaDirs, newFiles);
}
}
}
private static void moveAcidFiles(String deltaFileType, PathFilter pathFilter, FileSystem fs,
Path dst, Path origBucketPath, Set<Path> createdDeltaDirs,
List<Path> newFiles) throws HiveException {
LOG.debug("Acid move looking for " + deltaFileType + " files in bucket " + origBucketPath);
FileStatus[] deltaStats = null;
try {
deltaStats = fs.listStatus(origBucketPath, pathFilter);
} catch (IOException e) {
throw new HiveException("Unable to look for " + deltaFileType + " files in original bucket " +
origBucketPath.toUri().toString(), e);
}
LOG.debug("Acid move found " + deltaStats.length + " " + deltaFileType + " files");
for (FileStatus deltaStat : deltaStats) {
Path deltaPath = deltaStat.getPath();
      // Create the delta directory. Don't worry if it already exists,
      // as that likely means another task got to it first. Then move each of the buckets.
      // It would be more efficient to try to move the delta with its buckets, but that is
      // harder to make race-condition proof.
Path deltaDest = new Path(dst, deltaPath.getName());
try {
if (!createdDeltaDirs.contains(deltaDest)) {
try {
if(fs.mkdirs(deltaDest)) {
fs.rename(AcidUtils.OrcAcidVersion.getVersionFilePath(deltaStat.getPath()),
AcidUtils.OrcAcidVersion.getVersionFilePath(deltaDest));
}
createdDeltaDirs.add(deltaDest);
} catch (IOException swallowIt) {
// Don't worry about this, as it likely just means it's already been created.
LOG.info("Unable to create " + deltaFileType + " directory " + deltaDest +
", assuming it already exists: " + swallowIt.getMessage());
}
}
FileStatus[] bucketStats = fs.listStatus(deltaPath, AcidUtils.bucketFileFilter);
LOG.debug("Acid move found " + bucketStats.length + " bucket files");
for (FileStatus bucketStat : bucketStats) {
Path bucketSrc = bucketStat.getPath();
Path bucketDest = new Path(deltaDest, bucketSrc.getName());
final String msg = "Unable to move source " + bucketSrc + " to destination " +
bucketDest;
LOG.info("Moving bucket " + bucketSrc.toUri().toString() + " to " +
bucketDest.toUri().toString());
try {
fs.rename(bucketSrc, bucketDest);
if (newFiles != null) {
newFiles.add(bucketDest);
}
} catch (Exception e) {
throw getHiveException(e, msg);
}
}
} catch (IOException e) {
throw new HiveException("Error moving acid files " + e.getMessage(), e);
}
}
}
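  /*
   * Illustrative sketch of the move performed above (paths are hypothetical):
   * for a source bucket file
   *   .../-ext-10000/000000_0/delta_0000019_0000019_0001/bucket_00000
   * the loop creates <dst>/delta_0000019_0000019_0001 once (a racing task that
   * loses simply finds the directory already created) and then renames the
   * bucket file to <dst>/delta_0000019_0000019_0001/bucket_00000.
   */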
/**
   * Replaces files in the partition with the new data set specified by srcf. Works
   * by renaming the directory of srcf to the destination file.
   * srcf, destf, and tmppath should reside in the same DFS, but oldPath can be in a
   * different DFS.
*
* @param tablePath path of the table. Used to identify permission inheritance.
* @param srcf
* Source directory to be renamed to tmppath. It should be a
* leaf directory where the final data files reside. However it
* could potentially contain subdirectories as well.
* @param destf
* The directory where the final data needs to go
* @param oldPath
   *          The directory of the old data location that needs to be cleaned up. Most of the time it will be the same
   *          as destf, unless it is across FileSystem boundaries.
* @param purge
   *          When set to true, files which need to be deleted are not moved to Trash
* @param isSrcLocal
* If the source directory is LOCAL
* @param newFiles
* Output the list of new files replaced in the destination path
* @param isManaged
* If the table is managed.
*/
private void replaceFiles(Path tablePath, Path srcf, Path destf, Path oldPath, HiveConf conf,
boolean isSrcLocal, boolean purge, List<Path> newFiles, PathFilter deletePathFilter,
boolean isNeedRecycle, boolean isManaged, boolean isInsertOverwrite) throws HiveException {
try {
FileSystem destFs = destf.getFileSystem(conf);
// check if srcf contains nested sub-directories
FileStatus[] srcs;
FileSystem srcFs;
try {
srcFs = srcf.getFileSystem(conf);
srcs = srcFs.globStatus(srcf);
} catch (IOException e) {
throw new HiveException("Getting globStatus " + srcf.toString(), e);
}
// the extra check is required to make ALTER TABLE ... CONCATENATE work
if (oldPath != null && (srcs != null || isInsertOverwrite)) {
deleteOldPathForReplace(destf, oldPath, conf, purge, deletePathFilter, isNeedRecycle);
}
if (srcs == null) {
LOG.info("No sources specified to move: " + srcf);
return;
}
      // first call FileUtils.mkdir to make sure that the destf directory exists; if it
      // does not, it is created
boolean destfExist = FileUtils.mkdir(destFs, destf, conf);
if(!destfExist) {
throw new IOException("Directory " + destf.toString()
+ " does not exist and could not be created.");
}
// Two cases:
      // 1. srcs has only a src directory; when renaming the src directory to destf we may
      //    instead need to copy/move each file under the source directory, to avoid deleting
      //    the destination directory if it is the root of an HDFS encryption zone.
// 2. srcs must be a list of files -- ensured by LoadSemanticAnalyzer
// in both cases, we move the file under destf
if (srcs.length == 1 && srcs[0].isDirectory()) {
if (!moveFile(conf, srcs[0].getPath(), destf, true, isSrcLocal, isManaged)) {
throw new IOException("Error moving: " + srcf + " into: " + destf);
}
// Add file paths of the files that will be moved to the destination if the caller needs it
if (null != newFiles) {
listNewFilesRecursively(destFs, destf, newFiles);
}
} else {
final Map<Future<Boolean>, Path> moveFutures = Maps.newLinkedHashMapWithExpectedSize(srcs.length);
final int moveFilesThreadCount = HiveConf.getIntVar(conf, ConfVars.HIVE_MOVE_FILES_THREAD_COUNT);
final ExecutorService pool = moveFilesThreadCount > 0
? Executors.newFixedThreadPool(
moveFilesThreadCount,
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Replace-Thread-%d").build())
: MoreExecutors.newDirectExecutorService();
final SessionState parentSession = SessionState.get();
      // it's either a file or a glob
for (FileStatus src : srcs) {
Path destFile = new Path(destf, src.getPath().getName());
moveFutures.put(
pool.submit(
new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
SessionState.setCurrentSessionState(parentSession);
return moveFile(
conf, src.getPath(), destFile, true, isSrcLocal, isManaged);
}
}),
destFile);
}
pool.shutdown();
for (Map.Entry<Future<Boolean>, Path> moveFuture : moveFutures.entrySet()) {
boolean moveFailed;
try {
moveFailed = !moveFuture.getKey().get();
} catch (InterruptedException | ExecutionException e) {
pool.shutdownNow();
if (e.getCause() instanceof IOException) {
throw (IOException) e.getCause();
}
if (e.getCause() instanceof HiveException) {
throw (HiveException) e.getCause();
}
throw handlePoolException(pool, e);
}
if (moveFailed) {
throw new IOException("Error moving: " + srcf + " into: " + destf);
}
// Add file paths of the files that will be moved to the destination if the caller needs it
if (null != newFiles) {
newFiles.add(moveFuture.getValue());
}
}
}
} catch (IOException e) {
throw new HiveException(e.getMessage(), e);
}
}
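  /*
   * Illustrative outcome of the two cases above (paths are hypothetical):
   * case 1: srcs = { /tmp/staging/-ext-10000 } (a single directory)
   *   -> one moveFile() call relocates its contents under destf;
   * case 2: srcs = { /tmp/staging/part-0000, /tmp/staging/part-0001 }
   *   -> each file is moved to destf/part-0000, destf/part-0001, ..., possibly
   *      in parallel on HIVE_MOVE_FILES_THREAD_COUNT threads.
   */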
private void deleteOldPathForReplace(Path destPath, Path oldPath, HiveConf conf, boolean purge,
PathFilter pathFilter, boolean isNeedRecycle) throws HiveException {
Utilities.FILE_OP_LOGGER.debug("Deleting old paths for replace in " + destPath
+ " and old path " + oldPath);
boolean isOldPathUnderDestf = false;
try {
FileSystem oldFs = oldPath.getFileSystem(conf);
FileSystem destFs = destPath.getFileSystem(conf);
      // If oldPath is destf or a subdir of it, it should definitely be deleted; otherwise its
      // existing content might result in incorrect (extra) data.
      // It is not clear why HIVE-8750 changed this to not delete oldPath when it is
      // not destf or a subdir of it.
isOldPathUnderDestf = isSubDir(oldPath, destPath, oldFs, destFs, false);
if (isOldPathUnderDestf) {
cleanUpOneDirectoryForReplace(oldPath, oldFs, pathFilter, conf, purge, isNeedRecycle);
}
} catch (IOException e) {
if (isOldPathUnderDestf) {
// if oldPath is a subdir of destf but it could not be cleaned
throw new HiveException("Directory " + oldPath.toString()
+ " could not be cleaned up.", e);
} else {
        // swallow the exception since it won't affect the final result
LOG.warn("Directory " + oldPath.toString() + " cannot be cleaned: " + e, e);
}
}
}
public void cleanUpOneDirectoryForReplace(Path path, FileSystem fs,
PathFilter pathFilter, HiveConf conf, boolean purge, boolean isNeedRecycle) throws IOException, HiveException {
if (isNeedRecycle && conf.getBoolVar(HiveConf.ConfVars.REPLCMENABLED)) {
recycleDirToCmPath(path, purge);
}
FileStatus[] statuses = fs.listStatus(path, pathFilter);
if (statuses == null || statuses.length == 0) {
return;
}
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
String s = "Deleting files under " + path + " for replace: ";
for (FileStatus file : statuses) {
s += file.getPath().getName() + ", ";
}
Utilities.FILE_OP_LOGGER.trace(s);
}
if (!trashFiles(fs, statuses, conf, purge)) {
throw new HiveException("Old path " + path + " has not been cleaned up.");
}
}
/**
* Trashes or deletes all files under a directory. Leaves the directory as is.
* @param fs FileSystem to use
* @param statuses fileStatuses of files to be deleted
   * @param conf hive configuration
   * @param purge when true, skip the trash and delete the files permanently
   * @return true if deletion was successful; false if there was nothing to delete or a move failed
* @throws IOException
*/
public static boolean trashFiles(final FileSystem fs, final FileStatus[] statuses,
final Configuration conf, final boolean purge)
throws IOException {
boolean result = true;
if (statuses == null || statuses.length == 0) {
return false;
}
final List<Future<Boolean>> futures = new LinkedList<>();
final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ?
Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Delete-Thread-%d").build()) : null;
final SessionState parentSession = SessionState.get();
for (final FileStatus status : statuses) {
if (null == pool) {
result &= FileUtils.moveToTrash(fs, status.getPath(), conf, purge);
} else {
futures.add(pool.submit(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
SessionState.setCurrentSessionState(parentSession);
return FileUtils.moveToTrash(fs, status.getPath(), conf, purge);
}
}));
}
}
if (null != pool) {
pool.shutdown();
for (Future<Boolean> future : futures) {
try {
result &= future.get();
} catch (InterruptedException | ExecutionException e) {
LOG.error("Failed to delete: ",e);
pool.shutdownNow();
throw new IOException(e);
}
}
}
return result;
}
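  /*
   * Minimal usage sketch (the directory name is hypothetical):
   *
   *   FileSystem fs = dir.getFileSystem(conf);
   *   boolean ok = Hive.trashFiles(fs, fs.listStatus(dir), conf, false);
   *
   * With purge == false the files may go to the HDFS Trash; with purge == true
   * they are deleted permanently.
   */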
public static boolean isHadoop1() {
return ShimLoader.getMajorVersion().startsWith("0.20");
}
public List<Partition> exchangeTablePartitions(Map<String, String> partitionSpecs,
String sourceDb, String sourceTable, String destDb,
String destinationTableName) throws HiveException {
try {
List<org.apache.hadoop.hive.metastore.api.Partition> partitions =
getMSC().exchange_partitions(partitionSpecs, sourceDb, sourceTable, destDb,
destinationTableName);
return convertFromMetastore(getTable(destDb, destinationTableName), partitions);
} catch (Exception ex) {
LOG.error(StringUtils.stringifyException(ex));
throw new HiveException(ex);
}
}
/**
   * Creates a metastore client. Currently it creates only a JDBC-based client, as
   * file-based store support has been removed.
   *
   * @return a Meta Store Client
   * @throws MetaException
   *           if a working client can't be created
*/
private IMetaStoreClient createMetaStoreClient(boolean allowEmbedded) throws MetaException {
HiveMetaHookLoader hookLoader = new HiveMetaHookLoader() {
@Override
public HiveMetaHook getHook(
org.apache.hadoop.hive.metastore.api.Table tbl)
throws MetaException {
HiveStorageHandler storageHandler = createStorageHandler(tbl);
return storageHandler == null ? null : storageHandler.getMetaHook();
}
};
if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) {
return new SessionHiveMetaStoreClient(conf, hookLoader, allowEmbedded);
} else {
return RetryingMetaStoreClient.getProxy(conf, hookLoader, metaCallTimeMap,
SessionHiveMetaStoreClient.class.getName(), allowEmbedded);
}
}
@Nullable
private HiveStorageHandler createStorageHandler(org.apache.hadoop.hive.metastore.api.Table tbl) throws MetaException {
try {
if (tbl == null) {
return null;
}
HiveStorageHandler storageHandler =
HiveUtils.getStorageHandler(conf, tbl.getParameters().get(META_TABLE_STORAGE));
return storageHandler;
} catch (HiveException ex) {
LOG.error(StringUtils.stringifyException(ex));
throw new MetaException(
"Failed to load storage handler: " + ex.getMessage());
}
}
public static class SchemaException extends MetaException {
private static final long serialVersionUID = 1L;
public SchemaException(String message) {
super(message);
}
}
/**
* @return synchronized metastore client
* @throws MetaException
*/
@LimitedPrivate(value = {"Hive"})
@Unstable
public synchronized SynchronizedMetaStoreClient getSynchronizedMSC() throws MetaException {
if (syncMetaStoreClient == null) {
syncMetaStoreClient = new SynchronizedMetaStoreClient(getMSC(true, false));
}
return syncMetaStoreClient;
}
/**
* @return the metastore client for the current thread
* @throws MetaException
*/
@LimitedPrivate(value = {"Hive"})
@Unstable
public synchronized IMetaStoreClient getMSC() throws MetaException {
return getMSC(true, false);
}
/**
* @return the metastore client for the current thread
* @throws MetaException
*/
@LimitedPrivate(value = {"Hive"})
@Unstable
public synchronized IMetaStoreClient getMSC(
boolean allowEmbedded, boolean forceCreate) throws MetaException {
if (metaStoreClient == null || forceCreate) {
try {
owner = UserGroupInformation.getCurrentUser();
} catch(IOException e) {
String msg = "Error getting current user: " + e.getMessage();
LOG.error(msg, e);
throw new MetaException(msg + "\n" + StringUtils.stringifyException(e));
}
try {
metaStoreClient = createMetaStoreClient(allowEmbedded);
} catch (RuntimeException ex) {
Throwable t = ex.getCause();
while (t != null) {
if (t instanceof JDODataStoreException && t.getMessage() != null
&& t.getMessage().contains("autoCreate")) {
LOG.error("Cannot initialize metastore due to autoCreate error", t);
// DataNucleus wants us to auto-create, but we shall do no such thing.
throw new SchemaException("Hive metastore database is not initialized. Please use "
+ "schematool (e.g. ./schematool -initSchema -dbType ...) to create the schema. If "
+ "needed, don't forget to include the option to auto-create the underlying database"
+ " in your JDBC connection string (e.g. ?createDatabaseIfNotExist=true for mysql)");
}
t = t.getCause();
}
throw ex;
}
String metaStoreUris = conf.getVar(HiveConf.ConfVars.METASTOREURIS);
if (!org.apache.commons.lang3.StringUtils.isEmpty(metaStoreUris)) {
// get a synchronized wrapper if the meta store is remote.
metaStoreClient = HiveMetaStoreClient.newSynchronizedClient(metaStoreClient);
}
}
return metaStoreClient;
}
private static String getUserName() {
return SessionState.getUserFromAuthenticator();
}
private List<String> getGroupNames() {
SessionState ss = SessionState.get();
if (ss != null && ss.getAuthenticator() != null) {
return ss.getAuthenticator().getGroupNames();
}
return null;
}
public static List<FieldSchema> getFieldsFromDeserializer(String name,
Deserializer serde) throws HiveException {
try {
return HiveMetaStoreUtils.getFieldsFromDeserializer(name, serde);
} catch (SerDeException e) {
throw new HiveException("Error in getting fields from serde. "
+ e.getMessage(), e);
} catch (MetaException e) {
throw new HiveException("Error in getting fields from serde."
+ e.getMessage(), e);
}
}
public boolean setPartitionColumnStatistics(
SetPartitionsStatsRequest request) throws HiveException {
try {
ColumnStatistics colStat = request.getColStats().get(0);
ColumnStatisticsDesc statsDesc = colStat.getStatsDesc();
// In case of replication, the request already has valid writeId and valid transaction id
// list obtained from the source. Just use it.
if (request.getWriteId() <= 0 || request.getValidWriteIdList() == null) {
Table tbl = getTable(statsDesc.getDbName(), statsDesc.getTableName());
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl, true);
request.setValidWriteIdList(tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null);
request.setWriteId(tableSnapshot != null ? tableSnapshot.getWriteId() : 0);
}
return getMSC().setPartitionColumnStatistics(request);
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public List<ColumnStatisticsObj> getTableColumnStatistics(
String dbName, String tableName, List<String> colNames, boolean checkTransactional)
throws HiveException {
List<ColumnStatisticsObj> retv = null;
try {
if (checkTransactional) {
Table tbl = getTable(dbName, tableName);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
retv = getMSC().getTableColumnStatistics(dbName, tableName, colNames,
tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null);
} else {
retv = getMSC().getTableColumnStatistics(dbName, tableName, colNames);
}
return retv;
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(
String dbName, String tableName, List<String> partNames, List<String> colNames,
boolean checkTransactional)
throws HiveException {
String writeIdList = null;
try {
if (checkTransactional) {
Table tbl = getTable(dbName, tableName);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
writeIdList = tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null;
}
return getMSC().getPartitionColumnStatistics(
dbName, tableName, partNames, colNames, writeIdList);
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public AggrStats getAggrColStatsFor(String dbName, String tblName,
List<String> colNames, List<String> partName, boolean checkTransactional) {
String writeIdList = null;
try {
if (checkTransactional) {
Table tbl = getTable(dbName, tblName);
AcidUtils.TableSnapshot tableSnapshot = AcidUtils.getTableSnapshot(conf, tbl);
writeIdList = tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null;
}
return getMSC().getAggrColStatsFor(dbName, tblName, colNames, partName, writeIdList);
} catch (Exception e) {
LOG.debug(StringUtils.stringifyException(e));
return new AggrStats(new ArrayList<ColumnStatisticsObj>(),0);
}
}
public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
throws HiveException {
try {
return getMSC().deleteTableColumnStatistics(dbName, tableName, colName);
} catch(Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public boolean deletePartitionColumnStatistics(String dbName, String tableName, String partName,
String colName) throws HiveException {
try {
return getMSC().deletePartitionColumnStatistics(dbName, tableName, partName, colName);
} catch(Exception e) {
LOG.debug(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public Table newTable(String tableName) throws HiveException {
String[] names = Utilities.getDbTableName(tableName);
return new Table(names[0], names[1]);
}
public String getDelegationToken(String owner, String renewer)
throws HiveException{
try {
return getMSC().getDelegationToken(owner, renewer);
} catch(Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void cancelDelegationToken(String tokenStrForm)
throws HiveException {
try {
getMSC().cancelDelegationToken(tokenStrForm);
} catch(Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
/**
* @deprecated use {@link #compact2(String, String, String, String, Map)}
*/
@Deprecated
public void compact(String dbname, String tableName, String partName, String compactType,
Map<String, String> tblproperties) throws HiveException {
compact2(dbname, tableName, partName, compactType, tblproperties);
}
/**
* Enqueue a compaction request. Only 1 compaction for a given resource (db/table/partSpec) can
* be scheduled/running at any given time.
* @param dbname name of the database, if null default will be used.
* @param tableName name of the table, cannot be null
   * @param partName name of the partition; if null the table will be compacted (valid only for
   *                 non-partitioned tables).
* @param compactType major or minor
* @param tblproperties the list of tblproperties to overwrite for this compaction
   * @return id of the new request, or id of an already existing request for the specified resource
* @throws HiveException
*/
public CompactionResponse compact2(String dbname, String tableName, String partName, String compactType,
Map<String, String> tblproperties)
throws HiveException {
try {
CompactionType cr = null;
if ("major".equalsIgnoreCase(compactType)) {
cr = CompactionType.MAJOR;
} else if ("minor".equalsIgnoreCase(compactType)) {
cr = CompactionType.MINOR;
} else {
throw new RuntimeException("Unknown compaction type " + compactType);
}
return getMSC().compact2(dbname, tableName, partName, cr, tblproperties);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
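  /*
   * Usage sketch (database, table and partition names are hypothetical):
   *
   *   Hive db = Hive.get(conf);
   *   CompactionResponse resp =
   *       db.compact2("default", "acid_tbl", "ds=2019-01-01", "major", null);
   *
   * Any compactType other than "major"/"minor" fails with a RuntimeException,
   * as implemented above.
   */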
public ShowCompactResponse showCompactions() throws HiveException {
try {
return getMSC().showCompactions();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public GetOpenTxnsInfoResponse showTransactions() throws HiveException {
try {
return getMSC().showTxns();
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void abortTransactions(List<Long> txnids) throws HiveException {
try {
getMSC().abortTxns(txnids);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
}
}
public void createFunction(Function func) throws HiveException {
try {
getMSC().createFunction(func);
} catch (TException te) {
throw new HiveException(te);
}
}
public void alterFunction(String dbName, String funcName, Function newFunction)
throws HiveException {
try {
getMSC().alterFunction(dbName, funcName, newFunction);
} catch (TException te) {
throw new HiveException(te);
}
}
public void dropFunction(String dbName, String funcName)
throws HiveException {
try {
getMSC().dropFunction(dbName, funcName);
} catch (TException te) {
throw new HiveException(te);
}
}
public Function getFunction(String dbName, String funcName) throws HiveException {
try {
return getMSC().getFunction(dbName, funcName);
} catch (TException te) {
throw new HiveException(te);
}
}
public List<Function> getAllFunctions() throws HiveException {
try {
List<Function> functions = getMSC().getAllFunctions().getFunctions();
return functions == null ? new ArrayList<Function>() : functions;
} catch (TException te) {
throw new HiveException(te);
}
}
public List<String> getFunctions(String dbName, String pattern) throws HiveException {
try {
return getMSC().getFunctions(dbName, pattern);
} catch (TException te) {
throw new HiveException(te);
}
}
public void setMetaConf(String propName, String propValue) throws HiveException {
try {
getMSC().setMetaConf(propName, propValue);
} catch (TException te) {
throw new HiveException(te);
}
}
public String getMetaConf(String propName) throws HiveException {
try {
return getMSC().getMetaConf(propName);
} catch (TException te) {
throw new HiveException(te);
}
}
public void clearMetaCallTiming() {
metaCallTimeMap.clear();
}
public ImmutableMap<String, Long> dumpAndClearMetaCallTiming(String phase) {
boolean phaseInfoLogged = false;
if (LOG.isDebugEnabled()) {
phaseInfoLogged = logDumpPhase(phase);
LOG.debug("Total time spent in each metastore function (ms): " + metaCallTimeMap);
}
if (LOG.isInfoEnabled()) {
      // print information at INFO level about calls that took a long time
for (Entry<String, Long> callTime : metaCallTimeMap.entrySet()) {
// dump information if call took more than 1 sec (1000ms)
if (callTime.getValue() > 1000) {
if (!phaseInfoLogged) {
phaseInfoLogged = logDumpPhase(phase);
}
LOG.info("Total time spent in this metastore function was greater than 1000ms : "
+ callTime);
}
}
}
ImmutableMap<String, Long> result = ImmutableMap.copyOf(metaCallTimeMap);
metaCallTimeMap.clear();
return result;
}
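  /*
   * Usage sketch: callers typically bracket a phase of work, e.g.
   *
   *   hive.clearMetaCallTiming();
   *   // ... run metastore-heavy work ...
   *   ImmutableMap<String, Long> timings = hive.dumpAndClearMetaCallTiming("compilation");
   *
   * The phase string only affects logging; the returned map is a snapshot taken
   * before the internal counters are cleared.
   */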
private boolean logDumpPhase(String phase) {
LOG.info("Dumping metastore api call timing information for : " + phase + " phase");
return true;
}
public Iterable<Map.Entry<Long, ByteBuffer>> getFileMetadata(
List<Long> fileIds) throws HiveException {
try {
return getMSC().getFileMetadata(fileIds);
} catch (TException e) {
throw new HiveException(e);
}
}
public Iterable<Map.Entry<Long, MetadataPpdResult>> getFileMetadataByExpr(
List<Long> fileIds, ByteBuffer sarg, boolean doGetFooters) throws HiveException {
try {
return getMSC().getFileMetadataBySarg(fileIds, sarg, doGetFooters);
} catch (TException e) {
throw new HiveException(e);
}
}
public void clearFileMetadata(List<Long> fileIds) throws HiveException {
try {
getMSC().clearFileMetadata(fileIds);
} catch (TException e) {
throw new HiveException(e);
}
}
public void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) throws HiveException {
try {
getMSC().putFileMetadata(fileIds, metadata);
} catch (TException e) {
throw new HiveException(e);
}
}
public void cacheFileMetadata(
String dbName, String tableName, String partName, boolean allParts) throws HiveException {
try {
boolean willCache = getMSC().cacheFileMetadata(dbName, tableName, partName, allParts);
if (!willCache) {
throw new HiveException(
"Caching file metadata is not supported by metastore or for this file format");
}
} catch (TException e) {
throw new HiveException(e);
}
}
public void dropConstraint(String dbName, String tableName, String constraintName)
throws HiveException, NoSuchObjectException {
try {
getMSC().dropConstraint(dbName, tableName, constraintName);
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLPrimaryKey> getPrimaryKeyList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getPrimaryKeys(new PrimaryKeysRequest(dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLForeignKey> getForeignKeyList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getForeignKeys(new ForeignKeysRequest(null, null, dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLUniqueConstraint> getUniqueConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getUniqueConstraints(new UniqueConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLNotNullConstraint> getNotNullConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getNotNullConstraints(new NotNullConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLDefaultConstraint> getDefaultConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getDefaultConstraints(new DefaultConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<SQLCheckConstraint> getCheckConstraintList(String dbName, String tblName) throws HiveException, NoSuchObjectException {
try {
return getMSC().getCheckConstraints(new CheckConstraintsRequest(getDefaultCatalog(conf),
dbName, tblName));
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all primary key columns associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Primary Key associated with the table.
* @throws HiveException
*/
public PrimaryKeyInfo getPrimaryKeys(String dbName, String tblName) throws HiveException {
return getPrimaryKeys(dbName, tblName, false);
}
/**
* Get primary key columns associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Primary Key associated with the table.
* @throws HiveException
*/
public PrimaryKeyInfo getReliablePrimaryKeys(String dbName, String tblName) throws HiveException {
return getPrimaryKeys(dbName, tblName, true);
}
private PrimaryKeyInfo getPrimaryKeys(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLPrimaryKey> primaryKeys = getMSC().getPrimaryKeys(new PrimaryKeysRequest(dbName, tblName));
if (onlyReliable && primaryKeys != null && !primaryKeys.isEmpty()) {
primaryKeys = primaryKeys.stream()
.filter(pk -> pk.isRely_cstr())
.collect(Collectors.toList());
}
return new PrimaryKeyInfo(primaryKeys, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
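  /*
   * Usage sketch (database and table names are hypothetical):
   *
   *   PrimaryKeyInfo all  = hive.getPrimaryKeys("default", "orders");
   *   PrimaryKeyInfo rely = hive.getReliablePrimaryKeys("default", "orders");
   *
   * The second call keeps only constraints with RELY set, i.e. the ones the
   * optimizer may depend on.
   */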
/**
* Get all foreign keys associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Foreign keys associated with the table.
* @throws HiveException
*/
public ForeignKeyInfo getForeignKeys(String dbName, String tblName) throws HiveException {
return getForeignKeys(dbName, tblName, false);
}
/**
* Get foreign keys associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Foreign keys associated with the table.
* @throws HiveException
*/
public ForeignKeyInfo getReliableForeignKeys(String dbName, String tblName) throws HiveException {
return getForeignKeys(dbName, tblName, true);
}
private ForeignKeyInfo getForeignKeys(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLForeignKey> foreignKeys = getMSC().getForeignKeys(new ForeignKeysRequest(null, null, dbName, tblName));
if (onlyReliable && foreignKeys != null && !foreignKeys.isEmpty()) {
foreignKeys = foreignKeys.stream()
.filter(fk -> fk.isRely_cstr())
.collect(Collectors.toList());
}
return new ForeignKeyInfo(foreignKeys, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all unique constraints associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Unique constraints associated with the table.
* @throws HiveException
*/
public UniqueConstraint getUniqueConstraints(String dbName, String tblName) throws HiveException {
return getUniqueConstraints(dbName, tblName, false);
}
/**
* Get unique constraints associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Unique constraints associated with the table.
* @throws HiveException
*/
public UniqueConstraint getReliableUniqueConstraints(String dbName, String tblName) throws HiveException {
return getUniqueConstraints(dbName, tblName, true);
}
private UniqueConstraint getUniqueConstraints(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLUniqueConstraint> uniqueConstraints = getMSC().getUniqueConstraints(
new UniqueConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (onlyReliable && uniqueConstraints != null && !uniqueConstraints.isEmpty()) {
uniqueConstraints = uniqueConstraints.stream()
.filter(uk -> uk.isRely_cstr())
.collect(Collectors.toList());
}
return new UniqueConstraint(uniqueConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get all not null constraints associated with the table.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Not null constraints associated with the table.
* @throws HiveException
*/
public NotNullConstraint getNotNullConstraints(String dbName, String tblName) throws HiveException {
return getNotNullConstraints(dbName, tblName, false);
}
/**
* Get not null constraints associated with the table that are available for optimization.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Not null constraints associated with the table.
* @throws HiveException
*/
public NotNullConstraint getReliableNotNullConstraints(String dbName, String tblName) throws HiveException {
return getNotNullConstraints(dbName, tblName, true);
}
/**
* Get not null constraints associated with the table that are enabled/enforced.
*
* @param dbName Database Name
* @param tblName Table Name
* @return Not null constraints associated with the table.
* @throws HiveException
*/
public NotNullConstraint getEnabledNotNullConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLNotNullConstraint> notNullConstraints = getMSC().getNotNullConstraints(
new NotNullConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (notNullConstraints != null && !notNullConstraints.isEmpty()) {
notNullConstraints = notNullConstraints.stream()
.filter(nnc -> nnc.isEnable_cstr())
.collect(Collectors.toList());
}
return new NotNullConstraint(notNullConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get CHECK constraints associated with the table that are enabled
*
* @param dbName Database Name
* @param tblName Table Name
* @return CHECK constraints associated with the table.
* @throws HiveException
*/
public CheckConstraint getEnabledCheckConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLCheckConstraint> checkConstraints = getMSC().getCheckConstraints(
new CheckConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (checkConstraints != null && !checkConstraints.isEmpty()) {
checkConstraints = checkConstraints.stream()
.filter(nnc -> nnc.isEnable_cstr())
.collect(Collectors.toList());
}
return new CheckConstraint(checkConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Get Default constraints associated with the table that are enabled
*
* @param dbName Database Name
* @param tblName Table Name
* @return Default constraints associated with the table.
* @throws HiveException
*/
public DefaultConstraint getEnabledDefaultConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLDefaultConstraint> defaultConstraints = getMSC().getDefaultConstraints(
new DefaultConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (defaultConstraints != null && !defaultConstraints.isEmpty()) {
defaultConstraints = defaultConstraints.stream()
.filter(nnc -> nnc.isEnable_cstr())
.collect(Collectors.toList());
}
return new DefaultConstraint(defaultConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
private NotNullConstraint getNotNullConstraints(String dbName, String tblName, boolean onlyReliable)
throws HiveException {
try {
List<SQLNotNullConstraint> notNullConstraints = getMSC().getNotNullConstraints(
new NotNullConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (onlyReliable && notNullConstraints != null && !notNullConstraints.isEmpty()) {
notNullConstraints = notNullConstraints.stream()
.filter(nnc -> nnc.isRely_cstr())
.collect(Collectors.toList());
}
return new NotNullConstraint(notNullConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
public DefaultConstraint getDefaultConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLDefaultConstraint> defaultConstraints = getMSC().getDefaultConstraints(
new DefaultConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (defaultConstraints != null && !defaultConstraints.isEmpty()) {
defaultConstraints = defaultConstraints.stream()
.collect(Collectors.toList());
}
return new DefaultConstraint(defaultConstraints, tblName, dbName);
} catch (Exception e) {
throw new HiveException(e);
}
}
public CheckConstraint getCheckConstraints(String dbName, String tblName)
throws HiveException {
try {
List<SQLCheckConstraint> checkConstraints = getMSC().getCheckConstraints(
new CheckConstraintsRequest(getDefaultCatalog(conf), dbName, tblName));
if (checkConstraints != null && !checkConstraints.isEmpty()) {
checkConstraints = checkConstraints.stream()
.collect(Collectors.toList());
}
return new CheckConstraint(checkConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addPrimaryKey(List<SQLPrimaryKey> primaryKeyCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addPrimaryKey(primaryKeyCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addForeignKey(List<SQLForeignKey> foreignKeyCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addForeignKey(foreignKeyCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addUniqueConstraint(List<SQLUniqueConstraint> uniqueConstraintCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addUniqueConstraint(uniqueConstraintCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addNotNullConstraint(List<SQLNotNullConstraint> notNullConstraintCols)
throws HiveException, NoSuchObjectException {
try {
getMSC().addNotNullConstraint(notNullConstraintCols);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addDefaultConstraint(List<SQLDefaultConstraint> defaultConstraints)
throws HiveException, NoSuchObjectException {
try {
getMSC().addDefaultConstraint(defaultConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void addCheckConstraint(List<SQLCheckConstraint> checkConstraints)
throws HiveException, NoSuchObjectException {
try {
getMSC().addCheckConstraint(checkConstraints);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createResourcePlan(WMResourcePlan resourcePlan, String copyFromName, boolean ifNotExists)
throws HiveException {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (resourcePlan.isSetNs() && !ns.equals(resourcePlan.getNs())) {
throw new HiveException("Cannot create a plan in a different NS; was "
+ resourcePlan.getNs() + ", configured " + ns);
}
resourcePlan.setNs(ns);
try {
getMSC().createResourcePlan(resourcePlan, copyFromName);
} catch (AlreadyExistsException e) {
if (!ifNotExists) {
throw new HiveException(e, ErrorMsg.RESOURCE_PLAN_ALREADY_EXISTS, resourcePlan.getName());
}
} catch (Exception e) {
throw new HiveException(e);
}
}
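  /*
   * Sketch of the namespace check above (the plan name is hypothetical): with
   * hive.server2.wm.namespace=ns1, a WMResourcePlan whose ns is unset or equal
   * to "ns1" is stamped with "ns1" and created; any other ns fails fast with a
   * HiveException. The same pattern is applied to triggers, pools and mappings
   * below.
   */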
public WMFullResourcePlan getResourcePlan(String rpName) throws HiveException {
try {
return getMSC().getResourcePlan(rpName, conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (NoSuchObjectException e) {
return null;
} catch (Exception e) {
throw new HiveException(e);
}
}
public List<WMResourcePlan> getAllResourcePlans() throws HiveException {
try {
return getMSC().getAllResourcePlans(conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropResourcePlan(String rpName, boolean ifExists) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
getMSC().dropResourcePlan(rpName, ns);
} catch (NoSuchObjectException e) {
if (!ifExists) {
throw new HiveException(e, ErrorMsg.RESOURCE_PLAN_NOT_EXISTS, rpName);
}
} catch (Exception e) {
throw new HiveException(e);
}
}
public WMFullResourcePlan alterResourcePlan(String rpName, WMNullableResourcePlan resourcePlan,
boolean canActivateDisabled, boolean isForceDeactivate, boolean isReplace) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (resourcePlan.isSetNs() && !ns.equals(resourcePlan.getNs())) {
throw new HiveException("Cannot modify a plan in a different NS; was "
+ resourcePlan.getNs() + ", configured " + ns);
}
resourcePlan.setNs(ns);
return getMSC().alterResourcePlan(rpName, ns, resourcePlan, canActivateDisabled,
isForceDeactivate, isReplace);
} catch (Exception e) {
throw new HiveException(e);
}
}
public WMFullResourcePlan getActiveResourcePlan() throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
return getMSC().getActiveResourcePlan(ns);
} catch (Exception e) {
throw new HiveException(e);
}
}
public WMValidateResourcePlanResponse validateResourcePlan(String rpName) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
return getMSC().validateResourcePlan(rpName, ns);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createWMTrigger(WMTrigger trigger) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (trigger.isSetNs() && !ns.equals(trigger.getNs())) {
throw new HiveException("Cannot create a trigger in a different NS; was "
+ trigger.getNs() + ", configured " + ns);
}
trigger.setNs(ns);
getMSC().createWMTrigger(trigger);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void alterWMTrigger(WMTrigger trigger) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (trigger.isSetNs() && !ns.equals(trigger.getNs())) {
throw new HiveException("Cannot modify a trigger in a different NS; was "
+ trigger.getNs() + ", configured " + ns);
}
trigger.setNs(ns);
getMSC().alterWMTrigger(trigger);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropWMTrigger(String rpName, String triggerName) throws HiveException {
try {
getMSC().dropWMTrigger(rpName, triggerName, conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createWMPool(WMPool pool) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (pool.isSetNs() && !ns.equals(pool.getNs())) {
throw new HiveException("Cannot create a pool in a different NS; was "
+ pool.getNs() + ", configured " + ns);
}
pool.setNs(ns);
getMSC().createWMPool(pool);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void alterWMPool(WMNullablePool pool, String poolPath) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (pool.isSetNs() && !ns.equals(pool.getNs())) {
throw new HiveException("Cannot modify a pool in a different NS; was "
+ pool.getNs() + ", configured " + ns);
}
pool.setNs(ns);
getMSC().alterWMPool(pool, poolPath);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropWMPool(String resourcePlanName, String poolPath) throws HiveException {
try {
getMSC().dropWMPool(resourcePlanName, poolPath,
conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
public void createOrUpdateWMMapping(WMMapping mapping, boolean isUpdate)
throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (mapping.isSetNs() && !ns.equals(mapping.getNs())) {
throw new HiveException("Cannot create a mapping in a different NS; was "
+ mapping.getNs() + ", configured " + ns);
}
mapping.setNs(ns);
getMSC().createOrUpdateWMMapping(mapping, isUpdate);
} catch (Exception e) {
throw new HiveException(e);
}
}
public void dropWMMapping(WMMapping mapping) throws HiveException {
try {
String ns = conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE);
if (mapping.isSetNs() && !ns.equals(mapping.getNs())) {
throw new HiveException("Cannot modify a mapping in a different NS; was "
+ mapping.getNs() + ", configured " + ns);
}
mapping.setNs(ns);
getMSC().dropWMMapping(mapping);
} catch (Exception e) {
throw new HiveException(e);
}
}
// TODO: eh
public void createOrDropTriggerToPoolMapping(String resourcePlanName, String triggerName,
String poolPath, boolean shouldDrop) throws HiveException {
try {
getMSC().createOrDropTriggerToPoolMapping(resourcePlanName, triggerName, poolPath,
shouldDrop, conf.getVar(ConfVars.HIVE_SERVER2_WM_NAMESPACE));
} catch (Exception e) {
throw new HiveException(e);
}
}
@Nullable
public StorageHandlerInfo getStorageHandlerInfo(Table table)
throws HiveException {
try {
HiveStorageHandler storageHandler = createStorageHandler(table.getTTable());
return storageHandler == null ? null : storageHandler.getStorageHandlerInfo(table.getTTable());
} catch (Exception e) {
throw new HiveException(e);
}
}
}
| HIVE-21717 : Rename is failing for directory in move task. (Mahesh Kumar Behera, reviewed by Sankar Hariappan)
| ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java | HIVE-21717 : Rename is failing for directory in move task. (Mahesh Kumar Behera, reviewed by Sankar Hariappan)
<ide> }
<ide> }
<ide>
<add> private static void deleteAndRename(FileSystem destFs, Path destFile, FileStatus srcStatus, Path destPath)
<add> throws IOException {
<add> if (destFs.exists(destFile)) {
<add> // rename cannot overwrite non empty destination directory, so deleting the destination before renaming.
<add> destFs.delete(destFile);
<add> LOG.info("Deleting destination file" + destFile.toUri());
<add> }
<add> if(!destFs.rename(srcStatus.getPath(), destFile)) {
<add> throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest:"
<add> + destPath + " returned false");
<add> }
<add> }
<add>
<ide> //it is assumed that parent directory of the destf should already exist when this
<ide> //method is called. when the replace value is true, this method works a little different
<ide> //from mv command if the destf is a directory, it replaces the destf instead of moving under
<ide> "Unable to move source " + srcStatus.getPath() + " to destination " + destFile;
<ide>
<ide> if (null == pool) {
<del> boolean success = false;
<del> if (destFs instanceof DistributedFileSystem) {
<del> ((DistributedFileSystem)destFs).rename(srcStatus.getPath(), destFile, Options.Rename.OVERWRITE);
<del> success = true;
<del> } else {
<del> destFs.delete(destFile, false);
<del> success = destFs.rename(srcStatus.getPath(), destFile);
<del> }
<del> if(!success) {
<del> throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest:"
<del> + destf + " returned false");
<del> }
<add> deleteAndRename(destFs, destFile, srcStatus, destf);
<ide> } else {
<ide> futures.add(pool.submit(new Callable<Void>() {
<ide> @Override
<ide> public Void call() throws HiveException {
<ide> SessionState.setCurrentSessionState(parentSession);
<ide> try {
<del> boolean success = false;
<del> if (destFs instanceof DistributedFileSystem) {
<del> ((DistributedFileSystem)destFs).rename(srcStatus.getPath(), destFile, Options.Rename.OVERWRITE);
<del> success = true;
<del> } else {
<del> destFs.delete(destFile, false);
<del> success = destFs.rename(srcStatus.getPath(), destFile);
<del> }
<del> if (!success) {
<del> throw new IOException(
<del> "rename for src path: " + srcStatus.getPath() + " to dest path:"
<del> + destFile + " returned false");
<del> }
<add> deleteAndRename(destFs, destFile, srcStatus, destf);
<ide> } catch (Exception e) {
<ide> throw getHiveException(e, poolMsg);
<ide> } |
|
Java | mit | c7c922c6db8d8ab1dd9f5a561cc454d6c041c267 | 0 | bcgit/bc-java,bcgit/bc-java,isghe/bc-java,open-keychain/spongycastle,isghe/bc-java,open-keychain/spongycastle,open-keychain/spongycastle,Skywalker-11/spongycastle,isghe/bc-java,onessimofalconi/bc-java,Skywalker-11/spongycastle,onessimofalconi/bc-java,onessimofalconi/bc-java,Skywalker-11/spongycastle,bcgit/bc-java | package org.bouncycastle.asn1.cms.ecc;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1Object;
import org.bouncycastle.asn1.ASN1OctetString;
import org.bouncycastle.asn1.ASN1Primitive;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.ASN1TaggedObject;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.DERTaggedObject;
import org.bouncycastle.asn1.cms.OriginatorPublicKey;
/**
* <a href="http://tools.ietf.org/html/rfc5753">RFC 5753/3278</a>: MQVuserKeyingMaterial object.
* <pre>
* MQVuserKeyingMaterial ::= SEQUENCE {
* ephemeralPublicKey OriginatorPublicKey,
* addedukm [0] EXPLICIT UserKeyingMaterial OPTIONAL }
* </pre>
*/
public class MQVuserKeyingMaterial
extends ASN1Object
{
private OriginatorPublicKey ephemeralPublicKey;
private ASN1OctetString addedukm;
public MQVuserKeyingMaterial(
OriginatorPublicKey ephemeralPublicKey,
ASN1OctetString addedukm)
{
if (ephemeralPublicKey == null)
{
throw new IllegalArgumentException("Ephemeral public key cannot be null");
}
this.ephemeralPublicKey = ephemeralPublicKey;
this.addedukm = addedukm;
}
private MQVuserKeyingMaterial(
ASN1Sequence seq)
{
if (seq.size() != 1 && seq.size() != 2)
{
throw new IllegalArgumentException("Sequence has incorrect number of elements");
}
this.ephemeralPublicKey = OriginatorPublicKey.getInstance(
seq.getObjectAt(0));
if (seq.size() > 1)
{
this.addedukm = ASN1OctetString.getInstance(
(ASN1TaggedObject)seq.getObjectAt(1), true);
}
}
/**
* Return an MQVuserKeyingMaterial object from a tagged object.
*
* @param obj the tagged object holding the object we want.
* @param explicit true if the object is meant to be explicitly
* tagged false otherwise.
* @throws IllegalArgumentException if the object held by the
* tagged object cannot be converted.
*/
public static MQVuserKeyingMaterial getInstance(
ASN1TaggedObject obj,
boolean explicit)
{
return getInstance(ASN1Sequence.getInstance(obj, explicit));
}
/**
* Return an MQVuserKeyingMaterial object from the given object.
* <p>
* Accepted inputs:
* <ul>
* <li> null → null
* <li> {@link MQVuserKeyingMaterial} object
* <li> {@link org.bouncycastle.asn1.ASN1Sequence ASN1Sequence} with MQVuserKeyingMaterial inside it.
* </ul>
*
* @param obj the object we want converted.
* @throws IllegalArgumentException if the object cannot be converted.
*/
public static MQVuserKeyingMaterial getInstance(
Object obj)
{
if (obj instanceof MQVuserKeyingMaterial)
{
return (MQVuserKeyingMaterial)obj;
}
else if (obj != null)
{
return new MQVuserKeyingMaterial(ASN1Sequence.getInstance(obj));
}
return null;
}
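    /*
     * Decoding sketch (the encoded byte[] is hypothetical):
     *
     *   MQVuserKeyingMaterial ukm = MQVuserKeyingMaterial.getInstance(
     *       ASN1Primitive.fromByteArray(encoded));
     *
     * ASN1Primitive.fromByteArray() yields the ASN1Sequence that the
     * getInstance(Object) overload above converts.
     */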
public OriginatorPublicKey getEphemeralPublicKey()
{
return ephemeralPublicKey;
}
public ASN1OctetString getAddedukm()
{
return addedukm;
}
/**
* Produce an object suitable for an ASN1OutputStream.
*/
public ASN1Primitive toASN1Primitive()
{
ASN1EncodableVector v = new ASN1EncodableVector();
v.add(ephemeralPublicKey);
if (addedukm != null)
{
v.add(new DERTaggedObject(true, 0, addedukm));
}
return new DERSequence(v);
}
}
| core/src/main/java/org/bouncycastle/asn1/cms/ecc/MQVuserKeyingMaterial.java | package org.bouncycastle.asn1.cms.ecc;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1Object;
import org.bouncycastle.asn1.ASN1OctetString;
import org.bouncycastle.asn1.ASN1Primitive;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.ASN1TaggedObject;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.DERTaggedObject;
import org.bouncycastle.asn1.cms.OriginatorPublicKey;
/**
* <a href="http://tools.ietf.org/html/rfc5753">RFC 5753/3278</a>: MQVuserKeyingMaterial object.
* <pre>
* MQVuserKeyingMaterial ::= SEQUENCE {
* ephemeralPublicKey OriginatorPublicKey,
* addedukm [0] EXPLICIT UserKeyingMaterial OPTIONAL }
* </pre>
*/
public class MQVuserKeyingMaterial
extends ASN1Object
{
private OriginatorPublicKey ephemeralPublicKey;
private ASN1OctetString addedukm;
public MQVuserKeyingMaterial(
OriginatorPublicKey ephemeralPublicKey,
ASN1OctetString addedukm)
{
// TODO Check ephemeralPublicKey not null
this.ephemeralPublicKey = ephemeralPublicKey;
this.addedukm = addedukm;
}
private MQVuserKeyingMaterial(
ASN1Sequence seq)
{
// TODO Check seq has either 1 or 2 elements
this.ephemeralPublicKey = OriginatorPublicKey.getInstance(
seq.getObjectAt(0));
if (seq.size() > 1)
{
this.addedukm = ASN1OctetString.getInstance(
(ASN1TaggedObject)seq.getObjectAt(1), true);
}
}
/**
* Return an MQVuserKeyingMaterial object from a tagged object.
*
* @param obj the tagged object holding the object we want.
* @param explicit true if the object is meant to be explicitly
* tagged false otherwise.
* @throws IllegalArgumentException if the object held by the
* tagged object cannot be converted.
*/
public static MQVuserKeyingMaterial getInstance(
ASN1TaggedObject obj,
boolean explicit)
{
return getInstance(ASN1Sequence.getInstance(obj, explicit));
}
/**
* Return an MQVuserKeyingMaterial object from the given object.
* <p>
* Accepted inputs:
* <ul>
* <li> null → null
* <li> {@link MQVuserKeyingMaterial} object
* <li> {@link org.bouncycastle.asn1.ASN1Sequence ASN1Sequence} with MQVuserKeyingMaterial inside it.
* </ul>
*
* @param obj the object we want converted.
* @throws IllegalArgumentException if the object cannot be converted.
*/
public static MQVuserKeyingMaterial getInstance(
Object obj)
{
if (obj instanceof MQVuserKeyingMaterial)
{
return (MQVuserKeyingMaterial)obj;
}
else if (obj != null)
{
return new MQVuserKeyingMaterial(ASN1Sequence.getInstance(obj));
}
return null;
}
public OriginatorPublicKey getEphemeralPublicKey()
{
return ephemeralPublicKey;
}
public ASN1OctetString getAddedukm()
{
return addedukm;
}
/**
* Produce an object suitable for an ASN1OutputStream.
*/
public ASN1Primitive toASN1Primitive()
{
ASN1EncodableVector v = new ASN1EncodableVector();
v.add(ephemeralPublicKey);
if (addedukm != null)
{
v.add(new DERTaggedObject(true, 0, addedukm));
}
return new DERSequence(v);
}
}
| fixed TODOs
| core/src/main/java/org/bouncycastle/asn1/cms/ecc/MQVuserKeyingMaterial.java | fixed TODOs | <ide><path>ore/src/main/java/org/bouncycastle/asn1/cms/ecc/MQVuserKeyingMaterial.java
<ide> OriginatorPublicKey ephemeralPublicKey,
<ide> ASN1OctetString addedukm)
<ide> {
<del> // TODO Check ephemeralPublicKey not null
<del>
<add> if (ephemeralPublicKey == null)
<add> {
<add> throw new IllegalArgumentException("Ephemeral public key cannot be null");
<add> }
<add>
<ide> this.ephemeralPublicKey = ephemeralPublicKey;
<ide> this.addedukm = addedukm;
<ide> }
<ide> private MQVuserKeyingMaterial(
<ide> ASN1Sequence seq)
<ide> {
<del> // TODO Check seq has either 1 or 2 elements
<add> if (seq.size() != 1 && seq.size() != 2)
<add> {
<add> throw new IllegalArgumentException("Sequence has incorrect number of elements");
<add> }
<ide>
<ide> this.ephemeralPublicKey = OriginatorPublicKey.getInstance(
<ide> seq.getObjectAt(0)); |
|
JavaScript | mit | bafe512e4f5d12e0d8f371d4d48eca3c4421a6bf | 0 | stephanebachelier/superapi-cache | 'use strict'
import readCache from './read-cache'
import serialize from './serialize'
import memoryStore from './memory'
function cache (config = {}) {
const store = config.store || memoryStore
const key = config.key || cache.key
if (!store) {
    throw new Error('Cache middleware needs to be provided a store.')
}
config.maxAge = config.maxAge || 0
config.readCache = config.readCache || readCache
config.serialize = config.serialize || serialize
config.exclude = config.exclude || []
if (config.log !== false) {
config.log = typeof config.log === 'function' ? config.log : console.log.bind(console)
}
return (req, next, service) => {
if (service) {
const useCache = !service.use || (service.use && (service.use.cache !== false))
if (!useCache) {
return null
}
}
    // do not cache requests with a query string
if (req.url.match(/\?.*$/)) {
return null
}
let found = false
config.exclude.forEach(regexp => {
if (req.url.match(regexp)) {
found = true
return false
}
})
if (found) {
return null
}
const uuid = key(req)
    // clear the cache entry if the method is not GET
if (req.method.toLowerCase() !== 'get') {
store.removeItem(uuid)
return null
}
const f = () => {
return next()
.then(res => {
return store.setItem(uuid, {
expires: config.maxAge === 0 ? 0 : Date.now() + config.maxAge,
data: config.serialize(req, res)
})
})
}
return store.getItem(uuid).then(value => {
return config.readCache(req, config.log)(value)
.catch(err => {
// clean up cache if stale
err.reason === 'cache-stale' ? store.removeItem(uuid).then(f) : f()
})
})
}
}
cache.readCache = readCache
cache.serialize = serialize
cache.key = function (req) {
return req.url
}
export default cache
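
// Usage sketch (the `api` service object and the option values are hypothetical):
//
//   import cache from './index'
//
//   api.use(cache({
//     maxAge: 15 * 60 * 1000,          // entries become stale after 15 minutes
//     exclude: [/^\/auth/, /\/live$/]  // matching URLs bypass the cache
//   }))
//
// With no options, an in-memory store is used and entries never expire
// (maxAge defaults to 0).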
| lib/index.js | 'use strict'
import readCache from './read-cache'
import serialize from './serialize'
import memoryStore from './memory'
function cache (config = {}) {
const store = config.store || memoryStore
const key = config.key || cache.key
if (!store) {
    throw new Error('Cache middleware needs to be provided a store.')
}
config.maxAge = config.maxAge || 0
config.readCache = config.readCache || readCache
config.serialize = config.serialize || serialize
if (config.log !== false) {
config.log = typeof config.log === 'function' ? config.log : console.log.bind(console)
}
return (req, next, service) => {
if (service) {
const useCache = !service.use || (service.use && (service.use.cache !== false))
if (!useCache) {
return null
}
}
    // do not cache requests with a query string
if (req.url.match(/\?.*$/)) {
return null
}
const uuid = key(req)
    // clear the cache entry if the method is not GET
if (req.method.toLowerCase() !== 'get') {
store.removeItem(uuid)
return null
}
const f = () => {
return next()
.then(res => {
return store.setItem(uuid, {
expires: config.maxAge === 0 ? 0 : Date.now() + config.maxAge,
data: config.serialize(req, res)
})
})
}
return store.getItem(uuid).then(value => {
return config.readCache(req, config.log)(value)
.catch(err => {
// clean up cache if stale
err.reason === 'cache-stale' ? store.removeItem(uuid).then(f) : f()
})
})
}
}
cache.readCache = readCache
cache.serialize = serialize
cache.key = function (req) {
return req.url
}
export default cache
| feat(cache): add support for path exclusion
| lib/index.js | feat(cache): add support for path exclusion | <ide><path>ib/index.js
<ide> config.readCache = config.readCache || readCache
<ide> config.serialize = config.serialize || serialize
<ide>
<add> config.exclude = config.exclude || []
<add>
<ide> if (config.log !== false) {
<ide> config.log = typeof config.log === 'function' ? config.log : console.log.bind(console)
<ide> }
<ide>
<ide> // do not cache request with query
<ide> if (req.url.match(/\?.*$/)) {
<add> return null
<add> }
<add>
<add> let found = false
<add>
<add> config.exclude.forEach(regexp => {
<add> if (req.url.match(regexp)) {
<add> found = true
<add> return false
<add> }
<add> })
<add>
<add> if (found) {
<ide> return null
<ide> }
<ide> |
|
Java | apache-2.0 | cae7dfa7fe71418ef4fade872a8721a027dd2eb8 | 0 | paul-callahan/storm-cassandra-cql,pabrahamsson/storm-cassandra-cql,hmsonline/storm-cassandra-cql,bflad/storm-cassandra-cql,hpcc-systems/storm-cassandra-cql | package com.hmsonline.trident.cql.example.wordcount;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
import backtype.storm.generated.StormTopology;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.hmsonline.trident.cql.CassandraCqlMapState;
import com.hmsonline.trident.cql.MapConfiguredCqlClientFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import storm.trident.TridentState;
import storm.trident.TridentTopology;
import storm.trident.operation.builtin.FilterNull;
import storm.trident.operation.builtin.MapGet;
import storm.trident.operation.builtin.Sum;
import storm.trident.testing.FixedBatchSpout;
import storm.trident.testing.Split;
public class WordCountTopology {
private static final Logger LOG = LoggerFactory.getLogger(WordCountTopology.class);
@SuppressWarnings("unchecked")
public static StormTopology buildWordCountAndSourceTopology(LocalDRPC drpc) {
LOG.info("Building topology.");
TridentTopology topology = new TridentTopology();
String source1 = "spout1";
String source2 = "spout2";
FixedBatchSpout spout1 = new FixedBatchSpout(new Fields("sentence", "source"), 3,
new Values("the cow jumped over the moon", source1),
new Values("the man went to the store and bought some candy", source1),
new Values("four score and four years ago", source2),
new Values("how much wood can a wood chuck chuck", source2));
spout1.setCycle(true);
TridentState wordCounts =
topology.newStream("spout1", spout1)
.each(new Fields("sentence"), new Split(), new Fields("word"))
.groupBy(new Fields("word", "source"))
.persistentAggregate(CassandraCqlMapState.nonTransactional(new WordCountAndSourceMapper()),
new IntegerCount(), new Fields("count"))
.parallelismHint(6);
topology.newDRPCStream("words", drpc)
.each(new Fields("args"), new Split(), new Fields("word"))
.groupBy(new Fields("word"))
.stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"))
.each(new Fields("count"), new FilterNull())
.aggregate(new Fields("count"), new Sum(), new Fields("sum"));
return topology.build();
}
public static void main(String[] args) throws Exception {
final Config configuration = new Config();
configuration.put(MapConfiguredCqlClientFactory.TRIDENT_CASSANDRA_CQL_HOSTS, "localhost");
final LocalCluster cluster = new LocalCluster();
LocalDRPC client = new LocalDRPC();
LOG.info("Submitting topology.");
cluster.submitTopology("cqlexample", configuration, buildWordCountAndSourceTopology(client));
LOG.info("Topology submitted.");
Thread.sleep(10000);
LOG.info("DRPC Query: Word Count [cat, dog, the, man]: {}", client.execute("words", "cat dog the man"));
cluster.shutdown();
client.shutdown();
}
}
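// Usage sketch (illustrative, not part of the original source): querying the
// same "words" DRPC function against a remote cluster instead of the LocalDRPC
// used in main() above. The host is a placeholder and 3772 is Storm's default
// DRPC port; this assumes the classic backtype.storm client API.
class WordCountDrpcQueryExample {
    public static void main(String[] args) throws Exception {
        backtype.storm.utils.DRPCClient drpc = new backtype.storm.utils.DRPCClient("drpc-host.example.com", 3772);
        // Each word in the argument string is counted across all sources.
        System.out.println(drpc.execute("words", "cat dog the man"));
    }
}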
| src/test/java/com/hmsonline/trident/cql/example/wordcount/WordCountTopology.java | package com.hmsonline.trident.cql.example.wordcount;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
import backtype.storm.generated.StormTopology;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.hmsonline.trident.cql.CassandraCqlMapState;
import com.hmsonline.trident.cql.MapConfiguredCqlClientFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import storm.trident.TridentState;
import storm.trident.TridentTopology;
import storm.trident.operation.builtin.FilterNull;
import storm.trident.operation.builtin.MapGet;
import storm.trident.operation.builtin.Sum;
import storm.trident.testing.FixedBatchSpout;
import storm.trident.testing.Split;
public class WordCountTopology {
private static final Logger LOG = LoggerFactory.getLogger(WordCountTopology.class);
@SuppressWarnings("unchecked")
public static StormTopology buildWordCountAndSourceTopology(LocalDRPC drpc) {
LOG.info("Building topology.");
TridentTopology topology = new TridentTopology();
String source1 = "spout1";
String source2 = "spout2";
FixedBatchSpout spout1 = new FixedBatchSpout(new Fields("sentence", "source"), 3,
new Values("the cow jumped over the moon", source1),
new Values("the man went to the store and bought some candy", source1),
new Values("four score and seven years ago", source2),
new Values("how many apples can you eat", source2));
spout1.setCycle(true);
TridentState wordCounts =
topology.newStream("spout1", spout1)
.each(new Fields("sentence"), new Split(), new Fields("word"))
.groupBy(new Fields("word", "source"))
.persistentAggregate(CassandraCqlMapState.nonTransactional(new WordCountAndSourceMapper()),
new IntegerCount(), new Fields("count"))
.parallelismHint(6);
topology.newDRPCStream("words", drpc)
.each(new Fields("args"), new Split(), new Fields("word"))
.groupBy(new Fields("word"))
.stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"))
.each(new Fields("count"), new FilterNull())
.aggregate(new Fields("count"), new Sum(), new Fields("sum"));
return topology.build();
}
public static void main(String[] args) throws Exception {
final Config configuration = new Config();
configuration.put(MapConfiguredCqlClientFactory.TRIDENT_CASSANDRA_CQL_HOSTS, "localhost");
final LocalCluster cluster = new LocalCluster();
LocalDRPC client = new LocalDRPC();
LOG.info("Submitting topology.");
cluster.submitTopology("cqlexample", configuration, buildWordCountAndSourceTopology(client));
LOG.info("Topology submitted.");
Thread.sleep(10000);
LOG.info("DRPC Query: Word Count [cat, dog, the, man]: {}", client.execute("words", "cat dog the man"));
cluster.shutdown();
client.shutdown();
}
}
| Changed sample data to get more variability in the counts.
| src/test/java/com/hmsonline/trident/cql/example/wordcount/WordCountTopology.java | Changed sample data to get more variability in the counts. | <ide><path>src/test/java/com/hmsonline/trident/cql/example/wordcount/WordCountTopology.java
<ide> FixedBatchSpout spout1 = new FixedBatchSpout(new Fields("sentence", "source"), 3,
<ide> new Values("the cow jumped over the moon", source1),
<ide> new Values("the man went to the store and bought some candy", source1),
<del> new Values("four score and seven years ago", source2),
<del> new Values("how many apples can you eat", source2));
<add> new Values("four score and four years ago", source2),
<add> new Values("how much wood can a wood chuck chuck", source2));
<ide> spout1.setCycle(true);
<ide>
<ide> TridentState wordCounts = |
|
Java | apache-2.0 | c612fa73e6c4fc0e445415e4dee2a5a1240ac2fc | 0 | hs-jenkins-bot/Singularity,HubSpot/Singularity | package com.hubspot.singularity.resources;
import static com.hubspot.singularity.WebExceptions.checkBadRequest;
import static com.hubspot.singularity.WebExceptions.checkConflict;
import static com.hubspot.singularity.WebExceptions.checkNotNullBadRequest;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.curator.framework.recipes.leader.LeaderLatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.hubspot.jackson.jaxrs.PropertyFiltering;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.MachineState;
import com.hubspot.singularity.RequestCleanupType;
import com.hubspot.singularity.RequestState;
import com.hubspot.singularity.SingularityAction;
import com.hubspot.singularity.SingularityAuthorizationScope;
import com.hubspot.singularity.SingularityCreateResult;
import com.hubspot.singularity.SingularityDeleteResult;
import com.hubspot.singularity.SingularityPendingDeploy;
import com.hubspot.singularity.SingularityPendingRequest;
import com.hubspot.singularity.SingularityPendingRequest.PendingType;
import com.hubspot.singularity.SingularityPendingRequestParent;
import com.hubspot.singularity.SingularityRequest;
import com.hubspot.singularity.SingularityRequestCleanup;
import com.hubspot.singularity.SingularityRequestHistory.RequestHistoryType;
import com.hubspot.singularity.SingularityRequestParent;
import com.hubspot.singularity.SingularityRequestWithState;
import com.hubspot.singularity.SingularityShellCommand;
import com.hubspot.singularity.SingularityTaskId;
import com.hubspot.singularity.SingularityTransformHelpers;
import com.hubspot.singularity.SingularityUser;
import com.hubspot.singularity.SlavePlacement;
import com.hubspot.singularity.WebExceptions;
import com.hubspot.singularity.api.SingularityBounceRequest;
import com.hubspot.singularity.api.SingularityDeleteRequestRequest;
import com.hubspot.singularity.api.SingularityExitCooldownRequest;
import com.hubspot.singularity.api.SingularityPauseRequest;
import com.hubspot.singularity.api.SingularityRunNowRequest;
import com.hubspot.singularity.api.SingularityScaleRequest;
import com.hubspot.singularity.api.SingularitySkipHealthchecksRequest;
import com.hubspot.singularity.api.SingularityUnpauseRequest;
import com.hubspot.singularity.auth.SingularityAuthorizationHelper;
import com.hubspot.singularity.config.ApiPaths;
import com.hubspot.singularity.data.DeployManager;
import com.hubspot.singularity.data.DisasterManager;
import com.hubspot.singularity.data.RequestManager;
import com.hubspot.singularity.data.SingularityValidator;
import com.hubspot.singularity.data.SlaveManager;
import com.hubspot.singularity.data.TaskManager;
import com.hubspot.singularity.data.history.RequestHistoryHelper;
import com.hubspot.singularity.expiring.SingularityExpiringBounce;
import com.hubspot.singularity.expiring.SingularityExpiringPause;
import com.hubspot.singularity.expiring.SingularityExpiringRequestActionParent;
import com.hubspot.singularity.expiring.SingularityExpiringScale;
import com.hubspot.singularity.expiring.SingularityExpiringSkipHealthchecks;
import com.hubspot.singularity.helpers.RequestHelper;
import com.hubspot.singularity.smtp.SingularityMailer;
import com.ning.http.client.AsyncHttpClient;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
@Path(ApiPaths.REQUEST_RESOURCE_PATH)
@Produces({ MediaType.APPLICATION_JSON })
@Api(description="Manages Singularity Requests, the parent object for any deployed task", value=ApiPaths.REQUEST_RESOURCE_PATH, position=1)
public class RequestResource extends AbstractRequestResource {
private static final Logger LOG = LoggerFactory.getLogger(RequestResource.class);
private final SingularityMailer mailer;
private final TaskManager taskManager;
private final RequestHelper requestHelper;
private final SlaveManager slaveManager;
@Inject
public RequestResource(SingularityValidator validator, DeployManager deployManager, TaskManager taskManager, RequestManager requestManager, SingularityMailer mailer,
SingularityAuthorizationHelper authorizationHelper, Optional<SingularityUser> user, RequestHelper requestHelper, LeaderLatch leaderLatch,
SlaveManager slaveManager, DisasterManager disasterManager, AsyncHttpClient httpClient, ObjectMapper objectMapper, RequestHistoryHelper requestHistoryHelper) {
super(requestManager, deployManager, user, validator, authorizationHelper, httpClient, leaderLatch, objectMapper, requestHelper, requestHistoryHelper);
this.mailer = mailer;
this.taskManager = taskManager;
this.requestHelper = requestHelper;
this.slaveManager = slaveManager;
}
private void submitRequest(SingularityRequest request, Optional<SingularityRequestWithState> oldRequestWithState, Optional<RequestHistoryType> historyType,
Optional<Boolean> skipHealthchecks, Optional<String> message, Optional<SingularityBounceRequest> maybeBounceRequest) {
checkNotNullBadRequest(request.getId(), "Request must have an id");
checkConflict(!requestManager.cleanupRequestExists(request.getId()), "Request %s is currently cleaning. Try again after a few moments", request.getId());
Optional<SingularityPendingDeploy> maybePendingDeploy = deployManager.getPendingDeploy(request.getId());
checkConflict(!(maybePendingDeploy.isPresent() && maybePendingDeploy.get().getUpdatedRequest().isPresent()), "Request %s has a pending deploy that may change the request data. Try again when the deploy has finished", request.getId());
Optional<SingularityRequest> oldRequest = oldRequestWithState.isPresent() ? Optional.of(oldRequestWithState.get().getRequest()) : Optional.<SingularityRequest> absent();
if (oldRequest.isPresent()) {
authorizationHelper.checkForAuthorization(oldRequest.get(), user, SingularityAuthorizationScope.WRITE);
authorizationHelper.checkForAuthorizedChanges(request, oldRequest.get(), user);
validator.checkActionEnabled(SingularityAction.UPDATE_REQUEST);
} else {
validator.checkActionEnabled(SingularityAction.CREATE_REQUEST);
}
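    // SPREAD_ALL_SLAVES pins the instance count to the number of currently
    // active slaves, so that every active slave can receive one instance.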
if (request.getSlavePlacement().isPresent() && request.getSlavePlacement().get() == SlavePlacement.SPREAD_ALL_SLAVES) {
      checkBadRequest(validator.isSpreadAllSlavesEnabled(), "You must enable spread to all slaves in order to use the SPREAD_ALL_SLAVES request type");
int currentActiveSlaveCount = slaveManager.getNumObjectsAtState(MachineState.ACTIVE);
request = request.toBuilder().setInstances(Optional.of(currentActiveSlaveCount)).build();
}
if (!oldRequest.isPresent() || !(oldRequest.get().getInstancesSafe() == request.getInstancesSafe())) {
validator.checkScale(request, Optional.<Integer>absent());
}
authorizationHelper.checkForAuthorization(request, user, SingularityAuthorizationScope.WRITE);
RequestState requestState = RequestState.ACTIVE;
if (oldRequestWithState.isPresent()) {
requestState = oldRequestWithState.get().getState();
}
requestHelper.updateRequest(request, oldRequest, requestState, historyType, JavaUtils.getUserEmail(user), skipHealthchecks, message, maybeBounceRequest);
}
@POST
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Create or update a Singularity Request", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=400, message="Request object is invalid"),
@ApiResponse(code=409, message="Request object is being cleaned. Try again shortly"),
})
public SingularityRequestParent postRequest(@Context HttpServletRequest requestContext,
@ApiParam("The Singularity request to create or update") SingularityRequest request) {
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, request, () -> postRequest(request));
}
public SingularityRequestParent postRequest(SingularityRequest request) {
submitRequest(request, requestManager.getRequest(request.getId()), Optional.<RequestHistoryType> absent(), Optional.<Boolean> absent(), Optional.<String> absent(), Optional.<SingularityBounceRequest>absent());
return fillEntireRequest(fetchRequestWithState(request.getId()));
}
private String getAndCheckDeployId(String requestId) {
Optional<String> maybeDeployId = deployManager.getInUseDeployId(requestId);
checkConflict(maybeDeployId.isPresent(), "Can not schedule/bounce a request (%s) with no deploy", requestId);
return maybeDeployId.get();
}
@POST
@Path("/request/{requestId}/bounce")
public SingularityRequestParent bounce(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return bounce(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/bounce")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Bounce a specific Singularity request. A bounce launches replacement task(s), and then kills the original task(s) if the replacement(s) are healthy.",
response=SingularityRequestParent.class)
public SingularityRequestParent bounce(@ApiParam("The request ID to bounce") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Bounce request options") SingularityBounceRequest bounceRequest) {
final Optional<SingularityBounceRequest> maybeBounceRequest = Optional.fromNullable(bounceRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybeBounceRequest.orNull(), () -> bounce(requestId, maybeBounceRequest));
}
public SingularityRequestParent bounce(String requestId, Optional<SingularityBounceRequest> bounceRequest) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
validator.checkActionEnabled(SingularityAction.BOUNCE_REQUEST);
checkBadRequest(requestWithState.getRequest().isLongRunning(), "Can not bounce a %s request (%s)", requestWithState.getRequest().getRequestType(), requestWithState);
checkConflict(requestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to bounce (it must be manually unpaused first)", requestWithState.getRequest().getId());
final boolean isIncrementalBounce = bounceRequest.isPresent() && bounceRequest.get().getIncremental().or(false);
validator.checkResourcesForBounce(requestWithState.getRequest(), isIncrementalBounce);
validator.checkRequestForPriorityFreeze(requestWithState.getRequest());
final Optional<Boolean> skipHealthchecks = bounceRequest.isPresent() ? bounceRequest.get().getSkipHealthchecks() : Optional.<Boolean> absent();
Optional<String> message = Optional.absent();
Optional<String> actionId = Optional.absent();
Optional<SingularityShellCommand> runBeforeKill = Optional.absent();
if (bounceRequest.isPresent()) {
actionId = bounceRequest.get().getActionId();
message = bounceRequest.get().getMessage();
if (bounceRequest.get().getRunShellCommandBeforeKill().isPresent()) {
validator.checkValidShellCommand(bounceRequest.get().getRunShellCommandBeforeKill().get());
runBeforeKill = bounceRequest.get().getRunShellCommandBeforeKill();
}
}
if (!actionId.isPresent()) {
actionId = Optional.of(UUID.randomUUID().toString());
}
final String deployId = getAndCheckDeployId(requestId);
checkConflict(!(requestManager.markAsBouncing(requestId) == SingularityCreateResult.EXISTED), "%s is already bouncing", requestId);
requestManager.createCleanupRequest(
new SingularityRequestCleanup(JavaUtils.getUserEmail(user), isIncrementalBounce ? RequestCleanupType.INCREMENTAL_BOUNCE : RequestCleanupType.BOUNCE,
System.currentTimeMillis(), Optional.<Boolean> absent(), Optional.absent(), requestId, Optional.of(deployId), skipHealthchecks, message, actionId, runBeforeKill));
requestManager.bounce(requestWithState.getRequest(), System.currentTimeMillis(), JavaUtils.getUserEmail(user), message);
final SingularityBounceRequest validatedBounceRequest = validator.checkBounceRequest(bounceRequest.or(SingularityBounceRequest.defaultRequest()));
requestManager.saveExpiringObject(new SingularityExpiringBounce(requestId, deployId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), validatedBounceRequest, actionId.get()));
return fillEntireRequest(requestWithState);
}
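  // Usage sketch (illustrative, not part of the original source): submitting an
  // incremental bounce through this resource. The argument order of the
  // SingularityBounceRequest constructor is inferred from the call in scale(...)
  // below; the request ID is a placeholder.
  private SingularityRequestParent bounceExample() {
    SingularityBounceRequest exampleBounce = new SingularityBounceRequest(
        Optional.of(true),                           // incremental: replace tasks gradually
        Optional.of(false),                          // do not skip healthchecks on replacements
        Optional.<Long>absent(),                     // no expiring-action duration
        Optional.of(UUID.randomUUID().toString()),   // actionId used to track or cancel the bounce
        Optional.of("rolling restart"),              // audit message
        Optional.<SingularityShellCommand>absent()); // no pre-kill shell command
    return bounce("my-request-id", Optional.of(exampleBounce));
  }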
@POST
@Path("/request/{requestId}/run")
public SingularityPendingRequestParent scheduleImmediately(@PathParam("requestId") String requestId) {
return scheduleImmediately(requestId, null);
}
@POST
@Path("/request/{requestId}/run")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Schedule a one-off or scheduled Singularity request for immediate or delayed execution.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=400, message="Singularity Request is not scheduled or one-off"),
})
public SingularityPendingRequestParent scheduleImmediately(@ApiParam("The request ID to run") @PathParam("requestId") String requestId,
SingularityRunNowRequest runNowRequest) {
final Optional<SingularityRunNowRequest> maybeRunNowRequest = Optional.fromNullable(runNowRequest);
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to run now (it must be manually unpaused first)", requestWithState.getRequest().getId());
final SingularityPendingRequest pendingRequest = validator.checkRunNowRequest(
getAndCheckDeployId(requestId),
JavaUtils.getUserEmail(user),
requestWithState.getRequest(),
maybeRunNowRequest,
taskManager.getActiveTaskIdsForRequest(requestId),
taskManager.getPendingTaskIdsForRequest(requestId));
SingularityCreateResult result = requestManager.addToPendingQueue(pendingRequest);
checkConflict(result != SingularityCreateResult.EXISTED, "%s is already pending, please try again soon", requestId);
return SingularityPendingRequestParent.fromSingularityRequestParent(fillEntireRequest(requestWithState), pendingRequest);
}
@GET
@Path("/request/{requestId}/run/{runId}")
@ApiOperation("Retrieve an active task by runId")
public Optional<SingularityTaskId> getTaskByRunId(@PathParam("requestId") String requestId, @PathParam("runId") String runId) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.READ);
return taskManager.getTaskByRunId(requestId, runId);
}
@POST
@Path("/request/{requestId}/pause")
public SingularityRequestParent pause(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return pause(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/pause")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Pause a Singularity request, future tasks will not run until it is manually unpaused. API can optionally choose to kill existing tasks", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=409, message="Request is already paused or being cleaned"),
})
public SingularityRequestParent pause(@ApiParam("The request ID to pause") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Pause Request Options") SingularityPauseRequest pauseRequest) {
final Optional<SingularityPauseRequest> maybePauseRequest = Optional.fromNullable(pauseRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybePauseRequest.orNull(), () -> pause(requestId, maybePauseRequest));
}
public SingularityRequestParent pause(String requestId, Optional<SingularityPauseRequest> pauseRequest) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to pause (it must be manually unpaused first)", requestWithState.getRequest().getId());
Optional<Boolean> killTasks = Optional.absent();
Optional<String> message = Optional.absent();
Optional<String> actionId = Optional.absent();
Optional<SingularityShellCommand> runBeforeKill = Optional.absent();
if (pauseRequest.isPresent()) {
killTasks = pauseRequest.get().getKillTasks();
message = pauseRequest.get().getMessage();
if (pauseRequest.get().getRunShellCommandBeforeKill().isPresent()) {
validator.checkValidShellCommand(pauseRequest.get().getRunShellCommandBeforeKill().get());
runBeforeKill = pauseRequest.get().getRunShellCommandBeforeKill();
}
if (pauseRequest.get().getDurationMillis().isPresent() && !actionId.isPresent()) {
actionId = Optional.of(UUID.randomUUID().toString());
}
}
final long now = System.currentTimeMillis();
Optional<Boolean> removeFromLoadBalancer = Optional.absent();
SingularityCreateResult result = requestManager.createCleanupRequest(new SingularityRequestCleanup(JavaUtils.getUserEmail(user),
RequestCleanupType.PAUSING, now, killTasks, removeFromLoadBalancer, requestId, Optional.<String> absent(), Optional.<Boolean> absent(), message, actionId, runBeforeKill));
    checkConflict(result == SingularityCreateResult.CREATED, "%s is already pausing - try again soon", requestId);
mailer.sendRequestPausedMail(requestWithState.getRequest(), pauseRequest, JavaUtils.getUserEmail(user));
requestManager.pause(requestWithState.getRequest(), now, JavaUtils.getUserEmail(user), message);
if (pauseRequest.isPresent() && pauseRequest.get().getDurationMillis().isPresent()) {
requestManager.saveExpiringObject(new SingularityExpiringPause(requestId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), pauseRequest.get(), actionId.get()));
}
return fillEntireRequest(new SingularityRequestWithState(requestWithState.getRequest(), RequestState.PAUSED, now));
}
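  // Usage sketch (illustrative, not part of the original source): pausing with
  // an automatic expiry. SingularityPauseRequest's constructor is not visible
  // here, so the object is built from JSON; the field names are inferred from
  // the getters used above, and the Guava Jackson module is assumed because the
  // API objects use Guava Optionals.
  private SingularityRequestParent pauseExample() throws java.io.IOException {
    ObjectMapper exampleMapper = new ObjectMapper()
        .registerModule(new com.fasterxml.jackson.datatype.guava.GuavaModule());
    SingularityPauseRequest examplePause = exampleMapper.readValue(
        "{\"killTasks\": false, \"durationMillis\": 1800000, \"message\": \"maintenance window\"}",
        SingularityPauseRequest.class);
    // Running tasks stay up; the saved SingularityExpiringPause unpauses the
    // request automatically after 30 minutes unless it is cancelled first.
    return pause("my-request-id", Optional.of(examplePause));
  }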
@POST
@Path("/request/{requestId}/unpause")
public SingularityRequestParent unpauseNoBody(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return unpause(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/unpause")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Unpause a Singularity Request, scheduling new tasks immediately", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=409, message="Request is not paused"),
})
public SingularityRequestParent unpause(@ApiParam("The request ID to unpause") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
SingularityUnpauseRequest unpauseRequest) {
final Optional<SingularityUnpauseRequest> maybeUnpauseRequest = Optional.fromNullable(unpauseRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybeUnpauseRequest.orNull(), () -> unpause(requestId, maybeUnpauseRequest));
}
public SingularityRequestParent unpause(String requestId, Optional<SingularityUnpauseRequest> unpauseRequest) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() == RequestState.PAUSED, "Request %s is not in PAUSED state, it is in %s", requestId, requestWithState.getState());
Optional<String> message = Optional.absent();
Optional<Boolean> skipHealthchecks = Optional.absent();
if (unpauseRequest.isPresent()) {
message = unpauseRequest.get().getMessage();
skipHealthchecks = unpauseRequest.get().getSkipHealthchecks();
}
requestManager.deleteExpiringObject(SingularityExpiringPause.class, requestId);
final long now = requestHelper.unpause(requestWithState.getRequest(), JavaUtils.getUserEmail(user), message, skipHealthchecks);
return fillEntireRequest(new SingularityRequestWithState(requestWithState.getRequest(), RequestState.ACTIVE, now));
}
@POST
@Path("/request/{requestId}/exit-cooldown")
public SingularityRequestParent exitCooldown(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return exitCooldown(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/exit-cooldown")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Immediately exits cooldown, scheduling new tasks immediately", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=409, message="Request is not in cooldown"),
})
public SingularityRequestParent exitCooldown(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
SingularityExitCooldownRequest exitCooldownRequest) {
final Optional<SingularityExitCooldownRequest> maybeExitCooldownRequest = Optional.fromNullable(exitCooldownRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybeExitCooldownRequest.orNull(), () -> exitCooldown(requestId, maybeExitCooldownRequest));
}
public SingularityRequestParent exitCooldown(String requestId, Optional<SingularityExitCooldownRequest> exitCooldownRequest) {
final SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() == RequestState.SYSTEM_COOLDOWN, "Request %s is not in SYSTEM_COOLDOWN state, it is in %s", requestId, requestWithState.getState());
final Optional<String> maybeDeployId = deployManager.getInUseDeployId(requestId);
final long now = System.currentTimeMillis();
Optional<String> message = Optional.absent();
Optional<Boolean> skipHealthchecks = Optional.absent();
if (exitCooldownRequest.isPresent()) {
message = exitCooldownRequest.get().getMessage();
skipHealthchecks = exitCooldownRequest.get().getSkipHealthchecks();
}
requestManager.exitCooldown(requestWithState.getRequest(), now, JavaUtils.getUserEmail(user), message);
if (maybeDeployId.isPresent() && !requestWithState.getRequest().isOneOff()) {
requestManager.addToPendingQueue(new SingularityPendingRequest(requestId, maybeDeployId.get(), now, JavaUtils.getUserEmail(user),
PendingType.IMMEDIATE, skipHealthchecks, message));
}
return fillEntireRequest(requestWithState);
}
@GET
@PropertyFiltering
@Path("/active")
@ApiOperation(value="Retrieve the list of active requests", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getActiveRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeFullRequestData") Boolean includeFullRequestData,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getActiveRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeFullRequestData, limit);
}
@GET
@PropertyFiltering
@Path("/paused")
@ApiOperation(value="Retrieve the list of paused requests", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getPausedRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeFullRequestData") Boolean includeFullRequestData,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getPausedRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeFullRequestData, limit);
}
@GET
@PropertyFiltering
@Path("/cooldown")
@ApiOperation(value="Retrieve the list of requests in system cooldown", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getCooldownRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeFullRequestData") Boolean includeFullRequestData,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getCooldownRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeFullRequestData, limit);
}
@GET
@PropertyFiltering
@Path("/finished")
@ApiOperation(value="Retreive the list of finished requests (Scheduled requests which have exhausted their schedules)", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getFinishedRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeFullRequestData") Boolean includeFullRequestData,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getFinishedRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeFullRequestData, limit);
}
@GET
@PropertyFiltering
@ApiOperation(value="Retrieve the list of all requests", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeFullRequestData") Boolean includeFullRequestData,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(requestManager.getRequests(useWebCache(useWebCache)), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeFullRequestData, limit);
}
private List<SingularityRequestWithState> filterAutorized(List<SingularityRequestWithState> requests, final SingularityAuthorizationScope scope, Optional<SingularityUser> user) {
if (!authorizationHelper.hasAdminAuthorization(user)) {
return requests.stream()
.filter((parent) -> authorizationHelper.isAuthorizedForRequest(parent.getRequest(), user, scope))
.collect(Collectors.toList());
}
return requests;
}
@GET
@PropertyFiltering
@Path("/queued/pending")
@ApiOperation(value="Retrieve the list of pending requests", response=SingularityPendingRequest.class, responseContainer="List")
public Iterable<SingularityPendingRequest> getPendingRequests() {
return authorizationHelper.filterByAuthorizedRequests(user, requestManager.getPendingRequests(), SingularityTransformHelpers.PENDING_REQUEST_TO_REQUEST_ID, SingularityAuthorizationScope.READ);
}
@GET
@PropertyFiltering
@Path("/queued/cleanup")
@ApiOperation(value="Retrieve the list of requests being cleaned up", response=SingularityRequestCleanup.class, responseContainer="List")
public Iterable<SingularityRequestCleanup> getCleanupRequests() {
return authorizationHelper.filterByAuthorizedRequests(user, requestManager.getCleanupRequests(), SingularityTransformHelpers.REQUEST_CLEANUP_TO_REQUEST_ID, SingularityAuthorizationScope.READ);
}
@GET
@Path("/request/{requestId}")
@ApiOperation(value="Retrieve a specific Request by ID", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequestParent getRequest(@ApiParam("Request ID") @PathParam("requestId") String requestId, @QueryParam("useWebCache") Boolean useWebCache) {
return fillEntireRequest(fetchRequestWithState(requestId, useWebCache(useWebCache)));
}
public SingularityRequestParent getRequest(String requestId) {
return fillEntireRequest(fetchRequestWithState(requestId, false));
}
@DELETE
@Path("/request/{requestId}")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Delete a specific Request by ID and return the deleted Request", response=SingularityRequest.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequest deleteRequest(@ApiParam("The request ID to delete.") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Delete options") SingularityDeleteRequestRequest deleteRequest) {
final Optional<SingularityDeleteRequestRequest> maybeDeleteRequest = Optional.fromNullable(deleteRequest);
return maybeProxyToLeader(requestContext, SingularityRequest.class, maybeDeleteRequest.orNull(), () -> deleteRequest(requestId, maybeDeleteRequest));
}
public SingularityRequest deleteRequest(String requestId, Optional<SingularityDeleteRequestRequest> deleteRequest) {
SingularityRequest request = fetchRequestWithState(requestId).getRequest();
authorizationHelper.checkForAuthorization(request, user, SingularityAuthorizationScope.WRITE);
validator.checkActionEnabled(SingularityAction.REMOVE_REQUEST);
Optional<String> message = Optional.absent();
Optional<String> actionId = Optional.absent();
Optional<Boolean> deleteFromLoadBalancer = Optional.absent();
if (deleteRequest.isPresent()) {
actionId = deleteRequest.get().getActionId();
message = deleteRequest.get().getMessage();
deleteFromLoadBalancer = deleteRequest.get().getDeleteFromLoadBalancer();
}
requestManager.startDeletingRequest(request, deleteFromLoadBalancer, JavaUtils.getUserEmail(user), actionId, message);
mailer.sendRequestRemovedMail(request, JavaUtils.getUserEmail(user), message);
return request;
}
@PUT
@Path("/request/{requestId}/scale")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Scale the number of instances up or down for a specific Request", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequestParent scale(@ApiParam("The Request ID to scale") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Object to hold number of instances to request") SingularityScaleRequest scaleRequest) {
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, scaleRequest, () -> scale(requestId, scaleRequest));
}
public SingularityRequestParent scale(String requestId, SingularityScaleRequest scaleRequest) {
SingularityRequestWithState oldRequestWithState = fetchRequestWithState(requestId);
SingularityRequest oldRequest = oldRequestWithState.getRequest();
authorizationHelper.checkForAuthorization(oldRequest, user, SingularityAuthorizationScope.WRITE);
validator.checkActionEnabled(SingularityAction.SCALE_REQUEST);
SingularityRequest newRequest = oldRequest.toBuilder().setInstances(scaleRequest.getInstances()).build();
validator.checkScale(newRequest, Optional.<Integer>absent());
    checkBadRequest(oldRequest.getInstancesSafe() != newRequest.getInstancesSafe(), "Scale request has no effect on the # of instances (%s)", newRequest.getInstancesSafe());
String scaleMessage = String.format("Scaling from %d -> %d", oldRequest.getInstancesSafe(), newRequest.getInstancesSafe());
if (scaleRequest.getMessage().isPresent()) {
scaleMessage = String.format("%s -- %s", scaleRequest.getMessage().get(), scaleMessage);
    }
if (scaleRequest.getBounce().or(newRequest.getBounceAfterScale().or(false))) {
validator.checkActionEnabled(SingularityAction.BOUNCE_REQUEST);
checkBadRequest(newRequest.isLongRunning(), "Can not bounce a %s request (%s)", newRequest.getRequestType(), newRequest);
checkConflict(oldRequestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to bounce (it must be manually unpaused first)", newRequest.getId());
      checkConflict(!requestManager.cleanupRequestExists(newRequest.getId(), RequestCleanupType.BOUNCE), "Request %s is already bouncing, cannot bounce again", newRequest.getId());
final boolean isIncrementalBounce = scaleRequest.getIncremental().or(true);
validator.checkResourcesForBounce(newRequest, isIncrementalBounce);
validator.checkRequestForPriorityFreeze(newRequest);
SingularityBounceRequest bounceRequest = new SingularityBounceRequest(Optional.of(isIncrementalBounce), scaleRequest.getSkipHealthchecks(), Optional.<Long>absent(), Optional.of(UUID.randomUUID().toString()), Optional.<String>absent(), Optional.<SingularityShellCommand>absent());
submitRequest(newRequest, Optional.of(oldRequestWithState), Optional.of(RequestHistoryType.SCALED), scaleRequest.getSkipHealthchecks(), Optional.of(scaleMessage), Optional.of(bounceRequest));
} else {
submitRequest(newRequest, Optional.of(oldRequestWithState), Optional.of(RequestHistoryType.SCALED), scaleRequest.getSkipHealthchecks(), Optional.of(scaleMessage), Optional.<SingularityBounceRequest>absent());
}
if (scaleRequest.getDurationMillis().isPresent()) {
requestManager.saveExpiringObject(new SingularityExpiringScale(requestId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), scaleRequest, oldRequest.getInstances(), scaleRequest.getActionId().or(UUID.randomUUID().toString()), scaleRequest.getBounce()));
}
mailer.sendRequestScaledMail(newRequest, Optional.of(scaleRequest), oldRequest.getInstances(), JavaUtils.getUserEmail(user));
return fillEntireRequest(fetchRequestWithState(requestId));
}
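  // Usage sketch (illustrative, not part of the original source): a temporary
  // scale-up. As in the pause sketch above, the object is built from JSON with
  // field names inferred from the SingularityScaleRequest getters used in
  // scale(...); the wire schema is an assumption, not a documented contract.
  private SingularityRequestParent scaleExample() throws java.io.IOException {
    ObjectMapper exampleMapper = new ObjectMapper()
        .registerModule(new com.fasterxml.jackson.datatype.guava.GuavaModule());
    SingularityScaleRequest exampleScale = exampleMapper.readValue(
        "{\"instances\": 5, \"message\": \"scaling up for peak traffic\", \"durationMillis\": 3600000}",
        SingularityScaleRequest.class);
    // Because durationMillis is set, a SingularityExpiringScale is saved and the
    // previous instance count is restored after an hour unless the expiring
    // scale is cancelled via the DELETE /request/{requestId}/scale endpoint.
    return scale("my-request-id", exampleScale);
  }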
private <T extends SingularityExpiringRequestActionParent<?>> SingularityRequestParent deleteExpiringObject(Class<T> clazz, String requestId) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
SingularityDeleteResult deleteResult = requestManager.deleteExpiringObject(clazz, requestId);
    WebExceptions.checkNotFound(deleteResult == SingularityDeleteResult.DELETED, "%s didn't have an expiring %s request", requestId, clazz.getSimpleName());
return fillEntireRequest(requestWithState);
}
@DELETE
@Path("/request/{requestId}/scale")
@ApiOperation(value="Delete/cancel the expiring scale. This makes the scale request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring scale request for that ID"),
})
public SingularityRequestParent deleteExpiringScale(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringScale.class, requestId);
}
@Deprecated
@DELETE
@Path("/request/{requestId}/skipHealthchecks")
@ApiOperation(value="Delete/cancel the expiring skipHealthchecks. This makes the skipHealthchecks request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring skipHealthchecks request for that ID"),
})
public SingularityRequestParent deleteExpiringSkipHealthchecksDeprecated(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringSkipHealthchecks(requestId);
}
@DELETE
@Path("/request/{requestId}/skip-healthchecks")
@ApiOperation(value="Delete/cancel the expiring skipHealthchecks. This makes the skipHealthchecks request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring skipHealthchecks request for that ID"),
})
public SingularityRequestParent deleteExpiringSkipHealthchecks(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringSkipHealthchecks.class, requestId);
}
@DELETE
@Path("/request/{requestId}/pause")
@ApiOperation(value="Delete/cancel the expiring pause. This makes the pause request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring pause request for that ID"),
})
public SingularityRequestParent deleteExpiringPause(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringPause.class, requestId);
}
@DELETE
@Path("/request/{requestId}/bounce")
@ApiOperation(value="Delete/cancel the expiring bounce. This makes the bounce request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring bounce request for that ID"),
})
public SingularityRequestParent deleteExpiringBounce(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringBounce.class, requestId);
}
@Deprecated
@PUT
@Path("/request/{requestId}/skipHealthchecks")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Update the skipHealthchecks field for the request, possibly temporarily", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
  public SingularityRequestParent skipHealthchecksDeprecated(@ApiParam("The Request ID") @PathParam("requestId") String requestId,
                                                             @Context HttpServletRequest requestContext,
                                                             @ApiParam("SkipHealthchecks options") SingularitySkipHealthchecksRequest skipHealthchecksRequest) {
return skipHealthchecks(requestId, requestContext, skipHealthchecksRequest);
}
@PUT
@Path("/request/{requestId}/skip-healthchecks")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Update the skipHealthchecks field for the request, possibly temporarily", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
  public SingularityRequestParent skipHealthchecks(@ApiParam("The Request ID") @PathParam("requestId") String requestId,
                                                   @Context HttpServletRequest requestContext,
                                                   @ApiParam("SkipHealthchecks options") SingularitySkipHealthchecksRequest skipHealthchecksRequest) {
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, skipHealthchecksRequest, () -> skipHealthchecks(requestId, skipHealthchecksRequest));
}
public SingularityRequestParent skipHealthchecks(String requestId, SingularitySkipHealthchecksRequest skipHealthchecksRequest) {
SingularityRequestWithState oldRequestWithState = fetchRequestWithState(requestId);
SingularityRequest oldRequest = oldRequestWithState.getRequest();
SingularityRequest newRequest = oldRequest.toBuilder().setSkipHealthchecks(skipHealthchecksRequest.getSkipHealthchecks()).build();
submitRequest(newRequest, Optional.of(oldRequestWithState), Optional.<RequestHistoryType> absent(), Optional.<Boolean> absent(), skipHealthchecksRequest.getMessage(), Optional.<SingularityBounceRequest>absent());
if (skipHealthchecksRequest.getDurationMillis().isPresent()) {
requestManager.saveExpiringObject(new SingularityExpiringSkipHealthchecks(requestId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), skipHealthchecksRequest, oldRequest.getSkipHealthchecks(), skipHealthchecksRequest.getActionId().or(UUID.randomUUID().toString())));
}
return fillEntireRequest(fetchRequestWithState(requestId));
}
@GET
@PropertyFiltering
@Path("/lbcleanup")
@ApiOperation("Retrieve the list of tasks being cleaned from load balancers.")
public Iterable<String> getLbCleanupRequests(@QueryParam("useWebCache") Boolean useWebCache) {
return authorizationHelper.filterAuthorizedRequestIds(user, requestManager.getLbCleanupRequestIds(), SingularityAuthorizationScope.READ, useWebCache(useWebCache));
}
}
| SingularityService/src/main/java/com/hubspot/singularity/resources/RequestResource.java | package com.hubspot.singularity.resources;
import static com.hubspot.singularity.WebExceptions.checkBadRequest;
import static com.hubspot.singularity.WebExceptions.checkConflict;
import static com.hubspot.singularity.WebExceptions.checkNotNullBadRequest;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.curator.framework.recipes.leader.LeaderLatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.hubspot.jackson.jaxrs.PropertyFiltering;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.MachineState;
import com.hubspot.singularity.RequestCleanupType;
import com.hubspot.singularity.RequestState;
import com.hubspot.singularity.SingularityAction;
import com.hubspot.singularity.SingularityAuthorizationScope;
import com.hubspot.singularity.SingularityCreateResult;
import com.hubspot.singularity.SingularityDeleteResult;
import com.hubspot.singularity.SingularityPendingDeploy;
import com.hubspot.singularity.SingularityPendingRequest;
import com.hubspot.singularity.SingularityPendingRequest.PendingType;
import com.hubspot.singularity.SingularityPendingRequestParent;
import com.hubspot.singularity.SingularityRequest;
import com.hubspot.singularity.SingularityRequestCleanup;
import com.hubspot.singularity.SingularityRequestHistory.RequestHistoryType;
import com.hubspot.singularity.SingularityRequestParent;
import com.hubspot.singularity.SingularityRequestWithState;
import com.hubspot.singularity.SingularityShellCommand;
import com.hubspot.singularity.SingularityTaskId;
import com.hubspot.singularity.SingularityTransformHelpers;
import com.hubspot.singularity.SingularityUser;
import com.hubspot.singularity.SlavePlacement;
import com.hubspot.singularity.WebExceptions;
import com.hubspot.singularity.api.SingularityBounceRequest;
import com.hubspot.singularity.api.SingularityDeleteRequestRequest;
import com.hubspot.singularity.api.SingularityExitCooldownRequest;
import com.hubspot.singularity.api.SingularityPauseRequest;
import com.hubspot.singularity.api.SingularityRunNowRequest;
import com.hubspot.singularity.api.SingularityScaleRequest;
import com.hubspot.singularity.api.SingularitySkipHealthchecksRequest;
import com.hubspot.singularity.api.SingularityUnpauseRequest;
import com.hubspot.singularity.auth.SingularityAuthorizationHelper;
import com.hubspot.singularity.config.ApiPaths;
import com.hubspot.singularity.data.DeployManager;
import com.hubspot.singularity.data.DisasterManager;
import com.hubspot.singularity.data.RequestManager;
import com.hubspot.singularity.data.SingularityValidator;
import com.hubspot.singularity.data.SlaveManager;
import com.hubspot.singularity.data.TaskManager;
import com.hubspot.singularity.data.history.RequestHistoryHelper;
import com.hubspot.singularity.expiring.SingularityExpiringBounce;
import com.hubspot.singularity.expiring.SingularityExpiringPause;
import com.hubspot.singularity.expiring.SingularityExpiringRequestActionParent;
import com.hubspot.singularity.expiring.SingularityExpiringScale;
import com.hubspot.singularity.expiring.SingularityExpiringSkipHealthchecks;
import com.hubspot.singularity.helpers.RequestHelper;
import com.hubspot.singularity.smtp.SingularityMailer;
import com.ning.http.client.AsyncHttpClient;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
@Path(ApiPaths.REQUEST_RESOURCE_PATH)
@Produces({ MediaType.APPLICATION_JSON })
@Api(description="Manages Singularity Requests, the parent object for any deployed task", value=ApiPaths.REQUEST_RESOURCE_PATH, position=1)
public class RequestResource extends AbstractRequestResource {
private static final Logger LOG = LoggerFactory.getLogger(RequestResource.class);
private final SingularityMailer mailer;
private final TaskManager taskManager;
private final RequestHelper requestHelper;
private final SlaveManager slaveManager;
@Inject
public RequestResource(SingularityValidator validator, DeployManager deployManager, TaskManager taskManager, RequestManager requestManager, SingularityMailer mailer,
SingularityAuthorizationHelper authorizationHelper, Optional<SingularityUser> user, RequestHelper requestHelper, LeaderLatch leaderLatch,
SlaveManager slaveManager, DisasterManager disasterManager, AsyncHttpClient httpClient, ObjectMapper objectMapper, RequestHistoryHelper requestHistoryHelper) {
super(requestManager, deployManager, user, validator, authorizationHelper, httpClient, leaderLatch, objectMapper, requestHelper, requestHistoryHelper);
this.mailer = mailer;
this.taskManager = taskManager;
this.requestHelper = requestHelper;
this.slaveManager = slaveManager;
}
private void submitRequest(SingularityRequest request, Optional<SingularityRequestWithState> oldRequestWithState, Optional<RequestHistoryType> historyType,
Optional<Boolean> skipHealthchecks, Optional<String> message, Optional<SingularityBounceRequest> maybeBounceRequest) {
checkNotNullBadRequest(request.getId(), "Request must have an id");
checkConflict(!requestManager.cleanupRequestExists(request.getId()), "Request %s is currently cleaning. Try again after a few moments", request.getId());
Optional<SingularityPendingDeploy> maybePendingDeploy = deployManager.getPendingDeploy(request.getId());
checkConflict(!(maybePendingDeploy.isPresent() && maybePendingDeploy.get().getUpdatedRequest().isPresent()), "Request %s has a pending deploy that may change the request data. Try again when the deploy has finished", request.getId());
Optional<SingularityRequest> oldRequest = oldRequestWithState.isPresent() ? Optional.of(oldRequestWithState.get().getRequest()) : Optional.<SingularityRequest> absent();
if (oldRequest.isPresent()) {
authorizationHelper.checkForAuthorization(oldRequest.get(), user, SingularityAuthorizationScope.WRITE);
authorizationHelper.checkForAuthorizedChanges(request, oldRequest.get(), user);
validator.checkActionEnabled(SingularityAction.UPDATE_REQUEST);
} else {
validator.checkActionEnabled(SingularityAction.CREATE_REQUEST);
}
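    // SPREAD_ALL_SLAVES pins the instance count to the number of currently
    // active slaves, so that every active slave can receive one instance.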
if (request.getSlavePlacement().isPresent() && request.getSlavePlacement().get() == SlavePlacement.SPREAD_ALL_SLAVES) {
      checkBadRequest(validator.isSpreadAllSlavesEnabled(), "You must enable spread to all slaves in order to use the SPREAD_ALL_SLAVES request type");
int currentActiveSlaveCount = slaveManager.getNumObjectsAtState(MachineState.ACTIVE);
request = request.toBuilder().setInstances(Optional.of(currentActiveSlaveCount)).build();
}
if (!oldRequest.isPresent() || !(oldRequest.get().getInstancesSafe() == request.getInstancesSafe())) {
validator.checkScale(request, Optional.<Integer>absent());
}
authorizationHelper.checkForAuthorization(request, user, SingularityAuthorizationScope.WRITE);
RequestState requestState = RequestState.ACTIVE;
if (oldRequestWithState.isPresent()) {
requestState = oldRequestWithState.get().getState();
}
requestHelper.updateRequest(request, oldRequest, requestState, historyType, JavaUtils.getUserEmail(user), skipHealthchecks, message, maybeBounceRequest);
}
@POST
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Create or update a Singularity Request", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=400, message="Request object is invalid"),
@ApiResponse(code=409, message="Request object is being cleaned. Try again shortly"),
})
public SingularityRequestParent postRequest(@Context HttpServletRequest requestContext,
@ApiParam("The Singularity request to create or update") SingularityRequest request) {
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, request, () -> postRequest(request));
}
public SingularityRequestParent postRequest(SingularityRequest request) {
submitRequest(request, requestManager.getRequest(request.getId()), Optional.<RequestHistoryType> absent(), Optional.<Boolean> absent(), Optional.<String> absent(), Optional.<SingularityBounceRequest>absent());
return fillEntireRequest(fetchRequestWithState(request.getId()));
}
private String getAndCheckDeployId(String requestId) {
Optional<String> maybeDeployId = deployManager.getInUseDeployId(requestId);
checkConflict(maybeDeployId.isPresent(), "Can not schedule/bounce a request (%s) with no deploy", requestId);
return maybeDeployId.get();
}
@POST
@Path("/request/{requestId}/bounce")
public SingularityRequestParent bounce(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return bounce(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/bounce")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Bounce a specific Singularity request. A bounce launches replacement task(s), and then kills the original task(s) if the replacement(s) are healthy.",
response=SingularityRequestParent.class)
public SingularityRequestParent bounce(@ApiParam("The request ID to bounce") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Bounce request options") SingularityBounceRequest bounceRequest) {
final Optional<SingularityBounceRequest> maybeBounceRequest = Optional.fromNullable(bounceRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybeBounceRequest.orNull(), () -> bounce(requestId, maybeBounceRequest));
}
public SingularityRequestParent bounce(String requestId, Optional<SingularityBounceRequest> bounceRequest) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
validator.checkActionEnabled(SingularityAction.BOUNCE_REQUEST);
checkBadRequest(requestWithState.getRequest().isLongRunning(), "Can not bounce a %s request (%s)", requestWithState.getRequest().getRequestType(), requestWithState);
checkConflict(requestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to bounce (it must be manually unpaused first)", requestWithState.getRequest().getId());
final boolean isIncrementalBounce = bounceRequest.isPresent() && bounceRequest.get().getIncremental().or(false);
validator.checkResourcesForBounce(requestWithState.getRequest(), isIncrementalBounce);
validator.checkRequestForPriorityFreeze(requestWithState.getRequest());
final Optional<Boolean> skipHealthchecks = bounceRequest.isPresent() ? bounceRequest.get().getSkipHealthchecks() : Optional.<Boolean> absent();
Optional<String> message = Optional.absent();
Optional<String> actionId = Optional.absent();
Optional<SingularityShellCommand> runBeforeKill = Optional.absent();
if (bounceRequest.isPresent()) {
actionId = bounceRequest.get().getActionId();
message = bounceRequest.get().getMessage();
if (bounceRequest.get().getRunShellCommandBeforeKill().isPresent()) {
validator.checkValidShellCommand(bounceRequest.get().getRunShellCommandBeforeKill().get());
runBeforeKill = bounceRequest.get().getRunShellCommandBeforeKill();
}
}
if (!actionId.isPresent()) {
actionId = Optional.of(UUID.randomUUID().toString());
}
final String deployId = getAndCheckDeployId(requestId);
checkConflict(requestManager.markAsBouncing(requestId) != SingularityCreateResult.EXISTED, "%s is already bouncing", requestId);
requestManager.createCleanupRequest(
new SingularityRequestCleanup(JavaUtils.getUserEmail(user), isIncrementalBounce ? RequestCleanupType.INCREMENTAL_BOUNCE : RequestCleanupType.BOUNCE,
System.currentTimeMillis(), Optional.<Boolean> absent(), Optional.absent(), requestId, Optional.of(deployId), skipHealthchecks, message, actionId, runBeforeKill));
requestManager.bounce(requestWithState.getRequest(), System.currentTimeMillis(), JavaUtils.getUserEmail(user), message);
final SingularityBounceRequest validatedBounceRequest = validator.checkBounceRequest(bounceRequest.or(SingularityBounceRequest.defaultRequest()));
requestManager.saveExpiringObject(new SingularityExpiringBounce(requestId, deployId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), validatedBounceRequest, actionId.get()));
return fillEntireRequest(requestWithState);
}
@POST
@Path("/request/{requestId}/run")
public SingularityPendingRequestParent scheduleImmediately(@PathParam("requestId") String requestId) {
return scheduleImmediately(requestId, null);
}
@POST
@Path("/request/{requestId}/run")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Schedule a one-off or scheduled Singularity request for immediate or delayed execution.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=400, message="Singularity Request is not scheduled or one-off"),
})
public SingularityPendingRequestParent scheduleImmediately(@ApiParam("The request ID to run") @PathParam("requestId") String requestId,
SingularityRunNowRequest runNowRequest) {
final Optional<SingularityRunNowRequest> maybeRunNowRequest = Optional.fromNullable(runNowRequest);
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to run now (it must be manually unpaused first)", requestWithState.getRequest().getId());
final SingularityPendingRequest pendingRequest = validator.checkRunNowRequest(
getAndCheckDeployId(requestId),
JavaUtils.getUserEmail(user),
requestWithState.getRequest(),
maybeRunNowRequest,
taskManager.getActiveTaskIdsForRequest(requestId),
taskManager.getPendingTaskIdsForRequest(requestId));
SingularityCreateResult result = requestManager.addToPendingQueue(pendingRequest);
checkConflict(result != SingularityCreateResult.EXISTED, "%s is already pending, please try again soon", requestId);
return SingularityPendingRequestParent.fromSingularityRequestParent(fillEntireRequest(requestWithState), pendingRequest);
}
@GET
@Path("/request/{requestId}/run/{runId}")
@ApiOperation("Retrieve an active task by runId")
public Optional<SingularityTaskId> getTaskByRunId(@PathParam("requestId") String requestId, @PathParam("runId") String runId) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.READ);
return taskManager.getTaskByRunId(requestId, runId);
}
@POST
@Path("/request/{requestId}/pause")
public SingularityRequestParent pause(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return pause(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/pause")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Pause a Singularity request, future tasks will not run until it is manually unpaused. API can optionally choose to kill existing tasks", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=409, message="Request is already paused or being cleaned"),
})
public SingularityRequestParent pause(@ApiParam("The request ID to pause") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Pause Request Options") SingularityPauseRequest pauseRequest) {
final Optional<SingularityPauseRequest> maybePauseRequest = Optional.fromNullable(pauseRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybePauseRequest.orNull(), () -> pause(requestId, maybePauseRequest));
}
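// Pausing enqueues a PAUSING cleanup (optionally killing tasks and running a shell command
// first); if a duration was supplied, an expiring object is saved so the pause lapses on its own.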
public SingularityRequestParent pause(String requestId, Optional<SingularityPauseRequest> pauseRequest) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to pause (it must be manually unpaused first)", requestWithState.getRequest().getId());
Optional<Boolean> killTasks = Optional.absent();
Optional<String> message = Optional.absent();
Optional<String> actionId = Optional.absent();
Optional<SingularityShellCommand> runBeforeKill = Optional.absent();
if (pauseRequest.isPresent()) {
killTasks = pauseRequest.get().getKillTasks();
message = pauseRequest.get().getMessage();
if (pauseRequest.get().getRunShellCommandBeforeKill().isPresent()) {
validator.checkValidShellCommand(pauseRequest.get().getRunShellCommandBeforeKill().get());
runBeforeKill = pauseRequest.get().getRunShellCommandBeforeKill();
}
if (pauseRequest.get().getDurationMillis().isPresent() && !actionId.isPresent()) {
actionId = Optional.of(UUID.randomUUID().toString());
}
}
final long now = System.currentTimeMillis();
Optional<Boolean> removeFromLoadBalancer = Optional.absent();
SingularityCreateResult result = requestManager.createCleanupRequest(new SingularityRequestCleanup(JavaUtils.getUserEmail(user),
RequestCleanupType.PAUSING, now, killTasks, removeFromLoadBalancer, requestId, Optional.<String> absent(), Optional.<Boolean> absent(), message, actionId, runBeforeKill));
checkConflict(result == SingularityCreateResult.CREATED, "%s is already pausing - try again soon", requestId);
mailer.sendRequestPausedMail(requestWithState.getRequest(), pauseRequest, JavaUtils.getUserEmail(user));
requestManager.pause(requestWithState.getRequest(), now, JavaUtils.getUserEmail(user), message);
if (pauseRequest.isPresent() && pauseRequest.get().getDurationMillis().isPresent()) {
requestManager.saveExpiringObject(new SingularityExpiringPause(requestId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), pauseRequest.get(), actionId.get()));
}
return fillEntireRequest(new SingularityRequestWithState(requestWithState.getRequest(), RequestState.PAUSED, now));
}
@POST
@Path("/request/{requestId}/unpause")
public SingularityRequestParent unpauseNoBody(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return unpause(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/unpause")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Unpause a Singularity Request, scheduling new tasks immediately", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=409, message="Request is not paused"),
})
public SingularityRequestParent unpause(@ApiParam("The request ID to unpause") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
SingularityUnpauseRequest unpauseRequest) {
final Optional<SingularityUnpauseRequest> maybeUnpauseRequest = Optional.fromNullable(unpauseRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybeUnpauseRequest.orNull(), () -> unpause(requestId, maybeUnpauseRequest));
}
public SingularityRequestParent unpause(String requestId, Optional<SingularityUnpauseRequest> unpauseRequest) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() == RequestState.PAUSED, "Request %s is not in PAUSED state, it is in %s", requestId, requestWithState.getState());
Optional<String> message = Optional.absent();
Optional<Boolean> skipHealthchecks = Optional.absent();
if (unpauseRequest.isPresent()) {
message = unpauseRequest.get().getMessage();
skipHealthchecks = unpauseRequest.get().getSkipHealthchecks();
}
requestManager.deleteExpiringObject(SingularityExpiringPause.class, requestId);
final long now = requestHelper.unpause(requestWithState.getRequest(), JavaUtils.getUserEmail(user), message, skipHealthchecks);
return fillEntireRequest(new SingularityRequestWithState(requestWithState.getRequest(), RequestState.ACTIVE, now));
}
@POST
@Path("/request/{requestId}/exit-cooldown")
public SingularityRequestParent exitCooldown(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext) {
return exitCooldown(requestId, requestContext, null);
}
@POST
@Path("/request/{requestId}/exit-cooldown")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Immediately exits cooldown, scheduling new tasks immediately", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=409, message="Request is not in cooldown"),
})
public SingularityRequestParent exitCooldown(@PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
SingularityExitCooldownRequest exitCooldownRequest) {
final Optional<SingularityExitCooldownRequest> maybeExitCooldownRequest = Optional.fromNullable(exitCooldownRequest);
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, maybeExitCooldownRequest.orNull(), () -> exitCooldown(requestId, maybeExitCooldownRequest));
}
public SingularityRequestParent exitCooldown(String requestId, Optional<SingularityExitCooldownRequest> exitCooldownRequest) {
final SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
authorizationHelper.checkForAuthorization(requestWithState.getRequest(), user, SingularityAuthorizationScope.WRITE);
checkConflict(requestWithState.getState() == RequestState.SYSTEM_COOLDOWN, "Request %s is not in SYSTEM_COOLDOWN state, it is in %s", requestId, requestWithState.getState());
final Optional<String> maybeDeployId = deployManager.getInUseDeployId(requestId);
final long now = System.currentTimeMillis();
Optional<String> message = Optional.absent();
Optional<Boolean> skipHealthchecks = Optional.absent();
if (exitCooldownRequest.isPresent()) {
message = exitCooldownRequest.get().getMessage();
skipHealthchecks = exitCooldownRequest.get().getSkipHealthchecks();
}
requestManager.exitCooldown(requestWithState.getRequest(), now, JavaUtils.getUserEmail(user), message);
if (maybeDeployId.isPresent() && !requestWithState.getRequest().isOneOff()) {
requestManager.addToPendingQueue(new SingularityPendingRequest(requestId, maybeDeployId.get(), now, JavaUtils.getUserEmail(user),
PendingType.IMMEDIATE, skipHealthchecks, message));
}
return fillEntireRequest(requestWithState);
}
@GET
@PropertyFiltering
@Path("/active")
@ApiOperation(value="Retrieve the list of active requests", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getActiveRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeTaskIds") Boolean includeTaskIds,
@QueryParam("includeLastHistory") Boolean includeLastHistory,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getActiveRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
}
@GET
@PropertyFiltering
@Path("/paused")
@ApiOperation(value="Retrieve the list of paused requests", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getPausedRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeTaskIds") Boolean includeTaskIds,
@QueryParam("includeLastHistory") Boolean includeLastHistory,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getPausedRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
}
@GET
@PropertyFiltering
@Path("/cooldown")
@ApiOperation(value="Retrieve the list of requests in system cooldown", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getCooldownRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeTaskIds") Boolean includeTaskIds,
@QueryParam("includeLastHistory") Boolean includeLastHistory,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getCooldownRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
}
@GET
@PropertyFiltering
@Path("/finished")
@ApiOperation(value="Retreive the list of finished requests (Scheduled requests which have exhausted their schedules)", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getFinishedRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeTaskIds") Boolean includeTaskIds,
@QueryParam("includeLastHistory") Boolean includeLastHistory,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(Lists.newArrayList(requestManager.getFinishedRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
}
@GET
@PropertyFiltering
@ApiOperation(value="Retrieve the list of all requests", response=SingularityRequestParent.class, responseContainer="List")
public List<SingularityRequestParent> getRequests(@QueryParam("useWebCache") Boolean useWebCache,
@QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
@QueryParam("includeTaskIds") Boolean includeTaskIds,
@QueryParam("includeLastHistory") Boolean includeLastHistory,
@QueryParam("limit") Optional<Integer> limit) {
return requestHelper.fillDataForRequestsAndFilter(
filterAutorized(requestManager.getRequests(useWebCache(useWebCache)), SingularityAuthorizationScope.READ, user),
user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
}
private List<SingularityRequestWithState> filterAutorized(List<SingularityRequestWithState> requests, final SingularityAuthorizationScope scope, Optional<SingularityUser> user) {
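// Admins see every request; everyone else is filtered to the requests they hold the given scope on.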
if (!authorizationHelper.hasAdminAuthorization(user)) {
return requests.stream()
.filter((parent) -> authorizationHelper.isAuthorizedForRequest(parent.getRequest(), user, scope))
.collect(Collectors.toList());
}
return requests;
}
@GET
@PropertyFiltering
@Path("/queued/pending")
@ApiOperation(value="Retrieve the list of pending requests", response=SingularityPendingRequest.class, responseContainer="List")
public Iterable<SingularityPendingRequest> getPendingRequests() {
return authorizationHelper.filterByAuthorizedRequests(user, requestManager.getPendingRequests(), SingularityTransformHelpers.PENDING_REQUEST_TO_REQUEST_ID, SingularityAuthorizationScope.READ);
}
@GET
@PropertyFiltering
@Path("/queued/cleanup")
@ApiOperation(value="Retrieve the list of requests being cleaned up", response=SingularityRequestCleanup.class, responseContainer="List")
public Iterable<SingularityRequestCleanup> getCleanupRequests() {
return authorizationHelper.filterByAuthorizedRequests(user, requestManager.getCleanupRequests(), SingularityTransformHelpers.REQUEST_CLEANUP_TO_REQUEST_ID, SingularityAuthorizationScope.READ);
}
@GET
@Path("/request/{requestId}")
@ApiOperation(value="Retrieve a specific Request by ID", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequestParent getRequest(@ApiParam("Request ID") @PathParam("requestId") String requestId, @QueryParam("useWebCache") Boolean useWebCache) {
return fillEntireRequest(fetchRequestWithState(requestId, useWebCache(useWebCache)));
}
public SingularityRequestParent getRequest(String requestId) {
return fillEntireRequest(fetchRequestWithState(requestId, false));
}
@DELETE
@Path("/request/{requestId}")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Delete a specific Request by ID and return the deleted Request", response=SingularityRequest.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequest deleteRequest(@ApiParam("The request ID to delete.") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Delete options") SingularityDeleteRequestRequest deleteRequest) {
final Optional<SingularityDeleteRequestRequest> maybeDeleteRequest = Optional.fromNullable(deleteRequest);
return maybeProxyToLeader(requestContext, SingularityRequest.class, maybeDeleteRequest.orNull(), () -> deleteRequest(requestId, maybeDeleteRequest));
}
public SingularityRequest deleteRequest(String requestId, Optional<SingularityDeleteRequestRequest> deleteRequest) {
SingularityRequest request = fetchRequestWithState(requestId).getRequest();
authorizationHelper.checkForAuthorization(request, user, SingularityAuthorizationScope.WRITE);
validator.checkActionEnabled(SingularityAction.REMOVE_REQUEST);
Optional<String> message = Optional.absent();
Optional<String> actionId = Optional.absent();
Optional<Boolean> deleteFromLoadBalancer = Optional.absent();
if (deleteRequest.isPresent()) {
actionId = deleteRequest.get().getActionId();
message = deleteRequest.get().getMessage();
deleteFromLoadBalancer = deleteRequest.get().getDeleteFromLoadBalancer();
}
requestManager.startDeletingRequest(request, deleteFromLoadBalancer, JavaUtils.getUserEmail(user), actionId, message);
mailer.sendRequestRemovedMail(request, JavaUtils.getUserEmail(user), message);
return request;
}
@PUT
@Path("/request/{requestId}/scale")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Scale the number of instances up or down for a specific Request", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequestParent scale(@ApiParam("The Request ID to scale") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("Object to hold number of instances to request") SingularityScaleRequest scaleRequest) {
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, scaleRequest, () -> scale(requestId, scaleRequest));
}
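// Applies the new instance count and, when requested (or implied by bounceAfterScale),
// bounces the request so running tasks are replaced at the new scale.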
public SingularityRequestParent scale(String requestId, SingularityScaleRequest scaleRequest) {
SingularityRequestWithState oldRequestWithState = fetchRequestWithState(requestId);
SingularityRequest oldRequest = oldRequestWithState.getRequest();
authorizationHelper.checkForAuthorization(oldRequest, user, SingularityAuthorizationScope.WRITE);
validator.checkActionEnabled(SingularityAction.SCALE_REQUEST);
SingularityRequest newRequest = oldRequest.toBuilder().setInstances(scaleRequest.getInstances()).build();
validator.checkScale(newRequest, Optional.<Integer>absent());
checkBadRequest(oldRequest.getInstancesSafe() != newRequest.getInstancesSafe(), "Scale request has no effect on the # of instances (%s)", newRequest.getInstancesSafe());
String scaleMessage = String.format("Scaling from %d -> %d", oldRequest.getInstancesSafe(), newRequest.getInstancesSafe());
if (scaleRequest.getMessage().isPresent()) {
scaleMessage = String.format("%s -- %s", scaleRequest.getMessage().get(), scaleMessage);
}
if (scaleRequest.getBounce().or(newRequest.getBounceAfterScale().or(false))) {
validator.checkActionEnabled(SingularityAction.BOUNCE_REQUEST);
checkBadRequest(newRequest.isLongRunning(), "Can not bounce a %s request (%s)", newRequest.getRequestType(), newRequest);
checkConflict(oldRequestWithState.getState() != RequestState.PAUSED, "Request %s is paused. Unable to bounce (it must be manually unpaused first)", newRequest.getId());
checkConflict(!requestManager.cleanupRequestExists(newRequest.getId(), RequestCleanupType.BOUNCE), "Request %s is already bouncing, cannot bounce again", newRequest.getId());
final boolean isIncrementalBounce = scaleRequest.getIncremental().or(true);
validator.checkResourcesForBounce(newRequest, isIncrementalBounce);
validator.checkRequestForPriorityFreeze(newRequest);
SingularityBounceRequest bounceRequest = new SingularityBounceRequest(Optional.of(isIncrementalBounce), scaleRequest.getSkipHealthchecks(), Optional.<Long>absent(), Optional.of(UUID.randomUUID().toString()), Optional.<String>absent(), Optional.<SingularityShellCommand>absent());
submitRequest(newRequest, Optional.of(oldRequestWithState), Optional.of(RequestHistoryType.SCALED), scaleRequest.getSkipHealthchecks(), Optional.of(scaleMessage), Optional.of(bounceRequest));
} else {
submitRequest(newRequest, Optional.of(oldRequestWithState), Optional.of(RequestHistoryType.SCALED), scaleRequest.getSkipHealthchecks(), Optional.of(scaleMessage), Optional.<SingularityBounceRequest>absent());
}
if (scaleRequest.getDurationMillis().isPresent()) {
requestManager.saveExpiringObject(new SingularityExpiringScale(requestId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), scaleRequest, oldRequest.getInstances(), scaleRequest.getActionId().or(UUID.randomUUID().toString()), scaleRequest.getBounce()));
}
mailer.sendRequestScaledMail(newRequest, Optional.of(scaleRequest), oldRequest.getInstances(), JavaUtils.getUserEmail(user));
return fillEntireRequest(fetchRequestWithState(requestId));
}
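// Cancels a pending expiring action (scale/pause/bounce/skip-healthchecks), making the current state permanent.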
private <T extends SingularityExpiringRequestActionParent<?>> SingularityRequestParent deleteExpiringObject(Class<T> clazz, String requestId) {
SingularityRequestWithState requestWithState = fetchRequestWithState(requestId);
SingularityDeleteResult deleteResult = requestManager.deleteExpiringObject(clazz, requestId);
WebExceptions.checkNotFound(deleteResult == SingularityDeleteResult.DELETED, "%s didn't have an expiring %s request", requestId, clazz.getSimpleName());
return fillEntireRequest(requestWithState);
}
@DELETE
@Path("/request/{requestId}/scale")
@ApiOperation(value="Delete/cancel the expiring scale. This makes the scale request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring scale request for that ID"),
})
public SingularityRequestParent deleteExpiringScale(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringScale.class, requestId);
}
@Deprecated
@DELETE
@Path("/request/{requestId}/skipHealthchecks")
@ApiOperation(value="Delete/cancel the expiring skipHealthchecks. This makes the skipHealthchecks request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring skipHealthchecks request for that ID"),
})
public SingularityRequestParent deleteExpiringSkipHealthchecksDeprecated(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringSkipHealthchecks(requestId);
}
@DELETE
@Path("/request/{requestId}/skip-healthchecks")
@ApiOperation(value="Delete/cancel the expiring skipHealthchecks. This makes the skipHealthchecks request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring skipHealthchecks request for that ID"),
})
public SingularityRequestParent deleteExpiringSkipHealthchecks(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringSkipHealthchecks.class, requestId);
}
@DELETE
@Path("/request/{requestId}/pause")
@ApiOperation(value="Delete/cancel the expiring pause. This makes the pause request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring pause request for that ID"),
})
public SingularityRequestParent deleteExpiringPause(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringPause.class, requestId);
}
@DELETE
@Path("/request/{requestId}/bounce")
@ApiOperation(value="Delete/cancel the expiring bounce. This makes the bounce request permanent.", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request or expiring bounce request for that ID"),
})
public SingularityRequestParent deleteExpiringBounce(@ApiParam("The Request ID") @PathParam("requestId") String requestId) {
return deleteExpiringObject(SingularityExpiringBounce.class, requestId);
}
@Deprecated
@PUT
@Path("/request/{requestId}/skipHealthchecks")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Update the skipHealthchecks field for the request, possibly temporarily", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequestParent skipHealthchecksDeprecated(@ApiParam("The Request ID") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("SkipHealthchecks options") SingularitySkipHealthchecksRequest skipHealthchecksRequest) {
return skipHealthchecks(requestId, requestContext, skipHealthchecksRequest);
}
@PUT
@Path("/request/{requestId}/skip-healthchecks")
@Consumes({ MediaType.APPLICATION_JSON })
@ApiOperation(value="Update the skipHealthchecks field for the request, possibly temporarily", response=SingularityRequestParent.class)
@ApiResponses({
@ApiResponse(code=404, message="No Request with that ID"),
})
public SingularityRequestParent skipHealthchecks(@ApiParam("The Request ID") @PathParam("requestId") String requestId,
@Context HttpServletRequest requestContext,
@ApiParam("SkipHealthchecks options") SingularitySkipHealthchecksRequest skipHealthchecksRequest) {
return maybeProxyToLeader(requestContext, SingularityRequestParent.class, skipHealthchecksRequest, () -> skipHealthchecks(requestId, skipHealthchecksRequest));
}
public SingularityRequestParent skipHealthchecks(String requestId, SingularitySkipHealthchecksRequest skipHealthchecksRequest) {
SingularityRequestWithState oldRequestWithState = fetchRequestWithState(requestId);
SingularityRequest oldRequest = oldRequestWithState.getRequest();
SingularityRequest newRequest = oldRequest.toBuilder().setSkipHealthchecks(skipHealthchecksRequest.getSkipHealthchecks()).build();
submitRequest(newRequest, Optional.of(oldRequestWithState), Optional.<RequestHistoryType> absent(), Optional.<Boolean> absent(), skipHealthchecksRequest.getMessage(), Optional.<SingularityBounceRequest>absent());
if (skipHealthchecksRequest.getDurationMillis().isPresent()) {
requestManager.saveExpiringObject(new SingularityExpiringSkipHealthchecks(requestId, JavaUtils.getUserEmail(user),
System.currentTimeMillis(), skipHealthchecksRequest, oldRequest.getSkipHealthchecks(), skipHealthchecksRequest.getActionId().or(UUID.randomUUID().toString())));
}
return fillEntireRequest(fetchRequestWithState(requestId));
}
@GET
@PropertyFiltering
@Path("/lbcleanup")
@ApiOperation("Retrieve the list of tasks being cleaned from load balancers.")
public Iterable<String> getLbCleanupRequests(@QueryParam("useWebCache") Boolean useWebCache) {
return authorizationHelper.filterAuthorizedRequestIds(user, requestManager.getLbCleanupRequestIds(), SingularityAuthorizationScope.READ, useWebCache(useWebCache));
}
}
| less params
| SingularityService/src/main/java/com/hubspot/singularity/resources/RequestResource.java | less params | <ide><path>ingularityService/src/main/java/com/hubspot/singularity/resources/RequestResource.java
<ide> @ApiOperation(value="Retrieve the list of active requests", response=SingularityRequestParent.class, responseContainer="List")
<ide> public List<SingularityRequestParent> getActiveRequests(@QueryParam("useWebCache") Boolean useWebCache,
<ide> @QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
<del> @QueryParam("includeTaskIds") Boolean includeTaskIds,
<del> @QueryParam("includeLastHistory") Boolean includeLastHistory,
<add> @QueryParam("includeFullRequestData") Boolean includeFullRequestData,
<ide> @QueryParam("limit") Optional<Integer> limit) {
<ide> return requestHelper.fillDataForRequestsAndFilter(
<ide> filterAutorized(Lists.newArrayList(requestManager.getActiveRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
<del> user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
<add> user, filterRelevantForUser, includeFullRequestData, limit);
<ide> }
<ide>
<ide>
<ide> @ApiOperation(value="Retrieve the list of paused requests", response=SingularityRequestParent.class, responseContainer="List")
<ide> public List<SingularityRequestParent> getPausedRequests(@QueryParam("useWebCache") Boolean useWebCache,
<ide> @QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
<del> @QueryParam("includeTaskIds") Boolean includeTaskIds,
<del> @QueryParam("includeLastHistory") Boolean includeLastHistory,
<add> @QueryParam("includeFullRequestData") Boolean includeFullRequestData,
<ide> @QueryParam("limit") Optional<Integer> limit) {
<ide> return requestHelper.fillDataForRequestsAndFilter(
<ide> filterAutorized(Lists.newArrayList(requestManager.getPausedRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
<del> user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
<add> user, filterRelevantForUser, includeFullRequestData, limit);
<ide> }
<ide>
<ide> @GET
<ide> @ApiOperation(value="Retrieve the list of requests in system cooldown", response=SingularityRequestParent.class, responseContainer="List")
<ide> public List<SingularityRequestParent> getCooldownRequests(@QueryParam("useWebCache") Boolean useWebCache,
<ide> @QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
<del> @QueryParam("includeTaskIds") Boolean includeTaskIds,
<del> @QueryParam("includeLastHistory") Boolean includeLastHistory,
<add> @QueryParam("includeFullRequestData") Boolean includeFullRequestData,
<ide> @QueryParam("limit") Optional<Integer> limit) {
<ide> return requestHelper.fillDataForRequestsAndFilter(
<ide> filterAutorized(Lists.newArrayList(requestManager.getCooldownRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
<del> user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
<add> user, filterRelevantForUser, includeFullRequestData, limit);
<ide> }
<ide>
<ide> @GET
<ide> @ApiOperation(value="Retreive the list of finished requests (Scheduled requests which have exhausted their schedules)", response=SingularityRequestParent.class, responseContainer="List")
<ide> public List<SingularityRequestParent> getFinishedRequests(@QueryParam("useWebCache") Boolean useWebCache,
<ide> @QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
<del> @QueryParam("includeTaskIds") Boolean includeTaskIds,
<del> @QueryParam("includeLastHistory") Boolean includeLastHistory,
<add> @QueryParam("includeFullRequestData") Boolean includeFullRequestData,
<ide> @QueryParam("limit") Optional<Integer> limit) {
<ide> return requestHelper.fillDataForRequestsAndFilter(
<ide> filterAutorized(Lists.newArrayList(requestManager.getFinishedRequests(useWebCache(useWebCache))), SingularityAuthorizationScope.READ, user),
<del> user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
<add> user, filterRelevantForUser, includeFullRequestData, limit);
<ide> }
<ide>
<ide> @GET
<ide> @ApiOperation(value="Retrieve the list of all requests", response=SingularityRequestParent.class, responseContainer="List")
<ide> public List<SingularityRequestParent> getRequests(@QueryParam("useWebCache") Boolean useWebCache,
<ide> @QueryParam("filterRelevantForUser") Boolean filterRelevantForUser,
<del> @QueryParam("includeTaskIds") Boolean includeTaskIds,
<del> @QueryParam("includeLastHistory") Boolean includeLastHistory,
<add> @QueryParam("includeFullRequestData") Boolean includeFullRequestData,
<ide> @QueryParam("limit") Optional<Integer> limit) {
<ide> return requestHelper.fillDataForRequestsAndFilter(
<ide> filterAutorized(requestManager.getRequests(useWebCache(useWebCache)), SingularityAuthorizationScope.READ, user),
<del> user, filterRelevantForUser, includeTaskIds, includeLastHistory, limit);
<add> user, filterRelevantForUser, includeFullRequestData, limit);
<ide> }
<ide>
<ide> private List<SingularityRequestWithState> filterAutorized(List<SingularityRequestWithState> requests, final SingularityAuthorizationScope scope, Optional<SingularityUser> user) { |
|
JavaScript | mit | ce6de50f869e08bcd8337935879471c8589c515d | 0 | adityahase/frappe,indautgrp/frappe,yashodhank/frappe,gangadharkadam/vlinkfrappe,manassolanki/frappe,drukhil/frappe,pawaranand/phr-frappe,suyashphadtare/sajil-frappe,gangadharkadam/frappecontribution,elba7r/frameworking,RicardoJohann/frappe,rohitw1991/smartfrappe,rohitw1991/frappe,rohitwaghchaure/vestasi-frappe,mbauskar/omnitech-demo-frappe,gangadharkadam/v4_frappe,rohitwaghchaure/frappe,mbauskar/Das_frappe,hernad/frappe,indictranstech/Das_frappe,saguas/frappe,mbauskar/tele-frappe,BhupeshGupta/frappe,rohitwaghchaure/vestasi-frappe,gangadharkadam/saloon_frappe,gangadharkadam/vervefrappe,indictranstech/osmosis-frappe,aboganas/frappe,paurosello/frappe,gangadharkadam/v6_frappe,hatwar/buyback-frappe,Amber-Creative/amber-frappe,RicardoJohann/frappe,rohitw1991/frappe,suyashphadtare/propshikhari-frappe,gangadhar-kadam/verve_live_frappe,indictranstech/internal-frappe,pranalik/frappe-bb,sbkolate/sap_frappe_v6,StrellaGroup/frappe,gangadhar-kadam/verve_test_frappe,gangadharkadam/letzfrappe,pawaranand/phr_frappe,tmimori/frappe,indictranstech/ebuy-now-frappe,jevonearth/frappe,ashokrajbathu/secondrep,bcornwellmott/frappe,rohitwaghchaure/vestasi-frappe,ashokrajbathu/secondrep,gangadhar-kadam/verve_test_frappe,rohitwaghchaure/vestasi-frappe,neilLasrado/frappe,adityahase/frappe,mhbu50/frappe,StrellaGroup/frappe,sbktechnology/trufil-frappe,saurabh6790/phr-frappe,gangadharkadam/saloon_frappe_install,gangadhar-kadam/helpdesk-frappe,mbauskar/frappe,tmimori/frappe,saguas/frappe,rohitwaghchaure/frappe_smart,mbauskar/frappe,paurosello/frappe,indictranstech/fbd_frappe,pawaranand/phr_frappe,maxtorete/frappe,rohitw1991/smarttailorfrappe,indictranstech/tele-frappe,saurabh6790/frappe,suyashphadtare/sajil-final-frappe,rmehta/frappe,mbauskar/omnitech-demo-frappe,gangadhar-kadam/verve_test_frappe,elba7r/frameworking,indictranstech/frappe,mbauskar/helpdesk-frappe,sbktechnology/trufil-frappe,saguas/frappe,gangadhar-kadam/verve_live_frappe,gangadharkadam/v6_frappe,Amber-Creative/amber-frappe,tundebabzy/frappe,rohitwaghchaure/New_Theme_frappe,gangadharkadam/saloon_frappe_install,indictranstech/frappe-digitales,mhbu50/frappe,gangadhar-kadam/verve_live_frappe,geo-poland/frappe,mbauskar/frappe,saurabh6790/phr-frappe,indictranstech/frappe-digitales,rohitwaghchaure/frappe,pawaranand/phr-frappe,erpletzerp/letzerpcore,StrellaGroup/frappe,gangadharkadam/v5_frappe,neilLasrado/frappe,gangadharkadam/v4_frappe,MaxMorais/frappe,indictranstech/omnitech-frappe,gangadhar-kadam/laganfrappe,rohitwaghchaure/frappe-digitales,nerevu/frappe,indautgrp/frappe,elba7r/builder,rohitw1991/smarttailorfrappe,pawaranand/phr_frappe,RicardoJohann/frappe,BhupeshGupta/frappe,saurabh6790/test-frappe,maxtorete/frappe,hatwar/buyback-frappe,suyashphadtare/propshikhari-frappe,elba7r/builder,mbauskar/phr-frappe,rmehta/frappe,maxtorete/frappe,gangadharkadam/vlinkfrappe,indictranstech/ebuy-now-frappe,tmimori/frappe,gangadharkadam/frappecontribution,rohitwaghchaure/frappe,tundebabzy/frappe,vjFaLk/frappe,rohitwaghchaure/frappe-digitales,saurabh6790/frappe,vjFaLk/frappe,shitolepriya/test-frappe,pombredanne/frappe,gangadharkadam/shfr,anandpdoshi/frappe,indictranstech/trufil-frappe,reachalpineswift/frappe-bench,rohitwaghchaure/frappe-alec,elba7r/builder,elba7r/builder,vCentre/vFRP-6233,mbauskar/omnitech-frappe,adityahase/frappe,bohlian/frappe,Tejal011089/digitales_frappe,mhbu50/frappe,mbauskar/tele-frappe,gangadharkadam/frappecontribution,indictranstech/phr-frappe,vjFaLk/frappe,mhbu50/frappe,gangadharkadam/sal
oon_frappe,gangadharkadam/tailorfrappe,nerevu/frappe,indictranstech/fbd_frappe,sbkolate/sap_frappe_v6,pranalik/frappe-bb,gangadhar-kadam/helpdesk-frappe,hernad/frappe,indautgrp/frappe,bohlian/frappe,gangadharkadam/vervefrappe,pranalik/frappe-bb,indautgrp/frappe,rohitwaghchaure/frappe-digitales,frappe/frappe,aboganas/frappe,hernad/frappe,sbktechnology/sap_frappe,gangadharkadam/v6_frappe,rmehta/frappe,shitolepriya/test-frappe,sbktechnology/sap_frappe,yashodhank/frappe,mbauskar/helpdesk-frappe,gangadharkadam/v5_frappe,praba230890/frappe,indictranstech/ebuy-now-frappe,rohitwaghchaure/frappe-alec,gangadharkadam/vlinkfrappe,gangadharkadam/frappecontribution,rohitwaghchaure/frappe_smart,vjFaLk/frappe,mbauskar/phr-frappe,gangadhar-kadam/verve_test_frappe,indictranstech/reciphergroup-frappe,mbauskar/Das_frappe,deveninfotech/deven-frappe,tundebabzy/frappe,sbkolate/sap_frappe_v6,BhupeshGupta/frappe,ESS-LLP/frappe,suyashphadtare/propshikhari-frappe,gangadharkadam/v4_frappe,chdecultot/frappe,almeidapaulopt/frappe,indictranstech/frappe-digitales,rohitwaghchaure/New_Theme_frappe,yashodhank/frappe,deveninfotech/deven-frappe,geo-poland/frappe,rohitwaghchaure/frappe,vqw/frappe,vqw/frappe,mbauskar/tele-frappe,MaxMorais/frappe,Tejal011089/digitales_frappe,mbauskar/omnitech-frappe,mbauskar/phr-frappe,letzerp/framework,rohitw1991/smartfrappe,gangadhar-kadam/verve_frappe,tundebabzy/frappe,MaxMorais/frappe,ESS-LLP/frappe,saurabh6790/test-frappe,mbauskar/Das_frappe,sbktechnology/sap_frappe,tmimori/frappe,mbauskar/frappe,indictranstech/internal-frappe,bohlian/frappe,pranalik/frappe-bb,deveninfotech/deven-frappe,almeidapaulopt/frappe,mbauskar/omnitech-demo-frappe,paurosello/frappe,gangadharkadam/saloon_frappe,gangadhar-kadam/lgnlvefrape,anandpdoshi/frappe,ShashaQin/frappe,gangadharkadam/v5_frappe,gangadharkadam/letzfrappe,almeidapaulopt/frappe,gangadharkadam/saloon_frappe,saurabh6790/frappe,indictranstech/Das_frappe,shitolepriya/test-frappe,mbauskar/phr-frappe,drukhil/frappe,indictranstech/tele-frappe,mbauskar/omnitech-frappe,pawaranand/phr-frappe,pombredanne/frappe,mbauskar/omnitech-frappe,hernad/frappe,gangadharkadam/johnfrappe,indictranstech/frappe,geo-poland/frappe,chdecultot/frappe,indictranstech/omnitech-frappe,elba7r/frameworking,hatwar/buyback-frappe,letzerp/framework,gangadharkadam/saloon_frappe_install,manassolanki/frappe,indictranstech/internal-frappe,gangadhar-kadam/lgnlvefrape,indictranstech/osmosis-frappe,indictranstech/internal-frappe,jevonearth/frappe,sbkolate/sap_frappe_v6,sbktechnology/trufil-frappe,neilLasrado/frappe,hatwar/buyback-frappe,deveninfotech/deven-frappe,manassolanki/frappe,rohitwaghchaure/frappe-digitales,mbauskar/helpdesk-frappe,drukhil/frappe,chdecultot/frappe,rohitwaghchaure/New_Theme_frappe,sbktechnology/trufil-frappe,jevonearth/frappe,ShashaQin/frappe,indictranstech/osmosis-frappe,vCentre/vFRP-6233,mbauskar/helpdesk-frappe,indictranstech/osmosis-frappe,indictranstech/trufil-frappe,paurosello/frappe,rmehta/frappe,mbauskar/Das_frappe,aboganas/frappe,vqw/frappe,indictranstech/reciphergroup-frappe,aboganas/frappe,indictranstech/fbd_frappe,suyashphadtare/sajil-final-frappe,maxtorete/frappe,BhupeshGupta/frappe,suyashphadtare/propshikhari-frappe,gangadhar-kadam/laganfrappe,MaxMorais/frappe,letzerp/framework,suyashphadtare/sajil-final-frappe,bcornwellmott/frappe,frappe/frappe,chdecultot/frappe,elba7r/frameworking,reachalpineswift/frappe-bench,indictranstech/ebuy-now-frappe,suyashphadtare/sajil-frappe,indictranstech/frappe,gangadharkadam/v5_frappe,yashodhank/frappe,saurabh6790/phr-frappe,Sh
ashaQin/frappe,adityahase/frappe,Amber-Creative/amber-frappe,indictranstech/phr-frappe,gangadharkadam/v6_frappe,indictranstech/omnitech-frappe,vqw/frappe,bcornwellmott/frappe,nerevu/frappe,indictranstech/reciphergroup-frappe,indictranstech/frappe-digitales,gangadhar-kadam/helpdesk-frappe,gangadharkadam/v4_frappe,drukhil/frappe,manassolanki/frappe,reachalpineswift/frappe-bench,indictranstech/fbd_frappe,indictranstech/tele-frappe,RicardoJohann/frappe,gangadharkadam/vlinkfrappe,gangadharkadam/letzfrappe,indictranstech/omnitech-frappe,pombredanne/frappe,ShashaQin/frappe,frappe/frappe,gangadharkadam/saloon_frappe_install,indictranstech/phr-frappe,erpletzerp/letzerpcore,neilLasrado/frappe,vCentre/vFRP-6233,rohitwaghchaure/frappe-alec,shitolepriya/test-frappe,erpletzerp/letzerpcore,gangadhar-kadam/verve_frappe,sbktechnology/sap_frappe,almeidapaulopt/frappe,indictranstech/reciphergroup-frappe,gangadhar-kadam/verve_frappe,saurabh6790/test-frappe,praba230890/frappe,nerevu/frappe,praba230890/frappe,praba230890/frappe,pombredanne/frappe,saurabh6790/test-frappe,saurabh6790/phr-frappe,gangadharkadam/letzfrappe,gangadhar-kadam/verve_live_frappe,erpletzerp/letzerpcore,indictranstech/Das_frappe,anandpdoshi/frappe,mbauskar/omnitech-demo-frappe,indictranstech/frappe,gangadharkadam/johnfrappe,anandpdoshi/frappe,Tejal011089/digitales_frappe,bcornwellmott/frappe,jevonearth/frappe,reachalpineswift/frappe-bench,ashokrajbathu/secondrep,indictranstech/trufil-frappe,Amber-Creative/amber-frappe,ESS-LLP/frappe,gangadhar-kadam/verve_frappe,gangadhar-kadam/laganfrappe,gangadharkadam/shfr,gangadhar-kadam/helpdesk-frappe,indictranstech/phr-frappe,indictranstech/tele-frappe,gangadhar-kadam/lgnlvefrape,indictranstech/trufil-frappe,vCentre/vFRP-6233,ashokrajbathu/secondrep,suyashphadtare/sajil-frappe,gangadharkadam/vervefrappe,gangadharkadam/tailorfrappe,mbauskar/tele-frappe,ESS-LLP/frappe,indictranstech/Das_frappe,saguas/frappe,gangadharkadam/vervefrappe,letzerp/framework,bohlian/frappe,pawaranand/phr_frappe,Tejal011089/digitales_frappe,saurabh6790/frappe | // Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
// MIT License. See license.txt
frappe.ui.form.Comments = Class.extend({
init: function(opts) {
$.extend(this, opts);
this.make();
},
make: function() {
var me = this;
this.wrapper =this.parent;
$('<div class="comment-connector"></div>').appendTo(this.parent);
this.list = $('<div class="comments"></div>')
.appendTo(this.parent);
this.row = $(repl('<div class="media comment" data-name="%(name)s">\
<span class="pull-left avatar avatar-small">\
<img class="media-object" src="%(image)s">\
</span>\
<div class="media-body">\
<textarea style="height: 80px" class="form-control"></textarea>\
<div class="text-right" style="margin-top: 10px">\
<button class="btn btn-default btn-go btn-sm">\
<i class="icon-ok"></i> Add comment</button>\
</div>\
</div>\
</div>', {image: frappe.user_info(user).image,
fullname: user_fullname})).appendTo(this.parent);
this.input = this.row.find(".form-control");
this.button = this.row.find(".btn-go")
.click(function() {
me.add_comment(this);
});
},
refresh: function() {
var me = this;
if(this.frm.doc.__islocal) {
this.wrapper.toggle(false);
return;
}
this.wrapper.toggle(true);
this.list.empty();
comments = [{"comment": "Created", "comment_type": "Created",
"comment_by": this.frm.doc.owner, "creation": this.frm.doc.creation}].concat(this.get_comments());
$.each(comments, function(i, c) {
if((c.comment_type || "Comment") === "Comment" && frappe.model.can_delete("Comment")) {
c["delete"] = '<a class="close" href="#">×</a>';
} else {
c["delete"] = "";
}
c.image = frappe.user_info(c.comment_by).image || frappe.get_gravatar(c.comment_by);
c.comment_on = comment_when(c.creation);
c.fullname = frappe.user_info(c.comment_by).fullname;
if(!c.comment_type) c.comment_type = "Comment";
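// maps from comment type to the timeline icon and its colors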
c.icon = {
"Created": "icon-plus",
"Submitted": "icon-lock",
"Cancelled": "icon-remove",
"Assigned": "icon-user",
"Assignment Completed": "icon-ok",
"Comment": "icon-comment",
"Workflow": "icon-arrow-right",
"Label": "icon-tag",
"Attachment": "icon-paper-clip",
"Attachment Removed": "icon-paper-clip"
}[c.comment_type];
c.icon_bg = {
"Created": "#1abc9c",
"Submitted": "#3498db",
"Cancelled": "#c0392b",
"Assigned": "#f39c12",
"Assignment Completed": "#16a085",
"Comment": "#f39c12",
"Workflow": "#2c3e50",
"Label": "#2c3e50",
"Attachment": "#7f8c8d",
"Attachment Removed": "#eee"
}[c.comment_type];
c.icon_fg = {
"Attachment Removed": "#333",
}[c.comment_type];
if(!c.icon_fg) c.icon_fg = "#fff";
// label view
if(c.comment_type==="Workflow" || c.comment_type==="Label") {
c.comment_html = repl('<span class="label label-%(style)s">%(text)s</span>', {
style: frappe.utils.guess_style(c.comment),
text: c.comment
});
} else {
c.comment_html = frappe.markdown(c.comment);
}
// icon centering -- pixel-perfect
if(in_list(["Comment"], c.comment_type)) {
c.padding = "padding-left: 8px;";
} else {
c.padding = "";
}
$(repl('<div class="media comment" data-name="%(name)s">\
<span class="pull-left avatar avatar-small">\
<img class="media-object" src="%(image)s">\
</span>\
<span class="pull-left comment-icon">\
<i class="%(icon)s icon-timeline" \
style="background-color: %(icon_bg)s; color: %(icon_fg)s; %(padding)s"></i>\
</span>\
<div class="media-body comment-body">\
%(comment_html)s\
<div>\
<span class="small text-muted">\
%(fullname)s / %(comment_on)s %(delete)s</span>\
</div>\
</div>\
</div>', c))
.appendTo(me.list)
.on("click", ".close", function() {
var name = $(this).parents(".comment:first").attr("data-name");
me.delete_comment(name);
return false;
})
});
},
get_comments: function() {
return this.frm.get_docinfo().comments;
},
add_comment: function(btn) {
var txt = this.input.val();
if(txt) {
this.insert_comment("Comment", txt, btn);
}
},
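// saves the comment on the server, then appends it to the cached docinfo and re-renders the timeline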
insert_comment: function(comment_type, comment, btn) {
var me = this;
return frappe.call({
method: "frappe.widgets.form.utils.add_comment",
args: {
doc:{
doctype: "Comment",
comment_type: comment_type || "Comment",
comment_doctype: this.frm.doctype,
comment_docname: this.frm.docname,
comment: comment,
comment_by: user
}
},
btn: btn,
callback: function(r) {
if(!r.exc) {
me.frm.get_docinfo().comments =
me.get_comments().concat([r.message]);
me.frm.toolbar.show_infobar();
me.input.val("");
me.refresh();
}
}
});
},
delete_comment: function(name) {
var me = this;
return frappe.call({
method: "frappe.client.delete",
args: {
doctype: "Comment",
name: name
},
callback: function(r) {
if(!r.exc) {
me.frm.get_docinfo().comments =
$.map(me.frm.get_docinfo().comments,
function(v) {
if(v.name==name) return null;
else return v;
}
);
me.refresh();
me.frm.toolbar.show_infobar();
}
}
});
}
})
| frappe/public/js/frappe/form/comments.js | // Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
// MIT License. See license.txt
frappe.ui.form.Comments = Class.extend({
init: function(opts) {
$.extend(this, opts);
this.make();
},
make: function() {
var me = this;
this.wrapper =this.parent;
$('<div class="comment-connector"></div>').appendTo(this.parent);
this.list = $('<div class="comments"></div>')
.appendTo(this.parent);
this.row = $(repl('<div class="media comment" data-name="%(name)s">\
<span class="pull-left avatar avatar-small">\
<img class="media-object" src="%(image)s">\
</span>\
<div class="media-body">\
<textarea style="height: 80px" class="form-control"></textarea>\
<div class="text-right" style="margin-top: 10px">\
<button class="btn btn-default btn-go btn-sm">\
<i class="icon-ok"></i> Add comment</button>\
</div>\
</div>\
</div>', {image: frappe.user_info(user).image,
fullname: user_fullname})).appendTo(this.parent);
this.input = this.row.find(".form-control");
this.button = this.row.find(".btn-go")
.click(function() {
me.add_comment(this);
});
},
refresh: function() {
var me = this;
if(this.frm.doc.__islocal) {
this.wrapper.toggle(false);
return;
}
this.wrapper.toggle(true);
this.list.empty();
comments = [{"comment": "Created", "comment_type": "Created",
"comment_by": this.frm.doc.owner, "creation": this.frm.doc.creation}].concat(this.get_comments());
$.each(comments, function(i, c) {
if((c.comment_type || "Comment") === "Comment" && frappe.model.can_delete("Comment")) {
c["delete"] = '<a class="close" href="#">×</a>';
} else {
c["delete"] = "";
}
c.image = frappe.user_info(c.comment_by).image || frappe.get_gravatar(c.comment_by);
c.comment_on = dateutil.comment_when(c.creation);
c.fullname = frappe.user_info(c.comment_by).fullname;
if(!c.comment_type) c.comment_type = "Comment";
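// maps from comment type to the timeline icon and its colors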
c.icon = {
"Created": "icon-plus",
"Submitted": "icon-lock",
"Cancelled": "icon-remove",
"Assigned": "icon-user",
"Assignment Completed": "icon-ok",
"Comment": "icon-comment",
"Workflow": "icon-arrow-right",
"Label": "icon-tag",
"Attachment": "icon-paper-clip",
"Attachment Removed": "icon-paper-clip"
}[c.comment_type];
c.icon_bg = {
"Created": "#1abc9c",
"Submitted": "#3498db",
"Cancelled": "#c0392b",
"Assigned": "#f39c12",
"Assignment Completed": "#16a085",
"Comment": "#f39c12",
"Workflow": "#2c3e50",
"Label": "#2c3e50",
"Attachment": "#7f8c8d",
"Attachment Removed": "#eee"
}[c.comment_type];
c.icon_fg = {
"Attachment Removed": "#333",
}[c.comment_type];
if(!c.icon_fg) c.icon_fg = "#fff";
// label view
if(c.comment_type==="Workflow" || c.comment_type==="Label") {
c.comment_html = repl('<span class="label label-%(style)s">%(text)s</span>', {
style: frappe.utils.guess_style(c.comment),
text: c.comment
});
} else {
c.comment_html = frappe.markdown(c.comment);
}
// icon centering -- pixel-perfect
if(in_list(["Comment"], c.comment_type)) {
c.padding = "padding-left: 8px;";
} else {
c.padding = "";
}
$(repl('<div class="media comment" data-name="%(name)s">\
<span class="pull-left avatar avatar-small">\
<img class="media-object" src="%(image)s">\
</span>\
<span class="pull-left comment-icon">\
<i class="%(icon)s icon-timeline" \
style="background-color: %(icon_bg)s; color: %(icon_fg)s; %(padding)s"></i>\
</span>\
<div class="media-body comment-body">\
%(comment_html)s\
<div>\
<span class="small text-muted">\
%(fullname)s / %(comment_on)s %(delete)s</span>\
</div>\
</div>\
</div>', c))
.appendTo(me.list)
.on("click", ".close", function() {
var name = $(this).parents(".comment:first").attr("data-name");
me.delete_comment(name);
return false;
})
});
},
get_comments: function() {
return this.frm.get_docinfo().comments;
},
add_comment: function(btn) {
var txt = this.input.val();
if(txt) {
this.insert_comment("Comment", txt, btn);
}
},
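// saves the comment on the server, then appends it to the cached docinfo and re-renders the timeline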
insert_comment: function(comment_type, comment, btn) {
var me = this;
return frappe.call({
method: "frappe.widgets.form.utils.add_comment",
args: {
doc:{
doctype: "Comment",
comment_type: comment_type || "Comment",
comment_doctype: this.frm.doctype,
comment_docname: this.frm.docname,
comment: comment,
comment_by: user
}
},
btn: btn,
callback: function(r) {
if(!r.exc) {
me.frm.get_docinfo().comments =
me.get_comments().concat([r.message]);
me.frm.toolbar.show_infobar();
me.input.val("");
me.refresh();
}
}
});
},
delete_comment: function(name) {
var me = this;
return frappe.call({
method: "frappe.client.delete",
args: {
doctype: "Comment",
name: name
},
callback: function(r) {
if(!r.exc) {
me.frm.get_docinfo().comments =
$.map(me.frm.get_docinfo().comments,
function(v) {
if(v.name==name) return null;
else return v;
}
);
me.refresh();
me.frm.toolbar.show_infobar();
}
}
});
}
})
| [ux] fixes to timeline
| frappe/public/js/frappe/form/comments.js | [ux] fixes to timeline | <ide><path>rappe/public/js/frappe/form/comments.js
<ide> c["delete"] = "";
<ide> }
<ide> c.image = frappe.user_info(c.comment_by).image || frappe.get_gravatar(c.comment_by);
<del> c.comment_on = dateutil.comment_when(c.creation);
<add> c.comment_on = comment_when(c.creation);
<ide> c.fullname = frappe.user_info(c.comment_by).fullname;
<ide>
<ide> if(!c.comment_type) c.comment_type = "Comment";
|
Java | mit | error: pathspec 'src/test/java/io/teiler/api/endpoint/GroupEndpointTest.java' did not match any file(s) known to git
| f3e2eed65a0530393a8d18c6822047f649f0c575 | 1 | teiler/api.teiler.io | package io.teiler.api.endpoint;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.embedded.LocalServerPort;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import io.teiler.server.Tylr;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = Tylr.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
@TestPropertySource(properties = {"local.server.port=4567"})
public class GroupEndpointTest {
@LocalServerPort
private int port;
@Autowired
private TestRestTemplate testRestTemplate;
@Test
public void shouldReturn401WhenViewingGroupWithoutValidUUID() {
ResponseEntity<String> response = testRestTemplate.getForEntity("http://localhost:" + port + "/v1/group", String.class);
Assert.assertEquals(HttpStatus.UNAUTHORIZED, response.getStatusCode());
}
}
| src/test/java/io/teiler/api/endpoint/GroupEndpointTest.java | add test for group-endpoint | src/test/java/io/teiler/api/endpoint/GroupEndpointTest.java | add test for group-endpoint | <ide><path>rc/test/java/io/teiler/api/endpoint/GroupEndpointTest.java
<add>package io.teiler.api.endpoint;
<add>
<add>import org.junit.Assert;
<add>import org.junit.Test;
<add>import org.junit.runner.RunWith;
<add>import org.springframework.beans.factory.annotation.Autowired;
<add>import org.springframework.boot.context.embedded.LocalServerPort;
<add>import org.springframework.boot.test.context.SpringBootTest;
<add>import org.springframework.boot.test.web.client.TestRestTemplate;
<add>import org.springframework.http.HttpStatus;
<add>import org.springframework.http.ResponseEntity;
<add>import org.springframework.test.context.TestPropertySource;
<add>import org.springframework.test.context.junit4.SpringRunner;
<add>
<add>import io.teiler.server.Tylr;
<add>
<add>@RunWith(SpringRunner.class)
<add>@SpringBootTest(classes = Tylr.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
<add>@TestPropertySource(properties = {"local.server.port=4567"})
<add>public class GroupEndpointTest {
<add>
<add> @LocalServerPort
<add> private int port;
<add>
<add> @Autowired
<add> private TestRestTemplate testRestTemplate;
<add>
<add> @Test
<add> public void shouldReturn401WhenViewingGroupWithoutValidUUID() {
<add> ResponseEntity<String> response = testRestTemplate.getForEntity("http://localhost:" + port + "/v1/group", String.class);
<add> Assert.assertEquals(HttpStatus.UNAUTHORIZED, response.getStatusCode());
<add> }
<add>
<add>} |
|
Java | mit | 35a2e5b4678392cc67ca1608e9843c66e75f2453 | 0 | Train-Track/Train-Track-Android | package dyl.anjon.es.traintrack.fragments;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import android.content.Context;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import com.parse.FindCallback;
import com.parse.ParseException;
import com.parse.ParseQuery;
import dyl.anjon.es.traintrack.MapActivity;
import dyl.anjon.es.traintrack.R;
import dyl.anjon.es.traintrack.StationActivity;
import dyl.anjon.es.traintrack.adapters.StationRowAdapter;
import dyl.anjon.es.traintrack.models.Station;
import dyl.anjon.es.traintrack.utils.Utils;
public class StationsFragment extends Fragment {
private Location gps;
public StationsFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_stations, container,
false);
final ArrayList<Station> stations = new ArrayList<Station>();
final ListView list = (ListView) rootView.findViewById(R.id.list);
final StationRowAdapter adapter = new StationRowAdapter(inflater,
stations);
list.setAdapter(adapter);
list.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView<?> arg0, View view, int index,
long x) {
Station station = (Station) adapter.getItem(index);
Intent intent = new Intent().setClass(getActivity(),
StationActivity.class);
intent.putExtra("station_id", station.getObjectId());
startActivity(intent);
return;
}
});
ParseQuery<Station> query = ParseQuery.getQuery(Station.class);
query.fromLocalDatastore();
try {
int count = query.count();
Utils.log("Local stations count is " + count + " . Fetching...");
if (count == 0) {
query = ParseQuery.getQuery(Station.class);
}
} catch (ParseException e) {
Utils.log("Counting local stations: " + e.getMessage());
}
query.orderByAscending("name");
query.findInBackground(new FindCallback<Station>() {
@Override
public void done(List<Station> results, ParseException e) {
if (e == null) {
stations.addAll(results);
adapter.refresh(stations);
Station.pinAllInBackground(results);
} else {
Utils.log("Getting stations: " + e.getMessage());
}
}
});
EditText search = (EditText) rootView.findViewById(R.id.search);
search.addTextChangedListener(new TextWatcher() {
public void afterTextChanged(Editable arg0) {
}
public void beforeTextChanged(CharSequence arg0, int arg1,
int arg2, int arg3) {
}
public void onTextChanged(CharSequence search, int arg1, int arg2,
int arg3) {
adapter.getFilter().filter(search);
list.smoothScrollToPosition(0);
}
});
Button aZ = (Button) rootView.findViewById(R.id.a_z);
aZ.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
adapter.refresh(Station.getAll());
list.smoothScrollToPosition(0);
}
});
Button nearby = (Button) rootView.findViewById(R.id.nearby);
nearby.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (gps == null) {
return;
}
for (int i = 0; i < stations.size(); i++) {
Station station = stations.get(i);
float results[] = { 0, 0, 0 };
Location.distanceBetween(station.getLatitude(),
station.getLongitude(), gps.getLatitude(),
gps.getLongitude(), results);
station.setDistance(results[0]);
}
Collections.sort(stations, new DistanceComparator());
adapter.refresh(stations);
list.smoothScrollToPosition(0);
}
});
Button favourites = (Button) rootView.findViewById(R.id.favourites);
favourites.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
adapter.getFavouriteFilter().filter(null);
list.smoothScrollToPosition(0);
}
});
Button map = (Button) rootView.findViewById(R.id.map);
map.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent().setClass(getActivity(),
MapActivity.class);
intent.putExtra("all_stations", true);
startActivity(intent);
}
});
LocationManager locationManager = (LocationManager) getActivity()
.getSystemService(Context.LOCATION_SERVICE);
updateLocation(locationManager
.getLastKnownLocation(LocationManager.GPS_PROVIDER));
LocationListener locationListener = new LocationListener() {
public void onStatusChanged(String provider, int status,
Bundle extras) {
}
public void onProviderEnabled(String provider) {
}
public void onProviderDisabled(String provider) {
}
@Override
public void onLocationChanged(Location location) {
updateLocation(location);
}
};
locationManager.requestLocationUpdates(
LocationManager.NETWORK_PROVIDER, 0, 0, locationListener);
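		// Note: the initial fix above comes from GPS_PROVIDER while continuous updates
		// are requested from NETWORK_PROVIDER; both feed updateLocation(), but using a
		// single provider for both (or listening to both) would avoid a stale first fix.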
return rootView;
}
private void updateLocation(Location gps) {
if (gps != null) {
this.gps = gps;
}
}
public class DistanceComparator implements Comparator<Station> {
@Override
public int compare(Station o1, Station o2) {
return Float.compare(o1.getDistance(), o2.getDistance());
}
}
}
| src/dyl/anjon/es/traintrack/fragments/StationsFragment.java | package dyl.anjon.es.traintrack.fragments;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import android.content.Context;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import com.parse.FindCallback;
import com.parse.ParseException;
import com.parse.ParseQuery;
import dyl.anjon.es.traintrack.MapActivity;
import dyl.anjon.es.traintrack.R;
import dyl.anjon.es.traintrack.StationActivity;
import dyl.anjon.es.traintrack.adapters.StationRowAdapter;
import dyl.anjon.es.traintrack.models.Station;
import dyl.anjon.es.traintrack.utils.Utils;
public class StationsFragment extends Fragment {
private Location gps;
public StationsFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_stations, container,
false);
final ArrayList<Station> stations = new ArrayList<Station>();
final ListView list = (ListView) rootView.findViewById(R.id.list);
final StationRowAdapter adapter = new StationRowAdapter(inflater,
stations);
list.setAdapter(adapter);
list.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView<?> arg0, View view, int index,
long x) {
Station station = (Station) adapter.getItem(index);
Intent intent = new Intent().setClass(getActivity(),
StationActivity.class);
intent.putExtra("station_id", station.getObjectId());
startActivity(intent);
return;
}
});
ParseQuery<Station> query = ParseQuery.getQuery(Station.class);
query.fromLocalDatastore();
try {
if (query.count() == 0) {
query = ParseQuery.getQuery(Station.class);
}
} catch (ParseException e) {
Utils.log("Counting local stations: " + e.getMessage());
}
query.findInBackground(new FindCallback<Station>() {
@Override
public void done(List<Station> results, ParseException e) {
if (e == null) {
stations.addAll(results);
adapter.refresh(stations);
Station.pinAllInBackground(results);
} else {
Utils.log("Downloading stations: " + e.getMessage());
}
}
});
EditText search = (EditText) rootView.findViewById(R.id.search);
search.addTextChangedListener(new TextWatcher() {
public void afterTextChanged(Editable arg0) {
}
public void beforeTextChanged(CharSequence arg0, int arg1,
int arg2, int arg3) {
}
public void onTextChanged(CharSequence search, int arg1, int arg2,
int arg3) {
adapter.getFilter().filter(search);
list.smoothScrollToPosition(0);
}
});
Button aZ = (Button) rootView.findViewById(R.id.a_z);
aZ.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
adapter.refresh(Station.getAll());
list.smoothScrollToPosition(0);
}
});
Button nearby = (Button) rootView.findViewById(R.id.nearby);
nearby.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (gps == null) {
return;
}
for (int i = 0; i < stations.size(); i++) {
Station station = stations.get(i);
float results[] = { 0, 0, 0 };
Location.distanceBetween(station.getLatitude(),
station.getLongitude(), gps.getLatitude(),
gps.getLongitude(), results);
station.setDistance(results[0]);
}
Collections.sort(stations, new DistanceComparator());
adapter.refresh(stations);
list.smoothScrollToPosition(0);
}
});
Button favourites = (Button) rootView.findViewById(R.id.favourites);
favourites.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
adapter.getFavouriteFilter().filter(null);
list.smoothScrollToPosition(0);
}
});
Button map = (Button) rootView.findViewById(R.id.map);
map.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent().setClass(getActivity(),
MapActivity.class);
intent.putExtra("all_stations", true);
startActivity(intent);
}
});
LocationManager locationManager = (LocationManager) getActivity()
.getSystemService(Context.LOCATION_SERVICE);
updateLocation(locationManager
.getLastKnownLocation(LocationManager.GPS_PROVIDER));
LocationListener locationListener = new LocationListener() {
public void onStatusChanged(String provider, int status,
Bundle extras) {
}
public void onProviderEnabled(String provider) {
}
public void onProviderDisabled(String provider) {
}
@Override
public void onLocationChanged(Location location) {
updateLocation(location);
}
};
locationManager.requestLocationUpdates(
LocationManager.NETWORK_PROVIDER, 0, 0, locationListener);
return rootView;
}
private void updateLocation(Location gps) {
if (gps != null) {
this.gps = gps;
}
}
public class DistanceComparator implements Comparator<Station> {
@Override
public int compare(Station o1, Station o2) {
return Float.compare(o1.getDistance(), o2.getDistance());
}
}
}
| orders stations a-z + adds better logging
| src/dyl/anjon/es/traintrack/fragments/StationsFragment.java | orders stations a-z + adds better logging | <ide><path>rc/dyl/anjon/es/traintrack/fragments/StationsFragment.java
<ide> ParseQuery<Station> query = ParseQuery.getQuery(Station.class);
<ide> query.fromLocalDatastore();
<ide> try {
<del> if (query.count() == 0) {
<add> int count = query.count();
<add> 		Utils.log("Local stations count is " + count + ". Fetching...");
<add> if (count == 0) {
<ide> query = ParseQuery.getQuery(Station.class);
<ide> }
<ide> } catch (ParseException e) {
<ide> Utils.log("Counting local stations: " + e.getMessage());
<ide> }
<add> query.orderByAscending("name");
<ide> query.findInBackground(new FindCallback<Station>() {
<ide> @Override
<ide> public void done(List<Station> results, ParseException e) {
<ide> adapter.refresh(stations);
<ide> Station.pinAllInBackground(results);
<ide> } else {
<del> Utils.log("Downloading stations: " + e.getMessage());
<add> Utils.log("Getting stations: " + e.getMessage());
<ide> }
<ide> }
<ide> }); |
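// The local-datastore-first pattern this commit settles on generalizes to any
// pinned Parse class. A minimal sketch built only from APIs already used in
// this file (ParseQuery, fromLocalDatastore, count, orderByAscending,
// Utils.log); extracting the helper is illustrative, not part of the commit:
private static ParseQuery<Station> localFirstStationQuery() {
	ParseQuery<Station> query = ParseQuery.getQuery(Station.class);
	query.fromLocalDatastore();
	try {
		if (query.count() == 0) {
			query = ParseQuery.getQuery(Station.class); // nothing pinned yet: fall back to the network
		}
	} catch (ParseException e) {
		Utils.log("Counting local stations: " + e.getMessage());
	}
	query.orderByAscending("name"); // a-z, per the commit subject
	return query;
}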
|
JavaScript | bsd-3-clause | 92419acb1fbf996230c4c9a3427fa29ff8eb7c50 | 0 | sfchronicle/mapsense.js,manueltimita/mapsense.js,RavishankarDuMCA10/mapsense.js,Laurian/mapsense.js,cksachdev/mapsense.js,mapsense/mapsense.js,dillan/mapsense.js,gimlids/mapsense.js,shkfnly/mapsense.js | po.d3GeoJson = function(fetch) {
var d3GeoJson = po.layer(load, unload),
container = d3GeoJson.container(),
url,
clip = true,
clipId = "org.polymaps." + po.id(),
clipHref = "url(#" + clipId + ")",
clipPath = container.insertBefore(po.svg("clipPath"), container.firstChild),
clipRect = clipPath.appendChild(po.svg("rect")),
scale = "auto",
zoom = null,
features,
rectile = true;
container.setAttribute("fill-rule", "evenodd");
clipPath.setAttribute("id", clipId);
if (!arguments.length) fetch = po.queue.json;
function projection(proj) {
var l = {lat: 0, lon: 0};
return function(coordinates) {
l.lat = coordinates[1];
l.lon = coordinates[0];
var p = proj(l);
coordinates.x = p.x;
coordinates.y = p.y;
return p;
};
}
function rescale(o, e, k) {
return o.type in rescales && rescales[o.type](o, e, k);
}
var rescales = {
Point: function (o, e, k) {
var p = o.coordinates;
e.setAttribute("transform", "translate(" + p.x + "," + p.y + ")" + k);
},
MultiPoint: function (o, e, k) {
var c = o.coordinates,
i = -1,
			n = c.length, // was "p.length": "p" is only assigned inside the loop below, so the count must come from the coordinates array
x = e.firstChild,
p;
while (++i < n) {
p = c[i];
x.setAttribute("transform", "translate(" + p.x + "," + p.y + ")" + k);
x = x.nextSibling;
}
}
};
function load(tile, proj) {
var g = tile.element = po.svg("g");
var tileProj = proj(tile),
path = d3.geo.path().projection({
stream: function(stream) {
return {
point: function(x, y) {
var p = tileProj.locationPoint({ lon: x, lat: y});
stream.point(Math.round(2 * p.x) / 2, Math.round(2 * p.y) / 2);
},
sphere: function() { stream.sphere(); },
lineStart: function() { stream.lineStart(); },
lineEnd: function() { stream.lineEnd(); },
polygonStart: function() { stream.polygonStart(); },
polygonEnd: function() { stream.polygonEnd(); }
};
}
});
tile.features = [];
function update(data) {
var updated = [];
/* Fetch the next batch of features, if so directed. */
if (data.next) tile.request = fetch(data.next.href, update);
if (d3GeoJson.tile() && rectile) {
var tileSize = d3GeoJson.map().tileSize();
d3.select(g.insertBefore(po.svg("rect"), g.firstChild))
.attr("width", tileSize.x)
.attr("height", tileSize.x)
.attr("class", "rectile");
}
draw(g, data, path, updated, tile);
tile.ready = true;
updated.push.apply(tile.features, updated);
d3GeoJson.dispatch({type: "load", tile: tile, features: updated});
}
if (url != null) {
tile.request = fetch(typeof url == "function" ? url(tile) : url, update);
} else {
update({type: "FeatureCollection", features: features || []});
}
}
function draw(g, data, path, updated, tile) {
var update = d3.select(g)
.selectAll("path")
.data(data.features);
update.exit()
.remove();
var enter = update
.enter()
.append("path");
if (updated)
enter.each(function(f) { updated.push({ element: this, data: f }); });
var paths = [];
update.each(function(f, i) {
paths[i] = path(f);
});
update.attr("d", function(f, i) { return paths[i]; });
}
function unload(tile) {
if (tile.request) tile.request.abort(true);
}
function move() {
var zoom = d3GeoJson.map().zoom(),
tiles = d3GeoJson.cache.locks(), // visible tiles
key, // key in locks
tile, // locks[key]
features, // tile.features
i, // current feature index
n, // current feature count, features.length
feature, // features[i]
k; // scale transform
if (scale == "fixed") {
for (key in tiles) {
if ((tile = tiles[key]).scale != zoom) {
k = "scale(" + Math.pow(2, tile.zoom - zoom) + ")";
i = -1;
n = (features = tile.features).length;
while (++i < n) rescale((feature = features[i]).data.geometry, feature.element, k);
tile.scale = zoom;
}
}
} else {
for (key in tiles) {
i = -1;
n = (features = (tile = tiles[key]).features).length;
while (++i < n) rescale((feature = features[i]).data.geometry, feature.element, "");
delete tile.scale;
}
}
}
d3GeoJson.rectile = function(x) {
if (!arguments.length) return rectile;
rectile = x;
return d3GeoJson;
};
d3GeoJson.url = function(x) {
if (!arguments.length) return url;
url = typeof x == "string" && /{.}/.test(x) ? po.url(x) : x;
if (url != null) features = null;
if (typeof url == "string") d3GeoJson.tile(false);
return d3GeoJson.reload();
};
d3GeoJson.features = function(x) {
if (!arguments.length) return features;
if (features = x) {
url = null;
d3GeoJson.tile(false);
}
return d3GeoJson.reload();
};
d3GeoJson.clip = function(x) {
if (!arguments.length) return clip;
if (clip) container.removeChild(clipPath);
if (clip = x) container.insertBefore(clipPath, container.firstChild);
var locks = d3GeoJson.cache.locks();
for (var key in locks) {
if (clip) locks[key].element.setAttribute("clip-path", clipHref);
else locks[key].element.removeAttribute("clip-path");
}
return d3GeoJson;
};
var __tile__ = d3GeoJson.tile;
d3GeoJson.tile = function(x) {
if (arguments.length && !x) d3GeoJson.clip(x);
return __tile__.apply(d3GeoJson, arguments);
};
var __map__ = d3GeoJson.map;
d3GeoJson.map = function(x) {
if (x && clipRect) {
var size = x.tileSize();
clipRect.setAttribute("width", size.x);
clipRect.setAttribute("height", size.y);
}
return __map__.apply(d3GeoJson, arguments);
};
d3GeoJson.scale = function(x) {
if (!arguments.length) return scale;
if (scale = x) d3GeoJson.on("move", move);
else d3GeoJson.off("move", move);
if (d3GeoJson.map()) move();
return d3GeoJson;
};
d3GeoJson.show = function(tile) {
if (clip) tile.element.setAttribute("clip-path", clipHref);
else tile.element.removeAttribute("clip-path");
d3GeoJson.dispatch({type: "show", tile: tile, features: tile.features});
return d3GeoJson;
};
d3GeoJson.reshow = function() {
var locks = d3GeoJson.cache.locks();
for (var key in locks) d3GeoJson.show(locks[key]);
return d3GeoJson;
};
return d3GeoJson;
};
| src/D3GeoJson.js | po.d3GeoJson = function(fetch) {
var d3GeoJson = po.layer(load, unload),
container = d3GeoJson.container(),
url,
clip = true,
clipId = "org.polymaps." + po.id(),
clipHref = "url(#" + clipId + ")",
clipPath = container.insertBefore(po.svg("clipPath"), container.firstChild),
clipRect = clipPath.appendChild(po.svg("rect")),
scale = "auto",
zoom = null,
features;
container.setAttribute("fill-rule", "evenodd");
clipPath.setAttribute("id", clipId);
if (!arguments.length) fetch = po.queue.json;
function projection(proj) {
var l = {lat: 0, lon: 0};
return function(coordinates) {
l.lat = coordinates[1];
l.lon = coordinates[0];
var p = proj(l);
coordinates.x = p.x;
coordinates.y = p.y;
return p;
};
}
function rescale(o, e, k) {
return o.type in rescales && rescales[o.type](o, e, k);
}
var rescales = {
Point: function (o, e, k) {
var p = o.coordinates;
e.setAttribute("transform", "translate(" + p.x + "," + p.y + ")" + k);
},
MultiPoint: function (o, e, k) {
var c = o.coordinates,
i = -1,
			n = c.length, // same latent bug as in the newer copy above — "p" was read before assignment
x = e.firstChild,
p;
while (++i < n) {
p = c[i];
x.setAttribute("transform", "translate(" + p.x + "," + p.y + ")" + k);
x = x.nextSibling;
}
}
};
function load(tile, proj) {
var g = tile.element = po.svg("g");
var tileProj = proj(tile),
path = d3.geo.path().projection({
stream: function(stream) {
return {
point: function(x, y) {
var p = tileProj.locationPoint({ lon: x, lat: y});
stream.point(Math.round(2 * p.x) / 2, Math.round(2 * p.y) / 2);
},
sphere: function() { stream.sphere(); },
lineStart: function() { stream.lineStart(); },
lineEnd: function() { stream.lineEnd(); },
polygonStart: function() { stream.polygonStart(); },
polygonEnd: function() { stream.polygonEnd(); }
};
}
});
tile.features = [];
function update(data) {
var updated = [];
/* Fetch the next batch of features, if so directed. */
if (data.next) tile.request = fetch(data.next.href, update);
draw(g, data, path, updated, tile);
tile.ready = true;
updated.push.apply(tile.features, updated);
d3GeoJson.dispatch({type: "load", tile: tile, features: updated});
}
if (url != null) {
tile.request = fetch(typeof url == "function" ? url(tile) : url, update);
} else {
update({type: "FeatureCollection", features: features || []});
}
}
function draw(g, data, path, updated, tile) {
var update = d3.select(g)
.selectAll("path")
.data(data.features);
update.exit()
.remove();
var enter = update
.enter()
.append("path");
if (updated)
enter.each(function(f) { updated.push({ element: this, data: f }); });
var paths = [];
update.each(function(f, i) {
paths[i] = path(f);
});
update.attr("d", function(f, i) { return paths[i]; });
}
function unload(tile) {
if (tile.request) tile.request.abort(true);
}
function move() {
var zoom = d3GeoJson.map().zoom(),
tiles = d3GeoJson.cache.locks(), // visible tiles
key, // key in locks
tile, // locks[key]
features, // tile.features
i, // current feature index
n, // current feature count, features.length
feature, // features[i]
k; // scale transform
if (scale == "fixed") {
for (key in tiles) {
if ((tile = tiles[key]).scale != zoom) {
k = "scale(" + Math.pow(2, tile.zoom - zoom) + ")";
i = -1;
n = (features = tile.features).length;
while (++i < n) rescale((feature = features[i]).data.geometry, feature.element, k);
tile.scale = zoom;
}
}
} else {
for (key in tiles) {
i = -1;
n = (features = (tile = tiles[key]).features).length;
while (++i < n) rescale((feature = features[i]).data.geometry, feature.element, "");
delete tile.scale;
}
}
}
d3GeoJson.url = function(x) {
if (!arguments.length) return url;
url = typeof x == "string" && /{.}/.test(x) ? po.url(x) : x;
if (url != null) features = null;
if (typeof url == "string") d3GeoJson.tile(false);
return d3GeoJson.reload();
};
d3GeoJson.features = function(x) {
if (!arguments.length) return features;
if (features = x) {
url = null;
d3GeoJson.tile(false);
}
return d3GeoJson.reload();
};
d3GeoJson.clip = function(x) {
if (!arguments.length) return clip;
if (clip) container.removeChild(clipPath);
if (clip = x) container.insertBefore(clipPath, container.firstChild);
var locks = d3GeoJson.cache.locks();
for (var key in locks) {
if (clip) locks[key].element.setAttribute("clip-path", clipHref);
else locks[key].element.removeAttribute("clip-path");
}
return d3GeoJson;
};
var __tile__ = d3GeoJson.tile;
d3GeoJson.tile = function(x) {
if (arguments.length && !x) d3GeoJson.clip(x);
return __tile__.apply(d3GeoJson, arguments);
};
var __map__ = d3GeoJson.map;
d3GeoJson.map = function(x) {
if (x && clipRect) {
var size = x.tileSize();
clipRect.setAttribute("width", size.x);
clipRect.setAttribute("height", size.y);
}
return __map__.apply(d3GeoJson, arguments);
};
d3GeoJson.scale = function(x) {
if (!arguments.length) return scale;
if (scale = x) d3GeoJson.on("move", move);
else d3GeoJson.off("move", move);
if (d3GeoJson.map()) move();
return d3GeoJson;
};
d3GeoJson.show = function(tile) {
if (clip) tile.element.setAttribute("clip-path", clipHref);
else tile.element.removeAttribute("clip-path");
d3GeoJson.dispatch({type: "show", tile: tile, features: tile.features});
return d3GeoJson;
};
d3GeoJson.reshow = function() {
var locks = d3GeoJson.cache.locks();
for (var key in locks) d3GeoJson.show(locks[key]);
return d3GeoJson;
};
return d3GeoJson;
};
| Add support for rectiles.
| src/D3GeoJson.js | Add support for rectiles. | <ide><path>rc/D3GeoJson.js
<ide> clipRect = clipPath.appendChild(po.svg("rect")),
<ide> scale = "auto",
<ide> zoom = null,
<del> features;
<add> features,
<add> rectile = true;
<ide>
<ide> container.setAttribute("fill-rule", "evenodd");
<ide> clipPath.setAttribute("id", clipId);
<ide>
<ide> /* Fetch the next batch of features, if so directed. */
<ide> if (data.next) tile.request = fetch(data.next.href, update);
<add>
<add> if (d3GeoJson.tile() && rectile) {
<add> var tileSize = d3GeoJson.map().tileSize();
<add> d3.select(g.insertBefore(po.svg("rect"), g.firstChild))
<add> .attr("width", tileSize.x)
<add> .attr("height", tileSize.x)
<add> .attr("class", "rectile");
<add> }
<ide>
<ide> draw(g, data, path, updated, tile);
<ide>
<ide> }
<ide> }
<ide>
<add> d3GeoJson.rectile = function(x) {
<add> if (!arguments.length) return rectile;
<add> rectile = x;
<add> return d3GeoJson;
<add> };
<add>
<ide> d3GeoJson.url = function(x) {
<ide> if (!arguments.length) return url;
<ide> url = typeof x == "string" && /{.}/.test(x) ? po.url(x) : x; |
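// Usage sketch for the option added above. It assumes only what this commit
// introduces: "rectile" defaults to true and, when on, each tile gets a
// <rect class="rectile"> sized to the map's tile size, styleable from CSS.
// The tile URL template below is a hypothetical placeholder.
var layer = po.d3GeoJson()
    .url("http://example.com/tiles/{Z}/{X}/{Y}.json")
    .rectile(false); // opt out of the per-tile background rect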
|
Java | apache-2.0 | e23599c5b0b1ba7079a378f865b6aed77c5caa92 | 0 | mrivingt/EventStoreJava |
import java.net.HttpURLConnection;
//import java.net.MalformedURLException;
import java.net.URL;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
abstract class Defaults {
public static final String server = "127.0.0.1";
public static final String port = "2113";
public static final String stream = "/streams/account-11";
public static final String user = "admin";
public static final String password = "changeit";
public static final int sleeptime = 5000; // milliseconds
public static final String embed = "";
public static final String mimetype = "application/json";
public static final String url = "http://" + server + ":" + port + stream + embed;
public static final int idleDotsPerLine = 80;
}
class MyAuthenticator extends Authenticator {
public PasswordAuthentication getPasswordAuthentication () {
return new PasswordAuthentication (Defaults.user, Defaults.password.toCharArray());
}
}
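// Note: defining MyAuthenticator is not sufficient on its own — the JDK's
// HttpURLConnection stack only consults it once it has been registered, and
// this file never registers it. The missing wiring is one line of standard
// java.net API, typically executed once before the first request:
//
//     Authenticator.setDefault(new MyAuthenticator());
//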
public class eventStore {
private class EventStoreStream {
private String stream;
private JSONObject payload;
private boolean result;
private String previous;
private String data;
EventStoreStream (String stream) throws IOException {
this.stream = stream;
}
void getHeadofStream () throws IOException {
// Start at the head of the stream
this.payload = extract(getURL(stream));
if (payload.get("headOfStream").equals(true)) {
String last = getLink(payload,"last");
if (!last.equals("")) {
this.payload = extract(getURL(last));
}
}
			result = true; // getURL() terminates the JVM on HTTP errors, so reaching this line implies success
}
boolean dataExists() {
if (!this.data.equals("")) {return true;}
else {return false;}
}
String getData() {
return this.data;
}
void gotoPrevious() throws IOException {
this.previous = getLink(this.payload,"previous");
this.payload = extract(getURL(this.previous));
}
String getPrevious() {
return this.previous;
}
void extractDataFromPayload() {
this.data = getTheData(this.payload);
}
private String getTheData(JSONObject payload) {
// Now we are getting the pages of data
// The data items are in an array of links within an array of entries
JSONArray entries = (JSONArray) payload.get("entries");
//System.out.println("Number of entries: " + entries.size());
String responseData = "";
String entryResponse = "";
if (entries.size() == 0) {responseData = "";}
else {
for (int i=entries.size()-1;i>-1;i--) {
// for (int i=0;i<entries.size();i++) {
JSONObject entry = (JSONObject) entries.get(i);
//System.out.println(entry.toString());
JSONArray entryLinks = (JSONArray) entry.get("links");
responseData = responseData + "\n" + entry.get("title") + "\t" + entry.get("summary");
for (int j=0;j<entryLinks.size(); j++) {
JSONObject entryLink = (JSONObject) entryLinks.get(j);
if (entryLink.get("relation").equals("alternate")) {
//System.out.println(entryLink.get("uri"));
try {
entryResponse = getURL(entryLink.get("uri").toString());
} catch (IOException e) {
e.printStackTrace();
}
JSONObject entryPayload = extract(entryResponse);
responseData = responseData + "\t" + entryPayload.toString();
}
}
}
}
return responseData;
}
private boolean getResult() {
return this.result;
}
private String getLink(JSONObject payload, String linkType) {
JSONArray links = (JSONArray) payload.get("links");
String responseURI = "";
for (int i=0;i<links.size();i++) {
JSONObject link = (JSONObject) links.get(i);
if (link.get("relation").toString().equals(linkType)) {
responseURI = link.get("uri").toString();
}
}
return responseURI;
}
private String getURL(String url) throws IOException {
//System.out.print("\nGoing to get: " + url);
int responseCode = 0;
URL urlObj = new URL(url);
HttpURLConnection con = (HttpURLConnection) urlObj.openConnection();
con.setRequestMethod("GET");
//con.setRequestProperty("Accept-Language", "en-US,en;q=0.5");
con.setRequestProperty("Accept" , Defaults.mimetype );
responseCode = con.getResponseCode();
//System.out.print("\nGET response code: " + responseCode);
if (responseCode != 200) {
if (responseCode == 406) {System.out.print("\nhttp response 406: unacceptable content type specified: " + Defaults.mimetype);}
if (responseCode == 404) {System.out.print("\nhttp response 404: unable to locate stream: " + Defaults.stream);}
System.exit(responseCode);
}
BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
String inputLine, response = "";
while ((inputLine = in.readLine()) != null) {
response = response + inputLine;
//System.out.println(inputLine);
}
in.close();
return response;
}
private JSONObject extract(String response) {
JSONParser parser = new JSONParser();
JSONObject payload = null;
try {
payload = (JSONObject) parser.parse(response);
} catch (ParseException e) {
System.out.println("Input file has JSON parse error: " + e.getPosition() + " "
+ e.toString());
System.exit(4);
}
return payload;
}
}
public static void main(String[] args) throws Exception {
// Start at the head of the stream
System.out.print("\nStarting point: " + Defaults.url);
eventStore myEventStore = new eventStore();
EventStoreStream myEventStream = myEventStore.new EventStoreStream(Defaults.url);
myEventStream.getHeadofStream(); // payload is now set
if (!myEventStream.getResult()) {
System.out.print("\nGet Head of Stream failed");
System.exit(1);
}
myEventStream.extractDataFromPayload();
if (myEventStream.dataExists()) {
System.out.print(myEventStream.getData());
}
int idleCount = 0;
do {
myEventStream.gotoPrevious();
String newLine = "\n";
do {
myEventStream.extractDataFromPayload();
				if (myEventStream.dataExists()) {
					System.out.print(myEventStream.getData());
					idleCount = 0; // restart the idle-dot line, as the pre-refactor loop did
				}
else {
if ((idleCount % Defaults.idleDotsPerLine) == 0) {newLine = "\n";}
System.out.print(newLine + ".");
idleCount++;
newLine = "";
Thread.sleep(Defaults.sleeptime);}
} while (!myEventStream.dataExists());
} while (!myEventStream.getPrevious().equals(""));
System.exit(0);
}
}
| eventStore.java |
import java.net.HttpURLConnection;
//import java.net.MalformedURLException;
import java.net.URL;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
abstract class Defaults {
public static final String server = "127.0.0.1";
public static final String port = "2113";
public static final String stream = "/streams/account-11";
public static final String user = "admin";
public static final String password = "changeit";
public static final int sleeptime = 5000; // milliseconds
public static final String embed = "";
public static final String mimetype = "application/json";
public static final String url = "http://" + server + ":" + port + stream + embed;
}
public class eventStore {
public static void main(String[] args) throws Exception {
// Start at the head of the stream
System.out.print("\nStarting point: " + Defaults.url);
JSONObject payload = extract(getURL(Defaults.url));
if (payload.get("headOfStream").equals(true)) {
String last = getLink(payload,"last");
if (!last.equals("")) {
payload = extract(getURL(last));
}
}
String data = getTheData(payload);
if (!data.equals("")) {System.out.print(data);}
String previous = "";
int idleCount = 0;
do {
previous = getLink(payload,"previous");
data = "";
String newLine = "\n";
do {
payload = extract(getURL(previous));
data = getTheData(payload);
if (!data.equals( "")) {
System.out.print(data);
newLine = "\n";
idleCount = 0;}
else {
if ((idleCount % 80) == 0) {newLine = "\n";}
System.out.print(newLine + ".");
idleCount++;
newLine = "";
Thread.sleep(Defaults.sleeptime);
}
} while (data.equals(""));
} while(!previous.equals(""));
}
private static String getTheData(JSONObject payload) {
// Now we are getting the pages of data
// The data items are in an array of links within an array of entries
JSONArray entries = (JSONArray) payload.get("entries");
//System.out.println("Number of entries: " + entries.size());
String responseData = "";
String entryResponse = "";
if (entries.size() == 0) {responseData = "";}
else {
for (int i=entries.size()-1;i>-1;i--) {
// for (int i=0;i<entries.size();i++) {
JSONObject entry = (JSONObject) entries.get(i);
//System.out.println(entry.toString());
JSONArray entryLinks = (JSONArray) entry.get("links");
responseData = responseData + "\n" + entry.get("title") + "\t" + entry.get("summary");
for (int j=0;j<entryLinks.size(); j++) {
JSONObject entryLink = (JSONObject) entryLinks.get(j);
if (entryLink.get("relation").equals("alternate")) {
//System.out.println(entryLink.get("uri"));
try {
entryResponse = getURL(entryLink.get("uri").toString());
} catch (IOException e) {
e.printStackTrace();
}
JSONObject entryPayload = extract(entryResponse);
responseData = responseData + "\t" + entryPayload.toString();
}
}
}
}
return responseData;
}
private static String getLink(JSONObject payload, String linkType) {
JSONArray links = (JSONArray) payload.get("links");
String responseURI = "";
for (int i=0;i<links.size();i++) {
JSONObject link = (JSONObject) links.get(i);
if (link.get("relation").toString().equals(linkType)) {
responseURI = link.get("uri").toString();
}
}
return responseURI;
}
private static String getURL(String url) throws IOException {
//System.out.print("\nGoing to get: " + url);
int responseCode = 0;
URL urlObj = new URL(url);
HttpURLConnection con = (HttpURLConnection) urlObj.openConnection();
con.setRequestMethod("GET");
//con.setRequestProperty("Accept-Language", "en-US,en;q=0.5");
con.setRequestProperty("Accept" , Defaults.mimetype );
responseCode = con.getResponseCode();
//System.out.print("\nGET response code: " + responseCode);
if (responseCode != 200) {
if (responseCode == 406) {System.out.print("\nhttp response 406: unacceptable content type specified: " + Defaults.mimetype);}
if (responseCode == 404) {System.out.print("\nhttp response 404: unable to locate stream: " + Defaults.stream);}
System.exit(responseCode);
}
BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
String inputLine, response = "";
while ((inputLine = in.readLine()) != null) {
response = response + inputLine;
//System.out.println(inputLine);
}
in.close();
return response;
}
private static JSONObject extract(String response) {
JSONParser parser = new JSONParser();
JSONObject payload = null;
try {
payload = (JSONObject) parser.parse(response);
} catch (ParseException e) {
System.out.println("Input file has JSON parse error: " + e.getPosition() + " "
+ e.toString());
System.exit(4);
}
return payload;
}
class MyAuthenticator extends Authenticator {
public PasswordAuthentication getPasswordAuthentication () {
return new PasswordAuthentication (Defaults.user, Defaults.password.toCharArray());
}
}
}
| Now using an EventStoreStream class
EventStoreStream with lots of get methods
| eventStore.java | Now using an EventStoreStream class | <ide><path>ventStore.java
<add>
<add>
<ide>
<ide>
<ide> import java.net.HttpURLConnection;
<ide> public static final String embed = "";
<ide> public static final String mimetype = "application/json";
<ide> public static final String url = "http://" + server + ":" + port + stream + embed;
<add> public static final int idleDotsPerLine = 80;
<ide> }
<add>
<add>class MyAuthenticator extends Authenticator {
<add>
<add> public PasswordAuthentication getPasswordAuthentication () {
<add> return new PasswordAuthentication (Defaults.user, Defaults.password.toCharArray());
<add> }
<add>}
<ide>
<ide> public class eventStore {
<add>
<add> private class EventStoreStream {
<add>
<add> private String stream;
<add> private JSONObject payload;
<add> private boolean result;
<add> private String previous;
<add> private String data;
<add>
<add> EventStoreStream (String stream) throws IOException {
<add> this.stream = stream;
<add>
<add> }
<add>
<add> void getHeadofStream () throws IOException {
<add> // Start at the head of the stream
<add>
<add> this.payload = extract(getURL(stream));
<add>
<add> if (payload.get("headOfStream").equals(true)) {
<add> String last = getLink(payload,"last");
<add> if (!last.equals("")) {
<add> this.payload = extract(getURL(last));
<add> }
<add> }
<add> result = true;
<add>
<add> }
<add>
<add> boolean dataExists() {
<add> if (!this.data.equals("")) {return true;}
<add> else {return false;}
<add> }
<add>
<add> String getData() {
<add> return this.data;
<add> }
<add>
<add> void gotoPrevious() throws IOException {
<add> this.previous = getLink(this.payload,"previous");
<add> this.payload = extract(getURL(this.previous));
<add> }
<add>
<add> String getPrevious() {
<add> return this.previous;
<add> }
<add>
<add> void extractDataFromPayload() {
<add> this.data = getTheData(this.payload);
<add> }
<add>
<add> private String getTheData(JSONObject payload) {
<add> // Now we are getting the pages of data
<add> // The data items are in an array of links within an array of entries
<add>
<add> JSONArray entries = (JSONArray) payload.get("entries");
<add> //System.out.println("Number of entries: " + entries.size());
<add>
<add> String responseData = "";
<add> String entryResponse = "";
<add> if (entries.size() == 0) {responseData = "";}
<add> else {
<add> for (int i=entries.size()-1;i>-1;i--) {
<add> // for (int i=0;i<entries.size();i++) {
<add> JSONObject entry = (JSONObject) entries.get(i);
<add> //System.out.println(entry.toString());
<add> JSONArray entryLinks = (JSONArray) entry.get("links");
<add> responseData = responseData + "\n" + entry.get("title") + "\t" + entry.get("summary");
<add>
<add> for (int j=0;j<entryLinks.size(); j++) {
<add> JSONObject entryLink = (JSONObject) entryLinks.get(j);
<add> if (entryLink.get("relation").equals("alternate")) {
<add> //System.out.println(entryLink.get("uri"));
<add>
<add> try {
<add> entryResponse = getURL(entryLink.get("uri").toString());
<add> } catch (IOException e) {
<add> e.printStackTrace();
<add> }
<add> JSONObject entryPayload = extract(entryResponse);
<add> responseData = responseData + "\t" + entryPayload.toString();
<add> }
<add> }
<add> }
<add> }
<add> return responseData;
<add> }
<add> private boolean getResult() {
<add> return this.result;
<add> }
<add>
<add> private String getLink(JSONObject payload, String linkType) {
<add>
<add> JSONArray links = (JSONArray) payload.get("links");
<add> String responseURI = "";
<add>
<add> for (int i=0;i<links.size();i++) {
<add> JSONObject link = (JSONObject) links.get(i);
<add> if (link.get("relation").toString().equals(linkType)) {
<add> responseURI = link.get("uri").toString();
<add> }
<add> }
<add> return responseURI;
<add> }
<add>
<add> private String getURL(String url) throws IOException {
<add>
<add> //System.out.print("\nGoing to get: " + url);
<add> int responseCode = 0;
<add>
<add> URL urlObj = new URL(url);
<add> HttpURLConnection con = (HttpURLConnection) urlObj.openConnection();
<add> con.setRequestMethod("GET");
<add> //con.setRequestProperty("Accept-Language", "en-US,en;q=0.5");
<add> con.setRequestProperty("Accept" , Defaults.mimetype );
<add>
<add> responseCode = con.getResponseCode();
<add> //System.out.print("\nGET response code: " + responseCode);
<add>
<add> if (responseCode != 200) {
<add> if (responseCode == 406) {System.out.print("\nhttp response 406: unacceptable content type specified: " + Defaults.mimetype);}
<add> if (responseCode == 404) {System.out.print("\nhttp response 404: unable to locate stream: " + Defaults.stream);}
<add> System.exit(responseCode);
<add> }
<add>
<add> BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
<add> String inputLine, response = "";
<add>
<add> while ((inputLine = in.readLine()) != null) {
<add> response = response + inputLine;
<add> //System.out.println(inputLine);
<add> }
<add>
<add> in.close();
<add> return response;
<add> }
<add>
<add> private JSONObject extract(String response) {
<add> JSONParser parser = new JSONParser();
<add> JSONObject payload = null;
<add>
<add> try {
<add> payload = (JSONObject) parser.parse(response);
<add> } catch (ParseException e) {
<add> System.out.println("Input file has JSON parse error: " + e.getPosition() + " "
<add> + e.toString());
<add> System.exit(4);
<add> }
<add> return payload;
<add> }
<add>
<add>
<add> }
<ide>
<ide> public static void main(String[] args) throws Exception {
<ide>
<ide> // Start at the head of the stream
<ide> System.out.print("\nStarting point: " + Defaults.url);
<ide>
<del> JSONObject payload = extract(getURL(Defaults.url));
<del>
<del> if (payload.get("headOfStream").equals(true)) {
<del> String last = getLink(payload,"last");
<del> if (!last.equals("")) {
<del> payload = extract(getURL(last));
<del> }
<add> eventStore myEventStore = new eventStore();
<add> EventStoreStream myEventStream = myEventStore.new EventStoreStream(Defaults.url);
<add>
<add> myEventStream.getHeadofStream(); // payload is now set
<add> if (!myEventStream.getResult()) {
<add> System.out.print("\nGet Head of Stream failed");
<add> System.exit(1);
<ide> }
<del>
<del> String data = getTheData(payload);
<del> if (!data.equals("")) {System.out.print(data);}
<del>
<del> String previous = "";
<add> myEventStream.extractDataFromPayload();
<add> if (myEventStream.dataExists()) {
<add> System.out.print(myEventStream.getData());
<add> }
<add>
<ide> int idleCount = 0;
<add>
<ide> do {
<del> previous = getLink(payload,"previous");
<del>
<del> data = "";
<add> myEventStream.gotoPrevious();
<ide> String newLine = "\n";
<add>
<ide> do {
<del> payload = extract(getURL(previous));
<del> data = getTheData(payload);
<del> if (!data.equals( "")) {
<del> System.out.print(data);
<del> newLine = "\n";
<del> idleCount = 0;}
<add> myEventStream.extractDataFromPayload();
<add> 				if (myEventStream.dataExists()) {
<add> 					System.out.print(myEventStream.getData());
<add> 					idleCount = 0; // restart the idle-dot line, as the pre-refactor loop did
<add> 				}
<ide> else {
<del> if ((idleCount % 80) == 0) {newLine = "\n";}
<add>
<add> if ((idleCount % Defaults.idleDotsPerLine) == 0) {newLine = "\n";}
<ide> System.out.print(newLine + ".");
<ide> idleCount++;
<ide> newLine = "";
<del> Thread.sleep(Defaults.sleeptime);
<del> }
<del> } while (data.equals(""));
<del>
<del> } while(!previous.equals(""));
<del>
<add>
<add> Thread.sleep(Defaults.sleeptime);}
<add>
<add> } while (!myEventStream.dataExists());
<add>
<add>
<add> } while (!myEventStream.getPrevious().equals(""));
<add>
<add> System.exit(0);
<add>
<ide> }
<ide>
<del> private static String getTheData(JSONObject payload) {
<del> // Now we are getting the pages of data
<del> // The data items are in an array of links within an array of entries
<del>
<del> JSONArray entries = (JSONArray) payload.get("entries");
<del> //System.out.println("Number of entries: " + entries.size());
<del>
<del> String responseData = "";
<del> String entryResponse = "";
<del> if (entries.size() == 0) {responseData = "";}
<del> else {
<del> for (int i=entries.size()-1;i>-1;i--) {
<del> // for (int i=0;i<entries.size();i++) {
<del> JSONObject entry = (JSONObject) entries.get(i);
<del> //System.out.println(entry.toString());
<del> JSONArray entryLinks = (JSONArray) entry.get("links");
<del> responseData = responseData + "\n" + entry.get("title") + "\t" + entry.get("summary");
<del>
<del> for (int j=0;j<entryLinks.size(); j++) {
<del> JSONObject entryLink = (JSONObject) entryLinks.get(j);
<del> if (entryLink.get("relation").equals("alternate")) {
<del> //System.out.println(entryLink.get("uri"));
<del>
<del> try {
<del> entryResponse = getURL(entryLink.get("uri").toString());
<del> } catch (IOException e) {
<del> e.printStackTrace();
<del> }
<del> JSONObject entryPayload = extract(entryResponse);
<del> responseData = responseData + "\t" + entryPayload.toString();
<del> }
<del> }
<del> }
<del> }
<del> return responseData;
<del> }
<del>
<del> private static String getLink(JSONObject payload, String linkType) {
<del>
<del> JSONArray links = (JSONArray) payload.get("links");
<del> String responseURI = "";
<del>
<del> for (int i=0;i<links.size();i++) {
<del> JSONObject link = (JSONObject) links.get(i);
<del> if (link.get("relation").toString().equals(linkType)) {
<del> responseURI = link.get("uri").toString();
<del> }
<del> }
<del> return responseURI;
<del> }
<del>
<del> private static String getURL(String url) throws IOException {
<del>
<del> //System.out.print("\nGoing to get: " + url);
<del> int responseCode = 0;
<del>
<del> URL urlObj = new URL(url);
<del> HttpURLConnection con = (HttpURLConnection) urlObj.openConnection();
<del> con.setRequestMethod("GET");
<del> //con.setRequestProperty("Accept-Language", "en-US,en;q=0.5");
<del> con.setRequestProperty("Accept" , Defaults.mimetype );
<del>
<del> responseCode = con.getResponseCode();
<del> //System.out.print("\nGET response code: " + responseCode);
<del>
<del> if (responseCode != 200) {
<del> if (responseCode == 406) {System.out.print("\nhttp response 406: unacceptable content type specified: " + Defaults.mimetype);}
<del> if (responseCode == 404) {System.out.print("\nhttp response 404: unable to locate stream: " + Defaults.stream);}
<del> System.exit(responseCode);
<del> }
<del>
<del> BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
<del> String inputLine, response = "";
<del>
<del> while ((inputLine = in.readLine()) != null) {
<del> response = response + inputLine;
<del> //System.out.println(inputLine);
<del> }
<del>
<del> in.close();
<del> return response;
<del> }
<del>
<del> private static JSONObject extract(String response) {
<del> JSONParser parser = new JSONParser();
<del> JSONObject payload = null;
<del>
<del> try {
<del> payload = (JSONObject) parser.parse(response);
<del> } catch (ParseException e) {
<del> System.out.println("Input file has JSON parse error: " + e.getPosition() + " "
<del> + e.toString());
<del> System.exit(4);
<del> }
<del> return payload;
<del> }
<del>
<del> class MyAuthenticator extends Authenticator {
<del>
<del> public PasswordAuthentication getPasswordAuthentication () {
<del> return new PasswordAuthentication (Defaults.user, Defaults.password.toCharArray());
<del> }
<del> }
<add>
<add>
<add>
<ide> }
<ide>
<add>
<add> |
|
Java | apache-2.0 | a3482e85224f025fe10acdb5fbd2968895ef0822 | 0 | uschindler/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,gingerwizard/elasticsearch,uschindler/elasticsearch,GlenRSmith/elasticsearch,gingerwizard/elasticsearch,uschindler/elasticsearch,gingerwizard/elasticsearch,nknize/elasticsearch,nknize/elasticsearch,HonzaKral/elasticsearch,robin13/elasticsearch,gingerwizard/elasticsearch,robin13/elasticsearch,scorpionvicky/elasticsearch,HonzaKral/elasticsearch,nknize/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,HonzaKral/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,uschindler/elasticsearch,scorpionvicky/elasticsearch,GlenRSmith/elasticsearch,scorpionvicky/elasticsearch,GlenRSmith/elasticsearch,HonzaKral/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,gingerwizard/elasticsearch,GlenRSmith/elasticsearch | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
public class UUIDTests extends ESTestCase {
static UUIDGenerator timeUUIDGen = new TimeBasedUUIDGenerator();
static UUIDGenerator randomUUIDGen = new RandomBasedUUIDGenerator();
public void testRandomUUID() {
verifyUUIDSet(100000, randomUUIDGen);
}
public void testTimeUUID() {
verifyUUIDSet(100000, timeUUIDGen);
}
public void testThreadedTimeUUID() {
testUUIDThreaded(timeUUIDGen);
}
public void testThreadedRandomUUID() {
testUUIDThreaded(randomUUIDGen);
}
Set<String> verifyUUIDSet(int count, UUIDGenerator uuidSource) {
HashSet<String> uuidSet = new HashSet<>();
for (int i = 0; i < count; ++i) {
uuidSet.add(uuidSource.getBase64UUID());
}
assertEquals(count, uuidSet.size());
return uuidSet;
}
class UUIDGenRunner implements Runnable {
int count;
public Set<String> uuidSet = null;
UUIDGenerator uuidSource;
UUIDGenRunner(int count, UUIDGenerator uuidSource) {
this.count = count;
this.uuidSource = uuidSource;
}
@Override
public void run() {
uuidSet = verifyUUIDSet(count, uuidSource);
}
}
public void testUUIDThreaded(UUIDGenerator uuidSource) {
HashSet<UUIDGenRunner> runners = new HashSet<>();
HashSet<Thread> threads = new HashSet<>();
int count = 20;
int uuids = 10000;
for (int i = 0; i < count; ++i) {
UUIDGenRunner runner = new UUIDGenRunner(uuids, uuidSource);
Thread t = new Thread(runner);
threads.add(t);
runners.add(runner);
}
for (Thread t : threads) {
t.start();
}
boolean retry = false;
do {
for (Thread t : threads) {
try {
t.join();
} catch (InterruptedException ie) {
retry = true;
}
}
} while (retry);
HashSet<String> globalSet = new HashSet<>();
for (UUIDGenRunner runner : runners) {
globalSet.addAll(runner.uuidSet);
}
assertEquals(count*uuids, globalSet.size());
}
public void testCompression() throws Exception {
Logger logger = LogManager.getLogger(UUIDTests.class);
// Low number so that the test runs quickly, but the results are more interesting with larger numbers
// of indexed documents
assertThat(testCompression(100000, 10000, 3, logger), Matchers.lessThan(14d)); // ~12 in practice
assertThat(testCompression(100000, 1000, 3, logger), Matchers.lessThan(15d)); // ~13 in practice
assertThat(testCompression(100000, 100, 3, logger), Matchers.lessThan(21d)); // ~20 in practice
}
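	// Why the absolute thresholds above are safe to assert: the helper below pins
	// the codec and bounds the simulated clock, so the measured bytes-per-document
	// no longer swing with Lucene's per-test codec randomization (the cause of the
	// failures this commit addresses). The pinning idiom, as used in the helper:
	//
	//     IndexWriterConfig config = new IndexWriterConfig()
	//         .setCodec(Codec.forName(Lucene.LATEST_CODEC));
	//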
private static double testCompression(int numDocs, int numDocsPerSecond, int numNodes, Logger logger) throws Exception {
final double intervalBetweenDocs = 1000. / numDocsPerSecond; // milliseconds
final byte[][] macAddresses = new byte[numNodes][];
Random r = random();
for (int i = 0; i < macAddresses.length; ++i) {
macAddresses[i] = new byte[6];
random().nextBytes(macAddresses[i]);
}
UUIDGenerator generator = new TimeBasedUUIDGenerator() {
double currentTimeMillis = TestUtil.nextLong(random(), 0L, 10000000000L);
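			// Drawing the starting epoch from the test's random source (rather than the
			// wall clock, as before this change) bounds the timestamp magnitude and keeps
			// the generated UUIDs — and hence the index size measured below — reproducible
			// for a given test seed.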
@Override
protected long currentTimeMillis() {
currentTimeMillis += intervalBetweenDocs * 2 * r.nextDouble();
return (long) currentTimeMillis;
}
@Override
protected byte[] macAddress() {
return RandomPicks.randomFrom(r, macAddresses);
}
};
// Avoid randomization which will slow down things without improving
// the quality of this test
Directory dir = newFSDirectory(createTempDir());
IndexWriterConfig config = new IndexWriterConfig()
.setCodec(Codec.forName(Lucene.LATEST_CODEC))
.setMergeScheduler(new SerialMergeScheduler()); // for reproducibility
IndexWriter w = new IndexWriter(dir, config);
Document doc = new Document();
StringField id = new StringField("_id", "", Store.NO);
doc.add(id);
long start = System.nanoTime();
for (int i = 0; i < numDocs; ++i) {
id.setStringValue(generator.getBase64UUID());
w.addDocument(doc);
}
w.forceMerge(1);
long time = (System.nanoTime() - start) / 1000 / 1000;
w.close();
long size = 0;
for (String file : dir.listAll()) {
size += dir.fileLength(file);
}
dir.close();
double bytesPerDoc = (double) size / numDocs;
logger.info(numDocs + " docs indexed at " + numDocsPerSecond + " docs/s required " + new ByteSizeValue(size)
+ " bytes of disk space, or " + bytesPerDoc + " bytes per document. Took: " + new TimeValue(time) + ".");
return bytesPerDoc;
}
}
| server/src/test/java/org/elasticsearch/common/UUIDTests.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
public class UUIDTests extends ESTestCase {
static UUIDGenerator timeUUIDGen = new TimeBasedUUIDGenerator();
static UUIDGenerator randomUUIDGen = new RandomBasedUUIDGenerator();
public void testRandomUUID() {
verifyUUIDSet(100000, randomUUIDGen);
}
public void testTimeUUID() {
verifyUUIDSet(100000, timeUUIDGen);
}
public void testThreadedTimeUUID() {
testUUIDThreaded(timeUUIDGen);
}
public void testThreadedRandomUUID() {
testUUIDThreaded(randomUUIDGen);
}
Set<String> verifyUUIDSet(int count, UUIDGenerator uuidSource) {
HashSet<String> uuidSet = new HashSet<>();
for (int i = 0; i < count; ++i) {
uuidSet.add(uuidSource.getBase64UUID());
}
assertEquals(count, uuidSet.size());
return uuidSet;
}
class UUIDGenRunner implements Runnable {
int count;
public Set<String> uuidSet = null;
UUIDGenerator uuidSource;
UUIDGenRunner(int count, UUIDGenerator uuidSource) {
this.count = count;
this.uuidSource = uuidSource;
}
@Override
public void run() {
uuidSet = verifyUUIDSet(count, uuidSource);
}
}
public void testUUIDThreaded(UUIDGenerator uuidSource) {
HashSet<UUIDGenRunner> runners = new HashSet<>();
HashSet<Thread> threads = new HashSet<>();
int count = 20;
int uuids = 10000;
for (int i = 0; i < count; ++i) {
UUIDGenRunner runner = new UUIDGenRunner(uuids, uuidSource);
Thread t = new Thread(runner);
threads.add(t);
runners.add(runner);
}
for (Thread t : threads) {
t.start();
}
boolean retry = false;
do {
for (Thread t : threads) {
try {
t.join();
} catch (InterruptedException ie) {
retry = true;
}
}
} while (retry);
HashSet<String> globalSet = new HashSet<>();
for (UUIDGenRunner runner : runners) {
globalSet.addAll(runner.uuidSet);
}
assertEquals(count*uuids, globalSet.size());
}
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/50048")
public void testCompression() throws Exception {
Logger logger = LogManager.getLogger(UUIDTests.class);
// Low number so that the test runs quickly, but the results are more interesting with larger numbers
// of indexed documents
assertThat(testCompression(100000, 10000, 3, logger), Matchers.lessThan(14d)); // ~12 in practice
assertThat(testCompression(100000, 1000, 3, logger), Matchers.lessThan(15d)); // ~13 in practice
assertThat(testCompression(100000, 100, 3, logger), Matchers.lessThan(21d)); // ~20 in practice
}
private static double testCompression(int numDocs, int numDocsPerSecond, int numNodes, Logger logger) throws Exception {
final double intervalBetweenDocs = 1000. / numDocsPerSecond; // milliseconds
final byte[][] macAddresses = new byte[numNodes][];
Random r = random();
for (int i = 0; i < macAddresses.length; ++i) {
macAddresses[i] = new byte[6];
random().nextBytes(macAddresses[i]);
}
UUIDGenerator generator = new TimeBasedUUIDGenerator() {
double currentTimeMillis = System.currentTimeMillis();
@Override
protected long currentTimeMillis() {
currentTimeMillis += intervalBetweenDocs * 2 * r.nextDouble();
return (long) currentTimeMillis;
}
@Override
protected byte[] macAddress() {
return RandomPicks.randomFrom(r, macAddresses);
}
};
// Avoid randomization which will slow down things without improving
// the quality of this test
Directory dir = newFSDirectory(createTempDir());
IndexWriterConfig config = new IndexWriterConfig()
.setMergeScheduler(new SerialMergeScheduler()); // for reproducibility
IndexWriter w = new IndexWriter(dir, config);
Document doc = new Document();
StringField id = new StringField("_id", "", Store.NO);
doc.add(id);
long start = System.nanoTime();
for (int i = 0; i < numDocs; ++i) {
id.setStringValue(generator.getBase64UUID());
w.addDocument(doc);
}
w.forceMerge(1);
long time = (System.nanoTime() - start) / 1000 / 1000;
w.close();
long size = 0;
for (String file : dir.listAll()) {
size += dir.fileLength(file);
}
dir.close();
double bytesPerDoc = (double) size / numDocs;
logger.info(numDocs + " docs indexed at " + numDocsPerSecond + " docs/s required " + new ByteSizeValue(size)
+ " bytes of disk space, or " + bytesPerDoc + " bytes per document. Took: " + new TimeValue(time) + ".");
return bytesPerDoc;
}
}
| Address UUIDTests#testCompression failures. (#50093)
Those were due to codec randomization.
Closes #50048
| server/src/test/java/org/elasticsearch/common/UUIDTests.java | Address UUIDTests#testCompression failures. (#50093) | <ide><path>erver/src/test/java/org/elasticsearch/common/UUIDTests.java
<ide>
<ide> import org.apache.logging.log4j.LogManager;
<ide> import org.apache.logging.log4j.Logger;
<add>import org.apache.lucene.codecs.Codec;
<ide> import org.apache.lucene.document.Document;
<ide> import org.apache.lucene.document.Field.Store;
<ide> import org.apache.lucene.document.StringField;
<ide> import org.apache.lucene.index.IndexWriterConfig;
<ide> import org.apache.lucene.index.SerialMergeScheduler;
<ide> import org.apache.lucene.store.Directory;
<add>import org.apache.lucene.util.TestUtil;
<add>import org.elasticsearch.common.lucene.Lucene;
<ide> import org.elasticsearch.common.unit.ByteSizeValue;
<ide> import org.elasticsearch.common.unit.TimeValue;
<ide> import org.elasticsearch.test.ESTestCase;
<ide> assertEquals(count*uuids, globalSet.size());
<ide> }
<ide>
<del> @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/50048")
<ide> public void testCompression() throws Exception {
<ide> Logger logger = LogManager.getLogger(UUIDTests.class);
<ide> // Low number so that the test runs quickly, but the results are more interesting with larger numbers
<ide> random().nextBytes(macAddresses[i]);
<ide> }
<ide> UUIDGenerator generator = new TimeBasedUUIDGenerator() {
<del> double currentTimeMillis = System.currentTimeMillis();
<add> double currentTimeMillis = TestUtil.nextLong(random(), 0L, 10000000000L);
<ide>
<ide> @Override
<ide> protected long currentTimeMillis() {
<ide> // the quality of this test
<ide> Directory dir = newFSDirectory(createTempDir());
<ide> IndexWriterConfig config = new IndexWriterConfig()
<add> .setCodec(Codec.forName(Lucene.LATEST_CODEC))
<ide> .setMergeScheduler(new SerialMergeScheduler()); // for reproducibility
<ide> IndexWriter w = new IndexWriter(dir, config);
<ide> Document doc = new Document(); |
|
Java | mit | 213459770953686b0c070252a3895c9fa1f8b61e | 0 | MarquisLP/World-Scribe,MarquisLP/WorldScribe | package com.averi.worldscribe.activities;
import android.Manifest;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.provider.Settings;
import androidx.appcompat.app.AlertDialog;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.core.content.FileProvider;
import androidx.documentfile.provider.DocumentFile;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ScrollView;
import android.widget.TextView;
import com.averi.worldscribe.R;
import com.averi.worldscribe.utilities.ActivityUtilities;
import com.averi.worldscribe.utilities.AppPreferences;
import com.averi.worldscribe.utilities.ExternalReader;
import com.averi.worldscribe.utilities.ExternalWriter;
import com.averi.worldscribe.utilities.FileRetriever;
import com.balda.flipper.Root;
import com.balda.flipper.StorageManagerCompat;
import java.io.File;
public class PermissionActivity extends ThemedActivity {
public static final int REQUEST_WRITE_EXTERNAL_STORAGE = 1;
public static final int REQUEST_WRITE_ROOT_DIRECTORY = 2;
private TextView textWelcome;
private TextView textExplanation;
private SharedPreferences preferences = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
preferences = getSharedPreferences("com.averi.worldscribe", MODE_PRIVATE);
textWelcome = findViewById(R.id.textWelcome);
textExplanation = findViewById(R.id.textExplanation);
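        // When the platform predates runtime permissions, or write access is already granted, try to restore a previously authorized storage root instead of prompting.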
if ((!(deviceUsesRuntimePermissions())) || (writePermissionWasGranted())) {
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
if ((root != null) && (root.isAccessGranted(this))) {
try {
Uri convertedFileRootUri = convertFileRootUriToCorrectFormat(
root.toRootDirectory(this).getUri());
preferences.edit().putString(AppPreferences.ROOT_DIRECTORY_URI,
convertedFileRootUri.toString())
.apply();
generateMissingAppDirectoryAndFiles();
goToNextActivity();
} catch (Exception exception) {
ScrollView scrollView = new ScrollView(this);
new AlertDialog.Builder(this)
.setTitle("Troubleshooting")
.setView(scrollView)
.setMessage(exception.getMessage() + ". Stack trace: " + Log.getStackTraceString(exception))
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
}
})
.show();
}
}
else {
textWelcome.setText(R.string.selectRootDirectoryTitle);
textExplanation.setText(R.string.selectRootDirectoryExplanation);
}
}
}
@Override
protected int getLayoutResourceID() {
return R.layout.activity_permission;
}
@Override
protected ViewGroup getRootLayout() {
return (ViewGroup) findViewById(R.id.linearScreen);
}
private boolean deviceUsesRuntimePermissions() {
return (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1);
}
private boolean writePermissionWasGranted() {
int permissionCheck = ContextCompat.checkSelfPermission(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE);
return (permissionCheck == PackageManager.PERMISSION_GRANTED);
}
public void askForWritePermission(View view) {
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
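        // Write permission granted but no SAF root access yet (Android 10+): request document-tree access; otherwise fall back to the classic runtime-permission flow.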
if ((writePermissionWasGranted()) && ((root == null) || (!root.isAccessGranted(this)))) {
Intent getExternalFolderAccessIntent = storageManagerCompat.requireExternalAccess(this);
startActivityForResult(getExternalFolderAccessIntent, REQUEST_WRITE_ROOT_DIRECTORY);
}
else {
if (preferences.getBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED, true)) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
REQUEST_WRITE_EXTERNAL_STORAGE);
} else {
goToAppSettings();
}
}
}
@Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions,
int[] grantResults) {
switch (requestCode) {
case REQUEST_WRITE_EXTERNAL_STORAGE:
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// On Android SDK 29 and above, we need to ask for permission to
// access the root of the user's external storage.
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
if ((root == null) || (!root.isAccessGranted(this))) {
textWelcome.setText(R.string.selectRootDirectoryTitle);
textExplanation.setText(R.string.selectRootDirectoryExplanation);
}
else {
try {
Uri convertedFileRootUri = convertFileRootUriToCorrectFormat(
root.toRootDirectory(this).getUri());
preferences.edit().putString(AppPreferences.ROOT_DIRECTORY_URI,
convertedFileRootUri.toString())
.apply();
enableWritePermissionPrompt();
generateMissingAppDirectoryAndFiles();
goToNextActivity();
} catch (Exception exception) {
ScrollView scrollView = new ScrollView(this);
new AlertDialog.Builder(this)
.setTitle("Troubleshooting")
.setView(scrollView)
.setMessage(exception.getMessage() + ". Stack trace: " + Log.getStackTraceString(exception))
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
}
})
.show();
}
}
} else if (userDisabledAskingForWritePermission()) {
recordDisablingOfWritePermissionPrompt();
}
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// Runs after permission is granted to read/write on root external directory on SDK 29 and above
try {
if (requestCode == REQUEST_WRITE_ROOT_DIRECTORY && resultCode == RESULT_OK) {
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
storageManagerCompat.addRoot(this, StorageManagerCompat.DEF_MAIN_ROOT, data);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
Uri convertedFileRootUri = convertFileRootUriToCorrectFormat(
root.toRootDirectory(this).getUri());
preferences.edit().putString(AppPreferences.ROOT_DIRECTORY_URI,
convertedFileRootUri.toString())
.apply();
enableWritePermissionPrompt();
generateMissingAppDirectoryAndFiles();
goToNextActivity();
}
}
catch (Exception exception) {
String rootUriString = preferences.getString(AppPreferences.ROOT_DIRECTORY_URI, null);
if (rootUriString == null) {
rootUriString = "NULL";
}
ScrollView scrollView = new ScrollView(this);
new AlertDialog.Builder(this)
.setTitle("Troubleshooting")
.setView(scrollView)
.setMessage(exception.getMessage() + ". Stack trace: " + Log.getStackTraceString(exception))
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
}
})
.show();
}
}
private void enableWritePermissionPrompt() {
preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED,
true).apply();
}
private void recordDisablingOfWritePermissionPrompt() {
preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED,
false).apply();
}
private boolean userDisabledAskingForWritePermission() {
return (!(shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)));
}
private void goToAppSettings() {
Intent intent = new Intent();
intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
Uri uri = Uri.fromParts("package", this.getPackageName(), null);
intent.setData(uri);
startActivity(intent);
}
/**
* Generates the app directory and any necessary configuration files if they
* are missing from the user's external storage.
*/
private void generateMissingAppDirectoryAndFiles() {
if (!(ExternalReader.appDirectoryExists(this))) {
DocumentFile appDirectory = ExternalWriter.createAppDirectory(this);
if (appDirectory == null) {
String rootUriString = preferences.getString(AppPreferences.ROOT_DIRECTORY_URI, null);
throw new RuntimeException("Failed to create app directory. Device file root URI: " + rootUriString);
}
}
if (!(ExternalReader.noMediaFileExists(this))) {
ExternalWriter.createNoMediaFile(this);
}
}
private void goToNextActivity() {
String lastOpenedWorldName = preferences.getString(AppPreferences.LAST_OPENED_WORLD, "");
if ((!(lastOpenedWorldName.isEmpty())) && (ExternalReader.worldAlreadyExists(this, lastOpenedWorldName))) {
goToLastOpenedWorld(lastOpenedWorldName);
} else {
setLastOpenedWorldToNothing();
if (ExternalReader.worldListIsEmpty(this)) {
goToWorldCreation();
} else {
goToCreateOrLoadWorld();
}
}
}
private void goToLastOpenedWorld(String lastOpenedWorldName) {
ActivityUtilities.goToWorld(this, lastOpenedWorldName);
finish();
}
private void setLastOpenedWorldToNothing() {
preferences.edit().putString(AppPreferences.LAST_OPENED_WORLD, "").apply();
}
private void goToWorldCreation() {
Intent goToWorldCreationIntent = new Intent(this, CreateWorldActivity.class);
startActivity(goToWorldCreationIntent);
finish();
}
private void goToCreateOrLoadWorld() {
Intent goToCreateOrLoadWorldIntent = new Intent(this, CreateOrLoadWorldActivity.class);
startActivity(goToCreateOrLoadWorldIntent);
finish();
}
/**
* Given a URI for a device's root external storage location, returns the URI formatted
* as either "file:///" or "content:///" depending on the original format and the
* Android version.
*
* <p>
* From what we have learned in issue #43, devices running Android 9 and below
* can use "file:///" URIs just fine. However, this is not the case for Android 10
* and above, which requires "content:///" URIs. Some brands, such as Samsung,
* still return a "file:///" URI in Android 10, so they need to be converted.
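     * For example, a root such as "file:///storage/emulated/0" (illustrative path) is re-exposed
     * through this app's FileProvider as a "content://com.averistudios.fileprovider/..." URI.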
* </p>
* @param fileRootUri The original URI for the external storage root
* @return The converted URI for the external storage root
*/
private Uri convertFileRootUriToCorrectFormat(Uri fileRootUri) {
if (fileRootUri.toString().startsWith("content")) {
return fileRootUri;
} else if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
return fileRootUri;
} else { // If we have a "file:///" URI and Android version >= 10, convert to "content:///" URI.
String fileRootPath = fileRootUri.getPath();
if (fileRootPath == null) {
throw new RuntimeException("Something went wrong. Please take a screenshot and email it to [email protected]. Got null when retrieving file path for URI: " + fileRootUri.toString());
} else {
File fileRoot = new File(fileRootUri.getPath());
return FileProvider.getUriForFile(
this,
"com.averistudios.fileprovider",
fileRoot);
}
}
}
}
| app/src/main/java/com/averi/worldscribe/activities/PermissionActivity.java | package com.averi.worldscribe.activities;
import android.Manifest;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.provider.Settings;
import androidx.appcompat.app.AlertDialog;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.core.content.FileProvider;
import androidx.documentfile.provider.DocumentFile;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ScrollView;
import android.widget.TextView;
import com.averi.worldscribe.R;
import com.averi.worldscribe.utilities.ActivityUtilities;
import com.averi.worldscribe.utilities.AppPreferences;
import com.averi.worldscribe.utilities.ExternalReader;
import com.averi.worldscribe.utilities.ExternalWriter;
import com.averi.worldscribe.utilities.FileRetriever;
import com.balda.flipper.Root;
import com.balda.flipper.StorageManagerCompat;
import java.io.File;
public class PermissionActivity extends ThemedActivity {
public static final int REQUEST_WRITE_EXTERNAL_STORAGE = 1;
public static final int REQUEST_WRITE_ROOT_DIRECTORY = 2;
private TextView textWelcome;
private TextView textExplanation;
private SharedPreferences preferences = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
preferences = getSharedPreferences("com.averi.worldscribe", MODE_PRIVATE);
textWelcome = findViewById(R.id.textWelcome);
textExplanation = findViewById(R.id.textExplanation);
if ((!(deviceUsesRuntimePermissions())) || (writePermissionWasGranted())) {
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
if ((root != null) && (root.isAccessGranted(this))) {
try {
Uri convertedFileRootUri = convertFileRootUriToCorrectFormat(
root.toRootDirectory(this).getUri());
preferences.edit().putString(AppPreferences.ROOT_DIRECTORY_URI,
convertedFileRootUri.toString())
.apply();
generateMissingAppDirectoryAndFiles();
goToNextActivity();
} catch (Exception exception) {
ScrollView scrollView = new ScrollView(this);
new AlertDialog.Builder(this)
.setTitle("Troubleshooting")
.setView(scrollView)
.setMessage(exception.getMessage())
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
}
})
.show();
}
}
else {
textWelcome.setText(R.string.selectRootDirectoryTitle);
textExplanation.setText(R.string.selectRootDirectoryExplanation);
}
}
}
@Override
protected int getLayoutResourceID() {
return R.layout.activity_permission;
}
@Override
protected ViewGroup getRootLayout() {
return (ViewGroup) findViewById(R.id.linearScreen);
}
private boolean deviceUsesRuntimePermissions() {
return (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1);
}
private boolean writePermissionWasGranted() {
int permissionCheck = ContextCompat.checkSelfPermission(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE);
return (permissionCheck == PackageManager.PERMISSION_GRANTED);
}
public void askForWritePermission(View view) {
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
if ((writePermissionWasGranted()) && ((root == null) || (!root.isAccessGranted(this)))) {
Intent getExternalFolderAccessIntent = storageManagerCompat.requireExternalAccess(this);
startActivityForResult(getExternalFolderAccessIntent, REQUEST_WRITE_ROOT_DIRECTORY);
}
else {
if (preferences.getBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED, true)) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
REQUEST_WRITE_EXTERNAL_STORAGE);
} else {
goToAppSettings();
}
}
}
@Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions,
int[] grantResults) {
switch (requestCode) {
case REQUEST_WRITE_EXTERNAL_STORAGE:
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// On Android SDK 29 and above, we need to ask for permission to
// access the root of the user's external storage.
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
if ((root == null) || (!root.isAccessGranted(this))) {
textWelcome.setText(R.string.selectRootDirectoryTitle);
textExplanation.setText(R.string.selectRootDirectoryExplanation);
}
else {
try {
Uri convertedFileRootUri = convertFileRootUriToCorrectFormat(
root.toRootDirectory(this).getUri());
preferences.edit().putString(AppPreferences.ROOT_DIRECTORY_URI,
convertedFileRootUri.toString())
.apply();
enableWritePermissionPrompt();
generateMissingAppDirectoryAndFiles();
goToNextActivity();
} catch (Exception exception) {
ScrollView scrollView = new ScrollView(this);
new AlertDialog.Builder(this)
.setTitle("Troubleshooting")
.setView(scrollView)
.setMessage(exception.getMessage())
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
}
})
.show();
}
}
} else if (userDisabledAskingForWritePermission()) {
recordDisablingOfWritePermissionPrompt();
}
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// Runs after permission is granted to read/write on root external directory on SDK 29 and above
try {
if (requestCode == REQUEST_WRITE_ROOT_DIRECTORY && resultCode == RESULT_OK) {
StorageManagerCompat storageManagerCompat = new StorageManagerCompat(this);
storageManagerCompat.addRoot(this, StorageManagerCompat.DEF_MAIN_ROOT, data);
Root root = storageManagerCompat.getRoot(StorageManagerCompat.DEF_MAIN_ROOT);
Uri convertedFileRootUri = convertFileRootUriToCorrectFormat(
root.toRootDirectory(this).getUri());
preferences.edit().putString(AppPreferences.ROOT_DIRECTORY_URI,
convertedFileRootUri.toString())
.apply();
enableWritePermissionPrompt();
generateMissingAppDirectoryAndFiles();
goToNextActivity();
}
}
catch (Exception exception) {
String rootUriString = preferences.getString(AppPreferences.ROOT_DIRECTORY_URI, null);
if (rootUriString == null) {
rootUriString = "NULL";
}
ScrollView scrollView = new ScrollView(this);
new AlertDialog.Builder(this)
.setTitle("Troubleshooting")
.setView(scrollView)
.setMessage(exception.getMessage())
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
}
})
.show();
}
}
private void enableWritePermissionPrompt() {
preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED,
true).apply();
}
private void recordDisablingOfWritePermissionPrompt() {
preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED,
false).apply();
}
private boolean userDisabledAskingForWritePermission() {
return (!(shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)));
}
private void goToAppSettings() {
Intent intent = new Intent();
intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
Uri uri = Uri.fromParts("package", this.getPackageName(), null);
intent.setData(uri);
startActivity(intent);
}
/**
* Generates the app directory and any necessary configuration files if they
* are missing from the user's external storage.
*/
private void generateMissingAppDirectoryAndFiles() {
if (!(ExternalReader.appDirectoryExists(this))) {
DocumentFile appDirectory = ExternalWriter.createAppDirectory(this);
if (appDirectory == null) {
String rootUriString = preferences.getString(AppPreferences.ROOT_DIRECTORY_URI, null);
throw new RuntimeException("Failed to create app directory. Device file root URI: " + rootUriString);
}
}
if (!(ExternalReader.noMediaFileExists(this))) {
ExternalWriter.createNoMediaFile(this);
}
}
private void goToNextActivity() {
String lastOpenedWorldName = preferences.getString(AppPreferences.LAST_OPENED_WORLD, "");
if ((!(lastOpenedWorldName.isEmpty())) && (ExternalReader.worldAlreadyExists(this, lastOpenedWorldName))) {
goToLastOpenedWorld(lastOpenedWorldName);
} else {
setLastOpenedWorldToNothing();
if (ExternalReader.worldListIsEmpty(this)) {
goToWorldCreation();
} else {
goToCreateOrLoadWorld();
}
}
}
private void goToLastOpenedWorld(String lastOpenedWorldName) {
ActivityUtilities.goToWorld(this, lastOpenedWorldName);
finish();
}
private void setLastOpenedWorldToNothing() {
preferences.edit().putString(AppPreferences.LAST_OPENED_WORLD, "").apply();
}
private void goToWorldCreation() {
Intent goToWorldCreationIntent = new Intent(this, CreateWorldActivity.class);
startActivity(goToWorldCreationIntent);
finish();
}
private void goToCreateOrLoadWorld() {
Intent goToCreateOrLoadWorldIntent = new Intent(this, CreateOrLoadWorldActivity.class);
startActivity(goToCreateOrLoadWorldIntent);
finish();
}
/**
* Given a URI for a device's root external storage location, returns the URI formatted
* as either "file:///" or "content:///" depending on the original format and the
* Android version.
*
* <p>
* From what we have learned in issue #43, devices running Android 9 and below
* can use "file:///" URIs just fine. However, this is not the case for Android 10
* and above, which requires "content:///" URIs. Some brands, such as Samsung,
* still return a "file:///" URI in Android 10, so they need to be converted.
* </p>
* @param fileRootUri The original URI for the external storage root
* @return The converted URI for the external storage root
*/
private Uri convertFileRootUriToCorrectFormat(Uri fileRootUri) {
if (fileRootUri.toString().startsWith("content")) {
return fileRootUri;
} else if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
return fileRootUri;
} else { // If we have a "file:///" URI and Android version >= 10, convert to "content:///" URI.
String fileRootPath = fileRootUri.getPath();
if (fileRootPath == null) {
throw new RuntimeException("Something went wrong. Please take a screenshot and email it to [email protected]. Got null when retrieving file path for URI: " + fileRootUri.toString());
} else {
File fileRoot = new File(fileRootUri.getPath());
return FileProvider.getUriForFile(
this,
"com.averistudios.fileprovider",
fileRoot);
}
}
}
}
| Log stack traces for thrown exception to help with debugging
| app/src/main/java/com/averi/worldscribe/activities/PermissionActivity.java | Log stack traces for thrown exception to help with debugging | <ide><path>pp/src/main/java/com/averi/worldscribe/activities/PermissionActivity.java
<ide> import androidx.core.content.FileProvider;
<ide> import androidx.documentfile.provider.DocumentFile;
<ide>
<add>import android.util.Log;
<ide> import android.view.View;
<ide> import android.view.ViewGroup;
<ide> import android.widget.ScrollView;
<ide> new AlertDialog.Builder(this)
<ide> .setTitle("Troubleshooting")
<ide> .setView(scrollView)
<del> .setMessage(exception.getMessage())
<add> .setMessage(exception.getMessage() + ". Stack trace: " + Log.getStackTraceString(exception))
<ide> .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
<ide> public void onClick(DialogInterface dialog, int which) {
<ide> }
<ide> new AlertDialog.Builder(this)
<ide> .setTitle("Troubleshooting")
<ide> .setView(scrollView)
<del> .setMessage(exception.getMessage())
<add> .setMessage(exception.getMessage() + ". Stack trace: " + Log.getStackTraceString(exception))
<ide> .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
<ide> public void onClick(DialogInterface dialog, int which) {
<ide> }
<ide> new AlertDialog.Builder(this)
<ide> .setTitle("Troubleshooting")
<ide> .setView(scrollView)
<del> .setMessage(exception.getMessage())
<add> .setMessage(exception.getMessage() + ". Stack trace: " + Log.getStackTraceString(exception))
<ide> .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
<ide> public void onClick(DialogInterface dialog, int which) {
<ide> } |
|
Java | apache-2.0 | 804e261f08263dbc84c4edaccc859b00b0debe01 | 0 | ASzc/fuse-patch-jdk6,ASzc/fuse-patch-jdk6 | package org.wildfly.extras.patch.bootstrap;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.wildfly.extras.patch.Patch;
import org.wildfly.extras.patch.PatchId;
import org.wildfly.extras.patch.PatchMetadataBuilder;
import org.wildfly.extras.patch.Record;
import org.wildfly.extras.patch.Record.Action;
import org.wildfly.extras.patch.internal.MetadataParser;
public class Main {
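    // One-shot bootstrap tool: rewrites the given zip in place (via a temporary ".replacement" file), appending fusepatch workspace metadata that describes every entry in the archive.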
public static void main(String[] args) throws IOException {
String zipPath = null;
String patchName = null;
String patchVersion = null;
try {
zipPath = args[0];
patchName = args[1];
patchVersion = args[2];
} catch (ArrayIndexOutOfBoundsException e) {
System.err.println("Required parameters: ZIP_PATH PATCH_NAME PATCH_VERSION");
System.exit(2);
}
File zip = new File(zipPath);
try {
addMetadata(zip, patchName, patchVersion);
} catch (FileNotFoundException e) {
System.err.println("Zip file " + zip + " not found: " + e.getMessage());
System.exit(1);
}
}
public static void addMetadata(File zipFile, String patchName, String patchVersion) throws IOException {
File replacementZipFile = new File(zipFile.getPath() + ".replacement");
FileInputStream zipFileInputStream = null;
try {
zipFileInputStream = new FileInputStream(zipFile);
ZipInputStream input = new ZipInputStream(zipFileInputStream);
FileOutputStream zipFileOutputStream = null;
try {
zipFileOutputStream = new FileOutputStream(replacementZipFile);
ZipOutputStream output = new ZipOutputStream(zipFileOutputStream);
Map<String, Long> entries = copyAndEnumerateZipEntries(input, output);
Patch patch = createPatchFromZipEntries(entries, patchName, patchVersion);
String root = findArchiveRootDirectory(entries);
                // In the case of no unique root directory, fall back to the
                // archive's top level for the metadata entries
if (root == null)
root = "";
appendMetadataEntries(output, patch, root);
output.close();
} finally {
if (zipFileOutputStream != null)
try {
zipFileOutputStream.close();
} catch (IOException e) {
}
}
if (!replacementZipFile.renameTo(zipFile)) {
throw new IOException("Unable to rename replacement file after adding metadata");
}
} finally {
if (zipFileInputStream != null)
try {
zipFileInputStream.close();
} catch (IOException e) {
}
replacementZipFile.delete(); // No IOException to catch
}
}
public static Map<String, Long> copyAndEnumerateZipEntries(ZipInputStream input, ZipOutputStream output)
throws IOException {
Map<String, Long> ret = new TreeMap<String, Long>();
        byte[] buffer = new byte[1 << 20]; // 1 MiB copy buffer
ZipEntry entry;
CRC32 crc = new CRC32();
while ((entry = input.getNextEntry()) != null) {
output.putNextEntry(entry);
if (!entry.isDirectory()) {
int currentRead;
while ((currentRead = input.read(buffer)) != -1) {
output.write(buffer, 0, currentRead);
crc.update(buffer, 0, currentRead);
}
output.closeEntry();
                // Recompute the CRC ourselves: with ZipInputStream the entry's CRC may sit in a trailing data descriptor and be unavailable at this point
ret.put(entry.getName(), crc.getValue());
crc.reset();
}
}
return ret;
}
public static String findArchiveRootDirectory(Map<String, Long> entries) {
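    // Finds the single top-level directory (e.g. "root/") shared by all directory entries; returns null as soon as two entries disagree on their first path segment.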
String rootCandidateName = null;
String[] rootCandidate = null;
for (Entry<String, Long> entry : entries.entrySet()) {
String name = entry.getKey();
if (name.endsWith("/")) {
String[] split = name.split("/");
if (rootCandidate == null) {
rootCandidateName = name;
rootCandidate = split;
} else if (!rootCandidate[0].equals(split[0])) {
// There isn't just one root directory
rootCandidateName = null;
break;
} else if (rootCandidate.length > split.length) {
rootCandidateName = name;
rootCandidate = split;
}
}
}
return rootCandidateName;
}
public static Patch createPatchFromZipEntries(Map<String, Long> entries, String patchName, String patchVersion) {
PatchId patchId = PatchId.create(patchName, patchVersion);
Collection<Record> records = new ArrayList<Record>(entries.size());
for (Entry<String, Long> entry : entries.entrySet()) {
Record record = Record.create(patchId, Action.INFO, new File(entry.getKey()), entry.getValue());
records.add(record);
}
return Patch.create(new PatchMetadataBuilder().patchId(patchId).build(), records);
}
public static void appendMetadataEntries(ZipOutputStream output, Patch patch, String namePrefix)
throws IOException {
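        // Lay down the fusepatch workspace skeleton under namePrefix: repository/, workspace/, audit.log, the managed-paths index and the per-patch metadata file.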
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/repository/"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/audit.log"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/managed-paths.metadata"));
writePatchManagedPaths(output, patch);
PatchId pid = patch.getPatchId();
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/" + pid.getName() + "/" + pid.getVersion()
+ "/" + pid.getCanonicalForm() + ".metadata"));
MetadataParser.writePatch(patch, output, true);
output.closeEntry();
}
public static void writePatchManagedPaths(OutputStream output, Patch patch) throws IOException {
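        // Each managed-paths line has the form "<path> [patchId]"; the owner list always contains exactly the patch being bootstrapped.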
List<PatchId> owners = new LinkedList<PatchId>();
owners.add(patch.getPatchId());
String ownersSuffix = " " + owners.toString();
PrintStream writer = new PrintStream(output);
for (Record record : patch.getRecords()) {
writer.print(record.getPath());
writer.println(ownersSuffix);
}
}
}
| bootstrap/src/main/java/org/wildfly/extras/patch/bootstrap/Main.java | package org.wildfly.extras.patch.bootstrap;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.wildfly.extras.patch.Patch;
import org.wildfly.extras.patch.PatchId;
import org.wildfly.extras.patch.PatchMetadataBuilder;
import org.wildfly.extras.patch.Record;
import org.wildfly.extras.patch.Record.Action;
import org.wildfly.extras.patch.internal.MetadataParser;
public class Main {
public static void main(String[] args) throws IOException {
String zipPath = null;
String patchName = null;
String patchVersion = null;
try {
zipPath = args[0];
patchName = args[1];
patchVersion = args[2];
} catch (ArrayIndexOutOfBoundsException e) {
System.err.println("Required parameters: ZIP_PATH PATCH_NAME PATCH_VERSION");
System.exit(2);
}
File zip = new File(zipPath);
try {
addMetadata(zip, patchName, patchVersion);
} catch (FileNotFoundException e) {
System.err.println("Zip file " + zip + " not found: " + e.getMessage());
System.exit(1);
}
}
public static void addMetadata(File zipFile, String patchName, String patchVersion) throws IOException {
File replacementZipFile = new File(zipFile.getPath() + ".replacement");
FileInputStream zipFileInputStream = null;
try {
zipFileInputStream = new FileInputStream(zipFile);
ZipInputStream input = new ZipInputStream(zipFileInputStream);
FileOutputStream zipFileOutputStream = null;
try {
zipFileOutputStream = new FileOutputStream(replacementZipFile);
ZipOutputStream output = new ZipOutputStream(zipFileOutputStream);
Map<String, Long> entries = copyAndEnumerateZipEntries(input, output);
Patch patch = createPatchFromZipEntries(entries, patchName, patchVersion);
String root = findArchiveRootDirectory(entries);
                // In the case of no unique root directory, fall back to the
                // archive's top level for the metadata entries
if (root == null)
root = "";
appendMetadataEntries(output, patch, root);
output.close();
} finally {
if (zipFileOutputStream != null)
try {
zipFileOutputStream.close();
} catch (IOException e) {
}
}
if (!replacementZipFile.renameTo(zipFile)) {
throw new IOException("Unable to rename replacement file after adding metadata");
}
} finally {
if (zipFileInputStream != null)
try {
zipFileInputStream.close();
} catch (IOException e) {
}
replacementZipFile.delete(); // No IOException to catch
}
}
public static Map<String, Long> copyAndEnumerateZipEntries(ZipInputStream input, ZipOutputStream output)
throws IOException {
Map<String, Long> ret = new TreeMap<String, Long>();
        byte[] buffer = new byte[1 << 20]; // 1 MiB copy buffer
ZipEntry entry;
CRC32 crc = new CRC32();
while ((entry = input.getNextEntry()) != null) {
output.putNextEntry(entry);
if (!entry.isDirectory()) {
int currentRead;
while ((currentRead = input.read(buffer)) != -1) {
output.write(buffer, 0, currentRead);
crc.update(buffer, 0, currentRead);
}
output.closeEntry();
                // Recompute the CRC ourselves: with ZipInputStream the entry's CRC may sit in a trailing data descriptor and be unavailable at this point
ret.put(entry.getName(), crc.getValue());
crc.reset();
}
}
return ret;
}
public static String findArchiveRootDirectory(Map<String, Long> entries) {
String rootCandidateName = null;
String[] rootCandidate = null;
for (Entry<String, Long> entry : entries.entrySet()) {
String name = entry.getKey();
if (name.endsWith("/")) {
String[] split = name.split("/");
if (rootCandidate == null) {
rootCandidateName = name;
rootCandidate = split;
} else if (rootCandidate[0] != split[0]) {
// There isn't just one root directory
rootCandidateName = null;
break;
} else if (rootCandidate.length > split.length) {
rootCandidateName = name;
rootCandidate = split;
}
}
}
return rootCandidateName;
}
public static Patch createPatchFromZipEntries(Map<String, Long> entries, String patchName, String patchVersion) {
PatchId patchId = PatchId.create(patchName, patchVersion);
Collection<Record> records = new ArrayList<Record>(entries.size());
for (Entry<String, Long> entry : entries.entrySet()) {
Record record = Record.create(patchId, Action.INFO, new File(entry.getKey()), entry.getValue());
records.add(record);
}
return Patch.create(new PatchMetadataBuilder().patchId(patchId).build(), records);
}
public static void appendMetadataEntries(ZipOutputStream output, Patch patch, String namePrefix)
throws IOException {
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/repository/"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/audit.log"));
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/managed-paths.metadata"));
writePatchManagedPaths(output, patch);
PatchId pid = patch.getPatchId();
output.putNextEntry(new ZipEntry(namePrefix + "fusepatch/workspace/" + pid.getName() + "/" + pid.getVersion()
+ "/" + pid.getCanonicalForm() + ".metadata"));
MetadataParser.writePatch(patch, output, true);
output.closeEntry();
}
public static void writePatchManagedPaths(OutputStream output, Patch patch) throws IOException {
List<PatchId> owners = new LinkedList<PatchId>();
owners.add(patch.getPatchId());
String ownersSuffix = " " + owners.toString();
PrintStream writer = new PrintStream(output);
for (Record record : patch.getRecords()) {
writer.print(record.getPath());
writer.println(ownersSuffix);
}
}
}
| Fix string comparison
| bootstrap/src/main/java/org/wildfly/extras/patch/bootstrap/Main.java | Fix string comparison | <ide><path>ootstrap/src/main/java/org/wildfly/extras/patch/bootstrap/Main.java
<ide> if (rootCandidate == null) {
<ide> rootCandidateName = name;
<ide> rootCandidate = split;
<del> } else if (rootCandidate[0] != split[0]) {
<add> } else if (!rootCandidate[0].equals(split[0])) {
<ide> // There isn't just one root directory
<ide> rootCandidateName = null;
<ide> break; |
|
Java | agpl-3.0 | b8e427112781253e67718913fdbd93aedb9626c1 | 0 | hsarmiento/AIDR,qcri-social/Crisis-Computing,qcri-social/Crisis-Computing,hsarmiento/AIDR,qcri-social/AIDR,qcri-social/AIDR,hsarmiento/AIDR,qcri-social/Crisis-Computing,roselleebarle04/AIDR,qcri-social/AIDR,roselleebarle04/AIDR,roselleebarle04/AIDR,roselleebarle04/AIDR,qcri-social/AIDR,hsarmiento/AIDR,qcri-social/Crisis-Computing | package qa.qcri.aidr.manager.service.impl;
import qa.qcri.aidr.manager.dto.*;
import qa.qcri.aidr.manager.exception.AidrException;
import qa.qcri.aidr.manager.service.TaggerService;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.ws.rs.core.MediaType;
import java.util.*;
@Service("taggerService")
public class TaggerServiceImpl implements TaggerService {
private Logger logger = Logger.getLogger(getClass());
@Autowired
private Client client;
@Value("${taggerMainUrl}")
private String taggerMainUrl;
@Value("${crowdsourcingAPIMainUrl}")
private String crowdsourcingAPIMainUrl;
@Value("${persisterMainUrl}")
private String persisterMainUrl;
@Value("${taggerPersisterMainUrl}")
private String taggerPersisterMainUrl;
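    // Thin Jersey-client facade over the Tagger and crowdsourcing REST APIs: each method builds a WebResource, exchanges JSON via Jackson, and wraps failures in AidrException.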
@Override
public List<TaggerCrisisType> getAllCrisisTypes() throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisisType/all");
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAllCrisesTypesResponse crisesTypesResponse = objectMapper.readValue(jsonResponse, TaggerAllCrisesTypesResponse.class);
if (crisesTypesResponse.getCrisisTypes() != null) {
logger.info("Tagger returned " + crisesTypesResponse.getCrisisTypes().size() + " crises types");
}
return crisesTypesResponse.getCrisisTypes();
} catch (Exception e) {
throw new AidrException("Error while getting all crisis from Tagger", e);
}
}
@Override
public List<TaggerCrisis> getCrisesByUserId(Integer userId) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis?userID=" + userId);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAllCrisesResponse taggerAllCrisesResponse = objectMapper.readValue(jsonResponse, TaggerAllCrisesResponse.class);
if (taggerAllCrisesResponse.getCrisises() != null) {
logger.info("Tagger returned " + taggerAllCrisesResponse.getCrisises().size() + " crisis for user");
}
return taggerAllCrisesResponse.getCrisises();
} catch (Exception e) {
throw new AidrException("No collection is enabled for Tagger. Please enable tagger for one of your collections.", e);
}
}
@Override
public String createNewCrises(TaggerCrisisRequest crisis) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis");
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(crisis));
return clientResponse.getEntity(String.class);
} catch (Exception e) {
throw new AidrException("Error while creating new crises in Tagger", e);
}
}
@Override
public Collection<TaggerAttribute> getAttributesForCrises(Integer crisisID, Integer userId) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute/crisis/all?exceptCrisis=" + crisisID);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisisAttributesResponse crisisAttributesResponse = objectMapper.readValue(jsonResponse, TaggerCrisisAttributesResponse.class);
if (crisisAttributesResponse.getCrisisAttributes() != null) {
logger.info("Tagger returned " + crisisAttributesResponse.getCrisisAttributes().size() + " attributes available for crises with ID " + crisisID);
} else {
return Collections.emptyList();
}
return convertTaggerCrisesAttributeToDTO(crisisAttributesResponse.getCrisisAttributes(), userId);
} catch (Exception e) {
throw new AidrException("Error while getting all attributes for crisis from Tagger", e);
}
}
@Override
public TaggerCrisisExist isCrisesExist(String code) throws AidrException{
try {
WebResource webResource = client.resource(taggerMainUrl + "/crisis/code/" + code);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisisExist crisisExist = objectMapper.readValue(jsonResponse, TaggerCrisisExist.class);
if (crisisExist.getCrisisId() != null) {
logger.info("Crises with the code " + code + " already exist in Tagger.");
return crisisExist;
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while checking if crisis exist in Tagger", e);
}
}
@Override
public Integer isUserExistsByUsername(String userName) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/user/" + userName);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerUser taggerUser = objectMapper.readValue(jsonResponse, TaggerUser.class);
if (taggerUser != null && taggerUser.getUserID() != null) {
logger.info("User with the user name " + userName + " already exist in Tagger and has ID: " + taggerUser.getUserID());
return taggerUser.getUserID();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while checking if user exist in Tagger", e);
}
}
@Override
public Integer addNewUser(TaggerUser taggerUser) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/user");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(taggerUser));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerUser createdUser = objectMapper.readValue(jsonResponse, TaggerUser.class);
if (createdUser != null && createdUser.getUserID() != null) {
logger.info("User with ID " + createdUser.getUserID() + " was created in Tagger");
return createdUser.getUserID();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while adding new user to Tagger", e);
}
}
@Override
public Integer addAttributeToCrisis(TaggerModelFamily modelFamily) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/modelfamily");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(modelFamily));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerModelFamily createdModelFamily = objectMapper.readValue(jsonResponse, TaggerModelFamily.class);
if (createdModelFamily != null && createdModelFamily.getModelFamilyID() != null) {
logger.info("Attribute was added to crises");
return createdModelFamily.getModelFamilyID();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while adding attribute to crises", e);
}
}
@Override
public TaggerCrisis getCrisesByCode(String code) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis/by-code/" + code);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisis crisis = objectMapper.readValue(jsonResponse, TaggerCrisis.class);
if (crisis != null) {
logger.info("Tagger returned crisis with code" + crisis.getCode());
}
return crisis;
} catch (Exception e) {
throw new AidrException("Error while getting crisis by code from Tagger", e);
}
}
@Override
public TaggerCrisis updateCode(TaggerCrisis crisis) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis");
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, objectMapper.writeValueAsString(crisis));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisis updatedCrisis = objectMapper.readValue(jsonResponse, TaggerCrisis.class);
if (updatedCrisis != null) {
logger.info("Crisis with id " + updatedCrisis.getCrisisID() + " was updated in Tagger");
}
return crisis;
} catch (Exception e) {
throw new AidrException("Error while getting crisis by code from Tagger", e);
}
}
@Override
public List<TaggerModel> getModelsForCrisis(Integer crisisID) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/model/crisis/" + crisisID);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisisModelsResponse crisisModelsResponse = objectMapper.readValue(jsonResponse, TaggerCrisisModelsResponse.class);
if (crisisModelsResponse.getModelWrapper() != null) {
logger.info("Tagger returned " + crisisModelsResponse.getModelWrapper().size() + " models for crises with ID " + crisisID);
return crisisModelsResponse.getModelWrapper();
}
return null;
} catch (Exception e) {
throw new AidrException("Error while getting all models for crisis from Tagger", e);
}
}
@Override
public TaggerAttribute createNewAttribute(TaggerAttribute attribute) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(attribute));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute response = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (response != null) {
logger.info("Attribute with ID " + response.getNominalAttributeID() + " was created in Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while creating new attribute in Tagger", e);
}
}
@Override
public TaggerAttribute getAttributeInfo(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute/" + id);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute response = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (response != null) {
logger.info("Attribute with ID " + response.getNominalAttributeID() + " was retrieved from Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while getting attribute from Tagger", e);
}
}
@Override
public TaggerLabel getLabelInfo(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/label/" + id);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerLabel response = objectMapper.readValue(jsonResponse, TaggerLabel.class);
if (response != null) {
logger.info("Label with ID " + response.getNominalLabelID() + " was retrieved from Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while getting label from Tagger", e);
}
}
@Override
public boolean deleteAttribute(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute/" + id);
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.delete(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Attribute with ID " + id + " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
throw new AidrException("Error while deleting attribute in Tagger", e);
}
}
@Override
public boolean deleteTrainingExample(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/document/removeTrainingExample/" + id);
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.delete(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Document with ID " + id + " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
throw new AidrException("Error while deleting document in Tagger", e);
}
}
@Override
public boolean removeAttributeFromCrises(Integer modelFamilyID) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/modelfamily/" + modelFamilyID);
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.delete(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Classifier was remove from crises by modelFamilyID: " + modelFamilyID);
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
throw new AidrException("Error while removing classifier from crisis in Tagger", e);
}
}
@Override
public TaggerAttribute updateAttribute(TaggerAttribute attribute) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, objectMapper.writeValueAsString(attribute));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute updatedAttribute = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (updatedAttribute != null) {
logger.info("Attribute with id " + updatedAttribute.getNominalAttributeID() + " was updated in Tagger");
} else {
return null;
}
return attribute;
} catch (Exception e) {
throw new AidrException("Error while updating attribute in Tagger", e);
}
}
@Override
public TaggerLabel updateLabel(TaggerLabelRequest label) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/label");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, objectMapper.writeValueAsString(label));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerLabel updatedLabel = objectMapper.readValue(jsonResponse, TaggerLabel.class);
if (updatedLabel != null) {
logger.info("Label with id " + updatedLabel.getNominalLabelID() + " was updated in Tagger");
} else {
return null;
}
return updatedLabel;
} catch (Exception e) {
throw new AidrException("Error while updating label in Tagger", e);
}
}
@Override
public TaggerLabel createNewLabel(TaggerLabelRequest label) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/label");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(label));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerLabel response = objectMapper.readValue(jsonResponse, TaggerLabel.class);
if (response != null) {
logger.info("Label with ID " + response.getNominalLabelID() + " was created in Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while creating new label in Tagger", e);
}
}
@Override
public TaggerAttribute attributeExists(String code) throws AidrException{
try {
WebResource webResource = client.resource(taggerMainUrl + "/attribute/code/" + code);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute attribute = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (attribute != null) {
logger.info("Attribute with the code " + code + " already exist in Tagger.");
return attribute;
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while checking if attribute exist in Tagger", e);
}
}
@Override
public List<TrainingDataDTO> getTrainingDataByModelIdAndCrisisId(Integer modelFamilyId,
Integer crisisId,
Integer start,
Integer limit,
String sortColumn,
String sortDirection) throws AidrException{
try {
WebResource webResource = client.resource(taggerMainUrl + "/misc/getTrainingData?crisisID=" + crisisId
+ "&modelFamilyID=" + modelFamilyId
+ "&fromRecord=" + start
+ "&limit=" + limit
+ "&sortColumn=" + sortColumn
+ "&sortDirection=" + sortDirection);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TrainingDataRequest trainingDataRequest = objectMapper.readValue(jsonResponse, TrainingDataRequest.class);
if (trainingDataRequest != null && trainingDataRequest.getTrainingData() != null) {
logger.info("Tagger returned " + trainingDataRequest.getTrainingData().size() + " training data records for crises with ID: "
+ crisisId + " and family model with ID: " + modelFamilyId);
return trainingDataRequest.getTrainingData();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while Getting training data for Crisis and Model.", e);
}
}
@Override
public String getAssignableTask(Integer id, String userName) throws AidrException {
try {
// taskBufferNumber currently always 1
int taskBufferNumber = 1;
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/taskbuffer/getassignabletask/" + userName + "/" + id + "/" + taskBufferNumber);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
logger.info("getAssignableTask - clientResponse : " + clientResponse);
String jsonResponse = clientResponse.getEntity(String.class);
logger.info("getAssignableTask - jsonResponse : " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
throw new AidrException("Error while getting Assignable Task in Tagger", e);
}
}
@Override
public String getTemplateStatus(String code) throws AidrException {
try {
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/template/status/crisis/code/" + code);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
logger.info("getTemplateStatus - clientResponse : " + clientResponse);
String jsonResponse = clientResponse.getEntity(String.class);
logger.info("getTemplateStatus - jsonResponse : " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
throw new AidrException("Error while getting Template Status in Tagger", e);
}
}
@Override
public String skipTask(Integer id, String userName) throws AidrException {
try {
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/taskassignment/revert/searchByDocUserName/" + userName + "/" + id);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
logger.info("skipTask - clientResponse : " + clientResponse);
String jsonResponse = clientResponse.getEntity(String.class);
logger.info("skipTask - jsonResponse : " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
throw new AidrException("Error while Skip Task operation", e);
}
}
@Override
public boolean saveTaskAnswer(List<TaskAnswer> taskAnswer) throws AidrException {
try {
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/taskanswer/save");
ObjectMapper objectMapper = new ObjectMapper();
logger.info("saveTaskAnswer - postData : " + objectMapper.writeValueAsString(taskAnswer));
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(taskAnswer));
logger.info("saveTaskAnswer - response status : " + clientResponse.getStatus());
return clientResponse.getStatus() == 204;
} catch (Exception e) {
throw new AidrException("Error while saving TaskAnswer in AIDRCrowdsourcing", e);
}
}
@Override
public String generateCSVLink(String code) throws AidrException {
try {
WebResource webResource = client.resource(taggerPersisterMainUrl + "/genCSV?collectionCode=" + code + "&exportLimit=100000");
ClientResponse clientResponse = webResource.type(MediaType.TEXT_PLAIN)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
if (jsonResponse != null && "http".equals(jsonResponse.substring(0, 4))) {
return jsonResponse;
} else {
return "";
}
} catch (Exception e) {
throw new AidrException("Error while generating CSV link in taggerPersister", e);
}
}
@Override
public String generateTweetIdsLink(String code) throws AidrException {
try {
WebResource webResource = client.resource(taggerPersisterMainUrl + "/genTweetIds?collectionCode=" + code);
ClientResponse clientResponse = webResource.type(MediaType.TEXT_PLAIN)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
if (jsonResponse != null && "http".equals(jsonResponse.substring(0, 4))) {
return jsonResponse;
} else {
return "";
}
} catch (Exception e) {
throw new AidrException("Error while generating Tweet Ids link in taggerPersister", e);
}
}
@Override
public ModelHistoryWrapper getModelHistoryByModelFamilyID(Integer start, Integer limit, Integer id) throws AidrException {
try {
WebResource webResource = client.resource(taggerMainUrl + "/model/modelFamily/" + id
+ "?start=" + start
+ "&limit=" + limit);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
ModelHistoryWrapper modelHistoryWrapper = objectMapper.readValue(jsonResponse, ModelHistoryWrapper.class);
return modelHistoryWrapper;
} catch (Exception e) {
throw new AidrException("Error while Getting history records for Model.", e);
}
}
@Override
public List<TaggerModelNominalLabel> getAllLabelsForModel(Integer modelID) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/modelNominalLabel/" + modelID);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerModelLabelsResponse modelLabelsResponse = objectMapper.readValue(jsonResponse, TaggerModelLabelsResponse.class);
if (modelLabelsResponse.getModelNominalLabelsDTO() != null) {
logger.info("Tagger returned " + modelLabelsResponse.getModelNominalLabelsDTO().size() + " labels for model with ID " + modelID);
}
return modelLabelsResponse.getModelNominalLabelsDTO();
} catch (Exception e) {
throw new AidrException("Error while getting all labels for model from Tagger", e);
}
}
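    /**
     * Groups the flat attribute/label rows returned by Tagger into TaggerAttribute DTOs,
     * keyed by nominal attribute ID. An attribute is included only if its first row was
     * created by the given user or by the system user (ID 1); later rows for an included
     * attribute merge additional labels into its label collection.
     */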
private Collection<TaggerAttribute> convertTaggerCrisesAttributeToDTO (List<TaggerCrisesAttribute> attributes, Integer userId) {
Map<Integer, TaggerAttribute> result = new HashMap<Integer, TaggerAttribute>();
for (TaggerCrisesAttribute a : attributes) {
if(!result.containsKey(a.getNominalAttributeID())){
                if (!userId.equals(a.getUserID()) && !Integer.valueOf(1).equals(a.getUserID())){
continue;
}
TaggerUser user = new TaggerUser(a.getUserID());
List<TaggerLabel> labels = new ArrayList<TaggerLabel>();
TaggerLabel label = new TaggerLabel(a.getLabelName(), a.getLabelID());
labels.add(label);
TaggerAttribute taggerAttribute = new TaggerAttribute(a.getCode(), a.getDescription(), a.getName(), a.getNominalAttributeID(), user, labels);
result.put(a.getNominalAttributeID(), taggerAttribute);
} else {
TaggerAttribute taggerAttribute = result.get(a.getNominalAttributeID());
List<TaggerLabel> labels = taggerAttribute.getNominalLabelCollection();
TaggerLabel label = new TaggerLabel(a.getLabelName(), a.getLabelID());
labels.add(label);
}
}
return result.values();
}
}
| aidr-manager/src/main/java/qa/qcri/aidr/manager/service/impl/TaggerServiceImpl.java | package qa.qcri.aidr.manager.service.impl;
import qa.qcri.aidr.manager.dto.*;
import qa.qcri.aidr.manager.exception.AidrException;
import qa.qcri.aidr.manager.service.TaggerService;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.ws.rs.core.MediaType;
import java.util.*;
@Service("taggerService")
public class TaggerServiceImpl implements TaggerService {
private Logger logger = Logger.getLogger(getClass());
@Autowired
private Client client;
@Value("${taggerMainUrl}")
private String taggerMainUrl;
@Value("${crowdsourcingAPIMainUrl}")
private String crowdsourcingAPIMainUrl;
@Value("${persisterMainUrl}")
private String persisterMainUrl;
@Value("${taggerPersisterMainUrl}")
private String taggerPersisterMainUrl;
@Override
public List<TaggerCrisisType> getAllCrisisTypes() throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisisType/all");
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAllCrisesTypesResponse crisesTypesResponse = objectMapper.readValue(jsonResponse, TaggerAllCrisesTypesResponse.class);
if (crisesTypesResponse.getCrisisTypes() != null) {
logger.info("Tagger returned " + crisesTypesResponse.getCrisisTypes().size() + " crises types");
}
return crisesTypesResponse.getCrisisTypes();
} catch (Exception e) {
throw new AidrException("Error while getting all crisis from Tagger", e);
}
}
@Override
public List<TaggerCrisis> getCrisesByUserId(Integer userId) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis?userID=" + userId);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAllCrisesResponse taggerAllCrisesResponse = objectMapper.readValue(jsonResponse, TaggerAllCrisesResponse.class);
if (taggerAllCrisesResponse.getCrisises() != null) {
logger.info("Tagger returned " + taggerAllCrisesResponse.getCrisises().size() + " crisis for user");
}
return taggerAllCrisesResponse.getCrisises();
} catch (Exception e) {
throw new AidrException("No collection is enabled for Tagger. Please enable tagger for one of your collections.", e);
}
}
@Override
public String createNewCrises(TaggerCrisisRequest crisis) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis");
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(crisis));
return clientResponse.getEntity(String.class);
} catch (Exception e) {
throw new AidrException("Error while creating new crises in Tagger", e);
}
}
@Override
public Collection<TaggerAttribute> getAttributesForCrises(Integer crisisID, Integer userId) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute/crisis/all?exceptCrisis=" + crisisID);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisisAttributesResponse crisisAttributesResponse = objectMapper.readValue(jsonResponse, TaggerCrisisAttributesResponse.class);
if (crisisAttributesResponse.getCrisisAttributes() != null) {
logger.info("Tagger returned " + crisisAttributesResponse.getCrisisAttributes().size() + " attributes available for crises with ID " + crisisID);
} else {
return Collections.emptyList();
}
return convertTaggerCrisesAttributeToDTO(crisisAttributesResponse.getCrisisAttributes(), userId);
} catch (Exception e) {
throw new AidrException("Error while getting all attributes for crisis from Tagger", e);
}
}
@Override
public TaggerCrisisExist isCrisesExist(String code) throws AidrException{
try {
WebResource webResource = client.resource(taggerMainUrl + "/crisis/code/" + code);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisisExist crisisExist = objectMapper.readValue(jsonResponse, TaggerCrisisExist.class);
if (crisisExist.getCrisisId() != null) {
logger.info("Crises with the code " + code + " already exist in Tagger.");
return crisisExist;
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while checking if crisis exist in Tagger", e);
}
}
@Override
public Integer isUserExistsByUsername(String userName) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/user/" + userName);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerUser taggerUser = objectMapper.readValue(jsonResponse, TaggerUser.class);
if (taggerUser != null && taggerUser.getUserID() != null) {
logger.info("User with the user name " + userName + " already exist in Tagger and has ID: " + taggerUser.getUserID());
return taggerUser.getUserID();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while checking if user exist in Tagger", e);
}
}
@Override
public Integer addNewUser(TaggerUser taggerUser) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/user");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(taggerUser));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerUser createdUser = objectMapper.readValue(jsonResponse, TaggerUser.class);
if (createdUser != null && createdUser.getUserID() != null) {
logger.info("User with ID " + createdUser.getUserID() + " was created in Tagger");
return createdUser.getUserID();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while adding new user to Tagger", e);
}
}
@Override
public Integer addAttributeToCrisis(TaggerModelFamily modelFamily) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/modelfamily");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(modelFamily));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerModelFamily createdModelFamily = objectMapper.readValue(jsonResponse, TaggerModelFamily.class);
if (createdModelFamily != null && createdModelFamily.getModelFamilyID() != null) {
logger.info("Attribute was added to crises");
return createdModelFamily.getModelFamilyID();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while adding attribute to crises", e);
}
}
@Override
public TaggerCrisis getCrisesByCode(String code) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis/by-code/" + code);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisis crisis = objectMapper.readValue(jsonResponse, TaggerCrisis.class);
if (crisis != null) {
logger.info("Tagger returned crisis with code" + crisis.getCode());
}
return crisis;
} catch (Exception e) {
throw new AidrException("Error while getting crisis by code from Tagger", e);
}
}
@Override
public TaggerCrisis updateCode(TaggerCrisis crisis) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/crisis");
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, objectMapper.writeValueAsString(crisis));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisis updatedCrisis = objectMapper.readValue(jsonResponse, TaggerCrisis.class);
if (updatedCrisis != null) {
logger.info("Crisis with id " + updatedCrisis.getCrisisID() + " was updated in Tagger");
}
            return updatedCrisis;
} catch (Exception e) {
throw new AidrException("Error while getting crisis by code from Tagger", e);
}
}
@Override
public List<TaggerModel> getModelsForCrisis(Integer crisisID) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/model/crisis/" + crisisID);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerCrisisModelsResponse crisisModelsResponse = objectMapper.readValue(jsonResponse, TaggerCrisisModelsResponse.class);
if (crisisModelsResponse.getModelWrapper() != null) {
logger.info("Tagger returned " + crisisModelsResponse.getModelWrapper().size() + " models for crises with ID " + crisisID);
return crisisModelsResponse.getModelWrapper();
}
return null;
} catch (Exception e) {
throw new AidrException("Error while getting all models for crisis from Tagger", e);
}
}
@Override
public TaggerAttribute createNewAttribute(TaggerAttribute attribute) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(attribute));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute response = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (response != null) {
logger.info("Attribute with ID " + response.getNominalAttributeID() + " was created in Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while creating new attribute in Tagger", e);
}
}
@Override
public TaggerAttribute getAttributeInfo(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute/" + id);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute response = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (response != null) {
logger.info("Attribute with ID " + response.getNominalAttributeID() + " was retrieved from Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while getting attribute from Tagger", e);
}
}
@Override
public TaggerLabel getLabelInfo(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/label/" + id);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerLabel response = objectMapper.readValue(jsonResponse, TaggerLabel.class);
if (response != null) {
logger.info("Label with ID " + response.getNominalLabelID() + " was retrieved from Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while getting label from Tagger", e);
}
}
@Override
public boolean deleteAttribute(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute/" + id);
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.delete(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Attribute with ID " + id + " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
throw new AidrException("Error while deleting attribute in Tagger", e);
}
}
@Override
public boolean deleteTrainingExample(Integer id) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/document/" + id);
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.delete(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Document with ID " + id + " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
throw new AidrException("Error while deleting document in Tagger", e);
}
}
@Override
public boolean removeAttributeFromCrises(Integer modelFamilyID) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/modelfamily/" + modelFamilyID);
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.delete(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Classifier was remove from crises by modelFamilyID: " + modelFamilyID);
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
throw new AidrException("Error while removing classifier from crisis in Tagger", e);
}
}
@Override
public TaggerAttribute updateAttribute(TaggerAttribute attribute) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, objectMapper.writeValueAsString(attribute));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute updatedAttribute = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (updatedAttribute != null) {
logger.info("Attribute with id " + updatedAttribute.getNominalAttributeID() + " was updated in Tagger");
} else {
return null;
}
            return updatedAttribute;
} catch (Exception e) {
throw new AidrException("Error while updating attribute in Tagger", e);
}
}
@Override
public TaggerLabel updateLabel(TaggerLabelRequest label) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/label");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.put(ClientResponse.class, objectMapper.writeValueAsString(label));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerLabel updatedLabel = objectMapper.readValue(jsonResponse, TaggerLabel.class);
if (updatedLabel != null) {
logger.info("Label with id " + updatedLabel.getNominalLabelID() + " was updated in Tagger");
} else {
return null;
}
return updatedLabel;
} catch (Exception e) {
throw new AidrException("Error while updating label in Tagger", e);
}
}
@Override
public TaggerLabel createNewLabel(TaggerLabelRequest label) throws AidrException {
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/label");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(label));
String jsonResponse = clientResponse.getEntity(String.class);
TaggerLabel response = objectMapper.readValue(jsonResponse, TaggerLabel.class);
if (response != null) {
logger.info("Label with ID " + response.getNominalLabelID() + " was created in Tagger");
return response;
}
return null;
} catch (Exception e) {
throw new AidrException("Error while creating new label in Tagger", e);
}
}
@Override
public TaggerAttribute attributeExists(String code) throws AidrException{
try {
WebResource webResource = client.resource(taggerMainUrl + "/attribute/code/" + code);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerAttribute attribute = objectMapper.readValue(jsonResponse, TaggerAttribute.class);
if (attribute != null) {
logger.info("Attribute with the code " + code + " already exist in Tagger.");
return attribute;
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while checking if attribute exist in Tagger", e);
}
}
@Override
public List<TrainingDataDTO> getTrainingDataByModelIdAndCrisisId(Integer modelFamilyId,
Integer crisisId,
Integer start,
Integer limit,
String sortColumn,
String sortDirection) throws AidrException{
try {
WebResource webResource = client.resource(taggerMainUrl + "/misc/getTrainingData?crisisID=" + crisisId
+ "&modelFamilyID=" + modelFamilyId
+ "&fromRecord=" + start
+ "&limit=" + limit
+ "&sortColumn=" + sortColumn
+ "&sortDirection=" + sortDirection);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TrainingDataRequest trainingDataRequest = objectMapper.readValue(jsonResponse, TrainingDataRequest.class);
if (trainingDataRequest != null && trainingDataRequest.getTrainingData() != null) {
logger.info("Tagger returned " + trainingDataRequest.getTrainingData().size() + " training data records for crises with ID: "
+ crisisId + " and family model with ID: " + modelFamilyId);
return trainingDataRequest.getTrainingData();
} else {
return null;
}
} catch (Exception e) {
throw new AidrException("Error while Getting training data for Crisis and Model.", e);
}
}
@Override
public String getAssignableTask(Integer id, String userName) throws AidrException {
try {
// taskBufferNumber currently always 1
int taskBufferNumber = 1;
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/taskbuffer/getassignabletask/" + userName + "/" + id + "/" + taskBufferNumber);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
logger.info("getAssignableTask - clientResponse : " + clientResponse);
String jsonResponse = clientResponse.getEntity(String.class);
logger.info("getAssignableTask - jsonResponse : " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
throw new AidrException("Error while getting Assignable Task in Tagger", e);
}
}
@Override
public String getTemplateStatus(String code) throws AidrException {
try {
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/template/status/crisis/code/" + code);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
logger.info("getTemplateStatus - clientResponse : " + clientResponse);
String jsonResponse = clientResponse.getEntity(String.class);
logger.info("getTemplateStatus - jsonResponse : " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
throw new AidrException("Error while getting Template Status in Tagger", e);
}
}
@Override
public String skipTask(Integer id, String userName) throws AidrException {
try {
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/taskassignment/revert/searchByDocUserName/" + userName + "/" + id);
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
logger.info("skipTask - clientResponse : " + clientResponse);
String jsonResponse = clientResponse.getEntity(String.class);
logger.info("skipTask - jsonResponse : " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
throw new AidrException("Error while Skip Task operation", e);
}
}
@Override
public boolean saveTaskAnswer(List<TaskAnswer> taskAnswer) throws AidrException {
try {
WebResource webResource = client.resource(crowdsourcingAPIMainUrl + "/taskanswer/save");
ObjectMapper objectMapper = new ObjectMapper();
logger.info("saveTaskAnswer - postData : " + objectMapper.writeValueAsString(taskAnswer));
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.post(ClientResponse.class, objectMapper.writeValueAsString(taskAnswer));
logger.info("saveTaskAnswer - response status : " + clientResponse.getStatus());
return clientResponse.getStatus() == 204;
} catch (Exception e) {
throw new AidrException("Error while saving TaskAnswer in AIDRCrowdsourcing", e);
}
}
@Override
public String generateCSVLink(String code) throws AidrException {
try {
WebResource webResource = client.resource(taggerPersisterMainUrl + "/genCSV?collectionCode=" + code + "&exportLimit=100000");
ClientResponse clientResponse = webResource.type(MediaType.TEXT_PLAIN)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
if (jsonResponse != null && "http".equals(jsonResponse.substring(0, 4))) {
return jsonResponse;
} else {
return "";
}
} catch (Exception e) {
throw new AidrException("Error while generating CSV link in taggerPersister", e);
}
}
@Override
public String generateTweetIdsLink(String code) throws AidrException {
try {
WebResource webResource = client.resource(taggerPersisterMainUrl + "/genTweetIds?collectionCode=" + code);
ClientResponse clientResponse = webResource.type(MediaType.TEXT_PLAIN)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
if (jsonResponse != null && "http".equals(jsonResponse.substring(0, 4))) {
return jsonResponse;
} else {
return "";
}
} catch (Exception e) {
throw new AidrException("Error while generating Tweet Ids link in taggerPersister", e);
}
}
@Override
public ModelHistoryWrapper getModelHistoryByModelFamilyID(Integer start, Integer limit, Integer id) throws AidrException {
try {
WebResource webResource = client.resource(taggerMainUrl + "/model/modelFamily/" + id
+ "?start=" + start
+ "&limit=" + limit);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
ModelHistoryWrapper modelHistoryWrapper = objectMapper.readValue(jsonResponse, ModelHistoryWrapper.class);
return modelHistoryWrapper;
} catch (Exception e) {
throw new AidrException("Error while Getting history records for Model.", e);
}
}
@Override
public List<TaggerModelNominalLabel> getAllLabelsForModel(Integer modelID) throws AidrException{
try {
/**
* Rest call to Tagger
*/
WebResource webResource = client.resource(taggerMainUrl + "/modelNominalLabel/" + modelID);
ObjectMapper objectMapper = new ObjectMapper();
ClientResponse clientResponse = webResource.type(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
String jsonResponse = clientResponse.getEntity(String.class);
TaggerModelLabelsResponse modelLabelsResponse = objectMapper.readValue(jsonResponse, TaggerModelLabelsResponse.class);
if (modelLabelsResponse.getModelNominalLabelsDTO() != null) {
logger.info("Tagger returned " + modelLabelsResponse.getModelNominalLabelsDTO().size() + " labels for model with ID " + modelID);
}
return modelLabelsResponse.getModelNominalLabelsDTO();
} catch (Exception e) {
throw new AidrException("Error while getting all labels for model from Tagger", e);
}
}
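    /**
     * Groups the flat attribute/label rows returned by Tagger into TaggerAttribute DTOs,
     * keyed by nominal attribute ID. An attribute is included only if its first row was
     * created by the given user or by the system user (ID 1); later rows for an included
     * attribute merge additional labels into its label collection.
     */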
private Collection<TaggerAttribute> convertTaggerCrisesAttributeToDTO (List<TaggerCrisesAttribute> attributes, Integer userId) {
Map<Integer, TaggerAttribute> result = new HashMap<Integer, TaggerAttribute>();
for (TaggerCrisesAttribute a : attributes) {
if(!result.containsKey(a.getNominalAttributeID())){
if (!userId.equals(a.getUserID()) && !(new Integer(1)).equals(a.getUserID())){
continue;
}
TaggerUser user = new TaggerUser(a.getUserID());
List<TaggerLabel> labels = new ArrayList<TaggerLabel>();
TaggerLabel label = new TaggerLabel(a.getLabelName(), a.getLabelID());
labels.add(label);
TaggerAttribute taggerAttribute = new TaggerAttribute(a.getCode(), a.getDescription(), a.getName(), a.getNominalAttributeID(), user, labels);
result.put(a.getNominalAttributeID(), taggerAttribute);
} else {
TaggerAttribute taggerAttribute = result.get(a.getNominalAttributeID());
List<TaggerLabel> labels = taggerAttribute.getNominalLabelCollection();
TaggerLabel label = new TaggerLabel(a.getLabelName(), a.getLabelID());
labels.add(label);
}
}
return result.values();
}
}
| aidr-manager: Remove training examples changes.
| aidr-manager/src/main/java/qa/qcri/aidr/manager/service/impl/TaggerServiceImpl.java | aidr-manager: Remove training examples changes. | <ide><path>idr-manager/src/main/java/qa/qcri/aidr/manager/service/impl/TaggerServiceImpl.java
<ide> /**
<ide> * Rest call to Tagger
<ide> */
<del> WebResource webResource = client.resource(taggerMainUrl + "/document/" + id);
<add> WebResource webResource = client.resource(taggerMainUrl + "/document/removeTrainingExample/" + id);
<ide> ObjectMapper objectMapper = new ObjectMapper();
<ide> objectMapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
<ide> |
|
Java | mit | 99ca76b59cf56044e9a69bf868133e8392714dd7 | 0 | shunghsiyu/MessengerApp | package com.example.myfirstapp;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.Random;
import android.app.Activity;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v7.app.ActionBarActivity;
import android.telephony.SmsManager;
import android.text.SpannableString;
import android.text.style.ImageSpan;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
public class MainActivity extends ActionBarActivity {
private static final String TASK_FRAGMENT = "task_fragment";
private EditText numberText;
private EditText contentText;
private TextView textView;
private TcpSender tcpSender;
private Thread senderThread;
private TcpReceiver tcpReceiver;
private Thread receiverThread;
private Queue<String> uiMessageQueue;
private Handler mHandler;
private Fragment mFragment;
private View fragmentFace;
private ScrollView scrollView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
FragmentManager fm = getSupportFragmentManager();
mFragment = fm.findFragmentByTag(TASK_FRAGMENT);
if (mFragment == null) {
mFragment = new PlaceholderFragment();
fm.beginTransaction().add(mFragment, TASK_FRAGMENT).commit();
}
setContentView(R.layout.fragment_main);
ButtonClickListener bc=new ButtonClickListener();
ButtonClickListener bc2=new ButtonClickListener();
textView=(TextView) this.findViewById(R.id.display);
numberText=(EditText) this.findViewById(R.id.To);
contentText=(EditText) this.findViewById(R.id.edit_message);
scrollView = (ScrollView) this.findViewById(R.id.scrollView1);
Button button=(Button) this.findViewById(R.id.button_send);
button.setOnClickListener(bc);
        Button showExpression=(Button) this.findViewById(R.id.edit);
final ViewGroup linear = (ViewGroup) this.findViewById(R.id.linear);
fragmentFace = this.getLayoutInflater().inflate(R.layout.fragment_face, linear, false);
// Set fragmentFace's visibility to GONE so it won't show when added
fragmentFace.setVisibility(View.GONE);
// Add fragmentFace to fragment_main
linear.addView(fragmentFace);
// Added click listener for button 'image'
        showExpression.setOnClickListener(new OnClickListener(){
public void onClick(View view){
if(fragmentFace.getVisibility() == View.GONE) {
// Toggle the view to visible if it is not
fragmentFace.setVisibility(View.VISIBLE);
} else {
// Toggle the view to hidden if it is visible
fragmentFace.setVisibility(View.GONE);
}
}
});
ImageButton ib1=(ImageButton) this.findViewById(R.id.imageFace1);
ImageButton ib2=(ImageButton) this.findViewById(R.id.imageFace2);
ImageButton ib3=(ImageButton) this.findViewById(R.id.imageFace3);
ImageButton ib4=(ImageButton) this.findViewById(R.id.imageFace4);
ImageButton ib5=(ImageButton) this.findViewById(R.id.imageFace5);
ImageButton ib6=(ImageButton) this.findViewById(R.id.imageFace6);
ImageButton ib7=(ImageButton) this.findViewById(R.id.imageFace7);
// ib.setImageResource(R.drawable.face1);
//send face1
ib1.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+1);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
textView.append("\n");
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face2
ib2.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+2);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
textView.append("\n");
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face3
ib3.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+3);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
textView.append("\n");
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face4
ib4.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+4);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
textView.append("\n");
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face5
ib5.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+5);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
textView.append("\n");
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face6
ib6.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+6);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
textView.append("\n");
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face7
ib7.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+7);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
textView.append("\n");
}
catch(Exception e){
e.printStackTrace();
}
}
});
// Bind Handler with main Looper
mHandler = new Handler(Looper.getMainLooper()) {
// Tell main looper to poll uiMessageQueue
@Override
public void handleMessage(Message signalMessage) {
super.handleMessage(signalMessage);
String message = uiMessageQueue.poll();
Activity mainActivity = mFragment.getActivity();
if(mainActivity != null) {
textView=(TextView) mainActivity.findViewById(R.id.display);
System.out.println("::handler: textView = " + textView);
} else {
System.out.println("::mainActivity is null");
}
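                // Messages sent from this device come back tagged with the loopback
                // address, so relabel them as "Me" before displaying.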
String myIP = "127.0.0.1:";
if(message != null) {
if(textView != null) {
if(message.startsWith(myIP)) {
message = "Me (127.0.0.1):"+
message.substring(myIP.length());
}
textView.append(message);
scrollDown();
} else {
System.out.println("::textView is null");
}
} else {
System.out.println("::No new message");
}
}
};
// Create a queue to store incoming message
uiMessageQueue = new ArrayBlockingQueue<String>(50);
// Launch TCP Sender
tcpSender = new TcpSender();
senderThread = new Thread(tcpSender);
senderThread.start();
// Launch TCP Receiver
tcpReceiver = new TcpReceiver(tcpSender, uiMessageQueue, mHandler);
receiverThread = new Thread(tcpReceiver);
receiverThread.start();
// Added self IP for debugging purpose
new Thread(new Runnable() {
@Override
public void run() {
InetAddress ip = null;
try {
ip = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
e.printStackTrace();
}
tcpSender.addReceiver(ip);
}
}).start();
}
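    // Scrolls the chat view to the bottom; posted through mHandler so it runs
    // on the UI thread after any pending text appends.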
private void scrollDown() {
mHandler.post(new Runnable() {
@Override
public void run() {
MainActivity.this.scrollView.fullScroll(View.FOCUS_DOWN);
}
});
}
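    // Sends the typed message over TCP, clears the input field, scrolls the
    // log down and shows a confirmation toast.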
private final class ButtonClickListener implements View.OnClickListener{
public void onClick(View v){
String number=numberText.getText().toString();
String content=contentText.getText().toString();
tcpSender.send(content);
contentText.setText("");
scrollDown();
            Toast.makeText(MainActivity.this, R.string.success, Toast.LENGTH_LONG).show();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* A placeholder fragment containing a simple view.
*/
public static class PlaceholderFragment extends Fragment {
Activity mainActivity;
public PlaceholderFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
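            // Retain this fragment across configuration changes (e.g. rotation)
            // so background threads can reach the current activity via getActivity().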
setRetainInstance(true);
System.out.println("::Fragment created");
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
mainActivity = activity;
System.out.println("::Fragment attached");
}
@Override
public void onDetach() {
super.onDetach();
mainActivity = null;
System.out.println("::Fragment detached");
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_main, container,
false);
return rootView;
}
public Activity getMainActivity() {
return this.mainActivity;
}
}
}
| src/com/example/myfirstapp/MainActivity.java | package com.example.myfirstapp;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.Random;
import android.app.Activity;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v7.app.ActionBarActivity;
import android.telephony.SmsManager;
import android.text.SpannableString;
import android.text.style.ImageSpan;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
public class MainActivity extends ActionBarActivity {
private static final String TASK_FRAGMENT = "task_fragment";
private EditText numberText;
private EditText contentText;
private TextView textView;
private TcpSender tcpSender;
private Thread senderThread;
private TcpReceiver tcpReceiver;
private Thread receiverThread;
private Queue<String> uiMessageQueue;
private Handler mHandler;
private Fragment mFragment;
private View fragmentFace;
private ScrollView scrollView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
FragmentManager fm = getSupportFragmentManager();
mFragment = fm.findFragmentByTag(TASK_FRAGMENT);
if (mFragment == null) {
mFragment = new PlaceholderFragment();
fm.beginTransaction().add(mFragment, TASK_FRAGMENT).commit();
}
setContentView(R.layout.fragment_main);
ButtonClickListener bc=new ButtonClickListener();
ButtonClickListener bc2=new ButtonClickListener();
textView=(TextView) this.findViewById(R.id.display);
numberText=(EditText) this.findViewById(R.id.To);
contentText=(EditText) this.findViewById(R.id.edit_message);
scrollView = (ScrollView) this.findViewById(R.id.scrollView1);
Button button=(Button) this.findViewById(R.id.button_send);
button.setOnClickListener(bc);
        Button showExpression=(Button) this.findViewById(R.id.edit);
final ViewGroup linear = (ViewGroup) this.findViewById(R.id.linear);
fragmentFace = this.getLayoutInflater().inflate(R.layout.fragment_face, linear, false);
// Set fragmentFace's visibility to GONE so it won't show when added
fragmentFace.setVisibility(View.GONE);
// Add fragmentFace to fragment_main
linear.addView(fragmentFace);
// Added click listener for button 'image'
        showExpression.setOnClickListener(new OnClickListener(){
public void onClick(View view){
if(fragmentFace.getVisibility() == View.GONE) {
// Toggle the view to visible if it is not
fragmentFace.setVisibility(View.VISIBLE);
} else {
// Toggle the view to hidden if it is visible
fragmentFace.setVisibility(View.GONE);
}
}
});
ImageButton ib1=(ImageButton) this.findViewById(R.id.imageFace1);
ImageButton ib2=(ImageButton) this.findViewById(R.id.imageFace2);
ImageButton ib3=(ImageButton) this.findViewById(R.id.imageFace3);
ImageButton ib4=(ImageButton) this.findViewById(R.id.imageFace4);
ImageButton ib5=(ImageButton) this.findViewById(R.id.imageFace5);
ImageButton ib6=(ImageButton) this.findViewById(R.id.imageFace6);
ImageButton ib7=(ImageButton) this.findViewById(R.id.imageFace7);
// ib.setImageResource(R.drawable.face1);
//send face1
ib1.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+1);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face2
ib2.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+2);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face3
ib3.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+3);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face4
ib4.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+4);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face5
ib5.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+5);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face6
ib6.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+6);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
}
catch(Exception e){
e.printStackTrace();
}
}
});
//send face7
ib7.setOnClickListener(new OnClickListener(){
public void onClick(View view){
try{
Field field=R.drawable.class.getDeclaredField("face"+7);
int resourceID=Integer.parseInt(field.get(null).toString());
Bitmap bitmap=BitmapFactory.decodeResource(getResources(), resourceID);
ImageSpan span=new ImageSpan(MainActivity.this,bitmap);
SpannableString spannableString=new SpannableString("face");
spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.append(spannableString);
scrollDown();
}
catch(Exception e){
e.printStackTrace();
}
}
});
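		// Editor's note: the seven face-button listeners above are identical
		// except for the drawable index. A minimal sketch of a shared helper
		// that could replace them -- the method name bindFaceButton and the
		// class-level placement are hypothetical, not part of the original
		// source:
		//
		//   private void bindFaceButton(ImageButton button, final int index) {
		//       button.setOnClickListener(new OnClickListener() {
		//           public void onClick(View view) {
		//               try {
		//                   // Same reflective lookup of R.drawable.faceN as above.
		//                   Field field = R.drawable.class.getDeclaredField("face" + index);
		//                   int resourceID = Integer.parseInt(field.get(null).toString());
		//                   Bitmap bitmap = BitmapFactory.decodeResource(getResources(), resourceID);
		//                   ImageSpan span = new ImageSpan(MainActivity.this, bitmap);
		//                   SpannableString spannableString = new SpannableString("face");
		//                   spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
		//                   textView.append(spannableString);
		//                   scrollDown();
		//               } catch (Exception e) {
		//                   e.printStackTrace();
		//               }
		//           }
		//       });
		//   }
		//
		// Usage in onCreate, assuming the buttons were collected into an array:
		//   ImageButton[] faceButtons = { ib1, ib2, ib3, ib4, ib5, ib6, ib7 };
		//   for (int i = 0; i < faceButtons.length; i++) { bindFaceButton(faceButtons[i], i + 1); }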
// Bind Handler with main Looper
mHandler = new Handler(Looper.getMainLooper()) {
// Tell main looper to poll uiMessageQueue
@Override
public void handleMessage(Message signalMessage) {
super.handleMessage(signalMessage);
String message = uiMessageQueue.poll();
Activity mainActivity = mFragment.getActivity();
if(mainActivity != null) {
textView=(TextView) mainActivity.findViewById(R.id.display);
System.out.println("::handler: textView = " + textView);
} else {
System.out.println("::mainActivity is null");
}
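				// Messages that originated from this device carry the loopback
				// prefix; rewrite them below so they display as "Me (127.0.0.1): ...".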
String myIP = "127.0.0.1:";
if(message != null) {
if(textView != null) {
if(message.startsWith(myIP)) {
message = "Me (127.0.0.1):"+
message.substring(myIP.length());
}
textView.append(message);
scrollDown();
} else {
System.out.println("::textView is null");
}
} else {
System.out.println("::No new message");
}
}
};
// Create a queue to store incoming message
uiMessageQueue = new ArrayBlockingQueue<String>(50);
// Launch TCP Sender
tcpSender = new TcpSender();
senderThread = new Thread(tcpSender);
senderThread.start();
// Launch TCP Receiver
tcpReceiver = new TcpReceiver(tcpSender, uiMessageQueue, mHandler);
receiverThread = new Thread(tcpReceiver);
receiverThread.start();
// Added self IP for debugging purpose
new Thread(new Runnable() {
@Override
public void run() {
InetAddress ip = null;
try {
ip = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
e.printStackTrace();
}
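				// Note: if getLocalHost() threw, 'ip' is still null here and
				// addReceiver(null) is called; a null guard may be warranted.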
tcpSender.addReceiver(ip);
}
}).start();
}
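	// fullScroll() touches the view hierarchy, so the call is posted through
	// the main-looper handler to guarantee it runs on the UI thread.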
private void scrollDown() {
mHandler.post(new Runnable() {
@Override
public void run() {
MainActivity.this.scrollView.fullScroll(View.FOCUS_DOWN);
}
});
}
private final class ButtonClickListener implements View.OnClickListener{
public void onClick(View v){
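			// Note: 'number' is read from the input field but not used below;
			// only 'content' is sent.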
String number=numberText.getText().toString();
String content=contentText.getText().toString();
tcpSender.send(content);
contentText.setText("");
scrollDown();
			Toast.makeText(MainActivity.this,R.string.success,Toast.LENGTH_LONG).show();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* A placeholder fragment containing a simple view.
*/
public static class PlaceholderFragment extends Fragment {
Activity mainActivity;
public PlaceholderFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRetainInstance(true);
System.out.println("::Fragment created");
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
mainActivity = activity;
System.out.println("::Fragment attached");
}
@Override
public void onDetach() {
super.onDetach();
mainActivity = null;
System.out.println("::Fragment detached");
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_main, container,
false);
return rootView;
}
public Activity getMainActivity() {
return this.mainActivity;
}
}
}
| Updated MainActivity.java
Add line break in textView after sending or receiving. | src/com/example/myfirstapp/MainActivity.java | Updated MainActivity.java Add line break in textView after sending or receiving. | <ide><path>rc/com/example/myfirstapp/MainActivity.java
<ide> spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
<ide> textView.append(spannableString);
<ide> scrollDown();
<add> textView.append("\n");
<ide> }
<ide> catch(Exception e){
<ide> e.printStackTrace();
<ide> spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
<ide> textView.append(spannableString);
<ide> scrollDown();
<del> }
<del> catch(Exception e){
<del> e.printStackTrace();
<del> }
<del> }
<del> });
<add> textView.append("\n");
<add> }
<add> catch(Exception e){
<add> e.printStackTrace();
<add> }
<add> }
<add> });
<add>
<add>
<ide>
<ide> //send face3
<ide> ib3.setOnClickListener(new OnClickListener(){
<ide> spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
<ide> textView.append(spannableString);
<ide> scrollDown();
<add> textView.append("\n");
<ide> }
<ide> catch(Exception e){
<ide> e.printStackTrace();
<ide> spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
<ide> textView.append(spannableString);
<ide> scrollDown();
<add> textView.append("\n");
<ide> }
<ide> catch(Exception e){
<ide> e.printStackTrace();
<ide> spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
<ide> textView.append(spannableString);
<ide> scrollDown();
<add> textView.append("\n");
<ide> }
<ide> catch(Exception e){
<ide> e.printStackTrace();
<ide> spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
<ide> textView.append(spannableString);
<ide> scrollDown();
<add> textView.append("\n");
<ide> }
<ide> catch(Exception e){
<ide> e.printStackTrace();
<ide> spannableString.setSpan(span, 0, 4, SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE);
<ide> textView.append(spannableString);
<ide> scrollDown();
<add> textView.append("\n");
<ide> }
<ide> catch(Exception e){
<ide> e.printStackTrace(); |
|
Java | apache-2.0 | 370ea6c68d3af73497debc3c2d5dc7f35eb0c91d | 0 | xnslong/logging-log4j2,GFriedrich/logging-log4j2,apache/logging-log4j2,apache/logging-log4j2,codescale/logging-log4j2,codescale/logging-log4j2,xnslong/logging-log4j2,codescale/logging-log4j2,GFriedrich/logging-log4j2,apache/logging-log4j2,GFriedrich/logging-log4j2,xnslong/logging-log4j2 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.nosql.appender;
import static org.easymock.EasyMock.anyInt;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.getCurrentArguments;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.MarkerManager;
import org.apache.logging.log4j.ThreadContext;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.apache.logging.log4j.junit.ThreadContextStackRule;
import org.apache.logging.log4j.message.Message;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
public class NoSqlDatabaseManagerTest {
private NoSqlConnection<Map<String, Object>, DefaultNoSqlObject> connection;
private NoSqlProvider<NoSqlConnection<Map<String, Object>, DefaultNoSqlObject>> provider;
@Rule
public final ThreadContextStackRule threadContextRule = new ThreadContextStackRule();
@Before
@SuppressWarnings("unchecked")
public void setUp() {
this.provider = createStrictMock(NoSqlProvider.class);
this.connection = createStrictMock(NoSqlConnection.class);
}
@After
public void tearDown() {
verify(this.provider, this.connection);
}
@Test
public void testConnection() {
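        // EasyMock lifecycle used throughout these tests: replay() arms the
        // mocks, verify() checks the recorded expectations, and reset() clears
        // them so the next phase can record new ones.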
replay(this.provider, this.connection);
try (final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0,
this.provider)) {
assertNotNull("The manager should not be null.", manager);
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
manager.commitAndClose();
}
}
@Test
public void testWriteInternalNotConnected01() {
replay(this.provider, this.connection);
try (final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0,
this.provider)) {
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final LogEvent event = createStrictMock(LogEvent.class);
replay(this.provider, this.connection, event);
try {
manager.writeInternal(event);
fail("Expected AppenderLoggingException.");
} catch (final AppenderLoggingException ignore) {
/* */
}
verify(event);
}
}
@Test
public void testWriteInternalNotConnected02() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final LogEvent event = createStrictMock(LogEvent.class);
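            // Simulate an already-closed connection: writeInternal() must
            // detect it and throw AppenderLoggingException.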
expect(this.connection.isClosed()).andReturn(true);
replay(this.provider, this.connection, event);
try {
manager.writeInternal(event);
fail("Expected AppenderLoggingException.");
} catch (final AppenderLoggingException ignore) {
/* */
}
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
@Test
public void testWriteInternal01() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
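            // The capture records the object handed to insertObject() so its
            // contents can be asserted field by field after writeInternal().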
final Capture<NoSqlObject<Map<String, Object>>> capture = EasyMock.newCapture();
final LogEvent event = createStrictMock(LogEvent.class);
final Message message = createStrictMock(Message.class);
expect(this.connection.isClosed()).andReturn(false);
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(event.getLevel()).andReturn(Level.WARN);
expect(event.getLoggerName()).andReturn("com.foo.NoSQLDbTest.testWriteInternal01");
expect(event.getMessage()).andReturn(message).times(2);
expect(message.getFormattedMessage()).andReturn("My formatted message 01.");
expect(event.getSource()).andReturn(new StackTraceElement("com.foo.Bar", "testMethod01", "Bar.java", 15));
expect(event.getMarker()).andReturn(null);
expect(event.getThreadId()).andReturn(1L);
expect(event.getThreadName()).andReturn("MyThread-A");
expect(event.getThreadPriority()).andReturn(1);
expect(event.getTimeMillis()).andReturn(1234567890123L).times(2);
expect(event.getThrown()).andReturn(null);
expect(event.getContextMap()).andReturn(null);
expect(event.getContextStack()).andReturn(null);
this.connection.insertObject(capture(capture));
expectLastCall();
replay(this.provider, this.connection, event, message);
manager.writeInternal(event);
final NoSqlObject<Map<String, Object>> inserted = capture.getValue();
assertNotNull("The inserted value should not be null.", inserted);
final Map<String, Object> object = inserted.unwrap();
assertNotNull("The unwrapped object should not be null.", object);
assertEquals("The level is not correct.", Level.WARN, object.get("level"));
assertEquals("The logger is not correct.", "com.foo.NoSQLDbTest.testWriteInternal01",
object.get("loggerName"));
assertEquals("The message is not correct.", "My formatted message 01.", object.get("message"));
assertEquals("The thread is not correct.", "MyThread-A", object.get("threadName"));
assertEquals("The millis is not correct.", 1234567890123L, object.get("millis"));
assertEquals("The date is not correct.", 1234567890123L, ((Date) object.get("date")).getTime());
assertTrue("The source should be a map.", object.get("source") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> source = (Map<String, Object>) object.get("source");
assertEquals("The class is not correct.", "com.foo.Bar", source.get("className"));
assertEquals("The method is not correct.", "testMethod01", source.get("methodName"));
assertEquals("The file name is not correct.", "Bar.java", source.get("fileName"));
assertEquals("The line number is not correct.", 15, source.get("lineNumber"));
assertNull("The marker should be null.", object.get("marker"));
assertNull("The thrown should be null.", object.get("thrown"));
assertNull("The context map should be null.", object.get("contextMap"));
assertNull("The context stack should be null.", object.get("contextStack"));
verify(this.provider, this.connection, event, message);
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
@Test
public void testWriteInternal02() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final Capture<NoSqlObject<Map<String, Object>>> capture = EasyMock.newCapture();
final RuntimeException exception = new RuntimeException("This is something cool!");
final Map<String, String> context = new HashMap<>();
context.put("hello", "world");
context.put("user", "pass");
final LogEvent event = createStrictMock(LogEvent.class);
final Message message = createStrictMock(Message.class);
ThreadContext.push("message1");
ThreadContext.push("stack2");
final ThreadContext.ContextStack stack = ThreadContext.getImmutableStack();
ThreadContext.clearStack();
expect(this.connection.isClosed()).andReturn(false);
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
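            // createList(n) is stubbed to return a plain array of length n,
            // standing in for the provider's native list representation.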
expect(this.connection.createList(anyInt())).andAnswer(new IAnswer<DefaultNoSqlObject[]>() {
@Override
public DefaultNoSqlObject[] answer() throws Throwable {
return new DefaultNoSqlObject[(Integer) getCurrentArguments()[0]];
}
});
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(event.getLevel()).andReturn(Level.DEBUG);
expect(event.getLoggerName()).andReturn("com.foo.NoSQLDbTest.testWriteInternal02");
expect(event.getMessage()).andReturn(message).times(2);
expect(message.getFormattedMessage()).andReturn("Another cool message 02.");
expect(event.getSource()).andReturn(new StackTraceElement("com.bar.Foo", "anotherMethod03", "Foo.java", 9));
expect(event.getMarker()).andReturn(MarkerManager.getMarker("LoneMarker"));
expect(event.getThreadId()).andReturn(1L);
expect(event.getThreadName()).andReturn("AnotherThread-B");
expect(event.getThreadPriority()).andReturn(1);
expect(event.getTimeMillis()).andReturn(987654321564L).times(2);
expect(event.getThrown()).andReturn(exception);
expect(event.getContextMap()).andReturn(context);
expect(event.getContextStack()).andReturn(stack);
this.connection.insertObject(capture(capture));
expectLastCall();
replay(this.provider, this.connection, event, message);
manager.writeInternal(event);
final NoSqlObject<Map<String, Object>> inserted = capture.getValue();
assertNotNull("The inserted value should not be null.", inserted);
final Map<String, Object> object = inserted.unwrap();
assertNotNull("The unwrapped object should not be null.", object);
assertEquals("The level is not correct.", Level.DEBUG, object.get("level"));
assertEquals("The logger is not correct.", "com.foo.NoSQLDbTest.testWriteInternal02",
object.get("loggerName"));
assertEquals("The message is not correct.", "Another cool message 02.", object.get("message"));
assertEquals("The thread is not correct.", "AnotherThread-B", object.get("threadName"));
assertEquals("The millis is not correct.", 987654321564L, object.get("millis"));
assertEquals("The date is not correct.", 987654321564L, ((Date) object.get("date")).getTime());
assertTrue("The source should be a map.", object.get("source") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> source = (Map<String, Object>) object.get("source");
assertEquals("The class is not correct.", "com.bar.Foo", source.get("className"));
assertEquals("The method is not correct.", "anotherMethod03", source.get("methodName"));
assertEquals("The file name is not correct.", "Foo.java", source.get("fileName"));
assertEquals("The line number is not correct.", 9, source.get("lineNumber"));
assertTrue("The marker should be a map.", object.get("marker") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> marker = (Map<String, Object>) object.get("marker");
assertEquals("The marker name is not correct.", "LoneMarker", marker.get("name"));
assertNull("The marker parent should be null.", marker.get("parent"));
assertTrue("The thrown should be a map.", object.get("thrown") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> thrown = (Map<String, Object>) object.get("thrown");
assertEquals("The thrown type is not correct.", "java.lang.RuntimeException", thrown.get("type"));
assertEquals("The thrown message is not correct.", "This is something cool!", thrown.get("message"));
assertTrue("The thrown stack trace should be a list.", thrown.get("stackTrace") instanceof List);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> stackTrace = (List<Map<String, Object>>) thrown.get("stackTrace");
assertEquals("The thrown stack trace length is not correct.", exception.getStackTrace().length,
stackTrace.size());
for (int i = 0; i < exception.getStackTrace().length; i++) {
final StackTraceElement e1 = exception.getStackTrace()[i];
final Map<String, Object> e2 = stackTrace.get(i);
assertEquals("Element class name [" + i + "] is not correct.", e1.getClassName(), e2.get("className"));
assertEquals("Element method name [" + i + "] is not correct.", e1.getMethodName(),
e2.get("methodName"));
assertEquals("Element file name [" + i + "] is not correct.", e1.getFileName(), e2.get("fileName"));
assertEquals("Element line number [" + i + "] is not correct.", e1.getLineNumber(),
e2.get("lineNumber"));
}
assertNull("The thrown should have no cause.", thrown.get("cause"));
assertTrue("The context map should be a map.", object.get("contextMap") instanceof Map);
assertEquals("The context map is not correct.", context, object.get("contextMap"));
assertTrue("The context stack should be list.", object.get("contextStack") instanceof List);
assertEquals("The context stack is not correct.", stack.asList(), object.get("contextStack"));
verify(this.provider, this.connection, event, message);
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
@Test
public void testWriteInternal03() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final Capture<NoSqlObject<Map<String, Object>>> capture = EasyMock.newCapture();
final IOException exception1 = new IOException("This is the cause.");
final SQLException exception2 = new SQLException("This is the result.", exception1);
final Map<String, String> context = new HashMap<>();
context.put("hello", "world");
context.put("user", "pass");
final LogEvent event = createStrictMock(LogEvent.class);
final Message message = createStrictMock(Message.class);
ThreadContext.push("message1");
ThreadContext.push("stack2");
final ThreadContext.ContextStack stack = ThreadContext.getImmutableStack();
ThreadContext.clearStack();
expect(this.connection.isClosed()).andReturn(false);
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(this.connection.createList(anyInt())).andAnswer(new IAnswer<DefaultNoSqlObject[]>() {
@Override
public DefaultNoSqlObject[] answer() throws Throwable {
return new DefaultNoSqlObject[(Integer) getCurrentArguments()[0]];
}
});
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(this.connection.createList(anyInt())).andAnswer(new IAnswer<DefaultNoSqlObject[]>() {
@Override
public DefaultNoSqlObject[] answer() throws Throwable {
return new DefaultNoSqlObject[(Integer) getCurrentArguments()[0]];
}
});
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(event.getLevel()).andReturn(Level.DEBUG);
expect(event.getLoggerName()).andReturn("com.foo.NoSQLDbTest.testWriteInternal02");
expect(event.getMessage()).andReturn(message).times(2);
expect(message.getFormattedMessage()).andReturn("Another cool message 02.");
expect(event.getSource()).andReturn(new StackTraceElement("com.bar.Foo", "anotherMethod03", "Foo.java", 9));
expect(event.getMarker()).andReturn(
MarkerManager.getMarker("AnotherMarker").addParents(MarkerManager.getMarker("Parent1").addParents(MarkerManager.getMarker("GrandParent1")),
MarkerManager.getMarker("Parent2")));
expect(event.getThreadId()).andReturn(1L);
expect(event.getThreadName()).andReturn("AnotherThread-B");
expect(event.getThreadPriority()).andReturn(1);
expect(event.getTimeMillis()).andReturn(987654321564L).times(2);
expect(event.getThrown()).andReturn(exception2);
expect(event.getContextMap()).andReturn(context);
expect(event.getContextStack()).andReturn(stack);
this.connection.insertObject(capture(capture));
expectLastCall();
replay(this.provider, this.connection, event, message);
manager.writeInternal(event);
final NoSqlObject<Map<String, Object>> inserted = capture.getValue();
assertNotNull("The inserted value should not be null.", inserted);
final Map<String, Object> object = inserted.unwrap();
assertNotNull("The unwrapped object should not be null.", object);
assertEquals("The level is not correct.", Level.DEBUG, object.get("level"));
assertEquals("The logger is not correct.", "com.foo.NoSQLDbTest.testWriteInternal02",
object.get("loggerName"));
assertEquals("The message is not correct.", "Another cool message 02.", object.get("message"));
assertEquals("The thread is not correct.", "AnotherThread-B", object.get("threadName"));
assertEquals("The millis is not correct.", 987654321564L, object.get("millis"));
assertEquals("The date is not correct.", 987654321564L, ((Date) object.get("date")).getTime());
assertTrue("The source should be a map.", object.get("source") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> source = (Map<String, Object>) object.get("source");
assertEquals("The class is not correct.", "com.bar.Foo", source.get("className"));
assertEquals("The method is not correct.", "anotherMethod03", source.get("methodName"));
assertEquals("The file name is not correct.", "Foo.java", source.get("fileName"));
assertEquals("The line number is not correct.", 9, source.get("lineNumber"));
assertTrue("The marker should be a map.", object.get("marker") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> marker = (Map<String, Object>) object.get("marker");
assertEquals("The marker name is not correct.", "AnotherMarker", marker.get("name"));
assertTrue("The marker parents should be a list.", marker.get("parents") instanceof List);
@SuppressWarnings("unchecked")
final List<Object> markerParents = (List<Object>) marker.get("parents");
assertEquals("The marker parents should contain two parents", 2, markerParents.size());
assertTrue("The marker parents[0] should be a map.", markerParents.get(0) instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> parent1 = (Map<String, Object>) markerParents.get(0);
assertEquals("The first marker parent name is not correct.", "Parent1", parent1.get("name"));
assertTrue("The marker parents[1] should be a map.", markerParents.get(1) instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> parent2 = (Map<String, Object>) markerParents.get(1);
assertEquals("The second marker parent name is not correct.", "Parent2", parent2.get("name"));
assertNull("The second marker should have no parent.", parent2.get("parent"));
assertTrue("The parent1 parents should be a list.", parent1.get("parents") instanceof List);
@SuppressWarnings("unchecked")
final List<Object> parent1Parents = (List<Object>) parent1.get("parents");
assertEquals("The parent1 parents should have only one parent", 1, parent1Parents.size());
assertTrue("The parent1Parents[0] should be a map.", parent1Parents.get(0) instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> parent1parent = (Map<String, Object>) parent1Parents.get(0);
assertEquals("The first parent1 parent name is not correct.", "GrandParent1", parent1parent.get("name"));
assertNull("The parent1parent marker should have no parent.", parent1parent.get("parent"));
assertTrue("The thrown should be a map.", object.get("thrown") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> thrown = (Map<String, Object>) object.get("thrown");
assertEquals("The thrown type is not correct.", "java.sql.SQLException", thrown.get("type"));
assertEquals("The thrown message is not correct.", "This is the result.", thrown.get("message"));
assertTrue("The thrown stack trace should be a list.", thrown.get("stackTrace") instanceof List);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> stackTrace = (List<Map<String, Object>>) thrown.get("stackTrace");
assertEquals("The thrown stack trace length is not correct.", exception2.getStackTrace().length,
stackTrace.size());
for (int i = 0; i < exception2.getStackTrace().length; i++) {
final StackTraceElement e1 = exception2.getStackTrace()[i];
final Map<String, Object> e2 = stackTrace.get(i);
assertEquals("Element class name [" + i + "] is not correct.", e1.getClassName(), e2.get("className"));
assertEquals("Element method name [" + i + "] is not correct.", e1.getMethodName(),
e2.get("methodName"));
assertEquals("Element file name [" + i + "] is not correct.", e1.getFileName(), e2.get("fileName"));
assertEquals("Element line number [" + i + "] is not correct.", e1.getLineNumber(),
e2.get("lineNumber"));
}
assertTrue("The thrown cause should be a map.", thrown.get("cause") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> cause = (Map<String, Object>) thrown.get("cause");
assertEquals("The cause type is not correct.", "java.io.IOException", cause.get("type"));
assertEquals("The cause message is not correct.", "This is the cause.", cause.get("message"));
assertTrue("The cause stack trace should be a list.", cause.get("stackTrace") instanceof List);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> causeStackTrace = (List<Map<String, Object>>) cause.get("stackTrace");
assertEquals("The cause stack trace length is not correct.", exception1.getStackTrace().length,
causeStackTrace.size());
for (int i = 0; i < exception1.getStackTrace().length; i++) {
final StackTraceElement e1 = exception1.getStackTrace()[i];
final Map<String, Object> e2 = causeStackTrace.get(i);
assertEquals("Element class name [" + i + "] is not correct.", e1.getClassName(), e2.get("className"));
assertEquals("Element method name [" + i + "] is not correct.", e1.getMethodName(),
e2.get("methodName"));
assertEquals("Element file name [" + i + "] is not correct.", e1.getFileName(), e2.get("fileName"));
assertEquals("Element line number [" + i + "] is not correct.", e1.getLineNumber(),
e2.get("lineNumber"));
}
assertNull("The cause should have no cause.", cause.get("cause"));
assertTrue("The context map should be a map.", object.get("contextMap") instanceof Map);
assertEquals("The context map is not correct.", context, object.get("contextMap"));
assertTrue("The context stack should be list.", object.get("contextStack") instanceof List);
assertEquals("The context stack is not correct.", stack.asList(), object.get("contextStack"));
verify(this.provider, this.connection, event, message);
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
}
| log4j-nosql/src/test/java/org/apache/logging/log4j/nosql/appender/NoSqlDatabaseManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.nosql.appender;
import static org.easymock.EasyMock.anyInt;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.getCurrentArguments;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.MarkerManager;
import org.apache.logging.log4j.ThreadContext;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.apache.logging.log4j.junit.ThreadContextStackRule;
import org.apache.logging.log4j.message.Message;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
public class NoSqlDatabaseManagerTest {
private NoSqlConnection<Map<String, Object>, DefaultNoSqlObject> connection;
private NoSqlProvider<NoSqlConnection<Map<String, Object>, DefaultNoSqlObject>> provider;
@Rule
public final ThreadContextStackRule threadContextRule = new ThreadContextStackRule();
@Before
@SuppressWarnings("unchecked")
public void setUp() {
this.provider = createStrictMock(NoSqlProvider.class);
this.connection = createStrictMock(NoSqlConnection.class);
}
@After
public void tearDown() {
verify(this.provider, this.connection);
}
@Test
public void testConnection() {
replay(this.provider, this.connection);
try (final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0,
this.provider)) {
assertNotNull("The manager should not be null.", manager);
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
manager.commitAndClose();
}
}
@Test
public void testWriteInternalNotConnected01() {
replay(this.provider, this.connection);
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final LogEvent event = createStrictMock(LogEvent.class);
replay(this.provider, this.connection, event);
try {
manager.writeInternal(event);
fail("Expected AppenderLoggingException.");
} catch (final AppenderLoggingException ignore) {
/* */
}
verify(event);
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
@Test
public void testWriteInternalNotConnected02() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final LogEvent event = createStrictMock(LogEvent.class);
expect(this.connection.isClosed()).andReturn(true);
replay(this.provider, this.connection, event);
try {
manager.writeInternal(event);
fail("Expected AppenderLoggingException.");
} catch (final AppenderLoggingException ignore) {
/* */
}
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
@Test
public void testWriteInternal01() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final Capture<NoSqlObject<Map<String, Object>>> capture = EasyMock.newCapture();
final LogEvent event = createStrictMock(LogEvent.class);
final Message message = createStrictMock(Message.class);
expect(this.connection.isClosed()).andReturn(false);
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(event.getLevel()).andReturn(Level.WARN);
expect(event.getLoggerName()).andReturn("com.foo.NoSQLDbTest.testWriteInternal01");
expect(event.getMessage()).andReturn(message).times(2);
expect(message.getFormattedMessage()).andReturn("My formatted message 01.");
expect(event.getSource()).andReturn(new StackTraceElement("com.foo.Bar", "testMethod01", "Bar.java", 15));
expect(event.getMarker()).andReturn(null);
expect(event.getThreadId()).andReturn(1L);
expect(event.getThreadName()).andReturn("MyThread-A");
expect(event.getThreadPriority()).andReturn(1);
expect(event.getTimeMillis()).andReturn(1234567890123L).times(2);
expect(event.getThrown()).andReturn(null);
expect(event.getContextMap()).andReturn(null);
expect(event.getContextStack()).andReturn(null);
this.connection.insertObject(capture(capture));
expectLastCall();
replay(this.provider, this.connection, event, message);
manager.writeInternal(event);
final NoSqlObject<Map<String, Object>> inserted = capture.getValue();
assertNotNull("The inserted value should not be null.", inserted);
final Map<String, Object> object = inserted.unwrap();
assertNotNull("The unwrapped object should not be null.", object);
assertEquals("The level is not correct.", Level.WARN, object.get("level"));
assertEquals("The logger is not correct.", "com.foo.NoSQLDbTest.testWriteInternal01",
object.get("loggerName"));
assertEquals("The message is not correct.", "My formatted message 01.", object.get("message"));
assertEquals("The thread is not correct.", "MyThread-A", object.get("threadName"));
assertEquals("The millis is not correct.", 1234567890123L, object.get("millis"));
assertEquals("The date is not correct.", 1234567890123L, ((Date) object.get("date")).getTime());
assertTrue("The source should be a map.", object.get("source") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> source = (Map<String, Object>) object.get("source");
assertEquals("The class is not correct.", "com.foo.Bar", source.get("className"));
assertEquals("The method is not correct.", "testMethod01", source.get("methodName"));
assertEquals("The file name is not correct.", "Bar.java", source.get("fileName"));
assertEquals("The line number is not correct.", 15, source.get("lineNumber"));
assertNull("The marker should be null.", object.get("marker"));
assertNull("The thrown should be null.", object.get("thrown"));
assertNull("The context map should be null.", object.get("contextMap"));
assertNull("The context stack should be null.", object.get("contextStack"));
verify(this.provider, this.connection, event, message);
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
@Test
public void testWriteInternal02() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final Capture<NoSqlObject<Map<String, Object>>> capture = EasyMock.newCapture();
final RuntimeException exception = new RuntimeException("This is something cool!");
final Map<String, String> context = new HashMap<>();
context.put("hello", "world");
context.put("user", "pass");
final LogEvent event = createStrictMock(LogEvent.class);
final Message message = createStrictMock(Message.class);
ThreadContext.push("message1");
ThreadContext.push("stack2");
final ThreadContext.ContextStack stack = ThreadContext.getImmutableStack();
ThreadContext.clearStack();
expect(this.connection.isClosed()).andReturn(false);
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(this.connection.createList(anyInt())).andAnswer(new IAnswer<DefaultNoSqlObject[]>() {
@Override
public DefaultNoSqlObject[] answer() throws Throwable {
return new DefaultNoSqlObject[(Integer) getCurrentArguments()[0]];
}
});
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(event.getLevel()).andReturn(Level.DEBUG);
expect(event.getLoggerName()).andReturn("com.foo.NoSQLDbTest.testWriteInternal02");
expect(event.getMessage()).andReturn(message).times(2);
expect(message.getFormattedMessage()).andReturn("Another cool message 02.");
expect(event.getSource()).andReturn(new StackTraceElement("com.bar.Foo", "anotherMethod03", "Foo.java", 9));
expect(event.getMarker()).andReturn(MarkerManager.getMarker("LoneMarker"));
expect(event.getThreadId()).andReturn(1L);
expect(event.getThreadName()).andReturn("AnotherThread-B");
expect(event.getThreadPriority()).andReturn(1);
expect(event.getTimeMillis()).andReturn(987654321564L).times(2);
expect(event.getThrown()).andReturn(exception);
expect(event.getContextMap()).andReturn(context);
expect(event.getContextStack()).andReturn(stack);
this.connection.insertObject(capture(capture));
expectLastCall();
replay(this.provider, this.connection, event, message);
manager.writeInternal(event);
final NoSqlObject<Map<String, Object>> inserted = capture.getValue();
assertNotNull("The inserted value should not be null.", inserted);
final Map<String, Object> object = inserted.unwrap();
assertNotNull("The unwrapped object should not be null.", object);
assertEquals("The level is not correct.", Level.DEBUG, object.get("level"));
assertEquals("The logger is not correct.", "com.foo.NoSQLDbTest.testWriteInternal02",
object.get("loggerName"));
assertEquals("The message is not correct.", "Another cool message 02.", object.get("message"));
assertEquals("The thread is not correct.", "AnotherThread-B", object.get("threadName"));
assertEquals("The millis is not correct.", 987654321564L, object.get("millis"));
assertEquals("The date is not correct.", 987654321564L, ((Date) object.get("date")).getTime());
assertTrue("The source should be a map.", object.get("source") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> source = (Map<String, Object>) object.get("source");
assertEquals("The class is not correct.", "com.bar.Foo", source.get("className"));
assertEquals("The method is not correct.", "anotherMethod03", source.get("methodName"));
assertEquals("The file name is not correct.", "Foo.java", source.get("fileName"));
assertEquals("The line number is not correct.", 9, source.get("lineNumber"));
assertTrue("The marker should be a map.", object.get("marker") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> marker = (Map<String, Object>) object.get("marker");
assertEquals("The marker name is not correct.", "LoneMarker", marker.get("name"));
assertNull("The marker parent should be null.", marker.get("parent"));
assertTrue("The thrown should be a map.", object.get("thrown") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> thrown = (Map<String, Object>) object.get("thrown");
assertEquals("The thrown type is not correct.", "java.lang.RuntimeException", thrown.get("type"));
assertEquals("The thrown message is not correct.", "This is something cool!", thrown.get("message"));
assertTrue("The thrown stack trace should be a list.", thrown.get("stackTrace") instanceof List);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> stackTrace = (List<Map<String, Object>>) thrown.get("stackTrace");
assertEquals("The thrown stack trace length is not correct.", exception.getStackTrace().length,
stackTrace.size());
for (int i = 0; i < exception.getStackTrace().length; i++) {
final StackTraceElement e1 = exception.getStackTrace()[i];
final Map<String, Object> e2 = stackTrace.get(i);
assertEquals("Element class name [" + i + "] is not correct.", e1.getClassName(), e2.get("className"));
assertEquals("Element method name [" + i + "] is not correct.", e1.getMethodName(),
e2.get("methodName"));
assertEquals("Element file name [" + i + "] is not correct.", e1.getFileName(), e2.get("fileName"));
assertEquals("Element line number [" + i + "] is not correct.", e1.getLineNumber(),
e2.get("lineNumber"));
}
assertNull("The thrown should have no cause.", thrown.get("cause"));
assertTrue("The context map should be a map.", object.get("contextMap") instanceof Map);
assertEquals("The context map is not correct.", context, object.get("contextMap"));
assertTrue("The context stack should be list.", object.get("contextStack") instanceof List);
assertEquals("The context stack is not correct.", stack.asList(), object.get("contextStack"));
verify(this.provider, this.connection, event, message);
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
@Test
public void testWriteInternal03() {
final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
try {
replay(this.provider, this.connection);
manager.startup();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
expect(this.provider.getConnection()).andReturn(this.connection);
replay(this.provider, this.connection);
manager.connectAndStart();
verify(this.provider, this.connection);
reset(this.provider, this.connection);
final Capture<NoSqlObject<Map<String, Object>>> capture = EasyMock.newCapture();
final IOException exception1 = new IOException("This is the cause.");
final SQLException exception2 = new SQLException("This is the result.", exception1);
final Map<String, String> context = new HashMap<>();
context.put("hello", "world");
context.put("user", "pass");
final LogEvent event = createStrictMock(LogEvent.class);
final Message message = createStrictMock(Message.class);
ThreadContext.push("message1");
ThreadContext.push("stack2");
final ThreadContext.ContextStack stack = ThreadContext.getImmutableStack();
ThreadContext.clearStack();
expect(this.connection.isClosed()).andReturn(false);
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(this.connection.createList(anyInt())).andAnswer(new IAnswer<DefaultNoSqlObject[]>() {
@Override
public DefaultNoSqlObject[] answer() throws Throwable {
return new DefaultNoSqlObject[(Integer) getCurrentArguments()[0]];
}
});
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(this.connection.createList(anyInt())).andAnswer(new IAnswer<DefaultNoSqlObject[]>() {
@Override
public DefaultNoSqlObject[] answer() throws Throwable {
return new DefaultNoSqlObject[(Integer) getCurrentArguments()[0]];
}
});
expect(this.connection.createObject()).andAnswer(new IAnswer<DefaultNoSqlObject>() {
@Override
public DefaultNoSqlObject answer() throws Throwable {
return new DefaultNoSqlObject();
}
}).atLeastOnce();
expect(event.getLevel()).andReturn(Level.DEBUG);
expect(event.getLoggerName()).andReturn("com.foo.NoSQLDbTest.testWriteInternal02");
expect(event.getMessage()).andReturn(message).times(2);
expect(message.getFormattedMessage()).andReturn("Another cool message 02.");
expect(event.getSource()).andReturn(new StackTraceElement("com.bar.Foo", "anotherMethod03", "Foo.java", 9));
expect(event.getMarker()).andReturn(
MarkerManager.getMarker("AnotherMarker").addParents(MarkerManager.getMarker("Parent1").addParents(MarkerManager.getMarker("GrandParent1")),
MarkerManager.getMarker("Parent2")));
expect(event.getThreadId()).andReturn(1L);
expect(event.getThreadName()).andReturn("AnotherThread-B");
expect(event.getThreadPriority()).andReturn(1);
expect(event.getTimeMillis()).andReturn(987654321564L).times(2);
expect(event.getThrown()).andReturn(exception2);
expect(event.getContextMap()).andReturn(context);
expect(event.getContextStack()).andReturn(stack);
this.connection.insertObject(capture(capture));
expectLastCall();
replay(this.provider, this.connection, event, message);
manager.writeInternal(event);
final NoSqlObject<Map<String, Object>> inserted = capture.getValue();
assertNotNull("The inserted value should not be null.", inserted);
final Map<String, Object> object = inserted.unwrap();
assertNotNull("The unwrapped object should not be null.", object);
assertEquals("The level is not correct.", Level.DEBUG, object.get("level"));
assertEquals("The logger is not correct.", "com.foo.NoSQLDbTest.testWriteInternal02",
object.get("loggerName"));
assertEquals("The message is not correct.", "Another cool message 02.", object.get("message"));
assertEquals("The thread is not correct.", "AnotherThread-B", object.get("threadName"));
assertEquals("The millis is not correct.", 987654321564L, object.get("millis"));
assertEquals("The date is not correct.", 987654321564L, ((Date) object.get("date")).getTime());
assertTrue("The source should be a map.", object.get("source") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> source = (Map<String, Object>) object.get("source");
assertEquals("The class is not correct.", "com.bar.Foo", source.get("className"));
assertEquals("The method is not correct.", "anotherMethod03", source.get("methodName"));
assertEquals("The file name is not correct.", "Foo.java", source.get("fileName"));
assertEquals("The line number is not correct.", 9, source.get("lineNumber"));
assertTrue("The marker should be a map.", object.get("marker") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> marker = (Map<String, Object>) object.get("marker");
assertEquals("The marker name is not correct.", "AnotherMarker", marker.get("name"));
assertTrue("The marker parents should be a list.", marker.get("parents") instanceof List);
@SuppressWarnings("unchecked")
final List<Object> markerParents = (List<Object>) marker.get("parents");
assertEquals("The marker parents should contain two parents", 2, markerParents.size());
assertTrue("The marker parents[0] should be a map.", markerParents.get(0) instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> parent1 = (Map<String, Object>) markerParents.get(0);
assertEquals("The first marker parent name is not correct.", "Parent1", parent1.get("name"));
assertTrue("The marker parents[1] should be a map.", markerParents.get(1) instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> parent2 = (Map<String, Object>) markerParents.get(1);
assertEquals("The second marker parent name is not correct.", "Parent2", parent2.get("name"));
assertNull("The second marker should have no parent.", parent2.get("parent"));
assertTrue("The parent1 parents should be a list.", parent1.get("parents") instanceof List);
@SuppressWarnings("unchecked")
final List<Object> parent1Parents = (List<Object>) parent1.get("parents");
assertEquals("The parent1 parents should have only one parent", 1, parent1Parents.size());
assertTrue("The parent1Parents[0] should be a map.", parent1Parents.get(0) instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> parent1parent = (Map<String, Object>) parent1Parents.get(0);
assertEquals("The first parent1 parent name is not correct.", "GrandParent1", parent1parent.get("name"));
assertNull("The parent1parent marker should have no parent.", parent1parent.get("parent"));
assertTrue("The thrown should be a map.", object.get("thrown") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> thrown = (Map<String, Object>) object.get("thrown");
assertEquals("The thrown type is not correct.", "java.sql.SQLException", thrown.get("type"));
assertEquals("The thrown message is not correct.", "This is the result.", thrown.get("message"));
assertTrue("The thrown stack trace should be a list.", thrown.get("stackTrace") instanceof List);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> stackTrace = (List<Map<String, Object>>) thrown.get("stackTrace");
assertEquals("The thrown stack trace length is not correct.", exception2.getStackTrace().length,
stackTrace.size());
for (int i = 0; i < exception2.getStackTrace().length; i++) {
final StackTraceElement e1 = exception2.getStackTrace()[i];
final Map<String, Object> e2 = stackTrace.get(i);
assertEquals("Element class name [" + i + "] is not correct.", e1.getClassName(), e2.get("className"));
assertEquals("Element method name [" + i + "] is not correct.", e1.getMethodName(),
e2.get("methodName"));
assertEquals("Element file name [" + i + "] is not correct.", e1.getFileName(), e2.get("fileName"));
assertEquals("Element line number [" + i + "] is not correct.", e1.getLineNumber(),
e2.get("lineNumber"));
}
assertTrue("The thrown cause should be a map.", thrown.get("cause") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> cause = (Map<String, Object>) thrown.get("cause");
assertEquals("The cause type is not correct.", "java.io.IOException", cause.get("type"));
assertEquals("The cause message is not correct.", "This is the cause.", cause.get("message"));
assertTrue("The cause stack trace should be a list.", cause.get("stackTrace") instanceof List);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> causeStackTrace = (List<Map<String, Object>>) cause.get("stackTrace");
assertEquals("The cause stack trace length is not correct.", exception1.getStackTrace().length,
causeStackTrace.size());
for (int i = 0; i < exception1.getStackTrace().length; i++) {
final StackTraceElement e1 = exception1.getStackTrace()[i];
final Map<String, Object> e2 = causeStackTrace.get(i);
assertEquals("Element class name [" + i + "] is not correct.", e1.getClassName(), e2.get("className"));
assertEquals("Element method name [" + i + "] is not correct.", e1.getMethodName(),
e2.get("methodName"));
assertEquals("Element file name [" + i + "] is not correct.", e1.getFileName(), e2.get("fileName"));
assertEquals("Element line number [" + i + "] is not correct.", e1.getLineNumber(),
e2.get("lineNumber"));
}
assertNull("The cause should have no cause.", cause.get("cause"));
assertTrue("The context map should be a map.", object.get("contextMap") instanceof Map);
assertEquals("The context map is not correct.", context, object.get("contextMap"));
assertTrue("The context stack should be list.", object.get("contextStack") instanceof List);
assertEquals("The context stack is not correct.", stack.asList(), object.get("contextStack"));
verify(this.provider, this.connection, event, message);
} finally {
try {
manager.close();
} catch (final Throwable ignore) {
/* */
}
}
}
}
| Use try-with-resources. | log4j-nosql/src/test/java/org/apache/logging/log4j/nosql/appender/NoSqlDatabaseManagerTest.java | Use try-with-resources. | <ide><path>og4j-nosql/src/test/java/org/apache/logging/log4j/nosql/appender/NoSqlDatabaseManagerTest.java
<ide> }
<ide>
<ide> @Test
<del> public void testWriteInternalNotConnected01() {
<del> replay(this.provider, this.connection);
<del>
<del> final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0, this.provider);
<del>
<del> try {
<del> verify(this.provider, this.connection);
<del> reset(this.provider, this.connection);
<del>
<del> final LogEvent event = createStrictMock(LogEvent.class);
<del> replay(this.provider, this.connection, event);
<del>
<del> try {
<del> manager.writeInternal(event);
<del> fail("Expected AppenderLoggingException.");
<del> } catch (final AppenderLoggingException ignore) {
<del> /* */
<del> }
<del>
<del> verify(event);
<del> } finally {
<del> try {
<del> manager.close();
<del> } catch (final Throwable ignore) {
<del> /* */
<del> }
<del> }
<del> }
<add> public void testWriteInternalNotConnected01() {
<add> replay(this.provider, this.connection);
<add>
<add> try (final NoSqlDatabaseManager<?> manager = NoSqlDatabaseManager.getNoSqlDatabaseManager("name", 0,
<add> this.provider)) {
<add>
<add> verify(this.provider, this.connection);
<add> reset(this.provider, this.connection);
<add>
<add> final LogEvent event = createStrictMock(LogEvent.class);
<add> replay(this.provider, this.connection, event);
<add>
<add> try {
<add> manager.writeInternal(event);
<add> fail("Expected AppenderLoggingException.");
<add> } catch (final AppenderLoggingException ignore) {
<add> /* */
<add> }
<add>
<add> verify(event);
<add> }
<add> }
<ide>
<ide> @Test
<ide> public void testWriteInternalNotConnected02() { |
|
Java | lgpl-2.1 | d129ab5bbd0413b876ff29412b736865303274f7 | 0 | innopolis-jolie-smt-typechecker/jolie,innopolis-jolie-smt-typechecker/jolie,agwe/jolie,jolie-projects/jolie-async,jolie-projects/jolie-async,mawolf/jolie,agwe/jolie,innopolis-jolie-smt-typechecker/jolie,agwe/jolie,mawolf/jolie,jolie-projects/jolie-async,mawolf/jolie,mawolf/jolie,jolie-projects/jolie-async,agwe/jolie,innopolis-jolie-smt-typechecker/jolie | /***************************************************************************
* Copyright (C) by Fabrizio Montesi *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Library General Public License as *
* published by the Free Software Foundation; either version 2 of the *
* License, or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
* *
* For details about the authors of this software, see the AUTHORS file. *
***************************************************************************/
package joliex.db;
import java.math.BigDecimal;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import jolie.net.CommMessage;
import jolie.runtime.CanUseJars;
import jolie.runtime.FaultException;
import jolie.runtime.JavaService;
import jolie.runtime.Value;
import jolie.runtime.ValueVector;
/**
* @author Fabrizio Montesi
* 2008 - Marco Montesi: connection string fix for Microsoft SQL Server
* 2009 - Claudio Guidi: added support for SQLite
*/
@CanUseJars({
"derby.jar", // Java DB - Embedded
"derbyclient.jar", // Java DB - Client
"jdbc-mysql.jar", // MySQL
"jdbc-postgresql.jar", // PostgreSQL
"jdbc-sqlserver.jar", // Microsoft SQLServer
"jdbc-sqlite.jar" // SQLite
})
public class DatabaseService extends JavaService
{
private Connection connection = null;
private String connectionString = null;
private String username = null;
private String password = null;
private boolean mustCheckConnection = false;
final private Object transactionMutex = new Object();
@Override
protected void finalize()
{
if ( connection != null ) {
try {
connection.close();
} catch( SQLException e ) {}
}
}
public CommMessage connect( CommMessage message )
throws FaultException
{
if ( connection != null ) {
try {
connectionString = null;
username = null;
password = null;
connection.close();
} catch( SQLException e ) {}
}
mustCheckConnection = message.value().getFirstChild( "checkConnection" ).intValue() > 0;
String driver = message.value().getChildren( "driver" ).first().strValue();
String host = message.value().getChildren( "host" ).first().strValue();
String port = message.value().getChildren( "port" ).first().strValue();
String databaseName = message.value().getChildren( "database" ).first().strValue();
username = message.value().getChildren( "username" ).first().strValue();
password = message.value().getChildren( "password" ).first().strValue();
String separator = "/";
try {
if ( "postgresql".equals( driver ) ) {
Class.forName( "org.postgresql.Driver" );
} else if ( "mysql".equals( driver ) ) {
Class.forName( "com.mysql.jdbc.Driver" );
} else if ( "derby".equals( driver ) ) {
Class.forName( "org.apache.derby.jdbc.ClientDriver" );
} else if ( "sqlite".equals( driver ) ) {
Class.forName( "org.sqlite.JDBC" );
} else if ( "sqlserver".equals( driver ) ) {
//Class.forName( "com.microsoft.sqlserver.jdbc.SQLServerDriver" );
separator = ";";
databaseName = "databaseName=" + databaseName;
} else if ( "as400".equals( driver ) ) {
Class.forName( "com.ibm.as400.access.AS400JDBCDriver" );
} else {
throw new FaultException( "InvalidDriver", "Uknown driver: " + driver );
}
connectionString = "jdbc:"+ driver + "://" + host + ( port.equals( "" ) ? "" : ":" + port ) + separator + databaseName;
connection = DriverManager.getConnection(
connectionString,
username,
password
);
if ( connection == null ) {
throw new FaultException( "ConnectionError" );
}
} catch( ClassNotFoundException e ) {
throw new FaultException( "InvalidDriver", e );
} catch( SQLException e ) {
throw new FaultException( "ConnectionError", e );
}
return CommMessage.createResponse( message, Value.create() );
}
private void checkConnection()
throws FaultException
{
if ( connection == null ) {
throw new FaultException( "ConnectionError" );
}
if ( mustCheckConnection ) {
try {
if ( !connection.isValid( 0 ) ) {
connection = DriverManager.getConnection(
connectionString,
username,
password
);
}
} catch( SQLException e ) {
throw new FaultException( e );
}
}
}
public CommMessage update( CommMessage request )
throws FaultException
{
checkConnection();
Value resultValue = Value.create();
String query = request.value().strValue();
try {
synchronized( transactionMutex ) {
Statement stm = connection.createStatement();
resultValue.setValue( stm.executeUpdate( query ) );
}
} catch( SQLException e ) {
throw new FaultException( e );
}
return CommMessage.createResponse( request, resultValue );
}
private static void resultSetToValueVector( ResultSet result, ValueVector vector )
throws SQLException
{
Value rowValue, fieldValue;
ResultSetMetaData metadata = result.getMetaData();
int cols = metadata.getColumnCount();
int i;
int rowIndex = 0;
while( result.next() ) {
rowValue = vector.get( rowIndex );
for( i = 1; i <= cols; i++ ) {
fieldValue = rowValue.getFirstChild( metadata.getColumnLabel( i ) );
switch( metadata.getColumnType( i ) ) {
case java.sql.Types.INTEGER:
case java.sql.Types.SMALLINT:
case java.sql.Types.TINYINT:
fieldValue.setValue( result.getInt( i ) );
break;
case java.sql.Types.BIGINT:
// TODO: to be changed when getting support for Long in Jolie.
fieldValue.setValue( result.getInt( i ) );
break;
case java.sql.Types.DOUBLE:
fieldValue.setValue( result.getDouble( i ) );
break;
case java.sql.Types.FLOAT:
fieldValue.setValue( result.getFloat( i ) );
break;
case java.sql.Types.BLOB:
//fieldValue.setStrValue( result.getBlob( i ).toString() );
break;
case java.sql.Types.CLOB:
Clob clob = result.getClob( i );
fieldValue.setValue( clob.getSubString( 0L, (int)clob.length() ) );
break;
case java.sql.Types.NVARCHAR:
case java.sql.Types.NCHAR:
case java.sql.Types.LONGNVARCHAR:
fieldValue.setValue( result.getNString( i ) );
break;
case java.sql.Types.NUMERIC:
BigDecimal dec = result.getBigDecimal( i );
if ( dec.scale() <= 0 ) {
// May lose information.
// Pay some attention to this when Long becomes supported by JOLIE.
fieldValue.setValue( dec.intValue() );
} else if ( dec.scale() > 0 ) {
fieldValue.setValue( dec.doubleValue() );
}
break;
case java.sql.Types.VARCHAR:
default:
fieldValue.setValue( result.getString( i ) );
break;
}
}
rowIndex++;
}
}
public CommMessage executeTransaction( CommMessage request )
throws FaultException
{
checkConnection();
Value resultValue = Value.create();
ValueVector resultVector = resultValue.getChildren( "result" );
try {
synchronized( transactionMutex ) {
connection.setAutoCommit( false );
Value currResultValue;
for( Value statementValue : request.value().getChildren( "statement" ) ) {
currResultValue = Value.create();
Statement stm = connection.createStatement();
if ( stm.execute( statementValue.strValue() ) == true ) {
resultSetToValueVector( stm.getResultSet(), currResultValue.getChildren( "row" ) );
}
resultVector.add( currResultValue );
}
connection.commit();
connection.setAutoCommit( true );
}
} catch( SQLException e ) {
throw new FaultException( e );
}
return CommMessage.createResponse( request, resultValue );
}
public CommMessage query( CommMessage request )
throws FaultException
{
checkConnection();
Value resultValue = Value.create();
try {
synchronized( transactionMutex ) {
Statement stm = connection.createStatement();
ResultSet result = stm.executeQuery( request.value().strValue() );
resultSetToValueVector( result, resultValue.getChildren( "row" ) );
}
} catch( SQLException e ) {
throw new FaultException( "SQLException", e );
}
return CommMessage.createResponse( request, resultValue );
}
}
| jolieJavaServices/src/joliex/db/DatabaseService.java | /***************************************************************************
* Copyright (C) by Fabrizio Montesi *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Library General Public License as *
* published by the Free Software Foundation; either version 2 of the *
* License, or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
* *
* For details about the authors of this software, see the AUTHORS file. *
***************************************************************************/
package joliex.db;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import jolie.net.CommMessage;
import jolie.runtime.CanUseJars;
import jolie.runtime.FaultException;
import jolie.runtime.JavaService;
import jolie.runtime.Value;
import jolie.runtime.ValueVector;
/**
* @author Fabrizio Montesi
* 2008 - Marco Montesi: connection string fix for Microsoft SQL Server
* 2009 - Claudio Guidi: added support for SQLite
*/
@CanUseJars({
"derby.jar", // Java DB - Embedded
"derbyclient.jar", // Java DB - Client
"jdbc-mysql.jar", // MySQL
"jdbc-postgresql.jar", // PostgreSQL
"jdbc-sqlserver.jar", // Microsoft SQLServer
"jdbc-sqlite.jar" // SQLite
})
public class DatabaseService extends JavaService
{
private Connection connection = null;
private String connectionString = null;
private String username = null;
private String password = null;
private boolean mustCheckConnection = false;
final private Object transactionMutex = new Object();
@Override
protected void finalize()
{
if ( connection != null ) {
try {
connection.close();
} catch( SQLException e ) {}
}
}
public CommMessage connect( CommMessage message )
throws FaultException
{
if ( connection != null ) {
try {
connectionString = null;
username = null;
password = null;
connection.close();
} catch( SQLException e ) {}
}
mustCheckConnection = message.value().getFirstChild( "checkConnection" ).intValue() > 0;
String driver = message.value().getChildren( "driver" ).first().strValue();
String host = message.value().getChildren( "host" ).first().strValue();
String port = message.value().getChildren( "port" ).first().strValue();
String databaseName = message.value().getChildren( "database" ).first().strValue();
username = message.value().getChildren( "username" ).first().strValue();
password = message.value().getChildren( "password" ).first().strValue();
String separator = "/";
try {
if ( "postgresql".equals( driver ) ) {
Class.forName( "org.postgresql.Driver" );
} else if ( "mysql".equals( driver ) ) {
Class.forName( "com.mysql.jdbc.Driver" );
} else if ( "derby".equals( driver ) ) {
Class.forName( "org.apache.derby.jdbc.ClientDriver" );
} else if ( "sqlite".equals( driver ) ) {
Class.forName( "org.sqlite.JDBC" );
} else if ( "sqlserver".equals( driver ) ) {
//Class.forName( "com.microsoft.sqlserver.jdbc.SQLServerDriver" );
separator = ";";
databaseName = "databaseName=" + databaseName;
} else if ( "as400".equals( driver ) ) {
Class.forName( "com.ibm.as400.access.AS400JDBCDriver" );
} else {
throw new FaultException( "InvalidDriver", "Uknown driver: " + driver );
}
connectionString = "jdbc:"+ driver + "://" + host + ( port.equals( "" ) ? "" : ":" + port ) + separator + databaseName;
connection = DriverManager.getConnection(
connectionString,
username,
password
);
if ( connection == null ) {
throw new FaultException( "ConnectionError" );
}
} catch( ClassNotFoundException e ) {
throw new FaultException( "InvalidDriver", e );
} catch( SQLException e ) {
throw new FaultException( "ConnectionError", e );
}
return CommMessage.createResponse( message, Value.create() );
}
private void checkConnection()
throws FaultException
{
if ( connection == null ) {
throw new FaultException( "ConnectionError" );
}
if ( mustCheckConnection ) {
try {
if ( !connection.isValid( 0 ) ) {
connection = DriverManager.getConnection(
connectionString,
username,
password
);
}
} catch( SQLException e ) {
throw new FaultException( e );
}
}
}
public CommMessage update( CommMessage request )
throws FaultException
{
checkConnection();
Value resultValue = Value.create();
String query = request.value().strValue();
try {
synchronized( transactionMutex ) {
Statement stm = connection.createStatement();
resultValue.setValue( stm.executeUpdate( query ) );
}
} catch( SQLException e ) {
throw new FaultException( e );
}
return CommMessage.createResponse( request, resultValue );
}
private static void resultSetToValueVector( ResultSet result, ValueVector vector )
throws SQLException
{
Value rowValue, fieldValue;
ResultSetMetaData metadata = result.getMetaData();
int cols = metadata.getColumnCount();
int i;
int rowIndex = 0;
while( result.next() ) {
rowValue = vector.get( rowIndex );
for( i = 1; i <= cols; i++ ) {
fieldValue = rowValue.getFirstChild( metadata.getColumnLabel( i ) );
switch( metadata.getColumnType( i ) ) {
case java.sql.Types.INTEGER:
case java.sql.Types.SMALLINT:
case java.sql.Types.TINYINT:
case java.sql.Types.NUMERIC:
fieldValue.setValue( result.getInt( i ) );
break;
case java.sql.Types.BIGINT:
// TODO: to be changed when getting support for Long in Jolie.
fieldValue.setValue( result.getInt( i ) );
break;
case java.sql.Types.DOUBLE:
fieldValue.setValue( result.getDouble( i ) );
break;
case java.sql.Types.FLOAT:
fieldValue.setValue( result.getFloat( i ) );
break;
case java.sql.Types.BLOB:
//fieldValue.setStrValue( result.getBlob( i ).toString() );
break;
case java.sql.Types.CLOB:
Clob clob = result.getClob( i );
fieldValue.setValue( clob.getSubString( 0L, (int)clob.length() ) );
break;
case java.sql.Types.NVARCHAR:
case java.sql.Types.NCHAR:
case java.sql.Types.LONGNVARCHAR:
fieldValue.setValue( result.getNString( i ) );
break;
case java.sql.Types.VARCHAR:
default:
fieldValue.setValue( result.getString( i ) );
break;
}
}
rowIndex++;
}
}
public CommMessage executeTransaction( CommMessage request )
throws FaultException
{
checkConnection();
Value resultValue = Value.create();
ValueVector resultVector = resultValue.getChildren( "result" );
try {
synchronized( transactionMutex ) {
connection.setAutoCommit( false );
Value currResultValue;
for( Value statementValue : request.value().getChildren( "statement" ) ) {
currResultValue = Value.create();
Statement stm = connection.createStatement();
if ( stm.execute( statementValue.strValue() ) == true ) {
resultSetToValueVector( stm.getResultSet(), currResultValue.getChildren( "row" ) );
}
resultVector.add( currResultValue );
}
connection.commit();
connection.setAutoCommit( true );
}
} catch( SQLException e ) {
throw new FaultException( e );
}
return CommMessage.createResponse( request, resultValue );
}
public CommMessage query( CommMessage request )
throws FaultException
{
checkConnection();
Value resultValue = Value.create();
try {
synchronized( transactionMutex ) {
Statement stm = connection.createStatement();
ResultSet result = stm.executeQuery( request.value().strValue() );
resultSetToValueVector( result, resultValue.getChildren( "row" ) );
}
} catch( SQLException e ) {
throw new FaultException( "SQLException", e );
}
return CommMessage.createResponse( request, resultValue );
}
}
| Handle the NUMERIC data type gracefully.
| jolieJavaServices/src/joliex/db/DatabaseService.java | Handle the NUMERIC data type gracefully. | <ide><path>olieJavaServices/src/joliex/db/DatabaseService.java
<ide>
<ide> package joliex.db;
<ide>
<add>import java.math.BigDecimal;
<ide> import java.sql.Clob;
<ide> import java.sql.Connection;
<ide> import java.sql.DriverManager;
<ide> case java.sql.Types.INTEGER:
<ide> case java.sql.Types.SMALLINT:
<ide> case java.sql.Types.TINYINT:
<del> case java.sql.Types.NUMERIC:
<ide> fieldValue.setValue( result.getInt( i ) );
<ide> break;
<ide> case java.sql.Types.BIGINT:
<ide> case java.sql.Types.NCHAR:
<ide> case java.sql.Types.LONGNVARCHAR:
<ide> fieldValue.setValue( result.getNString( i ) );
<add> break;
<add> case java.sql.Types.NUMERIC:
<add> BigDecimal dec = result.getBigDecimal( i );
<add> if ( dec.scale() <= 0 ) {
<add> // May lose information.
<add> // Pay some attention to this when Long becomes supported by JOLIE.
<add> fieldValue.setValue( dec.intValue() );
<add> } else if ( dec.scale() > 0 ) {
<add> fieldValue.setValue( dec.doubleValue() );
<add> }
<ide> break;
<ide> case java.sql.Types.VARCHAR:
<ide> default: |
|
Java | apache-2.0 | 39072c2d3f59e337e060238bd3fe09b58bffb2cc | 0 | xiaojiaqi/Metamorphosis,fogu/Metamorphosis,ronaldo9grey/Metamorphosis,xiaojiaqi/Metamorphosis,ronaldo9grey/Metamorphosis,ronaldo9grey/Metamorphosis,githubcolin/Metamorphosis,fengshao0907/Metamorphosis,fengshao0907/Metamorphosis,githubcolin/Metamorphosis,xiaojiaqi/Metamorphosis,yuzhu712/Metamorphosis,fogu/Metamorphosis,binarygu/Metamorphosis,fogu/Metamorphosis,272029252/Metamorphosis,fogu/Metamorphosis,yuzhu712/Metamorphosis,fool-persen/Metamorphosis,yuzhu712/Metamorphosis,fengshao0907/Metamorphosis,binarygu/Metamorphosis,yuzhu712/Metamorphosis,IBYoung/Metamorphosis,IBYoung/Metamorphosis,binarygu/Metamorphosis,binarygu/Metamorphosis,githubcolin/Metamorphosis,xiaojiaqi/Metamorphosis,jarvisxiong/Metamorphosis,makemyownlife/Metamorphosis,272029252/Metamorphosis,fool-persen/Metamorphosis,binarygu/Metamorphosis,ronaldo9grey/Metamorphosis,githubcolin/Metamorphosis,killme2008/Metamorphosis,ronaldo9grey/Metamorphosis,fool-persen/Metamorphosis,killme2008/Metamorphosis,fengshao0907/Metamorphosis,fogu/Metamorphosis,killme2008/Metamorphosis,yuzhu712/Metamorphosis,fengshao0907/Metamorphosis,makemyownlife/Metamorphosis,272029252/Metamorphosis,makemyownlife/Metamorphosis,jarvisxiong/Metamorphosis,makemyownlife/Metamorphosis,272029252/Metamorphosis,binarygu/Metamorphosis,killme2008/Metamorphosis,fogu/Metamorphosis,xiaojiaqi/Metamorphosis,fool-persen/Metamorphosis,xiaojiaqi/Metamorphosis,jarvisxiong/Metamorphosis,IBYoung/Metamorphosis,killme2008/Metamorphosis,githubcolin/Metamorphosis,jarvisxiong/Metamorphosis,fool-persen/Metamorphosis,fengshao0907/Metamorphosis,makemyownlife/Metamorphosis,makemyownlife/Metamorphosis,IBYoung/Metamorphosis,killme2008/Metamorphosis,jarvisxiong/Metamorphosis,IBYoung/Metamorphosis,fool-persen/Metamorphosis,IBYoung/Metamorphosis,272029252/Metamorphosis,272029252/Metamorphosis,jarvisxiong/Metamorphosis,githubcolin/Metamorphosis,yuzhu712/Metamorphosis,ronaldo9grey/Metamorphosis | /*
* (C) 2007-2012 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Authors:
* wuhua <[email protected]> , boyan <[email protected]>
*/
package com.taobao.metamorphosis.server.network;
import org.easymock.classextension.EasyMock;
import org.easymock.classextension.IMocksControl;
import com.taobao.gecko.service.Connection;
import com.taobao.metamorphosis.server.BrokerZooKeeper;
import com.taobao.metamorphosis.server.assembly.BrokerCommandProcessor;
import com.taobao.metamorphosis.server.assembly.ExecutorsManager;
import com.taobao.metamorphosis.server.filter.ConsumerFilterManager;
import com.taobao.metamorphosis.server.stats.StatsManager;
import com.taobao.metamorphosis.server.store.MessageStoreManager;
import com.taobao.metamorphosis.server.utils.MetaConfig;
import com.taobao.metamorphosis.utils.IdWorker;
public abstract class BaseProcessorUnitTest {
protected MessageStoreManager storeManager;
protected MetaConfig metaConfig;
protected Connection conn;
protected IMocksControl mocksControl;
protected BrokerCommandProcessor commandProcessor;
protected StatsManager statsManager;
protected IdWorker idWorker;
protected BrokerZooKeeper brokerZooKeeper;
protected ExecutorsManager executorsManager;
protected SessionContext sessionContext;
protected ConsumerFilterManager consumerFilterManager;
protected void mock() {
this.metaConfig = new MetaConfig();
this.mocksControl = EasyMock.createControl();
this.storeManager = this.mocksControl.createMock(MessageStoreManager.class);
this.conn = this.mocksControl.createMock(Connection.class);
try {
this.consumerFilterManager = new ConsumerFilterManager(this.metaConfig);
}
catch (Exception e) {
throw new RuntimeException(e);
}
this.sessionContext = new SessionContextImpl(null, this.conn);
EasyMock.expect(this.conn.getAttribute(SessionContextHolder.GLOBAL_SESSION_KEY)).andReturn(this.sessionContext)
.anyTimes();
this.statsManager = new StatsManager(new MetaConfig(), null, null);
this.idWorker = this.mocksControl.createMock(IdWorker.class);
this.brokerZooKeeper = this.mocksControl.createMock(BrokerZooKeeper.class);
this.executorsManager = this.mocksControl.createMock(ExecutorsManager.class);
this.commandProcessor = new BrokerCommandProcessor();
this.commandProcessor.setMetaConfig(this.metaConfig);
this.commandProcessor.setStoreManager(this.storeManager);
this.commandProcessor.setStatsManager(this.statsManager);
this.commandProcessor.setBrokerZooKeeper(this.brokerZooKeeper);
this.commandProcessor.setIdWorker(this.idWorker);
this.commandProcessor.setExecutorsManager(this.executorsManager);
this.commandProcessor.setConsumerFilterManager(this.consumerFilterManager);
}
} | metamorphosis-server/src/test/java/com/taobao/metamorphosis/server/network/BaseProcessorUnitTest.java | /*
* (C) 2007-2012 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Authors:
* wuhua <[email protected]> , boyan <[email protected]>
*/
package com.taobao.metamorphosis.server.network;
import org.easymock.classextension.EasyMock;
import org.easymock.classextension.IMocksControl;
import com.taobao.gecko.service.Connection;
import com.taobao.metamorphosis.server.BrokerZooKeeper;
import com.taobao.metamorphosis.server.assembly.BrokerCommandProcessor;
import com.taobao.metamorphosis.server.assembly.ExecutorsManager;
import com.taobao.metamorphosis.server.stats.StatsManager;
import com.taobao.metamorphosis.server.store.MessageStoreManager;
import com.taobao.metamorphosis.server.utils.MetaConfig;
import com.taobao.metamorphosis.utils.IdWorker;
public abstract class BaseProcessorUnitTest {
protected MessageStoreManager storeManager;
protected MetaConfig metaConfig;
protected Connection conn;
protected IMocksControl mocksControl;
protected BrokerCommandProcessor commandProcessor;
protected StatsManager statsManager;
protected IdWorker idWorker;
protected BrokerZooKeeper brokerZooKeeper;
protected ExecutorsManager executorsManager;
protected SessionContext sessionContext;
protected void mock() {
this.metaConfig = new MetaConfig();
this.mocksControl = EasyMock.createControl();
this.storeManager = this.mocksControl.createMock(MessageStoreManager.class);
this.conn = this.mocksControl.createMock(Connection.class);
this.sessionContext = new SessionContextImpl(null, this.conn);
EasyMock.expect(this.conn.getAttribute(SessionContextHolder.GLOBAL_SESSION_KEY)).andReturn(this.sessionContext)
.anyTimes();
this.statsManager = new StatsManager(new MetaConfig(), null, null);
this.idWorker = this.mocksControl.createMock(IdWorker.class);
this.brokerZooKeeper = this.mocksControl.createMock(BrokerZooKeeper.class);
this.executorsManager = this.mocksControl.createMock(ExecutorsManager.class);
this.commandProcessor = new BrokerCommandProcessor();
this.commandProcessor.setMetaConfig(this.metaConfig);
this.commandProcessor.setStoreManager(this.storeManager);
this.commandProcessor.setStatsManager(this.statsManager);
this.commandProcessor.setBrokerZooKeeper(this.brokerZooKeeper);
this.commandProcessor.setIdWorker(this.idWorker);
this.commandProcessor.setExecutorsManager(this.executorsManager);
}
} | Fixed NPE
| metamorphosis-server/src/test/java/com/taobao/metamorphosis/server/network/BaseProcessorUnitTest.java | Fixed NPE | <ide><path>etamorphosis-server/src/test/java/com/taobao/metamorphosis/server/network/BaseProcessorUnitTest.java
<ide> import com.taobao.metamorphosis.server.BrokerZooKeeper;
<ide> import com.taobao.metamorphosis.server.assembly.BrokerCommandProcessor;
<ide> import com.taobao.metamorphosis.server.assembly.ExecutorsManager;
<add>import com.taobao.metamorphosis.server.filter.ConsumerFilterManager;
<ide> import com.taobao.metamorphosis.server.stats.StatsManager;
<ide> import com.taobao.metamorphosis.server.store.MessageStoreManager;
<ide> import com.taobao.metamorphosis.server.utils.MetaConfig;
<ide> protected BrokerZooKeeper brokerZooKeeper;
<ide> protected ExecutorsManager executorsManager;
<ide> protected SessionContext sessionContext;
<add> protected ConsumerFilterManager consumerFilterManager;
<ide>
<ide>
<ide> protected void mock() {
<ide> this.mocksControl = EasyMock.createControl();
<ide> this.storeManager = this.mocksControl.createMock(MessageStoreManager.class);
<ide> this.conn = this.mocksControl.createMock(Connection.class);
<add> try {
<add> this.consumerFilterManager = new ConsumerFilterManager(this.metaConfig);
<add> }
<add> catch (Exception e) {
<add> throw new RuntimeException(e);
<add> }
<ide> this.sessionContext = new SessionContextImpl(null, this.conn);
<ide> EasyMock.expect(this.conn.getAttribute(SessionContextHolder.GLOBAL_SESSION_KEY)).andReturn(this.sessionContext)
<del> .anyTimes();
<add> .anyTimes();
<ide> this.statsManager = new StatsManager(new MetaConfig(), null, null);
<ide> this.idWorker = this.mocksControl.createMock(IdWorker.class);
<ide> this.brokerZooKeeper = this.mocksControl.createMock(BrokerZooKeeper.class);
<ide> this.commandProcessor.setBrokerZooKeeper(this.brokerZooKeeper);
<ide> this.commandProcessor.setIdWorker(this.idWorker);
<ide> this.commandProcessor.setExecutorsManager(this.executorsManager);
<add> this.commandProcessor.setConsumerFilterManager(this.consumerFilterManager);
<ide> }
<ide>
<ide> } |
|
Java | apache-2.0 | bbe297121d127ef14863d8763d20fcedf198904d | 0 | ultratendency/hbase,ndimiduk/hbase,mapr/hbase,ndimiduk/hbase,toshimasa-nasu/hbase,gustavoanatoly/hbase,ChinmaySKulkarni/hbase,vincentpoon/hbase,lshmouse/hbase,ChinmaySKulkarni/hbase,Eshcar/hbase,mapr/hbase,Apache9/hbase,joshelser/hbase,Guavus/hbase,ibmsoe/hbase,toshimasa-nasu/hbase,intel-hadoop/hbase-rhino,justintung/hbase,HubSpot/hbase,amyvmiwei/hbase,SeekerResource/hbase,ndimiduk/hbase,ChinmaySKulkarni/hbase,lshmouse/hbase,andrewmains12/hbase,SeekerResource/hbase,drewpope/hbase,Eshcar/hbase,juwi/hbase,justintung/hbase,vincentpoon/hbase,HubSpot/hbase,francisliu/hbase,vincentpoon/hbase,SeekerResource/hbase,joshelser/hbase,andrewmains12/hbase,Guavus/hbase,gustavoanatoly/hbase,StackVista/hbase,Apache9/hbase,francisliu/hbase,HubSpot/hbase,Guavus/hbase,ultratendency/hbase,francisliu/hbase,drewpope/hbase,joshelser/hbase,ultratendency/hbase,StackVista/hbase,StackVista/hbase,vincentpoon/hbase,apurtell/hbase,lshmouse/hbase,narendragoyal/hbase,SeekerResource/hbase,Apache9/hbase,drewpope/hbase,ultratendency/hbase,gustavoanatoly/hbase,mahak/hbase,ibmsoe/hbase,gustavoanatoly/hbase,JingchengDu/hbase,Eshcar/hbase,ndimiduk/hbase,Eshcar/hbase,vincentpoon/hbase,mahak/hbase,juwi/hbase,justintung/hbase,apurtell/hbase,Apache9/hbase,drewpope/hbase,drewpope/hbase,narendragoyal/hbase,joshelser/hbase,Apache9/hbase,ibmsoe/hbase,SeekerResource/hbase,lshmouse/hbase,toshimasa-nasu/hbase,francisliu/hbase,SeekerResource/hbase,francisliu/hbase,JingchengDu/hbase,apurtell/hbase,amyvmiwei/hbase,amyvmiwei/hbase,toshimasa-nasu/hbase,apurtell/hbase,ndimiduk/hbase,intel-hadoop/hbase-rhino,andrewmains12/hbase,mahak/hbase,ibmsoe/hbase,ultratendency/hbase,narendragoyal/hbase,StackVista/hbase,mapr/hbase,andrewmains12/hbase,ndimiduk/hbase,ibmsoe/hbase,joshelser/hbase,justintung/hbase,ultratendency/hbase,bijugs/hbase,Apache9/hbase,francisliu/hbase,mahak/hbase,HubSpot/hbase,gustavoanatoly/hbase,ibmsoe/hbase,toshimasa-nasu/hbase,bijugs/hbase,andrewmains12/hbase,juwi/hbase,ibmsoe/hbase,ndimiduk/hbase,ultratendency/hbase,intel-hadoop/hbase-rhino,francisliu/hbase,intel-hadoop/hbase-rhino,mapr/hbase,bijugs/hbase,ndimiduk/hbase,mapr/hbase,vincentpoon/hbase,andrewmains12/hbase,Guavus/hbase,gustavoanatoly/hbase,intel-hadoop/hbase-rhino,narendragoyal/hbase,intel-hadoop/hbase-rhino,SeekerResource/hbase,StackVista/hbase,HubSpot/hbase,bijugs/hbase,gustavoanatoly/hbase,amyvmiwei/hbase,toshimasa-nasu/hbase,amyvmiwei/hbase,ChinmaySKulkarni/hbase,HubSpot/hbase,andrewmains12/hbase,JingchengDu/hbase,justintung/hbase,lshmouse/hbase,drewpope/hbase,justintung/hbase,apurtell/hbase,justintung/hbase,ultratendency/hbase,narendragoyal/hbase,ChinmaySKulkarni/hbase,Eshcar/hbase,apurtell/hbase,gustavoanatoly/hbase,StackVista/hbase,lshmouse/hbase,HubSpot/hbase,apurtell/hbase,ChinmaySKulkarni/hbase,Eshcar/hbase,Guavus/hbase,amyvmiwei/hbase,ChinmaySKulkarni/hbase,narendragoyal/hbase,JingchengDu/hbase,SeekerResource/hbase,Apache9/hbase,joshelser/hbase,JingchengDu/hbase,Guavus/hbase,ibmsoe/hbase,juwi/hbase,mapr/hbase,Guavus/hbase,francisliu/hbase,Apache9/hbase,vincentpoon/hbase,bijugs/hbase,joshelser/hbase,apurtell/hbase,mahak/hbase,SeekerResource/hbase,StackVista/hbase,intel-hadoop/hbase-rhino,bijugs/hbase,StackVista/hbase,JingchengDu/hbase,joshelser/hbase,lshmouse/hbase,ndimiduk/hbase,amyvmiwei/hbase,andrewmains12/hbase,HubSpot/hbase,bijugs/hbase,JingchengDu/hbase,amyvmiwei/hbase,ChinmaySKulkarni/hbase,mapr/hbase,justintung/hbase,ultratendency/hbase,mahak/hbase,Guavus/hbase,bijugs/hbase,apurtell/hbas
e,mahak/hbase,toshimasa-nasu/hbase,joshelser/hbase,ibmsoe/hbase,francisliu/hbase,narendragoyal/hbase,juwi/hbase,Eshcar/hbase,juwi/hbase,intel-hadoop/hbase-rhino,ultratendency/hbase,narendragoyal/hbase,francisliu/hbase,justintung/hbase,andrewmains12/hbase,narendragoyal/hbase,Apache9/hbase,ndimiduk/hbase,gustavoanatoly/hbase,lshmouse/hbase,bijugs/hbase,apurtell/hbase,HubSpot/hbase,Eshcar/hbase,drewpope/hbase,juwi/hbase,drewpope/hbase,ChinmaySKulkarni/hbase,drewpope/hbase,StackVista/hbase,andrewmains12/hbase,gustavoanatoly/hbase,Guavus/hbase,juwi/hbase,ibmsoe/hbase,mapr/hbase,vincentpoon/hbase,HubSpot/hbase,intel-hadoop/hbase-rhino,Eshcar/hbase,intel-hadoop/hbase-rhino,StackVista/hbase,joshelser/hbase,lshmouse/hbase,vincentpoon/hbase,vincentpoon/hbase,justintung/hbase,amyvmiwei/hbase,SeekerResource/hbase,ChinmaySKulkarni/hbase,JingchengDu/hbase,mahak/hbase,JingchengDu/hbase,JingchengDu/hbase,juwi/hbase,mahak/hbase,toshimasa-nasu/hbase,lshmouse/hbase,mapr/hbase,Apache9/hbase,Guavus/hbase,bijugs/hbase,toshimasa-nasu/hbase,narendragoyal/hbase,Eshcar/hbase,amyvmiwei/hbase,mahak/hbase | /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.io.StringWriter;
import java.lang.Thread.UncaughtExceptionHandler;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.management.ObjectName;
import org.apache.commons.lang.mutable.MutableDouble;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.ClockOutOfSyncException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionMovedException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.UnknownRowLockException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.YouAreDeadException;
import org.apache.hadoop.hbase.ZNodeClearer;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.catalog.MetaEditor;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.AdminProtocol;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.ClientProtocol;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.MultiAction;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.Exec;
import org.apache.hadoop.hbase.client.coprocessor.ExecResult;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.ipc.HBaseRPC;
import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
import org.apache.hadoop.hbase.ipc.HBaseRpcMetrics;
import org.apache.hadoop.hbase.ipc.Invocation;
import org.apache.hadoop.hbase.ipc.ProtocolSignature;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody;
import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler;
import org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler;
import org.apache.hadoop.hbase.regionserver.handler.CloseRootHandler;
import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;
import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
import org.apache.hadoop.hbase.regionserver.handler.OpenRootHandler;
import org.apache.hadoop.hbase.regionserver.metrics.RegionMetricsStorage;
import org.apache.hadoop.hbase.regionserver.metrics.RegionServerDynamicMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.RegionServerMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.StoreMetricType;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.InfoServer;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Sleeper;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.hbase.zookeeper.ClusterStatusTracker;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
import org.apache.hadoop.hbase.zookeeper.RootRegionTracker;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperNodeTracker;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.metrics.util.MBeanUtil;
import org.apache.hadoop.net.DNS;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.zookeeper.KeeperException;
import org.codehaus.jackson.map.ObjectMapper;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;
import org.apache.hadoop.hbase.RegionServerStatusProtocol;
import com.google.common.base.Function;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.RpcController;
/**
* HRegionServer makes a set of HRegions available to clients. It checks in with
* the HMaster. There are many HRegionServers in a single HBase deployment.
*/
@InterfaceAudience.Private
@SuppressWarnings("deprecation")
public class HRegionServer implements ClientProtocol,
AdminProtocol, Runnable, RegionServerServices, HBaseRPCErrorHandler {
public static final Log LOG = LogFactory.getLog(HRegionServer.class);
private final Random rand = new Random();
/*
* Strings to be used in forming the exception message for
* RegionsAlreadyInTransitionException.
*/
protected static final String OPEN = "OPEN";
protected static final String CLOSE = "CLOSE";
//RegionName vs current action in progress
//true - if open region action in progress
//false - if close region action in progress
protected final ConcurrentSkipListMap<byte[], Boolean> regionsInTransitionInRS =
new ConcurrentSkipListMap<byte[], Boolean>(Bytes.BYTES_COMPARATOR);
protected long maxScannerResultSize;
// Cache flushing
protected MemStoreFlusher cacheFlusher;
// catalog tracker
protected CatalogTracker catalogTracker;
/**
* Go here to get table descriptors.
*/
protected TableDescriptors tableDescriptors;
// Replication services. If no replication, this handler will be null.
protected ReplicationSourceService replicationSourceHandler;
protected ReplicationSinkService replicationSinkHandler;
// Compactions
public CompactSplitThread compactSplitThread;
final Map<String, RegionScanner> scanners =
new ConcurrentHashMap<String, RegionScanner>();
/**
* Map of regions currently being served by this region server. Key is the
* encoded region name. All access should be synchronized.
*/
protected final Map<String, HRegion> onlineRegions =
new ConcurrentHashMap<String, HRegion>();
// Leases
protected Leases leases;
// Instance of the hbase executor service.
protected ExecutorService service;
// Request counter.
// Do we need this? Can't we just sum region counters? St.Ack 20110412
protected AtomicInteger requestCount = new AtomicInteger();
// If false, the file system has become unavailable
protected volatile boolean fsOk;
protected HFileSystem fs;
protected static final int NORMAL_QOS = 0;
protected static final int QOS_THRESHOLD = 10; // the line between low and high qos
protected static final int HIGH_QOS = 100;
// Set when a report to the master comes back with a message asking us to
// shutdown. Also set by call to stop when debugging or running unit tests
// of HRegionServer in isolation.
protected volatile boolean stopped = false;
// Go down hard. Used if file system becomes unavailable and also in
// debugging and unit tests.
protected volatile boolean abortRequested;
// Port we put up the webui on.
protected int webuiport = -1;
ConcurrentMap<String, Integer> rowlocks = new ConcurrentHashMap<String, Integer>();
// A state before we go into stopped state. At this stage we're closing user
// space regions.
private boolean stopping = false;
private volatile boolean killed = false;
protected final Configuration conf;
protected final AtomicBoolean haveRootRegion = new AtomicBoolean(false);
private boolean useHBaseChecksum; // verify hbase checksums?
private Path rootDir;
protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
final int numRetries;
protected final int threadWakeFrequency;
private final int msgInterval;
protected final int numRegionsToReport;
// Remote HMaster
private RegionServerStatusProtocol hbaseMaster;
// Server to handle client requests. Default access so can be accessed by
// unit tests.
RpcServer rpcServer;
private final InetSocketAddress isa;
// Info server. Default access so can be used by unit tests. REGIONSERVER
// is name of the webapp and the attribute name used stuffing this instance
// into web context.
InfoServer infoServer;
/** region server process name */
public static final String REGIONSERVER = "regionserver";
/** region server configuration name */
public static final String REGIONSERVER_CONF = "regionserver_conf";
/*
* Space is reserved in HRS constructor and then released when aborting to
* recover from an OOME. See HBASE-706. TODO: Make this percentage of the heap
* or a minimum.
*/
private final LinkedList<byte[]> reservedSpace = new LinkedList<byte[]>();
private RegionServerMetrics metrics;
private RegionServerDynamicMetrics dynamicMetrics;
/*
* Check for compactions requests.
*/
Chore compactionChecker;
// HLog and HLog roller. log is protected rather than private to avoid
// eclipse warning when accessed by inner classes
protected volatile HLog hlog;
LogRoller hlogRoller;
// flag set after we're done setting up server threads (used for testing)
protected volatile boolean isOnline;
// zookeeper connection and watcher
private ZooKeeperWatcher zooKeeper;
// master address manager and watcher
private MasterAddressTracker masterAddressManager;
// Cluster Status Tracker
private ClusterStatusTracker clusterStatusTracker;
// Log Splitting Worker
private SplitLogWorker splitLogWorker;
// A sleeper that sleeps for msgInterval.
private final Sleeper sleeper;
private final int rpcTimeout;
private final RegionServerAccounting regionServerAccounting;
// Cache configuration and block cache reference
private final CacheConfig cacheConfig;
// reference to the Thrift Server.
volatile private HRegionThriftServer thriftServer;
/**
* The server name the Master sees us as. Its made from the hostname the
* master passes us, port, and server startcode. Gets set after registration
* against Master. The hostname can differ from the hostname in {@link #isa}
* but usually doesn't if both servers resolve .
*/
private ServerName serverNameFromMasterPOV;
/**
* This servers startcode.
*/
private final long startcode;
/**
* MX Bean for RegionServerInfo
*/
private ObjectName mxBean = null;
/**
* Chore to clean periodically the moved region list
*/
private MovedRegionsCleaner movedRegionsCleaner;
/**
* The lease timeout period for row locks (milliseconds).
*/
private final int rowLockLeaseTimeoutPeriod;
/**
* The lease timeout period for client scanners (milliseconds).
*/
private final int scannerLeaseTimeoutPeriod;
/**
* The reference to the QosFunction
*/
private final QosFunction qosFunction;
/**
* Starts a HRegionServer at the default location
*
* @param conf
* @throws IOException
* @throws InterruptedException
*/
public HRegionServer(Configuration conf)
throws IOException, InterruptedException {
this.fsOk = true;
this.conf = conf;
// Set how many times to retry talking to another server over HConnection.
HConnectionManager.setServerSideHConnectionRetries(this.conf, LOG);
this.isOnline = false;
checkCodecs(this.conf);
// do we use checksum verfication in the hbase? If hbase checksum verification
// is enabled, then we automatically switch off hdfs checksum verification.
this.useHBaseChecksum = conf.getBoolean(
HConstants.HBASE_CHECKSUM_VERIFICATION, true);
// Config'ed params
this.numRetries = conf.getInt("hbase.client.retries.number", 10);
this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY,
10 * 1000);
this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);
this.sleeper = new Sleeper(this.msgInterval, this);
this.maxScannerResultSize = conf.getLong(
HConstants.HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE_KEY,
HConstants.DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE);
this.numRegionsToReport = conf.getInt(
"hbase.regionserver.numregionstoreport", 10);
this.rpcTimeout = conf.getInt(
HConstants.HBASE_RPC_TIMEOUT_KEY,
HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
this.abortRequested = false;
this.stopped = false;
this.rowLockLeaseTimeoutPeriod = conf.getInt(
HConstants.HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD,
HConstants.DEFAULT_HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD);
this.scannerLeaseTimeoutPeriod = conf.getInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD,
HConstants.DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
// Server to handle client requests.
String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
conf.get("hbase.regionserver.dns.interface", "default"),
conf.get("hbase.regionserver.dns.nameserver", "default")));
int port = conf.getInt(HConstants.REGIONSERVER_PORT,
HConstants.DEFAULT_REGIONSERVER_PORT);
// Creation of a HSA will force a resolve.
InetSocketAddress initialIsa = new InetSocketAddress(hostname, port);
if (initialIsa.getAddress() == null) {
throw new IllegalArgumentException("Failed resolve of " + initialIsa);
}
this.rpcServer = HBaseRPC.getServer(AdminProtocol.class, this,
new Class<?>[]{ClientProtocol.class,
AdminProtocol.class, HBaseRPCErrorHandler.class,
OnlineRegions.class},
initialIsa.getHostName(), // BindAddress is IP we got for this server.
initialIsa.getPort(),
conf.getInt("hbase.regionserver.handler.count", 10),
conf.getInt("hbase.regionserver.metahandler.count", 10),
conf.getBoolean("hbase.rpc.verbose", false),
conf, QOS_THRESHOLD);
// Set our address.
this.isa = this.rpcServer.getListenerAddress();
this.rpcServer.setErrorHandler(this);
this.rpcServer.setQosFunction((qosFunction = new QosFunction()));
this.startcode = System.currentTimeMillis();
// login the server principal (if using secure Hadoop)
User.login(this.conf, "hbase.regionserver.keytab.file",
"hbase.regionserver.kerberos.principal", this.isa.getHostName());
regionServerAccounting = new RegionServerAccounting();
cacheConfig = new CacheConfig(conf);
}
/**
* Run test on configured codecs to make sure supporting libs are in place.
* @param c
* @throws IOException
*/
private static void checkCodecs(final Configuration c) throws IOException {
// check to see if the codec list is available:
String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);
if (codecs == null) return;
for (String codec : codecs) {
if (!CompressionTest.testCompression(codec)) {
throw new IOException("Compression codec " + codec +
" not supported, aborting RS construction");
}
}
}
@Retention(RetentionPolicy.RUNTIME)
protected @interface QosPriority {
int priority() default 0;
}
QosFunction getQosFunction() {
return qosFunction;
}
RegionScanner getScanner(long scannerId) {
String scannerIdString = Long.toString(scannerId);
return scanners.get(scannerIdString);
}
/**
* Utility used ensuring higher quality of service for priority rpcs; e.g.
* rpcs to .META. and -ROOT-, etc.
*/
class QosFunction implements Function<RpcRequestBody,Integer> {
private final Map<String, Integer> annotatedQos;
//We need to mock the regionserver instance for some unit tests (set via
//setRegionServer method.
//The field value is initially set to the enclosing instance of HRegionServer.
private HRegionServer hRegionServer = HRegionServer.this;
//The logic for figuring out high priority RPCs is as follows:
//1. if the method is annotated with a QosPriority of QOS_HIGH,
// that is honored
//2. parse out the protobuf message and see if the request is for meta
// region, and if so, treat it as a high priority RPC
//Some optimizations for (2) are done here -
//Clients send the argument classname as part of making the RPC. The server
//decides whether to deserialize the proto argument message based on the
//pre-established set of argument classes (knownArgumentClasses below).
//This prevents the server from having to deserialize all proto argument
//messages prematurely.
//All the argument classes declare a 'getRegion' method that returns a
//RegionSpecifier object. Methods can be invoked on the returned object
//to figure out whether it is a meta region or not.
@SuppressWarnings("unchecked")
private final Class<? extends Message>[] knownArgumentClasses = new Class[]{
GetRegionInfoRequest.class,
GetStoreFileRequest.class,
CloseRegionRequest.class,
FlushRegionRequest.class,
SplitRegionRequest.class,
CompactRegionRequest.class,
GetRequest.class,
MutateRequest.class,
ScanRequest.class,
LockRowRequest.class,
UnlockRowRequest.class,
MultiRequest.class
};
//Some caches for helping performance
private final Map<String, Class<? extends Message>> argumentToClassMap =
new HashMap<String, Class<? extends Message>>();
private final Map<String, Map<Class<? extends Message>, Method>>
methodMap = new HashMap<String, Map<Class<? extends Message>, Method>>();
public QosFunction() {
Map<String, Integer> qosMap = new HashMap<String, Integer>();
for (Method m : HRegionServer.class.getMethods()) {
QosPriority p = m.getAnnotation(QosPriority.class);
if (p != null) {
qosMap.put(m.getName(), p.priority());
}
}
annotatedQos = qosMap;
if (methodMap.get("parseFrom") == null) {
methodMap.put("parseFrom",
new HashMap<Class<? extends Message>, Method>());
}
if (methodMap.get("getRegion") == null) {
methodMap.put("getRegion",
new HashMap<Class<? extends Message>, Method>());
}
for (Class<? extends Message> cls : knownArgumentClasses) {
argumentToClassMap.put(cls.getCanonicalName(), cls);
try {
methodMap.get("parseFrom").put(cls,
cls.getDeclaredMethod("parseFrom",ByteString.class));
methodMap.get("getRegion").put(cls, cls.getDeclaredMethod("getRegion"));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
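// The reflection caches populated above let apply() invoke
// <ArgClass>.parseFrom(ByteString) and getRegion() on each request without
// re-resolving the Method objects every time.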
void setRegionServer(HRegionServer server) {
this.hRegionServer = server;
}
public boolean isMetaRegion(byte[] regionName) {
HRegion region;
try {
region = hRegionServer.getRegion(regionName);
} catch (NotServingRegionException ignored) {
return false;
}
return region.getRegionInfo().isMetaRegion();
}
@Override
public Integer apply(RpcRequestBody from) {
String methodName = from.getMethodName();
Class<? extends Message> rpcArgClass = null;
if (from.hasRequestClassName()) {
String cls = from.getRequestClassName();
rpcArgClass = argumentToClassMap.get(cls);
}
Integer priorityByAnnotation = annotatedQos.get(methodName);
if (priorityByAnnotation != null) {
return priorityByAnnotation;
}
if (rpcArgClass == null || from.getRequest().isEmpty()) {
return NORMAL_QOS;
}
Object deserializedRequestObj = null;
//check whether the request has reference to Meta region
try {
Method parseFrom = methodMap.get("parseFrom").get(rpcArgClass);
deserializedRequestObj = parseFrom.invoke(null, from.getRequest());
Method getRegion = methodMap.get("getRegion").get(rpcArgClass);
RegionSpecifier regionSpecifier =
(RegionSpecifier)getRegion.invoke(deserializedRequestObj,
(Object[])null);
HRegion region = hRegionServer.getRegion(regionSpecifier);
if (region.getRegionInfo().isMetaRegion()) {
if (LOG.isDebugEnabled()) {
LOG.debug("High priority: " + from.toString());
}
return HIGH_QOS;
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
if (methodName.equals("scan")) { // scanner methods...
ScanRequest request = (ScanRequest)deserializedRequestObj;
if (!request.hasScannerId()) {
return NORMAL_QOS;
}
RegionScanner scanner = hRegionServer.getScanner(request.getScannerId());
if (scanner != null && scanner.getRegionInfo().isMetaRegion()) {
if (LOG.isDebugEnabled()) {
LOG.debug("High priority scanner request: " + request.getScannerId());
}
return HIGH_QOS;
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Low priority: " + from.toString());
}
return NORMAL_QOS;
}
}
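// QosFunction is wired into the RPC server via setQosFunction() in the
// constructor above; the server calls apply() on each request body to pick a
// priority: annotated value first, then meta-region inspection, else NORMAL_QOS.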
/**
* All initialization needed before we go register with Master.
*
* @throws IOException
* @throws InterruptedException
*/
private void preRegistrationInitialization(){
try {
initializeZooKeeper();
initializeThreads();
int nbBlocks = conf.getInt("hbase.regionserver.nbreservationblocks", 4);
for (int i = 0; i < nbBlocks; i++) {
reservedSpace.add(new byte[HConstants.DEFAULT_SIZE_RESERVATION_BLOCK]);
}
} catch (Throwable t) {
// Call stop on error, or the process will stick around forever since the
// server puts up non-daemon threads.
this.rpcServer.stop();
abort("Initialization of RS failed. Hence aborting RS.", t);
}
}
/**
* Bring up the connection to the zk ensemble, then wait until a master is
* available for this cluster, and after that wait until the cluster 'up'
* flag has been set. This is the order in which the master does things.
* Finally, put up a catalog tracker.
* @throws IOException
* @throws InterruptedException
*/
private void initializeZooKeeper() throws IOException, InterruptedException {
// Open connection to zookeeper and set primary watcher
this.zooKeeper = new ZooKeeperWatcher(conf, REGIONSERVER + ":" +
this.isa.getPort(), this);
// Create the master address manager, register with zk, and start it. Then
// block until a master is available. No point in starting up if no master
// running.
this.masterAddressManager = new MasterAddressTracker(this.zooKeeper, this);
this.masterAddressManager.start();
blockAndCheckIfStopped(this.masterAddressManager);
// Wait on cluster being up. Master will set this flag up in zookeeper
// when ready.
this.clusterStatusTracker = new ClusterStatusTracker(this.zooKeeper, this);
this.clusterStatusTracker.start();
blockAndCheckIfStopped(this.clusterStatusTracker);
// Create the catalog tracker and start it;
this.catalogTracker = new CatalogTracker(this.zooKeeper, this.conf,
this, this.conf.getInt("hbase.regionserver.catalog.timeout", Integer.MAX_VALUE));
catalogTracker.start();
}
/**
* Utility method to wait indefinitely on a znode's availability while checking
* if the region server is shut down
* @param tracker znode tracker to use
* @throws IOException any IO exception, plus if the RS is stopped
* @throws InterruptedException
*/
private void blockAndCheckIfStopped(ZooKeeperNodeTracker tracker)
throws IOException, InterruptedException {
while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {
if (this.stopped) {
throw new IOException("Received the shutdown message while waiting.");
}
}
}
/**
* @return False if cluster shutdown in progress
*/
private boolean isClusterUp() {
return this.clusterStatusTracker.isClusterUp();
}
private void initializeThreads() throws IOException {
// Cache flushing thread.
this.cacheFlusher = new MemStoreFlusher(conf, this);
// Compaction thread
this.compactSplitThread = new CompactSplitThread(this);
// Background thread to check for compactions; needed if region
// has not gotten updates in a while. Make it run at a lesser frequency.
int multiplier = this.conf.getInt(HConstants.THREAD_WAKE_FREQUENCY +
".multiplier", 1000);
this.compactionChecker = new CompactionChecker(this,
this.threadWakeFrequency * multiplier, this);
this.leases = new Leases(this.threadWakeFrequency);
// Create the thread for the ThriftServer.
if (conf.getBoolean("hbase.regionserver.export.thrift", false)) {
thriftServer = new HRegionThriftServer(this, conf);
thriftServer.start();
LOG.info("Started Thrift API from Region Server.");
}
// Create the thread to clean the moved regions list
movedRegionsCleaner = MovedRegionsCleaner.createAndStart(this);
}
/**
* The HRegionServer sticks in this loop until closed.
*/
public void run() {
try {
// Do pre-registration initializations; zookeeper, lease threads, etc.
preRegistrationInitialization();
} catch (Throwable e) {
abort("Fatal exception during initialization", e);
}
try {
// Try and register with the Master; tell it we are here. Break if
// server is stopped or the clusterup flag is down or hdfs went wacky.
while (keepLooping()) {
RegionServerStartupResponse w = reportForDuty();
if (w == null) {
LOG.warn("reportForDuty failed; sleeping and then retrying.");
this.sleeper.sleep();
} else {
handleReportForDutyResponse(w);
break;
}
}
registerMBean();
// We registered with the Master. Go into run mode.
long lastMsg = 0;
long oldRequestCount = -1;
// The main run loop.
while (!this.stopped && isHealthy()) {
if (!isClusterUp()) {
if (isOnlineRegionsEmpty()) {
stop("Exiting; cluster shutdown set and not carrying any regions");
} else if (!this.stopping) {
this.stopping = true;
LOG.info("Closing user regions");
closeUserRegions(this.abortRequested);
} else if (this.stopping) {
boolean allUserRegionsOffline = areAllUserRegionsOffline();
if (allUserRegionsOffline) {
// Set stopped if no requests since last time we went around the loop.
// The remaining meta regions will be closed on our way out.
if (oldRequestCount == this.requestCount.get()) {
stop("Stopped; only catalog regions remaining online");
break;
}
oldRequestCount = this.requestCount.get();
} else {
// Make sure all regions have been closed -- some regions may
// have not got it because we were splitting at the time of
// the call to closeUserRegions.
closeUserRegions(this.abortRequested);
}
LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());
}
}
long now = System.currentTimeMillis();
if ((now - lastMsg) >= msgInterval) {
doMetrics();
tryRegionServerReport(lastMsg, now);
lastMsg = System.currentTimeMillis();
}
if (!this.stopped) this.sleeper.sleep();
} // while
} catch (Throwable t) {
if (!checkOOME(t)) {
abort("Unhandled exception: " + t.getMessage(), t);
}
}
// Run shutdown.
if (mxBean != null) {
MBeanUtil.unregisterMBean(mxBean);
mxBean = null;
}
if (this.thriftServer != null) this.thriftServer.shutdown();
this.leases.closeAfterLeasesExpire();
this.rpcServer.stop();
if (this.splitLogWorker != null) {
splitLogWorker.stop();
}
if (this.infoServer != null) {
LOG.info("Stopping infoServer");
try {
this.infoServer.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
// Send cache a shutdown.
if (cacheConfig.isBlockCacheEnabled()) {
cacheConfig.getBlockCache().shutdown();
}
movedRegionsCleaner.stop("Region Server stopping");
// Send interrupts to wake up threads if sleeping so they notice shutdown.
// TODO: Should we check they are alive? If OOME could have exited already
if (this.cacheFlusher != null) this.cacheFlusher.interruptIfNecessary();
if (this.compactSplitThread != null) this.compactSplitThread.interruptIfNecessary();
if (this.hlogRoller != null) this.hlogRoller.interruptIfNecessary();
if (this.compactionChecker != null)
this.compactionChecker.interrupt();
if (this.killed) {
// Just skip out w/o closing regions. Used when testing.
} else if (abortRequested) {
if (this.fsOk) {
closeAllRegions(abortRequested); // Don't leave any open file handles
}
LOG.info("aborting server " + this.serverNameFromMasterPOV);
} else {
closeAllRegions(abortRequested);
closeAllScanners();
LOG.info("stopping server " + this.serverNameFromMasterPOV);
}
// Interrupt catalog tracker here in case any regions being opened out in
// handlers are stuck waiting on meta or root.
if (this.catalogTracker != null) this.catalogTracker.stop();
if (!this.killed && this.fsOk) {
waitOnAllRegionsToClose(abortRequested);
LOG.info("stopping server " + this.serverNameFromMasterPOV +
"; all regions closed.");
}
//fsOk flag may be changed when closing regions throws exception.
if (!this.killed && this.fsOk) {
closeWAL(abortRequested ? false : true);
}
// Make sure the proxy is down.
if (this.hbaseMaster != null) {
HBaseRPC.stopProxy(this.hbaseMaster);
this.hbaseMaster = null;
}
this.leases.close();
if (!killed) {
join();
}
try {
deleteMyEphemeralNode();
} catch (KeeperException e) {
LOG.warn("Failed deleting my ephemeral node", e);
}
// We may have failed to delete the znode at the previous step, but
// we delete the file anyway: a second attempt to delete the znode is likely to fail again.
ZNodeClearer.deleteMyEphemeralNodeOnDisk();
this.zooKeeper.close();
LOG.info("stopping server " + this.serverNameFromMasterPOV +
"; zookeeper connection closed.");
LOG.info(Thread.currentThread().getName() + " exiting");
}
private boolean areAllUserRegionsOffline() {
if (getNumberOfOnlineRegions() > 2) return false;
boolean allUserRegionsOffline = true;
for (Map.Entry<String, HRegion> e: this.onlineRegions.entrySet()) {
if (!e.getValue().getRegionInfo().isMetaRegion()) {
allUserRegionsOffline = false;
break;
}
}
return allUserRegionsOffline;
}
void tryRegionServerReport(long reportStartTime, long reportEndTime)
throws IOException {
HBaseProtos.ServerLoad sl = buildServerLoad(reportStartTime, reportEndTime);
// Reset the request count so the total reported above covers only the
// interval since the last report.
this.requestCount.set(0);
try {
RegionServerReportRequest.Builder request = RegionServerReportRequest.newBuilder();
ServerName sn = ServerName.parseVersionedServerName(
this.serverNameFromMasterPOV.getVersionedBytes());
request.setServer(ProtobufUtil.toServerName(sn));
request.setLoad(sl);
this.hbaseMaster.regionServerReport(null, request.build());
} catch (ServiceException se) {
IOException ioe = ProtobufUtil.getRemoteException(se);
if (ioe instanceof YouAreDeadException) {
// This will be caught and handled as a fatal error in run()
throw ioe;
}
// Couldn't connect to the master, get location from zk and reconnect
// Method blocks until new master is found or we are stopped
getMaster();
}
}
HBaseProtos.ServerLoad buildServerLoad(long reportStartTime, long reportEndTime) {
Collection<HRegion> regions = getOnlineRegionsLocalContext();
MemoryUsage memory =
ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
HBaseProtos.ServerLoad.Builder serverLoad = HBaseProtos.ServerLoad.newBuilder();
serverLoad.setNumberOfRequests((int)metrics.getRequests());
serverLoad.setTotalNumberOfRequests(requestCount.get());
serverLoad.setUsedHeapMB((int)(memory.getUsed() / 1024 / 1024));
serverLoad.setMaxHeapMB((int) (memory.getMax() / 1024 / 1024));
Set<String> coprocessors = this.hlog.getCoprocessorHost().getCoprocessors();
for (String coprocessor : coprocessors) {
serverLoad.addCoprocessors(
Coprocessor.newBuilder().setName(coprocessor).build());
}
for (HRegion region : regions) {
serverLoad.addRegionLoads(createRegionLoad(region));
}
serverLoad.setReportStartTime(reportStartTime);
serverLoad.setReportEndTime(reportEndTime);
return serverLoad.build();
}
String getOnlineRegionsAsPrintableString() {
StringBuilder sb = new StringBuilder();
for (HRegion r: this.onlineRegions.values()) {
if (sb.length() > 0) sb.append(", ");
sb.append(r.getRegionInfo().getEncodedName());
}
return sb.toString();
}
/**
* Wait on regions close.
*/
private void waitOnAllRegionsToClose(final boolean abort) {
// Wait till all regions are closed before going out.
int lastCount = -1;
long previousLogTime = 0;
Set<String> closedRegions = new HashSet<String>();
while (!isOnlineRegionsEmpty()) {
int count = getNumberOfOnlineRegions();
// Only print a message if the count of regions has changed.
if (count != lastCount) {
// Log every second at most
if (System.currentTimeMillis() > (previousLogTime + 1000)) {
previousLogTime = System.currentTimeMillis();
lastCount = count;
LOG.info("Waiting on " + count + " regions to close");
// Only print out regions still closing if there are just a few, else we
// will swamp the log.
if (count < 10 && LOG.isDebugEnabled()) {
LOG.debug(this.onlineRegions);
}
}
}
// Ensure all user regions have been sent a close. Use this to
// protect against the case where an open comes in after we start the
// iterator of onlineRegions to close all user regions.
for (Map.Entry<String, HRegion> e : this.onlineRegions.entrySet()) {
HRegionInfo hri = e.getValue().getRegionInfo();
if (!this.regionsInTransitionInRS.containsKey(hri.getEncodedNameAsBytes())
&& !closedRegions.contains(hri.getEncodedName())) {
closedRegions.add(hri.getEncodedName());
// Don't update zk with this close transition; pass false.
closeRegion(hri, abort, false);
}
}
// No regions in RIT, so we can stop waiting now.
if (this.regionsInTransitionInRS.isEmpty()) {
if (!isOnlineRegionsEmpty()) {
LOG.info("We were exiting though online regions are not empty," +
" because some regions failed closing");
}
break;
}
Threads.sleep(200);
}
}
private void closeWAL(final boolean delete) {
try {
if (this.hlog != null) {
if (delete) {
hlog.closeAndDelete();
} else {
hlog.close();
}
}
} catch (Throwable e) {
LOG.error("Close and delete failed", RemoteExceptionHandler.checkThrowable(e));
}
}
private void closeAllScanners() {
// Close any outstanding scanners. Means they'll get an UnknownScanner
// exception next time they come in.
for (Map.Entry<String, RegionScanner> e : this.scanners.entrySet()) {
try {
e.getValue().close();
} catch (IOException ioe) {
LOG.warn("Closing scanner " + e.getKey(), ioe);
}
}
}
/*
* Run init. Sets up hlog and starts up all server threads.
*
* @param c Extra configuration.
*/
protected void handleReportForDutyResponse(final RegionServerStartupResponse c)
throws IOException {
try {
for (NameStringPair e : c.getMapEntriesList()) {
String key = e.getName();
// The hostname the master sees us as.
if (key.equals(HConstants.KEY_FOR_HOSTNAME_SEEN_BY_MASTER)) {
String hostnameFromMasterPOV = e.getValue();
this.serverNameFromMasterPOV = new ServerName(hostnameFromMasterPOV,
this.isa.getPort(), this.startcode);
LOG.info("Master passed us hostname to use. Was=" +
this.isa.getHostName() + ", Now=" +
this.serverNameFromMasterPOV.getHostname());
continue;
}
String value = e.getValue().toString();
if (LOG.isDebugEnabled()) {
LOG.debug("Config from master: " + key + "=" + value);
}
this.conf.set(key, value);
}
// hack! Maps DFSClient => RegionServer for logs. HDFS made this
// config param for task trackers, but we can piggyback off of it.
if (this.conf.get("mapred.task.id") == null) {
this.conf.set("mapred.task.id", "hb_rs_" +
this.serverNameFromMasterPOV.toString());
}
// Set our ephemeral znode up in zookeeper now we have a name.
createMyEphemeralNode();
// Save it in a file; this will allow us to see if we crash
ZNodeClearer.writeMyEphemeralNodeOnDisk(getMyEphemeralNodePath());
// Master sent us hbase.rootdir to use. Should be fully qualified
// path with file system specification included. Set 'fs.defaultFS'
// to match the filesystem on hbase.rootdir else underlying hadoop hdfs
// accessors will be going against wrong filesystem (unless all is set
// to defaults).
this.conf.set("fs.defaultFS", this.conf.get("hbase.rootdir"));
// Get fs instance used by this RS
this.fs = new HFileSystem(this.conf, this.useHBaseChecksum);
this.rootDir = new Path(this.conf.get(HConstants.HBASE_DIR));
this.tableDescriptors = new FSTableDescriptors(this.fs, this.rootDir, true);
this.hlog = setupWALAndReplication();
// Init in here rather than in constructor after thread name has been set
this.metrics = new RegionServerMetrics();
this.dynamicMetrics = RegionServerDynamicMetrics.newInstance();
startServiceThreads();
LOG.info("Serving as " + this.serverNameFromMasterPOV +
", RPC listening on " + this.isa +
", sessionid=0x" +
Long.toHexString(this.zooKeeper.getRecoverableZooKeeper().getSessionId()));
isOnline = true;
} catch (Throwable e) {
this.isOnline = false;
stop("Failed initialization");
throw convertThrowableToIOE(cleanup(e, "Failed init"),
"Region server startup failed");
} finally {
sleeper.skipSleepCycle();
}
}
private void createMyEphemeralNode() throws KeeperException {
ZKUtil.createEphemeralNodeAndWatch(this.zooKeeper, getMyEphemeralNodePath(),
HConstants.EMPTY_BYTE_ARRAY);
}
private void deleteMyEphemeralNode() throws KeeperException {
ZKUtil.deleteNode(this.zooKeeper, getMyEphemeralNodePath());
}
public RegionServerAccounting getRegionServerAccounting() {
return regionServerAccounting;
}
/*
* @param r Region to get RegionLoad for.
*
* @return RegionLoad instance.
*/
private RegionLoad createRegionLoad(final HRegion r) {
byte[] name = r.getRegionName();
int stores = 0;
int storefiles = 0;
int storeUncompressedSizeMB = 0;
int storefileSizeMB = 0;
int memstoreSizeMB = (int) (r.memstoreSize.get() / 1024 / 1024);
int storefileIndexSizeMB = 0;
int rootIndexSizeKB = 0;
int totalStaticIndexSizeKB = 0;
int totalStaticBloomSizeKB = 0;
long totalCompactingKVs = 0;
long currentCompactedKVs = 0;
synchronized (r.stores) {
stores += r.stores.size();
for (Store store : r.stores.values()) {
storefiles += store.getStorefilesCount();
storeUncompressedSizeMB += (int) (store.getStoreSizeUncompressed()
/ 1024 / 1024);
storefileSizeMB += (int) (store.getStorefilesSize() / 1024 / 1024);
storefileIndexSizeMB += (int) (store.getStorefilesIndexSize() / 1024 / 1024);
CompactionProgress progress = store.getCompactionProgress();
if (progress != null) {
totalCompactingKVs += progress.totalCompactingKVs;
currentCompactedKVs += progress.currentCompactedKVs;
}
rootIndexSizeKB +=
(int) (store.getStorefilesIndexSize() / 1024);
totalStaticIndexSizeKB +=
(int) (store.getTotalStaticIndexSize() / 1024);
totalStaticBloomSizeKB +=
(int) (store.getTotalStaticBloomSize() / 1024);
}
}
RegionLoad.Builder regionLoad = RegionLoad.newBuilder();
RegionSpecifier.Builder regionSpecifier = RegionSpecifier.newBuilder();
regionSpecifier.setType(RegionSpecifierType.REGION_NAME);
regionSpecifier.setValue(ByteString.copyFrom(name));
regionLoad.setRegionSpecifier(regionSpecifier.build())
.setStores(stores)
.setStorefiles(storefiles)
.setStoreUncompressedSizeMB(storeUncompressedSizeMB)
.setStorefileSizeMB(storefileSizeMB)
.setMemstoreSizeMB(memstoreSizeMB)
.setStorefileIndexSizeMB(storefileIndexSizeMB)
.setRootIndexSizeKB(rootIndexSizeKB)
.setTotalStaticIndexSizeKB(totalStaticIndexSizeKB)
.setTotalStaticBloomSizeKB(totalStaticBloomSizeKB)
.setReadRequestsCount((int) r.readRequestsCount.get())
.setWriteRequestsCount((int) r.writeRequestsCount.get())
.setTotalCompactingKVs(totalCompactingKVs)
.setCurrentCompactedKVs(currentCompactedKVs);
Set<String> coprocessors = r.getCoprocessorHost().getCoprocessors();
for (String coprocessor : coprocessors) {
regionLoad.addCoprocessors(
Coprocessor.newBuilder().setName(coprocessor).build());
}
return regionLoad.build();
}
/**
* @param encodedRegionName
* @return An instance of RegionLoad.
*/
public RegionLoad createRegionLoad(final String encodedRegionName) {
HRegion r = null;
r = this.onlineRegions.get(encodedRegionName);
return r != null ? createRegionLoad(r) : null;
}
/*
* Inner class that runs periodically, checking whether regions need compaction.
*/
private static class CompactionChecker extends Chore {
private final HRegionServer instance;
private final int majorCompactPriority;
private final static int DEFAULT_PRIORITY = Integer.MAX_VALUE;
CompactionChecker(final HRegionServer h, final int sleepTime,
final Stoppable stopper) {
super("CompactionChecker", sleepTime, h);
this.instance = h;
LOG.info("Runs every " + StringUtils.formatTime(sleepTime));
/* MajorCompactPriority is configurable.
* If not set, the compaction will use default priority.
*/
this.majorCompactPriority = this.instance.conf.
getInt("hbase.regionserver.compactionChecker.majorCompactPriority",
DEFAULT_PRIORITY);
}
@Override
protected void chore() {
for (HRegion r : this.instance.onlineRegions.values()) {
if (r == null)
continue;
for (Store s : r.getStores().values()) {
try {
if (s.needsCompaction()) {
// Queue a compaction. Will recognize if major is needed.
this.instance.compactSplitThread.requestCompaction(r, s,
getName() + " requests compaction");
} else if (s.isMajorCompaction()) {
if (majorCompactPriority == DEFAULT_PRIORITY ||
majorCompactPriority > r.getCompactPriority()) {
this.instance.compactSplitThread.requestCompaction(r, s,
getName() + " requests major compaction; use default priority");
} else {
this.instance.compactSplitThread.requestCompaction(r, s,
getName() + " requests major compaction; use configured priority",
this.majorCompactPriority);
}
}
} catch (IOException e) {
LOG.warn("Failed major compaction check on " + r, e);
}
}
}
}
}
/**
* Report the status of the server. A server is online once all the startup is
* completed (setting up filesystem, starting service threads, etc.). This
* method is designed mostly to be useful in tests.
*
* @return true if online, false if not.
*/
public boolean isOnline() {
return isOnline;
}
/**
* Setup WAL log and replication if enabled.
* Replication setup is done in here because it wants to be hooked up to WAL.
* @return A WAL instance.
* @throws IOException
*/
private HLog setupWALAndReplication() throws IOException {
final Path oldLogDir = new Path(rootDir, HConstants.HREGION_OLDLOGDIR_NAME);
Path logdir = new Path(rootDir,
HLog.getHLogDirectoryName(this.serverNameFromMasterPOV.toString()));
if (LOG.isDebugEnabled()) LOG.debug("logdir=" + logdir);
if (this.fs.exists(logdir)) {
throw new RegionServerRunningException("Region server has already " +
"created directory at " + this.serverNameFromMasterPOV.toString());
}
// Instantiate replication manager if replication enabled. Pass it the
// log directories.
createNewReplicationInstance(conf, this, this.fs, logdir, oldLogDir);
return instantiateHLog(logdir, oldLogDir);
}
/**
* Called by {@link #setupWALAndReplication()} creating WAL instance.
* @param logdir
* @param oldLogDir
* @return WAL instance.
* @throws IOException
*/
protected HLog instantiateHLog(Path logdir, Path oldLogDir) throws IOException {
return new HLog(this.fs.getBackingFs(), logdir, oldLogDir, this.conf,
getWALActionListeners(), this.serverNameFromMasterPOV.toString());
}
/**
* Called by {@link #instantiateHLog(Path, Path)} setting up WAL instance.
* Add any {@link WALActionsListener}s you want inserted before WAL startup.
* @return List of WALActionsListener that will be passed in to
* {@link HLog} on construction.
*/
protected List<WALActionsListener> getWALActionListeners() {
List<WALActionsListener> listeners = new ArrayList<WALActionsListener>();
// Log roller.
this.hlogRoller = new LogRoller(this, this);
listeners.add(this.hlogRoller);
if (this.replicationSourceHandler != null &&
this.replicationSourceHandler.getWALActionsListener() != null) {
// Replication handler is an implementation of WALActionsListener.
listeners.add(this.replicationSourceHandler.getWALActionsListener());
}
return listeners;
}
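// A subclass wanting an extra WAL hook could override the method above, e.g.
// (hypothetical listener type, shown only as a sketch):
//   List<WALActionsListener> listeners = super.getWALActionListeners();
//   listeners.add(new MyWALObserverListener());
//   return listeners;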
protected LogRoller getLogRoller() {
return hlogRoller;
}
/*
* @param interval Interval since last time metrics were called.
*/
protected void doMetrics() {
try {
metrics();
} catch (Throwable e) {
LOG.warn("Failed metrics", e);
}
}
protected void metrics() {
this.metrics.regions.set(this.onlineRegions.size());
this.metrics.incrementRequests(this.requestCount.get());
this.metrics.requests.intervalHeartBeat();
// Is this too expensive, every three seconds getting a lock on onlineRegions
// and then iterating every store? Can the metrics be made sloppier so we
// avoid the synchronizations?
int stores = 0;
int storefiles = 0;
long memstoreSize = 0;
int readRequestsCount = 0;
int writeRequestsCount = 0;
long storefileIndexSize = 0;
HDFSBlocksDistribution hdfsBlocksDistribution =
new HDFSBlocksDistribution();
long totalStaticIndexSize = 0;
long totalStaticBloomSize = 0;
long numPutsWithoutWAL = 0;
long dataInMemoryWithoutWAL = 0;
// Note that this is a map of Doubles instead of Longs. With longs we would
// be doing integer division, which can truncate more than it should because
// it happens on one part of our sum at a time. Rather than dividing at the
// end, where it is difficult to know the proper factor, everything is kept
// exact and truncated only once.
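// Worked example of the truncation hazard (illustrative numbers only): three
// stores of 1.4 MB each sum to 4.2 MB, which truncates once to 4 MB, whereas
// truncating each store to 1 MB before summing would report just 3 MB.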
final Map<String, MutableDouble> tempVals =
new HashMap<String, MutableDouble>();
for (Map.Entry<String, HRegion> e : this.onlineRegions.entrySet()) {
HRegion r = e.getValue();
memstoreSize += r.memstoreSize.get();
numPutsWithoutWAL += r.numPutsWithoutWAL.get();
dataInMemoryWithoutWAL += r.dataInMemoryWithoutWAL.get();
readRequestsCount += r.readRequestsCount.get();
writeRequestsCount += r.writeRequestsCount.get();
synchronized (r.stores) {
stores += r.stores.size();
for (Map.Entry<byte[], Store> ee : r.stores.entrySet()) {
final Store store = ee.getValue();
final SchemaMetrics schemaMetrics = store.getSchemaMetrics();
{
long tmpStorefiles = store.getStorefilesCount();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STORE_FILE_COUNT, tmpStorefiles);
storefiles += tmpStorefiles;
}
{
long tmpStorefileIndexSize = store.getStorefilesIndexSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STORE_FILE_INDEX_SIZE,
(long) (tmpStorefileIndexSize / (1024.0 * 1024)));
storefileIndexSize += tmpStorefileIndexSize;
}
{
long tmpStorefilesSize = store.getStorefilesSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STORE_FILE_SIZE_MB,
(long) (tmpStorefilesSize / (1024.0 * 1024)));
}
{
long tmpStaticBloomSize = store.getTotalStaticBloomSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STATIC_BLOOM_SIZE_KB,
(long) (tmpStaticBloomSize / 1024.0));
totalStaticBloomSize += tmpStaticBloomSize;
}
{
long tmpStaticIndexSize = store.getTotalStaticIndexSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STATIC_INDEX_SIZE_KB,
(long) (tmpStaticIndexSize / 1024.0));
totalStaticIndexSize += tmpStaticIndexSize;
}
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.MEMSTORE_SIZE_MB,
(long) (store.getMemStoreSize() / (1024.0 * 1024)));
}
}
hdfsBlocksDistribution.add(r.getHDFSBlocksDistribution());
}
for (Entry<String, MutableDouble> e : tempVals.entrySet()) {
RegionMetricsStorage.setNumericMetric(e.getKey(), e.getValue().longValue());
}
this.metrics.stores.set(stores);
this.metrics.storefiles.set(storefiles);
this.metrics.memstoreSizeMB.set((int) (memstoreSize / (1024 * 1024)));
this.metrics.mbInMemoryWithoutWAL.set((int) (dataInMemoryWithoutWAL / (1024 * 1024)));
this.metrics.numPutsWithoutWAL.set(numPutsWithoutWAL);
this.metrics.storefileIndexSizeMB.set(
(int) (storefileIndexSize / (1024 * 1024)));
this.metrics.rootIndexSizeKB.set(
(int) (storefileIndexSize / 1024));
this.metrics.totalStaticIndexSizeKB.set(
(int) (totalStaticIndexSize / 1024));
this.metrics.totalStaticBloomSizeKB.set(
(int) (totalStaticBloomSize / 1024));
this.metrics.readRequestsCount.set(readRequestsCount);
this.metrics.writeRequestsCount.set(writeRequestsCount);
this.metrics.compactionQueueSize.set(compactSplitThread
.getCompactionQueueSize());
this.metrics.flushQueueSize.set(cacheFlusher
.getFlushQueueSize());
BlockCache blockCache = cacheConfig.getBlockCache();
if (blockCache != null) {
this.metrics.blockCacheCount.set(blockCache.size());
this.metrics.blockCacheFree.set(blockCache.getFreeSize());
this.metrics.blockCacheSize.set(blockCache.getCurrentSize());
CacheStats cacheStats = blockCache.getStats();
this.metrics.blockCacheHitCount.set(cacheStats.getHitCount());
this.metrics.blockCacheMissCount.set(cacheStats.getMissCount());
this.metrics.blockCacheEvictedCount.set(blockCache.getEvictedCount());
double ratio = blockCache.getStats().getHitRatio();
int percent = (int) (ratio * 100);
this.metrics.blockCacheHitRatio.set(percent);
ratio = blockCache.getStats().getHitCachingRatio();
percent = (int) (ratio * 100);
this.metrics.blockCacheHitCachingRatio.set(percent);
// past N period block cache hit / hit caching ratios
cacheStats.rollMetricsPeriod();
ratio = cacheStats.getHitRatioPastNPeriods();
percent = (int) (ratio * 100);
this.metrics.blockCacheHitRatioPastNPeriods.set(percent);
ratio = cacheStats.getHitCachingRatioPastNPeriods();
percent = (int) (ratio * 100);
this.metrics.blockCacheHitCachingRatioPastNPeriods.set(percent);
}
float localityIndex = hdfsBlocksDistribution.getBlockLocalityIndex(
getServerName().getHostname());
int percent = (int) (localityIndex * 100);
this.metrics.hdfsBlocksLocalityIndex.set(percent);
}
/**
* @return Region server metrics instance.
*/
public RegionServerMetrics getMetrics() {
return this.metrics;
}
/**
* @return Master address tracker instance.
*/
public MasterAddressTracker getMasterAddressManager() {
return this.masterAddressManager;
}
/*
* Start maintenance threads: Server, Worker and lease checker threads.
* Install an UncaughtExceptionHandler that calls abort of RegionServer if we
* get an unhandled exception. We cannot set the handler on all threads.
* Server's internal Listener thread is off limits. For Server, if an OOME, it
* waits a while then retries. Meantime, a flush or a compaction that tries to
* run should trigger the same critical condition and the shutdown will run. On
* its way out, this server will shut down Server. Leases are sort of in
* between: while the class inherits from Chore, it keeps its own internal stop
* mechanism, so it needs to be stopped by this hosting server. Worker logs
* the exception and exits.
*/
private void startServiceThreads() throws IOException {
String n = Thread.currentThread().getName();
UncaughtExceptionHandler handler = new UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) {
abort("Uncaught exception in service thread " + t.getName(), e);
}
};
// Start executor services
this.service = new ExecutorService(getServerName().toString());
this.service.startExecutorService(ExecutorType.RS_OPEN_REGION,
conf.getInt("hbase.regionserver.executor.openregion.threads", 3));
this.service.startExecutorService(ExecutorType.RS_OPEN_ROOT,
conf.getInt("hbase.regionserver.executor.openroot.threads", 1));
this.service.startExecutorService(ExecutorType.RS_OPEN_META,
conf.getInt("hbase.regionserver.executor.openmeta.threads", 1));
this.service.startExecutorService(ExecutorType.RS_CLOSE_REGION,
conf.getInt("hbase.regionserver.executor.closeregion.threads", 3));
this.service.startExecutorService(ExecutorType.RS_CLOSE_ROOT,
conf.getInt("hbase.regionserver.executor.closeroot.threads", 1));
this.service.startExecutorService(ExecutorType.RS_CLOSE_META,
conf.getInt("hbase.regionserver.executor.closemeta.threads", 1));
Threads.setDaemonThreadRunning(this.hlogRoller.getThread(), n + ".logRoller", handler);
Threads.setDaemonThreadRunning(this.cacheFlusher.getThread(), n + ".cacheFlusher",
handler);
Threads.setDaemonThreadRunning(this.compactionChecker.getThread(), n +
".compactionChecker", handler);
// Leases is not a Thread. Internally it runs a daemon thread. If it gets
// an unhandled exception, it will just exit.
this.leases.setName(n + ".leaseChecker");
this.leases.start();
// Put up the webui. The webui may come up on a port other than the one
// configured if that port is occupied. Adjust serverInfo if this is the case.
this.webuiport = putUpWebUI();
if (this.replicationSourceHandler == this.replicationSinkHandler &&
this.replicationSourceHandler != null) {
this.replicationSourceHandler.startReplicationService();
} else if (this.replicationSourceHandler != null) {
this.replicationSourceHandler.startReplicationService();
} else if (this.replicationSinkHandler != null) {
this.replicationSinkHandler.startReplicationService();
}
// Start Server. This service is like leases in that it internally runs
// a thread.
this.rpcServer.start();
// Create the log splitting worker and start it
this.splitLogWorker = new SplitLogWorker(this.zooKeeper,
this.getConfiguration(), this.getServerName());
splitLogWorker.start();
}
/**
* Puts up the webui.
* @return Returns final port -- maybe different from what we started with.
* @throws IOException
*/
private int putUpWebUI() throws IOException {
int port = this.conf.getInt(HConstants.REGIONSERVER_INFO_PORT, 60030);
// -1 is for disabling info server
if (port < 0) return port;
String addr = this.conf.get("hbase.regionserver.info.bindAddress", "0.0.0.0");
// check if auto port bind enabled
boolean auto = this.conf.getBoolean(HConstants.REGIONSERVER_INFO_PORT_AUTO,
false);
while (true) {
try {
this.infoServer = new InfoServer("regionserver", addr, port, false, this.conf);
this.infoServer.addServlet("status", "/rs-status", RSStatusServlet.class);
this.infoServer.addServlet("dump", "/dump", RSDumpServlet.class);
this.infoServer.setAttribute(REGIONSERVER, this);
this.infoServer.setAttribute(REGIONSERVER_CONF, conf);
this.infoServer.start();
break;
} catch (BindException e) {
if (!auto) {
// auto bind disabled throw BindException
throw e;
}
// auto bind enabled, try to use another port
LOG.info("Failed binding http info server to port: " + port);
port++;
}
}
return port;
}
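// For example, assuming the literal values of the HConstants keys used above
// are hbase.regionserver.info.port and hbase.regionserver.info.port.auto, a
// deploy could set the port to 60030 and auto to true so the server walks up
// from 60030 until a free port binds instead of failing with a BindException.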
/*
* Verify that server is healthy
*/
private boolean isHealthy() {
if (!fsOk) {
// File system problem
return false;
}
// Verify that all threads are alive
if (!(leases.isAlive()
&& cacheFlusher.isAlive() && hlogRoller.isAlive()
&& this.compactionChecker.isAlive())) {
stop("One or more threads are no longer alive -- stop");
return false;
}
return true;
}
@Override
public HLog getWAL() {
return this.hlog;
}
@Override
public CatalogTracker getCatalogTracker() {
return this.catalogTracker;
}
@Override
public void stop(final String msg) {
this.stopped = true;
LOG.info("STOPPED: " + msg);
// Wakes run() if it is sleeping
sleeper.skipSleepCycle();
}
public void waitForServerOnline(){
while (!isOnline() && !isStopped()){
sleeper.sleep();
}
}
@Override
public void postOpenDeployTasks(final HRegion r, final CatalogTracker ct,
final boolean daughter)
throws KeeperException, IOException {
checkOpen();
LOG.info("Post open deploy tasks for region=" + r.getRegionNameAsString() +
", daughter=" + daughter);
// Do checks to see if we need to compact (references or too many files)
for (Store s : r.getStores().values()) {
if (s.hasReferences() || s.needsCompaction()) {
getCompactionRequester().requestCompaction(r, s, "Opening Region");
}
}
// Update ZK, ROOT or META
if (r.getRegionInfo().isRootRegion()) {
RootRegionTracker.setRootLocation(getZooKeeper(),
this.serverNameFromMasterPOV);
} else if (r.getRegionInfo().isMetaRegion()) {
MetaEditor.updateMetaLocation(ct, r.getRegionInfo(),
this.serverNameFromMasterPOV);
} else {
if (daughter) {
// If daughter of a split, update whole row, not just location.
MetaEditor.addDaughter(ct, r.getRegionInfo(),
this.serverNameFromMasterPOV);
} else {
MetaEditor.updateRegionLocation(ct, r.getRegionInfo(),
this.serverNameFromMasterPOV);
}
}
LOG.info("Done with post open deploy task for region=" +
r.getRegionNameAsString() + ", daughter=" + daughter);
}
/**
* Return a reference to the metrics instance used for counting RPC calls.
* @return Metrics instance.
*/
public HBaseRpcMetrics getRpcMetrics() {
return rpcServer.getRpcMetrics();
}
@Override
public RpcServer getRpcServer() {
return rpcServer;
}
/**
* Cause the server to exit without closing the regions it is serving or the
* log it is using, and without notifying the master. Used in unit testing and
* on catastrophic events such as HDFS being yanked out from under hbase or an
* OOME.
*
* @param reason
* the reason we are aborting
* @param cause
* the exception that caused the abort, or null
*/
public void abort(String reason, Throwable cause) {
String msg = "ABORTING region server " + this + ": " + reason;
if (cause != null) {
LOG.fatal(msg, cause);
} else {
LOG.fatal(msg);
}
this.abortRequested = true;
this.reservedSpace.clear();
// HBASE-4014: show list of coprocessors that were loaded to help debug
// regionserver crashes. Note that we're implicitly using
// java.util.HashSet's toString() method to print the coprocessor names.
LOG.fatal("RegionServer abort: loaded coprocessors are: " +
CoprocessorHost.getLoadedCoprocessors());
if (this.metrics != null) {
LOG.info("Dump of metrics: " + this.metrics);
}
// Do our best to report our abort to the master, but this may not work
try {
if (cause != null) {
msg += "\nCause:\n" + StringUtils.stringifyException(cause);
}
if (hbaseMaster != null) {
ReportRSFatalErrorRequest.Builder builder =
ReportRSFatalErrorRequest.newBuilder();
ServerName sn =
ServerName.parseVersionedServerName(this.serverNameFromMasterPOV.getVersionedBytes());
builder.setServer(ProtobufUtil.toServerName(sn));
builder.setErrorMessage(msg);
hbaseMaster.reportRSFatalError(
null,builder.build());
}
} catch (Throwable t) {
LOG.warn("Unable to report fatal error to master", t);
}
stop(reason);
}
/**
* @see HRegionServer#abort(String, Throwable)
*/
public void abort(String reason) {
abort(reason, null);
}
public boolean isAborted() {
return this.abortRequested;
}
/*
* Simulate a kill -9 of this server. Exits w/o closing regions or cleaning up
* logs, but it does close the socket in case we want to bring up a server on
* the old hostname+port immediately.
*/
protected void kill() {
this.killed = true;
abort("Simulated kill");
}
/**
* Wait on all threads to finish. Presumption is that all closes and stops
* have already been called.
*/
protected void join() {
Threads.shutdown(this.compactionChecker.getThread());
Threads.shutdown(this.cacheFlusher.getThread());
if (this.hlogRoller != null) {
Threads.shutdown(this.hlogRoller.getThread());
}
if (this.compactSplitThread != null) {
this.compactSplitThread.join();
}
if (this.service != null) this.service.shutdown();
if (this.replicationSourceHandler != null &&
this.replicationSourceHandler == this.replicationSinkHandler) {
this.replicationSourceHandler.stopReplicationService();
} else if (this.replicationSourceHandler != null) {
this.replicationSourceHandler.stopReplicationService();
} else if (this.replicationSinkHandler != null) {
this.replicationSinkHandler.stopReplicationService();
}
}
/**
* @return Return the object that implements the replication
* source service.
*/
ReplicationSourceService getReplicationSourceService() {
return replicationSourceHandler;
}
/**
* @return Return the object that implements the replication
* sink service.
*/
ReplicationSinkService getReplicationSinkService() {
return replicationSinkHandler;
}
/**
* Get the current master from ZooKeeper and open the RPC connection to it.
*
* Method will block until a master is available. You can break from this
* block by requesting the server stop.
*
* @return master + port, or null if server has been stopped
*/
private ServerName getMaster() {
ServerName masterServerName = null;
long previousLogTime = 0;
RegionServerStatusProtocol master = null;
boolean refresh = false; // for the first time, use cached data
while (keepLooping() && master == null) {
masterServerName = this.masterAddressManager.getMasterAddress(refresh);
if (masterServerName == null) {
if (!keepLooping()) {
// give up with no connection.
LOG.debug("No master found and cluster is stopped; bailing out");
return null;
}
LOG.debug("No master found; retry");
previousLogTime = System.currentTimeMillis();
refresh = true; // let's try pull it from ZK directly
sleeper.sleep();
continue;
}
InetSocketAddress isa =
new InetSocketAddress(masterServerName.getHostname(), masterServerName.getPort());
LOG.info("Attempting connect to Master server at " +
this.masterAddressManager.getMasterAddress());
try {
// Do initial RPC setup. The final argument indicates that the RPC
// should retry indefinitely.
master = (RegionServerStatusProtocol) HBaseRPC.waitForProxy(
RegionServerStatusProtocol.class, RegionServerStatusProtocol.VERSION,
isa, this.conf, -1,
this.rpcTimeout, this.rpcTimeout);
LOG.info("Connected to master at " + isa);
} catch (IOException e) {
e = e instanceof RemoteException ?
((RemoteException)e).unwrapRemoteException() : e;
if (e instanceof ServerNotRunningYetException) {
if (System.currentTimeMillis() > (previousLogTime+1000)){
LOG.info("Master isn't available yet, retrying");
previousLogTime = System.currentTimeMillis();
}
} else {
if (System.currentTimeMillis() > (previousLogTime + 1000)) {
LOG.warn("Unable to connect to master. Retrying. Error was:", e);
previousLogTime = System.currentTimeMillis();
}
}
try {
Thread.sleep(200);
} catch (InterruptedException ignored) {
}
}
}
this.hbaseMaster = master;
return masterServerName;
}
/**
* @return True if we should break loop because cluster is going down or
* this server has been stopped or hdfs has gone bad.
*/
private boolean keepLooping() {
return !this.stopped && isClusterUp();
}
/*
* Let the master know we're here. Run initialization using parameters passed
* to us by the master.
* @return A Map of key/value configurations we got from the Master, else
* null if we failed to register.
* @throws IOException
*/
private RegionServerStartupResponse reportForDuty() throws IOException {
RegionServerStartupResponse result = null;
ServerName masterServerName = getMaster();
if (masterServerName == null) return result;
try {
this.requestCount.set(0);
LOG.info("Telling master at " + masterServerName + " that we are up " +
"with port=" + this.isa.getPort() + ", startcode=" + this.startcode);
long now = EnvironmentEdgeManager.currentTimeMillis();
int port = this.isa.getPort();
RegionServerStartupRequest.Builder request = RegionServerStartupRequest.newBuilder();
request.setPort(port);
request.setServerStartCode(this.startcode);
request.setServerCurrentTime(now);
result = this.hbaseMaster.regionServerStartup(null, request.build());
} catch (ServiceException se) {
IOException ioe = ProtobufUtil.getRemoteException(se);
if (ioe instanceof ClockOutOfSyncException) {
LOG.fatal("Master rejected startup because clock is out of sync", ioe);
// Re-throw IOE will cause RS to abort
throw ioe;
} else {
LOG.warn("error telling master we are up", se);
}
}
return result;
}
/**
* Closes all regions. Called on our way out.
* Assumes that it's not possible for new regions to be added to onlineRegions
* while this method runs.
*/
protected void closeAllRegions(final boolean abort) {
closeUserRegions(abort);
// Only root and meta should remain. Are we carrying root or meta?
HRegion meta = null;
HRegion root = null;
this.lock.writeLock().lock();
try {
for (Map.Entry<String, HRegion> e: onlineRegions.entrySet()) {
HRegionInfo hri = e.getValue().getRegionInfo();
if (hri.isRootRegion()) {
root = e.getValue();
} else if (hri.isMetaRegion()) {
meta = e.getValue();
}
if (meta != null && root != null) break;
}
} finally {
this.lock.writeLock().unlock();
}
if (meta != null) closeRegion(meta.getRegionInfo(), abort, false);
if (root != null) closeRegion(root.getRegionInfo(), abort, false);
}
/**
* Schedule closes on all user regions.
* Should be safe to call multiple times because it won't close regions
* that are already closed or that are closing.
* @param abort Whether we're running an abort.
*/
void closeUserRegions(final boolean abort) {
this.lock.writeLock().lock();
try {
for (Map.Entry<String, HRegion> e: this.onlineRegions.entrySet()) {
HRegion r = e.getValue();
if (!r.getRegionInfo().isMetaRegion() && r.isAvailable()) {
// Don't update zk with this close transition; pass false.
closeRegion(r.getRegionInfo(), abort, false);
}
}
} finally {
this.lock.writeLock().unlock();
}
}
/** @return the info server */
public InfoServer getInfoServer() {
return infoServer;
}
/**
* @return true if a stop has been requested.
*/
public boolean isStopped() {
return this.stopped;
}
@Override
public boolean isStopping() {
return this.stopping;
}
/**
*
* @return the configuration
*/
public Configuration getConfiguration() {
return conf;
}
/** @return the write lock for the server */
ReentrantReadWriteLock.WriteLock getWriteLock() {
return lock.writeLock();
}
public int getNumberOfOnlineRegions() {
return this.onlineRegions.size();
}
boolean isOnlineRegionsEmpty() {
return this.onlineRegions.isEmpty();
}
/**
* @param encodedRegionName
* @return JSON Map of labels to values for passed in <code>encodedRegionName</code>
* @throws IOException
*/
public byte [] getRegionStats(final String encodedRegionName)
throws IOException {
HRegion r = null;
synchronized (this.onlineRegions) {
r = this.onlineRegions.get(encodedRegionName);
}
if (r == null) return null;
ObjectMapper mapper = new ObjectMapper();
int stores = 0;
int storefiles = 0;
int storefileSizeMB = 0;
int memstoreSizeMB = (int) (r.memstoreSize.get() / 1024 / 1024);
int storefileIndexSizeMB = 0;
synchronized (r.stores) {
stores += r.stores.size();
for (Store store : r.stores.values()) {
storefiles += store.getStorefilesCount();
storefileSizeMB += (int) (store.getStorefilesSize() / 1024 / 1024);
storefileIndexSizeMB += (int) (store.getStorefilesIndexSize() / 1024 / 1024);
}
}
Map<String, Integer> map = new TreeMap<String, Integer>();
map.put("stores", stores);
map.put("storefiles", storefiles);
map.put("storefileSizeMB", storefileSizeMB);
map.put("storefileIndexSizeMB", storefileIndexSizeMB);
map.put("memstoreSizeMB", memstoreSizeMB);
StringWriter w = new StringWriter();
mapper.writeValue(w, map);
w.close();
return Bytes.toBytes(w.toString());
}
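// Sample JSON produced by the method above (illustrative values; keys come
// out alphabetical because of the TreeMap):
//   {"memstoreSizeMB":12,"storefileIndexSizeMB":1,"storefileSizeMB":240,
//    "storefiles":8,"stores":2}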
/**
* For tests and web ui.
* This method will only work if HRegionServer is in the same JVM as client;
* HRegion cannot be serialized to cross an rpc.
* @see #getOnlineRegions()
*/
public Collection<HRegion> getOnlineRegionsLocalContext() {
Collection<HRegion> regions = this.onlineRegions.values();
return Collections.unmodifiableCollection(regions);
}
@Override
public void addToOnlineRegions(HRegion region) {
this.onlineRegions.put(region.getRegionInfo().getEncodedName(), region);
}
/**
* @return A new Map of online regions sorted by region size with the first
* entry being the biggest.
*/
public SortedMap<Long, HRegion> getCopyOfOnlineRegionsSortedBySize() {
// we'll sort the regions in reverse
SortedMap<Long, HRegion> sortedRegions = new TreeMap<Long, HRegion>(
new Comparator<Long>() {
public int compare(Long a, Long b) {
return -1 * a.compareTo(b);
}
});
// Copy over all regions. Regions are sorted by size with biggest first.
for (HRegion region : this.onlineRegions.values()) {
sortedRegions.put(Long.valueOf(region.memstoreSize.get()), region);
}
return sortedRegions;
}
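// Note: because the map is keyed by memstore size, two regions with exactly
// equal sizes collide and only the last one put survives; callers should
// treat the result as approximate, which suits its biggest-first use.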
/** @return the request count */
public AtomicInteger getRequestCount() {
return this.requestCount;
}
/**
* @return time stamp in millis of when this region server was started
*/
public long getStartcode() {
return this.startcode;
}
/** @return reference to FlushRequester */
public FlushRequester getFlushRequester() {
return this.cacheFlusher;
}
/**
* Get the top N most loaded regions this server is serving so we can tell the
* master which regions it can reallocate if we're overloaded. TODO: actually
* calculate which regions are most loaded. (Right now, we're just grabbing
* the first N regions being served regardless of load.)
*/
protected HRegionInfo[] getMostLoadedRegions() {
ArrayList<HRegionInfo> regions = new ArrayList<HRegionInfo>();
for (HRegion r : onlineRegions.values()) {
if (!r.isAvailable()) {
continue;
}
if (regions.size() < numRegionsToReport) {
regions.add(r.getRegionInfo());
} else {
break;
}
}
return regions.toArray(new HRegionInfo[regions.size()]);
}
@Override
@QosPriority(priority=HIGH_QOS)
public ProtocolSignature getProtocolSignature(
String protocol, long version, int clientMethodsHashCode)
throws IOException {
if (protocol.equals(ClientProtocol.class.getName())) {
return new ProtocolSignature(ClientProtocol.VERSION, null);
} else if (protocol.equals(AdminProtocol.class.getName())) {
return new ProtocolSignature(AdminProtocol.VERSION, null);
}
throw new IOException("Unknown protocol: " + protocol);
}
@Override
@QosPriority(priority=HIGH_QOS)
public long getProtocolVersion(final String protocol, final long clientVersion)
throws IOException {
if (protocol.equals(ClientProtocol.class.getName())) {
return ClientProtocol.VERSION;
} else if (protocol.equals(AdminProtocol.class.getName())) {
return AdminProtocol.VERSION;
}
throw new IOException("Unknown protocol: " + protocol);
}
@Override
public Leases getLeases() {
return leases;
}
/**
* @return Return the rootDir.
*/
protected Path getRootDir() {
return rootDir;
}
/**
* @return Return the fs.
*/
public FileSystem getFileSystem() {
return fs;
}
public String toString() {
return getServerName().toString();
}
/**
* Interval at which threads should run
*
* @return the interval
*/
public int getThreadWakeFrequency() {
return threadWakeFrequency;
}
@Override
public ZooKeeperWatcher getZooKeeper() {
return zooKeeper;
}
@Override
public ServerName getServerName() {
// Our servername could change after we talk to the master.
return this.serverNameFromMasterPOV == null?
new ServerName(this.isa.getHostName(), this.isa.getPort(), this.startcode):
this.serverNameFromMasterPOV;
}
@Override
public CompactionRequestor getCompactionRequester() {
return this.compactSplitThread;
}
public ZooKeeperWatcher getZooKeeperWatcher() {
return this.zooKeeper;
}
public ConcurrentSkipListMap<byte[], Boolean> getRegionsInTransitionInRS() {
return this.regionsInTransitionInRS;
}
public ExecutorService getExecutorService() {
return service;
}
//
// Main program and support routines
//
/**
* Load the replication service objects, if any
*/
static private void createNewReplicationInstance(Configuration conf,
HRegionServer server, FileSystem fs, Path logDir, Path oldLogDir) throws IOException{
// If replication is not enabled, then return immediately.
if (!conf.getBoolean(HConstants.REPLICATION_ENABLE_KEY, false)) {
return;
}
// read in the name of the source replication class from the config file.
String sourceClassname = conf.get(HConstants.REPLICATION_SOURCE_SERVICE_CLASSNAME,
HConstants.REPLICATION_SERVICE_CLASSNAME_DEFAULT);
// read in the name of the sink replication class from the config file.
String sinkClassname = conf.get(HConstants.REPLICATION_SINK_SERVICE_CLASSNAME,
HConstants.REPLICATION_SERVICE_CLASSNAME_DEFAULT);
// If both the sink and the source class names are the same, then instantiate
// only one object.
if (sourceClassname.equals(sinkClassname)) {
server.replicationSourceHandler = (ReplicationSourceService)
newReplicationInstance(sourceClassname,
conf, server, fs, logDir, oldLogDir);
server.replicationSinkHandler = (ReplicationSinkService)
server.replicationSourceHandler;
}
else {
server.replicationSourceHandler = (ReplicationSourceService)
newReplicationInstance(sourceClassname,
conf, server, fs, logDir, oldLogDir);
server.replicationSinkHandler = (ReplicationSinkService)
newReplicationInstance(sinkClassname,
conf, server, fs, logDir, oldLogDir);
}
}
static private ReplicationService newReplicationInstance(String classname,
Configuration conf, HRegionServer server, FileSystem fs, Path logDir,
Path oldLogDir) throws IOException{
Class<?> clazz = null;
try {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
clazz = Class.forName(classname, true, classLoader);
} catch (java.lang.ClassNotFoundException nfe) {
throw new IOException("Cound not find class for " + classname);
}
// create an instance of the replication object.
ReplicationService service = (ReplicationService)
ReflectionUtils.newInstance(clazz, conf);
service.initialize(server, fs, logDir, oldLogDir);
return service;
}
/**
* @param hrs
* @return The correctly named Thread the RegionServer is running in.
* @throws IOException
*/
public static Thread startRegionServer(final HRegionServer hrs)
throws IOException {
return startRegionServer(hrs, "regionserver" + hrs.isa.getPort());
}
/**
* @param hrs
* @param name
* @return The correctly named Thread the RegionServer is running in.
* @throws IOException
*/
public static Thread startRegionServer(final HRegionServer hrs,
final String name) throws IOException {
Thread t = new Thread(hrs);
t.setName(name);
t.start();
// Install shutdown hook that will catch signals and run an orderly shutdown
// of the hrs.
ShutdownHook.install(hrs.getConfiguration(), FileSystem.get(hrs
.getConfiguration()), hrs, t);
return t;
}
/**
* Utility for constructing an instance of the passed HRegionServer class.
*
* @param regionServerClass
* @param conf2
* @return HRegionServer instance.
*/
public static HRegionServer constructRegionServer(
Class<? extends HRegionServer> regionServerClass,
final Configuration conf2) {
try {
Constructor<? extends HRegionServer> c = regionServerClass
.getConstructor(Configuration.class);
return c.newInstance(conf2);
} catch (Exception e) {
throw new RuntimeException("Failed construction of " + "Regionserver: "
+ regionServerClass.toString(), e);
}
}
/**
* @see org.apache.hadoop.hbase.regionserver.HRegionServerCommandLine
*/
public static void main(String[] args) throws Exception {
VersionInfo.logVersion();
Configuration conf = HBaseConfiguration.create();
@SuppressWarnings("unchecked")
Class<? extends HRegionServer> regionServerClass = (Class<? extends HRegionServer>) conf
.getClass(HConstants.REGION_SERVER_IMPL, HRegionServer.class);
new HRegionServerCommandLine(regionServerClass).doMain(args);
}
/**
* Gets the online regions of the specified table.
* This method looks at the in-memory onlineRegions. It does not go to <code>.META.</code>.
* Only returns <em>online</em> regions. If a region on this table has been
* closed during a disable, etc., it will not be included in the returned list.
* So, the returned list is not necessarily ALL the regions in this table; it
* is all the ONLINE regions in the table.
* @param tableName
* @return Online regions from <code>tableName</code>
*/
public List<HRegion> getOnlineRegions(byte[] tableName) {
List<HRegion> tableRegions = new ArrayList<HRegion>();
synchronized (this.onlineRegions) {
for (HRegion region: this.onlineRegions.values()) {
HRegionInfo regionInfo = region.getRegionInfo();
if(Bytes.equals(regionInfo.getTableName(), tableName)) {
tableRegions.add(region);
}
}
}
return tableRegions;
}
// used by org/apache/hbase/tmpl/regionserver/RSStatusTmpl.jamon (HBASE-4070).
public String[] getCoprocessors() {
// passing fake times to buildServerLoad is okay, because we only care about the coprocessor part.
HBaseProtos.ServerLoad sl = buildServerLoad(0, 0);
return sl == null? null:
new ServerLoad(sl).getRegionServerCoprocessors();
}
/**
* Register bean with platform management server
*/
void registerMBean() {
MXBeanImpl mxBeanInfo = MXBeanImpl.init(this);
mxBean = MBeanUtil.registerMBean("RegionServer", "RegionServer",
mxBeanInfo);
LOG.info("Registered RegionServer MXBean");
}
/**
* Instantiated as a row lock lease. If the lease times out, the row lock is
* released
*/
private class RowLockListener implements LeaseListener {
private final String lockName;
private final HRegion region;
RowLockListener(final String lockName, final HRegion region) {
this.lockName = lockName;
this.region = region;
}
public void leaseExpired() {
LOG.info("Row Lock " + this.lockName + " lease expired");
Integer r = rowlocks.remove(this.lockName);
if (r != null) {
region.releaseRowLock(r);
}
}
}
/**
* Instantiated as a scanner lease. If the lease times out, the scanner is
* closed
*/
private class ScannerListener implements LeaseListener {
private final String scannerName;
ScannerListener(final String n) {
this.scannerName = n;
}
public void leaseExpired() {
RegionScanner s = scanners.remove(this.scannerName);
if (s != null) {
LOG.info("Scanner " + this.scannerName + " lease expired on region "
+ s.getRegionInfo().getRegionNameAsString());
try {
HRegion region = getRegion(s.getRegionInfo().getRegionName());
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().preScannerClose(s);
}
s.close();
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postScannerClose(s);
}
} catch (IOException e) {
LOG.error("Closing scanner for "
+ s.getRegionInfo().getRegionNameAsString(), e);
}
} else {
LOG.info("Scanner " + this.scannerName + " lease expired");
}
}
}
/**
* Method to get the Integer lock identifier used internally from the long
* lock identifier used by the client.
*
* @param lockId
* long row lock identifier from client
* @return intId Integer row lock used internally in HRegion
* @throws IOException
* Thrown if this is not a valid client lock id.
*/
Integer getLockFromId(long lockId) throws IOException {
if (lockId == -1L) {
return null;
}
String lockName = String.valueOf(lockId);
Integer rl = rowlocks.get(lockName);
if (rl == null) {
throw new UnknownRowLockException("Invalid row lock");
}
this.leases.renewLease(lockName);
return rl;
}
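// Hedged usage sketch: a client lock id of -1L is the "no lock" sentinel and
// maps to a null internal lock; any other id must name a live row lock lease.
//
//   Integer noLock = getLockFromId(-1L);          // returns null
//   Integer held = getLockFromId(clientLockId);   // renews the lease, or throws
//                                                 // UnknownRowLockException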
/**
* Called to verify that this server is up and running.
*
* @throws IOException
*/
protected void checkOpen() throws IOException {
if (this.stopped || this.abortRequested) {
throw new RegionServerStoppedException("Server " + getServerName() +
" not running" + (this.abortRequested ? ", aborting" : ""));
}
if (!fsOk) {
throw new RegionServerStoppedException("File system not available");
}
}
protected void checkIfRegionInTransition(HRegionInfo region,
String currentAction) throws RegionAlreadyInTransitionException {
byte[] encodedName = region.getEncodedNameAsBytes();
if (this.regionsInTransitionInRS.containsKey(encodedName)) {
boolean openAction = this.regionsInTransitionInRS.get(encodedName);
// The below exception message will be used in master.
throw new RegionAlreadyInTransitionException("Received:" + currentAction +
" for the region:" + region.getRegionNameAsString() +
" ,which we are already trying to " +
(openAction ? OPEN : CLOSE)+ ".");
}
}
/**
* @param region Region to close
* @param abort True if we are aborting
* @param zk True if we are to update zk about the region close; if the close
* was orchestrated by master, then update zk. If the close is being run by
* the regionserver because it's going down, don't update zk.
* @return True if closed a region.
*/
protected boolean closeRegion(HRegionInfo region, final boolean abort,
final boolean zk) {
return closeRegion(region, abort, zk, -1, null);
}
/**
* @param region Region to close
* @param abort True if we are aborting
* @param zk True if we are to update zk about the region close; if the close
* was orchestrated by master, then update zk. If the close is being run by
* the regionserver because it's going down, don't update zk.
* @param versionOfClosingNode
* the version of znode to compare when RS transitions the znode from
* CLOSING state.
* @return True if closed a region.
*/
protected boolean closeRegion(HRegionInfo region, final boolean abort,
final boolean zk, final int versionOfClosingNode, ServerName sn) {
if (this.regionsInTransitionInRS.containsKey(region.getEncodedNameAsBytes())) {
LOG.warn("Received close for region we are already opening or closing; " +
region.getEncodedName());
return false;
}
this.regionsInTransitionInRS.putIfAbsent(region.getEncodedNameAsBytes(), false);
CloseRegionHandler crh = null;
if (region.isRootRegion()) {
crh = new CloseRootHandler(this, this, region, abort, zk,
versionOfClosingNode);
} else if (region.isMetaRegion()) {
crh = new CloseMetaHandler(this, this, region, abort, zk,
versionOfClosingNode);
} else {
crh = new CloseRegionHandler(this, this, region, abort, zk, versionOfClosingNode, sn);
}
this.service.submit(crh);
return true;
}
/**
* @param regionName
* @return HRegion for the passed binary <code>regionName</code> or null if
* named region is not member of the online regions.
*/
public HRegion getOnlineRegion(final byte[] regionName) {
String encodedRegionName = HRegionInfo.encodeRegionName(regionName);
return this.onlineRegions.get(encodedRegionName);
}
@Override
public HRegion getFromOnlineRegions(final String encodedRegionName) {
return this.onlineRegions.get(encodedRegionName);
}
@Override
public boolean removeFromOnlineRegions(final String encodedRegionName, ServerName destination) {
HRegion toReturn = this.onlineRegions.remove(encodedRegionName);
if (destination != null){
addToMovedRegions(encodedRegionName, destination);
}
// Clear all of the dynamic metrics as they are now probably useless.
// This is a clear because dynamic metrics could include metrics per cf and
// per hfile. Figuring out which cfs, hfiles, and regions are still relevant to
// this region server would be an onerous task. Instead just clear everything
// and on the next tick of the metrics everything that is still relevant will be
// re-added.
this.dynamicMetrics.clear();
return toReturn != null;
}
/**
* Protected utility method for safely obtaining an HRegion handle.
*
* @param regionName
* Name of online {@link HRegion} to return
* @return {@link HRegion} for <code>regionName</code>
* @throws NotServingRegionException
*/
protected HRegion getRegion(final byte[] regionName)
throws NotServingRegionException {
String encodedRegionName = HRegionInfo.encodeRegionName(regionName);
return getRegionByEncodedName(encodedRegionName);
}
protected HRegion getRegionByEncodedName(String encodedRegionName)
throws NotServingRegionException {
HRegion region = this.onlineRegions.get(encodedRegionName);
if (region == null) {
ServerName sn = getMovedRegion(encodedRegionName);
if (sn != null) {
throw new RegionMovedException(sn.getHostname(), sn.getPort());
} else {
throw new NotServingRegionException("Region is not online: " + encodedRegionName);
}
}
return region;
}
/*
* Cleanup after Throwable caught invoking method. Converts <code>t</code> to
* IOE if it isn't already.
*
* @param t Throwable
*
* @return Throwable converted to an IOE; methods can only let out IOEs.
*/
protected Throwable cleanup(final Throwable t) {
return cleanup(t, null);
}
/*
* Cleanup after Throwable caught invoking method. Converts <code>t</code> to
* IOE if it isn't already.
*
* @param t Throwable
*
* @param msg Message to log in error. Can be null.
*
* @return Throwable converted to an IOE; methods can only let out IOEs.
*/
protected Throwable cleanup(final Throwable t, final String msg) {
// Don't log as error if NSRE; NSRE is 'normal' operation.
if (t instanceof NotServingRegionException) {
LOG.debug("NotServingRegionException; " + t.getMessage());
return t;
}
if (msg == null) {
LOG.error("", RemoteExceptionHandler.checkThrowable(t));
} else {
LOG.error(msg, RemoteExceptionHandler.checkThrowable(t));
}
if (!checkOOME(t)) {
checkFileSystem();
}
return t;
}
/*
* @param t
*
* @return Make <code>t</code> an IOE if it isn't already.
*/
protected IOException convertThrowableToIOE(final Throwable t) {
return convertThrowableToIOE(t, null);
}
/*
* @param t
*
* @param msg Message to put in new IOE if passed <code>t</code> is not an IOE
*
* @return Make <code>t</code> an IOE if it isn't already.
*/
protected IOException convertThrowableToIOE(final Throwable t, final String msg) {
if (t instanceof IOException) {
return (IOException) t;
}
return (msg == null || msg.length() == 0) ?
new IOException(t) : new IOException(msg, t);
}
/*
* Check if an OOME and, if so, abort immediately to avoid creating more objects.
*
* @param e
*
* @return True if we OOME'd and are aborting.
*/
public boolean checkOOME(final Throwable e) {
boolean stop = false;
try {
if (e instanceof OutOfMemoryError
|| (e.getCause() != null && e.getCause() instanceof OutOfMemoryError)
|| (e.getMessage() != null && e.getMessage().contains(
"java.lang.OutOfMemoryError"))) {
stop = true;
LOG.fatal(
"Run out of memory; HRegionServer will abort itself immediately", e);
}
} finally {
if (stop) {
Runtime.getRuntime().halt(1);
}
}
return stop;
}
/**
* Checks to see if the file system is still accessible. If not, sets
* abortRequested and stopRequested
*
* @return false if file system is not available
*/
public boolean checkFileSystem() {
if (this.fsOk && this.fs != null) {
try {
FSUtils.checkFileSystemAvailable(this.fs);
} catch (IOException e) {
abort("File System not available", e);
this.fsOk = false;
}
}
return this.fsOk;
}
protected long addRowLock(Integer r, HRegion region) throws LeaseStillHeldException {
String lockName = null;
long lockId;
do {
lockId = nextLong();
lockName = String.valueOf(lockId);
} while (rowlocks.putIfAbsent(lockName, r) != null);
this.leases.createLease(lockName, this.rowLockLeaseTimeoutPeriod, new RowLockListener(lockName,
region));
return lockId;
}
protected long addScanner(RegionScanner s) throws LeaseStillHeldException {
long scannerId = nextLong();
String scannerName = String.valueOf(scannerId);
scanners.put(scannerName, s);
this.leases.createLease(scannerName, this.scannerLeaseTimeoutPeriod, new ScannerListener(
scannerName));
return scannerId;
}
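// Hedged sketch of the lease pattern used by addRowLock/addScanner: every row
// lock and scanner is paired with a lease whose listener releases the resource
// when the lease expires.
//
//   long id = addScanner(scanner);            // registers scanner + lease
//   // ... each scan() round trip removes and re-adds the lease ...
//   leases.cancelLease(String.valueOf(id));   // explicit close cancels it;
//                                             // otherwise ScannerListener fires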
/**
* Generate a random positive long number
*
* @return a random positive long number
*/
protected long nextLong() {
long n = rand.nextLong();
if (n == 0 || n == Long.MIN_VALUE) {
// 0 is not a usable id, and negating Long.MIN_VALUE overflows back to
// itself, so draw again in both cases.
return nextLong();
}
return n < 0 ? -n : n;
}
// Start Client methods
/**
* Get data from a table.
*
* @param controller the RPC controller
* @param request the get request
* @throws ServiceException
*/
@Override
public GetResponse get(final RpcController controller,
final GetRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
GetResponse.Builder builder = GetResponse.newBuilder();
ClientProtos.Get get = request.getGet();
Boolean existence = null;
Result r = null;
if (request.getClosestRowBefore()) {
if (get.getColumnCount() != 1) {
throw new DoNotRetryIOException(
"get ClosestRowBefore supports one and only one family now, not "
+ get.getColumnCount() + " families");
}
byte[] row = get.getRow().toByteArray();
byte[] family = get.getColumn(0).getFamily().toByteArray();
r = region.getClosestRowBefore(row, family);
} else {
Get clientGet = ProtobufUtil.toGet(get);
if (request.getExistenceOnly() && region.getCoprocessorHost() != null) {
existence = region.getCoprocessorHost().preExists(clientGet);
}
if (existence == null) {
Integer lock = getLockFromId(clientGet.getLockId());
r = region.get(clientGet, lock);
if (request.getExistenceOnly()) {
boolean exists = r != null && !r.isEmpty();
if (region.getCoprocessorHost() != null) {
exists = region.getCoprocessorHost().postExists(clientGet, exists);
}
existence = Boolean.valueOf(exists);
}
}
}
if (existence != null) {
builder.setExists(existence.booleanValue());
} else if (r != null) {
builder.setResult(ProtobufUtil.toResult(r));
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Mutate data in a table.
*
* @param controller the RPC controller
* @param request the mutate request
* @throws ServiceException
*/
@Override
public MutateResponse mutate(final RpcController controller,
final MutateRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
MutateResponse.Builder builder = MutateResponse.newBuilder();
Mutate mutate = request.getMutate();
if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
Integer lock = null;
Result r = null;
Boolean processed = null;
MutateType type = mutate.getMutateType();
switch (type) {
case APPEND:
r = append(region, mutate);
break;
case INCREMENT:
r = increment(region, mutate);
break;
case PUT:
Put put = ProtobufUtil.toPut(mutate);
lock = getLockFromId(put.getLockId());
if (request.hasCondition()) {
Condition condition = request.getCondition();
byte[] row = condition.getRow().toByteArray();
byte[] family = condition.getFamily().toByteArray();
byte[] qualifier = condition.getQualifier().toByteArray();
CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name());
WritableByteArrayComparable comparator =
(WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator());
if (region.getCoprocessorHost() != null) {
processed = region.getCoprocessorHost().preCheckAndPut(
row, family, qualifier, compareOp, comparator, put);
}
if (processed == null) {
boolean result = region.checkAndMutate(row, family,
qualifier, compareOp, comparator, put, lock, true);
if (region.getCoprocessorHost() != null) {
result = region.getCoprocessorHost().postCheckAndPut(row, family,
qualifier, compareOp, comparator, put, result);
}
processed = Boolean.valueOf(result);
}
} else {
region.put(put, lock);
processed = Boolean.TRUE;
}
break;
case DELETE:
Delete delete = ProtobufUtil.toDelete(mutate);
lock = getLockFromId(delete.getLockId());
if (request.hasCondition()) {
Condition condition = request.getCondition();
byte[] row = condition.getRow().toByteArray();
byte[] family = condition.getFamily().toByteArray();
byte[] qualifier = condition.getQualifier().toByteArray();
CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name());
WritableByteArrayComparable comparator =
(WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator());
if (region.getCoprocessorHost() != null) {
processed = region.getCoprocessorHost().preCheckAndDelete(
row, family, qualifier, compareOp, comparator, delete);
}
if (processed == null) {
boolean result = region.checkAndMutate(row, family,
qualifier, compareOp, comparator, delete, lock, true);
if (region.getCoprocessorHost() != null) {
result = region.getCoprocessorHost().postCheckAndDelete(row, family,
qualifier, compareOp, comparator, delete, result);
}
processed = Boolean.valueOf(result);
}
} else {
region.delete(delete, lock, delete.getWriteToWAL());
processed = Boolean.TRUE;
}
break;
default:
throw new DoNotRetryIOException(
"Unsupported mutate type: " + type.name());
}
if (processed != null) {
builder.setProcessed(processed.booleanValue());
} else if (r != null) {
builder.setResult(ProtobufUtil.toResult(r));
}
return builder.build();
} catch (IOException ie) {
checkFileSystem();
throw new ServiceException(ie);
}
}
//
// remote scanner interface
//
/**
* Scan data in a table.
*
* @param controller the RPC controller
* @param request the scan request
* @throws ServiceException
*/
@Override
public ScanResponse scan(final RpcController controller,
final ScanRequest request) throws ServiceException {
Leases.Lease lease = null;
String scannerName = null;
try {
if (!request.hasScannerId() && !request.hasScan()) {
throw new DoNotRetryIOException(
"Missing required input: scannerId or scan");
}
long scannerId = -1;
if (request.hasScannerId()) {
scannerId = request.getScannerId();
scannerName = String.valueOf(scannerId);
}
try {
checkOpen();
} catch (IOException e) {
// If checkOpen failed, the server is not running or the filesystem is gone;
// cancel this scanner's lease since we're closing anyway.
if (scannerName != null) {
try {
leases.cancelLease(scannerName);
} catch (LeaseException le) {
LOG.info("Server shutting down and client tried to access missing scanner " +
scannerName);
}
}
throw e;
}
requestCount.incrementAndGet();
try {
int ttl = 0;
HRegion region = null;
RegionScanner scanner = null;
boolean moreResults = true;
boolean closeScanner = false;
ScanResponse.Builder builder = ScanResponse.newBuilder();
if (request.hasCloseScanner()) {
closeScanner = request.getCloseScanner();
}
int rows = 1;
if (request.hasNumberOfRows()) {
rows = request.getNumberOfRows();
}
if (request.hasScannerId()) {
scanner = scanners.get(scannerName);
if (scanner == null) {
throw new UnknownScannerException(
"Name: " + scannerName + ", already closed?");
}
region = getRegion(scanner.getRegionInfo().getRegionName());
} else {
region = getRegion(request.getRegion());
ClientProtos.Scan protoScan = request.getScan();
Scan scan = ProtobufUtil.toScan(protoScan);
region.prepareScanner(scan);
if (region.getCoprocessorHost() != null) {
scanner = region.getCoprocessorHost().preScannerOpen(scan);
}
if (scanner == null) {
scanner = region.getScanner(scan);
}
if (region.getCoprocessorHost() != null) {
scanner = region.getCoprocessorHost().postScannerOpen(scan, scanner);
}
scannerId = addScanner(scanner);
scannerName = String.valueOf(scannerId);
ttl = this.scannerLeaseTimeoutPeriod;
}
if (rows > 0) {
try {
// Remove lease while it's being processed in server; protects against the case
// where processing of the request takes longer than the lease expiration time.
lease = leases.removeLease(scannerName);
List<Result> results = new ArrayList<Result>(rows);
long currentScanResultSize = 0;
boolean done = false;
// Call coprocessor. Get region info from scanner.
if (region != null && region.getCoprocessorHost() != null) {
Boolean bypass = region.getCoprocessorHost().preScannerNext(
scanner, results, rows);
if (!results.isEmpty()) {
for (Result r : results) {
for (KeyValue kv : r.raw()) {
currentScanResultSize += kv.heapSize();
}
}
}
if (bypass != null && bypass.booleanValue()) {
done = true;
}
}
if (!done) {
long maxResultSize = scanner.getMaxResultSize();
if (maxResultSize <= 0) {
maxResultSize = maxScannerResultSize;
}
List<KeyValue> values = new ArrayList<KeyValue>();
for (int i = 0; i < rows
&& currentScanResultSize < maxResultSize; i++) {
// Collect values to be returned here
boolean moreRows = scanner.next(values, SchemaMetrics.METRIC_NEXTSIZE);
if (!values.isEmpty()) {
for (KeyValue kv : values) {
currentScanResultSize += kv.heapSize();
}
results.add(new Result(values));
}
if (!moreRows) {
break;
}
values.clear();
}
// coprocessor postNext hook
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postScannerNext(scanner, results, rows, true);
}
}
// If the scanner's filter, if any, is done with the scan, tell the client to
// stop the scan by passing a null result and setting moreResults to false.
if (scanner.isFilterDone() && results.isEmpty()) {
moreResults = false;
results = null;
} else {
for (Result result: results) {
if (result != null) {
builder.addResult(ProtobufUtil.toResult(result));
}
}
}
} finally {
// We're done. On the way out, re-add the lease removed above.
// Adding it back resets the lease's expiration time.
if (scanners.containsKey(scannerName)) {
if (lease != null) leases.addLease(lease);
ttl = this.scannerLeaseTimeoutPeriod;
}
}
}
if (!moreResults || closeScanner) {
ttl = 0;
moreResults = false;
if (region != null && region.getCoprocessorHost() != null) {
if (region.getCoprocessorHost().preScannerClose(scanner)) {
return builder.build(); // bypass
}
}
scanner = scanners.remove(scannerName);
if (scanner != null) {
scanner.close();
leases.cancelLease(scannerName);
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postScannerClose(scanner);
}
}
}
if (ttl > 0) {
builder.setTtl(ttl);
}
builder.setScannerId(scannerId);
builder.setMoreResults(moreResults);
return builder.build();
} catch (Throwable t) {
if (scannerName != null &&
t instanceof NotServingRegionException) {
scanners.remove(scannerName);
}
throw convertThrowableToIOE(cleanup(t));
}
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Lock a row in a table.
*
* @param controller the RPC controller
* @param request the lock row request
* @throws ServiceException
*/
@Override
public LockRowResponse lockRow(final RpcController controller,
final LockRowRequest request) throws ServiceException {
try {
if (request.getRowCount() != 1) {
throw new DoNotRetryIOException(
"lockRow supports only one row now, not " + request.getRowCount() + " rows");
}
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
byte[] row = request.getRow(0).toByteArray();
try {
Integer r = region.obtainRowLock(row);
long lockId = addRowLock(r, region);
LOG.debug("Row lock " + lockId + " explicitly acquired by client");
LockRowResponse.Builder builder = LockRowResponse.newBuilder();
builder.setLockId(lockId);
return builder.build();
} catch (Throwable t) {
throw convertThrowableToIOE(cleanup(t,
"Error obtaining row lock (fsOk: " + this.fsOk + ")"));
}
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Unlock a locked row in a table.
*
* @param controller the RPC controller
* @param request the unlock row request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public UnlockRowResponse unlockRow(final RpcController controller,
final UnlockRowRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
if (!request.hasLockId()) {
throw new DoNotRetryIOException(
"Invalid unlock rowrequest, missing lock id");
}
long lockId = request.getLockId();
String lockName = String.valueOf(lockId);
try {
Integer r = rowlocks.remove(lockName);
if (r == null) {
throw new UnknownRowLockException(lockName);
}
region.releaseRowLock(r);
this.leases.cancelLease(lockName);
LOG.debug("Row lock " + lockId
+ " has been explicitly released by client");
return UnlockRowResponse.newBuilder().build();
} catch (Throwable t) {
throw convertThrowableToIOE(cleanup(t));
}
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Atomically bulk load several HFiles into an open region
* @return true if successful, false if failed recoverably (no action taken)
* @throws IOException if failed unrecoverably
*/
@Override
public BulkLoadHFileResponse bulkLoadHFile(final RpcController controller,
final BulkLoadHFileRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
List<Pair<byte[], String>> familyPaths = new ArrayList<Pair<byte[], String>>();
for (FamilyPath familyPath: request.getFamilyPathList()) {
familyPaths.add(new Pair<byte[], String>(familyPath.getFamily().toByteArray(),
familyPath.getPath()));
}
boolean bypass = false;
if (region.getCoprocessorHost() != null) {
bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
}
boolean loaded = false;
if (!bypass) {
loaded = region.bulkLoadHFiles(familyPaths);
}
if (region.getCoprocessorHost() != null) {
loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded);
}
BulkLoadHFileResponse.Builder builder = BulkLoadHFileResponse.newBuilder();
builder.setLoaded(loaded);
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol}
* method using the registered protocol handlers.
* {@link CoprocessorProtocol} implementations must be registered per-region
* via the
* {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)}
* method before they are available.
*
* @param regionName name of the region against which the invocation is executed
* @param call an {@code Exec} instance identifying the protocol, method name,
* and parameters for the method invocation
* @return an {@code ExecResult} instance containing the region name of the
* invocation and the return value
* @throws IOException if no registered protocol handler is found or an error
* occurs during the invocation
* @see org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)
*/
@Override
public ExecCoprocessorResponse execCoprocessor(final RpcController controller,
final ExecCoprocessorRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
ExecCoprocessorResponse.Builder
builder = ExecCoprocessorResponse.newBuilder();
ClientProtos.Exec call = request.getCall();
Exec clientCall = ProtobufUtil.toExec(call);
ExecResult result = region.exec(clientCall);
builder.setValue(ProtobufUtil.toParameter(result.getValue()));
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Execute multiple actions on a table: get, mutate, and/or execCoprocessor
*
* @param controller the RPC controller
* @param request the multi request
* @throws ServiceException
*/
@Override
public MultiResponse multi(final RpcController controller,
final MultiRequest request) throws ServiceException {
try {
HRegion region = getRegion(request.getRegion());
MultiResponse.Builder builder = MultiResponse.newBuilder();
if (request.hasAtomic() && request.getAtomic()) {
List<Mutate> mutates = new ArrayList<Mutate>();
for (ClientProtos.MultiAction actionUnion : request.getActionList()) {
if (actionUnion.hasMutate()) {
mutates.add(actionUnion.getMutate());
} else {
throw new DoNotRetryIOException(
"Unsupported atomic action type: " + actionUnion);
}
}
mutateRows(region, mutates);
} else {
ActionResult.Builder resultBuilder = null;
List<Mutate> mutates = new ArrayList<Mutate>();
for (ClientProtos.MultiAction actionUnion : request.getActionList()) {
requestCount.incrementAndGet();
try {
Object result = null;
if (actionUnion.hasGet()) {
Get get = ProtobufUtil.toGet(actionUnion.getGet());
Integer lock = getLockFromId(get.getLockId());
Result r = region.get(get, lock);
if (r != null) {
result = ProtobufUtil.toResult(r);
}
} else if (actionUnion.hasMutate()) {
Mutate mutate = actionUnion.getMutate();
MutateType type = mutate.getMutateType();
if (type != MutateType.PUT && type != MutateType.DELETE) {
if (!mutates.isEmpty()) {
doBatchOp(builder, region, mutates);
mutates.clear();
} else if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
}
Result r = null;
switch (type) {
case APPEND:
r = append(region, mutate);
break;
case INCREMENT:
r = increment(region, mutate);
break;
case PUT:
mutates.add(mutate);
break;
case DELETE:
mutates.add(mutate);
break;
default:
throw new DoNotRetryIOException("Unsupported mutate type: " + type.name());
}
if (r != null) {
result = ProtobufUtil.toResult(r);
}
} else if (actionUnion.hasExec()) {
Exec call = ProtobufUtil.toExec(actionUnion.getExec());
result = region.exec(call).getValue();
} else {
LOG.warn("Error: invalid action: " + actionUnion + ". "
+ "it must be a Get, Mutate, or Exec.");
throw new DoNotRetryIOException("Invalid action, "
+ "it must be a Get, Mutate, or Exec.");
}
if (result != null) {
if (resultBuilder == null) {
resultBuilder = ActionResult.newBuilder();
} else {
resultBuilder.clear();
}
NameBytesPair value = ProtobufUtil.toParameter(result);
resultBuilder.setValue(value);
builder.addResult(resultBuilder.build());
}
} catch (IOException ie) {
builder.addResult(ResponseConverter.buildActionResult(ie));
}
}
if (!mutates.isEmpty()) {
doBatchOp(builder, region, mutates);
}
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
// End Client methods
// Start Admin methods
@Override
@QosPriority(priority=HIGH_QOS)
public GetRegionInfoResponse getRegionInfo(final RpcController controller,
final GetRegionInfoRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
HRegionInfo info = region.getRegionInfo();
GetRegionInfoResponse.Builder builder = GetRegionInfoResponse.newBuilder();
builder.setRegionInfo(HRegionInfo.convert(info));
if (request.hasCompactionState() && request.getCompactionState()) {
builder.setCompactionState(
CompactionRequest.getCompactionState(info.getRegionId()));
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
public GetStoreFileResponse getStoreFile(final RpcController controller,
final GetStoreFileRequest request) throws ServiceException {
try {
HRegion region = getRegion(request.getRegion());
requestCount.incrementAndGet();
Set<byte[]> columnFamilies = null;
if (request.getFamilyCount() == 0) {
columnFamilies = region.getStores().keySet();
} else {
columnFamilies = new HashSet<byte[]>();
for (ByteString cf: request.getFamilyList()) {
columnFamilies.add(cf.toByteArray());
}
}
int nCF = columnFamilies.size();
List<String> fileList = region.getStoreFileList(
columnFamilies.toArray(new byte[nCF][]));
GetStoreFileResponse.Builder builder = GetStoreFileResponse.newBuilder();
builder.addAllStoreFile(fileList);
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
@QosPriority(priority=HIGH_QOS)
public GetOnlineRegionResponse getOnlineRegion(final RpcController controller,
final GetOnlineRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
List<HRegionInfo> list = new ArrayList<HRegionInfo>(onlineRegions.size());
for (HRegion region: this.onlineRegions.values()) {
list.add(region.getRegionInfo());
}
Collections.sort(list);
return ResponseConverter.buildGetOnlineRegionResponse(list);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
// Region open/close direct RPCs
/**
* Open a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public OpenRegionResponse openRegion(final RpcController controller, final OpenRegionRequest request)
throws ServiceException {
int versionOfOfflineNode = -1;
if (request.hasVersionOfOfflineNode()) {
versionOfOfflineNode = request.getVersionOfOfflineNode();
}
try {
checkOpen();
} catch (IOException ie) {
throw new ServiceException(ie);
}
requestCount.incrementAndGet();
OpenRegionResponse.Builder builder = OpenRegionResponse.newBuilder();
Map<String, HTableDescriptor> htds = new HashMap<String, HTableDescriptor>(
request.getRegionList().size());
boolean isBulkAssign = request.getRegionList().size() > 1;
for (RegionInfo regionInfo : request.getRegionList()) {
HRegionInfo region = HRegionInfo.convert(regionInfo);
try {
checkIfRegionInTransition(region, OPEN);
HRegion onlineRegion = getFromOnlineRegions(region.getEncodedName());
if (null != onlineRegion) {
// See HBASE-5094. Cross-check with META whether this RS still owns
// the region.
Pair<HRegionInfo, ServerName> p = MetaReader.getRegion(
this.catalogTracker, region.getRegionName());
if (this.getServerName().equals(p.getSecond())) {
LOG.warn("Attempted open of " + region.getEncodedName()
+ " but already online on this server");
builder.addOpeningState(RegionOpeningState.ALREADY_OPENED);
continue;
} else {
LOG.warn("The region " + region.getEncodedName()
+ " is online on this server but META does not have this server.");
removeFromOnlineRegions(region.getEncodedName(), null);
}
}
LOG.info("Received request to open region: " + region.getRegionNameAsString() + " on "
+ this.serverNameFromMasterPOV);
HTableDescriptor htd = htds.get(region.getTableNameAsString());
if (htd == null) {
htd = this.tableDescriptors.get(region.getTableName());
htds.put(region.getTableNameAsString(), htd);
}
this.regionsInTransitionInRS.putIfAbsent(
region.getEncodedNameAsBytes(), true);
// Need to pass the expected version in the constructor.
if (region.isRootRegion()) {
this.service.submit(new OpenRootHandler(this, this, region, htd,
versionOfOfflineNode));
} else if (region.isMetaRegion()) {
this.service.submit(new OpenMetaHandler(this, this, region, htd,
versionOfOfflineNode));
} else {
this.service.submit(new OpenRegionHandler(this, this, region, htd,
versionOfOfflineNode));
}
builder.addOpeningState(RegionOpeningState.OPENED);
} catch (RegionAlreadyInTransitionException rie) {
LOG.warn("Region is already in transition", rie);
if (isBulkAssign) {
builder.addOpeningState(RegionOpeningState.OPENED);
} else {
throw new ServiceException(rie);
}
} catch (IOException ie) {
LOG.warn("Failed opening region " + region.getRegionNameAsString(), ie);
if (isBulkAssign) {
builder.addOpeningState(RegionOpeningState.FAILED_OPENING);
} else {
throw new ServiceException(ie);
}
}
}
return builder.build();
}
/**
* Close a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public CloseRegionResponse closeRegion(final RpcController controller,
final CloseRegionRequest request) throws ServiceException {
int versionOfClosingNode = -1;
if (request.hasVersionOfClosingNode()) {
versionOfClosingNode = request.getVersionOfClosingNode();
}
boolean zk = request.getTransitionInZK();
final ServerName sn = (request.hasDestinationServer() ?
ProtobufUtil.toServerName(request.getDestinationServer()) : null);
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
CloseRegionResponse.Builder
builder = CloseRegionResponse.newBuilder();
LOG.info("Received close region: " + region.getRegionNameAsString() +
". Version of ZK closing node:" + versionOfClosingNode +
". Destination server:" + sn);
HRegionInfo regionInfo = region.getRegionInfo();
checkIfRegionInTransition(regionInfo, CLOSE);
boolean closed = closeRegion(
regionInfo, false, zk, versionOfClosingNode, sn);
builder.setClosed(closed);
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Flush a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public FlushRegionResponse flushRegion(final RpcController controller,
final FlushRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
LOG.info("Flushing " + region.getRegionNameAsString());
boolean shouldFlush = true;
if (request.hasIfOlderThanTs()) {
shouldFlush = region.getLastFlushTime() < request.getIfOlderThanTs();
}
FlushRegionResponse.Builder builder = FlushRegionResponse.newBuilder();
if (shouldFlush) {
builder.setFlushed(region.flushcache());
}
builder.setLastFlushTime(region.getLastFlushTime());
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Split a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public SplitRegionResponse splitRegion(final RpcController controller,
final SplitRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
LOG.info("Splitting " + region.getRegionNameAsString());
region.flushcache();
byte[] splitPoint = null;
if (request.hasSplitPoint()) {
splitPoint = request.getSplitPoint().toByteArray();
}
region.forceSplit(splitPoint);
compactSplitThread.requestSplit(region, region.checkSplit());
return SplitRegionResponse.newBuilder().build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Compact a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public CompactRegionResponse compactRegion(final RpcController controller,
final CompactRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
LOG.info("Compacting " + region.getRegionNameAsString());
boolean major = false;
if (request.hasMajor()) {
major = request.getMajor();
}
if (major) {
region.triggerMajorCompaction();
}
LOG.trace("User-triggered compaction requested for region " +
region.getRegionNameAsString());
compactSplitThread.requestCompaction(region,
"User-triggered " + (major ? "major " : "") + "compaction",
Store.PRIORITY_USER);
return CompactRegionResponse.newBuilder().build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Replicate WAL entries on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public ReplicateWALEntryResponse replicateWALEntry(final RpcController controller,
final ReplicateWALEntryRequest request) throws ServiceException {
try {
if (replicationSinkHandler != null) {
checkOpen();
requestCount.incrementAndGet();
HLog.Entry[] entries = ProtobufUtil.toHLogEntries(request.getEntryList());
if (entries != null && entries.length > 0) {
replicationSinkHandler.replicateLogEntries(entries);
}
}
return ReplicateWALEntryResponse.newBuilder().build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Roll the WAL writer of the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
public RollWALWriterResponse rollWALWriter(final RpcController controller,
final RollWALWriterRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HLog wal = this.getWAL();
byte[][] regionsToFlush = wal.rollWriter(true);
RollWALWriterResponse.Builder builder = RollWALWriterResponse.newBuilder();
if (regionsToFlush != null) {
for (byte[] region: regionsToFlush) {
builder.addRegionToFlush(ByteString.copyFrom(region));
}
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Stop the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
public StopServerResponse stopServer(final RpcController controller,
final StopServerRequest request) throws ServiceException {
requestCount.incrementAndGet();
String reason = request.getReason();
stop(reason);
return StopServerResponse.newBuilder().build();
}
/**
* Get some information of the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
public GetServerInfoResponse getServerInfo(final RpcController controller,
final GetServerInfoRequest request) throws ServiceException {
ServerName serverName = getServerName();
requestCount.incrementAndGet();
return ResponseConverter.buildGetServerInfoResponse(serverName, webuiport);
}
// End Admin methods
/**
* Find the HRegion based on a region specifier
*
* @param regionSpecifier the region specifier
* @return the corresponding region
* @throws IOException if the specifier is not null,
* but the region could not be found
*/
protected HRegion getRegion(
final RegionSpecifier regionSpecifier) throws IOException {
byte[] value = regionSpecifier.getValue().toByteArray();
RegionSpecifierType type = regionSpecifier.getType();
checkOpen();
switch (type) {
case REGION_NAME:
return getRegion(value);
case ENCODED_REGION_NAME:
return getRegionByEncodedName(Bytes.toString(value));
default:
throw new DoNotRetryIOException(
"Unsupported region specifier type: " + type);
}
}
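// Hedged sketch: building the protobuf RegionSpecifier consumed above; the
// encoded-name variant is resolved through getRegionByEncodedName.
//
//   RegionSpecifier spec = RegionSpecifier.newBuilder()
//     .setType(RegionSpecifierType.ENCODED_REGION_NAME)
//     .setValue(ByteString.copyFromUtf8(encodedRegionName))
//     .build();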
/**
* Execute an append mutation.
*
* @param region
* @param mutate
* @return the Result of the append operation
* @throws IOException
*/
protected Result append(final HRegion region,
final Mutate mutate) throws IOException {
Append append = ProtobufUtil.toAppend(mutate);
Result r = null;
if (region.getCoprocessorHost() != null) {
r = region.getCoprocessorHost().preAppend(append);
}
if (r == null) {
Integer lock = getLockFromId(append.getLockId());
r = region.append(append, lock, append.getWriteToWAL());
if (region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postAppend(append, r);
}
}
return r;
}
/**
* Execute an increment mutation.
*
* @param region
* @param mutate
* @return the Result of the increment operation
* @throws IOException
*/
protected Result increment(final HRegion region,
final Mutate mutate) throws IOException {
Increment increment = ProtobufUtil.toIncrement(mutate);
Result r = null;
if (region.getCoprocessorHost() != null) {
r = region.getCoprocessorHost().preIncrement(increment);
}
if (r == null) {
Integer lock = getLockFromId(increment.getLockId());
r = region.increment(increment, lock, increment.getWriteToWAL());
if (region.getCoprocessorHost() != null) {
r = region.getCoprocessorHost().postIncrement(increment, r);
}
}
return r;
}
/**
* Execute a list of Put/Delete mutations.
*
* @param builder
* @param region
* @param mutates
*/
protected void doBatchOp(final MultiResponse.Builder builder,
final HRegion region, final List<Mutate> mutates) {
@SuppressWarnings("unchecked")
Pair<Mutation, Integer>[] mutationsWithLocks = new Pair[mutates.size()];
try {
ActionResult.Builder resultBuilder = ActionResult.newBuilder();
NameBytesPair value = ProtobufUtil.toParameter(new Result());
resultBuilder.setValue(value);
ActionResult result = resultBuilder.build();
int i = 0;
for (Mutate m : mutates) {
Mutation mutation = null;
if (m.getMutateType() == MutateType.PUT) {
mutation = ProtobufUtil.toPut(m);
} else {
mutation = ProtobufUtil.toDelete(m);
}
Integer lock = getLockFromId(mutation.getLockId());
mutationsWithLocks[i++] = new Pair<Mutation, Integer>(mutation, lock);
builder.addResult(result);
}
requestCount.addAndGet(mutates.size());
if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
OperationStatus codes[] = region.batchMutate(mutationsWithLocks);
for (i = 0; i < codes.length; i++) {
if (codes[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {
result = ResponseConverter.buildActionResult(
new DoNotRetryIOException(codes[i].getExceptionMsg()));
builder.setResult(i, result);
}
}
} catch (IOException ie) {
ActionResult result = ResponseConverter.buildActionResult(ie);
for (int i = 0, n = mutates.size(); i < n; i++) {
builder.setResult(i, result);
}
}
}
/**
* Mutate a list of rows atomically.
*
* @param region
* @param mutates
* @throws IOException
*/
protected void mutateRows(final HRegion region,
final List<Mutate> mutates) throws IOException {
Mutate firstMutate = mutates.get(0);
if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
byte[] row = firstMutate.getRow().toByteArray();
RowMutations rm = new RowMutations(row);
for (Mutate mutate: mutates) {
MutateType type = mutate.getMutateType();
switch (type) {
case PUT:
rm.add(ProtobufUtil.toPut(mutate));
break;
case DELETE:
rm.add(ProtobufUtil.toDelete(mutate));
break;
default:
throw new DoNotRetryIOException(
"mutate supports atomic put and/or delete, not "
+ type.name());
}
}
region.mutateRow(rm);
}
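// Hedged client-side sketch of what arrives here: a RowMutations carrying puts
// and deletes against a single row, applied atomically. Assumes a configured
// HTable named "table"; the family/qualifier names are illustrative only.
//
//   byte[] row = Bytes.toBytes("row1");
//   RowMutations rm = new RowMutations(row);
//   Put p = new Put(row);
//   p.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("v1"));
//   rm.add(p);
//   Delete d = new Delete(row);
//   d.deleteColumns(Bytes.toBytes("cf"), Bytes.toBytes("q2"));
//   rm.add(d);
//   table.mutateRow(rm);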
// This map contains all the regions that we closed as part of a move.
// We record the time of each move so that we don't keep stale information around.
protected Map<String, Pair<Long, ServerName>> movedRegions =
new ConcurrentHashMap<String, Pair<Long, ServerName>>(3000);
// We need a timeout; otherwise there is a risk of handing out wrong information,
// which would double the number of network calls instead of reducing them.
private static final int TIMEOUT_REGION_MOVED = (2 * 60 * 1000);
protected void addToMovedRegions(HRegionInfo hri, ServerName destination){
addToMovedRegions(hri.getEncodedName(), destination);
}
protected void addToMovedRegions(String encodedName, ServerName destination){
final Long time = System.currentTimeMillis();
movedRegions.put(
encodedName,
new Pair<Long, ServerName>(time, destination));
}
private ServerName getMovedRegion(final String encodedRegionName) {
Pair<Long, ServerName> dest = movedRegions.get(encodedRegionName);
if (dest != null) {
if (dest.getFirst() > (System.currentTimeMillis() - TIMEOUT_REGION_MOVED)) {
return dest.getSecond();
} else {
movedRegions.remove(encodedRegionName);
}
}
return null;
}
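// Hedged sketch of the moved-region flow: when a region is closed for a move,
// its destination is cached here so that a stale client gets a
// RegionMovedException pointing at the new server instead of a plain
// NotServingRegionException (see getRegionByEncodedName).
//
//   addToMovedRegions(hri, destination);              // on close-for-move
//   ServerName sn = getMovedRegion(hri.getEncodedName());
//   if (sn != null) {
//     throw new RegionMovedException(sn.getHostname(), sn.getPort());
//   }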
/**
* Remove the expired entries from the moved regions list.
*/
protected void cleanMovedRegions(){
final long cutOff = System.currentTimeMillis() - TIMEOUT_REGION_MOVED;
Iterator<Entry<String, Pair<Long, ServerName>>> it = movedRegions.entrySet().iterator();
while (it.hasNext()){
Map.Entry<String, Pair<Long, ServerName>> e = it.next();
if (e.getValue().getFirst() < cutOff){
it.remove();
}
}
}
/**
* Creates a Chore thread to clean the moved region cache.
*/
protected static class MovedRegionsCleaner extends Chore implements Stoppable {
private HRegionServer regionServer;
Stoppable stoppable;
private MovedRegionsCleaner(
HRegionServer regionServer, Stoppable stoppable){
super("MovedRegionsCleaner for region "+regionServer, TIMEOUT_REGION_MOVED, stoppable);
this.regionServer = regionServer;
this.stoppable = stoppable;
}
static MovedRegionsCleaner createAndStart(HRegionServer rs){
Stoppable stoppable = new Stoppable() {
private volatile boolean isStopped = false;
@Override public void stop(String why) { isStopped = true;}
@Override public boolean isStopped() {return isStopped;}
};
return new MovedRegionsCleaner(rs, stoppable);
}
@Override
protected void chore() {
regionServer.cleanMovedRegions();
}
@Override
public void stop(String why) {
stoppable.stop(why);
}
@Override
public boolean isStopped() {
return stoppable.isStopped();
}
}
private String getMyEphemeralNodePath() {
return ZKUtil.joinZNode(this.zooKeeper.rsZNode, getServerName().toString());
}
}
| hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java | /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.io.StringWriter;
import java.lang.Thread.UncaughtExceptionHandler;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.management.ObjectName;
import org.apache.commons.lang.mutable.MutableDouble;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.ClockOutOfSyncException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionMovedException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.UnknownRowLockException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.YouAreDeadException;
import org.apache.hadoop.hbase.ZNodeClearer;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.catalog.MetaEditor;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.AdminProtocol;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.ClientProtocol;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.MultiAction;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.Exec;
import org.apache.hadoop.hbase.client.coprocessor.ExecResult;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.ipc.HBaseRPC;
import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
import org.apache.hadoop.hbase.ipc.HBaseRpcMetrics;
import org.apache.hadoop.hbase.ipc.Invocation;
import org.apache.hadoop.hbase.ipc.ProtocolSignature;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody;
import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler;
import org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler;
import org.apache.hadoop.hbase.regionserver.handler.CloseRootHandler;
import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;
import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
import org.apache.hadoop.hbase.regionserver.handler.OpenRootHandler;
import org.apache.hadoop.hbase.regionserver.metrics.RegionMetricsStorage;
import org.apache.hadoop.hbase.regionserver.metrics.RegionServerDynamicMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.RegionServerMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.StoreMetricType;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.InfoServer;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Sleeper;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.hbase.zookeeper.ClusterStatusTracker;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
import org.apache.hadoop.hbase.zookeeper.RootRegionTracker;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperNodeTracker;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.metrics.util.MBeanUtil;
import org.apache.hadoop.net.DNS;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.zookeeper.KeeperException;
import org.codehaus.jackson.map.ObjectMapper;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;
import org.apache.hadoop.hbase.RegionServerStatusProtocol;
import com.google.common.base.Function;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.RpcController;
/**
* HRegionServer makes a set of HRegions available to clients. It checks in with
* the HMaster. There are many HRegionServers in a single HBase deployment.
*/
@InterfaceAudience.Private
@SuppressWarnings("deprecation")
public class HRegionServer implements ClientProtocol,
AdminProtocol, Runnable, RegionServerServices, HBaseRPCErrorHandler {
public static final Log LOG = LogFactory.getLog(HRegionServer.class);
private final Random rand = new Random();
/*
* Strings to be used in forming the exception message for
* RegionsAlreadyInTransitionException.
*/
protected static final String OPEN = "OPEN";
protected static final String CLOSE = "CLOSE";
  // RegionName -> current action in progress:
  // true  - an open-region action is in progress
  // false - a close-region action is in progress
protected final ConcurrentSkipListMap<byte[], Boolean> regionsInTransitionInRS =
new ConcurrentSkipListMap<byte[], Boolean>(Bytes.BYTES_COMPARATOR);
protected long maxScannerResultSize;
// Cache flushing
protected MemStoreFlusher cacheFlusher;
// catalog tracker
protected CatalogTracker catalogTracker;
/**
* Go here to get table descriptors.
*/
protected TableDescriptors tableDescriptors;
// Replication services. If no replication, this handler will be null.
protected ReplicationSourceService replicationSourceHandler;
protected ReplicationSinkService replicationSinkHandler;
// Compactions
public CompactSplitThread compactSplitThread;
final Map<String, RegionScanner> scanners =
new ConcurrentHashMap<String, RegionScanner>();
/**
* Map of regions currently being served by this region server. Key is the
* encoded region name. All access should be synchronized.
*/
protected final Map<String, HRegion> onlineRegions =
new ConcurrentHashMap<String, HRegion>();
// Leases
protected Leases leases;
// Instance of the hbase executor service.
protected ExecutorService service;
// Request counter.
// Do we need this? Can't we just sum region counters? St.Ack 20110412
protected AtomicInteger requestCount = new AtomicInteger();
// If false, the file system has become unavailable
protected volatile boolean fsOk;
protected HFileSystem fs;
protected static final int NORMAL_QOS = 0;
protected static final int QOS_THRESHOLD = 10; // the line between low and high qos
protected static final int HIGH_QOS = 100;
// Set when a report to the master comes back with a message asking us to
// shutdown. Also set by call to stop when debugging or running unit tests
// of HRegionServer in isolation.
protected volatile boolean stopped = false;
// Go down hard. Used if file system becomes unavailable and also in
// debugging and unit tests.
protected volatile boolean abortRequested;
// Port we put up the webui on.
protected int webuiport = -1;
Map<String, Integer> rowlocks = new ConcurrentHashMap<String, Integer>();
// A state before we go into stopped state. At this stage we're closing user
// space regions.
private boolean stopping = false;
private volatile boolean killed = false;
protected final Configuration conf;
protected final AtomicBoolean haveRootRegion = new AtomicBoolean(false);
private boolean useHBaseChecksum; // verify hbase checksums?
private Path rootDir;
protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
final int numRetries;
protected final int threadWakeFrequency;
private final int msgInterval;
protected final int numRegionsToReport;
// Remote HMaster
private RegionServerStatusProtocol hbaseMaster;
// Server to handle client requests. Default access so can be accessed by
// unit tests.
RpcServer rpcServer;
private final InetSocketAddress isa;
  // Info server. Default access so it can be used by unit tests. REGIONSERVER
  // is the name of the webapp and the attribute name used when stuffing this
  // instance into the web context.
InfoServer infoServer;
/** region server process name */
public static final String REGIONSERVER = "regionserver";
/** region server configuration name */
public static final String REGIONSERVER_CONF = "regionserver_conf";
/*
* Space is reserved in HRS constructor and then released when aborting to
* recover from an OOME. See HBASE-706. TODO: Make this percentage of the heap
* or a minimum.
*/
private final LinkedList<byte[]> reservedSpace = new LinkedList<byte[]>();
private RegionServerMetrics metrics;
private RegionServerDynamicMetrics dynamicMetrics;
/*
   * Check for compaction requests.
*/
Chore compactionChecker;
// HLog and HLog roller. log is protected rather than private to avoid
// eclipse warning when accessed by inner classes
protected volatile HLog hlog;
LogRoller hlogRoller;
// flag set after we're done setting up server threads (used for testing)
protected volatile boolean isOnline;
// zookeeper connection and watcher
private ZooKeeperWatcher zooKeeper;
// master address manager and watcher
private MasterAddressTracker masterAddressManager;
// Cluster Status Tracker
private ClusterStatusTracker clusterStatusTracker;
// Log Splitting Worker
private SplitLogWorker splitLogWorker;
// A sleeper that sleeps for msgInterval.
private final Sleeper sleeper;
private final int rpcTimeout;
private final RegionServerAccounting regionServerAccounting;
// Cache configuration and block cache reference
private final CacheConfig cacheConfig;
// reference to the Thrift Server.
volatile private HRegionThriftServer thriftServer;
/**
   * The server name the Master sees us as. It's made from the hostname the
   * master passes us, the port, and the server startcode. Gets set after
   * registration against the Master. The hostname can differ from the hostname
   * in {@link #isa} but usually doesn't if both servers resolve.
*/
private ServerName serverNameFromMasterPOV;
/**
   * This server's startcode.
*/
private final long startcode;
/**
* MX Bean for RegionServerInfo
*/
private ObjectName mxBean = null;
/**
   * Chore to periodically clean the moved region list
*/
private MovedRegionsCleaner movedRegionsCleaner;
/**
* The lease timeout period for row locks (milliseconds).
*/
private final int rowLockLeaseTimeoutPeriod;
/**
* The lease timeout period for client scanners (milliseconds).
*/
private final int scannerLeaseTimeoutPeriod;
/**
* The reference to the QosFunction
*/
private final QosFunction qosFunction;
/**
* Starts a HRegionServer at the default location
*
* @param conf
* @throws IOException
* @throws InterruptedException
*/
public HRegionServer(Configuration conf)
throws IOException, InterruptedException {
this.fsOk = true;
this.conf = conf;
// Set how many times to retry talking to another server over HConnection.
HConnectionManager.setServerSideHConnectionRetries(this.conf, LOG);
this.isOnline = false;
checkCodecs(this.conf);
    // Do we use checksum verification in HBase? If HBase checksum verification
    // is enabled, then we automatically switch off hdfs checksum verification.
this.useHBaseChecksum = conf.getBoolean(
HConstants.HBASE_CHECKSUM_VERIFICATION, true);
// Config'ed params
this.numRetries = conf.getInt("hbase.client.retries.number", 10);
this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY,
10 * 1000);
this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);
this.sleeper = new Sleeper(this.msgInterval, this);
this.maxScannerResultSize = conf.getLong(
HConstants.HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE_KEY,
HConstants.DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE);
this.numRegionsToReport = conf.getInt(
"hbase.regionserver.numregionstoreport", 10);
this.rpcTimeout = conf.getInt(
HConstants.HBASE_RPC_TIMEOUT_KEY,
HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
this.abortRequested = false;
this.stopped = false;
this.rowLockLeaseTimeoutPeriod = conf.getInt(
HConstants.HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD,
HConstants.DEFAULT_HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD);
this.scannerLeaseTimeoutPeriod = conf.getInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD,
HConstants.DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
// Server to handle client requests.
String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
conf.get("hbase.regionserver.dns.interface", "default"),
conf.get("hbase.regionserver.dns.nameserver", "default")));
int port = conf.getInt(HConstants.REGIONSERVER_PORT,
HConstants.DEFAULT_REGIONSERVER_PORT);
// Creation of a HSA will force a resolve.
InetSocketAddress initialIsa = new InetSocketAddress(hostname, port);
if (initialIsa.getAddress() == null) {
throw new IllegalArgumentException("Failed resolve of " + initialIsa);
}
this.rpcServer = HBaseRPC.getServer(AdminProtocol.class, this,
new Class<?>[]{ClientProtocol.class,
AdminProtocol.class, HBaseRPCErrorHandler.class,
OnlineRegions.class},
initialIsa.getHostName(), // BindAddress is IP we got for this server.
initialIsa.getPort(),
conf.getInt("hbase.regionserver.handler.count", 10),
conf.getInt("hbase.regionserver.metahandler.count", 10),
conf.getBoolean("hbase.rpc.verbose", false),
conf, QOS_THRESHOLD);
// Set our address.
this.isa = this.rpcServer.getListenerAddress();
this.rpcServer.setErrorHandler(this);
this.rpcServer.setQosFunction((qosFunction = new QosFunction()));
this.startcode = System.currentTimeMillis();
// login the server principal (if using secure Hadoop)
User.login(this.conf, "hbase.regionserver.keytab.file",
"hbase.regionserver.kerberos.principal", this.isa.getHostName());
regionServerAccounting = new RegionServerAccounting();
cacheConfig = new CacheConfig(conf);
}
/**
* Run test on configured codecs to make sure supporting libs are in place.
* @param c
* @throws IOException
*/
private static void checkCodecs(final Configuration c) throws IOException {
// check to see if the codec list is available:
String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);
if (codecs == null) return;
for (String codec : codecs) {
if (!CompressionTest.testCompression(codec)) {
throw new IOException("Compression codec " + codec +
" not supported, aborting RS construction");
}
}
}
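  // Illustrative configuration sketch (codec values hypothetical): with
  //   hbase.regionserver.codecs=lzo,gz
  // in the site configuration, a region server missing native LZO support
  // fails fast here at construction instead of later, on a store file write.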
@Retention(RetentionPolicy.RUNTIME)
protected @interface QosPriority {
int priority() default 0;
}
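  // Illustrative sketch (method name and body hypothetical): a handler opts
  // into elevated priority by annotating itself, and QosFunction picks the
  // annotation up reflectively in its constructor:
  //
  //   @QosPriority(priority = HIGH_QOS)
  //   public GetRegionInfoResponse getRegionInfo(RpcController controller,
  //       GetRegionInfoRequest request) throws ServiceException { ... }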
QosFunction getQosFunction() {
return qosFunction;
}
RegionScanner getScanner(long scannerId) {
String scannerIdString = Long.toString(scannerId);
return scanners.get(scannerIdString);
}
/**
   * Utility used to ensure a higher quality of service for priority rpcs, e.g.
   * rpcs to .META. and -ROOT-.
*/
class QosFunction implements Function<RpcRequestBody,Integer> {
private final Map<String, Integer> annotatedQos;
    //We need to mock the regionserver instance for some unit tests (set via
    //the setRegionServer method).
//The field value is initially set to the enclosing instance of HRegionServer.
private HRegionServer hRegionServer = HRegionServer.this;
//The logic for figuring out high priority RPCs is as follows:
//1. if the method is annotated with a QosPriority of QOS_HIGH,
// that is honored
//2. parse out the protobuf message and see if the request is for meta
// region, and if so, treat it as a high priority RPC
//Some optimizations for (2) are done here -
//Clients send the argument classname as part of making the RPC. The server
//decides whether to deserialize the proto argument message based on the
//pre-established set of argument classes (knownArgumentClasses below).
//This prevents the server from having to deserialize all proto argument
//messages prematurely.
//All the argument classes declare a 'getRegion' method that returns a
//RegionSpecifier object. Methods can be invoked on the returned object
//to figure out whether it is a meta region or not.
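    // For example (illustrative): a GetRequest whose RegionSpecifier resolves
    // to .META. comes back from apply() as HIGH_QOS, while the same request
    // against a user-space region falls through to NORMAL_QOS.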
@SuppressWarnings("unchecked")
private final Class<? extends Message>[] knownArgumentClasses = new Class[]{
GetRegionInfoRequest.class,
GetStoreFileRequest.class,
CloseRegionRequest.class,
FlushRegionRequest.class,
SplitRegionRequest.class,
CompactRegionRequest.class,
GetRequest.class,
MutateRequest.class,
ScanRequest.class,
LockRowRequest.class,
UnlockRowRequest.class,
MultiRequest.class
};
//Some caches for helping performance
private final Map<String, Class<? extends Message>> argumentToClassMap =
new HashMap<String, Class<? extends Message>>();
private final Map<String, Map<Class<? extends Message>, Method>>
methodMap = new HashMap<String, Map<Class<? extends Message>, Method>>();
public QosFunction() {
Map<String, Integer> qosMap = new HashMap<String, Integer>();
for (Method m : HRegionServer.class.getMethods()) {
QosPriority p = m.getAnnotation(QosPriority.class);
if (p != null) {
qosMap.put(m.getName(), p.priority());
}
}
annotatedQos = qosMap;
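      // Pre-resolve the static parseFrom(ByteString) and the getRegion()
      // methods for each known request class up front, so apply() avoids
      // per-call reflective lookups on the hot path.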
if (methodMap.get("parseFrom") == null) {
methodMap.put("parseFrom",
new HashMap<Class<? extends Message>, Method>());
}
if (methodMap.get("getRegion") == null) {
methodMap.put("getRegion",
new HashMap<Class<? extends Message>, Method>());
}
for (Class<? extends Message> cls : knownArgumentClasses) {
argumentToClassMap.put(cls.getCanonicalName(), cls);
try {
methodMap.get("parseFrom").put(cls,
cls.getDeclaredMethod("parseFrom",ByteString.class));
methodMap.get("getRegion").put(cls, cls.getDeclaredMethod("getRegion"));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
void setRegionServer(HRegionServer server) {
this.hRegionServer = server;
}
public boolean isMetaRegion(byte[] regionName) {
HRegion region;
try {
region = hRegionServer.getRegion(regionName);
} catch (NotServingRegionException ignored) {
return false;
}
return region.getRegionInfo().isMetaRegion();
}
@Override
public Integer apply(RpcRequestBody from) {
String methodName = from.getMethodName();
Class<? extends Message> rpcArgClass = null;
if (from.hasRequestClassName()) {
String cls = from.getRequestClassName();
rpcArgClass = argumentToClassMap.get(cls);
}
Integer priorityByAnnotation = annotatedQos.get(methodName);
if (priorityByAnnotation != null) {
return priorityByAnnotation;
}
if (rpcArgClass == null || from.getRequest().isEmpty()) {
return NORMAL_QOS;
}
Object deserializedRequestObj = null;
//check whether the request has reference to Meta region
try {
Method parseFrom = methodMap.get("parseFrom").get(rpcArgClass);
deserializedRequestObj = parseFrom.invoke(null, from.getRequest());
Method getRegion = methodMap.get("getRegion").get(rpcArgClass);
RegionSpecifier regionSpecifier =
(RegionSpecifier)getRegion.invoke(deserializedRequestObj,
(Object[])null);
HRegion region = hRegionServer.getRegion(regionSpecifier);
if (region.getRegionInfo().isMetaRegion()) {
if (LOG.isDebugEnabled()) {
LOG.debug("High priority: " + from.toString());
}
return HIGH_QOS;
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
if (methodName.equals("scan")) { // scanner methods...
ScanRequest request = (ScanRequest)deserializedRequestObj;
if (!request.hasScannerId()) {
return NORMAL_QOS;
}
RegionScanner scanner = hRegionServer.getScanner(request.getScannerId());
if (scanner != null && scanner.getRegionInfo().isMetaRegion()) {
if (LOG.isDebugEnabled()) {
LOG.debug("High priority scanner request: " + request.getScannerId());
}
return HIGH_QOS;
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Low priority: " + from.toString());
}
return NORMAL_QOS;
}
}
/**
* All initialization needed before we go register with Master.
*
* @throws IOException
* @throws InterruptedException
*/
private void preRegistrationInitialization(){
try {
initializeZooKeeper();
initializeThreads();
int nbBlocks = conf.getInt("hbase.regionserver.nbreservationblocks", 4);
for (int i = 0; i < nbBlocks; i++) {
reservedSpace.add(new byte[HConstants.DEFAULT_SIZE_RESERVATION_BLOCK]);
}
} catch (Throwable t) {
      // Call stop on error, or the process will stick around forever since the
      // server puts up non-daemon threads.
this.rpcServer.stop();
abort("Initialization of RS failed. Hence aborting RS.", t);
}
}
/**
   * Bring up the connection to the zk ensemble, then wait until a master is
   * available for this cluster, and after that wait until the cluster 'up'
   * flag has been set. This is the order in which the master does things.
   * Finally, put up a catalog tracker.
* @throws IOException
* @throws InterruptedException
*/
private void initializeZooKeeper() throws IOException, InterruptedException {
// Open connection to zookeeper and set primary watcher
this.zooKeeper = new ZooKeeperWatcher(conf, REGIONSERVER + ":" +
this.isa.getPort(), this);
// Create the master address manager, register with zk, and start it. Then
// block until a master is available. No point in starting up if no master
// running.
this.masterAddressManager = new MasterAddressTracker(this.zooKeeper, this);
this.masterAddressManager.start();
blockAndCheckIfStopped(this.masterAddressManager);
// Wait on cluster being up. Master will set this flag up in zookeeper
// when ready.
this.clusterStatusTracker = new ClusterStatusTracker(this.zooKeeper, this);
this.clusterStatusTracker.start();
blockAndCheckIfStopped(this.clusterStatusTracker);
// Create the catalog tracker and start it;
this.catalogTracker = new CatalogTracker(this.zooKeeper, this.conf,
this, this.conf.getInt("hbase.regionserver.catalog.timeout", Integer.MAX_VALUE));
catalogTracker.start();
}
/**
   * Utility method to wait indefinitely on znode availability while checking
   * whether the region server has been shut down
* @param tracker znode tracker to use
* @throws IOException any IO exception, plus if the RS is stopped
* @throws InterruptedException
*/
private void blockAndCheckIfStopped(ZooKeeperNodeTracker tracker)
throws IOException, InterruptedException {
while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {
if (this.stopped) {
throw new IOException("Received the shutdown message while waiting.");
}
}
}
/**
* @return False if cluster shutdown in progress
*/
private boolean isClusterUp() {
return this.clusterStatusTracker.isClusterUp();
}
private void initializeThreads() throws IOException {
// Cache flushing thread.
this.cacheFlusher = new MemStoreFlusher(conf, this);
// Compaction thread
this.compactSplitThread = new CompactSplitThread(this);
    // Background thread to check for compactions; needed if a region
    // has not gotten updates in a while. Make it run at a lower frequency.
int multiplier = this.conf.getInt(HConstants.THREAD_WAKE_FREQUENCY +
".multiplier", 1000);
this.compactionChecker = new CompactionChecker(this,
this.threadWakeFrequency * multiplier, this);
this.leases = new Leases(this.threadWakeFrequency);
// Create the thread for the ThriftServer.
if (conf.getBoolean("hbase.regionserver.export.thrift", false)) {
thriftServer = new HRegionThriftServer(this, conf);
thriftServer.start();
LOG.info("Started Thrift API from Region Server.");
}
// Create the thread to clean the moved regions list
movedRegionsCleaner = MovedRegionsCleaner.createAndStart(this);
}
/**
* The HRegionServer sticks in this loop until closed.
*/
public void run() {
try {
// Do pre-registration initializations; zookeeper, lease threads, etc.
preRegistrationInitialization();
} catch (Throwable e) {
abort("Fatal exception during initialization", e);
}
try {
      // Try to register with the Master; tell it we are here. Break if the
      // server is stopped, the cluster-up flag is down, or hdfs went wacky.
while (keepLooping()) {
RegionServerStartupResponse w = reportForDuty();
if (w == null) {
LOG.warn("reportForDuty failed; sleeping and then retrying.");
this.sleeper.sleep();
} else {
handleReportForDutyResponse(w);
break;
}
}
registerMBean();
// We registered with the Master. Go into run mode.
long lastMsg = 0;
long oldRequestCount = -1;
// The main run loop.
while (!this.stopped && isHealthy()) {
if (!isClusterUp()) {
if (isOnlineRegionsEmpty()) {
stop("Exiting; cluster shutdown set and not carrying any regions");
} else if (!this.stopping) {
this.stopping = true;
LOG.info("Closing user regions");
closeUserRegions(this.abortRequested);
} else if (this.stopping) {
boolean allUserRegionsOffline = areAllUserRegionsOffline();
if (allUserRegionsOffline) {
// Set stopped if no requests since last time we went around the loop.
// The remaining meta regions will be closed on our way out.
if (oldRequestCount == this.requestCount.get()) {
stop("Stopped; only catalog regions remaining online");
break;
}
oldRequestCount = this.requestCount.get();
} else {
// Make sure all regions have been closed -- some regions may
// have not got it because we were splitting at the time of
// the call to closeUserRegions.
closeUserRegions(this.abortRequested);
}
LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());
}
}
long now = System.currentTimeMillis();
if ((now - lastMsg) >= msgInterval) {
doMetrics();
tryRegionServerReport(lastMsg, now);
lastMsg = System.currentTimeMillis();
}
if (!this.stopped) this.sleeper.sleep();
      } // while
} catch (Throwable t) {
if (!checkOOME(t)) {
abort("Unhandled exception: " + t.getMessage(), t);
}
}
// Run shutdown.
if (mxBean != null) {
MBeanUtil.unregisterMBean(mxBean);
mxBean = null;
}
if (this.thriftServer != null) this.thriftServer.shutdown();
this.leases.closeAfterLeasesExpire();
this.rpcServer.stop();
if (this.splitLogWorker != null) {
splitLogWorker.stop();
}
if (this.infoServer != null) {
LOG.info("Stopping infoServer");
try {
this.infoServer.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
// Send cache a shutdown.
if (cacheConfig.isBlockCacheEnabled()) {
cacheConfig.getBlockCache().shutdown();
}
movedRegionsCleaner.stop("Region Server stopping");
// Send interrupts to wake up threads if sleeping so they notice shutdown.
// TODO: Should we check they are alive? If OOME could have exited already
if (this.cacheFlusher != null) this.cacheFlusher.interruptIfNecessary();
if (this.compactSplitThread != null) this.compactSplitThread.interruptIfNecessary();
if (this.hlogRoller != null) this.hlogRoller.interruptIfNecessary();
if (this.compactionChecker != null)
this.compactionChecker.interrupt();
if (this.killed) {
// Just skip out w/o closing regions. Used when testing.
} else if (abortRequested) {
if (this.fsOk) {
closeAllRegions(abortRequested); // Don't leave any open file handles
}
LOG.info("aborting server " + this.serverNameFromMasterPOV);
} else {
closeAllRegions(abortRequested);
closeAllScanners();
LOG.info("stopping server " + this.serverNameFromMasterPOV);
}
// Interrupt catalog tracker here in case any regions being opened out in
// handlers are stuck waiting on meta or root.
if (this.catalogTracker != null) this.catalogTracker.stop();
if (!this.killed && this.fsOk) {
waitOnAllRegionsToClose(abortRequested);
LOG.info("stopping server " + this.serverNameFromMasterPOV +
"; all regions closed.");
}
    // The fsOk flag may be changed when closing regions throws an exception.
if (!this.killed && this.fsOk) {
closeWAL(abortRequested ? false : true);
}
// Make sure the proxy is down.
if (this.hbaseMaster != null) {
HBaseRPC.stopProxy(this.hbaseMaster);
this.hbaseMaster = null;
}
this.leases.close();
if (!killed) {
join();
}
try {
deleteMyEphemeralNode();
} catch (KeeperException e) {
LOG.warn("Failed deleting my ephemeral node", e);
}
// We may have failed to delete the znode at the previous step, but
// we delete the file anyway: a second attempt to delete the znode is likely to fail again.
ZNodeClearer.deleteMyEphemeralNodeOnDisk();
this.zooKeeper.close();
LOG.info("stopping server " + this.serverNameFromMasterPOV +
"; zookeeper connection closed.");
LOG.info(Thread.currentThread().getName() + " exiting");
}
private boolean areAllUserRegionsOffline() {
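    // At most two catalog regions (-ROOT- and .META.) can be online on one
    // server, so more than two online regions implies a user region remains.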
if (getNumberOfOnlineRegions() > 2) return false;
boolean allUserRegionsOffline = true;
for (Map.Entry<String, HRegion> e: this.onlineRegions.entrySet()) {
if (!e.getValue().getRegionInfo().isMetaRegion()) {
allUserRegionsOffline = false;
break;
}
}
return allUserRegionsOffline;
}
void tryRegionServerReport(long reportStartTime, long reportEndTime)
throws IOException {
HBaseProtos.ServerLoad sl = buildServerLoad(reportStartTime, reportEndTime);
    // Why do we do this?
this.requestCount.set(0);
try {
RegionServerReportRequest.Builder request = RegionServerReportRequest.newBuilder();
ServerName sn = ServerName.parseVersionedServerName(
this.serverNameFromMasterPOV.getVersionedBytes());
request.setServer(ProtobufUtil.toServerName(sn));
request.setLoad(sl);
this.hbaseMaster.regionServerReport(null, request.build());
} catch (ServiceException se) {
IOException ioe = ProtobufUtil.getRemoteException(se);
if (ioe instanceof YouAreDeadException) {
// This will be caught and handled as a fatal error in run()
throw ioe;
}
// Couldn't connect to the master, get location from zk and reconnect
// Method blocks until new master is found or we are stopped
getMaster();
}
}
HBaseProtos.ServerLoad buildServerLoad(long reportStartTime, long reportEndTime) {
Collection<HRegion> regions = getOnlineRegionsLocalContext();
MemoryUsage memory =
ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
HBaseProtos.ServerLoad.Builder serverLoad = HBaseProtos.ServerLoad.newBuilder();
serverLoad.setNumberOfRequests((int)metrics.getRequests());
serverLoad.setTotalNumberOfRequests(requestCount.get());
serverLoad.setUsedHeapMB((int)(memory.getUsed() / 1024 / 1024));
serverLoad.setMaxHeapMB((int) (memory.getMax() / 1024 / 1024));
Set<String> coprocessors = this.hlog.getCoprocessorHost().getCoprocessors();
for (String coprocessor : coprocessors) {
serverLoad.addCoprocessors(
Coprocessor.newBuilder().setName(coprocessor).build());
}
for (HRegion region : regions) {
serverLoad.addRegionLoads(createRegionLoad(region));
}
serverLoad.setReportStartTime(reportStartTime);
serverLoad.setReportEndTime(reportEndTime);
return serverLoad.build();
}
String getOnlineRegionsAsPrintableString() {
StringBuilder sb = new StringBuilder();
for (HRegion r: this.onlineRegions.values()) {
if (sb.length() > 0) sb.append(", ");
sb.append(r.getRegionInfo().getEncodedName());
}
return sb.toString();
}
/**
   * Wait on regions to close.
*/
private void waitOnAllRegionsToClose(final boolean abort) {
// Wait till all regions are closed before going out.
int lastCount = -1;
long previousLogTime = 0;
Set<String> closedRegions = new HashSet<String>();
while (!isOnlineRegionsEmpty()) {
int count = getNumberOfOnlineRegions();
// Only print a message if the count of regions has changed.
if (count != lastCount) {
// Log every second at most
if (System.currentTimeMillis() > (previousLogTime + 1000)) {
previousLogTime = System.currentTimeMillis();
lastCount = count;
LOG.info("Waiting on " + count + " regions to close");
          // Only print out regions still closing if there are few of them,
          // else we will swamp the log.
if (count < 10 && LOG.isDebugEnabled()) {
LOG.debug(this.onlineRegions);
}
}
}
// Ensure all user regions have been sent a close. Use this to
// protect against the case where an open comes in after we start the
// iterator of onlineRegions to close all user regions.
for (Map.Entry<String, HRegion> e : this.onlineRegions.entrySet()) {
HRegionInfo hri = e.getValue().getRegionInfo();
if (!this.regionsInTransitionInRS.containsKey(hri.getEncodedNameAsBytes())
&& !closedRegions.contains(hri.getEncodedName())) {
closedRegions.add(hri.getEncodedName());
// Don't update zk with this close transition; pass false.
closeRegion(hri, abort, false);
}
}
// No regions in RIT, we could stop waiting now.
if (this.regionsInTransitionInRS.isEmpty()) {
if (!isOnlineRegionsEmpty()) {
LOG.info("We were exiting though online regions are not empty," +
" because some regions failed closing");
}
break;
}
Threads.sleep(200);
}
}
private void closeWAL(final boolean delete) {
try {
if (this.hlog != null) {
if (delete) {
hlog.closeAndDelete();
} else {
hlog.close();
}
}
} catch (Throwable e) {
LOG.error("Close and delete failed", RemoteExceptionHandler.checkThrowable(e));
}
}
private void closeAllScanners() {
// Close any outstanding scanners. Means they'll get an UnknownScanner
// exception next time they come in.
for (Map.Entry<String, RegionScanner> e : this.scanners.entrySet()) {
try {
e.getValue().close();
} catch (IOException ioe) {
LOG.warn("Closing scanner " + e.getKey(), ioe);
}
}
}
/*
* Run init. Sets up hlog and starts up all server threads.
*
   * @param c Startup response from the master, carrying extra configuration.
*/
protected void handleReportForDutyResponse(final RegionServerStartupResponse c)
throws IOException {
try {
for (NameStringPair e : c.getMapEntriesList()) {
String key = e.getName();
// The hostname the master sees us as.
if (key.equals(HConstants.KEY_FOR_HOSTNAME_SEEN_BY_MASTER)) {
String hostnameFromMasterPOV = e.getValue();
this.serverNameFromMasterPOV = new ServerName(hostnameFromMasterPOV,
this.isa.getPort(), this.startcode);
LOG.info("Master passed us hostname to use. Was=" +
this.isa.getHostName() + ", Now=" +
this.serverNameFromMasterPOV.getHostname());
continue;
}
String value = e.getValue().toString();
if (LOG.isDebugEnabled()) {
LOG.debug("Config from master: " + key + "=" + value);
}
this.conf.set(key, value);
}
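      // Illustrative (key and value hypothetical): the master might hand back
      //   hbase.regionserver.msginterval=5000
      // which the loop above copies verbatim into this server's Configuration;
      // only the hostname key gets the special handling seen earlier.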
// hack! Maps DFSClient => RegionServer for logs. HDFS made this
// config param for task trackers, but we can piggyback off of it.
if (this.conf.get("mapred.task.id") == null) {
this.conf.set("mapred.task.id", "hb_rs_" +
this.serverNameFromMasterPOV.toString());
}
// Set our ephemeral znode up in zookeeper now we have a name.
createMyEphemeralNode();
// Save it in a file, this will allow to see if we crash
ZNodeClearer.writeMyEphemeralNodeOnDisk(getMyEphemeralNodePath());
// Master sent us hbase.rootdir to use. Should be fully qualified
// path with file system specification included. Set 'fs.defaultFS'
// to match the filesystem on hbase.rootdir else underlying hadoop hdfs
// accessors will be going against wrong filesystem (unless all is set
// to defaults).
this.conf.set("fs.defaultFS", this.conf.get("hbase.rootdir"));
// Get fs instance used by this RS
this.fs = new HFileSystem(this.conf, this.useHBaseChecksum);
this.rootDir = new Path(this.conf.get(HConstants.HBASE_DIR));
this.tableDescriptors = new FSTableDescriptors(this.fs, this.rootDir, true);
this.hlog = setupWALAndReplication();
// Init in here rather than in constructor after thread name has been set
this.metrics = new RegionServerMetrics();
this.dynamicMetrics = RegionServerDynamicMetrics.newInstance();
startServiceThreads();
LOG.info("Serving as " + this.serverNameFromMasterPOV +
", RPC listening on " + this.isa +
", sessionid=0x" +
Long.toHexString(this.zooKeeper.getRecoverableZooKeeper().getSessionId()));
isOnline = true;
} catch (Throwable e) {
this.isOnline = false;
stop("Failed initialization");
throw convertThrowableToIOE(cleanup(e, "Failed init"),
"Region server startup failed");
} finally {
sleeper.skipSleepCycle();
}
}
private void createMyEphemeralNode() throws KeeperException {
ZKUtil.createEphemeralNodeAndWatch(this.zooKeeper, getMyEphemeralNodePath(),
HConstants.EMPTY_BYTE_ARRAY);
}
private void deleteMyEphemeralNode() throws KeeperException {
ZKUtil.deleteNode(this.zooKeeper, getMyEphemeralNodePath());
}
public RegionServerAccounting getRegionServerAccounting() {
return regionServerAccounting;
}
/*
* @param r Region to get RegionLoad for.
*
* @return RegionLoad instance.
*/
private RegionLoad createRegionLoad(final HRegion r) {
byte[] name = r.getRegionName();
int stores = 0;
int storefiles = 0;
int storeUncompressedSizeMB = 0;
int storefileSizeMB = 0;
int memstoreSizeMB = (int) (r.memstoreSize.get() / 1024 / 1024);
int storefileIndexSizeMB = 0;
int rootIndexSizeKB = 0;
int totalStaticIndexSizeKB = 0;
int totalStaticBloomSizeKB = 0;
long totalCompactingKVs = 0;
long currentCompactedKVs = 0;
synchronized (r.stores) {
stores += r.stores.size();
for (Store store : r.stores.values()) {
storefiles += store.getStorefilesCount();
storeUncompressedSizeMB += (int) (store.getStoreSizeUncompressed()
/ 1024 / 1024);
storefileSizeMB += (int) (store.getStorefilesSize() / 1024 / 1024);
storefileIndexSizeMB += (int) (store.getStorefilesIndexSize() / 1024 / 1024);
CompactionProgress progress = store.getCompactionProgress();
if (progress != null) {
totalCompactingKVs += progress.totalCompactingKVs;
currentCompactedKVs += progress.currentCompactedKVs;
}
rootIndexSizeKB +=
(int) (store.getStorefilesIndexSize() / 1024);
totalStaticIndexSizeKB +=
(int) (store.getTotalStaticIndexSize() / 1024);
totalStaticBloomSizeKB +=
(int) (store.getTotalStaticBloomSize() / 1024);
}
}
RegionLoad.Builder regionLoad = RegionLoad.newBuilder();
RegionSpecifier.Builder regionSpecifier = RegionSpecifier.newBuilder();
regionSpecifier.setType(RegionSpecifierType.REGION_NAME);
regionSpecifier.setValue(ByteString.copyFrom(name));
regionLoad.setRegionSpecifier(regionSpecifier.build())
.setStores(stores)
.setStorefiles(storefiles)
.setStoreUncompressedSizeMB(storeUncompressedSizeMB)
.setStorefileSizeMB(storefileSizeMB)
.setMemstoreSizeMB(memstoreSizeMB)
.setStorefileIndexSizeMB(storefileIndexSizeMB)
.setRootIndexSizeKB(rootIndexSizeKB)
.setTotalStaticIndexSizeKB(totalStaticIndexSizeKB)
.setTotalStaticBloomSizeKB(totalStaticBloomSizeKB)
.setReadRequestsCount((int) r.readRequestsCount.get())
.setWriteRequestsCount((int) r.writeRequestsCount.get())
.setTotalCompactingKVs(totalCompactingKVs)
.setCurrentCompactedKVs(currentCompactedKVs);
Set<String> coprocessors = r.getCoprocessorHost().getCoprocessors();
for (String coprocessor : coprocessors) {
regionLoad.addCoprocessors(
Coprocessor.newBuilder().setName(coprocessor).build());
}
return regionLoad.build();
}
/**
* @param encodedRegionName
* @return An instance of RegionLoad.
*/
public RegionLoad createRegionLoad(final String encodedRegionName) {
    HRegion r = this.onlineRegions.get(encodedRegionName);
return r != null ? createRegionLoad(r) : null;
}
/*
* Inner class that runs on a long period checking if regions need compaction.
*/
private static class CompactionChecker extends Chore {
private final HRegionServer instance;
private final int majorCompactPriority;
private final static int DEFAULT_PRIORITY = Integer.MAX_VALUE;
CompactionChecker(final HRegionServer h, final int sleepTime,
final Stoppable stopper) {
super("CompactionChecker", sleepTime, h);
this.instance = h;
LOG.info("Runs every " + StringUtils.formatTime(sleepTime));
/* MajorCompactPriority is configurable.
* If not set, the compaction will use default priority.
*/
this.majorCompactPriority = this.instance.conf.
getInt("hbase.regionserver.compactionChecker.majorCompactPriority",
DEFAULT_PRIORITY);
}
@Override
protected void chore() {
for (HRegion r : this.instance.onlineRegions.values()) {
if (r == null)
continue;
for (Store s : r.getStores().values()) {
try {
if (s.needsCompaction()) {
// Queue a compaction. Will recognize if major is needed.
this.instance.compactSplitThread.requestCompaction(r, s,
getName() + " requests compaction");
} else if (s.isMajorCompaction()) {
if (majorCompactPriority == DEFAULT_PRIORITY ||
majorCompactPriority > r.getCompactPriority()) {
this.instance.compactSplitThread.requestCompaction(r, s,
getName() + " requests major compaction; use default priority");
} else {
this.instance.compactSplitThread.requestCompaction(r, s,
getName() + " requests major compaction; use configured priority",
this.majorCompactPriority);
}
}
} catch (IOException e) {
LOG.warn("Failed major compaction check on " + r, e);
}
}
}
}
}
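  // Illustrative sketch of the priority knob above (value hypothetical):
  //   conf.setInt("hbase.regionserver.compactionChecker.majorCompactPriority", 2);
  // The configured value is used only when it is at most the region's own
  // compact priority; otherwise the chore falls back to the default priority.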
/**
* Report the status of the server. A server is online once all the startup is
* completed (setting up filesystem, starting service threads, etc.). This
* method is designed mostly to be useful in tests.
*
* @return true if online, false if not.
*/
public boolean isOnline() {
return isOnline;
}
/**
* Setup WAL log and replication if enabled.
* Replication setup is done in here because it wants to be hooked up to WAL.
* @return A WAL instance.
* @throws IOException
*/
private HLog setupWALAndReplication() throws IOException {
final Path oldLogDir = new Path(rootDir, HConstants.HREGION_OLDLOGDIR_NAME);
Path logdir = new Path(rootDir,
HLog.getHLogDirectoryName(this.serverNameFromMasterPOV.toString()));
if (LOG.isDebugEnabled()) LOG.debug("logdir=" + logdir);
if (this.fs.exists(logdir)) {
throw new RegionServerRunningException("Region server has already " +
"created directory at " + this.serverNameFromMasterPOV.toString());
}
// Instantiate replication manager if replication enabled. Pass it the
// log directories.
createNewReplicationInstance(conf, this, this.fs, logdir, oldLogDir);
return instantiateHLog(logdir, oldLogDir);
}
/**
* Called by {@link #setupWALAndReplication()} creating WAL instance.
* @param logdir
* @param oldLogDir
* @return WAL instance.
* @throws IOException
*/
protected HLog instantiateHLog(Path logdir, Path oldLogDir) throws IOException {
return new HLog(this.fs.getBackingFs(), logdir, oldLogDir, this.conf,
getWALActionListeners(), this.serverNameFromMasterPOV.toString());
}
/**
* Called by {@link #instantiateHLog(Path, Path)} setting up WAL instance.
* Add any {@link WALActionsListener}s you want inserted before WAL startup.
* @return List of WALActionsListener that will be passed in to
* {@link HLog} on construction.
*/
protected List<WALActionsListener> getWALActionListeners() {
List<WALActionsListener> listeners = new ArrayList<WALActionsListener>();
// Log roller.
this.hlogRoller = new LogRoller(this, this);
listeners.add(this.hlogRoller);
if (this.replicationSourceHandler != null &&
this.replicationSourceHandler.getWALActionsListener() != null) {
// Replication handler is an implementation of WALActionsListener.
listeners.add(this.replicationSourceHandler.getWALActionsListener());
}
return listeners;
}
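  // Illustrative sketch (listener class hypothetical): a subclass wanting
  // extra WAL hooks can extend the list built above:
  //   List<WALActionsListener> listeners = super.getWALActionListeners();
  //   listeners.add(new MyAuditingListener());
  //   return listeners;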
protected LogRoller getLogRoller() {
return hlogRoller;
}
/*
   * Run a metrics pass; failures are logged rather than propagated.
*/
protected void doMetrics() {
try {
metrics();
} catch (Throwable e) {
LOG.warn("Failed metrics", e);
}
}
protected void metrics() {
this.metrics.regions.set(this.onlineRegions.size());
this.metrics.incrementRequests(this.requestCount.get());
this.metrics.requests.intervalHeartBeat();
    // Is this too expensive to do every three seconds -- taking a lock on
    // onlineRegions and then on each store? Can I make the metrics sloppier
    // and avoid the synchronization?
int stores = 0;
int storefiles = 0;
long memstoreSize = 0;
int readRequestsCount = 0;
int writeRequestsCount = 0;
long storefileIndexSize = 0;
HDFSBlocksDistribution hdfsBlocksDistribution =
new HDFSBlocksDistribution();
long totalStaticIndexSize = 0;
long totalStaticBloomSize = 0;
long numPutsWithoutWAL = 0;
long dataInMemoryWithoutWAL = 0;
// Note that this is a map of Doubles instead of Longs. This is because we
// do effective integer division, which would perhaps truncate more than it
// should because we do it only on one part of our sum at a time. Rather
// than dividing at the end, where it is difficult to know the proper
// factor, everything is exact then truncated.
final Map<String, MutableDouble> tempVals =
new HashMap<String, MutableDouble>();
for (Map.Entry<String, HRegion> e : this.onlineRegions.entrySet()) {
HRegion r = e.getValue();
memstoreSize += r.memstoreSize.get();
numPutsWithoutWAL += r.numPutsWithoutWAL.get();
dataInMemoryWithoutWAL += r.dataInMemoryWithoutWAL.get();
readRequestsCount += r.readRequestsCount.get();
writeRequestsCount += r.writeRequestsCount.get();
synchronized (r.stores) {
stores += r.stores.size();
for (Map.Entry<byte[], Store> ee : r.stores.entrySet()) {
final Store store = ee.getValue();
final SchemaMetrics schemaMetrics = store.getSchemaMetrics();
{
long tmpStorefiles = store.getStorefilesCount();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STORE_FILE_COUNT, tmpStorefiles);
storefiles += tmpStorefiles;
}
{
long tmpStorefileIndexSize = store.getStorefilesIndexSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STORE_FILE_INDEX_SIZE,
(long) (tmpStorefileIndexSize / (1024.0 * 1024)));
storefileIndexSize += tmpStorefileIndexSize;
}
{
long tmpStorefilesSize = store.getStorefilesSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STORE_FILE_SIZE_MB,
(long) (tmpStorefilesSize / (1024.0 * 1024)));
}
{
long tmpStaticBloomSize = store.getTotalStaticBloomSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STATIC_BLOOM_SIZE_KB,
(long) (tmpStaticBloomSize / 1024.0));
totalStaticBloomSize += tmpStaticBloomSize;
}
{
long tmpStaticIndexSize = store.getTotalStaticIndexSize();
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.STATIC_INDEX_SIZE_KB,
(long) (tmpStaticIndexSize / 1024.0));
totalStaticIndexSize += tmpStaticIndexSize;
}
schemaMetrics.accumulateStoreMetric(tempVals,
StoreMetricType.MEMSTORE_SIZE_MB,
(long) (store.getMemStoreSize() / (1024.0 * 1024)));
}
}
hdfsBlocksDistribution.add(r.getHDFSBlocksDistribution());
}
for (Entry<String, MutableDouble> e : tempVals.entrySet()) {
RegionMetricsStorage.setNumericMetric(e.getKey(), e.getValue().longValue());
}
this.metrics.stores.set(stores);
this.metrics.storefiles.set(storefiles);
this.metrics.memstoreSizeMB.set((int) (memstoreSize / (1024 * 1024)));
this.metrics.mbInMemoryWithoutWAL.set((int) (dataInMemoryWithoutWAL / (1024 * 1024)));
this.metrics.numPutsWithoutWAL.set(numPutsWithoutWAL);
this.metrics.storefileIndexSizeMB.set(
(int) (storefileIndexSize / (1024 * 1024)));
this.metrics.rootIndexSizeKB.set(
(int) (storefileIndexSize / 1024));
this.metrics.totalStaticIndexSizeKB.set(
(int) (totalStaticIndexSize / 1024));
this.metrics.totalStaticBloomSizeKB.set(
(int) (totalStaticBloomSize / 1024));
this.metrics.readRequestsCount.set(readRequestsCount);
this.metrics.writeRequestsCount.set(writeRequestsCount);
this.metrics.compactionQueueSize.set(compactSplitThread
.getCompactionQueueSize());
this.metrics.flushQueueSize.set(cacheFlusher
.getFlushQueueSize());
BlockCache blockCache = cacheConfig.getBlockCache();
if (blockCache != null) {
this.metrics.blockCacheCount.set(blockCache.size());
this.metrics.blockCacheFree.set(blockCache.getFreeSize());
this.metrics.blockCacheSize.set(blockCache.getCurrentSize());
CacheStats cacheStats = blockCache.getStats();
this.metrics.blockCacheHitCount.set(cacheStats.getHitCount());
this.metrics.blockCacheMissCount.set(cacheStats.getMissCount());
this.metrics.blockCacheEvictedCount.set(blockCache.getEvictedCount());
double ratio = blockCache.getStats().getHitRatio();
int percent = (int) (ratio * 100);
this.metrics.blockCacheHitRatio.set(percent);
ratio = blockCache.getStats().getHitCachingRatio();
percent = (int) (ratio * 100);
this.metrics.blockCacheHitCachingRatio.set(percent);
// past N period block cache hit / hit caching ratios
cacheStats.rollMetricsPeriod();
ratio = cacheStats.getHitRatioPastNPeriods();
percent = (int) (ratio * 100);
this.metrics.blockCacheHitRatioPastNPeriods.set(percent);
ratio = cacheStats.getHitCachingRatioPastNPeriods();
percent = (int) (ratio * 100);
this.metrics.blockCacheHitCachingRatioPastNPeriods.set(percent);
}
float localityIndex = hdfsBlocksDistribution.getBlockLocalityIndex(
getServerName().getHostname());
int percent = (int) (localityIndex * 100);
this.metrics.hdfsBlocksLocalityIndex.set(percent);
}
/**
* @return Region server metrics instance.
*/
public RegionServerMetrics getMetrics() {
return this.metrics;
}
/**
* @return Master address tracker instance.
*/
public MasterAddressTracker getMasterAddressManager() {
return this.masterAddressManager;
}
/*
   * Start maintenance Threads, Server, Worker and lease checker threads.
   * Install an UncaughtExceptionHandler that calls abort of RegionServer if we
   * get an unhandled exception. We cannot set the handler on all threads.
   * Server's internal Listener thread is off limits. For Server, if an OOME, it
   * waits a while then retries. Meantime, a flush or a compaction that tries to
   * run should trigger the same critical condition and the shutdown will run.
   * On its way out, this server will shut down Server. Leases are sort of in
   * between: Leases has an internal thread that, while it inherits from Chore,
   * keeps its own internal stop mechanism, so it needs to be stopped by this
   * hosting server. Worker logs the exception and exits.
*/
private void startServiceThreads() throws IOException {
String n = Thread.currentThread().getName();
UncaughtExceptionHandler handler = new UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) {
abort("Uncaught exception in service thread " + t.getName(), e);
}
};
// Start executor services
this.service = new ExecutorService(getServerName().toString());
this.service.startExecutorService(ExecutorType.RS_OPEN_REGION,
conf.getInt("hbase.regionserver.executor.openregion.threads", 3));
this.service.startExecutorService(ExecutorType.RS_OPEN_ROOT,
conf.getInt("hbase.regionserver.executor.openroot.threads", 1));
this.service.startExecutorService(ExecutorType.RS_OPEN_META,
conf.getInt("hbase.regionserver.executor.openmeta.threads", 1));
this.service.startExecutorService(ExecutorType.RS_CLOSE_REGION,
conf.getInt("hbase.regionserver.executor.closeregion.threads", 3));
this.service.startExecutorService(ExecutorType.RS_CLOSE_ROOT,
conf.getInt("hbase.regionserver.executor.closeroot.threads", 1));
this.service.startExecutorService(ExecutorType.RS_CLOSE_META,
conf.getInt("hbase.regionserver.executor.closemeta.threads", 1));
Threads.setDaemonThreadRunning(this.hlogRoller.getThread(), n + ".logRoller", handler);
Threads.setDaemonThreadRunning(this.cacheFlusher.getThread(), n + ".cacheFlusher",
handler);
Threads.setDaemonThreadRunning(this.compactionChecker.getThread(), n +
".compactionChecker", handler);
// Leases is not a Thread. Internally it runs a daemon thread. If it gets
// an unhandled exception, it will just exit.
this.leases.setName(n + ".leaseChecker");
this.leases.start();
    // Put up the webui. The webui may come up on a port other than the one
    // configured if that port is occupied. Adjust serverInfo if this is the case.
this.webuiport = putUpWebUI();
if (this.replicationSourceHandler == this.replicationSinkHandler &&
this.replicationSourceHandler != null) {
this.replicationSourceHandler.startReplicationService();
} else if (this.replicationSourceHandler != null) {
this.replicationSourceHandler.startReplicationService();
} else if (this.replicationSinkHandler != null) {
this.replicationSinkHandler.startReplicationService();
}
// Start Server. This service is like leases in that it internally runs
// a thread.
this.rpcServer.start();
// Create the log splitting worker and start it
this.splitLogWorker = new SplitLogWorker(this.zooKeeper,
this.getConfiguration(), this.getServerName());
splitLogWorker.start();
}
/**
* Puts up the webui.
* @return Returns final port -- maybe different from what we started with.
* @throws IOException
*/
private int putUpWebUI() throws IOException {
int port = this.conf.getInt(HConstants.REGIONSERVER_INFO_PORT, 60030);
// -1 is for disabling info server
if (port < 0) return port;
String addr = this.conf.get("hbase.regionserver.info.bindAddress", "0.0.0.0");
// check if auto port bind enabled
boolean auto = this.conf.getBoolean(HConstants.REGIONSERVER_INFO_PORT_AUTO,
false);
while (true) {
try {
this.infoServer = new InfoServer("regionserver", addr, port, false, this.conf);
this.infoServer.addServlet("status", "/rs-status", RSStatusServlet.class);
this.infoServer.addServlet("dump", "/dump", RSDumpServlet.class);
this.infoServer.setAttribute(REGIONSERVER, this);
this.infoServer.setAttribute(REGIONSERVER_CONF, conf);
this.infoServer.start();
break;
} catch (BindException e) {
if (!auto) {
// auto bind disabled throw BindException
throw e;
}
// auto bind enabled, try to use another port
LOG.info("Failed binding http info server to port: " + port);
port++;
}
}
return port;
}
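  // Illustrative behavior sketch: with hbase.regionserver.info.port.auto=true
  // and the default port 60030 already bound, the loop above retries 60031,
  // 60032, ... until a bind succeeds; with auto=false the BindException
  // propagates to the caller.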
/*
* Verify that server is healthy
*/
private boolean isHealthy() {
if (!fsOk) {
// File system problem
return false;
}
// Verify that all threads are alive
if (!(leases.isAlive()
&& cacheFlusher.isAlive() && hlogRoller.isAlive()
&& this.compactionChecker.isAlive())) {
stop("One or more threads are no longer alive -- stop");
return false;
}
return true;
}
@Override
public HLog getWAL() {
return this.hlog;
}
@Override
public CatalogTracker getCatalogTracker() {
return this.catalogTracker;
}
@Override
public void stop(final String msg) {
this.stopped = true;
LOG.info("STOPPED: " + msg);
// Wakes run() if it is sleeping
sleeper.skipSleepCycle();
}
public void waitForServerOnline(){
while (!isOnline() && !isStopped()){
sleeper.sleep();
}
}
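  // Illustrative sketch of how a test might drive the lifecycle above (thread
  // name hypothetical; assumes an HBaseConfiguration is available):
  //   HRegionServer rs = new HRegionServer(HBaseConfiguration.create());
  //   Threads.setDaemonThreadRunning(new Thread(rs), "test-rs");
  //   rs.waitForServerOnline();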
@Override
public void postOpenDeployTasks(final HRegion r, final CatalogTracker ct,
final boolean daughter)
throws KeeperException, IOException {
checkOpen();
LOG.info("Post open deploy tasks for region=" + r.getRegionNameAsString() +
", daughter=" + daughter);
// Do checks to see if we need to compact (references or too many files)
for (Store s : r.getStores().values()) {
if (s.hasReferences() || s.needsCompaction()) {
getCompactionRequester().requestCompaction(r, s, "Opening Region");
}
}
// Update ZK, ROOT or META
if (r.getRegionInfo().isRootRegion()) {
RootRegionTracker.setRootLocation(getZooKeeper(),
this.serverNameFromMasterPOV);
} else if (r.getRegionInfo().isMetaRegion()) {
MetaEditor.updateMetaLocation(ct, r.getRegionInfo(),
this.serverNameFromMasterPOV);
} else {
if (daughter) {
// If daughter of a split, update whole row, not just location.
MetaEditor.addDaughter(ct, r.getRegionInfo(),
this.serverNameFromMasterPOV);
} else {
MetaEditor.updateRegionLocation(ct, r.getRegionInfo(),
this.serverNameFromMasterPOV);
}
}
LOG.info("Done with post open deploy task for region=" +
r.getRegionNameAsString() + ", daughter=" + daughter);
}
/**
* Return a reference to the metrics instance used for counting RPC calls.
* @return Metrics instance.
*/
public HBaseRpcMetrics getRpcMetrics() {
return rpcServer.getRpcMetrics();
}
@Override
public RpcServer getRpcServer() {
return rpcServer;
}
/**
   * Cause the server to exit without closing the regions it is serving, without
   * closing the log it is using, and without notifying the master. Used in unit
   * testing and on catastrophic events such as HDFS being yanked out from under
   * hbase, or an OOME.
*
* @param reason
* the reason we are aborting
* @param cause
* the exception that caused the abort, or null
*/
public void abort(String reason, Throwable cause) {
String msg = "ABORTING region server " + this + ": " + reason;
if (cause != null) {
LOG.fatal(msg, cause);
} else {
LOG.fatal(msg);
}
this.abortRequested = true;
this.reservedSpace.clear();
// HBASE-4014: show list of coprocessors that were loaded to help debug
    // regionserver crashes. Note that we're implicitly using
// java.util.HashSet's toString() method to print the coprocessor names.
LOG.fatal("RegionServer abort: loaded coprocessors are: " +
CoprocessorHost.getLoadedCoprocessors());
if (this.metrics != null) {
LOG.info("Dump of metrics: " + this.metrics);
}
// Do our best to report our abort to the master, but this may not work
try {
if (cause != null) {
msg += "\nCause:\n" + StringUtils.stringifyException(cause);
}
if (hbaseMaster != null) {
ReportRSFatalErrorRequest.Builder builder =
ReportRSFatalErrorRequest.newBuilder();
ServerName sn =
ServerName.parseVersionedServerName(this.serverNameFromMasterPOV.getVersionedBytes());
builder.setServer(ProtobufUtil.toServerName(sn));
builder.setErrorMessage(msg);
hbaseMaster.reportRSFatalError(
null,builder.build());
}
} catch (Throwable t) {
LOG.warn("Unable to report fatal error to master", t);
}
stop(reason);
}
/**
* @see HRegionServer#abort(String, Throwable)
*/
public void abort(String reason) {
abort(reason, null);
}
public boolean isAborted() {
return this.abortRequested;
}
/*
   * Simulate a kill -9 of this server. Exits w/o closing regions or cleaning up
   * logs, but it does close the socket in case we want to bring up a server on
   * the old hostname+port immediately.
*/
protected void kill() {
this.killed = true;
abort("Simulated kill");
}
/**
* Wait on all threads to finish. Presumption is that all closes and stops
* have already been called.
*/
protected void join() {
Threads.shutdown(this.compactionChecker.getThread());
Threads.shutdown(this.cacheFlusher.getThread());
if (this.hlogRoller != null) {
Threads.shutdown(this.hlogRoller.getThread());
}
if (this.compactSplitThread != null) {
this.compactSplitThread.join();
}
if (this.service != null) this.service.shutdown();
if (this.replicationSourceHandler != null &&
this.replicationSourceHandler == this.replicationSinkHandler) {
this.replicationSourceHandler.stopReplicationService();
} else if (this.replicationSourceHandler != null) {
this.replicationSourceHandler.stopReplicationService();
} else if (this.replicationSinkHandler != null) {
this.replicationSinkHandler.stopReplicationService();
}
}
/**
* @return Return the object that implements the replication
* source service.
*/
ReplicationSourceService getReplicationSourceService() {
return replicationSourceHandler;
}
/**
* @return Return the object that implements the replication
* sink service.
*/
ReplicationSinkService getReplicationSinkService() {
return replicationSinkHandler;
}
/**
* Get the current master from ZooKeeper and open the RPC connection to it.
*
* Method will block until a master is available. You can break from this
* block by requesting the server stop.
*
* @return master + port, or null if server has been stopped
*/
private ServerName getMaster() {
ServerName masterServerName = null;
long previousLogTime = 0;
RegionServerStatusProtocol master = null;
boolean refresh = false; // for the first time, use cached data
while (keepLooping() && master == null) {
masterServerName = this.masterAddressManager.getMasterAddress(refresh);
if (masterServerName == null) {
if (!keepLooping()) {
// give up with no connection.
LOG.debug("No master found and cluster is stopped; bailing out");
return null;
}
LOG.debug("No master found; retry");
previousLogTime = System.currentTimeMillis();
refresh = true; // let's try pull it from ZK directly
sleeper.sleep();
continue;
}
InetSocketAddress isa =
new InetSocketAddress(masterServerName.getHostname(), masterServerName.getPort());
LOG.info("Attempting connect to Master server at " +
this.masterAddressManager.getMasterAddress());
try {
// Do initial RPC setup. The final argument indicates that the RPC
// should retry indefinitely.
master = (RegionServerStatusProtocol) HBaseRPC.waitForProxy(
RegionServerStatusProtocol.class, RegionServerStatusProtocol.VERSION,
isa, this.conf, -1,
this.rpcTimeout, this.rpcTimeout);
LOG.info("Connected to master at " + isa);
} catch (IOException e) {
e = e instanceof RemoteException ?
((RemoteException)e).unwrapRemoteException() : e;
if (e instanceof ServerNotRunningYetException) {
if (System.currentTimeMillis() > (previousLogTime+1000)){
LOG.info("Master isn't available yet, retrying");
previousLogTime = System.currentTimeMillis();
}
} else {
if (System.currentTimeMillis() > (previousLogTime + 1000)) {
LOG.warn("Unable to connect to master. Retrying. Error was:", e);
previousLogTime = System.currentTimeMillis();
}
}
try {
Thread.sleep(200);
} catch (InterruptedException ignored) {
}
}
}
this.hbaseMaster = master;
return masterServerName;
}
/**
* @return True if we should break loop because cluster is going down or
* this server has been stopped or hdfs has gone bad.
*/
private boolean keepLooping() {
return !this.stopped && isClusterUp();
}
/*
* Let the master know we're here. Run initialization using parameters passed
* to us by the master.
* @return A Map of key/value configurations we got from the Master, else
* null if we failed to register.
* @throws IOException
*/
private RegionServerStartupResponse reportForDuty() throws IOException {
RegionServerStartupResponse result = null;
ServerName masterServerName = getMaster();
if (masterServerName == null) return result;
try {
this.requestCount.set(0);
LOG.info("Telling master at " + masterServerName + " that we are up " +
"with port=" + this.isa.getPort() + ", startcode=" + this.startcode);
long now = EnvironmentEdgeManager.currentTimeMillis();
int port = this.isa.getPort();
RegionServerStartupRequest.Builder request = RegionServerStartupRequest.newBuilder();
request.setPort(port);
request.setServerStartCode(this.startcode);
request.setServerCurrentTime(now);
result = this.hbaseMaster.regionServerStartup(null, request.build());
} catch (ServiceException se) {
IOException ioe = ProtobufUtil.getRemoteException(se);
if (ioe instanceof ClockOutOfSyncException) {
LOG.fatal("Master rejected startup because clock is out of sync", ioe);
// Re-throw IOE will cause RS to abort
throw ioe;
} else {
LOG.warn("error telling master we are up", se);
}
}
return result;
}
/**
* Closes all regions. Called on our way out.
* Assumes that it's not possible for new regions to be added to onlineRegions
* while this method runs.
*/
protected void closeAllRegions(final boolean abort) {
closeUserRegions(abort);
// Only root and meta should remain. Are we carrying root or meta?
HRegion meta = null;
HRegion root = null;
this.lock.writeLock().lock();
try {
for (Map.Entry<String, HRegion> e: onlineRegions.entrySet()) {
HRegionInfo hri = e.getValue().getRegionInfo();
if (hri.isRootRegion()) {
root = e.getValue();
} else if (hri.isMetaRegion()) {
meta = e.getValue();
}
if (meta != null && root != null) break;
}
} finally {
this.lock.writeLock().unlock();
}
if (meta != null) closeRegion(meta.getRegionInfo(), abort, false);
if (root != null) closeRegion(root.getRegionInfo(), abort, false);
}
/**
* Schedule closes on all user regions.
* Should be safe calling it multiple times because it won't close regions
* that are already closed or that are closing.
* @param abort Whether we're running an abort.
*/
void closeUserRegions(final boolean abort) {
this.lock.writeLock().lock();
try {
for (Map.Entry<String, HRegion> e: this.onlineRegions.entrySet()) {
HRegion r = e.getValue();
if (!r.getRegionInfo().isMetaRegion() && r.isAvailable()) {
// Don't update zk with this close transition; pass false.
closeRegion(r.getRegionInfo(), abort, false);
}
}
} finally {
this.lock.writeLock().unlock();
}
}
/** @return the info server */
public InfoServer getInfoServer() {
return infoServer;
}
/**
* @return true if a stop has been requested.
*/
public boolean isStopped() {
return this.stopped;
}
@Override
public boolean isStopping() {
return this.stopping;
}
/**
*
* @return the configuration
*/
public Configuration getConfiguration() {
return conf;
}
/** @return the write lock for the server */
ReentrantReadWriteLock.WriteLock getWriteLock() {
return lock.writeLock();
}
public int getNumberOfOnlineRegions() {
return this.onlineRegions.size();
}
boolean isOnlineRegionsEmpty() {
return this.onlineRegions.isEmpty();
}
/**
* @param encodedRegionName
* @return JSON Map of labels to values for passed in <code>encodedRegionName</code>
* @throws IOException
*/
public byte [] getRegionStats(final String encodedRegionName)
throws IOException {
HRegion r = null;
synchronized (this.onlineRegions) {
r = this.onlineRegions.get(encodedRegionName);
}
if (r == null) return null;
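// Collect store/storefile counts and sizes (in MB) for the region and
// serialize them as a small JSON map.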
ObjectMapper mapper = new ObjectMapper();
int stores = 0;
int storefiles = 0;
int storefileSizeMB = 0;
int memstoreSizeMB = (int) (r.memstoreSize.get() / 1024 / 1024);
int storefileIndexSizeMB = 0;
synchronized (r.stores) {
stores += r.stores.size();
for (Store store : r.stores.values()) {
storefiles += store.getStorefilesCount();
storefileSizeMB += (int) (store.getStorefilesSize() / 1024 / 1024);
storefileIndexSizeMB += (int) (store.getStorefilesIndexSize() / 1024 / 1024);
}
}
Map<String, Integer> map = new TreeMap<String, Integer>();
map.put("stores", stores);
map.put("storefiles", storefiles);
map.put("storefileSizeMB", storefileSizeMB);
map.put("storefileIndexSizeMB", storefileIndexSizeMB);
map.put("memstoreSizeMB", memstoreSizeMB);
StringWriter w = new StringWriter();
mapper.writeValue(w, map);
w.close();
return Bytes.toBytes(w.toString());
}
/**
* For tests and web ui.
* This method will only work if HRegionServer is in the same JVM as client;
* HRegion cannot be serialized to cross an rpc.
* @see #getOnlineRegions()
*/
public Collection<HRegion> getOnlineRegionsLocalContext() {
Collection<HRegion> regions = this.onlineRegions.values();
return Collections.unmodifiableCollection(regions);
}
@Override
public void addToOnlineRegions(HRegion region) {
this.onlineRegions.put(region.getRegionInfo().getEncodedName(), region);
}
/**
* @return A new Map of online regions sorted by region size with the first
* entry being the biggest.
*/
public SortedMap<Long, HRegion> getCopyOfOnlineRegionsSortedBySize() {
// we'll sort the regions in reverse
SortedMap<Long, HRegion> sortedRegions = new TreeMap<Long, HRegion>(
new Comparator<Long>() {
public int compare(Long a, Long b) {
return b.compareTo(a); // descending order: biggest first
}
});
// Copy over all regions. Regions are sorted by size with biggest first.
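// Note: the map is keyed by memstore size, so two regions with identical
// sizes collide and only one of them is retained.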
for (HRegion region : this.onlineRegions.values()) {
sortedRegions.put(Long.valueOf(region.memstoreSize.get()), region);
}
return sortedRegions;
}
/** @return the request count */
public AtomicInteger getRequestCount() {
return this.requestCount;
}
/**
* @return time stamp in millis of when this region server was started
*/
public long getStartcode() {
return this.startcode;
}
/** @return reference to FlushRequester */
public FlushRequester getFlushRequester() {
return this.cacheFlusher;
}
/**
* Get the top N most loaded regions this server is serving so we can tell the
* master which regions it can reallocate if we're overloaded. TODO: actually
* calculate which regions are most loaded. (Right now, we're just grabbing
* the first N regions being served regardless of load.)
*/
protected HRegionInfo[] getMostLoadedRegions() {
ArrayList<HRegionInfo> regions = new ArrayList<HRegionInfo>();
for (HRegion r : onlineRegions.values()) {
if (!r.isAvailable()) {
continue;
}
if (regions.size() < numRegionsToReport) {
regions.add(r.getRegionInfo());
} else {
break;
}
}
return regions.toArray(new HRegionInfo[regions.size()]);
}
@Override
@QosPriority(priority=HIGH_QOS)
public ProtocolSignature getProtocolSignature(
String protocol, long version, int clientMethodsHashCode)
throws IOException {
if (protocol.equals(ClientProtocol.class.getName())) {
return new ProtocolSignature(ClientProtocol.VERSION, null);
} else if (protocol.equals(AdminProtocol.class.getName())) {
return new ProtocolSignature(AdminProtocol.VERSION, null);
}
throw new IOException("Unknown protocol: " + protocol);
}
@Override
@QosPriority(priority=HIGH_QOS)
public long getProtocolVersion(final String protocol, final long clientVersion)
throws IOException {
if (protocol.equals(ClientProtocol.class.getName())) {
return ClientProtocol.VERSION;
} else if (protocol.equals(AdminProtocol.class.getName())) {
return AdminProtocol.VERSION;
}
throw new IOException("Unknown protocol: " + protocol);
}
@Override
public Leases getLeases() {
return leases;
}
/**
* @return Return the rootDir.
*/
protected Path getRootDir() {
return rootDir;
}
/**
* @return Return the fs.
*/
public FileSystem getFileSystem() {
return fs;
}
public String toString() {
return getServerName().toString();
}
/**
* Interval at which threads should run
*
* @return the interval
*/
public int getThreadWakeFrequency() {
return threadWakeFrequency;
}
@Override
public ZooKeeperWatcher getZooKeeper() {
return zooKeeper;
}
@Override
public ServerName getServerName() {
// Our servername could change after we talk to the master.
return this.serverNameFromMasterPOV == null?
new ServerName(this.isa.getHostName(), this.isa.getPort(), this.startcode):
this.serverNameFromMasterPOV;
}
@Override
public CompactionRequestor getCompactionRequester() {
return this.compactSplitThread;
}
public ZooKeeperWatcher getZooKeeperWatcher() {
return this.zooKeeper;
}
public ConcurrentSkipListMap<byte[], Boolean> getRegionsInTransitionInRS() {
return this.regionsInTransitionInRS;
}
public ExecutorService getExecutorService() {
return service;
}
//
// Main program and support routines
//
/**
* Load the replication service objects, if any
*/
static private void createNewReplicationInstance(Configuration conf,
HRegionServer server, FileSystem fs, Path logDir, Path oldLogDir) throws IOException{
// If replication is not enabled, then return immediately.
if (!conf.getBoolean(HConstants.REPLICATION_ENABLE_KEY, false)) {
return;
}
// read in the name of the source replication class from the config file.
String sourceClassname = conf.get(HConstants.REPLICATION_SOURCE_SERVICE_CLASSNAME,
HConstants.REPLICATION_SERVICE_CLASSNAME_DEFAULT);
// read in the name of the sink replication class from the config file.
String sinkClassname = conf.get(HConstants.REPLICATION_SINK_SERVICE_CLASSNAME,
HConstants.REPLICATION_SERVICE_CLASSNAME_DEFAULT);
// If both the sink and the source class names are the same, then instantiate
// only one object.
if (sourceClassname.equals(sinkClassname)) {
server.replicationSourceHandler = (ReplicationSourceService)
newReplicationInstance(sourceClassname,
conf, server, fs, logDir, oldLogDir);
server.replicationSinkHandler = (ReplicationSinkService)
server.replicationSourceHandler;
}
else {
server.replicationSourceHandler = (ReplicationSourceService)
newReplicationInstance(sourceClassname,
conf, server, fs, logDir, oldLogDir);
server.replicationSinkHandler = (ReplicationSinkService)
newReplicationInstance(sinkClassname,
conf, server, fs, logDir, oldLogDir);
}
}
static private ReplicationService newReplicationInstance(String classname,
Configuration conf, HRegionServer server, FileSystem fs, Path logDir,
Path oldLogDir) throws IOException{
Class<?> clazz = null;
try {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
clazz = Class.forName(classname, true, classLoader);
} catch (java.lang.ClassNotFoundException nfe) {
throw new IOException("Cound not find class for " + classname);
}
// create an instance of the replication object.
ReplicationService service = (ReplicationService)
ReflectionUtils.newInstance(clazz, conf);
service.initialize(server, fs, logDir, oldLogDir);
return service;
}
/**
* @param hrs
* @return Thread the RegionServer is running in, correctly named.
* @throws IOException
*/
public static Thread startRegionServer(final HRegionServer hrs)
throws IOException {
return startRegionServer(hrs, "regionserver" + hrs.isa.getPort());
}
/**
* @param hrs
* @param name
* @return Thread the RegionServer is running in, correctly named.
* @throws IOException
*/
public static Thread startRegionServer(final HRegionServer hrs,
final String name) throws IOException {
Thread t = new Thread(hrs);
t.setName(name);
t.start();
// Install shutdown hook that will catch signals and run an orderly shutdown
// of the hrs.
ShutdownHook.install(hrs.getConfiguration(), FileSystem.get(hrs
.getConfiguration()), hrs, t);
return t;
}
/**
* Utility for constructing an instance of the passed HRegionServer class.
*
* @param regionServerClass
* @param conf2
* @return HRegionServer instance.
*/
public static HRegionServer constructRegionServer(
Class<? extends HRegionServer> regionServerClass,
final Configuration conf2) {
try {
Constructor<? extends HRegionServer> c = regionServerClass
.getConstructor(Configuration.class);
return c.newInstance(conf2);
} catch (Exception e) {
throw new RuntimeException("Failed construction of " + "Regionserver: "
+ regionServerClass.toString(), e);
}
}
/**
* @see org.apache.hadoop.hbase.regionserver.HRegionServerCommandLine
*/
public static void main(String[] args) throws Exception {
VersionInfo.logVersion();
Configuration conf = HBaseConfiguration.create();
@SuppressWarnings("unchecked")
Class<? extends HRegionServer> regionServerClass = (Class<? extends HRegionServer>) conf
.getClass(HConstants.REGION_SERVER_IMPL, HRegionServer.class);
new HRegionServerCommandLine(regionServerClass).doMain(args);
}
/**
* Gets the online regions of the specified table.
* This method looks at the in-memory onlineRegions. It does not go to <code>.META.</code>.
* Only returns <em>online</em> regions. If a region on this table has been
* closed during a disable, etc., it will not be included in the returned list.
* So, the returned list is not necessarily ALL regions in this table; it is
* all the ONLINE regions in the table.
* @param tableName
* @return Online regions from <code>tableName</code>
*/
public List<HRegion> getOnlineRegions(byte[] tableName) {
List<HRegion> tableRegions = new ArrayList<HRegion>();
synchronized (this.onlineRegions) {
for (HRegion region: this.onlineRegions.values()) {
HRegionInfo regionInfo = region.getRegionInfo();
if(Bytes.equals(regionInfo.getTableName(), tableName)) {
tableRegions.add(region);
}
}
}
return tableRegions;
}
// used by org/apache/hbase/tmpl/regionserver/RSStatusTmpl.jamon (HBASE-4070).
public String[] getCoprocessors() {
// passing fake times to buildServerLoad is okay, because we only care about the coprocessor part.
HBaseProtos.ServerLoad sl = buildServerLoad(0, 0);
return sl == null? null:
new ServerLoad(sl).getRegionServerCoprocessors();
}
/**
* Register bean with platform management server
*/
void registerMBean() {
MXBeanImpl mxBeanInfo = MXBeanImpl.init(this);
mxBean = MBeanUtil.registerMBean("RegionServer", "RegionServer",
mxBeanInfo);
LOG.info("Registered RegionServer MXBean");
}
/**
* Instantiated as a row lock lease. If the lease times out, the row lock is
* released
*/
private class RowLockListener implements LeaseListener {
private final String lockName;
private final HRegion region;
RowLockListener(final String lockName, final HRegion region) {
this.lockName = lockName;
this.region = region;
}
public void leaseExpired() {
LOG.info("Row Lock " + this.lockName + " lease expired");
Integer r = rowlocks.remove(this.lockName);
if (r != null) {
region.releaseRowLock(r);
}
}
}
/**
* Instantiated as a scanner lease. If the lease times out, the scanner is
* closed
*/
private class ScannerListener implements LeaseListener {
private final String scannerName;
ScannerListener(final String n) {
this.scannerName = n;
}
public void leaseExpired() {
RegionScanner s = scanners.remove(this.scannerName);
if (s != null) {
LOG.info("Scanner " + this.scannerName + " lease expired on region "
+ s.getRegionInfo().getRegionNameAsString());
try {
HRegion region = getRegion(s.getRegionInfo().getRegionName());
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().preScannerClose(s);
}
s.close();
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postScannerClose(s);
}
} catch (IOException e) {
LOG.error("Closing scanner for "
+ s.getRegionInfo().getRegionNameAsString(), e);
}
} else {
LOG.info("Scanner " + this.scannerName + " lease expired");
}
}
}
/**
* Method to get the Integer lock identifier used internally from the long
* lock identifier used by the client.
*
* @param lockId
* long row lock identifier from client
* @return intId Integer row lock used internally in HRegion
* @throws IOException
* Thrown if this is not a valid client lock id.
*/
Integer getLockFromId(long lockId) throws IOException {
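// A lockId of -1 means the client holds no explicit row lock, so there is
// nothing to look up.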
if (lockId == -1L) {
return null;
}
String lockName = String.valueOf(lockId);
Integer rl = rowlocks.get(lockName);
if (rl == null) {
throw new UnknownRowLockException("Invalid row lock");
}
this.leases.renewLease(lockName);
return rl;
}
/**
* Called to verify that this server is up and running.
*
* @throws IOException
*/
protected void checkOpen() throws IOException {
if (this.stopped || this.abortRequested) {
throw new RegionServerStoppedException("Server " + getServerName() +
" not running" + (this.abortRequested ? ", aborting" : ""));
}
if (!fsOk) {
throw new RegionServerStoppedException("File system not available");
}
}
protected void checkIfRegionInTransition(HRegionInfo region,
String currentAction) throws RegionAlreadyInTransitionException {
byte[] encodedName = region.getEncodedNameAsBytes();
if (this.regionsInTransitionInRS.containsKey(encodedName)) {
boolean openAction = this.regionsInTransitionInRS.get(encodedName);
// The below exception message will be used in master.
throw new RegionAlreadyInTransitionException("Received:" + currentAction +
" for the region:" + region.getRegionNameAsString() +
" ,which we are already trying to " +
(openAction ? OPEN : CLOSE) + ".");
}
}
/**
* @param region Region to close
* @param abort True if we are aborting
* @param zk True if we are to update zk about the region close; if the close
* was orchestrated by the master, then update zk. If the close is being run by
* the regionserver because it's going down, don't update zk.
* @return True if closed a region.
*/
protected boolean closeRegion(HRegionInfo region, final boolean abort,
final boolean zk) {
return closeRegion(region, abort, zk, -1, null);
}
/**
* @param region Region to close
* @param abort True if we are aborting
* @param zk True if we are to update zk about the region close; if the close
* was orchestrated by the master, then update zk. If the close is being run by
* the regionserver because it's going down, don't update zk.
* @param versionOfClosingNode
* the version of znode to compare when RS transitions the znode from
* CLOSING state.
* @return True if closed a region.
*/
protected boolean closeRegion(HRegionInfo region, final boolean abort,
final boolean zk, final int versionOfClosingNode, ServerName sn) {
if (this.regionsInTransitionInRS.containsKey(region.getEncodedNameAsBytes())) {
LOG.warn("Received close for region we are already opening or closing; " +
region.getEncodedName());
return false;
}
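// Record the close in regionsInTransitionInRS (false = closing, true =
// opening; see checkIfRegionInTransition).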
this.regionsInTransitionInRS.putIfAbsent(region.getEncodedNameAsBytes(), false);
CloseRegionHandler crh = null;
if (region.isRootRegion()) {
crh = new CloseRootHandler(this, this, region, abort, zk,
versionOfClosingNode);
} else if (region.isMetaRegion()) {
crh = new CloseMetaHandler(this, this, region, abort, zk,
versionOfClosingNode);
} else {
crh = new CloseRegionHandler(this, this, region, abort, zk, versionOfClosingNode, sn);
}
this.service.submit(crh);
return true;
}
/**
* @param regionName
* @return HRegion for the passed binary <code>regionName</code> or null if
* named region is not member of the online regions.
*/
public HRegion getOnlineRegion(final byte[] regionName) {
String encodedRegionName = HRegionInfo.encodeRegionName(regionName);
return this.onlineRegions.get(encodedRegionName);
}
@Override
public HRegion getFromOnlineRegions(final String encodedRegionName) {
return this.onlineRegions.get(encodedRegionName);
}
@Override
public boolean removeFromOnlineRegions(final String encodedRegionName, ServerName destination) {
HRegion toReturn = this.onlineRegions.remove(encodedRegionName);
if (destination != null){
addToMovedRegions(encodedRegionName, destination);
}
// Clear all of the dynamic metrics as they are now probably useless.
// We clear rather than prune because dynamic metrics can include metrics per
// cf and per hfile; figuring out which cfs, hfiles, and regions are still
// relevant to this region server would be an onerous task. Instead, just
// clear everything and let the next metrics tick re-add whatever is still
// relevant.
this.dynamicMetrics.clear();
return toReturn != null;
}
/**
* Protected utility method for safely obtaining an HRegion handle.
*
* @param regionName
* Name of online {@link HRegion} to return
* @return {@link HRegion} for <code>regionName</code>
* @throws NotServingRegionException
*/
protected HRegion getRegion(final byte[] regionName)
throws NotServingRegionException {
String encodedRegionName = HRegionInfo.encodeRegionName(regionName);
return getRegionByEncodedName(encodedRegionName);
}
protected HRegion getRegionByEncodedName(String encodedRegionName)
throws NotServingRegionException {
HRegion region = this.onlineRegions.get(encodedRegionName);
if (region == null) {
ServerName sn = getMovedRegion(encodedRegionName);
if (sn != null) {
throw new RegionMovedException(sn.getHostname(), sn.getPort());
} else {
throw new NotServingRegionException("Region is not online: " + encodedRegionName);
}
}
return region;
}
/*
* Cleanup after a Throwable was caught while invoking a method: logs the
* error and checks for OOME and filesystem trouble.
*
* @param t Throwable
*
* @return The passed <code>t</code>; callers convert it to an IOE since
* methods can only let out IOEs.
*/
protected Throwable cleanup(final Throwable t) {
return cleanup(t, null);
}
/*
* Cleanup after a Throwable was caught while invoking a method: logs the
* error and checks for OOME and filesystem trouble.
*
* @param t Throwable
*
* @param msg Message to log in error. Can be null.
*
* @return The passed <code>t</code>; callers convert it to an IOE since
* methods can only let out IOEs.
*/
protected Throwable cleanup(final Throwable t, final String msg) {
// Don't log as error if NSRE; NSRE is 'normal' operation.
if (t instanceof NotServingRegionException) {
LOG.debug("NotServingRegionException; " + t.getMessage());
return t;
}
if (msg == null) {
LOG.error("", RemoteExceptionHandler.checkThrowable(t));
} else {
LOG.error(msg, RemoteExceptionHandler.checkThrowable(t));
}
if (!checkOOME(t)) {
checkFileSystem();
}
return t;
}
/*
* @param t
*
* @return Make <code>t</code> an IOE if it isn't already.
*/
protected IOException convertThrowableToIOE(final Throwable t) {
return convertThrowableToIOE(t, null);
}
/*
* @param t
*
* @param msg Message to put in new IOE if passed <code>t</code> is not an IOE
*
* @return Make <code>t</code> an IOE if it isn't already.
*/
protected IOException convertThrowableToIOE(final Throwable t, final String msg) {
return (t instanceof IOException ? (IOException) t : msg == null
|| msg.length() == 0 ? new IOException(t) : new IOException(msg, t));
}
/*
* Check if the Throwable is an OOME and, if so, abort immediately to avoid creating more objects.
*
* @param e
*
* @return True if we OOME'd and are aborting.
*/
public boolean checkOOME(final Throwable e) {
boolean stop = false;
try {
if (e instanceof OutOfMemoryError
|| (e.getCause() != null && e.getCause() instanceof OutOfMemoryError)
|| (e.getMessage() != null && e.getMessage().contains(
"java.lang.OutOfMemoryError"))) {
stop = true;
LOG.fatal(
"Run out of memory; HRegionServer will abort itself immediately", e);
}
} finally {
if (stop) {
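// Use halt(), not exit(): exit would run shutdown hooks, which may
// allocate and fail under OOME; we want to die as fast as possible.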
Runtime.getRuntime().halt(1);
}
}
return stop;
}
/**
* Checks to see if the file system is still accessible. If not, aborts the
* server and marks the file system as unavailable.
*
* @return false if file system is not available
*/
public boolean checkFileSystem() {
if (this.fsOk && this.fs != null) {
try {
FSUtils.checkFileSystemAvailable(this.fs);
} catch (IOException e) {
abort("File System not available", e);
this.fsOk = false;
}
}
return this.fsOk;
}
protected long addRowLock(Integer r, HRegion region)
throws LeaseStillHeldException {
long lockId = nextLong();
String lockName = String.valueOf(lockId);
rowlocks.put(lockName, r);
this.leases.createLease(lockName, this.rowLockLeaseTimeoutPeriod, new RowLockListener(lockName,
region));
return lockId;
}
protected long addScanner(RegionScanner s) throws LeaseStillHeldException {
long scannerId = nextLong();
String scannerName = String.valueOf(scannerId);
scanners.put(scannerName, s);
this.leases.createLease(scannerName, this.scannerLeaseTimeoutPeriod, new ScannerListener(
scannerName));
return scannerId;
}
/**
* Generate a random positive long number
*
* @return a random positive long number
*/
protected long nextLong() {
long n = rand.nextLong();
if (n == 0 || n == Long.MIN_VALUE) {
// Zero is rejected as an id, and Long.MIN_VALUE cannot be negated to a
// positive value, so draw again in both cases.
return nextLong();
}
if (n < 0) {
n = -n;
}
return n;
}
// Start Client methods
/**
* Get data from a table.
*
* @param controller the RPC controller
* @param request the get request
* @throws ServiceException
*/
@Override
public GetResponse get(final RpcController controller,
final GetRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
GetResponse.Builder builder = GetResponse.newBuilder();
ClientProtos.Get get = request.getGet();
Boolean existence = null;
Result r = null;
if (request.getClosestRowBefore()) {
if (get.getColumnCount() != 1) {
throw new DoNotRetryIOException(
"get ClosestRowBefore supports one and only one family now, not "
+ get.getColumnCount() + " families");
}
byte[] row = get.getRow().toByteArray();
byte[] family = get.getColumn(0).getFamily().toByteArray();
r = region.getClosestRowBefore(row, family);
} else {
Get clientGet = ProtobufUtil.toGet(get);
if (request.getExistenceOnly() && region.getCoprocessorHost() != null) {
existence = region.getCoprocessorHost().preExists(clientGet);
}
if (existence == null) {
Integer lock = getLockFromId(clientGet.getLockId());
r = region.get(clientGet, lock);
if (request.getExistenceOnly()) {
boolean exists = r != null && !r.isEmpty();
if (region.getCoprocessorHost() != null) {
exists = region.getCoprocessorHost().postExists(clientGet, exists);
}
existence = Boolean.valueOf(exists);
}
}
}
if (existence != null) {
builder.setExists(existence.booleanValue());
} else if (r != null) {
builder.setResult(ProtobufUtil.toResult(r));
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Mutate data in a table.
*
* @param controller the RPC controller
* @param request the mutate request
* @throws ServiceException
*/
@Override
public MutateResponse mutate(final RpcController controller,
final MutateRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
MutateResponse.Builder builder = MutateResponse.newBuilder();
Mutate mutate = request.getMutate();
if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
Integer lock = null;
Result r = null;
Boolean processed = null;
MutateType type = mutate.getMutateType();
switch (type) {
case APPEND:
r = append(region, mutate);
break;
case INCREMENT:
r = increment(region, mutate);
break;
case PUT:
Put put = ProtobufUtil.toPut(mutate);
lock = getLockFromId(put.getLockId());
if (request.hasCondition()) {
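// checkAndPut: atomically compare the current cell value against the
// condition and apply the Put only if the comparison succeeds.
// Coprocessor hooks may short-circuit (pre) or override (post) the result.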
Condition condition = request.getCondition();
byte[] row = condition.getRow().toByteArray();
byte[] family = condition.getFamily().toByteArray();
byte[] qualifier = condition.getQualifier().toByteArray();
CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name());
WritableByteArrayComparable comparator =
(WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator());
if (region.getCoprocessorHost() != null) {
processed = region.getCoprocessorHost().preCheckAndPut(
row, family, qualifier, compareOp, comparator, put);
}
if (processed == null) {
boolean result = region.checkAndMutate(row, family,
qualifier, compareOp, comparator, put, lock, true);
if (region.getCoprocessorHost() != null) {
result = region.getCoprocessorHost().postCheckAndPut(row, family,
qualifier, compareOp, comparator, put, result);
}
processed = Boolean.valueOf(result);
}
} else {
region.put(put, lock);
processed = Boolean.TRUE;
}
break;
case DELETE:
Delete delete = ProtobufUtil.toDelete(mutate);
lock = getLockFromId(delete.getLockId());
if (request.hasCondition()) {
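// checkAndDelete: same compare-then-mutate flow as the checkAndPut
// path above.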
Condition condition = request.getCondition();
byte[] row = condition.getRow().toByteArray();
byte[] family = condition.getFamily().toByteArray();
byte[] qualifier = condition.getQualifier().toByteArray();
CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name());
WritableByteArrayComparable comparator =
(WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator());
if (region.getCoprocessorHost() != null) {
processed = region.getCoprocessorHost().preCheckAndDelete(
row, family, qualifier, compareOp, comparator, delete);
}
if (processed == null) {
boolean result = region.checkAndMutate(row, family,
qualifier, compareOp, comparator, delete, lock, true);
if (region.getCoprocessorHost() != null) {
result = region.getCoprocessorHost().postCheckAndDelete(row, family,
qualifier, compareOp, comparator, delete, result);
}
processed = Boolean.valueOf(result);
}
} else {
region.delete(delete, lock, delete.getWriteToWAL());
processed = Boolean.TRUE;
}
break;
default:
throw new DoNotRetryIOException(
"Unsupported mutate type: " + type.name());
}
if (processed != null) {
builder.setProcessed(processed.booleanValue());
} else if (r != null) {
builder.setResult(ProtobufUtil.toResult(r));
}
return builder.build();
} catch (IOException ie) {
checkFileSystem();
throw new ServiceException(ie);
}
}
//
// remote scanner interface
//
/**
* Scan data in a table.
*
* @param controller the RPC controller
* @param request the scan request
* @throws ServiceException
*/
@Override
public ScanResponse scan(final RpcController controller,
final ScanRequest request) throws ServiceException {
Leases.Lease lease = null;
String scannerName = null;
try {
if (!request.hasScannerId() && !request.hasScan()) {
throw new DoNotRetryIOException(
"Missing required input: scannerId or scan");
}
long scannerId = -1;
if (request.hasScannerId()) {
scannerId = request.getScannerId();
scannerName = String.valueOf(scannerId);
}
try {
checkOpen();
} catch (IOException e) {
// If checkOpen failed, the server is not running or the filesystem is
// gone; cancel this scanner's lease since we cannot serve it anyway.
if (scannerName != null) {
try {
leases.cancelLease(scannerName);
} catch (LeaseException le) {
LOG.info("Server shutting down and client tried to access missing scanner " +
scannerName);
}
}
throw e;
}
requestCount.incrementAndGet();
try {
int ttl = 0;
HRegion region = null;
RegionScanner scanner = null;
boolean moreResults = true;
boolean closeScanner = false;
ScanResponse.Builder builder = ScanResponse.newBuilder();
if (request.hasCloseScanner()) {
closeScanner = request.getCloseScanner();
}
int rows = 1;
if (request.hasNumberOfRows()) {
rows = request.getNumberOfRows();
}
if (request.hasScannerId()) {
scanner = scanners.get(scannerName);
if (scanner == null) {
throw new UnknownScannerException(
"Name: " + scannerName + ", already closed?");
}
region = getRegion(scanner.getRegionInfo().getRegionName());
} else {
region = getRegion(request.getRegion());
ClientProtos.Scan protoScan = request.getScan();
Scan scan = ProtobufUtil.toScan(protoScan);
region.prepareScanner(scan);
if (region.getCoprocessorHost() != null) {
scanner = region.getCoprocessorHost().preScannerOpen(scan);
}
if (scanner == null) {
scanner = region.getScanner(scan);
}
if (region.getCoprocessorHost() != null) {
scanner = region.getCoprocessorHost().postScannerOpen(scan, scanner);
}
scannerId = addScanner(scanner);
scannerName = String.valueOf(scannerId);
ttl = this.scannerLeaseTimeoutPeriod;
}
if (rows > 0) {
try {
// Remove lease while it's being processed in server; protects against case
// where processing of request takes > lease expiration time.
lease = leases.removeLease(scannerName);
List<Result> results = new ArrayList<Result>(rows);
long currentScanResultSize = 0;
boolean done = false;
// Call coprocessor. Get region info from scanner.
if (region != null && region.getCoprocessorHost() != null) {
Boolean bypass = region.getCoprocessorHost().preScannerNext(
scanner, results, rows);
if (!results.isEmpty()) {
for (Result r : results) {
for (KeyValue kv : r.raw()) {
currentScanResultSize += kv.heapSize();
}
}
}
if (bypass != null && bypass.booleanValue()) {
done = true;
}
}
if (!done) {
long maxResultSize = scanner.getMaxResultSize();
if (maxResultSize <= 0) {
maxResultSize = maxScannerResultSize;
}
List<KeyValue> values = new ArrayList<KeyValue>();
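// Fetch up to 'rows' rows, stopping early once the accumulated KeyValue
// heap size reaches maxResultSize.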
for (int i = 0; i < rows
&& currentScanResultSize < maxResultSize; i++) {
// Collect values to be returned here
boolean moreRows = scanner.next(values, SchemaMetrics.METRIC_NEXTSIZE);
if (!values.isEmpty()) {
for (KeyValue kv : values) {
currentScanResultSize += kv.heapSize();
}
results.add(new Result(values));
}
if (!moreRows) {
break;
}
values.clear();
}
// coprocessor postNext hook
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postScannerNext(scanner, results, rows, true);
}
}
// If the scanner's filter - if any - is done with the scan, tell the
// client to stop the scan. This is done by passing a null result and
// setting moreResults to false.
if (scanner.isFilterDone() && results.isEmpty()) {
moreResults = false;
results = null;
} else {
for (Result result: results) {
if (result != null) {
builder.addResult(ProtobufUtil.toResult(result));
}
}
}
} finally {
// We're done. On way out re-add the above removed lease.
// Adding resets expiration time on lease.
if (scanners.containsKey(scannerName)) {
if (lease != null) leases.addLease(lease);
ttl = this.scannerLeaseTimeoutPeriod;
}
}
}
if (!moreResults || closeScanner) {
ttl = 0;
moreResults = false;
if (region != null && region.getCoprocessorHost() != null) {
if (region.getCoprocessorHost().preScannerClose(scanner)) {
return builder.build(); // bypass
}
}
scanner = scanners.remove(scannerName);
if (scanner != null) {
scanner.close();
leases.cancelLease(scannerName);
if (region != null && region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postScannerClose(scanner);
}
}
}
if (ttl > 0) {
builder.setTtl(ttl);
}
builder.setScannerId(scannerId);
builder.setMoreResults(moreResults);
return builder.build();
} catch (Throwable t) {
if (scannerName != null &&
t instanceof NotServingRegionException) {
scanners.remove(scannerName);
}
throw convertThrowableToIOE(cleanup(t));
}
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Lock a row in a table.
*
* @param controller the RPC controller
* @param request the lock row request
* @throws ServiceException
*/
@Override
public LockRowResponse lockRow(final RpcController controller,
final LockRowRequest request) throws ServiceException {
try {
if (request.getRowCount() != 1) {
throw new DoNotRetryIOException(
"lockRow supports only one row now, not " + request.getRowCount() + " rows");
}
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
byte[] row = request.getRow(0).toByteArray();
try {
Integer r = region.obtainRowLock(row);
long lockId = addRowLock(r, region);
LOG.debug("Row lock " + lockId + " explicitly acquired by client");
LockRowResponse.Builder builder = LockRowResponse.newBuilder();
builder.setLockId(lockId);
return builder.build();
} catch (Throwable t) {
throw convertThrowableToIOE(cleanup(t,
"Error obtaining row lock (fsOk: " + this.fsOk + ")"));
}
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Unlock a locked row in a table.
*
* @param controller the RPC controller
* @param request the unlock row request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public UnlockRowResponse unlockRow(final RpcController controller,
final UnlockRowRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
if (!request.hasLockId()) {
throw new DoNotRetryIOException(
"Invalid unlock rowrequest, missing lock id");
}
long lockId = request.getLockId();
String lockName = String.valueOf(lockId);
try {
Integer r = rowlocks.remove(lockName);
if (r == null) {
throw new UnknownRowLockException(lockName);
}
region.releaseRowLock(r);
this.leases.cancelLease(lockName);
LOG.debug("Row lock " + lockId
+ " has been explicitly released by client");
return UnlockRowResponse.newBuilder().build();
} catch (Throwable t) {
throw convertThrowableToIOE(cleanup(t));
}
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Atomically bulk load several HFiles into an open region
* @return true if successful, false if failed recoverably (no action taken)
* @throws IOException if failed unrecoverably
*/
@Override
public BulkLoadHFileResponse bulkLoadHFile(final RpcController controller,
final BulkLoadHFileRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
List<Pair<byte[], String>> familyPaths = new ArrayList<Pair<byte[], String>>();
for (FamilyPath familyPath: request.getFamilyPathList()) {
familyPaths.add(new Pair<byte[], String>(familyPath.getFamily().toByteArray(),
familyPath.getPath()));
}
boolean bypass = false;
if (region.getCoprocessorHost() != null) {
bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
}
boolean loaded = false;
if (!bypass) {
loaded = region.bulkLoadHFiles(familyPaths);
}
if (region.getCoprocessorHost() != null) {
loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded);
}
BulkLoadHFileResponse.Builder builder = BulkLoadHFileResponse.newBuilder();
builder.setLoaded(loaded);
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol}
* method using the registered protocol handlers.
* {@link CoprocessorProtocol} implementations must be registered per-region
* via the
* {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)}
* method before they are available.
*
* @param regionName name of the region against which the invocation is executed
* @param call an {@code Exec} instance identifying the protocol, method name,
* and parameters for the method invocation
* @return an {@code ExecResult} instance containing the region name of the
* invocation and the return value
* @throws IOException if no registered protocol handler is found or an error
* occurs during the invocation
* @see org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)
*/
@Override
public ExecCoprocessorResponse execCoprocessor(final RpcController controller,
final ExecCoprocessorRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
ExecCoprocessorResponse.Builder
builder = ExecCoprocessorResponse.newBuilder();
ClientProtos.Exec call = request.getCall();
Exec clientCall = ProtobufUtil.toExec(call);
ExecResult result = region.exec(clientCall);
builder.setValue(ProtobufUtil.toParameter(result.getValue()));
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Execute multiple actions on a table: get, mutate, and/or execCoprocessor
*
* @param controller the RPC controller
* @param request the multi request
* @throws ServiceException
*/
@Override
public MultiResponse multi(final RpcController controller,
final MultiRequest request) throws ServiceException {
try {
HRegion region = getRegion(request.getRegion());
MultiResponse.Builder builder = MultiResponse.newBuilder();
if (request.hasAtomic() && request.getAtomic()) {
List<Mutate> mutates = new ArrayList<Mutate>();
for (ClientProtos.MultiAction actionUnion : request.getActionList()) {
if (actionUnion.hasMutate()) {
mutates.add(actionUnion.getMutate());
} else {
throw new DoNotRetryIOException(
"Unsupported atomic action type: " + actionUnion);
}
}
mutateRows(region, mutates);
} else {
ActionResult.Builder resultBuilder = null;
List<Mutate> mutates = new ArrayList<Mutate>();
for (ClientProtos.MultiAction actionUnion : request.getActionList()) {
requestCount.incrementAndGet();
try {
Object result = null;
if (actionUnion.hasGet()) {
Get get = ProtobufUtil.toGet(actionUnion.getGet());
Integer lock = getLockFromId(get.getLockId());
Result r = region.get(get, lock);
if (r != null) {
result = ProtobufUtil.toResult(r);
}
} else if (actionUnion.hasMutate()) {
Mutate mutate = actionUnion.getMutate();
MutateType type = mutate.getMutateType();
if (type != MutateType.PUT && type != MutateType.DELETE) {
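// Appends/Increments are applied immediately, so flush any buffered
// Puts/Deletes first to preserve request order.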
if (!mutates.isEmpty()) {
doBatchOp(builder, region, mutates);
mutates.clear();
} else if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
}
Result r = null;
switch (type) {
case APPEND:
r = append(region, mutate);
break;
case INCREMENT:
r = increment(region, mutate);
break;
case PUT:
mutates.add(mutate);
break;
case DELETE:
mutates.add(mutate);
break;
default:
throw new DoNotRetryIOException("Unsupported mutate type: " + type.name());
}
if (r != null) {
result = ProtobufUtil.toResult(r);
}
} else if (actionUnion.hasExec()) {
Exec call = ProtobufUtil.toExec(actionUnion.getExec());
result = region.exec(call).getValue();
} else {
LOG.warn("Error: invalid action: " + actionUnion + ". "
+ "it must be a Get, Mutate, or Exec.");
throw new DoNotRetryIOException("Invalid action, "
+ "it must be a Get, Mutate, or Exec.");
}
if (result != null) {
if (resultBuilder == null) {
resultBuilder = ActionResult.newBuilder();
} else {
resultBuilder.clear();
}
NameBytesPair value = ProtobufUtil.toParameter(result);
resultBuilder.setValue(value);
builder.addResult(resultBuilder.build());
}
} catch (IOException ie) {
builder.addResult(ResponseConverter.buildActionResult(ie));
}
}
if (!mutates.isEmpty()) {
doBatchOp(builder, region, mutates);
}
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
// End Client methods
// Start Admin methods
@Override
@QosPriority(priority=HIGH_QOS)
public GetRegionInfoResponse getRegionInfo(final RpcController controller,
final GetRegionInfoRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
HRegionInfo info = region.getRegionInfo();
GetRegionInfoResponse.Builder builder = GetRegionInfoResponse.newBuilder();
builder.setRegionInfo(HRegionInfo.convert(info));
if (request.hasCompactionState() && request.getCompactionState()) {
builder.setCompactionState(
CompactionRequest.getCompactionState(info.getRegionId()));
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
public GetStoreFileResponse getStoreFile(final RpcController controller,
final GetStoreFileRequest request) throws ServiceException {
try {
HRegion region = getRegion(request.getRegion());
requestCount.incrementAndGet();
Set<byte[]> columnFamilies = null;
if (request.getFamilyCount() == 0) {
columnFamilies = region.getStores().keySet();
} else {
columnFamilies = new HashSet<byte[]>();
for (ByteString cf: request.getFamilyList()) {
columnFamilies.add(cf.toByteArray());
}
}
int nCF = columnFamilies.size();
List<String> fileList = region.getStoreFileList(
columnFamilies.toArray(new byte[nCF][]));
GetStoreFileResponse.Builder builder = GetStoreFileResponse.newBuilder();
builder.addAllStoreFile(fileList);
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
@QosPriority(priority=HIGH_QOS)
public GetOnlineRegionResponse getOnlineRegion(final RpcController controller,
final GetOnlineRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
List<HRegionInfo> list = new ArrayList<HRegionInfo>(onlineRegions.size());
for (HRegion region: this.onlineRegions.values()) {
list.add(region.getRegionInfo());
}
Collections.sort(list);
return ResponseConverter.buildGetOnlineRegionResponse(list);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
// Region open/close direct RPCs
/**
* Open a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public OpenRegionResponse openRegion(final RpcController controller, final OpenRegionRequest request)
throws ServiceException {
int versionOfOfflineNode = -1;
if (request.hasVersionOfOfflineNode()) {
versionOfOfflineNode = request.getVersionOfOfflineNode();
}
try {
checkOpen();
} catch (IOException ie) {
throw new ServiceException(ie);
}
requestCount.incrementAndGet();
OpenRegionResponse.Builder builder = OpenRegionResponse.newBuilder();
Map<String, HTableDescriptor> htds = new HashMap<String, HTableDescriptor>(
request.getRegionList().size());
boolean isBulkAssign = request.getRegionList().size() > 1;
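// For bulk assigns, record per-region failures in the response instead of
// failing the whole RPC (see the catch blocks below).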
for (RegionInfo regionInfo : request.getRegionList()) {
HRegionInfo region = HRegionInfo.convert(regionInfo);
try {
checkIfRegionInTransition(region, OPEN);
HRegion onlineRegion = getFromOnlineRegions(region.getEncodedName());
if (null != onlineRegion) {
// See HBASE-5094. Cross check with META whether this RS still owns
// the region.
Pair<HRegionInfo, ServerName> p = MetaReader.getRegion(
this.catalogTracker, region.getRegionName());
if (this.getServerName().equals(p.getSecond())) {
LOG.warn("Attempted open of " + region.getEncodedName()
+ " but already online on this server");
builder.addOpeningState(RegionOpeningState.ALREADY_OPENED);
continue;
} else {
LOG.warn("The region " + region.getEncodedName()
+ " is online on this server but META does not have this server.");
removeFromOnlineRegions(region.getEncodedName(), null);
}
}
LOG.info("Received request to open region: " + region.getRegionNameAsString() + " on "
+ this.serverNameFromMasterPOV);
HTableDescriptor htd = htds.get(region.getTableNameAsString());
if (htd == null) {
htd = this.tableDescriptors.get(region.getTableName());
htds.put(region.getTableNameAsString(), htd);
}
this.regionsInTransitionInRS.putIfAbsent(
region.getEncodedNameAsBytes(), true);
// Need to pass the expected version in the constructor.
if (region.isRootRegion()) {
this.service.submit(new OpenRootHandler(this, this, region, htd,
versionOfOfflineNode));
} else if (region.isMetaRegion()) {
this.service.submit(new OpenMetaHandler(this, this, region, htd,
versionOfOfflineNode));
} else {
this.service.submit(new OpenRegionHandler(this, this, region, htd,
versionOfOfflineNode));
}
builder.addOpeningState(RegionOpeningState.OPENED);
} catch (RegionAlreadyInTransitionException rie) {
LOG.warn("Region is already in transition", rie);
if (isBulkAssign) {
builder.addOpeningState(RegionOpeningState.OPENED);
} else {
throw new ServiceException(rie);
}
} catch (IOException ie) {
LOG.warn("Failed opening region " + region.getRegionNameAsString(), ie);
if (isBulkAssign) {
builder.addOpeningState(RegionOpeningState.FAILED_OPENING);
} else {
throw new ServiceException(ie);
}
}
}
return builder.build();
}
/**
* Close a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public CloseRegionResponse closeRegion(final RpcController controller,
final CloseRegionRequest request) throws ServiceException {
int versionOfClosingNode = -1;
if (request.hasVersionOfClosingNode()) {
versionOfClosingNode = request.getVersionOfClosingNode();
}
boolean zk = request.getTransitionInZK();
final ServerName sn = (request.hasDestinationServer() ?
ProtobufUtil.toServerName(request.getDestinationServer()) : null);
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
CloseRegionResponse.Builder
builder = CloseRegionResponse.newBuilder();
LOG.info("Received close region: " + region.getRegionNameAsString() +
". Version of ZK closing node:" + versionOfClosingNode +
". Destination server:" + sn);
HRegionInfo regionInfo = region.getRegionInfo();
checkIfRegionInTransition(regionInfo, CLOSE);
boolean closed = closeRegion(
regionInfo, false, zk, versionOfClosingNode, sn);
builder.setClosed(closed);
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Flush a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public FlushRegionResponse flushRegion(final RpcController controller,
final FlushRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
LOG.info("Flushing " + region.getRegionNameAsString());
boolean shouldFlush = true;
if (request.hasIfOlderThanTs()) {
shouldFlush = region.getLastFlushTime() < request.getIfOlderThanTs();
}
FlushRegionResponse.Builder builder = FlushRegionResponse.newBuilder();
if (shouldFlush) {
builder.setFlushed(region.flushcache());
}
builder.setLastFlushTime(region.getLastFlushTime());
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Split a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public SplitRegionResponse splitRegion(final RpcController controller,
final SplitRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
LOG.info("Splitting " + region.getRegionNameAsString());
region.flushcache();
byte[] splitPoint = null;
if (request.hasSplitPoint()) {
splitPoint = request.getSplitPoint().toByteArray();
}
region.forceSplit(splitPoint);
compactSplitThread.requestSplit(region, region.checkSplit());
return SplitRegionResponse.newBuilder().build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Compact a region on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public CompactRegionResponse compactRegion(final RpcController controller,
final CompactRegionRequest request) throws ServiceException {
try {
checkOpen();
requestCount.incrementAndGet();
HRegion region = getRegion(request.getRegion());
LOG.info("Compacting " + region.getRegionNameAsString());
boolean major = false;
if (request.hasMajor()) {
major = request.getMajor();
}
if (major) {
region.triggerMajorCompaction();
}
LOG.trace("User-triggered compaction requested for region " +
region.getRegionNameAsString());
compactSplitThread.requestCompaction(region,
"User-triggered " + (major ? "major " : "") + "compaction",
Store.PRIORITY_USER);
return CompactRegionResponse.newBuilder().build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Replicate WAL entries on the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
@QosPriority(priority=HIGH_QOS)
public ReplicateWALEntryResponse replicateWALEntry(final RpcController controller,
final ReplicateWALEntryRequest request) throws ServiceException {
try {
if (replicationSinkHandler != null) {
checkOpen();
requestCount.incrementAndGet();
HLog.Entry[] entries = ProtobufUtil.toHLogEntries(request.getEntryList());
if (entries != null && entries.length > 0) {
replicationSinkHandler.replicateLogEntries(entries);
}
}
return ReplicateWALEntryResponse.newBuilder().build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Roll the WAL writer of the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
public RollWALWriterResponse rollWALWriter(final RpcController controller,
final RollWALWriterRequest request) throws ServiceException {
try {
requestCount.incrementAndGet();
HLog wal = this.getWAL();
byte[][] regionsToFlush = wal.rollWriter(true);
RollWALWriterResponse.Builder builder = RollWALWriterResponse.newBuilder();
if (regionsToFlush != null) {
for (byte[] region: regionsToFlush) {
builder.addRegionToFlush(ByteString.copyFrom(region));
}
}
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* Stop the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
public StopServerResponse stopServer(final RpcController controller,
final StopServerRequest request) throws ServiceException {
requestCount.incrementAndGet();
String reason = request.getReason();
stop(reason);
return StopServerResponse.newBuilder().build();
}
/**
* Get some information of the region server.
*
* @param controller the RPC controller
* @param request the request
* @throws ServiceException
*/
@Override
public GetServerInfoResponse getServerInfo(final RpcController controller,
final GetServerInfoRequest request) throws ServiceException {
ServerName serverName = getServerName();
requestCount.incrementAndGet();
return ResponseConverter.buildGetServerInfoResponse(serverName, webuiport);
}
// End Admin methods
/**
* Find the HRegion based on a region specifier
*
* @param regionSpecifier the region specifier
* @return the corresponding region
* @throws IOException if the specifier is not null
* but the region could not be found
*/
protected HRegion getRegion(
final RegionSpecifier regionSpecifier) throws IOException {
byte[] value = regionSpecifier.getValue().toByteArray();
RegionSpecifierType type = regionSpecifier.getType();
checkOpen();
switch (type) {
case REGION_NAME:
return getRegion(value);
case ENCODED_REGION_NAME:
return getRegionByEncodedName(Bytes.toString(value));
default:
throw new DoNotRetryIOException(
"Unsupported region specifier type: " + type);
}
}
/**
* Execute an append mutation.
*
* @param region
* @param mutate
* @return the Result of the append operation
* @throws IOException
*/
protected Result append(final HRegion region,
final Mutate mutate) throws IOException {
Append append = ProtobufUtil.toAppend(mutate);
Result r = null;
if (region.getCoprocessorHost() != null) {
r = region.getCoprocessorHost().preAppend(append);
}
if (r == null) {
Integer lock = getLockFromId(append.getLockId());
r = region.append(append, lock, append.getWriteToWAL());
if (region.getCoprocessorHost() != null) {
region.getCoprocessorHost().postAppend(append, r);
}
}
return r;
}
/**
* Execute an increment mutation.
*
* @param region
* @param mutate
* @return the Result of the increment operation
* @throws IOException
*/
protected Result increment(final HRegion region,
final Mutate mutate) throws IOException {
Increment increment = ProtobufUtil.toIncrement(mutate);
Result r = null;
if (region.getCoprocessorHost() != null) {
r = region.getCoprocessorHost().preIncrement(increment);
}
if (r == null) {
Integer lock = getLockFromId(increment.getLockId());
r = region.increment(increment, lock, increment.getWriteToWAL());
if (region.getCoprocessorHost() != null) {
r = region.getCoprocessorHost().postIncrement(increment, r);
}
}
return r;
}
/**
* Execute a list of Put/Delete mutations.
*
* @param builder
* @param region
* @param mutates
*/
protected void doBatchOp(final MultiResponse.Builder builder,
final HRegion region, final List<Mutate> mutates) {
@SuppressWarnings("unchecked")
Pair<Mutation, Integer>[] mutationsWithLocks = new Pair[mutates.size()];
try {
ActionResult.Builder resultBuilder = ActionResult.newBuilder();
NameBytesPair value = ProtobufUtil.toParameter(new Result());
resultBuilder.setValue(value);
ActionResult result = resultBuilder.build();
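// Pre-fill one shared success result per mutation; failed operations are
// patched in by index after the batch runs.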
int i = 0;
for (Mutate m : mutates) {
Mutation mutation = null;
if (m.getMutateType() == MutateType.PUT) {
mutation = ProtobufUtil.toPut(m);
} else {
mutation = ProtobufUtil.toDelete(m);
}
Integer lock = getLockFromId(mutation.getLockId());
mutationsWithLocks[i++] = new Pair<Mutation, Integer>(mutation, lock);
builder.addResult(result);
}
requestCount.addAndGet(mutates.size());
if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
OperationStatus[] codes = region.batchMutate(mutationsWithLocks);
for (i = 0; i < codes.length; i++) {
if (codes[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {
result = ResponseConverter.buildActionResult(
new DoNotRetryIOException(codes[i].getExceptionMsg()));
builder.setResult(i, result);
}
}
} catch (IOException ie) {
ActionResult result = ResponseConverter.buildActionResult(ie);
for (int i = 0, n = mutates.size(); i < n; i++) {
builder.setResult(i, result);
}
}
}
/**
* Mutate a list of rows atomically.
*
* @param region
* @param mutates
* @throws IOException
*/
protected void mutateRows(final HRegion region,
final List<Mutate> mutates) throws IOException {
Mutate firstMutate = mutates.get(0);
if (!region.getRegionInfo().isMetaTable()) {
cacheFlusher.reclaimMemStoreMemory();
}
byte[] row = firstMutate.getRow().toByteArray();
RowMutations rm = new RowMutations(row);
for (Mutate mutate: mutates) {
MutateType type = mutate.getMutateType();
switch (type) {
case PUT:
rm.add(ProtobufUtil.toPut(mutate));
break;
case DELETE:
rm.add(ProtobufUtil.toDelete(mutate));
break;
default:
throw new DoNotRetryIOException(
"mutate supports atomic put and/or delete, not "
+ type.name());
}
}
region.mutateRow(rm);
}
  // This map will contain all the regions that we closed for a move.
  // We add the time it was moved as we don't want to keep too old information
protected Map<String, Pair<Long, ServerName>> movedRegions =
new ConcurrentHashMap<String, Pair<Long, ServerName>>(3000);
  // We need a timeout; without one we risk returning stale information, which would double
  // the number of network calls instead of reducing them.
private static final int TIMEOUT_REGION_MOVED = (2 * 60 * 1000);
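  // With this timeout, a region moved at time t is reported with its new
  // location until t + TIMEOUT_REGION_MOVED; after that the entry is evicted
  // (see getMovedRegion below) and callers fall back to a regular lookup.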
protected void addToMovedRegions(HRegionInfo hri, ServerName destination){
addToMovedRegions(hri.getEncodedName(), destination);
}
protected void addToMovedRegions(String encodedName, ServerName destination){
final Long time = System.currentTimeMillis();
movedRegions.put(
encodedName,
new Pair<Long, ServerName>(time, destination));
}
private ServerName getMovedRegion(final String encodedRegionName) {
Pair<Long, ServerName> dest = movedRegions.get(encodedRegionName);
if (dest != null) {
if (dest.getFirst() > (System.currentTimeMillis() - TIMEOUT_REGION_MOVED)) {
return dest.getSecond();
} else {
movedRegions.remove(encodedRegionName);
}
}
return null;
}
/**
* Remove the expired entries from the moved regions list.
*/
protected void cleanMovedRegions(){
final long cutOff = System.currentTimeMillis() - TIMEOUT_REGION_MOVED;
Iterator<Entry<String, Pair<Long, ServerName>>> it = movedRegions.entrySet().iterator();
while (it.hasNext()){
Map.Entry<String, Pair<Long, ServerName>> e = it.next();
if (e.getValue().getFirst() < cutOff){
it.remove();
}
}
}
/**
* Creates a Chore thread to clean the moved region cache.
*/
protected static class MovedRegionsCleaner extends Chore implements Stoppable {
private HRegionServer regionServer;
Stoppable stoppable;
private MovedRegionsCleaner(
HRegionServer regionServer, Stoppable stoppable){
super("MovedRegionsCleaner for region "+regionServer, TIMEOUT_REGION_MOVED, stoppable);
this.regionServer = regionServer;
this.stoppable = stoppable;
}
static MovedRegionsCleaner createAndStart(HRegionServer rs){
Stoppable stoppable = new Stoppable() {
private volatile boolean isStopped = false;
@Override public void stop(String why) { isStopped = true;}
@Override public boolean isStopped() {return isStopped;}
};
return new MovedRegionsCleaner(rs, stoppable);
}
@Override
protected void chore() {
regionServer.cleanMovedRegions();
}
@Override
public void stop(String why) {
stoppable.stop(why);
}
@Override
public boolean isStopped() {
return stoppable.isStopped();
}
}
private String getMyEphemeralNodePath() {
return ZKUtil.joinZNode(this.zooKeeper.rsZNode, getServerName().toString());
}
}
| HBASE-5329 addRowLock() may allocate duplicate lock id, causing the client to be blocked (Ian Varley)
git-svn-id: 949c06ec81f1cb709fd2be51dd530a930344d7b3@1376489 13f79535-47bb-0310-9956-ffa450edef68
| hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java | HBASE-5329 addRowLock() may allocate duplicate lock id, causing the client to be blocked (Ian Varley) | <ide><path>base-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
<ide> import java.util.TreeMap;
<ide> import java.util.TreeSet;
<ide> import java.util.concurrent.ConcurrentHashMap;
<add>import java.util.concurrent.ConcurrentMap;
<ide> import java.util.concurrent.ConcurrentSkipListMap;
<ide> import java.util.concurrent.atomic.AtomicBoolean;
<ide> import java.util.concurrent.atomic.AtomicInteger;
<ide> // Port we put up the webui on.
<ide> protected int webuiport = -1;
<ide>
<del> Map<String, Integer> rowlocks = new ConcurrentHashMap<String, Integer>();
<add> ConcurrentMap<String, Integer> rowlocks = new ConcurrentHashMap<String, Integer>();
<ide>
<ide> // A state before we go into stopped state. At this stage we're closing user
<ide> // space regions.
<ide> return this.fsOk;
<ide> }
<ide>
<del> protected long addRowLock(Integer r, HRegion region)
<del> throws LeaseStillHeldException {
<del> long lockId = nextLong();
<del> String lockName = String.valueOf(lockId);
<del> rowlocks.put(lockName, r);
<add> protected long addRowLock(Integer r, HRegion region) throws LeaseStillHeldException {
<add> String lockName = null;
<add> long lockId;
<add> do {
<add> lockId = nextLong();
<add> lockName = String.valueOf(lockId);
<add> } while (rowlocks.putIfAbsent(lockName, r) != null);
<ide> this.leases.createLease(lockName, this.rowLockLeaseTimeoutPeriod, new RowLockListener(lockName,
<ide> region));
<ide> return lockId; |
|
Java | mpl-2.0 | ffcef3ed6d43a82fff6f71c092f28591563ffe3f | 0 | servinglynk/servinglynk-hmis,servinglynk/servinglynk-hmis,servinglynk/servinglynk-hmis,servinglynk/hmis-lynk-open-source,servinglynk/servinglynk-hmis,servinglynk/hmis-lynk-open-source,servinglynk/hmis-lynk-open-source,servinglynk/hmis-lynk-open-source,servinglynk/servinglynk-hmis,servinglynk/hmis-lynk-open-source,servinglynk/servinglynk-hmis,servinglynk/hmis-lynk-open-source | package com.servinglynk.hmis.warehouse.service.converter;
import com.servinglynk.hmis.warehouse.core.model.EnrollmentCoc;
public class EnrollmentCocConverter extends BaseConverter {
public static com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc modelToEntity (EnrollmentCoc model ,com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc entity) {
if(entity==null) entity = new com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc();
entity.setId(model.getEnrollmentCocId());
entity.setCocCode(model.getCocCode());
return entity;
}
public static EnrollmentCoc entityToModel (com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc entity) {
EnrollmentCoc model = new EnrollmentCoc();
model.setEnrollmentCocId(entity.getId());
model.setCocCode(entity.getCocCode());
model.setEnrollmentId(entity.getEnrollmentid() !=null ? entity.getEnrollmentid().getId() : null);
model.setProjectCocId(entity.getProjectCoc() !=null ? entity.getProjectCoc().getId() : null);
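		// The two null checks above keep the conversion from throwing a
		// NullPointerException for entities without an enrollment or project
		// CoC reference; the corresponding model ids are simply left null.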
model.setDateCreated(entity.getDateCreated());
model.setDateUpdated(entity.getDateUpdated());
return model;
}
}
| hmis-service-v2014/src/main/java/com/servinglynk/hmis/warehouse/service/converter/EnrollmentCocConverter.java | package com.servinglynk.hmis.warehouse.service.converter;
import com.servinglynk.hmis.warehouse.core.model.EnrollmentCoc;
public class EnrollmentCocConverter extends BaseConverter {
public static com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc modelToEntity (EnrollmentCoc model ,com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc entity) {
if(entity==null) entity = new com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc();
entity.setId(model.getEnrollmentCocId());
entity.setCocCode(model.getCocCode());
return entity;
}
public static EnrollmentCoc entityToModel (com.servinglynk.hmis.warehouse.model.v2014.EnrollmentCoc entity) {
EnrollmentCoc model = new EnrollmentCoc();
model.setEnrollmentCocId(entity.getId());
model.setCocCode(entity.getCocCode());
model.setEnrollmentId(entity.getEnrollmentid().getId());
model.setProjectCocId(entity.getProjectCoc().getId());
model.setDateCreated(entity.getDateCreated());
model.setDateUpdated(entity.getDateUpdated());
return model;
}
}
| Fix NPE in the enrollmentcoc api
| hmis-service-v2014/src/main/java/com/servinglynk/hmis/warehouse/service/converter/EnrollmentCocConverter.java | Fix NPE in the enrollmentcoc api | <ide><path>mis-service-v2014/src/main/java/com/servinglynk/hmis/warehouse/service/converter/EnrollmentCocConverter.java
<ide> EnrollmentCoc model = new EnrollmentCoc();
<ide> model.setEnrollmentCocId(entity.getId());
<ide> model.setCocCode(entity.getCocCode());
<del> model.setEnrollmentId(entity.getEnrollmentid().getId());
<del> model.setProjectCocId(entity.getProjectCoc().getId());
<add> model.setEnrollmentId(entity.getEnrollmentid() !=null ? entity.getEnrollmentid().getId() : null);
<add> model.setProjectCocId(entity.getProjectCoc() !=null ? entity.getProjectCoc().getId() : null);
<ide> model.setDateCreated(entity.getDateCreated());
<ide> model.setDateUpdated(entity.getDateUpdated());
<ide> return model; |
|
Java | apache-2.0 | 050ad65d032c61be77729cd048c1175b859a7aae | 0 | opensource21/fuwesta,opensource21/fuwesta,opensource21/fuwesta | /*******************************************************************************
* Portions created by Sebastian Thomschke are copyright (c) 2005-2013 Sebastian
* Thomschke.
*
* All Rights Reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Sebastian Thomschke - initial implementation.
 * Niels - added the possibility not to add AssertValidCheck, and made sure
 * a check isn't added if it is defined directly.
*******************************************************************************/
package de.ppi.fuwesta.spring.mvc.oval;
import static net.sf.oval.Validator.*;
import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Lob;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Version;
import net.sf.oval.Check;
import net.sf.oval.collection.CollectionFactory;
import net.sf.oval.configuration.Configurer;
import net.sf.oval.configuration.pojo.elements.ClassConfiguration;
import net.sf.oval.configuration.pojo.elements.ConstraintSetConfiguration;
import net.sf.oval.configuration.pojo.elements.FieldConfiguration;
import net.sf.oval.configuration.pojo.elements.MethodConfiguration;
import net.sf.oval.configuration.pojo.elements.MethodReturnValueConfiguration;
import net.sf.oval.constraint.AssertValidCheck;
import net.sf.oval.constraint.Length;
import net.sf.oval.constraint.LengthCheck;
import net.sf.oval.constraint.NotNull;
import net.sf.oval.constraint.NotNullCheck;
import net.sf.oval.constraint.Range;
import net.sf.oval.constraint.RangeCheck;
import net.sf.oval.internal.util.ReflectionUtils;
/**
* Constraints configurer that interprets certain EJB3 JPA annotations:
* <ul>
* <li>javax.persistence.Basic(optional=false) =>
* net.sf.oval.constraint.NotNullCheck
* <li>javax.persistence.OneToOne(optional=false) =>
* net.sf.oval.constraint.NotNullCheck, net.sf.oval.constraint.AssertValidCheck
* <li>javax.persistence.ManyToOne(optional=false) =>
* net.sf.oval.constraint.NotNullCheck, net.sf.oval.constraint.AssertValidCheck
* <li>javax.persistence.ManyToMany => net.sf.oval.constraint.AssertValidCheck
* <li>javax.persistence.Column(nullable=false) =>
* net.sf.oval.constraint.NotNullCheck
* <li>javax.persistence.Column(length=5) => net.sf.oval.constraint.LengthCheck
* <li>javax.persistence.Column(precision>0) on Numbers =>
* net.sf.oval.constraint.RangeCheck
* </ul>
* <b>Hint</b> if you add AssertValidCheck, read <a href=
* "http://sourceforge.net/p/oval/discussion/488110/thread/6ec11584/#4ae0">this
 * post</a> carefully. You can suppress adding this check by calling the constructor
 * {@link JPAAnnotationsConfigurer#JPAAnnotationsConfigurer(boolean)} with false
 * as the argument.
*
* @author Sebastian Thomschke
*/
public class JPAAnnotationsConfigurer implements Configurer {
protected Boolean applyFieldConstraintsToSetters;
protected Boolean applyFieldConstraintsToConstructors;
private final boolean addValidConstraint;
/**
* Initiates an object of type JPAAnnotationsConfigurer.
*
* @param addValidConstraint true if the @Valid should be added.
*/
public JPAAnnotationsConfigurer(boolean addValidConstraint) {
super();
this.addValidConstraint = addValidConstraint;
}
/**
* Initiates an object of type JPAAnnotationsConfigurer.
*
*/
public JPAAnnotationsConfigurer() {
this(true);
}
public Boolean getApplyFieldConstraintsToConstructors() {
return applyFieldConstraintsToConstructors;
}
@Override
public ClassConfiguration getClassConfiguration(final Class<?> clazz) {
final CollectionFactory cf = getCollectionFactory();
final ClassConfiguration config = new ClassConfiguration();
config.type = clazz;
config.applyFieldConstraintsToConstructors =
applyFieldConstraintsToConstructors;
config.applyFieldConstraintsToSetters = applyFieldConstraintsToSetters;
List<Check> checks = cf.createList(2);
/*
* determine field checks
*/
for (final Field field : config.type.getDeclaredFields()) {
// loop over all annotations of the current field
for (final Annotation annotation : field.getAnnotations()) {
if (annotation instanceof Basic) {
initializeChecks((Basic) annotation, checks);
} else if (annotation instanceof Column) {
initializeChecks((Column) annotation, checks, field);
} else if (annotation instanceof OneToOne) {
initializeChecks((OneToOne) annotation, checks);
} else if (annotation instanceof ManyToOne) {
initializeChecks((ManyToOne) annotation, checks);
} else if (annotation instanceof ManyToMany) {
initializeChecks((ManyToMany) annotation, checks);
} else if (annotation instanceof OneToMany) {
initializeChecks((OneToMany) annotation, checks);
}
}
if (checks.size() > 0) {
if (config.fieldConfigurations == null) {
config.fieldConfigurations = cf.createSet(8);
}
final FieldConfiguration fc = new FieldConfiguration();
fc.name = field.getName();
fc.checks = checks;
checks = cf.createList(); // create a new list for the next
// field with checks
config.fieldConfigurations.add(fc);
}
}
/*
* determine getter checks
*/
for (final Method method : config.type.getDeclaredMethods()) {
// consider getters only
if (!ReflectionUtils.isGetter(method)) {
continue;
}
// loop over all annotations
for (final Annotation annotation : method.getAnnotations()) {
if (annotation instanceof Basic) {
initializeChecks((Basic) annotation, checks);
} else if (annotation instanceof Column) {
initializeChecks((Column) annotation, checks, method);
} else if (annotation instanceof OneToOne) {
initializeChecks((OneToOne) annotation, checks);
} else if (annotation instanceof ManyToOne) {
initializeChecks((ManyToOne) annotation, checks);
} else if (annotation instanceof ManyToMany) {
initializeChecks((ManyToMany) annotation, checks);
} else if (annotation instanceof OneToMany) {
initializeChecks((OneToMany) annotation, checks);
}
}
// check if anything has been configured for this method at all
if (checks.size() > 0) {
if (config.methodConfigurations == null) {
config.methodConfigurations = cf.createSet(2);
}
final MethodConfiguration mc = new MethodConfiguration();
mc.name = method.getName();
mc.isInvariant = Boolean.TRUE;
mc.returnValueConfiguration =
new MethodReturnValueConfiguration();
mc.returnValueConfiguration.checks = checks;
checks = cf.createList(); // create a new list for the next
// method having return value checks
config.methodConfigurations.add(mc);
}
}
return config;
}
@Override
public ConstraintSetConfiguration getConstraintSetConfiguration(
final String constraintSetId) {
return null;
}
protected void initializeChecks(final Basic annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (!annotation.optional()) {
checks.add(new NotNullCheck());
}
}
protected void
initializeChecks(final Column annotation,
final Collection<Check> checks,
final AccessibleObject fieldOrMethod) {
assert annotation != null;
assert checks != null;
/*
* If the value is generated (annotated with @GeneratedValue) it is
         * allowed to be null before the entity has been persisted; the same is
         * true for optimistic locking, when a field is annotated with @Version.
         * Since there is no generic way to determine whether an entity has
         * already been persisted, a not-null check will not be performed for
         * such fields.
*/
if (!annotation.nullable()
&& !fieldOrMethod.isAnnotationPresent(GeneratedValue.class)
&& !fieldOrMethod.isAnnotationPresent(Version.class)
&& !fieldOrMethod.isAnnotationPresent(NotNull.class)) {
checks.add(new NotNullCheck());
}
        // only consider the length parameter if @Lob is not present
        // and the field is not an Enumerated (ordinal enums in particular would cause problems)
if (!fieldOrMethod.isAnnotationPresent(Lob.class)
&& !fieldOrMethod.isAnnotationPresent(Enumerated.class)
&& !fieldOrMethod.isAnnotationPresent(Length.class)) {
final LengthCheck lengthCheck = new LengthCheck();
lengthCheck.setMax(annotation.length());
checks.add(lengthCheck);
}
final Class<?> type;
if (fieldOrMethod instanceof Field) {
type = ((Field) fieldOrMethod).getType();
} else {
type = ((Method) fieldOrMethod).getReturnType();
}
// only consider precision/scale for numeric fields
if (!fieldOrMethod.isAnnotationPresent(Range.class)
&& annotation.precision() > 0
&& Number.class.isAssignableFrom(type)) {
/*
             * precision = 6, scale = 2 => -9999.99 <= x <= 9999.99
             * precision = 4, scale = 1 => -999.9 <= x <= 999.9
*/
final RangeCheck rangeCheck = new RangeCheck();
rangeCheck.setMax(Math.pow(10,
annotation.precision() - annotation.scale())
- Math.pow(0.1, annotation.scale()));
rangeCheck.setMin(-1 * rangeCheck.getMax());
checks.add(rangeCheck);
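            // Worked example of the formula: precision = 6, scale = 2 gives
            // max = 10^(6-2) - 0.1^2 = 10000 - 0.01 = 9999.99, and min is its
            // negation, matching the examples in the comment above.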
}
}
protected void initializeChecks(final ManyToMany annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
protected void initializeChecks(final ManyToOne annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (!annotation.optional()) {
checks.add(new NotNullCheck());
}
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
protected void initializeChecks(final OneToMany annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
protected void initializeChecks(final OneToOne annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (!annotation.optional()) {
checks.add(new NotNullCheck());
}
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
public Boolean isApplyFieldConstraintsToSetter() {
return applyFieldConstraintsToSetters;
}
public void setApplyFieldConstraintsToConstructors(
final Boolean applyFieldConstraintsToConstructors) {
this.applyFieldConstraintsToConstructors =
applyFieldConstraintsToConstructors;
}
public void setApplyFieldConstraintsToSetters(
final Boolean applyFieldConstraintsToSetters) {
this.applyFieldConstraintsToSetters = applyFieldConstraintsToSetters;
}
}
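// A minimal usage sketch (names below are illustrative; the wiring assumes
// OVal's standard Validator API, which accepts multiple Configurers):
//
//   Validator validator = new Validator(
//       new AnnotationsConfigurer(), new JPAAnnotationsConfigurer(false));
//   List<ConstraintViolation> violations = validator.validate(someEntity);
//
// Passing false skips the implicit AssertValidCheck discussed in the class
// javadoc; the no-argument constructor adds it.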
| fuwesta-core/src/main/java/de/ppi/fuwesta/spring/mvc/oval/JPAAnnotationsConfigurer.java | /*******************************************************************************
* Portions created by Sebastian Thomschke are copyright (c) 2005-2013 Sebastian
* Thomschke.
*
* All Rights Reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Sebastian Thomschke - initial implementation.
*******************************************************************************/
package de.ppi.fuwesta.spring.mvc.oval;
import static net.sf.oval.Validator.*;
import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Lob;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Version;
import net.sf.oval.Check;
import net.sf.oval.collection.CollectionFactory;
import net.sf.oval.configuration.Configurer;
import net.sf.oval.configuration.pojo.elements.ClassConfiguration;
import net.sf.oval.configuration.pojo.elements.ConstraintSetConfiguration;
import net.sf.oval.configuration.pojo.elements.FieldConfiguration;
import net.sf.oval.configuration.pojo.elements.MethodConfiguration;
import net.sf.oval.configuration.pojo.elements.MethodReturnValueConfiguration;
import net.sf.oval.constraint.AssertValidCheck;
import net.sf.oval.constraint.Length;
import net.sf.oval.constraint.LengthCheck;
import net.sf.oval.constraint.NotNull;
import net.sf.oval.constraint.NotNullCheck;
import net.sf.oval.constraint.Range;
import net.sf.oval.constraint.RangeCheck;
import net.sf.oval.internal.util.ReflectionUtils;
/**
* Constraints configurer that interprets certain EJB3 JPA annotations:
* <ul>
* <li>javax.persistence.Basic(optional=false) =>
* net.sf.oval.constraint.NotNullCheck
* <li>javax.persistence.OneToOne(optional=false) =>
* net.sf.oval.constraint.NotNullCheck, net.sf.oval.constraint.AssertValidCheck
* <li>javax.persistence.ManyToOne(optional=false) =>
* net.sf.oval.constraint.NotNullCheck, net.sf.oval.constraint.AssertValidCheck
* <li>javax.persistence.ManyToMany => net.sf.oval.constraint.AssertValidCheck
* <li>javax.persistence.Column(nullable=false) =>
* net.sf.oval.constraint.NotNullCheck
* <li>javax.persistence.Column(length=5) => net.sf.oval.constraint.LengthCheck
* <li>javax.persistence.Column(precision>0) on Numbers =>
* net.sf.oval.constraint.RangeCheck
* </ul>
* <b>Hint</b> if you add AssertValidCheck, read <a href=
* "http://sourceforge.net/p/oval/discussion/488110/thread/6ec11584/#4ae0">this
 * post</a> carefully. You can suppress adding this check by calling the constructor
 * {@link JPAAnnotationsConfigurer#JPAAnnotationsConfigurer(boolean)} with false
 * as the argument.
*
* @author Sebastian Thomschke
*/
public class JPAAnnotationsConfigurer implements Configurer {
protected Boolean applyFieldConstraintsToSetters;
protected Boolean applyFieldConstraintsToConstructors;
private final boolean addValidConstraint;
/**
* Initiates an object of type JPAAnnotationsConfigurer.
*
* @param addValidConstraint true if the @Valid should be added.
*/
public JPAAnnotationsConfigurer(boolean addValidConstraint) {
super();
this.addValidConstraint = addValidConstraint;
}
/**
* Initiates an object of type JPAAnnotationsConfigurer.
*
*/
public JPAAnnotationsConfigurer() {
this(true);
}
public Boolean getApplyFieldConstraintsToConstructors() {
return applyFieldConstraintsToConstructors;
}
@Override
public ClassConfiguration getClassConfiguration(final Class<?> clazz) {
final CollectionFactory cf = getCollectionFactory();
final ClassConfiguration config = new ClassConfiguration();
config.type = clazz;
config.applyFieldConstraintsToConstructors =
applyFieldConstraintsToConstructors;
config.applyFieldConstraintsToSetters = applyFieldConstraintsToSetters;
List<Check> checks = cf.createList(2);
/*
* determine field checks
*/
for (final Field field : config.type.getDeclaredFields()) {
// loop over all annotations of the current field
for (final Annotation annotation : field.getAnnotations()) {
if (annotation instanceof Basic) {
initializeChecks((Basic) annotation, checks);
} else if (annotation instanceof Column) {
initializeChecks((Column) annotation, checks, field);
} else if (annotation instanceof OneToOne) {
initializeChecks((OneToOne) annotation, checks);
} else if (annotation instanceof ManyToOne) {
initializeChecks((ManyToOne) annotation, checks);
} else if (annotation instanceof ManyToMany) {
initializeChecks((ManyToMany) annotation, checks);
} else if (annotation instanceof OneToMany) {
initializeChecks((OneToMany) annotation, checks);
}
}
if (checks.size() > 0) {
if (config.fieldConfigurations == null) {
config.fieldConfigurations = cf.createSet(8);
}
final FieldConfiguration fc = new FieldConfiguration();
fc.name = field.getName();
fc.checks = checks;
checks = cf.createList(); // create a new list for the next
// field with checks
config.fieldConfigurations.add(fc);
}
}
/*
* determine getter checks
*/
for (final Method method : config.type.getDeclaredMethods()) {
// consider getters only
if (!ReflectionUtils.isGetter(method)) {
continue;
}
// loop over all annotations
for (final Annotation annotation : method.getAnnotations()) {
if (annotation instanceof Basic) {
initializeChecks((Basic) annotation, checks);
} else if (annotation instanceof Column) {
initializeChecks((Column) annotation, checks, method);
} else if (annotation instanceof OneToOne) {
initializeChecks((OneToOne) annotation, checks);
} else if (annotation instanceof ManyToOne) {
initializeChecks((ManyToOne) annotation, checks);
} else if (annotation instanceof ManyToMany) {
initializeChecks((ManyToMany) annotation, checks);
} else if (annotation instanceof OneToMany) {
initializeChecks((OneToMany) annotation, checks);
}
}
// check if anything has been configured for this method at all
if (checks.size() > 0) {
if (config.methodConfigurations == null) {
config.methodConfigurations = cf.createSet(2);
}
final MethodConfiguration mc = new MethodConfiguration();
mc.name = method.getName();
mc.isInvariant = Boolean.TRUE;
mc.returnValueConfiguration =
new MethodReturnValueConfiguration();
mc.returnValueConfiguration.checks = checks;
checks = cf.createList(); // create a new list for the next
// method having return value checks
config.methodConfigurations.add(mc);
}
}
return config;
}
@Override
public ConstraintSetConfiguration getConstraintSetConfiguration(
final String constraintSetId) {
return null;
}
protected void initializeChecks(final Basic annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (!annotation.optional()) {
checks.add(new NotNullCheck());
}
}
protected void
initializeChecks(final Column annotation,
final Collection<Check> checks,
final AccessibleObject fieldOrMethod) {
assert annotation != null;
assert checks != null;
/*
* If the value is generated (annotated with @GeneratedValue) it is
         * allowed to be null before the entity has been persisted; the same is
         * true for optimistic locking, when a field is annotated with @Version.
         * Since there is no generic way to determine whether an entity has
         * already been persisted, a not-null check will not be performed for
         * such fields.
*/
if (!annotation.nullable()
&& !fieldOrMethod.isAnnotationPresent(GeneratedValue.class)
&& !fieldOrMethod.isAnnotationPresent(Version.class)
&& !fieldOrMethod.isAnnotationPresent(NotNull.class)) {
checks.add(new NotNullCheck());
}
        // only consider the length parameter if @Lob is not present
        // and the field is not an Enumerated (ordinal enums in particular would cause problems)
if (!fieldOrMethod.isAnnotationPresent(Lob.class)
&& !fieldOrMethod.isAnnotationPresent(Enumerated.class)
&& !fieldOrMethod.isAnnotationPresent(Length.class)) {
final LengthCheck lengthCheck = new LengthCheck();
lengthCheck.setMax(annotation.length());
checks.add(lengthCheck);
}
final Class<?> type;
if (fieldOrMethod instanceof Field) {
type = ((Field) fieldOrMethod).getType();
} else {
type = ((Method) fieldOrMethod).getReturnType();
}
// only consider precision/scale for numeric fields
if (!fieldOrMethod.isAnnotationPresent(Range.class)
&& annotation.precision() > 0
&& Number.class.isAssignableFrom(type)) {
/*
             * precision = 6, scale = 2 => -9999.99 <= x <= 9999.99
             * precision = 4, scale = 1 => -999.9 <= x <= 999.9
*/
final RangeCheck rangeCheck = new RangeCheck();
rangeCheck.setMax(Math.pow(10,
annotation.precision() - annotation.scale())
- Math.pow(0.1, annotation.scale()));
rangeCheck.setMin(-1 * rangeCheck.getMax());
checks.add(rangeCheck);
}
}
protected void initializeChecks(final ManyToMany annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
protected void initializeChecks(final ManyToOne annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (!annotation.optional()) {
checks.add(new NotNullCheck());
}
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
protected void initializeChecks(final OneToMany annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
protected void initializeChecks(final OneToOne annotation,
final Collection<Check> checks) {
assert annotation != null;
assert checks != null;
if (!annotation.optional()) {
checks.add(new NotNullCheck());
}
if (addValidConstraint) {
checks.add(new AssertValidCheck());
}
}
public Boolean isApplyFieldConstraintsToSetter() {
return applyFieldConstraintsToSetters;
}
public void setApplyFieldConstraintsToConstructors(
final Boolean applyFieldConstraintsToConstructors) {
this.applyFieldConstraintsToConstructors =
applyFieldConstraintsToConstructors;
}
public void setApplyFieldConstraintsToSetters(
final Boolean applyFieldConstraintsToSetters) {
this.applyFieldConstraintsToSetters = applyFieldConstraintsToSetters;
}
}
| Added a comment to the copyright. | fuwesta-core/src/main/java/de/ppi/fuwesta/spring/mvc/oval/JPAAnnotationsConfigurer.java | Added a comment to the copyright. | <ide><path>uwesta-core/src/main/java/de/ppi/fuwesta/spring/mvc/oval/JPAAnnotationsConfigurer.java
<ide> *
<ide> * Contributors:
<ide> * Sebastian Thomschke - initial implementation.
<add> * Niels - added the possibility not to add AssertValidCheck, and made sure
<add> * a check isn't added if it is defined directly.
<ide> *******************************************************************************/
<ide> package de.ppi.fuwesta.spring.mvc.oval;
<ide> |
|
Java | lgpl-2.1 | 1be98e4d8362a5893e6a6b14775ba974d136d136 | 0 | viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package eu.seebetter.ini.chips.davis;
import java.awt.Point;
import eu.seebetter.ini.chips.DavisChip;
import net.sf.jaer.Description;
import net.sf.jaer.DevelopmentStatus;
import net.sf.jaer.event.ApsDvsEvent.ColorFilter;
import net.sf.jaer.hardwareinterface.HardwareInterface;
/**
 * CDAVIS camera with a heterogeneous mixture of DAVIS and RGB APS global
 * shutter pixels
*
* @author Chenghan Li, Luca Longinotti, Tobi Delbruck
*/
@Description("CDAVIS APS-DVS camera with RGBW CFA color filter array and 640x480 APS pixels and 320x240 DAVIS pixels")
@DevelopmentStatus(DevelopmentStatus.Status.Experimental)
public class DavisRGBW640 extends DavisBaseCamera {
public static final short WIDTH_PIXELS = 640;
public static final short HEIGHT_PIXELS = 480;
public static final ColorFilter[] COLOR_FILTER = { ColorFilter.B, ColorFilter.W, ColorFilter.R, ColorFilter.G };
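	// Descriptive note: COLOR_CORRECTION below is a 3x4 matrix handed to
	// DavisColorRenderer; each of its three rows holds the weights that
	// combine the four color-filter channels into one output color channel
	// (the exact row/column ordering is the renderer's convention).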
public static final float[][] COLOR_CORRECTION = { { 1.75f, -0.19f, -0.56f, 0.15f }, { -0.61f, 1.39f, 0.07f, 0.21f },
{ -0.42f, -1.13f, 2.45f, 0.18f } };
public DavisRGBW640() {
setName("DavisRGBW640");
setDefaultPreferencesFile("biasgenSettings/DavisRGBW640/DavisRGBW640.xml");
setSizeX(DavisRGBW640.WIDTH_PIXELS);
setSizeY(DavisRGBW640.HEIGHT_PIXELS);
setEventExtractor(new DavisColorEventExtractor(this, true, false, COLOR_FILTER, true));
setBiasgen(davisConfig = new DavisRGBW640Config(this));
davisRenderer = new DavisColorRenderer(this, true, COLOR_FILTER, true, COLOR_CORRECTION);
davisRenderer.setMaxADC(DavisChip.MAX_ADC);
setRenderer(davisRenderer);
setApsFirstPixelReadOut(new Point(0, 0));
setApsLastPixelReadOut(new Point(getSizeX() - 1, getSizeY() - 1));
}
public DavisRGBW640(final HardwareInterface hardwareInterface) {
this();
setHardwareInterface(hardwareInterface);
}
}
| src/eu/seebetter/ini/chips/davis/DavisRGBW640.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package eu.seebetter.ini.chips.davis;
import java.awt.Point;
import eu.seebetter.ini.chips.DavisChip;
import net.sf.jaer.Description;
import net.sf.jaer.DevelopmentStatus;
import net.sf.jaer.event.ApsDvsEvent.ColorFilter;
import net.sf.jaer.hardwareinterface.HardwareInterface;
/**
 * CDAVIS camera with a heterogeneous mixture of DAVIS and RGB APS global
 * shutter pixels
*
* @author Chenghan Li, Luca Longinotti, Tobi Delbruck
*/
@Description("CDAVIS APS-DVS camera with RGBW CFA color filter array and 640x480 APS pixels and 320x240 DAVIS pixels")
@DevelopmentStatus(DevelopmentStatus.Status.Experimental)
public class DavisRGBW640 extends DavisBaseCamera {
public static final short WIDTH_PIXELS = 640;
public static final short HEIGHT_PIXELS = 480;
public static final ColorFilter[] COLOR_FILTER = { ColorFilter.B, ColorFilter.W, ColorFilter.R, ColorFilter.G };
public static final float[][] COLOR_CORRECTION = { { 1.75f, -0.19f, -0.56f, 0.15f }, { -0.61f, 1.39f, 0.07f, 0.21f },
{ -0.42f, -1.13f, 2.47f, 0.18f } };
public DavisRGBW640() {
setName("DavisRGBW640");
setDefaultPreferencesFile("biasgenSettings/DavisRGBW640/DavisRGBW640.xml");
setSizeX(DavisRGBW640.WIDTH_PIXELS);
setSizeY(DavisRGBW640.HEIGHT_PIXELS);
setEventExtractor(new DavisColorEventExtractor(this, true, false, COLOR_FILTER, true));
setBiasgen(davisConfig = new DavisRGBW640Config(this));
davisRenderer = new DavisColorRenderer(this, true, COLOR_FILTER, true, COLOR_CORRECTION);
davisRenderer.setMaxADC(DavisChip.MAX_ADC);
setRenderer(davisRenderer);
setApsFirstPixelReadOut(new Point(0, 0));
setApsLastPixelReadOut(new Point(getSizeX() - 1, getSizeY() - 1));
}
public DavisRGBW640(final HardwareInterface hardwareInterface) {
this();
setHardwareInterface(hardwareInterface);
}
}
| updating the colorCorrection matrix for better Blue
git-svn-id: fe6b3b33f0410f5f719dcd9e0c58b92353e7a5d3@8199 b7f4320f-462c-0410-a916-d9f35bb82d52
| src/eu/seebetter/ini/chips/davis/DavisRGBW640.java | updating the colorCorrection matrix for better Blue | <ide><path>rc/eu/seebetter/ini/chips/davis/DavisRGBW640.java
<ide> public static final short HEIGHT_PIXELS = 480;
<ide> public static final ColorFilter[] COLOR_FILTER = { ColorFilter.B, ColorFilter.W, ColorFilter.R, ColorFilter.G };
<ide> public static final float[][] COLOR_CORRECTION = { { 1.75f, -0.19f, -0.56f, 0.15f }, { -0.61f, 1.39f, 0.07f, 0.21f },
<del> { -0.42f, -1.13f, 2.47f, 0.18f } };
<add> { -0.42f, -1.13f, 2.45f, 0.18f } };
<ide>
<ide> public DavisRGBW640() {
<ide> setName("DavisRGBW640"); |
|
JavaScript | mit | bc18c651dd4d25b96c086ea53e1c1a9559ec9286 | 0 | zorahrel/snanake,zorahrel/snanake | var foods = [];
var s = new Snake();
function setup() {
pixelUnit = 30;
baseSpeed = pixelUnit;
var width = pixelUnit * 15;
var height = pixelUnit * 15;
createCanvas(width, height);
generateFood();
frameRate(8);
}
function draw() {
background(0);
s.update();
s.show();
s.eat();
s.eatHimSelf();
s.score();
}
function mouseClicked() {
s.level = s.level+1;
console.log('level up, now your level is: ', s.level);
}
function keyPressed() {
switch(keyCode) {
case UP_ARROW:
s.dir(0, -baseSpeed);
break;
case DOWN_ARROW:
s.dir(0, baseSpeed);
break;
case RIGHT_ARROW:
s.dir(baseSpeed, 0);
break;
case LEFT_ARROW:
s.dir(-baseSpeed, 0);
break;
}
}
function generateFood() {
do {
food = randomPos();
} while(collideSnake(food, s));
foods.push(food);
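  // Simple rejection sampling: the do/while above keeps drawing random cells
  // until one no longer collides with the snake's body.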
}
function collideSnake(pos, snake) {
var collides = false;
snake.tails.forEach(function(tail) {
if(pos.x == tail.x && pos.y == tail.y) {
collides = true;
}
});
return collides;
}
function randomPos() {
var cols = floor(width/pixelUnit);
var rows = floor(width/pixelUnit);
return {
x: floor(random(cols))*pixelUnit,
y: floor(random(rows))*pixelUnit
};
}
function Snake() {
this.x = 0;
this.y = 0;
this.xSpeed = 0;
this.ySpeed = 0;
this.level = 0;
this.tails = [];
this.update = function() {
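    // Shift every tail segment back by one slot (index 0 becomes the previous
    // head position), then move the head, wrapping around at the canvas edges.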
for(var i=this.level; i>0; i--) {
this.tails[i] = this.tails[i-1];
}
this.tails[0] = { x: this.x, y: this.y };
var newX = this.x + this.xSpeed;
if(newX < width && newX > -pixelUnit) {
this.x = newX;
} else {
if(newX < 0) {
this.x = width-pixelUnit;
}
if(newX+pixelUnit > width) {
this.x = 0;
}
}
var newY = this.y + this.ySpeed;
if(newY < height && newY > -pixelUnit) {
this.y = newY;
} else {
if(newY < 0) {
this.y = height-pixelUnit;
}
if(newY+pixelUnit > height) {
this.y = 0;
}
}
}
this.show = function() {
fill(255);
var tails = this.tails;
tails.forEach(function(tail, index) {
var type = 'body';
      if(index == 0 && tails.length>1) { // Head
type = 'head';
}
      if(index+1 == tails.length && tails.length>1) { // Tail
type = 'tail';
}
      if(index == 0 && index+1 == tails.length) { // Single piece
type = 'one';
}
switch(type) {
case 'head':
fill(255,0,0);
break;
case 'body':
case 'tail':
fill(255,255,255);
break;
case 'one':
fill(255,255,0);
break;
}
rect(tail.x, tail.y, pixelUnit, pixelUnit);
});
this.tails = tails;
foods.forEach(function(food) {
fill(0, 255, 0);
rect(food.x, food.y, pixelUnit, pixelUnit);
}, this);
}
this.eat = function() {
pos = { x: this.x, y: this.y };
lev = this.level;
foods.forEach(function(food, index) {
if(food.x == pos.x && food.y == pos.y) {
lev = lev+1;
generateFood();
foods.splice(index, 1);
}
});
this.level = lev;
return false;
}
this.eatHimSelf = function() {
var lev = this.level;
var tails = this.tails;
for(var i=0; i<tails.length-1; i++) {
if(i==0) {
continue;
}
if(tails[i].x == this.x && tails[i].y == this.y) {
for(var iB = i; iB<lev; iB++) {
tails = tails.splice(0, iB);
}
lev = i;
        console.log('dang, I just ate my own body');
break;
}
}
this.tails = tails;
this.level = lev;
return false;
}
this.score = function() {
document.getElementById('score').innerHTML = 'Score: '+this.level;
}
this.dir = function(xSpeed, ySpeed) {
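    // xSpeed*this.xSpeed is 0 only when at least one of them is 0, i.e. the
    // requested direction is perpendicular to the current one; this blocks
    // instant 180-degree reversals once the snake has a body (level > 0).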
if(xSpeed*this.xSpeed == 0 || this.level==0) {
this.xSpeed = xSpeed;
}
if(ySpeed*this.ySpeed == 0 || this.level==0) {
this.ySpeed = ySpeed;
}
}
}
| snake.js | var foods = [];
var s = new Snake();
function setup() {
pixelUnit = 30;
baseSpeed = pixelUnit;
var width = pixelUnit * 15;
var height = pixelUnit * 15;
createCanvas(width, height);
generateFood();
frameRate(8);
}
function draw() {
background(0);
s.update();
s.show();
s.eat();
s.eatHimSelf();
s.score();
}
function mouseClicked() {
s.level = s.level+1;
console.log('level up, now your level is: ', s.level);
}
function keyPressed() {
switch(keyCode) {
case UP_ARROW:
s.dir(0, -baseSpeed);
break;
case DOWN_ARROW:
s.dir(0, baseSpeed);
break;
case RIGHT_ARROW:
s.dir(baseSpeed, 0);
break;
case LEFT_ARROW:
s.dir(-baseSpeed, 0);
break;
}
}
function generateFood() {
do {
food = randomPos();
} while(collideSnake(food, s));
foods.push(food);
}
function collideSnake(pos, snake) {
var collides = false;
snake.tails.forEach(function(tail) {
if(pos.x == tail.x && pos.y == tail.y) {
collides = true;
}
});
return collides;
}
function randomPos() {
var cols = floor(width/pixelUnit);
var rows = floor(width/pixelUnit);
return {
x: floor(random(cols))*pixelUnit,
y: floor(random(rows))*pixelUnit
};
}
function Snake() {
this.x = 0;
this.y = 0;
this.xSpeed = 0;
this.ySpeed = 0;
this.level = 0;
this.tails = [];
this.update = function() {
for(var i=this.level; i>0; i--) {
this.tails[i] = this.tails[i-1];
}
this.tails[0] = { x: this.x, y: this.y };
var newX = this.x + this.xSpeed;
if(newX < width && newX > -pixelUnit) {
this.x = newX;
} else {
if(newX < 0) {
this.x = width-pixelUnit;
}
if(newX+pixelUnit > width) {
this.x = 0;
}
}
var newY = this.y + this.ySpeed;
if(newY < height && newY > -pixelUnit) {
this.y = newY;
} else {
if(newY < 0) {
this.y = height-pixelUnit;
}
if(newY+pixelUnit > height) {
this.y = 0;
}
}
}
this.show = function() {
fill(255);
var tails = this.tails;
tails.forEach(function(tail, index) {
var type = 'body';
      if(index == 0 && tails.length>1) { // Head
type = 'head';
}
      if(index+1 == tails.length && tails.length>1) { // Tail
type = 'tail';
}
      if(index == 0 && index+1 == tails.length) { // Single piece
type = 'one';
}
switch(type) {
case 'head':
fill(255,0,0);
break;
case 'body':
case 'tail':
fill(255,255,255);
break;
case 'one':
fill(255,255,0);
break;
}
rect(tail.x, tail.y, pixelUnit, pixelUnit);
});
this.tails = tails;
foods.forEach(function(food) {
fill(0, 255, 0);
rect(food.x, food.y, pixelUnit, pixelUnit);
}, this);
}
this.eat = function() {
pos = { x: this.x, y: this.y };
lev = this.level;
foods.forEach(function(food, index) {
if(food.x == pos.x && food.y == pos.y) {
lev = lev+1;
generateFood();
foods.splice(index, 1);
}
});
this.level = lev;
return false;
}
this.eatHimSelf = function() {
var lev = this.level;
var tails = this.tails;
for(var i=0; i<tails.length; i++) {
if(i==0) {
continue;
}
if(tails[i].x == this.x && tails[i].y == this.y) {
for(var iB = i; iB<lev; iB++) {
tails = tails.splice(0, iB);
}
lev = i;
        console.log('dang, I just ate my own body');
break;
}
}
this.tails = tails;
this.level = lev;
return false;
}
this.score = function() {
document.getElementById('score').innerHTML = 'Score: '+this.level;
}
this.dir = function(xSpeed, ySpeed) {
if(xSpeed*this.xSpeed == 0 || this.level==0) {
this.xSpeed = xSpeed;
}
if(ySpeed*this.ySpeed == 0 || this.level==0) {
this.ySpeed = ySpeed;
}
}
}
| Fixed the problem of the overly hungry head
length-1 in the eatHimSelf foreach because tails[0] | snake.js | Fixed the problem of the overly hungry head | <ide><path>nake.js
<ide> this.eatHimSelf = function() {
<ide> var lev = this.level;
<ide> var tails = this.tails;
<del> for(var i=0; i<tails.length; i++) {
<add> for(var i=0; i<tails.length-1; i++) {
<ide> if(i==0) {
<ide> continue;
<ide> } |
|
Java | apache-2.0 | 9872c110a9abd8e6d4371427ac280d8ab847e02d | 0 | ligasgr/intellij-xquery,ligasgr/intellij-xquery | /*
* Copyright 2013-2014 Grzegorz Ligas <[email protected]> and other contributors
* (see the CONTRIBUTORS file).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.xquery.runner.ui.run.main.datasource;
import net.java.openjdk.cacio.ctc.junit.CacioTestRunner;
import org.intellij.xquery.CheatingIdeaApplicationManager;
import org.intellij.xquery.runner.state.datasources.XQueryDataSourceConfiguration;
import org.intellij.xquery.runner.state.datasources.XQueryDataSourcesSettings;
import org.intellij.xquery.runner.ui.datasources.DataSourcesSettingsForm;
import org.jetbrains.annotations.NotNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import javax.swing.Action;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import java.awt.event.ActionEvent;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
* User: ligasgr
* Date: 10/11/13
* Time: 00:50
*/
@RunWith(CacioTestRunner.class)
@Ignore("Temporarily switching off until I have better idea how to rewrite dialog not to be too bound to idea")
public class DataSourcesDialogGuiTest {
private JPanel parent;
private DataSourceSelector selector;
private DataSourcesSettingsForm settingsForm;
private TestDataSourcesDialog dialog;
private XQueryDataSourcesSettings dataSourceSettings;
private Object showMonitor;
@Before
public void setUp() throws Exception {
CheatingIdeaApplicationManager.removeApplication();
selector = mock(DataSourceSelector.class);
settingsForm = mock(DataSourcesSettingsForm.class);
dataSourceSettings = mock(XQueryDataSourcesSettings.class);
parent = mock(JPanel.class);
given(parent.isShowing()).willReturn(true);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
dialog = new TestDataSourcesDialog(parent, selector, settingsForm);
}
});
showMonitor = new Object();
}
@After
public void tearDown() throws Exception {
CheatingIdeaApplicationManager.restoreApplication();
}
@Test
public void shouldDelegateCreationOfCenterPanelToSettingsForm() throws InterruptedException {
showDialog();
clickCancelButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm).getFormComponent();
}
});
}
@Test
public void shouldUpdateDataSourceConfigurationsWithCurrentStateFromForm() throws InterruptedException {
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm, atLeastOnce()).getCurrentConfigurations();
verify(dataSourceSettings).setDataSourceConfigurations(anyListOf(XQueryDataSourceConfiguration.class));
}
});
}
@Test
public void shouldUpdateDataSourceSelectorWithCurrentConfigurations() throws InterruptedException {
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm, atLeast(2)).getCurrentConfigurations();
verify(selector).setDataSources(anyListOf(XQueryDataSourceConfiguration.class));
}
});
}
@Test
public void shouldUpdateCurrentlySelectedDataSourceWithSelectionFromDialog() throws InterruptedException {
final XQueryDataSourceConfiguration cfg = new XQueryDataSourceConfiguration();
given(settingsForm.getSelectedDataSource()).willReturn(cfg);
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm).getSelectedDataSource();
verify(selector).setSelectedDataSource(cfg);
}
});
}
@Test
public void shouldNotUpdateCurrentlySelectedDataSourceWhenNoSelectionInDialog() throws InterruptedException {
given(settingsForm.getSelectedDataSource()).willReturn(null);
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(selector).setDataSources(anyListOf(XQueryDataSourceConfiguration.class));
verify(settingsForm).getSelectedDataSource();
verifyNoMoreInteractions(selector);
}
});
}
@Test
public void shouldDoNothingIfWasClosedWithClose() throws InterruptedException {
showDialog();
clickCancelButton();
performVerifications(new Runnable() {
@Override
public void run() {
verifyZeroInteractions(selector, dataSourceSettings);
}
});
}
private void clickOkButton() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
Action okAction = dialog.getOKAction();
ActionEvent event = new ActionEvent(DataSourcesDialogGuiTest.this, 0, "command");
okAction.actionPerformed(event);
}
});
}
private void clickCancelButton() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
Action cancelAction = dialog.getCancelAction();
ActionEvent event = new ActionEvent(DataSourcesDialogGuiTest.this, 0, "command");
cancelAction.actionPerformed(event);
}
});
}
private void performVerifications(Runnable verifications) throws InterruptedException {
synchronized (showMonitor) {
showMonitor.wait();
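            // Blocks until the Swing thread has shown the dialog and called
            // notify() inside showDialog(); only then run the verifications.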
verifications.run();
}
}
private void showDialog() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
synchronized (showMonitor) {
dialog.show();
showMonitor.notify();
}
}
});
}
private class TestDataSourcesDialog extends DataSourcesDialog {
public TestDataSourcesDialog(JComponent parent, DataSourceSelector selector,
DataSourcesSettingsForm settingsForm) {
super(parent, selector, settingsForm);
}
@NotNull
@Override
public Action getOKAction() {
return super.getOKAction();
}
@NotNull
@Override
public Action getCancelAction() {
return super.getCancelAction();
}
@Override
protected XQueryDataSourcesSettings getDataSourceSettings() {
return dataSourceSettings;
}
}
}
| src/testGui/java/org/intellij/xquery/runner/ui/run/main/datasource/DataSourcesDialogGuiTest.java | /*
* Copyright 2013-2014 Grzegorz Ligas <[email protected]> and other contributors
* (see the CONTRIBUTORS file).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.xquery.runner.ui.run.main.datasource;
import net.java.openjdk.cacio.ctc.junit.CacioTestRunner;
import org.intellij.xquery.CheatingIdeaApplicationManager;
import org.intellij.xquery.runner.state.datasources.XQueryDataSourceConfiguration;
import org.intellij.xquery.runner.state.datasources.XQueryDataSourcesSettings;
import org.intellij.xquery.runner.ui.datasources.DataSourcesSettingsForm;
import org.jetbrains.annotations.NotNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import javax.swing.Action;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import java.awt.event.ActionEvent;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
* User: ligasgr
* Date: 10/11/13
* Time: 00:50
*/
@RunWith(CacioTestRunner.class)
public class DataSourcesDialogGuiTest {
private JPanel parent;
private DataSourceSelector selector;
private DataSourcesSettingsForm settingsForm;
private TestDataSourcesDialog dialog;
private XQueryDataSourcesSettings dataSourceSettings;
private Object showMonitor;
@Before
public void setUp() throws Exception {
CheatingIdeaApplicationManager.removeApplication();
selector = mock(DataSourceSelector.class);
settingsForm = mock(DataSourcesSettingsForm.class);
dataSourceSettings = mock(XQueryDataSourcesSettings.class);
parent = mock(JPanel.class);
given(parent.isShowing()).willReturn(true);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
dialog = new TestDataSourcesDialog(parent, selector, settingsForm);
}
});
showMonitor = new Object();
}
@After
public void tearDown() throws Exception {
CheatingIdeaApplicationManager.restoreApplication();
}
@Test
public void shouldDelegateCreationOfCenterPanelToSettingsForm() throws InterruptedException {
showDialog();
clickCancelButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm).getFormComponent();
}
});
}
@Test
public void shouldUpdateDataSourceConfigurationsWithCurrentStateFromForm() throws InterruptedException {
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm, atLeastOnce()).getCurrentConfigurations();
verify(dataSourceSettings).setDataSourceConfigurations(anyListOf(XQueryDataSourceConfiguration.class));
}
});
}
@Test
public void shouldUpdateDataSourceSelectorWithCurrentConfigurations() throws InterruptedException {
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm, atLeast(2)).getCurrentConfigurations();
verify(selector).setDataSources(anyListOf(XQueryDataSourceConfiguration.class));
}
});
}
@Test
public void shouldUpdateCurrentlySelectedDataSourceWithSelectionFromDialog() throws InterruptedException {
final XQueryDataSourceConfiguration cfg = new XQueryDataSourceConfiguration();
given(settingsForm.getSelectedDataSource()).willReturn(cfg);
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(settingsForm).getSelectedDataSource();
verify(selector).setSelectedDataSource(cfg);
}
});
}
@Test
public void shouldNotUpdateCurrentlySelectedDataSourceWhenNoSelectionInDialog() throws InterruptedException {
given(settingsForm.getSelectedDataSource()).willReturn(null);
showDialog();
clickOkButton();
performVerifications(new Runnable() {
@Override
public void run() {
verify(selector).setDataSources(anyListOf(XQueryDataSourceConfiguration.class));
verify(settingsForm).getSelectedDataSource();
verifyNoMoreInteractions(selector);
}
});
}
@Test
public void shouldDoNothingIfWasClosedWithClose() throws InterruptedException {
showDialog();
clickCancelButton();
performVerifications(new Runnable() {
@Override
public void run() {
verifyZeroInteractions(selector, dataSourceSettings);
}
});
}
private void clickOkButton() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
Action okAction = dialog.getOKAction();
ActionEvent event = new ActionEvent(DataSourcesDialogGuiTest.this, 0, "command");
okAction.actionPerformed(event);
}
});
}
private void clickCancelButton() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
Action cancelAction = dialog.getCancelAction();
ActionEvent event = new ActionEvent(DataSourcesDialogGuiTest.this, 0, "command");
cancelAction.actionPerformed(event);
}
});
}
private void performVerifications(Runnable verifications) throws InterruptedException {
synchronized (showMonitor) {
showMonitor.wait();
verifications.run();
}
}
private void showDialog() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
synchronized (showMonitor) {
dialog.show();
showMonitor.notify();
}
}
});
}
private class TestDataSourcesDialog extends DataSourcesDialog {
public TestDataSourcesDialog(JComponent parent, DataSourceSelector selector,
DataSourcesSettingsForm settingsForm) {
super(parent, selector, settingsForm);
}
@NotNull
@Override
public Action getOKAction() {
return super.getOKAction();
}
@NotNull
@Override
public Action getCancelAction() {
return super.getCancelAction();
}
@Override
protected XQueryDataSourcesSettings getDataSourceSettings() {
return dataSourceSettings;
}
}
}
| Ignoring test until it's rewritten in a more reliable manner.
| src/testGui/java/org/intellij/xquery/runner/ui/run/main/datasource/DataSourcesDialogGuiTest.java | Ignoring test until it's rewritten in a more reliable manner. | <ide><path>rc/testGui/java/org/intellij/xquery/runner/ui/run/main/datasource/DataSourcesDialogGuiTest.java
<ide> import org.jetbrains.annotations.NotNull;
<ide> import org.junit.After;
<ide> import org.junit.Before;
<add>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import org.junit.runner.RunWith;
<ide>
<ide> * Time: 00:50
<ide> */
<ide> @RunWith(CacioTestRunner.class)
<add>@Ignore("Temporarily switching off until I have better idea how to rewrite dialog not to be too bound to idea")
<ide> public class DataSourcesDialogGuiTest {
<ide>
<ide> private JPanel parent; |
|
Java | apache-2.0 | 7d96d60c42cd8573bb61b4b404010d711e7f3a52 | 0 | fkeglevich/Raw-Dumper,fkeglevich/Raw-Dumper,fkeglevich/Raw-Dumper | /*
* Copyright 2017, Flávio Keglevich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fkeglevich.rawdumper.io.async;
import android.content.Context;
import android.media.MediaScannerConnection;
import com.fkeglevich.rawdumper.controller.context.ContextManager;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
/**
* Created by Flávio Keglevich on 24/08/2017.
* TODO: Add a class header comment!
*/
public class IOUtil
{
public static void scanFileWithMediaScanner(String filePath)
{
Context context = ContextManager.getApplicationContext();
MediaScannerConnection.scanFile(context, new String[]{filePath}, null, null);
}
public static void saveBytes(byte[] data, String filePath) throws IOException
{
try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(new File(filePath))))
{
bos.write(data);
bos.flush();
}
}
public static byte[] readBytes(String filePath) throws IOException
{
int length = (int) new File(filePath).length();
try (FileInputStream stream = new FileInputStream(filePath))
{
byte[] result = new byte[length];
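// NOTE (editor): InputStream.read may return fewer bytes than requested;
// see the read-loop sketch after this record's diff.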
stream.read(result);
return result;
}
}
}
| app/src/main/java/com/fkeglevich/rawdumper/io/async/IOUtil.java | /*
* Copyright 2017, Flávio Keglevich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fkeglevich.rawdumper.io.async;
import android.content.Context;
import android.media.MediaScannerConnection;
import com.fkeglevich.rawdumper.controller.context.ContextManager;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
/**
* Created by Flávio Keglevich on 24/08/2017.
* TODO: Add a class header comment!
*/
public class IOUtil
{
public static void scanFileWithMediaScanner(String filePath)
{
Context context = ContextManager.getApplicationContext();
MediaScannerConnection.scanFile(context, new String[]{filePath}, null, null);
}
public static void saveBytes(byte[] data, String filePath) throws IOException
{
BufferedOutputStream bos = null;
try
{
bos = new BufferedOutputStream(new FileOutputStream(new File(filePath)));
bos.write(data);
bos.flush();
}
finally
{
if (bos != null)
bos.close();
}
}
public static byte[] readBytes(String filePath) throws IOException
{
int length = (int) new File(filePath).length();
try (FileInputStream stream = new FileInputStream(filePath))
{
byte[] result = new byte[length];
stream.read(result);
return result;
}
}
}
| Simplified code
| app/src/main/java/com/fkeglevich/rawdumper/io/async/IOUtil.java | Simplified code | <ide><path>app/src/main/java/com/fkeglevich/rawdumper/io/async/IOUtil.java
<ide>
<ide> public static void saveBytes(byte[] data, String filePath) throws IOException
<ide> {
<del> BufferedOutputStream bos = null;
<del> try
<add> try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(new File(filePath))))
<ide> {
<del> bos = new BufferedOutputStream(new FileOutputStream(new File(filePath)));
<ide> bos.write(data);
<ide> bos.flush();
<del> }
<del> finally
<del> {
<del> if (bos != null)
<del> bos.close();
<ide> }
<ide> }
<ide> |
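Editor's note: the 'Simplified code' commit above replaces a manual try/finally close with try-with-resources, which closes the stream on every exit path. Note also that readBytes() relies on a single InputStream.read() call, which Java permits to return fewer bytes than requested; a hedged sketch combining both ideas (method and class names are hypothetical):

import java.io.FileInputStream;
import java.io.IOException;

public class ReadFullySketch {
    // try-with-resources guarantees close(); the loop tolerates short reads.
    static byte[] readAll(String path, int length) throws IOException {
        byte[] buffer = new byte[length];
        try (FileInputStream in = new FileInputStream(path)) {
            int offset = 0;
            while (offset < length) {
                int n = in.read(buffer, offset, length - offset);
                if (n < 0) {
                    throw new IOException("unexpected end of file");
                }
                offset += n;
            }
        }
        return buffer;
    }
}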
|
JavaScript | mit | ea5d7d1775d278df42f8284d5952db3fe7f8e11d | 0 | cvn/angular-shims-placeholder,akkunchoi/angular-shims-placeholder,cvn/angular-shims-placeholder,radotzki/angular-shims-placeholder,radotzki/angular-shims-placeholder,akkunchoi/angular-shims-placeholder | /*
* angular-shims-placeholder
* https://github.com/jrief/angular-shims-placeholder
*
* Add Angular directives which emulates attribute ´placeholder´ in input fields
* of type text for browsers not supporting this, ie. IE9 and below.
*
* Copyright (c) 2013 Jacob Rief
* Licensed under the MIT license.
*/
(function(angular, document, undefined) {
'use strict';
angular.module('ng.shims.placeholder', [])
.service('placeholderSniffer', function($document){
this.hasPlaceholder = function() {
// test for native placeholder support
var test = $document[0].createElement("input");
return (test.placeholder !== void 0);
};
})
.directive('placeholder', function($timeout, $document, placeholderSniffer) {
if (placeholderSniffer.hasPlaceholder()) return {};
var documentListenersApplied = false;
// No native support for attribute placeholder
return {
restrict: 'A',
require: '?ngModel',
priority: 110, // run after ngModel (0) and BOOLEAN_ATTR (100) directives
link: function(scope, elem, attrs, ngModel) {
var orig_val = getValue(),
domElem = elem[0],
elemType = domElem.nodeName.toLowerCase(),
isInput = elemType === 'input' || elemType === 'textarea',
is_pwd = attrs.type === 'password',
text = attrs.placeholder,
emptyClassName = 'empty',
clone;
if (!text || !isInput) { return; }
if (is_pwd) { setupPasswordPlaceholder(); }
// init
setValue(orig_val);
// on focus, replace auto-label with empty field
elem.bind('focus', function() {
if (elem.hasClass(emptyClassName)) {
elem.val('');
elem.removeClass(emptyClassName);
domElem.select(); // IE8/9 show text cursor after tabbing in
}
});
// on blur, show placeholder if necessary
elem.bind('blur', updateValue);
// handler for model-less inputs to interact with non-angular code
// TODO: vs `$watch(function(){return elem.val()})`
if (!ngModel) {
elem.bind('change', updateValue);
}
// model -> view
if (ngModel) {
ngModel.$render = function() {
setValue(ngModel.$viewValue);
// IE8/9: show text cursor after updating value while
// focused, this happens when tabbing into a field, and the
// deferred keydown handler from the previous field fires
//
// TODO: remove when tab key behavior is fixed in
// angular core
if (domElem === document.activeElement && !elem.val()) {
domElem.select();
}
};
}
if (!documentListenersApplied) {
// cancel selection of placeholder text on disabled elements
// disabled elements do not emit selectstart events in IE8/IE9,
// so bind to $document and catch the event as it bubbles
$document.on('selectstart', function (e) {
var elmn = angular.element(e.target);
if (elmn.hasClass(emptyClassName) && elmn.prop('disabled')) {
e.preventDefault();
}
});
documentListenersApplied = true;
}
function updateValue(e) {
var val = elem.val();
// don't update from placeholder, helps debounce
if (elem.hasClass(emptyClassName) && val === text) { return; }
conditionalDefer(function(){ setValue(val); });
}
function conditionalDefer(callback) {
// IE8/9: ngModel uses a keydown handler with deferrered
// execution to check for changes to the input. this $timeout
// prevents callback from firing before the keydown handler,
// which is an issue when tabbing out of an input.
// the conditional tests IE version, matches $sniffer.
//
// TODO: remove this function when tab key behavior is fixed in
// angular core
if (document.documentMode <= 11) {
$timeout(callback, 0);
} else {
callback();
}
}
function setValue(val) {
if (!val && domElem !== document.activeElement) {
// show placeholder when necessary
elem.addClass(emptyClassName);
if (is_pwd) {
showPasswordPlaceholder();
} else {
elem.val(text);
}
} else {
// otherwise set input to actual value
elem.removeClass(emptyClassName);
if (is_pwd) {
hidePasswordPlaceholder();
}
elem.val(val);
}
}
function getValue() {
if (ngModel) {
// use eval because $viewValue isn't ready during init
// TODO: this might not to work during unit tests, investigate
return scope.$eval(attrs.ngModel) || '';
}
return getDomValue() || '';
}
// IE8/9: elem.val() on an empty field sometimes returns the
// placeholder value, so return an empty string instead
// http://stackoverflow.com/q/11208417/490592
// I believe IE is persisting the field value across refreshes
// TODO: vs `elem.attr('value')`
function getDomValue() {
var val = elem.val();
if (val === attrs.placeholder) {
val = '';
}
return val;
}
function setAttrUnselectable(elmn, enable) {
if (enable) {
elmn.attr('unselectable', 'on');
} else {
elmn.removeAttr('unselectable');
}
}
// IE8: password inputs cannot display text, and inputs cannot
// change type, so create a new element to display placeholder
function setupPasswordPlaceholder() {
clone = angular.element('<input type="text" value="'+text+'"/>');
stylePasswordPlaceholder();
clone.addClass(emptyClassName)
.addClass('ng-hide')
.bind('focus', hidePasswordPlaceholderAndFocus);
domElem.parentNode.insertBefore(clone[0], domElem);
// keep password placeholder in sync with original element.
// update element after BOOLEAN_ATTR directives' $watches
var watchAttrs = [
attrs.ngDisabled,
attrs.ngReadonly
];
var watchUpdate = function() {
if (elem.hasClass(emptyClassName)) {
showPasswordPlaceholder();
}
};
for (var i = 0; i < watchAttrs.length; i++) {
if (watchAttrs[i]) {
scope.$watch(watchAttrs[i], watchUpdate);
}
}
}
function stylePasswordPlaceholder() {
clone.val(text)
.attr('class', elem.attr('class') || '')
.attr('style', elem.attr('style') || '')
.prop('disabled', elem.prop('disabled'))
.prop('readOnly', elem.prop('readOnly'));
setAttrUnselectable(clone, elem.attr('unselectable') === 'on');
}
function showPasswordPlaceholder() {
stylePasswordPlaceholder();
elem.addClass('ng-hide');
clone.removeClass('ng-hide');
}
function hidePasswordPlaceholder() {
clone.addClass('ng-hide');
elem.removeClass('ng-hide');
}
function hidePasswordPlaceholderAndFocus() {
hidePasswordPlaceholder();
domElem.focus();
}
}
};
});
})(window.angular, window.document);
| lib/angular-placeholder.js | /*
* angular-shims-placeholder
* https://github.com/jrief/angular-shims-placeholder
*
* Add Angular directives which emulates attribute ´placeholder´ in input fields
* of type text for browsers not supporting this, ie. IE9 and below.
*
* Copyright (c) 2013 Jacob Rief
* Licensed under the MIT license.
*/
(function(angular, document, undefined) {
'use strict';
angular.module('ng.shims.placeholder', [])
.service('placeholderSniffer', function($document){
this.hasPlaceholder = function() {
// test for native placeholder support
var test = $document[0].createElement("input");
return (test.placeholder !== void 0);
};
})
.directive('placeholder', function($timeout, $document, placeholderSniffer) {
if (placeholderSniffer.hasPlaceholder()) return {};
var documentListenersApplied = false;
// No native support for attribute placeholder
return {
restrict: 'A',
require: '?ngModel',
priority: 101, // ngModel is 0, ngDisabled is 100, run after both
link: function(scope, elem, attrs, ngModel) {
var orig_val = getValue(),
domElem = elem[0],
elemType = domElem.nodeName.toLowerCase(),
isInput = elemType === 'input' || elemType === 'textarea',
is_pwd = attrs.type === 'password',
text = attrs.placeholder,
emptyClassName = 'empty',
clone;
if (!text || !isInput) { return; }
if (is_pwd) { setupPasswordPlaceholder(); }
// init
setValue(orig_val);
// on focus, replace auto-label with empty field
elem.bind('focus', function() {
if (elem.hasClass(emptyClassName)) {
elem.val('');
elem.removeClass(emptyClassName);
domElem.select(); // IE8/9 show text cursor after tabbing in
}
});
// on blur, show placeholder if necessary
elem.bind('blur', updateValue);
// handler for model-less inputs to interact with non-angular code
// TODO: vs `$watch(function(){return elem.val()})`
if (!ngModel) {
elem.bind('change', updateValue);
}
// model -> view
if (ngModel) {
ngModel.$render = function() {
setValue(ngModel.$viewValue);
// IE8/9: show text cursor after updating value while
// focused, this happens when tabbing into a field, and the
// deferred keydown handler from the previous field fires
//
// TODO: remove when tab key behavior is fixed in
// angular core
if (domElem === document.activeElement && !elem.val()) {
domElem.select();
}
};
}
if (!documentListenersApplied) {
// cancel selection of placeholder text on disabled elements
// disabled elements do not emit selectstart events in IE8/IE9,
// so bind to $document and catch the event as it bubbles
$document.on('selectstart', function (e) {
var elmn = angular.element(e.target);
if (elmn.hasClass(emptyClassName) && elmn.prop('disabled')) {
e.preventDefault();
}
});
documentListenersApplied = true;
}
function updateValue(e) {
var val = elem.val();
// don't update from placeholder, helps debounce
if (elem.hasClass(emptyClassName) && val === text) { return; }
conditionalDefer(function(){ setValue(val); });
}
function conditionalDefer(callback) {
// IE8/9: ngModel uses a keydown handler with deferrered
// execution to check for changes to the input. this $timeout
// prevents callback from firing before the keydown handler,
// which is an issue when tabbing out of an input.
// the conditional tests IE version, matches $sniffer.
//
// TODO: remove this function when tab key behavior is fixed in
// angular core
if (document.documentMode <= 11) {
$timeout(callback, 0);
} else {
callback();
}
}
function setValue(val) {
if (!val && domElem !== document.activeElement) {
// show placeholder when necessary
elem.addClass(emptyClassName);
if (is_pwd) {
showPasswordPlaceholder();
} else {
elem.val(text);
}
} else {
// otherwise set input to actual value
elem.removeClass(emptyClassName);
if (is_pwd) {
hidePasswordPlaceholder();
}
elem.val(val);
}
}
function getValue() {
if (ngModel) {
// use eval because $viewValue isn't ready during init
// TODO: this might not to work during unit tests, investigate
return scope.$eval(attrs.ngModel) || '';
}
return getDomValue() || '';
}
// IE8/9: elem.val() on an empty field sometimes returns the
// placeholder value, so return an empty string instead
// http://stackoverflow.com/q/11208417/490592
// I believe IE is persisting the field value across refreshes
// TODO: vs `elem.attr('value')`
function getDomValue() {
var val = elem.val();
if (val === attrs.placeholder) {
val = '';
}
return val;
}
function setAttrUnselectable(elmn, enable) {
if (enable) {
elmn.attr('unselectable', 'on');
} else {
elmn.removeAttr('unselectable');
}
}
// IE8: password inputs cannot display text, and inputs cannot
// change type, so create a new element to display placeholder
function setupPasswordPlaceholder() {
clone = angular.element('<input type="text" value="'+text+'"/>');
stylePasswordPlaceholder();
clone.addClass(emptyClassName)
.addClass('ng-hide')
.bind('focus', hidePasswordPlaceholderAndFocus);
domElem.parentNode.insertBefore(clone[0], domElem);
// keep password placeholder in sync with original element.
// update element after $watches
var watchAttrs = [
attrs.ngDisabled,
attrs.ngReadonly
];
var watchUpdate = function() {
if (elem.hasClass(emptyClassName)) {
showPasswordPlaceholder();
}
};
for (var i = 0; i < watchAttrs.length; i++) {
if (watchAttrs[i]) {
scope.$watch(watchAttrs[i], watchUpdate);
}
}
}
function stylePasswordPlaceholder() {
clone.val(text)
.attr('class', elem.attr('class') || '')
.attr('style', elem.attr('style') || '')
.prop('disabled', elem.prop('disabled'))
.prop('readOnly', elem.prop('readOnly'));
setAttrUnselectable(clone, elem.attr('unselectable') === 'on');
}
function showPasswordPlaceholder() {
stylePasswordPlaceholder();
elem.addClass('ng-hide');
clone.removeClass('ng-hide');
}
function hidePasswordPlaceholder() {
clone.addClass('ng-hide');
elem.removeClass('ng-hide');
}
function hidePasswordPlaceholderAndFocus() {
hidePasswordPlaceholder();
domElem.focus();
}
}
};
});
})(window.angular, window.document);
| Change priority to be slightly lower, improve comments
| lib/angular-placeholder.js | Change priority to be slightly lower, improve comments | <ide><path>lib/angular-placeholder.js
<ide> return {
<ide> restrict: 'A',
<ide> require: '?ngModel',
<del> priority: 101, // ngModel is 0, ngDisabled is 100, run after both
<add> priority: 110, // run after ngModel (0) and BOOLEAN_ATTR (100) directives
<ide> link: function(scope, elem, attrs, ngModel) {
<ide> var orig_val = getValue(),
<ide> domElem = elem[0],
<ide> domElem.parentNode.insertBefore(clone[0], domElem);
<ide>
<ide> // keep password placeholder in sync with original element.
<del> // update element after $watches
<add> // update element after BOOLEAN_ATTR directives' $watches
<ide> var watchAttrs = [
<ide> attrs.ngDisabled,
<ide> attrs.ngReadonly |
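Editor's note: the diff above raises the directive's priority to 110 so it is ordered against ngModel (priority 0) and Angular's BOOLEAN_ATTR directives (priority 100). The underlying idea -- ordering components by an explicit numeric priority -- sketched in Java, the dominant language of this dump (names are illustrative):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class PriorityOrderingSketch {
    record Handler(String name, int priority) {}

    public static void main(String[] args) {
        List<Handler> handlers = new ArrayList<>(List.of(
                new Handler("ngModel", 0),
                new Handler("booleanAttr", 100),
                new Handler("placeholderShim", 110)));
        // Sort ascending so higher-priority entries come last, mirroring "runs after".
        handlers.sort(Comparator.comparingInt(Handler::priority));
        handlers.forEach(h -> System.out.println(h.name() + " -> " + h.priority()));
    }
}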
|
JavaScript | mit | 1ca090661ae3375411bb91ac167c6d75b8a44304 | 0 | nemac/landsatfact_usgs_json_api,nemac/landsatfact_usgs_json_api | var pg = require('pg');
var winston = require('winston');
var logger = new (winston.Logger)({
transports: [
new (winston.transports.File)({ filename: 'logs/postgres_handlers.log'})
]
});
logger.level = 'debug';
module.exports = {
//connect to database and return the client object
pg_connect: function(connection_obj){
var pg_client = new pg.Client(connection_obj)
.on('drain', this.pg_clientDrain)
.on('error', this.pg_clientError)
.on('end', this.pg_clientEnd);
pg_client.connect(this.pg_connectionError);
return pg_client;
},
//generic error callback for client, connecting
pg_clientError: function (err) {
'use strict';
if (err) {
// console.error("Client Error: %s", err);
logger.log('error', "Client Error: %s", err);
}
return err;
},
//generic error callback for client, connecting
pg_connectionError: function(err) {
'use strict';
if (err) {
// console.error("Connection Error: %s", err);
logger.log('error', "Connection Error: %s", err);
}
return err;
},
//data test drain callback when all maintenace queries finish
pg_clientDrain: function() {
'use strict';
// console.log('client drain');
this.end();
},
//when client ends
pg_clientEnd: function(result) {
'use strict';
// console.log('client end');
return result;
},
//generic error callback for client,queries
pg_queryError: function (err) {
'use strict';
if (err) {
console.error("Query Error: %s", err);
logger.log('error', "Query Error: %s", err);
};
return err;
},
//generic query on row method.
pg_queryRow: function(row, result) {
'use strict';
console.log('pg_query row' + row);
return row;
},
//generic query end callback
pg_queryEnd: function(result) {
'use strict';
//no rows returned
if (result.rowCount === 0) {
console.log("no row(s) returned.")
logger.log('info', "no row(s) returned.");
} else {
console.log(result.rowCount.toString() + " row(s) returned.")
logger.log('info', result.rowCount.toString() + " row(s) returned.");
}
return result;
},
//generic query generator
query_db: function(pg_client, query_text, parameters){
pg_client.query(query_text, parameters)
.on('error', this.pg_queryError)
.on('row', this.pg_queryRow)
.on('end', this.pg_queryEnd);
},
};
| lib/postgres/postgres_handlers.js | var pg = require('pg');
var winston = require('winston');
var logger = new (winston.Logger)({
transports: [
new (winston.transports.File)({ filename: 'logs/postgres_handlers.log'})
]
});
logger.level = 'debug';
module.exports = {
//connect to database and return the client object
pg_connect: function(connection_obj){
var pg_client = new pg.Client(connection_obj)
.on('drain', this.pg_clientDrain)
.on('error', this.pg_clientError)
.on('end', this.pg_clientEnd);
pg_client.connect(this.pg_connectionError);
return pg_client;
},
//generic error callback for client, connecting
pg_clientError: function (err) {
'use strict';
if (err) {
// console.error("Client Error: %s", err);
logger.log('error', "Client Error: %s", err);
}
return err;
},
//generic error callback for client, connecting
pg_connectionError: function(err) {
'use strict';
if (err) {
// console.error("Connection Error: %s", err);
logger.log('error', "Connection Error: %s", err);
}
return err;
},
//data test drain callback when all maintenace queries finish
pg_clientDrain: function() {
'use strict';
// console.log('client drain');
this.end();
},
//when client ends
pg_clientEnd: function(result) {
'use strict';
// console.log('client end');
return result;
},
//generic error callback for client,queries
pg_queryError: function (err) {
'use strict';
if (err) {
// console.error("Query Error: %s", err);
logger.log('error', "Query Error: %s", err);
};
return err;
},
//generic query on row method.
pg_queryRow: function(row, result) {
'use strict';
//console.log(row);
return row;
},
//generic query end callback
pg_queryEnd: function(result) {
'use strict';
//no rows returned
if (result.rowCount === 0) {
//console.log("no row(s) returned.")
logger.log('info', "no row(s) returned.");
} else {
// console.log(result.rowCount.toString() + " row(s) returned.")
logger.log('info', result.rowCount.toString() + " row(s) returned.");
}
return result;
},
//generic query generator
query_db: function(pg_client, query_text, parameters){
pg_client.query(query_text, parameters)
.on('error', this.pg_queryError)
.on('row', this.pg_queryRow)
.on('end', this.pg_queryEnd);
},
};
| still not sure of why failing
| lib/postgres/postgres_handlers.js | still not sure of why failing | <ide><path>lib/postgres/postgres_handlers.js
<ide> pg_queryError: function (err) {
<ide> 'use strict';
<ide> if (err) {
<del> // console.error("Query Error: %s", err);
<add> console.error("Query Error: %s", err);
<ide> logger.log('error', "Query Error: %s", err);
<ide> };
<ide> return err;
<ide> //generic query on row method.
<ide> pg_queryRow: function(row, result) {
<ide> 'use strict';
<del> //console.log(row);
<add> console.log('pg_query row' + row);
<ide> return row;
<ide> },
<ide>
<ide>
<ide> //no rows returned
<ide> if (result.rowCount === 0) {
<del> //console.log("no row(s) returned.")
<add> console.log("no row(s) returned.")
<ide> logger.log('info', "no row(s) returned.");
<ide> } else {
<del> // console.log(result.rowCount.toString() + " row(s) returned.")
<add> console.log(result.rowCount.toString() + " row(s) returned.")
<ide> logger.log('info', result.rowCount.toString() + " row(s) returned.");
<ide>
<ide> } |
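Editor's note: the commit above ('still not sure of why failing') re-enables console output alongside the winston file logger while chasing an intermittent failure. The same debug-toggle pattern sketched in Java (the dump's dominant language; names are illustrative):

import java.util.logging.Logger;

public class DebugToggleSketch {
    private static final Logger LOGGER = Logger.getLogger(DebugToggleSketch.class.getName());
    private static final boolean DEBUG = true; // flipped on only while investigating

    static void onRow(Object row) {
        if (DEBUG) {
            System.out.println("query row: " + row); // immediate console feedback
        }
        LOGGER.fine(() -> "query row: " + row);      // goes to the configured handlers
    }

    public static void main(String[] args) {
        onRow("example");
    }
}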
|
Java | apache-2.0 | eb01eb2da5b66396685241121101f09c996fc426 | 0 | jerome79/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,McLeodMoores/starling,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform | /**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.web.analytics.formatting;
import static com.opengamma.web.analytics.formatting.ResultsFormatter.CurrencyDisplay.DISPLAY_CURRENCY;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.web.server.conversion.DoubleValueDecimalPlaceFormatter;
import com.opengamma.web.server.conversion.DoubleValueFormatter;
import com.opengamma.web.server.conversion.DoubleValueSignificantFiguresFormatter;
import com.opengamma.web.server.conversion.DoubleValueSizeBasedDecimalPlaceFormatter;
/**
*
*/
/* package */ class BigDecimalFormatter extends AbstractFormatter<BigDecimal> {
private static final Logger s_logger = LoggerFactory.getLogger(BigDecimalFormatter.class);
private static final Map<String, DoubleValueFormatter> s_formatters = Maps.newHashMap();
private static final DoubleValueFormatter s_defaultFormatter = DoubleValueSignificantFiguresFormatter.NON_CCY_5SF;
private static final DoubleValueFormatter s_defaultCcyFormatter = DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT;
static {
// General
s_formatters.put(ValueRequirementNames.DISCOUNT_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.YIELD_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.INSTANTANEOUS_FORWARD_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOLATILITY_SURFACE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOLATILITY_SURFACE_DATA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.COST_OF_CARRY, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Pricing
s_formatters.put(ValueRequirementNames.PRESENT_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.PV01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.DV01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.GAMMA_CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.RR01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.IR01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.JUMP_TO_DEFAULT, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.PAR_RATE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.PAR_RATE_PARALLEL_CURVE_SHIFT, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.FAIR_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.VALUE_THETA, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.POSITION_FAIR_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.VALUE_FAIR_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.SECURITY_MARKET_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.SECURITY_MODEL_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.UNDERLYING_MARKET_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.UNDERLYING_MODEL_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.DAILY_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
// PnL
s_formatters.put(ValueRequirementNames.PNL, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.DAILY_PNL, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.MTM_PNL, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Greeks
s_formatters.put(ValueRequirementNames.DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DELTA_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.STRIKE_DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.STRIKE_GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA_P_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VEGA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VEGA_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_VEGA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VEGA_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.THETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.RHO, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CARRY_RHO, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.BUCKETED_CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.BUCKETED_GAMMA_CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.BUCKETED_IR01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.YIELD_CURVE_JACOBIAN, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FX_IMPLIED_TRANSITION_MATRIX, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.ULTIMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_ULTIMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.SPEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.SPEED_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VANNA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_VANNA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DVANNA_DVOL, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOMMA_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_VOMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DUAL_DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DUAL_GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_VEGA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_VANNA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_VOMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.IMPLIED_VOLATILITY, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DRIFTLESS_THETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// Position/value greeks
addBulkConversion("(POSITION_|VALUE_).*", DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Series analysis
s_formatters.put(ValueRequirementNames.SKEW, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.FISHER_KURTOSIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.PEARSON_KURTOSIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
// VaR
s_formatters.put(ValueRequirementNames.HISTORICAL_VAR, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.PARAMETRIC_VAR, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.HISTORICAL_VAR_STDDEV, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.CONDITIONAL_HISTORICAL_VAR,
DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Capital Asset Pricing
s_formatters.put(ValueRequirementNames.CAPM_BETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA_RESIDUALS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA_RESIDUALS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ADJUSTED_R_SQUARED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA_TSTATS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA_TSTATS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA_PVALUES, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA_PVALUES, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_MEAN_SQUARE_ERROR, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_R_SQUARED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_STANDARD_ERROR_OF_ALPHA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_STANDARD_ERROR_OF_BETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// Traditional Risk-Reward
s_formatters.put(ValueRequirementNames.SHARPE_RATIO, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.TREYNOR_RATIO, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.JENSENS_ALPHA, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.TOTAL_RISK_ALPHA, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.WEIGHT, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
// Bonds
s_formatters.put(ValueRequirementNames.CLEAN_PRICE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.DIRTY_PRICE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.YTM, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.MARKET_YTM, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.MARKET_DIRTY_PRICE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.MACAULAY_DURATION, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.CONVEXITY, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.Z_SPREAD, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.CONVERTION_FACTOR, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.IMPLIED_REPO, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.GROSS_BASIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.NET_BASIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.NS_BOND_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.NSS_BOND_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// Options
s_formatters.put(ValueRequirementNames.SECURITY_IMPLIED_VOLATILITY, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// FX
s_formatters.put(ValueRequirementNames.FX_PRESENT_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
}
private final ResultsFormatter.CurrencyDisplay _currencyDisplay;
/* package */ BigDecimalFormatter(ResultsFormatter.CurrencyDisplay currencyDisplay) {
super(BigDecimal.class);
ArgumentChecker.notNull(currencyDisplay, "currencyDisplay");
_currencyDisplay = currencyDisplay;
addFormatter(new Formatter<BigDecimal>(Format.HISTORY) {
@Override
Object format(BigDecimal value, ValueSpecification valueSpec, Object inlineKey) {
return getFormatter(valueSpec).getRoundedValue(value);
}
});
addFormatter(new Formatter<BigDecimal>(Format.EXPANDED) {
@Override
Object format(BigDecimal value, ValueSpecification valueSpec, Object inlineKey) {
return formatCell(value, valueSpec, inlineKey);
}
});
}
private static void addBulkConversion(String valueRequirementFieldNamePattern, DoubleValueFormatter conversionSettings) {
Pattern pattern = Pattern.compile(valueRequirementFieldNamePattern);
for (Field field : ValueRequirementNames.class.getFields()) {
if ((field.getModifiers() & (Modifier.STATIC | Modifier.PUBLIC)) == (Modifier.STATIC | Modifier.PUBLIC) &&
field.isSynthetic() == false &&
String.class.equals(field.getType()) && pattern.matcher(field.getName()).matches()) {
String fieldValue;
try {
fieldValue = (String) field.get(null);
s_formatters.put(fieldValue, conversionSettings);
} catch (Exception e) {
s_logger.debug("Unexpected exception initializing formatter", e);
}
}
}
}
private static DoubleValueFormatter getFormatter(ValueSpecification valueSpec) {
if (valueSpec == null) {
return s_defaultFormatter;
}
DoubleValueFormatter valueNameFormatter = s_formatters.get(valueSpec.getValueName());
if (valueNameFormatter != null) {
return valueNameFormatter;
} else {
if (valueSpec.getProperties().getValues(ValuePropertyNames.CURRENCY) != null) {
return s_defaultCcyFormatter;
}
return s_defaultFormatter;
}
}
@Override
public DataType getDataType() {
return DataType.DOUBLE;
}
@Override
public String formatCell(BigDecimal value, ValueSpecification valueSpec, Object inlineKey) {
DoubleValueFormatter formatter = getFormatter(valueSpec);
String formattedNumber = formatter.format(value);
return formatter.isCurrencyAmount() && _currencyDisplay == DISPLAY_CURRENCY ?
formatWithCurrency(formattedNumber, valueSpec) :
formattedNumber;
}
private String formatWithCurrency(String formattedNumber, ValueSpecification valueSpec) {
Set<String> currencyValues = valueSpec.getProperties().getValues(ValuePropertyNames.CURRENCY);
return currencyValues == null || currencyValues.isEmpty() ?
formattedNumber :
currencyValues.iterator().next() + " " + formattedNumber;
}
}
| projects/OG-Web/src/main/java/com/opengamma/web/analytics/formatting/BigDecimalFormatter.java | /**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.web.analytics.formatting;
import static com.opengamma.web.analytics.formatting.ResultsFormatter.CurrencyDisplay.DISPLAY_CURRENCY;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.web.server.conversion.DoubleValueDecimalPlaceFormatter;
import com.opengamma.web.server.conversion.DoubleValueFormatter;
import com.opengamma.web.server.conversion.DoubleValueSignificantFiguresFormatter;
import com.opengamma.web.server.conversion.DoubleValueSizeBasedDecimalPlaceFormatter;
/**
*
*/
/* package */ class BigDecimalFormatter extends AbstractFormatter<BigDecimal> {
private static final Logger s_logger = LoggerFactory.getLogger(BigDecimalFormatter.class);
private static final Map<String, DoubleValueFormatter> s_formatters = Maps.newHashMap();
private static final DoubleValueFormatter s_defaultFormatter = DoubleValueSignificantFiguresFormatter.NON_CCY_5SF;
private static final DoubleValueFormatter s_defaultCcyFormatter = DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT;
static {
// General
s_formatters.put(ValueRequirementNames.DISCOUNT_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.YIELD_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.INSTANTANEOUS_FORWARD_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOLATILITY_SURFACE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOLATILITY_SURFACE_DATA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.COST_OF_CARRY, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Pricing
s_formatters.put(ValueRequirementNames.PRESENT_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.PV01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.DV01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.GAMMA_CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.RR01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.IR01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.JUMP_TO_DEFAULT, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.PAR_RATE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.PAR_RATE_PARALLEL_CURVE_SHIFT, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.FAIR_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.VALUE_THETA, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.POSITION_FAIR_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.VALUE_FAIR_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.SECURITY_MARKET_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.SECURITY_MODEL_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.UNDERLYING_MARKET_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.UNDERLYING_MODEL_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
s_formatters.put(ValueRequirementNames.DAILY_PRICE, DoubleValueSignificantFiguresFormatter.of(5, true));
// PnL
s_formatters.put(ValueRequirementNames.PNL, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.DAILY_PNL, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.MTM_PNL, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Greeks
s_formatters.put(ValueRequirementNames.DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DELTA_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.STRIKE_DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.STRIKE_GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.GAMMA_P_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VEGA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VEGA_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_VEGA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VEGA_BLEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.THETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.RHO, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CARRY_RHO, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.BUCKETED_CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.BUCKETED_GAMMA_CS01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.BUCKETED_IR01, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.YIELD_CURVE_JACOBIAN, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FX_IMPLIED_TRANSITION_MATRIX, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.ULTIMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_ULTIMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.SPEED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.SPEED_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VANNA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_VANNA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DVANNA_DVOL, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VOMMA_P, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.VARIANCE_VOMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DUAL_DELTA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.DUAL_GAMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_VEGA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_VANNA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.FORWARD_VOMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.IMPLIED_VOLATILITY, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// Position/value greeks
addBulkConversion("(POSITION_|VALUE_).*", DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Series analysis
s_formatters.put(ValueRequirementNames.SKEW, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.FISHER_KURTOSIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.PEARSON_KURTOSIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
// VaR
s_formatters.put(ValueRequirementNames.HISTORICAL_VAR, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.PARAMETRIC_VAR, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.HISTORICAL_VAR_STDDEV, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
s_formatters.put(ValueRequirementNames.CONDITIONAL_HISTORICAL_VAR,
DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
// Capital Asset Pricing
s_formatters.put(ValueRequirementNames.CAPM_BETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA_RESIDUALS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA_RESIDUALS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ADJUSTED_R_SQUARED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA_TSTATS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA_TSTATS, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_ALPHA_PVALUES, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_BETA_PVALUES, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_MEAN_SQUARE_ERROR, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_R_SQUARED, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_STANDARD_ERROR_OF_ALPHA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.CAPM_REGRESSION_STANDARD_ERROR_OF_BETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// Traditional Risk-Reward
s_formatters.put(ValueRequirementNames.SHARPE_RATIO, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.TREYNOR_RATIO, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.JENSENS_ALPHA, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.TOTAL_RISK_ALPHA, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.WEIGHT, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
// Bonds
s_formatters.put(ValueRequirementNames.CLEAN_PRICE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.DIRTY_PRICE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.YTM, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.MARKET_YTM, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.MARKET_DIRTY_PRICE, DoubleValueDecimalPlaceFormatter.NON_CCY_6DP);
s_formatters.put(ValueRequirementNames.MACAULAY_DURATION, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.CONVEXITY, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.Z_SPREAD, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.CONVERTION_FACTOR, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.IMPLIED_REPO, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.GROSS_BASIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.NET_BASIS, DoubleValueDecimalPlaceFormatter.NON_CCY_4DP);
s_formatters.put(ValueRequirementNames.NS_BOND_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
s_formatters.put(ValueRequirementNames.NSS_BOND_CURVE, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// Options
s_formatters.put(ValueRequirementNames.SECURITY_IMPLIED_VOLATILITY, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
// FX
s_formatters.put(ValueRequirementNames.FX_PRESENT_VALUE, DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT);
}
private final ResultsFormatter.CurrencyDisplay _currencyDisplay;
/* package */ BigDecimalFormatter(ResultsFormatter.CurrencyDisplay currencyDisplay) {
super(BigDecimal.class);
ArgumentChecker.notNull(currencyDisplay, "currencyDisplay");
_currencyDisplay = currencyDisplay;
addFormatter(new Formatter<BigDecimal>(Format.HISTORY) {
@Override
Object format(BigDecimal value, ValueSpecification valueSpec, Object inlineKey) {
return getFormatter(valueSpec).getRoundedValue(value);
}
});
addFormatter(new Formatter<BigDecimal>(Format.EXPANDED) {
@Override
Object format(BigDecimal value, ValueSpecification valueSpec, Object inlineKey) {
return formatCell(value, valueSpec, inlineKey);
}
});
}
private static void addBulkConversion(String valueRequirementFieldNamePattern, DoubleValueFormatter conversionSettings) {
Pattern pattern = Pattern.compile(valueRequirementFieldNamePattern);
for (Field field : ValueRequirementNames.class.getFields()) {
if ((field.getModifiers() & (Modifier.STATIC | Modifier.PUBLIC)) == (Modifier.STATIC | Modifier.PUBLIC) &&
field.isSynthetic() == false &&
String.class.equals(field.getType()) && pattern.matcher(field.getName()).matches()) {
String fieldValue;
try {
fieldValue = (String) field.get(null);
s_formatters.put(fieldValue, conversionSettings);
} catch (Exception e) {
s_logger.debug("Unexpected exception initializing formatter", e);
}
}
}
}
private static DoubleValueFormatter getFormatter(ValueSpecification valueSpec) {
if (valueSpec == null) {
return s_defaultFormatter;
}
DoubleValueFormatter valueNameFormatter = s_formatters.get(valueSpec.getValueName());
if (valueNameFormatter != null) {
return valueNameFormatter;
} else {
if (valueSpec.getProperties().getValues(ValuePropertyNames.CURRENCY) != null) {
return s_defaultCcyFormatter;
}
return s_defaultFormatter;
}
}
@Override
public DataType getDataType() {
return DataType.DOUBLE;
}
@Override
public String formatCell(BigDecimal value, ValueSpecification valueSpec, Object inlineKey) {
DoubleValueFormatter formatter = getFormatter(valueSpec);
String formattedNumber = formatter.format(value);
return formatter.isCurrencyAmount() && _currencyDisplay == DISPLAY_CURRENCY ?
formatWithCurrency(formattedNumber, valueSpec) :
formattedNumber;
}
private String formatWithCurrency(String formattedNumber, ValueSpecification valueSpec) {
Set<String> currencyValues = valueSpec.getProperties().getValues(ValuePropertyNames.CURRENCY);
return currencyValues == null || currencyValues.isEmpty() ?
formattedNumber :
currencyValues.iterator().next() + " " + formattedNumber;
}
}
| [PLAT-4799] Not formatting driftless theta as a currency amount
| projects/OG-Web/src/main/java/com/opengamma/web/analytics/formatting/BigDecimalFormatter.java | [PLAT-4799] Not formatting driftless theta as a currency amount | <ide><path>projects/OG-Web/src/main/java/com/opengamma/web/analytics/formatting/BigDecimalFormatter.java
<ide> s_formatters.put(ValueRequirementNames.FORWARD_VANNA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
<ide> s_formatters.put(ValueRequirementNames.FORWARD_VOMMA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
<ide> s_formatters.put(ValueRequirementNames.IMPLIED_VOLATILITY, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
<add> s_formatters.put(ValueRequirementNames.DRIFTLESS_THETA, DoubleValueSignificantFiguresFormatter.NON_CCY_5SF);
<ide>
<ide> // Position/value greeks
<ide> addBulkConversion("(POSITION_|VALUE_).*", DoubleValueSizeBasedDecimalPlaceFormatter.CCY_DEFAULT); |
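Editor's note: the one-line diff above works because getFormatter() falls back to the currency formatter whenever the value carries a CURRENCY property; registering DRIFTLESS_THETA explicitly in s_formatters short-circuits that fallback. The lookup-with-fallback pattern in miniature (hypothetical names and formatter strings):

import java.util.HashMap;
import java.util.Map;

public class FormatterLookupSketch {
    private static final Map<String, String> REGISTRY = new HashMap<>();
    static {
        REGISTRY.put("DRIFTLESS_THETA", "5 significant figures, no currency");
    }

    static String formatterFor(String valueName, boolean hasCurrencyProperty) {
        String explicit = REGISTRY.get(valueName);
        if (explicit != null) {
            return explicit; // explicit registration wins over the currency fallback
        }
        return hasCurrencyProperty ? "currency formatter" : "default formatter";
    }

    public static void main(String[] args) {
        System.out.println(formatterFor("DRIFTLESS_THETA", true));
    }
}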
|
Java | apache-2.0 | error: pathspec 'src/java/Sistema/CDBeLC.java' did not match any file(s) known to git
| c8b4bc1e533e53b1efcd89aca436ddb4b37a3f97 | 1 | eldersalesss/simuladorinvestimentos,eldersalesss/simuladorinvestimentos | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Sistema;
/**
*
* @author Bruno
*/
public class CDBeLC implements Investimentos {
private double taxaCDB;
private double taxaLC;
private double valor;
private int tempo;
public CDBeLC(int valor, int tempo) {
this.valor = valor;
this.tempo = tempo;
}
public double simuladorCDB() {
throw new UnsupportedOperationException("Método em desenvolvimento");
}
public double simuladorLC() {
throw new UnsupportedOperationException("Método em desenvolvimento");
}
}
| src/java/Sistema/CDBeLC.java | Creating the CDBeLC.java class | src/java/Sistema/CDBeLC.java | Creating the CDBeLC.java class | <ide><path>src/java/Sistema/CDBeLC.java
<add>/*
<add> * To change this license header, choose License Headers in Project Properties.
<add> * To change this template file, choose Tools | Templates
<add> * and open the template in the editor.
<add> */
<add>package Sistema;
<add>
<add>/**
<add> *
<add> * @author Bruno
<add> */
<add>public class CDBeLC implements Investimentos {
<add>
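<add> // Interest rates for CDB and LC, the invested amount, and the investment period.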
<add> private double taxaCDB;
<add> private double taxaLC;
<add> private double valor;
<add> private int tempo;
<add>
<add> public CDBeLC(double valor, int tempo) {
<add> this.valor = valor;
<add> this.tempo = tempo;
<add> }
<add>
<add> public double simuladorCDB() {
<add> throw new UnsupportedOperationException("Method under development");
<add> }
<add>
<add> public double simuladorLC() {
<add> throw new UnsupportedOperationException("Method under development");
<add> }
<add>
<add>} |
|
Java | apache-2.0 | 8de070858833c254d7032c58f84ba5880c9daabc | 0 | secdec/burp-extension,secdec/burp-extension | /*
* Copyright (C) 2016 Code Dx, Inc. - http://www.codedx.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package burp;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JTabbedPane;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.codedx.burp.ContextMenuFactory;
import com.codedx.burp.ExportActionListener;
import com.codedx.burp.JTextFieldSettingFocusListener;
import com.codedx.burp.security.SSLConnectionSocketFactoryFactory;
public class BurpExtender implements IBurpExtender, ITab {
public IBurpExtenderCallbacks callbacks;
private JScrollPane pane;
private JTextField serverUrl;
private JTextField apiKey;
private JComboBox<String> targetUrl;
private JComboBox<NameValuePair> projectBox;
private JButton projectRefresh;
private String[] targetArr = new String[0];
private NameValuePair[] projectArr = new BasicNameValuePair[0];
private boolean updating = false;
private ButtonAnimationThread refreshAnimation;
private static final Icon[] refreshSpinner = new ImageIcon[12];
private static final int TIMEOUT = 5000;
public static final String SERVER_KEY = "cdxServer";
public static final String API_KEY = "cdxApiKey";
public static final String TARGET_KEY = "cdxTarget";
public static final String PROJECT_KEY = "cdxProject";
public static final String ALL_URL_STR = "All URLs";
@Override
public void registerExtenderCallbacks(final IBurpExtenderCallbacks callbacks) {
// keep a reference to our callbacks object
this.callbacks = callbacks;
callbacks.registerContextMenuFactory(new ContextMenuFactory(this, callbacks));
// set our extension name
callbacks.setExtensionName("Code Dx");
for(int i = 0; i < refreshSpinner.length; i++)
refreshSpinner[i] = new ImageIcon(BurpExtender.class.getResource("/"+i+".png"));
// create our UI
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
pane = new JScrollPane(createMainPanel());
refreshAnimation = new ButtonAnimationThread(projectRefresh, refreshSpinner);
callbacks.customizeUiComponent(pane);
// add the custom tab to Burp's UI
callbacks.addSuiteTab(BurpExtender.this);
// add listener to update projects list when Code Dx tab selected
Component parent = pane.getParent();
if(parent instanceof JTabbedPane){
JTabbedPane tabs = (JTabbedPane) parent;
tabs.addChangeListener(new ChangeListener(){
@Override
public void stateChanged(ChangeEvent arg0) {
if (pane == tabs.getSelectedComponent() && !updating
&& !"".equals(serverUrl.getText()) && !"".equals(apiKey.getText())) {
Thread updateThread = new Thread() {
public void run(){
updateTargets();
updateProjects(true);
int activeProject = getSavedProjectIndex();
if(activeProject != -1)
projectBox.setSelectedIndex(activeProject);
}
};
updateThread.start();
}
}
});
}
}
});
}
private JPanel createMainPanel() {
JPanel main = new JPanel();
main.setLayout(new GridBagLayout());
// Create Settings Panel
JPanel settings = new JPanel(new GridBagLayout());
createTitle("Settings", settings);
KeyListener projectEnter = new KeyAdapter(){
@Override
public void keyPressed(KeyEvent k) {
if(k.getKeyCode() == KeyEvent.VK_ENTER)
updateProjects();
}
};
serverUrl = labelTextField("Server URL: ", settings, callbacks.loadExtensionSetting(BurpExtender.SERVER_KEY));
serverUrl.addKeyListener(projectEnter);
serverUrl.addFocusListener(new JTextFieldSettingFocusListener(BurpExtender.SERVER_KEY, callbacks));
apiKey = labelTextField("API Key: ", settings, callbacks.loadExtensionSetting(BurpExtender.API_KEY));
apiKey.addKeyListener(projectEnter);
apiKey.addFocusListener(new JTextFieldSettingFocusListener(BurpExtender.API_KEY, callbacks));
JButton targetRefresh = new JButton();
targetRefresh.addActionListener(new ActionListener(){
@Override
public void actionPerformed(ActionEvent e) {
updateTargets();
}
});
targetUrl = createComboBox("Target URL: ",settings, 3, targetRefresh);
projectRefresh = new JButton();
projectRefresh.addActionListener(new ActionListener(){
@Override
public void actionPerformed(ActionEvent e) {
Thread updateThread = new Thread() {
public void run(){
updateProjects();
}
};
updateThread.start();
}
});
projectBox = createComboBox("Projects: ",settings, 4, projectRefresh);
GridBagConstraints setGBC = new GridBagConstraints();
setGBC.gridy = 3;
setGBC.anchor = GridBagConstraints.NORTHWEST;
main.add(settings, setGBC);
// Separator
Insets ins = new Insets(10, 10, 2, 10);
JSeparator sep = new JSeparator(JSeparator.HORIZONTAL);
callbacks.customizeUiComponent(sep);
GridBagConstraints sepGBC = new GridBagConstraints();
sepGBC.gridwidth = 3;
sepGBC.gridx = 0;
sepGBC.fill = GridBagConstraints.HORIZONTAL;
sepGBC.insets = ins;
main.add(sep, sepGBC);
// Create Export Button
JButton exportBtn = new JButton();
exportBtn.setText("Send to Code Dx");
exportBtn.addActionListener(new ExportActionListener(this, callbacks));
callbacks.customizeUiComponent(exportBtn);
GridBagConstraints btnGBC = new GridBagConstraints();
btnGBC.gridx = 0;
btnGBC.weightx = 1.0;
btnGBC.weighty = 1.0;
btnGBC.insets = ins;
btnGBC.anchor = GridBagConstraints.NORTHWEST;
main.add(exportBtn, btnGBC);
updateTargets();
return main;
}
private void createTitle(String text, Container cont) {
JLabel title = new JLabel(text);
title.setForeground(new Color(229, 137, 0));
Font f = title.getFont();
title.setFont(new Font(f.getName(), Font.BOLD, f.getSize() + 2));
callbacks.customizeUiComponent(title);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridwidth = 0;
gbc.gridx = 0;
gbc.insets = new Insets(8, 10, 0, 0);
gbc.anchor = GridBagConstraints.WEST;
cont.add(title, gbc);
}
private JTextField labelTextField(String label, Container cont, String base) {
createSettingsLabel(label, cont);
JTextField textField = new JTextField(base, 45);
callbacks.customizeUiComponent(textField);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 1;
cont.add(textField, gbc);
return textField;
}
private <T> JComboBox<T> createComboBox(String label, Container cont, int buttonY, JButton button){
createSettingsLabel(label, cont);
JComboBox<T> box = new JComboBox<T>();
box.setMaximumRowCount(16);
callbacks.customizeUiComponent(box);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 1;
gbc.fill = GridBagConstraints.HORIZONTAL;
cont.add(box, gbc);
button.setIcon(refreshSpinner[0]);
button.setPreferredSize(new Dimension(refreshSpinner[0].getIconHeight()+4,refreshSpinner[0].getIconHeight()+4));
callbacks.customizeUiComponent(button);
gbc = new GridBagConstraints();
gbc.gridx = 2;
gbc.gridy = buttonY;
gbc.anchor = GridBagConstraints.WEST;
cont.add(button, gbc);
return box;
}
private void createSettingsLabel(String label, Container cont){
JLabel labelField = new JLabel(label);
labelField.setHorizontalAlignment(SwingConstants.LEFT);
callbacks.customizeUiComponent(labelField);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridwidth = 1;
gbc.gridx = 0;
gbc.insets = new Insets(0, 12, 0, 0);
gbc.anchor = GridBagConstraints.WEST;
cont.add(labelField, gbc);
}
public String getServerUrl() {
String text = serverUrl.getText();
if(text.endsWith("/"))
return text.substring(0, text.length()-1);
return text;
}
public String getApiKey() {
return apiKey.getText();
}
public String getTargetUrl() {
String url = targetUrl.getSelectedItem().toString();
if(ALL_URL_STR.equals(url))
return null;
return url;
}
public String[] getTargetUrls(){
return targetArr.clone();
}
public NameValuePair getProject(){
return (NameValuePair)projectBox.getSelectedItem();
}
public NameValuePair[] getProjects(){
return projectArr.clone();
}
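// Maps the project ID persisted in the extension settings back to its index
// in the projects combo box; returns -1 if that project is no longer listed.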
public int getSavedProjectIndex(){
String activeProject = callbacks.loadExtensionSetting(PROJECT_KEY);
if(projectBox.getItemCount() > 0 && activeProject != null){
for(int i = 0; i < projectBox.getItemCount(); i++){
if(activeProject.equals(projectBox.getItemAt(i).getValue())){
return i;
}
}
}
return -1;
}
public void updateTargets(){
if(targetUrl != null){
Set<String> urlSet = new TreeSet<String>(new UrlComparator());
for(IHttpRequestResponse res : callbacks.getSiteMap(null)){
String site = res.getHttpService().toString();
urlSet.add(site);
}
targetUrl.removeAllItems();
targetUrl.addItem(ALL_URL_STR);
targetArr = urlSet.toArray(new String[urlSet.size()]);
for(String url: targetArr)
targetUrl.addItem(url);
}
}
public void updateProjects(){
updateProjects(false);
}
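// Fetches the project list from the Code Dx REST API and repopulates the
// projects combo box on the Swing event thread once the request completes.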
public void updateProjects(boolean ignoreMessages) {
if(!refreshAnimation.isRunning()){
refreshAnimation = new ButtonAnimationThread(projectRefresh, refreshSpinner);
refreshAnimation.start();
}
updating = true;
CloseableHttpClient client = null;
BufferedReader rd = null;
NameValuePair[] projectArr = new BasicNameValuePair[0];
try{
client = getHttpClient(ignoreMessages);
if(client != null){
HttpGet get = new HttpGet(getServerUrl() + "/api/projects");
get.setHeader("API-Key", getApiKey());
HttpResponse response = client.execute(get);
rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), "UTF-8"));
StringBuffer result = new StringBuffer();
String line = "";
while ((line = rd.readLine()) != null) {
result.append(line);
}
JSONObject obj = new JSONObject(result.toString());
JSONArray projects = obj.getJSONArray("projects");
projectArr = new NameValuePair[projects.length()];
for(int i = 0; i < projectArr.length; i++){
int id = projects.getJSONObject(i).getInt("id");
String name = projects.getJSONObject(i).getString("name");
projectArr[i] = new ModifiedNameValuePair(name,Integer.toString(id));
}
if(projectArr.length == 0 && !ignoreMessages){
warn("No projects were found.");
}
}
} catch (JSONException | IOException e){
if(!ignoreMessages)
error("An error occurred while trying to update the project list.\nCheck that the Server URL and API-Key are correct.");
} catch (Exception e){
if(!ignoreMessages){
error("An unknown error occurred while updating the project list.", e);
}
} finally {
if(client != null)
try {client.close();} catch (IOException e) {}
if(rd != null)
try {rd.close();} catch (IOException e) {}
}
this.projectArr = projectArr;
SwingUtilities.invokeLater(new Runnable(){
@Override
public void run() {
updateProjectComboBox();
updating = false;
}
});
refreshAnimation.end();
}
public void updateProjectComboBox(){
if(projectBox != null){
projectBox.removeAllItems();
for(NameValuePair p: projectArr)
projectBox.addItem(p);
}
}
public CloseableHttpClient getHttpClient(){
return getHttpClient(false);
}
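// Builds an HTTP client with connect/socket/request timeouts and a
// host-specific SSL socket factory for the configured Code Dx server.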
public CloseableHttpClient getHttpClient(boolean ignoreMessages){
try{
RequestConfig config = RequestConfig.custom().setConnectTimeout(TIMEOUT).setSocketTimeout(TIMEOUT)
.setConnectionRequestTimeout(TIMEOUT).build();
return HttpClientBuilder.create()
.setSSLSocketFactory(SSLConnectionSocketFactoryFactory.getFactory(new URL(getServerUrl()).getHost(), this))
.setDefaultRequestConfig(config).build();
} catch (MalformedURLException e){
if(!ignoreMessages)
error("The Server URL is not a valid URL. Please check that it is correct.");
} catch (Exception e){
if(!ignoreMessages){
error("An unknown error occurred while trying to establish the HTTP client.", e);
}
}
return null;
}
public void error(String message){
error(message, null);
}
public void error(String message, Throwable t) {
if(refreshAnimation.isRunning())
refreshAnimation.end();
if(t != null){
StringWriter err = new StringWriter();
t.printStackTrace(new PrintWriter(err));
try {
callbacks.getStderr().write(err.toString().getBytes(Charset.forName("UTF-8")));
message += "\nCheck the error log in the Extensions tab for more details.";
} catch (IOException e) {}
}
JOptionPane.showMessageDialog(getUiComponent(), message, "Error", JOptionPane.ERROR_MESSAGE);
}
public void warn(String message) {
if(refreshAnimation.isRunning())
refreshAnimation.end();
JOptionPane.showMessageDialog(getUiComponent(), message, "Warning", JOptionPane.WARNING_MESSAGE);
}
public void message(String message, String title) {
JOptionPane.showMessageDialog(getUiComponent(), message, title, JOptionPane.PLAIN_MESSAGE);
}
@Override
public String getTabCaption() {
return "Code Dx";
}
@Override
public Component getUiComponent() {
return pane;
}
private static class ModifiedNameValuePair extends BasicNameValuePair{
private static final long serialVersionUID = -6671681121783779976L;
public ModifiedNameValuePair(String name, String value) {
super(name, value);
}
@Override
public String toString(){
return getName() + " (id: " + getValue() + ")";
}
}
private static final String URL_SPLITTER = "://";
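// Orders target URLs by host first and protocol second, so http and https
// entries for the same host sort next to each other.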
private static class UrlComparator implements Comparator<String>{
@Override
public int compare(String s1, String s2) {
String s1Protocol = s1.substring(0, s1.indexOf(URL_SPLITTER));
String s2Protocol = s2.substring(0, s2.indexOf(URL_SPLITTER));
if(s1Protocol.equals(s2Protocol))
return s1.compareTo(s2);
String s1Host = s1.substring(s1.indexOf(URL_SPLITTER)+3);
String s2Host = s2.substring(s2.indexOf(URL_SPLITTER)+3);
if(s1Host.equals(s2Host))
return s1Protocol.compareTo(s2Protocol);
return s1Host.compareTo(s2Host);
}
}
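// Cycles the refresh button through the spinner icons until end() is called,
// then finishes the current revolution and restores the idle frame.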
private static class ButtonAnimationThread extends Thread{
private volatile boolean running = true;
private int next = 1;
private JButton button;
private Icon[] icons;
public ButtonAnimationThread(JButton button, Icon[] icons){
this.button = button;
this.icons = icons;
}
@Override
public void run() {
while(running || next < icons.length - 1){
button.setIcon(icons[next]);
next = (next == icons.length - 1) ? 0 : next + 1;
try {
Thread.sleep(50);
} catch (InterruptedException e) {}
}
button.setIcon(icons[0]);
}
public boolean isRunning() {
return running;
}
public void end() {
running = false;
}
}
} | src/main/java/burp/BurpExtender.java | /*
* Copyright (C) 2016 Code Dx, Inc. - http://www.codedx.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package burp;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JTabbedPane;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.codedx.burp.ContextMenuFactory;
import com.codedx.burp.ExportActionListener;
import com.codedx.burp.JTextFieldSettingFocusListener;
import com.codedx.burp.security.SSLConnectionSocketFactoryFactory;
public class BurpExtender implements IBurpExtender, ITab {
public IBurpExtenderCallbacks callbacks;
private JScrollPane pane;
private JTextField serverUrl;
private JTextField apiKey;
private JComboBox<String> targetUrl;
private JComboBox<NameValuePair> projectBox;
private JButton projectRefresh;
private String[] targetArr = new String[0];
private NameValuePair[] projectArr = new BasicNameValuePair[0];
private boolean updating = false;
private ButtonAnimationThread refreshAnimation;
private static final Icon[] refreshSpinner = new ImageIcon[12];
private static final int TIMEOUT = 5000;
public static final String SERVER_KEY = "cdxServer";
public static final String API_KEY = "cdxApiKey";
public static final String TARGET_KEY = "cdxTarget";
public static final String PROJECT_KEY = "cdxProject";
public static final String ALL_URL_STR = "All URLs";
@Override
public void registerExtenderCallbacks(final IBurpExtenderCallbacks callbacks) {
// keep a reference to our callbacks object
this.callbacks = callbacks;
callbacks.registerContextMenuFactory(new ContextMenuFactory(this, callbacks));
// set our extension name
callbacks.setExtensionName("Code Dx");
for(int i = 0; i < refreshSpinner.length; i++)
refreshSpinner[i] = new ImageIcon(BurpExtender.class.getResource("/"+i+".png"));
// create our UI
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
pane = new JScrollPane(createMainPanel());
refreshAnimation = new ButtonAnimationThread(projectRefresh, refreshSpinner);
callbacks.customizeUiComponent(pane);
// add the custom tab to Burp's UI
callbacks.addSuiteTab(BurpExtender.this);
// add listener to update projects list when Code Dx tab selected
Component parent = pane.getParent();
if(parent instanceof JTabbedPane){
JTabbedPane tabs = (JTabbedPane) parent;
tabs.addChangeListener(new ChangeListener(){
@Override
public void stateChanged(ChangeEvent arg0) {
if (pane == tabs.getSelectedComponent() && !updating
&& !"".equals(serverUrl.getText()) && !"".equals(apiKey.getText())) {
Thread updateThread = new Thread() {
public void run(){
updateTargets();
updateProjects(true);
int activeProject = getSavedProjectIndex();
if(activeProject != -1)
projectBox.setSelectedIndex(activeProject);
}
};
updateThread.start();
}
}
});
}
}
});
}
private JPanel createMainPanel() {
JPanel main = new JPanel();
main.setLayout(new GridBagLayout());
// Create Settings Panel
JPanel settings = new JPanel(new GridBagLayout());
createTitle("Settings", settings);
KeyListener projectEnter = new KeyAdapter(){
@Override
public void keyPressed(KeyEvent k) {
if(k.getKeyCode() == KeyEvent.VK_ENTER)
updateProjects();
}
};
serverUrl = labelTextField("Server URL: ", settings, callbacks.loadExtensionSetting(BurpExtender.SERVER_KEY));
serverUrl.addKeyListener(projectEnter);
serverUrl.addFocusListener(new JTextFieldSettingFocusListener(BurpExtender.SERVER_KEY, callbacks));
apiKey = labelTextField("API Key: ", settings, callbacks.loadExtensionSetting(BurpExtender.API_KEY));
apiKey.addKeyListener(projectEnter);
apiKey.addFocusListener(new JTextFieldSettingFocusListener(BurpExtender.API_KEY, callbacks));
JButton targetRefresh = new JButton();
targetRefresh.addActionListener(new ActionListener(){
@Override
public void actionPerformed(ActionEvent e) {
updateTargets();
}
});
targetUrl = createComboBox("Target URL: ",settings, 3, targetRefresh);
projectRefresh = new JButton();
projectRefresh.addActionListener(new ActionListener(){
@Override
public void actionPerformed(ActionEvent e) {
Thread updateThread = new Thread() {
public void run(){
updateProjects();
}
};
updateThread.start();
}
});
projectBox = createComboBox("Projects: ",settings, 4, projectRefresh);
GridBagConstraints setGBC = new GridBagConstraints();
setGBC.gridy = 3;
setGBC.anchor = GridBagConstraints.NORTHWEST;
main.add(settings, setGBC);
// Separator
Insets ins = new Insets(10, 10, 2, 10);
JSeparator sep = new JSeparator(JSeparator.HORIZONTAL);
callbacks.customizeUiComponent(sep);
GridBagConstraints sepGBC = new GridBagConstraints();
sepGBC.gridwidth = 3;
sepGBC.gridx = 0;
sepGBC.fill = GridBagConstraints.HORIZONTAL;
sepGBC.insets = ins;
main.add(sep, sepGBC);
// Create Export Button
JButton exportBtn = new JButton();
exportBtn.setText("Send to Code Dx");
exportBtn.addActionListener(new ExportActionListener(this, callbacks));
callbacks.customizeUiComponent(exportBtn);
GridBagConstraints btnGBC = new GridBagConstraints();
btnGBC.gridx = 0;
btnGBC.weightx = 1.0;
btnGBC.weighty = 1.0;
btnGBC.insets = ins;
btnGBC.anchor = GridBagConstraints.NORTHWEST;
main.add(exportBtn, btnGBC);
updateTargets();
return main;
}
private void createTitle(String text, Container cont) {
JLabel title = new JLabel(text);
title.setForeground(new Color(229, 137, 0));
Font f = title.getFont();
title.setFont(new Font(f.getName(), Font.BOLD, f.getSize() + 2));
callbacks.customizeUiComponent(title);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridwidth = 0;
gbc.gridx = 0;
gbc.insets = new Insets(8, 10, 0, 0);
gbc.anchor = GridBagConstraints.WEST;
cont.add(title, gbc);
}
private JTextField labelTextField(String label, Container cont, String base) {
createSettingsLabel(label, cont);
JTextField textField = new JTextField(base, 45);
callbacks.customizeUiComponent(textField);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 1;
cont.add(textField, gbc);
return textField;
}
private <T> JComboBox<T> createComboBox(String label, Container cont, int buttonY, JButton button){
createSettingsLabel(label, cont);
JComboBox<T> box = new JComboBox<T>();
box.setMaximumRowCount(16);
callbacks.customizeUiComponent(box);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 1;
gbc.fill = GridBagConstraints.HORIZONTAL;
cont.add(box, gbc);
button.setIcon(refreshSpinner[0]);
button.setPreferredSize(new Dimension(refreshSpinner[0].getIconHeight()+4,refreshSpinner[0].getIconHeight()+4));
callbacks.customizeUiComponent(button);
gbc = new GridBagConstraints();
gbc.gridx = 2;
gbc.gridy = buttonY;
gbc.anchor = GridBagConstraints.WEST;
cont.add(button, gbc);
return box;
}
private void createSettingsLabel(String label, Container cont){
JLabel labelField = new JLabel(label);
labelField.setHorizontalAlignment(SwingConstants.LEFT);
callbacks.customizeUiComponent(labelField);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridwidth = 1;
gbc.gridx = 0;
gbc.insets = new Insets(0, 12, 0, 0);
gbc.anchor = GridBagConstraints.WEST;
cont.add(labelField, gbc);
}
public String getServerUrl() {
String text = serverUrl.getText();
if(text.endsWith("/"))
return text.substring(0, text.length()-1);
return text;
}
public String getApiKey() {
return apiKey.getText();
}
public String getTargetUrl() {
String url = targetUrl.getSelectedItem().toString();
if(ALL_URL_STR.equals(url))
return null;
return url;
}
public String[] getTargetUrls(){
return targetArr.clone();
}
public NameValuePair getProject(){
return (NameValuePair)projectBox.getSelectedItem();
}
public NameValuePair[] getProjects(){
return projectArr.clone();
}
public int getSavedProjectIndex(){
String activeProject = callbacks.loadExtensionSetting(PROJECT_KEY);
if(projectBox.getItemCount() > 0 && activeProject != null){
for(int i = 0; i < projectBox.getItemCount(); i++){
if(activeProject.equals(projectBox.getItemAt(i).getValue())){
return i;
}
}
}
return -1;
}
public void updateTargets(){
if(targetUrl != null){
Set<String> urlSet = new TreeSet<String>(new UrlComparator());
for(IHttpRequestResponse res : callbacks.getSiteMap(null)){
String site = res.getHttpService().toString();
urlSet.add(site);
}
targetUrl.removeAllItems();
targetUrl.addItem(ALL_URL_STR);
targetArr = urlSet.toArray(new String[urlSet.size()]);
for(String url: targetArr)
targetUrl.addItem(url);
}
}
public void updateProjects(){
updateProjects(false);
}
public void updateProjects(boolean ignoreMessages) {
if(!refreshAnimation.isRunning()){
refreshAnimation = new ButtonAnimationThread(projectRefresh, refreshSpinner);
refreshAnimation.start();
}
updating = true;
CloseableHttpClient client = null;
BufferedReader rd = null;
NameValuePair[] projectArr = new BasicNameValuePair[0];
try{
client = getHttpClient(ignoreMessages);
if(client != null){
HttpGet get = new HttpGet(getServerUrl() + "/api/projects");
get.setHeader("API-Key", getApiKey());
HttpResponse response = client.execute(get);
rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), "UTF-8"));
StringBuffer result = new StringBuffer();
String line = "";
while ((line = rd.readLine()) != null) {
result.append(line);
}
JSONObject obj = new JSONObject(result.toString());
JSONArray projects = obj.getJSONArray("projects");
projectArr = new NameValuePair[projects.length()];
for(int i = 0; i < projectArr.length; i++){
int id = projects.getJSONObject(i).getInt("id");
String name = projects.getJSONObject(i).getString("name");
projectArr[i] = new ModifiedNameValuePair(name,Integer.toString(id));
}
if(projectArr.length == 0 && !ignoreMessages){
warn("No projects were found.");
}
}
} catch (JSONException | IOException e){
if(!ignoreMessages)
error("An error occurred while trying to update the project list.\nCheck that the Server URL and API-Key are correct.");
} catch (Exception e){
if(!ignoreMessages){
error("An unknown error occurred while updating the project list.", e);
}
} finally {
if(client != null)
try {client.close();} catch (IOException e) {}
if(rd != null)
try {rd.close();} catch (IOException e) {}
}
this.projectArr = projectArr;
SwingUtilities.invokeLater(new Runnable(){
@Override
public void run() {
updateProjectComboBox();
updating = false;
}
});
refreshAnimation.end();
}
public void updateProjectComboBox(){
if(projectBox != null){
projectBox.removeAllItems();
for(NameValuePair p: projectArr)
projectBox.addItem(p);
}
}
public CloseableHttpClient getHttpClient(){
return getHttpClient(false);
}
public CloseableHttpClient getHttpClient(boolean ignoreMessages){
try{
RequestConfig config = RequestConfig.custom().setConnectTimeout(TIMEOUT).setSocketTimeout(TIMEOUT)
.setConnectionRequestTimeout(TIMEOUT).build();
return HttpClientBuilder.create()
.setSSLSocketFactory(SSLConnectionSocketFactoryFactory.getFactory(new URL(getServerUrl()).getHost(), this))
.setDefaultRequestConfig(config).build();
} catch (MalformedURLException e){
if(!ignoreMessages)
error("The Server URL is not a valid URL. Please check that it is correct.");
} catch (Exception e){
if(!ignoreMessages){
error("An unknown error occurred while trying to establish the HTTP client.", e);
}
}
return null;
}
public void error(String message){
error(message, null);
}
public void error(String message, Throwable t) {
if(refreshAnimation.isRunning())
refreshAnimation.end();
JOptionPane.showMessageDialog(getUiComponent(), message, "Error", JOptionPane.ERROR_MESSAGE);
if(t != null){
StringWriter err = new StringWriter();
t.printStackTrace(new PrintWriter(err));
try {
err.write("\nCheck the error log in the Extensions tab for more details.");
callbacks.getStderr().write(err.toString().getBytes(Charset.forName("UTF-8")));
} catch (IOException e) {}
}
}
public void warn(String message) {
if(refreshAnimation.isRunning())
refreshAnimation.end();
JOptionPane.showMessageDialog(getUiComponent(), message, "Warning", JOptionPane.WARNING_MESSAGE);
}
public void message(String message, String title) {
JOptionPane.showMessageDialog(getUiComponent(), message, title, JOptionPane.PLAIN_MESSAGE);
}
@Override
public String getTabCaption() {
return "Code Dx";
}
@Override
public Component getUiComponent() {
return pane;
}
private static class ModifiedNameValuePair extends BasicNameValuePair{
private static final long serialVersionUID = -6671681121783779976L;
public ModifiedNameValuePair(String name, String value) {
super(name, value);
}
@Override
public String toString(){
return getName() + " (id: " + getValue() + ")";
}
}
private static final String URL_SPLITTER = "://";
private static class UrlComparator implements Comparator<String>{
@Override
public int compare(String s1, String s2) {
String s1Protocol = s1.substring(0, s1.indexOf(URL_SPLITTER));
String s2Protocol = s2.substring(0, s2.indexOf(URL_SPLITTER));
if(s1Protocol.equals(s2Protocol))
return s1.compareTo(s2);
String s1Host = s1.substring(s1.indexOf(URL_SPLITTER)+3);
String s2Host = s2.substring(s2.indexOf(URL_SPLITTER)+3);
if(s1Host.equals(s2Host))
return s1Protocol.compareTo(s2Protocol);
return s1Host.compareTo(s2Host);
}
}
private static class ButtonAnimationThread extends Thread{
private volatile boolean running = true;
private int next = 1;
private JButton button;
private Icon[] icons;
public ButtonAnimationThread(JButton button, Icon[] icons){
this.button = button;
this.icons = icons;
}
@Override
public void run() {
while(running || next < icons.length - 1){
button.setIcon(icons[next]);
next = (next == icons.length - 1) ? 0 : next + 1;
try {
Thread.sleep(50);
} catch (InterruptedException e) {}
}
button.setIcon(icons[0]);
}
public boolean isRunning() {
return running;
}
public void end() {
running = false;
}
}
} | Fixed issue with error log
| src/main/java/burp/BurpExtender.java | Fixed issue with error log | <ide><path>rc/main/java/burp/BurpExtender.java
<ide> public void error(String message, Throwable t) {
<ide> if(refreshAnimation.isRunning())
<ide> refreshAnimation.end();
<del> JOptionPane.showMessageDialog(getUiComponent(), message, "Error", JOptionPane.ERROR_MESSAGE);
<del>
<ide> if(t != null){
<ide> StringWriter err = new StringWriter();
<ide> t.printStackTrace(new PrintWriter(err));
<ide> try {
<del> err.write("\nCheck the error log in the Extensions tab for more details.");
<ide> callbacks.getStderr().write(err.toString().getBytes(Charset.forName("UTF-8")));
<add> message += "\nCheck the error log in the Extensions tab for more details.";
<ide> } catch (IOException e) {}
<ide> }
<add> JOptionPane.showMessageDialog(getUiComponent(), message, "Error", JOptionPane.ERROR_MESSAGE);
<ide> }
<ide>
<ide> public void warn(String message) { |
|
JavaScript | mit | c4349b4bac4c4292189ecf0cb1f7b5d622f95d67 | 0 | transloadit/node-sdk,transloadit/node-sdk | const localtunnel = require('localtunnel')
const http = require('http')
const querystring = require('querystring')
const temp = require('temp')
const fs = require('fs')
const { join } = require('path')
const { promisify } = require('util')
const { pipeline: streamPipeline, PassThrough } = require('stream')
const got = require('got')
const pipeline = promisify(streamPipeline)
const intoStream = require('into-stream')
const TransloaditClient = require('../../../src/TransloaditClient')
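// Downloads a remote file to a temporary path so tests can attach it as a local upload.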
async function downloadTmpFile (url) {
const { path } = await temp.open('transloadit')
await pipeline(got.stream(url), fs.createWriteStream(path))
return path
}
const authKey = process.env.TRANSLOADIT_KEY
const authSecret = process.env.TRANSLOADIT_SECRET
if (authKey == null || authSecret == null) {
throw new Error('Please specify environment variables TRANSLOADIT_KEY and TRANSLOADIT_SECRET')
}
const startServerAsync = async (handler) => new Promise((resolve, reject) => {
const server = http.createServer(handler)
// Find a port to use
let port = 8000
server.on('error', err => {
if (err.code === 'EADDRINUSE') {
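// Port already in use: try the next one, giving up once we run out of ports.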
if (++port >= 65535) {
server.close()
reject(new Error('Failed to bind to port'))
}
return server.listen(port, '127.0.0.1')
} else {
return reject(err)
}
})
server.listen(port, '127.0.0.1')
// Once a port has been found and the server is ready, set up the
// localtunnel
server.on('listening', async () => {
try {
const tunnel = await localtunnel(port)
// console.log('localtunnel', tunnel.url)
tunnel.on('error', console.error)
tunnel.on('close', () => {
// console.log('tunnel closed')
server.close()
})
return resolve({
url: tunnel.url,
close () {
tunnel.close()
},
})
} catch (err) {
if (err != null) {
server.close()
return reject(err)
}
}
})
})
// https://transloadit.com/demos/importing-files/import-a-file-over-http
const genericImg = 'https://demos.transloadit.com/66/01604e7d0248109df8c7cc0f8daef8/snowflake.jpg'
const sampleSvg = '<?xml version="1.0" standalone="no"?><svg height="100" width="100"><circle cx="50" cy="50" r="40" fill="red" /></svg>'
const resizeOriginalStep = {
robot : '/image/resize',
use : ':original',
result: true,
width : 130,
height: 130,
}
const genericParams = {
params: {
steps: {
import: {
robot: '/http/import',
url : genericImg,
},
resize: {
robot : '/image/resize',
use : 'import',
result: true,
width : 130,
height: 130,
},
},
},
waitForCompletion: true,
}
jest.setTimeout(100000)
describe('API integration', function () {
describe('assembly creation', () => {
it('should create a retrievable assembly on the server', done => {
const client = new TransloaditClient({ authKey, authSecret })
return client.createAssembly(genericParams, (err, result) => {
expect(err).toBeFalsy()
expect(result).not.toHaveProperty('error')
expect(result).toHaveProperty('ok')
expect(result).toHaveProperty('assembly_id') // Since we're using it
const id = result.assembly_id
return client.getAssembly(id, (err, result) => {
expect(err).toBeFalsy()
expect(result).not.toHaveProperty('error')
expect(result).toEqual(expect.objectContaining({
assembly_url: expect.any(String),
ok : expect.any(String),
assembly_id : id,
}))
return done()
})
})
})
it("should signal an error if a file selected for upload doesn't exist", async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
params: {
steps: {
resize: resizeOriginalStep,
},
},
}
client.addFile('original', temp.path({ suffix: '.transloadit.jpg' })) // Non-existing path
const promise = client.createAssemblyAsync(params)
await expect(promise).rejects.toThrow()
await expect(promise).rejects.toThrow(expect.objectContaining({ code: 'ENOENT' }))
})
it('should allow uploading files that do exist', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
params: {
steps: {
resize: resizeOriginalStep,
},
},
}
const path = await downloadTmpFile(genericImg)
client.addFile('original', path)
await client.createAssemblyAsync(params)
})
it('should allow setting fields', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
waitForCompletion: true,
params : {
fields: { myField: 'test' },
steps : { resize: resizeOriginalStep },
},
}
const result = await client.createAssemblyAsync(params)
expect(result.fields.myField).toBe('test')
})
function createStreamFromString (str) {
const rawStream = intoStream(str)
// Workaround for https://github.com/tus/tus-js-client/issues/229
const stream = new PassThrough()
rawStream.pipe(stream)
return stream
}
it('should allow adding a stream', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
waitForCompletion: true,
params : {
steps: {
rasterize: {
robot : '/image/resize',
use : ':original',
format: 'jpg',
},
},
},
}
client.addStream('test', createStreamFromString(sampleSvg))
const result = await client.createAssemblyAsync(params)
expect(result.results.rasterize).toHaveLength(1)
expect(result.results.rasterize[0].name).toBe('test.jpg')
})
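// Shared helper: uploads a file in the given resumability mode and asserts
// that the progress callback reported uploaded byte counts.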
async function testUploadProgress (isResumable) {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
isResumable,
params: {
steps: {
resize: resizeOriginalStep,
},
},
}
const path = await downloadTmpFile(genericImg)
client.addFile('original', path)
let progressCalled = false
function onProgress (progress) {
// console.log(progress)
expect(progress.uploadProgress.uploadedBytes).toBeDefined()
progressCalled = true
}
await client.createAssemblyAsync(params, onProgress)
expect(progressCalled).toBe(true)
}
it('should trigger progress callbacks when uploading files, resumable', async () => {
await testUploadProgress(true)
})
it('should trigger progress callbacks when uploading files, nonresumable', async () => {
await testUploadProgress(false)
})
it('should trigger the callback when waitForCompletion is false', done => {
const client = new TransloaditClient({ authKey, authSecret })
const params = Object.assign({}, genericParams, { waitForCompletion: false })
return client.createAssembly(params, (err, result) => {
expect(err).toBeFalsy()
expect(result).not.toHaveProperty('error')
expect(result).toHaveProperty('ok')
return done()
})
})
it('should exit fast when assembly has failed', async () => {
// An old bug caused it to continuously retry until timeout when errors such as INVALID_FILE_META_DATA
const client = new TransloaditClient({ authKey, authSecret })
const opts = {
params: {
steps: {
resize: resizeOriginalStep,
},
},
waitForCompletion: true,
}
client.addFile('file', join(__dirname, './fixtures/zerobytes.jpg'))
const promise = client.createAssemblyAsync(opts)
await promise.catch((err) => {
expect(err).toMatchObject({ error: 'INVALID_FILE_META_DATA', assembly_id: expect.any(String) })
})
await expect(promise).rejects.toThrow(Error)
}, 7000)
})
describe('assembly cancelation', () => {
it('should stop the assembly from reaching completion', async () => {
const client = new TransloaditClient({ authKey, authSecret })
// We need to ensure that the assembly doesn't complete before it can be
// canceled, so we start an http server for the assembly to import from,
// and delay transmission of data until we've already sent the cancel
// request
// Async book-keeping for delaying the response
let sendServerResponse
const promise = new Promise((resolve) => {
sendServerResponse = resolve
})
const handler = async (req, res) => {
// console.log('handler', req.url)
expect(req.url).toBe('/')
await promise
res.setHeader('Content-type', 'image/jpeg')
res.writeHead(200)
got.stream(genericImg).pipe(res)
}
const server = await startServerAsync(handler)
try {
const params = {
params: {
steps: {
import: {
robot: '/http/import',
url : server.url,
},
resize: {
robot : '/image/resize',
use : 'import',
result: true,
width : 130,
height: 130,
},
},
},
}
// Finally send the createAssembly request
const { assembly_id: id } = await client.createAssemblyAsync(params)
// Now delete it
const resp = await client.deleteAssemblyAsync(id)
// Allow the upload to finish
sendServerResponse()
expect(resp.ok).toBe('ASSEMBLY_CANCELED')
// Successful cancel requests get ASSEMBLY_CANCELED even when it
// completed, so we now request the assembly status to check the
// *actual* status.
const resp2 = await client.getAssemblyAsync(id)
expect(resp2.ok).toBe('ASSEMBLY_CANCELED')
} finally {
server.close()
}
})
})
describe('replaying assemblies', () => {
it('should replay an assembly after it has completed', done => {
const client = new TransloaditClient({ authKey, authSecret })
client.createAssembly(genericParams, (err, { assembly_id: assemblyId } = {}) => {
expect(err).toBeFalsy()
const originalId = assemblyId
// ensure that the assembly has completed
const ensureCompletion = cb =>
client.getAssembly(originalId, (err, result) => {
expect(err).toBeFalsy()
const ok = result.ok
if (ok === 'ASSEMBLY_UPLOADING' || ok === 'ASSEMBLY_EXECUTING') {
setTimeout(() => ensureCompletion(cb), 1000)
} else {
cb()
}
})
// Start an asynchronous loop
ensureCompletion(() =>
client.replayAssembly({ assembly_id: originalId }, (err, { ok } = {}) => {
expect(err).toBeFalsy()
expect(ok).toBe('ASSEMBLY_REPLAYING')
done()
}),
)
})
})
})
describe('assembly list retrieval', () => {
it('should retrieve a list of assemblies', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const result = await client.listAssembliesAsync({})
expect(result).toEqual(expect.objectContaining({ count: expect.any(Number), items: expect.any(Array) }))
})
it('should be able to handle pagination with a stream', done => {
const client = new TransloaditClient({ authKey, authSecret })
const assemblies = client.streamAssemblies({ pagesize: 2 })
let n = 0
let isDone = false
assemblies.on('readable', () => {
const assembly = assemblies.read()
if (isDone) return
if (assembly == null) {
return done()
}
if (n === 5) {
isDone = true
return done()
}
expect(assembly).toHaveProperty('id')
n++
})
})
})
describe('assembly notification', () => {
let server
afterEach(() => {
console.log('closing server')
if (server) server.close()
})
// helper function
const streamToString = (stream) => new Promise((resolve, reject) => {
const chunks = []
stream.on('data', chunk => chunks.push(chunk))
stream.on('error', err => reject(err))
stream.on('end', () => resolve(chunks.join('')))
})
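// Starts a tunneled HTTP server, creates an assembly whose notify_url points
// at it, and forwards each completed-assembly notification to onNotification.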
const runNotificationTest = async (onNotification, onError) => {
const client = new TransloaditClient({ authKey, authSecret })
// listens for notifications
const onNotificationRequest = async (req, res) => {
try {
expect(req.url).toBe('/')
expect(req.method).toBe('POST')
const body = await streamToString(req)
const result = JSON.parse(querystring.parse(body).transloadit)
expect(result).toHaveProperty('ok')
if (result.ok !== 'ASSEMBLY_COMPLETED') return onError(new Error(`result.ok was ${result.ok}`))
res.writeHead(200)
res.end()
onNotification({ client, assemblyId: result.assembly_id })
} catch (err) {
onError(err)
}
}
try {
server = await startServerAsync(onNotificationRequest)
await client.createAssemblyAsync({ params: { ...genericParams.params, notify_url: server.url } })
} catch (err) {
onError(err)
}
}
it('should send a notification upon assembly completion', async () => {
await new Promise((resolve, reject) => runNotificationTest(resolve, reject))
})
it('should replay the notification when requested', (done) => {
let notificationsRecvd = false
const onNotification = async ({ client, assemblyId }) => {
if (notificationsRecvd) {
// If we quit immediately, things will not get cleaned up and jest will hang
await new Promise((resolve) => setTimeout(resolve, 100))
done()
return
}
notificationsRecvd = true
try {
await new Promise((resolve) => setTimeout(resolve, 1000))
await client.replayAssemblyNotificationAsync({ assembly_id: assemblyId })
} catch (err) {
done(err)
}
}
runNotificationTest(onNotification, (err) => done(err))
})
})
describe('template methods', () => {
// can contain only lowercase latin letters, numbers, and dashes.
const templName = `node-sdk-test-${new Date().toISOString().toLocaleLowerCase('en-US').replace(/[^0-9a-z-]/g, '-')}`
let templId = null
const client = new TransloaditClient({ authKey, authSecret })
it('should allow creating a template', async () => {
const { id } = await client.createTemplateAsync({ name: templName, template: genericParams.params })
templId = id
})
it("should be able to fetch a template's definition", async () => {
expect(templId).toBeDefined()
const { name, content } = await client.getTemplateAsync(templId)
expect(name).toBe(templName)
expect(content).toEqual(genericParams.params)
})
it('should delete the template successfully', done => {
expect(templId).toBeDefined()
client.deleteTemplate(templId, (err, { ok } = {}) => {
expect(err).toBeFalsy()
expect(ok).toBe('TEMPLATE_DELETED')
client.getTemplate(templId, (err, result) => {
expect(result).toBeFalsy()
expect(err).toBeDefined()
expect(err.error).toBe('TEMPLATE_NOT_FOUND')
done()
})
})
})
})
})
| test/integration/__tests__/live-api.js | const localtunnel = require('localtunnel')
const http = require('http')
const querystring = require('querystring')
const temp = require('temp')
const fs = require('fs')
const _ = require('lodash')
const { join } = require('path')
const { promisify } = require('util')
const { pipeline: streamPipeline, PassThrough } = require('stream')
const got = require('got')
const pipeline = promisify(streamPipeline)
const intoStream = require('into-stream')
const TransloaditClient = require('../../../src/TransloaditClient')
async function downloadTmpFile (url) {
const { path } = await temp.open('transloadit')
await pipeline(got.stream(url), fs.createWriteStream(path))
return path
}
const authKey = process.env.TRANSLOADIT_KEY
const authSecret = process.env.TRANSLOADIT_SECRET
if (authKey == null || authSecret == null) {
throw new Error('Please specify environment variables TRANSLOADIT_KEY and TRANSLOADIT_SECRET')
}
const startServerAsync = async (handler) => new Promise((resolve, reject) => {
const server = http.createServer(handler)
// Find a port to use
let port = 8000
server.on('error', err => {
if (err.code === 'EADDRINUSE') {
if (++port >= 65535) {
server.close()
reject(new Error('Failed to bind to port'))
}
return server.listen(port, '127.0.0.1')
} else {
return reject(err)
}
})
server.listen(port, '127.0.0.1')
// Once a port has been found and the server is ready, set up the
// localtunnel
return server.on('listening', async () => {
try {
const tunnel = await localtunnel(port)
// console.log('localtunnel', tunnel.url)
tunnel.on('error', console.error)
tunnel.on('close', () => {
// console.log('tunnel closed')
server.close()
})
return resolve({
url: tunnel.url,
close () {
tunnel.close()
},
})
} catch (err) {
if (err != null) {
server.close()
return reject(err)
}
}
})
})
const startServer = (handler, cb) => startServerAsync(handler).then((server) => cb(null, server)).catch(cb)
// https://transloadit.com/demos/importing-files/import-a-file-over-http
const genericImg = 'https://demos.transloadit.com/66/01604e7d0248109df8c7cc0f8daef8/snowflake.jpg'
const sampleSvg = '<?xml version="1.0" standalone="no"?><svg height="100" width="100"><circle cx="50" cy="50" r="40" fill="red" /></svg>'
const resizeOriginalStep = {
robot : '/image/resize',
use : ':original',
result: true,
width : 130,
height: 130,
}
const genericParams = {
params: {
steps: {
import: {
robot: '/http/import',
url : genericImg,
},
resize: {
robot : '/image/resize',
use : 'import',
result: true,
width : 130,
height: 130,
},
},
},
waitForCompletion: true,
}
jest.setTimeout(100000)
describe('API integration', function () {
describe('assembly creation', () => {
it('should create a retrievable assembly on the server', done => {
const client = new TransloaditClient({ authKey, authSecret })
return client.createAssembly(genericParams, (err, result) => {
expect(err).toBeFalsy()
expect(result).not.toHaveProperty('error')
expect(result).toHaveProperty('ok')
expect(result).toHaveProperty('assembly_id') // Since we're using it
const id = result.assembly_id
return client.getAssembly(id, (err, result) => {
expect(err).toBeFalsy()
expect(result).not.toHaveProperty('error')
expect(result).toEqual(expect.objectContaining({
assembly_url: expect.any(String),
ok : expect.any(String),
assembly_id : id,
}))
return done()
})
})
})
it("should signal an error if a file selected for upload doesn't exist", async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
params: {
steps: {
resize: resizeOriginalStep,
},
},
}
client.addFile('original', temp.path({ suffix: '.transloadit.jpg' })) // Non-existing path
const promise = client.createAssemblyAsync(params)
await expect(promise).rejects.toThrow()
await expect(promise).rejects.toThrow(expect.objectContaining({ code: 'ENOENT' }))
})
it('should allow uploading files that do exist', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
params: {
steps: {
resize: resizeOriginalStep,
},
},
}
const path = await downloadTmpFile(genericImg)
client.addFile('original', path)
await client.createAssemblyAsync(params)
})
it('should allow setting fields', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
waitForCompletion: true,
params : {
fields: { myField: 'test' },
steps : { resize: resizeOriginalStep },
},
}
const result = await client.createAssemblyAsync(params)
expect(result.fields.myField).toBe('test')
})
function createStreamFromString (str) {
const rawStream = intoStream(str)
// Workaround for https://github.com/tus/tus-js-client/issues/229
const stream = new PassThrough()
rawStream.pipe(stream)
return stream
}
it('should allow adding a stream', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
waitForCompletion: true,
params : {
steps: {
rasterize: {
robot : '/image/resize',
use : ':original',
format: 'jpg',
},
},
},
}
client.addStream('test', createStreamFromString(sampleSvg))
const result = await client.createAssemblyAsync(params)
expect(result.results.rasterize).toHaveLength(1)
expect(result.results.rasterize[0].name).toBe('test.jpg')
})
async function testUploadProgress (isResumable) {
const client = new TransloaditClient({ authKey, authSecret })
const params = {
isResumable,
params: {
steps: {
resize: resizeOriginalStep,
},
},
}
const path = await downloadTmpFile(genericImg)
client.addFile('original', path)
let progressCalled = false
function onProgress (progress) {
// console.log(progress)
expect(progress.uploadProgress.uploadedBytes).toBeDefined()
progressCalled = true
}
await client.createAssemblyAsync(params, onProgress)
expect(progressCalled).toBe(true)
}
it('should trigger progress callbacks when uploading files, resumable', async () => {
await testUploadProgress(true)
})
it('should trigger progress callbacks when uploading files, nonresumable', async () => {
await testUploadProgress(false)
})
it('should trigger the callback when waitForCompletion is false', done => {
const client = new TransloaditClient({ authKey, authSecret })
const params = Object.assign({}, genericParams, { waitForCompletion: false })
return client.createAssembly(params, (err, result) => {
expect(err).toBeFalsy()
expect(result).not.toHaveProperty('error')
expect(result).toHaveProperty('ok')
return done()
})
})
it('should exit fast when assembly has failed', async () => {
// An old bug caused it to continuously retry until timeout when errors such as INVALID_FILE_META_DATA
const client = new TransloaditClient({ authKey, authSecret })
const opts = {
params: {
steps: {
resize: resizeOriginalStep,
},
},
waitForCompletion: true,
}
client.addFile('file', join(__dirname, './fixtures/zerobytes.jpg'))
const promise = client.createAssemblyAsync(opts)
await promise.catch((err) => {
expect(err).toMatchObject({ error: 'INVALID_FILE_META_DATA', assembly_id: expect.any(String) })
})
await expect(promise).rejects.toThrow(Error)
}, 7000)
})
describe('assembly cancelation', () => {
it('should stop the assembly from reaching completion', async () => {
const client = new TransloaditClient({ authKey, authSecret })
// We need to ensure that the assembly doesn't complete before it can be
// canceled, so we start an http server for the assembly to import from,
// and delay transmission of data until we've already sent the cancel
// request
// Async book-keeping for delaying the response
let sendServerResponse
const promise = new Promise((resolve) => {
sendServerResponse = resolve
})
const handler = async (req, res) => {
// console.log('handler', req.url)
expect(req.url).toBe('/')
await promise
res.setHeader('Content-type', 'image/jpeg')
res.writeHead(200)
got.stream(genericImg).pipe(res)
}
const server = await startServerAsync(handler)
try {
const params = {
params: {
steps: {
import: {
robot: '/http/import',
url : server.url,
},
resize: {
robot : '/image/resize',
use : 'import',
result: true,
width : 130,
height: 130,
},
},
},
}
// Finally send the createAssembly request
const { assembly_id: id } = await client.createAssemblyAsync(params)
// Now delete it
const resp = await client.deleteAssemblyAsync(id)
// Allow the upload to finish
sendServerResponse()
expect(resp.ok).toBe('ASSEMBLY_CANCELED')
// Successful cancel requests get ASSEMBLY_CANCELED even when it
// completed, so we now request the assembly status to check the
// *actual* status.
const resp2 = await client.getAssemblyAsync(id)
expect(resp2.ok).toBe('ASSEMBLY_CANCELED')
} finally {
server.close()
}
})
})
describe('replaying assemblies', () => {
it('should replay an assembly after it has completed', done => {
const client = new TransloaditClient({ authKey, authSecret })
client.createAssembly(genericParams, (err, { assembly_id: assemblyId } = {}) => {
expect(err).toBeFalsy()
const originalId = assemblyId
// ensure that the assembly has completed
const ensureCompletion = cb =>
client.getAssembly(originalId, (err, result) => {
expect(err).toBeFalsy()
const ok = result.ok
if (ok === 'ASSEMBLY_UPLOADING' || ok === 'ASSEMBLY_EXECUTING') {
setTimeout(() => ensureCompletion(cb), 1000)
} else {
cb()
}
})
// Start an asynchronous loop
ensureCompletion(() =>
client.replayAssembly({ assembly_id: originalId }, (err, { ok } = {}) => {
expect(err).toBeFalsy()
expect(ok).toBe('ASSEMBLY_REPLAYING')
done()
}),
)
})
})
})
describe('assembly list retrieval', () => {
it('should retrieve a list of assemblies', async () => {
const client = new TransloaditClient({ authKey, authSecret })
const result = await client.listAssembliesAsync({})
expect(result).toEqual(expect.objectContaining({ count: expect.any(Number), items: expect.any(Array) }))
})
it('should be able to handle pagination with a stream', done => {
const client = new TransloaditClient({ authKey, authSecret })
const assemblies = client.streamAssemblies({ pagesize: 2 })
let n = 0
let isDone = false
assemblies.on('readable', () => {
const assembly = assemblies.read()
if (isDone) return
if (assembly == null) {
return done()
}
if (n === 5) {
isDone = true
return done()
}
expect(assembly).toHaveProperty('id')
n++
})
})
})
describe('assembly notification', () => {
// helper function
const streamToString = (stream, cb) => {
const chunks = []
stream.on('data', chunk => chunks.push(chunk))
stream.on('error', err => cb(err))
stream.on('end', () => cb(null, chunks.join('')))
}
const testCase = (desc, endBehavior) =>
it(desc, done => {
const client = new TransloaditClient({ authKey, authSecret })
// listens for notifications
const handler = (req, res) => {
expect(req.url).toBe('/')
expect(req.method).toBe('POST')
streamToString(req, (err, body) => {
if (err) {
console.error(err)
}
const result = JSON.parse(querystring.parse(body).transloadit)
expect(result).toHaveProperty('ok')
res.writeHead(200)
res.end()
if (result.ok !== 'ASSEMBLY_COMPLETED') return
endBehavior(client, result.assembly_id, done)
})
}
startServer(handler, (err, server) => {
expect(err).toBeFalsy()
const params = { params: _.extend({}, genericParams.params, { notify_url: server.url }) }
client.createAssembly(params, (err, result) => expect(err).toBeFalsy())
})
})
testCase('should send a notification upon assembly completion', (client, id, done) => done())
let notificationsRecvd = 0
testCase('should replay the notification when requested', (client, id, done) => {
if (notificationsRecvd++ === 0) {
setTimeout(() => {
client.replayAssemblyNotification({ assembly_id: id }, err => expect(err).toBeFalsy())
}, 2000)
} else {
done()
}
})
})
describe('template methods', () => {
// can contain only lowercase latin letters, numbers, and dashes.
const templName = `node-sdk-test-${new Date().toISOString().toLocaleLowerCase('en-US').replace(/[^0-9a-z-]/g, '-')}`
let templId = null
const client = new TransloaditClient({ authKey, authSecret })
it('should allow creating a template', async () => {
const { id } = await client.createTemplateAsync({ name: templName, template: genericParams.params })
templId = id
})
it("should be able to fetch a template's definition", async () => {
expect(templId).toBeDefined()
const { name, content } = await client.getTemplateAsync(templId)
expect(name).toBe(templName)
expect(content).toEqual(genericParams.params)
})
it('should delete the template successfully', done => {
expect(templId).toBeDefined()
client.deleteTemplate(templId, (err, { ok } = {}) => {
expect(err).toBeFalsy()
expect(ok).toBe('TEMPLATE_DELETED')
client.getTemplate(templId, (err, result) => {
expect(result).toBeFalsy()
expect(err).toBeDefined()
expect(err.error).toBe('TEMPLATE_NOT_FOUND')
done()
})
})
})
})
})
| fix test that caused jest to hang
it was not closing the server
also make the test use async await
| test/integration/__tests__/live-api.js | fix test that caused jest to hang | <ide><path>est/integration/__tests__/live-api.js
<ide> const querystring = require('querystring')
<ide> const temp = require('temp')
<ide> const fs = require('fs')
<del>const _ = require('lodash')
<ide> const { join } = require('path')
<ide> const { promisify } = require('util')
<ide> const { pipeline: streamPipeline, PassThrough } = require('stream')
<ide>
<ide> // Once a port has been found and the server is ready, setup the
<ide> // localtunnel
<del> return server.on('listening', async () => {
<add> server.on('listening', async () => {
<ide> try {
<ide> const tunnel = await localtunnel(port)
<ide> // console.log('localtunnel', tunnel.url)
<ide> }
<ide> })
<ide> })
<del>
<del>const startServer = (handler, cb) => startServerAsync(handler).then((server) => cb(null, server)).catch(cb)
<ide>
<ide> // https://transloadit.com/demos/importing-files/import-a-file-over-http
<ide> const genericImg = 'https://demos.transloadit.com/66/01604e7d0248109df8c7cc0f8daef8/snowflake.jpg'
<ide> })
<ide>
<ide> describe('assembly notification', () => {
<add> let server
<add> afterEach(() => {
<add> console.log('closing server')
<add> if (server) server.close()
<add> })
<add>
<ide> // helper function
<del> const streamToString = (stream, cb) => {
<add> const streamToString = (stream) => new Promise((resolve, reject) => {
<ide> const chunks = []
<ide> stream.on('data', chunk => chunks.push(chunk))
<del> stream.on('error', err => cb(err))
<del> stream.on('end', () => cb(null, chunks.join('')))
<add> stream.on('error', err => reject(err))
<add> stream.on('end', () => resolve(chunks.join('')))
<add> })
<add>
<add> const runNotificationTest = async (onNotification, onError) => {
<add> const client = new TransloaditClient({ authKey, authSecret })
<add>
<add> // listens for notifications
<add> const onNotificationRequest = async (req, res) => {
<add> try {
<add> expect(req.url).toBe('/')
<add> expect(req.method).toBe('POST')
<add> const body = await streamToString(req)
<add> const result = JSON.parse(querystring.parse(body).transloadit)
<add> expect(result).toHaveProperty('ok')
<add> if (result.ok !== 'ASSEMBLY_COMPLETED') return onError(new Error(`result.ok was ${result.ok}`))
<add>
<add> res.writeHead(200)
<add> res.end()
<add>
<add> onNotification({ client, assemblyId: result.assembly_id })
<add> } catch (err) {
<add> onError(err)
<add> }
<add> }
<add>
<add> try {
<add> server = await startServerAsync(onNotificationRequest)
<add> await client.createAssemblyAsync({ params: { ...genericParams.params, notify_url: server.url } })
<add> } catch (err) {
<add> onError(err)
<add> }
<ide> }
<ide>
<del> const testCase = (desc, endBehavior) =>
<del> it(desc, done => {
<del> const client = new TransloaditClient({ authKey, authSecret })
<del>
<del> // listens for notifications
<del> const handler = (req, res) => {
<del> expect(req.url).toBe('/')
<del>
<del> expect(req.method).toBe('POST')
<del> streamToString(req, (err, body) => {
<del> if (err) {
<del> console.error(err)
<del> }
<del> const result = JSON.parse(querystring.parse(body).transloadit)
<del> expect(result).toHaveProperty('ok')
<del> res.writeHead(200)
<del> res.end()
<del> if (result.ok !== 'ASSEMBLY_COMPLETED') return
<del> endBehavior(client, result.assembly_id, done)
<del> })
<add> it('should send a notification upon assembly completion', async () => {
<add> await new Promise((resolve, reject) => runNotificationTest(resolve, reject))
<add> })
<add>
<add> it('should replay the notification when requested', (done) => {
<add> let notificationsRecvd = false
<add>
<add> const onNotification = async ({ client, assemblyId }) => {
<add> if (notificationsRecvd) {
<add> // If we quit immediately, things will not get cleaned up and jest will hang
<add> await new Promise((resolve) => setTimeout(resolve, 100))
<add> done()
<add> return
<ide> }
<del>
<del> startServer(handler, (err, server) => {
<del> expect(err).toBeFalsy()
<del>
<del> const params = { params: _.extend({}, genericParams.params, { notify_url: server.url }) }
<del>
<del> client.createAssembly(params, (err, result) => expect(err).toBeFalsy())
<del> })
<del> })
<del>
<del> testCase('should send a notification upon assembly completion', (client, id, done) => done())
<del>
<del> let notificationsRecvd = 0
<del> testCase('should replay the notification when requested', (client, id, done) => {
<del> if (notificationsRecvd++ === 0) {
<del> setTimeout(() => {
<del> client.replayAssemblyNotification({ assembly_id: id }, err => expect(err).toBeFalsy())
<del> }, 2000)
<del> } else {
<del> done()
<del> }
<add> notificationsRecvd = true
<add>
<add> try {
<add> await new Promise((resolve) => setTimeout(resolve, 1000))
<add> await client.replayAssemblyNotificationAsync({ assembly_id: assemblyId })
<add> } catch (err) {
<add> done(err)
<add> }
<add> }
<add>
<add> runNotificationTest(onNotification, (err) => done(err))
<ide> })
<ide> })
<ide> |
|
Java | apache-2.0 | 0eadd301817ecbf76b3293e660ebd36e11d02b77 | 0 | GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit | // Copyright (C) 2021 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.query.change;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allowCapability;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.block;
import static com.google.gerrit.common.data.GlobalCapability.QUERY_LIMIT;
import static com.google.gerrit.server.group.SystemGroupBackend.REGISTERED_USERS;
import com.google.gerrit.acceptance.UseClockStep;
import com.google.gerrit.entities.Permission;
import com.google.gerrit.entities.Project;
import com.google.gerrit.index.testing.AbstractFakeIndex;
import com.google.gerrit.server.config.AllProjectsName;
import com.google.gerrit.server.index.change.ChangeIndexCollection;
import com.google.gerrit.testing.InMemoryModule;
import com.google.gerrit.testing.InMemoryRepositoryManager;
import com.google.gerrit.testing.InMemoryRepositoryManager.Repo;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import java.util.List;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.Config;
import org.junit.Test;
/**
* Test against {@link com.google.gerrit.index.testing.AbstractFakeIndex}. This test might seem
* obsolete, but it makes sure that the fake index implementation used in tests gives the same
* results as production indices.
*/
public abstract class FakeQueryChangesTest extends AbstractQueryChangesTest {
@Inject private ChangeIndexCollection changeIndexCollection;
@Inject protected AllProjectsName allProjects;
@Override
protected Injector createInjector() {
Config fakeConfig = new Config(config);
InMemoryModule.setDefaults(fakeConfig);
fakeConfig.setString("index", null, "type", "fake");
return Guice.createInjector(new InMemoryModule(fakeConfig));
}
@Test
@UseClockStep
public void stopQueryIfNoMoreResults() throws Exception {
// create 2 visible changes
TestRepository<InMemoryRepositoryManager.Repo> testRepo = createProject("repo");
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
// create 2 invisible changes
TestRepository<Repo> hiddenProject = createProject("hiddenProject");
insert(hiddenProject, newChange(hiddenProject));
insert(hiddenProject, newChange(hiddenProject));
projectOperations
.project(Project.nameKey("hiddenProject"))
.forUpdate()
.add(block(Permission.READ).ref("refs/*").group(REGISTERED_USERS))
.update();
AbstractFakeIndex<?, ?, ?> idx =
(AbstractFakeIndex<?, ?, ?>) changeIndexCollection.getSearchIndex();
newQuery("status:new").withLimit(5).get();
// Since the limit of the query (i.e. 5) is more than the total number of changes (i.e. 4),
// only 1 index search is expected.
assertThat(idx.getQueryCount()).isEqualTo(1);
}
@Test
@UseClockStep
public void noLimitQueryPaginates() throws Exception {
TestRepository<InMemoryRepositoryManager.Repo> testRepo = createProject("repo");
// create 4 changes
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
// Set queryLimit to 2
projectOperations
.project(allProjects)
.forUpdate()
.add(allowCapability(QUERY_LIMIT).group(REGISTERED_USERS).range(0, 2))
.update();
AbstractFakeIndex<?, ?, ?> idx =
(AbstractFakeIndex<?, ?, ?>) changeIndexCollection.getSearchIndex();
// 2 index searches are expected. The first index search will run with size 3 (i.e.
// the configured query-limit+1), and then we will paginate to get the remaining
// changes with the second index search.
newQuery("status:new").withNoLimit().get();
assertThat(idx.getQueryCount()).isEqualTo(2);
}
@Test
@UseClockStep
public void internalQueriesPaginate() throws Exception {
// create 4 changes
TestRepository<InMemoryRepositoryManager.Repo> testRepo = createProject("repo");
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
// Set queryLimit to 2
projectOperations
.project(allProjects)
.forUpdate()
.add(allowCapability(QUERY_LIMIT).group(REGISTERED_USERS).range(0, 2))
.update();
AbstractFakeIndex<?, ?, ?> idx =
(AbstractFakeIndex<?, ?, ?>) changeIndexCollection.getSearchIndex();
// 2 index searches are expected. The first index search will run with size 3 (i.e.
// the configured query-limit+1), and then we will paginate to get the remaining
// changes with the second index search.
List<ChangeData> matches = queryProvider.get().query(queryBuilder.parse("status:new"));
assertThat(matches).hasSize(4);
assertThat(idx.getQueryCount()).isEqualTo(2);
}
}
| javatests/com/google/gerrit/server/query/change/FakeQueryChangesTest.java | // Copyright (C) 2021 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.query.change;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allowCapability;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.block;
import static com.google.gerrit.common.data.GlobalCapability.QUERY_LIMIT;
import static com.google.gerrit.server.group.SystemGroupBackend.REGISTERED_USERS;
import com.google.gerrit.acceptance.UseClockStep;
import com.google.gerrit.entities.Permission;
import com.google.gerrit.entities.Project;
import com.google.gerrit.index.testing.AbstractFakeIndex;
import com.google.gerrit.server.config.AllProjectsName;
import com.google.gerrit.server.index.change.ChangeIndexCollection;
import com.google.gerrit.testing.InMemoryModule;
import com.google.gerrit.testing.InMemoryRepositoryManager;
import com.google.gerrit.testing.InMemoryRepositoryManager.Repo;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import java.util.List;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.Config;
import org.junit.Test;
/**
* Test against {@link com.google.gerrit.index.testing.AbstractFakeIndex}. This test might seem
* obsolete, but it makes sure that the fake index implementation used in tests gives the same
* results as production indices.
*/
public abstract class FakeQueryChangesTest extends AbstractQueryChangesTest {
@Inject private ChangeIndexCollection changeIndexCollection;
@Inject protected AllProjectsName allProjects;
@Override
protected Injector createInjector() {
Config fakeConfig = new Config(config);
InMemoryModule.setDefaults(fakeConfig);
fakeConfig.setString("index", null, "type", "fake");
return Guice.createInjector(new InMemoryModule(fakeConfig));
}
@Test
@UseClockStep
public void stopQueryIfNoMoreResults() throws Exception {
// create 2 visible changes
TestRepository<InMemoryRepositoryManager.Repo> testRepo = createProject("repo");
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
// create 2 invisible changes
TestRepository<Repo> hiddenProject = createProject("hiddenProject");
insert(hiddenProject, newChange(hiddenProject));
insert(hiddenProject, newChange(hiddenProject));
projectOperations
.project(Project.nameKey("hiddenProject"))
.forUpdate()
.add(block(Permission.READ).ref("refs/*").group(REGISTERED_USERS))
.update();
AbstractFakeIndex<?, ?, ?> idx =
(AbstractFakeIndex<?, ?, ?>) changeIndexCollection.getSearchIndex();
newQuery("status:new").withLimit(5).get();
// Since the limit of the query (i.e. 5) is more than the total number of changes (i.e. 4),
// only 1 index search is expected.
assertThat(idx.getQueryCount()).isEqualTo(1);
}
@Test
@UseClockStep
public void noLimitQueryPaginates() throws Exception {
TestRepository<InMemoryRepositoryManager.Repo> testRepo = createProject("repo");
// create 4 changes
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
// Set queryLimit to 2
projectOperations
.project(allProjects)
.forUpdate()
.add(allowCapability(QUERY_LIMIT).group(REGISTERED_USERS).range(0, 2))
.update();
AbstractFakeIndex<?, ?, ?> idx =
(AbstractFakeIndex<?, ?, ?>) changeIndexCollection.getSearchIndex();
// 2 index searches are expected. The first index search will run with size 3 (i.e.
// the configured query-limit+1), and then we will paginate to get the remaining
// changes with the second index search.
newQuery("status:new").withNoLimit().get();
assertThat(idx.getQueryCount()).isEqualTo(2);
}
@Test
@UseClockStep
@SuppressWarnings("unchecked")
public void internalQueriesPaginate() throws Exception {
// create 4 changes
TestRepository<InMemoryRepositoryManager.Repo> testRepo = createProject("repo");
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
insert(testRepo, newChange(testRepo));
// Set queryLimit to 2
projectOperations
.project(allProjects)
.forUpdate()
.add(allowCapability(QUERY_LIMIT).group(REGISTERED_USERS).range(0, 2))
.update();
AbstractFakeIndex<?, ?, ?> idx =
(AbstractFakeIndex<?, ?, ?>) changeIndexCollection.getSearchIndex();
// 2 index searches are expected. The first index search will run with size 3 (i.e.
// the configured query-limit+1), and then we will paginate to get the remaining
// changes with the second index search.
List<ChangeData> matches = queryProvider.get().query(queryBuilder.parse("status:new"));
assertThat(matches).hasSize(4);
assertThat(idx.getQueryCount()).isEqualTo(2);
}
}
| Remove unnecessary @SuppressWarnings annotation
After the raw type warning has been fixed by change Iba5125df5 this
annotation is no longer needed.
Release-Notes: skip
Signed-off-by: Edwin Kempin <[email protected]>
Change-Id: Ib6869dcb7ba569d544a793e8fe4683d5c13aabd4
| javatests/com/google/gerrit/server/query/change/FakeQueryChangesTest.java | Remove unnecessary @SuppressWarnings annotation | <ide><path>avatests/com/google/gerrit/server/query/change/FakeQueryChangesTest.java
<ide>
<ide> @Test
<ide> @UseClockStep
<del> @SuppressWarnings("unchecked")
<ide> public void internalQueriesPaginate() throws Exception {
<ide> // create 4 changes
<ide> TestRepository<InMemoryRepositoryManager.Repo> testRepo = createProject("repo"); |
|
JavaScript | mit | 9847edba156e0acd0aa179f22779cdddcf864e86 | 0 | hellofresh/janus-dashboard,hellofresh/janus-dashboard | import createHistory from 'history/createBrowserHistory';
import R from 'ramda';
import client from '../api';
import endpointSchema from '../../configurations/apiSchema'; // @TODO: REMOVE
import {
DELETE_ENDPOINT_START,
DELETE_ENDPOINT_SUCCESS,
FETCH_ENDPOINT_START,
FETCH_ENDPOINT_SUCCESS,
FETCH_ENDPOINT_SCHEMA_START,
FETCH_ENDPOINT_SCHEMA_SUCCESS,
FILL_SELECTED_PLUGINS,
SAVE_ENDPOINT_START,
SAVE_ENDPOINT_SUCCESS,
SET_DEFAULT_ENDPOINT,
EXCLUDE_PLUGIN,
SELECT_PLUGIN,
RESET_ENDPOINT,
WILL_CLONE,
} from '../constants';
import {
clearConfirmationModal,
closeConfirmationModal,
fetchEndpoints,
openConfirmationModal,
openResponseModal,
showToaster,
// closeResponseModal, // @TODO: will need thi a bit later
} from './index';
import history from '../configuration/history';
export const deleteEndpointRequest = () => ({
type: DELETE_ENDPOINT_START,
});
export const deleteEndpointSuccess = () => ({
type: DELETE_ENDPOINT_SUCCESS,
});
export const getEndpointRequest = () => ({
type: FETCH_ENDPOINT_START,
});
export const getEndpointSuccess = (api, response) => ({
type: FETCH_ENDPOINT_SUCCESS,
payload: {
api,
response,
},
});
export const getEndpointSchemaRequest = () => ({
type: FETCH_ENDPOINT_SCHEMA_START,
});
export const getEndpointSchemaSuccess = api => ({
type: FETCH_ENDPOINT_SCHEMA_SUCCESS,
payload: api,
});
export const saveEndpointRequest = api => ({
type: SAVE_ENDPOINT_START,
payload: api,
});
export const saveEndpointSuccess = () => ({
type: SAVE_ENDPOINT_SUCCESS,
});
export const selectPlugin = pluginName/*: string*/ => ({
type: SELECT_PLUGIN,
payload: pluginName,
});
export const excludePlugin = pluginName/*: string*/ => ({
type: EXCLUDE_PLUGIN,
payload: pluginName,
});
export const resetEndpoint = () => ({
type: RESET_ENDPOINT,
});
export const willClone = data => {
const preparedPlugins = data.plugins.map(plugin => {
if (plugin.name === 'rate_limit') {
const pluginFromSchema = endpointSchema.plugins.filter(item => item.name === plugin.name)[0];
const { units } = pluginFromSchema.config.limit;
const policyFromSchema = pluginFromSchema.config.policy;
const getUpdatedLimit = limit => {
if (R.type(limit) === 'Object') {
return {
value: limit.value,
unit: limit.unit,
units,
};
}
const arr = limit.split('-');
const valueOfLimit = arr[0]*1;
const valueOfUnit = arr[1];
return {
value: valueOfLimit,
unit: valueOfUnit,
units,
};
};
// set the path for the lens
const lens = R.lensPath(['config', 'limit']);
const lens2 = R.lensPath(['config', 'policy']);
const lens3 = R.lensPath(['config', 'policy', 'selected']);
// substitude the plugin.config.limit
const updatedPlugin = R.set(lens, getUpdatedLimit(plugin.config.limit), plugin);
const pluginWithPolicyFromSchema = R.set(lens2, policyFromSchema , updatedPlugin);
const getSelectedPolicy = policy => {
if (R.type(policy) === 'Object') {
return policy.selected;
}
return policy;
};
return R.set(lens3, getSelectedPolicy(plugin.config.policy), pluginWithPolicyFromSchema);
}
if (plugin.name === 'request_transformer') {
const transformHeadersToArray = obj => R.toPairs(obj)
.reduce((acc, item) => {
const header = {
key: item[0],
value: item[1],
};
acc.push(header);
return acc;
}, []);
const configWithTransformedHeaders = R.toPairs(plugin.config)
.reduce((acc, item) => {
const transformedHeaders = transformHeadersToArray(item[1].headers);
acc[item[0]] = {
headers: transformedHeaders,
querystring: item[1].querystring,
};
return acc;
}, {});
// set path for lens and substitude config in plugin:
const lens = R.lensPath(['config']);
const updatedPlugin = R.set(lens, configWithTransformedHeaders, plugin);
return updatedPlugin;
}
return plugin;
});
const lens = R.lensPath(['plugins']);
const preparedApi = R.set(lens, preparedPlugins, data);
return {
type: WILL_CLONE,
payload: {
api: preparedApi,
response: data,
},
};
};
export const fillSelected = selectedPlugins => ({
type: FILL_SELECTED_PLUGINS,
payload: selectedPlugins,
});
export const fetchEndpoint = pathname => async dispatch => {
dispatch(getEndpointRequest());
try {
const response = await client.get(`apis${pathname}`);
const preparedPlugins = response.data.plugins.map(plugin => {
if (plugin.name === 'rate_limit') {
const pluginFromSchema = endpointSchema.plugins.filter(item => item.name === plugin.name)[0];
const { units } = pluginFromSchema.config.limit;
const policyFromSchema = pluginFromSchema.config.policy;
const arr = plugin.config.limit.split('-');
const valueOfLimit = arr[0]*1;
const valueOfUnit = arr[1];
// @TODO: policy should be also an array like in schema;
const updatedLimit = {
value: valueOfLimit,
unit: valueOfUnit,
units,
};
// set the path for the lens
const lens = R.lensPath(['config', 'limit']);
const lens2 = R.lensPath(['config', 'policy']);
const lens3 = R.lensPath(['config', 'policy', 'selected']);
// substitude the plugin.config.limit
const updatedPlugin = R.set(lens, updatedLimit, plugin);
const pluginWithPolicyFromSchema = R.set(lens2, policyFromSchema , updatedPlugin);
return R.set(lens3, plugin.config.policy, pluginWithPolicyFromSchema);
}
if (plugin.name === 'request_transformer') {
const transformHeadersToArray = obj => R.toPairs(obj)
.reduce((acc, item) => {
const header = {
key: item[0],
value: item[1],
};
acc.push(header);
return acc;
}, []);
const configWithTransformedHeaders = R.toPairs(plugin.config)
.reduce((acc, item) => {
const transformedHeaders = transformHeadersToArray(item[1].headers);
acc[item[0]] = {
headers: transformedHeaders,
querystring: item[1].querystring,
};
return acc;
}, {});
// set path for lens and substitude config in plugin:
const lens = R.lensPath(['config']);
const updatedPlugin = R.set(lens, configWithTransformedHeaders, plugin);
return updatedPlugin;
}
return plugin;
});
const lens = R.lensPath(['plugins']);
const preparedApi = R.set(lens, preparedPlugins, response.data);
dispatch(getEndpointSuccess(preparedApi, response.data));
} catch (error) {
console.log('FETCH_ENDPOINT_ERROR', 'Infernal server error', error);
}
};
export const setInitialEndpoint = endpointSchema => ({
type: SET_DEFAULT_ENDPOINT,
payload: endpointSchema,
});
export const fetchEndpointSchema = flag => async (dispatch) => {
dispatch(getEndpointSchemaRequest());
try {
// Get all server names
const response = await client.get('/oauth/servers');
const serverNames = response.data.reduce((acc, item) => {
acc.push(item.name);
return acc;
}, []);
const lensOAuth = R.lensPath(['config', 'server_names']);
const updatePlugin = (lens, serverNames, list) => pluginName => {
const comparator = string => el => el.name === string;
const getPluginIndex = comparator => list => list.findIndex(comparator);
return R.adjust(
R.set(lens, serverNames),
getPluginIndex(comparator(pluginName))(list),
list,
);
};
const pluginsFromApiSchemaWithUpdatedOAuthPlugin = updatePlugin(
lensOAuth,
serverNames,
endpointSchema.plugins,
)('oauth2');
const lens = R.lensPath(['plugins']);
const endpointSchemaWithUpdatedOAuthPlugin = R.set(
lens,
pluginsFromApiSchemaWithUpdatedOAuthPlugin,
endpointSchema
);
flag && dispatch(setInitialEndpoint(endpointSchemaWithUpdatedOAuthPlugin));
dispatch(getEndpointSchemaSuccess(endpointSchemaWithUpdatedOAuthPlugin)); // @TODO: REMOVE when endpoint will be ready
} catch (error) {
console.log('FETCH_SERVER_NAMES_ERROR', error);
}
};
export const preparePlugins = api => api.plugins.map(plugin => {
if (plugin.name === 'rate_limit') {
const { limit, policy } = plugin.config;
const { value, unit } = limit;
const concatenation = `${value}-${unit}`;
// set the path for the lens
const lens = R.lensPath(['config', 'limit']);
const lens2 = R.lensPath(['config', 'policy']);
// substitude the plugin.config.limit
const updatedPlugin = R.set(lens, concatenation, plugin);
return R.set(lens2, policy.selected, updatedPlugin);
}
if (plugin.name === 'oauth2') {
return R.dissocPath(['config', 'server_names'], plugin);
}
if (plugin.name === 'request_transformer') {
// get all options names
const options = Object.keys(plugin.config);
// convert all values of plugin's config to array of objects
// so then we will be able to map through them:
const config = R.values(plugin.config);
const allTransformedHeaders = config.map((item, index) => {
// headers comes as an array of objects:
/**
* @example #1
*
* add: {
* header: [
* {someKey: 'someValue'},
* {someAnotherKey: 'someAnotherValue'},
* ]
* }
*/
const headers = item.headers;
// we will fill this arrays with keys and values respectively
let keys = [];
let values = [];
// fill key/values arrays
headers.map(item => {
const arr = R.values(item);
keys.push(arr[0]);
values.push(arr[1]);
});
// and now we are creating object that should be placed instead of
// array of the objects from example #1
/**
* @example #2
*
* add: {
* headers: {
* someKey: 'someValue',
* someAnotherKey: 'someAnotherValue',
* }
* }
*/
const transformedHeaders = R.zipObj(keys, values);
return transformedHeaders;
});
// step by step we updating plugins config:
const updatedPlugin = allTransformedHeaders.reduce((acc, item, index) => {
const lens = R.lensPath(['config', options[index], 'headers']);
return R.set(lens, item, acc);
}, plugin);
return updatedPlugin;
}
return plugin;
});
export const saveEndpoint = (pathname, api) => dispatch => {
dispatch(openConfirmationModal('save', () => confirmedSaveEndpoint(dispatch, pathname, api)));
};
export const updateEndpoint = (pathname, api) => dispatch => {
dispatch(openConfirmationModal('update', () => confirmedUpdateEndpoint(dispatch, pathname, api), api.name));
};
export const deleteEndpoint = apiName => dispatch => {
dispatch(openConfirmationModal('delete', () => confirmedDeleteEndpoint(dispatch, apiName), apiName));
};
export const confirmedSaveEndpoint = (dispatch, pathname, api) => {
dispatch(saveEndpointRequest(api));
const preparedPlugins = preparePlugins(api);
// substitude updated list of plugins
const preparedApi = R.set(R.lensPath(['plugins']), preparedPlugins, api);
try {
const response = client.post('apis', preparedApi);
dispatch(saveEndpointSuccess());
dispatch(closeConfirmationModal());
dispatch(fetchEndpoints());
history.push('/');
dispatch(showToaster());
} catch (error) {
if (error.response) {
dispatch(openResponseModal({
status: error.response.status,
statusText: error.response.statusText,
message: error.response.data,
}));
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
// More info about error handling in Axios: https://github.com/mzabriskie/axios#handling-errors
// eslint-disable-next-line
console.error(error.response.data);
} else if (error.request) {
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
// eslint-disable-next-line
console.log(error.request);
} else {
// Something happened in setting up the request that triggered an Error
// eslint-disable-next-line
console.log('Error', error.message);
}
}
};
export const confirmedUpdateEndpoint = (dispatch, pathname, api) => {
dispatch(saveEndpointRequest());
const preparedPlugins = preparePlugins(api);
// substitude updated list of plugins
const preparedApi = R.set(R.lensPath(['plugins']), preparedPlugins, api);
return client.put(`apis${pathname}`, preparedApi)
.then((response) => {
dispatch(saveEndpointSuccess());
dispatch(closeConfirmationModal());
dispatch(showToaster());
})
.catch((error) => {
if (error.response) {
dispatch(openResponseModal({
status: error.response.status,
statusText: error.response.statusText,
message: error.response.data,
}));
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
// More info about error handling in Axios: https://github.com/mzabriskie/axios#handling-errors
// eslint-disable-next-line
console.error(error.response.data);
} else if (error.request) {
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
// eslint-disable-next-line
console.log(error.request);
} else {
// Something happened in setting up the request that triggered an Error
// eslint-disable-next-line
console.log('Error', error.message);
}
});
};
export const confirmedDeleteEndpoint = async (dispatch, apiName) => {
dispatch(deleteEndpointRequest());
try {
const response = await client.delete(`apis/${apiName}`);
dispatch(deleteEndpointSuccess());
dispatch(closeConfirmationModal());
dispatch(fetchEndpoints());
history.push('/');
dispatch(showToaster());
} catch (error) {
dispatch(openResponseModal({
status: error.response.status,
statusText: error.response.statusText,
message: error.response.data.error,
}));
}
};
| src/store/actions/api.actions.js | import createHistory from 'history/createBrowserHistory';
import R from 'ramda';
import client from '../api';
import endpointSchema from '../../configurations/apiSchema'; // @TODO: REMOVE
import {
DELETE_ENDPOINT_START,
DELETE_ENDPOINT_SUCCESS,
FETCH_ENDPOINT_START,
FETCH_ENDPOINT_SUCCESS,
FETCH_ENDPOINT_SCHEMA_START,
FETCH_ENDPOINT_SCHEMA_SUCCESS,
FILL_SELECTED_PLUGINS,
SAVE_ENDPOINT_START,
SAVE_ENDPOINT_SUCCESS,
SET_DEFAULT_ENDPOINT,
EXCLUDE_PLUGIN,
SELECT_PLUGIN,
RESET_ENDPOINT,
WILL_CLONE,
} from '../constants';
import {
clearConfirmationModal,
closeConfirmationModal,
fetchEndpoints,
openConfirmationModal,
openResponseModal,
showToaster,
// closeResponseModal, // @TODO: will need thi a bit later
} from './index';
import history from '../configuration/history';
export const deleteEndpointRequest = () => ({
type: DELETE_ENDPOINT_START,
});
export const deleteEndpointSuccess = () => ({
type: DELETE_ENDPOINT_SUCCESS,
});
export const getEndpointRequest = () => ({
type: FETCH_ENDPOINT_START,
});
export const getEndpointSuccess = (api, response) => ({
type: FETCH_ENDPOINT_SUCCESS,
payload: {
api,
response,
},
});
export const getEndpointSchemaRequest = () => ({
type: FETCH_ENDPOINT_SCHEMA_START,
});
export const getEndpointSchemaSuccess = api => ({
type: FETCH_ENDPOINT_SCHEMA_SUCCESS,
payload: api,
});
export const saveEndpointRequest = api => ({
type: SAVE_ENDPOINT_START,
payload: api,
});
export const saveEndpointSuccess = () => ({
type: SAVE_ENDPOINT_SUCCESS,
});
export const selectPlugin = pluginName/*: string*/ => ({
type: SELECT_PLUGIN,
payload: pluginName,
});
export const excludePlugin = pluginName/*: string*/ => ({
type: EXCLUDE_PLUGIN,
payload: pluginName,
});
export const resetEndpoint = () => ({
type: RESET_ENDPOINT,
});
export const willClone = data => {
const preparedPlugins = data.plugins.map(plugin => {
if (plugin.name === 'rate_limit') {
const pluginFromSchema = endpointSchema.plugins.filter(item => item.name === plugin.name)[0];
const { units } = pluginFromSchema.config.limit;
const policyFromSchema = pluginFromSchema.config.policy;
const getUpdatedLimit = limit => {
if (R.type(limit) === 'Object') {
return {
value: limit.value,
unit: limit.unit,
units,
};
}
const arr = limit.split('-');
const valueOfLimit = arr[0]*1;
const valueOfUnit = arr[1];
return {
value: valueOfLimit,
unit: valueOfUnit,
units,
};
};
// set the path for the lens
const lens = R.lensPath(['config', 'limit']);
const lens2 = R.lensPath(['config', 'policy']);
const lens3 = R.lensPath(['config', 'policy', 'selected']);
// substitude the plugin.config.limit
const updatedPlugin = R.set(lens, getUpdatedLimit(plugin.config.limit), plugin);
const pluginWithPolicyFromSchema = R.set(lens2, policyFromSchema , updatedPlugin);
const getSelectedPolicy = policy => {
if (R.type(policy) === 'Object') {
return policy.selected;
}
return policy;
};
return R.set(lens3, getSelectedPolicy(plugin.config.policy), pluginWithPolicyFromSchema);
}
if (plugin.name === 'request_transformer') {
const transformHeadersToArray = obj => R.toPairs(obj)
.reduce((acc, item) => {
const header = {
key: item[0],
value: item[1],
};
acc.push(header);
return acc;
}, []);
const configWithTransformedHeaders = R.toPairs(plugin.config)
.reduce((acc, item) => {
const transformedHeaders = transformHeadersToArray(item[1].headers);
acc[item[0]] = {
headers: transformedHeaders,
querystring: item[1].querystring,
};
return acc;
}, {});
// set path for lens and substitude config in plugin:
const lens = R.lensPath(['config']);
const updatedPlugin = R.set(lens, configWithTransformedHeaders, plugin);
return updatedPlugin;
}
return plugin;
});
const lens = R.lensPath(['plugins']);
const preparedApi = R.set(lens, preparedPlugins, data);
return {
type: WILL_CLONE,
payload: {
api: preparedApi,
response: data,
},
};
};
export const fillSelected = selectedPlugins => ({
type: FILL_SELECTED_PLUGINS,
payload: selectedPlugins,
});
export const fetchEndpoint = pathname => async dispatch => {
dispatch(getEndpointRequest());
try {
const response = await client.get(`apis${pathname}`);
const preparedPlugins = response.data.plugins.map(plugin => {
if (plugin.name === 'rate_limit') {
const pluginFromSchema = endpointSchema.plugins.filter(item => item.name === plugin.name)[0];
const { units } = pluginFromSchema.config.limit;
const policyFromSchema = pluginFromSchema.config.policy;
const arr = plugin.config.limit.split('-');
const valueOfLimit = arr[0]*1;
const valueOfUnit = arr[1];
// @TODO: policy should be also an array like in schema;
const updatedLimit = {
value: valueOfLimit,
unit: valueOfUnit,
units,
};
// set the path for the lens
const lens = R.lensPath(['config', 'limit']);
const lens2 = R.lensPath(['config', 'policy']);
const lens3 = R.lensPath(['config', 'policy', 'selected']);
// substitude the plugin.config.limit
const updatedPlugin = R.set(lens, updatedLimit, plugin);
const pluginWithPolicyFromSchema = R.set(lens2, policyFromSchema , updatedPlugin);
return R.set(lens3, plugin.config.policy, pluginWithPolicyFromSchema);
}
if (plugin.name === 'request_transformer') {
const transformHeadersToArray = obj => R.toPairs(obj)
.reduce((acc, item) => {
const header = {
key: item[0],
value: item[1],
};
acc.push(header);
return acc;
}, []);
const configWithTransformedHeaders = R.toPairs(plugin.config)
.reduce((acc, item) => {
const transformedHeaders = transformHeadersToArray(item[1].headers);
acc[item[0]] = {
headers: transformedHeaders,
querystring: item[1].querystring,
};
return acc;
}, {});
// set path for lens and substitude config in plugin:
const lens = R.lensPath(['config']);
const updatedPlugin = R.set(lens, configWithTransformedHeaders, plugin);
return updatedPlugin;
}
return plugin;
});
const lens = R.lensPath(['plugins']);
const preparedApi = R.set(lens, preparedPlugins, response.data);
dispatch(getEndpointSuccess(preparedApi, response.data));
} catch (error) {
console.log('FETCH_ENDPOINT_ERROR', 'Infernal server error', error);
}
};
export const setInitialEndpoint = endpointSchema => ({
type: SET_DEFAULT_ENDPOINT,
payload: endpointSchema,
});
export const fetchEndpointSchema = flag => async (dispatch) => {
dispatch(getEndpointSchemaRequest());
try {
// Get all server names
const response = await client.get('/oauth/servers');
const serverNames = await response.data.reduce((acc, item) => {
acc.push(item.name);
return acc;
}, []);
const pluginsFromApiSchemaWithUpdatedOAuthPlugin = await endpointSchema.plugins.map(item => {
if (item.name === 'oauth2') {
const lens = R.lensPath(['config', 'server_names']);
return R.set(lens, serverNames, item);
}
return item;
});
const lens = R.lensPath(['plugins']);
const endpointSchemaWithUpdatedOAuthPlugin = R.set(
lens,
pluginsFromApiSchemaWithUpdatedOAuthPlugin,
endpointSchema
);
flag && dispatch(setInitialEndpoint(endpointSchemaWithUpdatedOAuthPlugin));
dispatch(getEndpointSchemaSuccess(endpointSchemaWithUpdatedOAuthPlugin)); // @TODO: REMOVE when endpoint will be ready
} catch (error) {
console.log('FETCH_SERVER_NAMES_ERROR', error);
}
};
export const preparePlugins = api => api.plugins.map(plugin => {
if (plugin.name === 'rate_limit') {
const { limit, policy } = plugin.config;
const { value, unit } = limit;
const concatenation = `${value}-${unit}`;
// set the path for the lens
const lens = R.lensPath(['config', 'limit']);
const lens2 = R.lensPath(['config', 'policy']);
// substitude the plugin.config.limit
const updatedPlugin = R.set(lens, concatenation, plugin);
return R.set(lens2, policy.selected, updatedPlugin);
}
if (plugin.name === 'oauth2') {
return R.dissocPath(['config', 'server_names'], plugin);
}
if (plugin.name === 'request_transformer') {
// get all options names
const options = Object.keys(plugin.config);
// convert all values of plugin's config to array of objects
// so then we will be able to map through them:
const config = R.values(plugin.config);
const allTransformedHeaders = config.map((item, index) => {
// headers comes as an array of objects:
/**
* @example #1
*
* add: {
* header: [
* {someKey: 'someValue'},
* {someAnotherKey: 'someAnotherValue'},
* ]
* }
*/
const headers = item.headers;
// we will fill this arrays with keys and values respectively
let keys = [];
let values = [];
// fill key/values arrays
headers.map(item => {
const arr = R.values(item);
keys.push(arr[0]);
values.push(arr[1]);
});
// and now we are creating object that should be placed instead of
// array of the objects from example #1
/**
* @example #2
*
* add: {
* headers: {
* someKey: 'someValue',
* someAnotherKey: 'someAnotherValue',
* }
* }
*/
const transformedHeaders = R.zipObj(keys, values);
return transformedHeaders;
});
// step by step we updating plugins config:
const updatedPlugin = allTransformedHeaders.reduce((acc, item, index) => {
const lens = R.lensPath(['config', options[index], 'headers']);
return R.set(lens, item, acc);
}, plugin);
return updatedPlugin;
}
return plugin;
});
export const saveEndpoint = (pathname, api) => dispatch => {
dispatch(openConfirmationModal('save', () => confirmedSaveEndpoint(dispatch, pathname, api)));
};
export const updateEndpoint = (pathname, api) => dispatch => {
dispatch(openConfirmationModal('update', () => confirmedUpdateEndpoint(dispatch, pathname, api), api.name));
};
export const deleteEndpoint = apiName => dispatch => {
dispatch(openConfirmationModal('delete', () => confirmedDeleteEndpoint(dispatch, apiName), apiName));
};
export const confirmedSaveEndpoint = (dispatch, pathname, api) => {
dispatch(saveEndpointRequest(api));
const preparedPlugins = preparePlugins(api);
// substitude updated list of plugins
const preparedApi = R.set(R.lensPath(['plugins']), preparedPlugins, api);
try {
const response = client.post('apis', preparedApi);
dispatch(saveEndpointSuccess());
dispatch(closeConfirmationModal());
dispatch(fetchEndpoints());
history.push('/');
dispatch(showToaster());
} catch (error) {
if (error.response) {
dispatch(openResponseModal({
status: error.response.status,
statusText: error.response.statusText,
message: error.response.data,
}));
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
// More info about error handling in Axios: https://github.com/mzabriskie/axios#handling-errors
// eslint-disable-next-line
console.error(error.response.data);
} else if (error.request) {
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
// eslint-disable-next-line
console.log(error.request);
} else {
// Something happened in setting up the request that triggered an Error
// eslint-disable-next-line
console.log('Error', error.message);
}
}
};
export const confirmedUpdateEndpoint = (dispatch, pathname, api) => {
dispatch(saveEndpointRequest());
const preparedPlugins = preparePlugins(api);
// substitude updated list of plugins
const preparedApi = R.set(R.lensPath(['plugins']), preparedPlugins, api);
return client.put(`apis${pathname}`, preparedApi)
.then((response) => {
dispatch(saveEndpointSuccess());
dispatch(closeConfirmationModal());
dispatch(showToaster());
})
.catch((error) => {
if (error.response) {
dispatch(openResponseModal({
status: error.response.status,
statusText: error.response.statusText,
message: error.response.data,
}));
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
// More info about error handling in Axios: https://github.com/mzabriskie/axios#handling-errors
// eslint-disable-next-line
console.error(error.response.data);
} else if (error.request) {
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
// eslint-disable-next-line
console.log(error.request);
} else {
// Something happened in setting up the request that triggered an Error
// eslint-disable-next-line
console.log('Error', error.message);
}
});
};
export const confirmedDeleteEndpoint = async (dispatch, apiName) => {
dispatch(deleteEndpointRequest());
try {
const response = await client.delete(`apis/${apiName}`);
dispatch(deleteEndpointSuccess());
dispatch(closeConfirmationModal());
dispatch(fetchEndpoints());
history.push('/');
dispatch(showToaster());
} catch (error) {
dispatch(openResponseModal({
status: error.response.status,
statusText: error.response.statusText,
message: error.response.data.error,
}));
}
};
| remove senseless awaits and minor FP changes :)
| src/store/actions/api.actions.js | remove senseless awaits and minor FP changes :) | <ide><path>rc/store/actions/api.actions.js
<ide> try {
<ide> // Get all server names
<ide> const response = await client.get('/oauth/servers');
<del> const serverNames = await response.data.reduce((acc, item) => {
<add> const serverNames = response.data.reduce((acc, item) => {
<ide> acc.push(item.name);
<ide>
<ide> return acc;
<ide> }, []);
<del> const pluginsFromApiSchemaWithUpdatedOAuthPlugin = await endpointSchema.plugins.map(item => {
<del> if (item.name === 'oauth2') {
<del> const lens = R.lensPath(['config', 'server_names']);
<del>
<del> return R.set(lens, serverNames, item);
<del> }
<del>
<del> return item;
<del> });
<add> const lensOAuth = R.lensPath(['config', 'server_names']);
<add> const updatePlugin = (lens, serverNames, list) => pluginName => {
<add> const comparator = string => el => el.name === string;
<add> const getPluginIndex = comparator => list => list.findIndex(comparator);
<add>
<add> return R.adjust(
<add> R.set(lens, serverNames),
<add> getPluginIndex(comparator(pluginName))(list),
<add> list,
<add> );
<add> };
<add> const pluginsFromApiSchemaWithUpdatedOAuthPlugin = updatePlugin(
<add> lensOAuth,
<add> serverNames,
<add> endpointSchema.plugins,
<add> )('oauth2');
<ide> const lens = R.lensPath(['plugins']);
<ide> const endpointSchemaWithUpdatedOAuthPlugin = R.set(
<ide> lens, |
|
Java | apache-2.0 | 1d9a4b286365aef3fedb0c231771a667926d95e6 | 0 | jamesjara/OpenIAB-Cordova-Plugin,jamesjara/OpenIAB-Cordova-Plugin | package org.onepf.openiab.cordova;
//import com.squareup.okhttpxxxxxxx3.internal.StrictLineReader;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.Manifest;
import java.util.ArrayList;
import java.util.List;
import org.onepf.openiab.cordova.PaymentConstants;
import mp.MpUtils;
import mp.PaymentActivity;
import mp.PaymentRequest;
import mp.PaymentResponse;
public class OpenIabCordovaPlugin extends CordovaPlugin
{
private static final int REQUEST_CODE = 1234; // Can be anything
public static final String TAG = "OpenIAB-xxxx";
//private PaymentActivity mClass;
public static final String READ = "xxx";//Manifest.permission.PAYMENT_BROADCAST_PERMISSION;
/*
@Override
protected void onStart() {
super.onStart();
IntentFilter filter = new IntentFilter(PaymentConstants.SUCCESSFUL_PAYMENT);
registerReceiver(updateReceiver, filter);
Log.i(TAG, "updateReceiver registered");
}
@Override
protected void onStop() {
unregisterReceiver(updateReceiver);
Log.i(TAG, "updateReceiver unregistered");
super.onPause();
}
*/
@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException
{
if ("init".equals(action))
{
JSONObject j = args.getJSONObject(0);
boolean checkInventory = j.getBoolean("checkInventory");
int checkInventoryTimeout = j.getInt("checkInventoryTimeout");
int discoveryTimeout = j.getInt("discoveryTimeout");
int verifyMode = j.getInt("verifyMode");
int storeSearchStrategy = j.getInt("storeSearchStrategy");
int samsungCertificationRequestCode = j.getInt("samsungCertificationRequestCode");
//mClass = new PaymentActivity(this);
//_helper = Fortumo.enablePaymentBroadcast(this, Manifest.permission.PAYMENT_BROADCAST_PERMISSION);
init(callbackContext);
return true;
}
else if ("purchaseProduct".equals(action))
{
String sku = args.getString(0);
String payload = args.length() > 1 ? args.getString(1) : "";
purchaseProduct(sku, payload, callbackContext);
return true;
}
else if ("purchaseSubscription".equals(action))
{
/*
String sku = args.getString(0);
String payload = args.length() > 1 ? args.getString(1) : "";
purchaseProduct(sku, payload, callbackContext);
return true;
*/
}
else if ("consume".equals(action))
{
/*
String sku = args.getString(0);
consume(sku, callbackContext);
return true;
*/
}
else if ("getSkuDetails".equals(action))
{
/*
String sku = args.getString(0);
getSkuDetails(sku, callbackContext);
return true;
*/
}
else if ("getSkuListDetails".equals(action))
{
/*
List<String> skuList = new ArrayList<String>();
if (args.length() > 0) {
JSONArray jSkuList = args.getJSONArray(0);
int count = jSkuList.length();
for (int i = 0; i < count; ++i) {
skuList.add(jSkuList.getString(i));
}
}
getSkuListDetails(skuList, callbackContext);
return true;
*/
}
else if ("getPurchases".equals(action))
{
/*
getPurchases(callbackContext);
return true;
*/
}
else if ("mapSku".equals(action))
{
/*
String sku = args.getString(0);
String storeName = args.getString(1);
String storeSku = args.getString(2);
mapSku(sku, storeName, storeSku);
return true;
*/
}
return false; // Returning false results in a "MethodNotFound" error.
}
private void mapSku(String sku, String storeName, String storeSku) {
//SkuManager.getInstance().mapSku(sku, storeName, storeSku);
}
private void getPurchases(final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
List<Purchase> purchaseList = _inventory.getAllPurchases();
JSONArray jsonPurchaseList = new JSONArray();
for (Purchase p : purchaseList) {
JSONObject jsonPurchase;
try {
jsonPurchase = Serialization.purchaseToJson(p);
jsonPurchaseList.put(jsonPurchase);
} catch (JSONException e) {
callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize Purchase: " + p.getSku()));
return;
}
}
callbackContext.success(jsonPurchaseList);
*/
}
private void getSkuDetails(String sku, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
if (!_inventory.hasDetails(sku)) {
callbackContext.error(Serialization.errorToJson(-1, "SkuDetails not found: " + sku));
return;
}
JSONObject jsonSkuDetails;
try {
jsonSkuDetails = Serialization.skuDetailsToJson(_inventory.getSkuDetails(sku));
} catch (JSONException e) {
callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize SkuDetails: " + sku));
return;
}
callbackContext.success(jsonSkuDetails);
*/
}
private void getSkuListDetails(List<String> skuList, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
JSONArray jsonSkuDetailsList = new JSONArray();
for (String sku : skuList) {
if (_inventory.hasDetails(sku)) {
JSONObject jsonSkuDetails;
try {
jsonSkuDetails = Serialization.skuDetailsToJson(_inventory.getSkuDetails(sku));
jsonSkuDetailsList.put(jsonSkuDetails);
} catch (JSONException e) {
callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize SkuDetails: " + sku));
return;
}
}
else {
Log.d(TAG, "SKU NOT FOUND: " + sku);
}
}
callbackContext.success(jsonSkuDetailsList);
*/
}
//private void init(final JSONArray options, final List<String> skuList, final CallbackContext callbackContext) {
private void init(final CallbackContext callbackContext) {
cordova.getActivity().runOnUiThread(new Runnable() {
public void run() {
// MpUtils.enablePaymentBroadcast(this, READ); //Manifest.permission.PAYMENT_BROADCAST_PERMISSION);
// _helper = new OpenIabHelper(cordova.getActivity(), options);
createBroadcasts();
new UpdateDataTask().execute();
// Start setup. This is asynchronous and the specified listener
// will be called once setup completes.
/*
Log.d(TAG, "Starting setup.");
_helper.startSetup(new IabHelper.OnIabSetupFinishedListener() {
public void onIabSetupFinished(IabResult result) {
Log.d(TAG, "Setup finished.");
if (result.isFailure()) {
// Oh noes, there was a problem.
Log.e(TAG, "Problem setting up in-app billing: " + result);
callbackContext.error(Serialization.errorToJson(result));
return;
}
Log.d(TAG, "Querying inventory.");
// TODO: this is SHIT! product and subs skus shouldn't be sent two times
//_helper.queryInventoryAsync(true, skuList, skuList, new BillingCallback(callbackContext));
}
});
*/
}
});
}
private boolean checkInitialized(final CallbackContext callbackContext) {
if (false)
{
Log.e(TAG, "Not initialized");
callbackContext.error("Not initialized");
return false;
}
return true;
}
private void purchaseProduct(final String sku, final String developerPayload, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
//Log.d(TAG, "SKU: " + SkuManager.getInstance().getStoreSku(OpenIabHelper.NAME_GOOGLE, sku));
cordova.setActivityResultCallback(this);
cordova.getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
//mClass.PaymentRequest.PaymentRequestBuilder builder = new mClass.PaymentRequest.PaymentRequestBuilder();
PaymentRequest.PaymentRequestBuilder builder = new PaymentRequest.PaymentRequestBuilder();
builder.setService(PaymentConstants.GOLD_SERVICE_ID, PaymentConstants.GOLD_SERVICE_IN_APP_SECRET);
builder.setProductName(PaymentConstants.PRODUCT_GOLD);
builder.setConsumable(true);
builder.setDisplayString(PaymentConstants.DISPLAY_STRING_GOLD);
builder.setCreditsMultiplier(1.1d);
//builder.setIcon(R.drawable.ic_launcher);
PaymentRequest pr = builder.build();
// execute
Intent localIntent = pr.toIntent(cordova.getActivity());
cordova.getActivity().startActivityForResult(localIntent, REQUEST_CODE);
//makePayment(pr);
//_helper.launchPurchaseFlow(cordova.getActivity(), sku, RC_REQUEST, new BillingCallback(callbackContext), developerPayload);
}
});
}
public void purchaseSubscription(final String sku, final String developerPayload, final CallbackContext callbackContext) {
/*
if (!checkInitialized(callbackContext)) return;
cordova.setActivityResultCallback(this);
cordova.getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
_helper.launchSubscriptionPurchaseFlow(cordova.getActivity(), sku, RC_REQUEST, new BillingCallback(callbackContext), developerPayload);
}
});
*/
}
private void consume(final String sku, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
cordova.getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
if (!_inventory.hasPurchase(sku))
{
callbackContext.error(Serialization.errorToJson(-1, "Product haven't been purchased: " + sku));
return;
}
Purchase purchase = _inventory.getPurchase(sku);
_helper.consumeAsync(purchase, new BillingCallback(callbackContext));
}
});
*/
}
private class UpdateDataTask extends AsyncTask<Void, Void, String[]> {
@Override
protected String[] doInBackground(Void... voids) {
String[] result = new String[1];
result[0] = String.valueOf("asd");
//result[0] = String.valueOf(Wallet.getColdAmount(MainActivity.this));
//result[1] = String.valueOf(BonusLevel.isBonusUnlocked(MainActivity.this));
//result[2] = String.valueOf(PotionStack.getPotionAmount(MainActivity.this, PaymentConstants.PRODUCT_HEALTH_POTION));
//result[3] = String.valueOf(PotionStack.getPotionAmount(MainActivity.this, PaymentConstants.PRODUCT_MANA_POTION));
return result;
}
@Override
protected void onPostExecute(String[] data) {
// goldTextView.setText(data[0]);
// bonusLevelUnlockedTextView.setText(data[1]);
// healthPotionTextView.setText(data[2]);
// manaPotionTextView.setText(data[3]);
}
}
/**
* Callback class for when a purchase or consumption process is finished
*/
/*
public class BillingCallback implements
IabHelper.QueryInventoryFinishedListener,
IabHelper.OnIabPurchaseFinishedListener,
IabHelper.OnConsumeFinishedListener {
final CallbackContext _callbackContext;
public BillingCallback(final CallbackContext callbackContext) {
_callbackContext = callbackContext;
}
@Override
public void onQueryInventoryFinished(IabResult result, Inventory inventory) {
Log.d(TAG, "Query inventory process finished.");
if (result.isFailure()) {
_callbackContext.error(Serialization.errorToJson(result));
return;
}
Log.d(TAG, "Query inventory was successful. Init finished.");
_inventory = inventory;
_callbackContext.success();
}
@Override
public void onIabPurchaseFinished(IabResult result, Purchase purchase) {
Log.d(TAG, "Purchase process finished: " + result + ", purchase: " + purchase);
if (result.isFailure()) {
Log.e(TAG, "Error purchasing: " + result);
_callbackContext.error(Serialization.errorToJson(result));
return;
}
_inventory.addPurchase(purchase);
Log.d(TAG, "Purchase successful.");
JSONObject jsonPurchase;
try {
jsonPurchase = Serialization.purchaseToJson(purchase);
} catch (JSONException e) {
_callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize the purchase"));
return;
}
_callbackContext.success(jsonPurchase);
}
@Override
public void onConsumeFinished(Purchase purchase, IabResult result) {
Log.d(TAG, "Consumption process finished. Purchase: " + purchase + ", result: " + result);
if (result.isFailure()) {
Log.e(TAG, "Error while consuming: " + result);
_callbackContext.error(Serialization.errorToJson(result));
return;
}
_inventory.erasePurchase(purchase.getSku());
Log.d(TAG, "Consumption successful. Provisioning.");
JSONObject jsonPurchase;
try {
jsonPurchase = Serialization.purchaseToJson(purchase);
} catch (JSONException e) {
_callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize the purchase"));
return;
}
_callbackContext.success(jsonPurchase);
}
}
*/
private void createBroadcasts() {
Log.d(TAG, "createBroadcasts");
/*
IntentFilter filter = new IntentFilter(YANDEX_STORE_ACTION_PURCHASE_STATE_CHANGED);
cordova.getActivity().registerReceiver(_billingReceiver, filter);
*/
IntentFilter filter = new IntentFilter(PaymentConstants.SUCCESSFUL_PAYMENT);
cordova.getActivity().registerReceiver(_billingReceiver, filter);
Log.i(TAG, "updateReceiver registered");
}
private void destroyBroadcasts() {
Log.d(TAG, "destroyBroadcasts");
try {
cordova.getActivity().unregisterReceiver(_billingReceiver);
} catch (Exception ex) {
Log.d(TAG, "destroyBroadcasts exception:\n" + ex.getMessage());
}
}
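    // Lifecycle sketch (an assumption -- the hook below is not part of this
    // excerpt): a Cordova plugin would normally unregister the receiver when
    // the plugin itself goes away, e.g.:
    //
    //     @Override
    //     public void onDestroy() {
    //         destroyBroadcasts();
    //         super.onDestroy();
    //     }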
private BroadcastReceiver _billingReceiver = new BroadcastReceiver() {
private static final String TAG = "YandexBillingReceiver";
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
Bundle extras = intent.getExtras();
Log.d(TAG, "onReceive intent: " + intent);
//if (YANDEX_STORE_ACTION_PURCHASE_STATE_CHANGED.equals(action)) {
purchaseStateChanged(extras, intent);
//}
}
private void purchaseStateChanged(Bundle extras, Intent intent) {
Log.d(TAG, "purchaseStateChanged intent: " + extras);
//_helper.handleActivityResult(RC_REQUEST, Activity.RESULT_OK, data);
// Log.d(TAG, "- billing_status: " + getStatusString(extras.getInt("billing_status")));
Log.d(TAG, "- credit_amount: " + extras.getString("credit_amount"));
Log.d(TAG, "- credit_name: " + extras.getString("credit_name"));
Log.d(TAG, "- message_id: " + extras.getString("message_id") );
Log.d(TAG, "- payment_code: " + extras.getString("payment_code"));
Log.d(TAG, "- price_amount: " + extras.getString("price_amount"));
Log.d(TAG, "- price_currency: " + extras.getString("price_currency"));
Log.d(TAG, "- product_name: " + extras.getString("product_name"));
Log.d(TAG, "- service_id: " + extras.getString("service_id"));
Log.d(TAG, "- user_id: " + extras.getString("user_id"));
int billingStatus = extras.getInt("billing_status");
if(billingStatus == MpUtils.MESSAGE_STATUS_BILLED) {
int coins = Integer.parseInt(intent.getStringExtra("credit_amount"));
//Wallet.addCoins(context, coins);
new UpdateDataTask().execute();
}
}
};
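    // Hypothetical helper (sketch only; the getStatusString referenced in the
    // commented-out log above is not defined in this file). It maps the Fortumo
    // status codes already used in this receiver and in onActivityResult below
    // to readable labels:
    private static String describeBillingStatus(int status) {
        switch (status) {
            case MpUtils.MESSAGE_STATUS_BILLED:  return "billed";
            case MpUtils.MESSAGE_STATUS_FAILED:  return "failed";
            case MpUtils.MESSAGE_STATUS_PENDING: return "pending";
            default:                             return "unknown (" + status + ")";
        }
    }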
/*
protected final void makePayment(PaymentRequest payment) {
Intent localIntent = paymentRequest.toIntent(act);
act.startActivityForResult(localIntent, requestCode);
Context context = cordova.getActivity().getApplicationContext();
Intent intent = new Intent(context,payment.toIntent(this));
startActivityForResult(this, intent, 0);
}
*/
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_CODE) {
if(data == null) {
return;
}
// OK
if (resultCode == Activity.RESULT_OK) {
PaymentResponse response = new PaymentResponse(data);
switch (response.getBillingStatus()) {
case MpUtils.MESSAGE_STATUS_BILLED:
// ...
break;
case MpUtils.MESSAGE_STATUS_FAILED:
// ...
break;
case MpUtils.MESSAGE_STATUS_PENDING:
// ...
break;
}
// Cancel
} else {
// ..
}
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}
} | android/src/OpenIabCordovaPlugin.java | package org.onepf.openiab.cordova;
//import com.squareup.okhttpxxxxxxx3.internal.StrictLineReader;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.Manifest;
import java.util.ArrayList;
import java.util.List;
import org.onepf.openiab.cordova.PaymentConstants;
import mp.MpUtils;
import mp.PaymentActivity;
import mp.PaymentRequest;
import mp.PaymentResponse;
public class OpenIabCordovaPlugin extends CordovaPlugin
{
public static final String TAG = "OpenIAB-xxxx";
//private PaymentActivity mClass;
public static final String READ = "xxx";//Manifest.permission.PAYMENT_BROADCAST_PERMISSION;
/*
@Override
protected void onStart() {
super.onStart();
IntentFilter filter = new IntentFilter(PaymentConstants.SUCCESSFUL_PAYMENT);
registerReceiver(updateReceiver, filter);
Log.i(TAG, "updateReceiver registered");
}
@Override
protected void onStop() {
unregisterReceiver(updateReceiver);
Log.i(TAG, "updateReceiver unregistered");
super.onPause();
}
*/
@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException
{
if ("init".equals(action))
{
JSONObject j = args.getJSONObject(0);
boolean checkInventory = j.getBoolean("checkInventory");
int checkInventoryTimeout = j.getInt("checkInventoryTimeout");
int discoveryTimeout = j.getInt("discoveryTimeout");
int verifyMode = j.getInt("verifyMode");
int storeSearchStrategy = j.getInt("storeSearchStrategy");
int samsungCertificationRequestCode = j.getInt("samsungCertificationRequestCode");
//mClass = new PaymentActivity(this);
//_helper = Fortumo.enablePaymentBroadcast(this, Manifest.permission.PAYMENT_BROADCAST_PERMISSION);
init(callbackContext);
return true;
}
else if ("purchaseProduct".equals(action))
{
String sku = args.getString(0);
String payload = args.length() > 1 ? args.getString(1) : "";
purchaseProduct(sku, payload, callbackContext);
return true;
}
else if ("purchaseSubscription".equals(action))
{
/*
String sku = args.getString(0);
String payload = args.length() > 1 ? args.getString(1) : "";
purchaseProduct(sku, payload, callbackContext);
return true;
*/
}
else if ("consume".equals(action))
{
/*
String sku = args.getString(0);
consume(sku, callbackContext);
return true;
*/
}
else if ("getSkuDetails".equals(action))
{
/*
String sku = args.getString(0);
getSkuDetails(sku, callbackContext);
return true;
*/
}
else if ("getSkuListDetails".equals(action))
{
/*
List<String> skuList = new ArrayList<String>();
if (args.length() > 0) {
JSONArray jSkuList = args.getJSONArray(0);
int count = jSkuList.length();
for (int i = 0; i < count; ++i) {
skuList.add(jSkuList.getString(i));
}
}
getSkuListDetails(skuList, callbackContext);
return true;
*/
}
else if ("getPurchases".equals(action))
{
/*
getPurchases(callbackContext);
return true;
*/
}
else if ("mapSku".equals(action))
{
/*
String sku = args.getString(0);
String storeName = args.getString(1);
String storeSku = args.getString(2);
mapSku(sku, storeName, storeSku);
return true;
*/
}
return false; // Returning false results in a "MethodNotFound" error.
}
private void mapSku(String sku, String storeName, String storeSku) {
//SkuManager.getInstance().mapSku(sku, storeName, storeSku);
}
private void getPurchases(final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
List<Purchase> purchaseList = _inventory.getAllPurchases();
JSONArray jsonPurchaseList = new JSONArray();
for (Purchase p : purchaseList) {
JSONObject jsonPurchase;
try {
jsonPurchase = Serialization.purchaseToJson(p);
jsonPurchaseList.put(jsonPurchase);
} catch (JSONException e) {
callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize Purchase: " + p.getSku()));
return;
}
}
callbackContext.success(jsonPurchaseList);
*/
}
private void getSkuDetails(String sku, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
if (!_inventory.hasDetails(sku)) {
callbackContext.error(Serialization.errorToJson(-1, "SkuDetails not found: " + sku));
return;
}
JSONObject jsonSkuDetails;
try {
jsonSkuDetails = Serialization.skuDetailsToJson(_inventory.getSkuDetails(sku));
} catch (JSONException e) {
callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize SkuDetails: " + sku));
return;
}
callbackContext.success(jsonSkuDetails);
*/
}
private void getSkuListDetails(List<String> skuList, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
JSONArray jsonSkuDetailsList = new JSONArray();
for (String sku : skuList) {
if (_inventory.hasDetails(sku)) {
JSONObject jsonSkuDetails;
try {
jsonSkuDetails = Serialization.skuDetailsToJson(_inventory.getSkuDetails(sku));
jsonSkuDetailsList.put(jsonSkuDetails);
} catch (JSONException e) {
callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize SkuDetails: " + sku));
return;
}
}
else {
Log.d(TAG, "SKU NOT FOUND: " + sku);
}
}
callbackContext.success(jsonSkuDetailsList);
*/
}
//private void init(final JSONArray options, final List<String> skuList, final CallbackContext callbackContext) {
private void init(final CallbackContext callbackContext) {
cordova.getActivity().runOnUiThread(new Runnable() {
public void run() {
// MpUtils.enablePaymentBroadcast(this, READ); //Manifest.permission.PAYMENT_BROADCAST_PERMISSION);
// _helper = new OpenIabHelper(cordova.getActivity(), options);
createBroadcasts();
new UpdateDataTask().execute();
// Start setup. This is asynchronous and the specified listener
// will be called once setup completes.
/*
Log.d(TAG, "Starting setup.");
_helper.startSetup(new IabHelper.OnIabSetupFinishedListener() {
public void onIabSetupFinished(IabResult result) {
Log.d(TAG, "Setup finished.");
if (result.isFailure()) {
// Oh noes, there was a problem.
Log.e(TAG, "Problem setting up in-app billing: " + result);
callbackContext.error(Serialization.errorToJson(result));
return;
}
Log.d(TAG, "Querying inventory.");
// TODO: this is SHIT! product and subs skus shouldn't be sent two times
//_helper.queryInventoryAsync(true, skuList, skuList, new BillingCallback(callbackContext));
}
});
*/
}
});
}
private boolean checkInitialized(final CallbackContext callbackContext) {
if (false)
{
Log.e(TAG, "Not initialized");
callbackContext.error("Not initialized");
return false;
}
return true;
}
private void purchaseProduct(final String sku, final String developerPayload, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
//Log.d(TAG, "SKU: " + SkuManager.getInstance().getStoreSku(OpenIabHelper.NAME_GOOGLE, sku));
cordova.setActivityResultCallback(this);
cordova.getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
//mClass.PaymentRequest.PaymentRequestBuilder builder = new mClass.PaymentRequest.PaymentRequestBuilder();
PaymentRequest.PaymentRequestBuilder builder = new PaymentRequest.PaymentRequestBuilder();
builder.setService(PaymentConstants.GOLD_SERVICE_ID, PaymentConstants.GOLD_SERVICE_IN_APP_SECRET);
builder.setProductName(PaymentConstants.PRODUCT_GOLD);
builder.setConsumable(true);
builder.setDisplayString(PaymentConstants.DISPLAY_STRING_GOLD);
builder.setCreditsMultiplier(1.1d);
//builder.setIcon(R.drawable.ic_launcher);
PaymentRequest pr = builder.build();
// execute
Intent localIntent = pr.toIntent(cordova.getActivity());
cordova.getActivity().startActivityForResult(localIntent, 123);
//makePayment(pr);
//_helper.launchPurchaseFlow(cordova.getActivity(), sku, RC_REQUEST, new BillingCallback(callbackContext), developerPayload);
}
});
}
public void purchaseSubscription(final String sku, final String developerPayload, final CallbackContext callbackContext) {
/*
if (!checkInitialized(callbackContext)) return;
cordova.setActivityResultCallback(this);
cordova.getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
_helper.launchSubscriptionPurchaseFlow(cordova.getActivity(), sku, RC_REQUEST, new BillingCallback(callbackContext), developerPayload);
}
});
*/
}
private void consume(final String sku, final CallbackContext callbackContext) {
if (!checkInitialized(callbackContext)) return;
/*
cordova.getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
if (!_inventory.hasPurchase(sku))
{
callbackContext.error(Serialization.errorToJson(-1, "Product hasn't been purchased: " + sku));
return;
}
Purchase purchase = _inventory.getPurchase(sku);
_helper.consumeAsync(purchase, new BillingCallback(callbackContext));
}
});
*/
}
private class UpdateDataTask extends AsyncTask<Void, Void, String[]> {
@Override
protected String[] doInBackground(Void... voids) {
String[] result = new String[1];
result[0] = String.valueOf("asd");
//result[0] = String.valueOf(Wallet.getColdAmount(MainActivity.this));
//result[1] = String.valueOf(BonusLevel.isBonusUnlocked(MainActivity.this));
//result[2] = String.valueOf(PotionStack.getPotionAmount(MainActivity.this, PaymentConstants.PRODUCT_HEALTH_POTION));
//result[3] = String.valueOf(PotionStack.getPotionAmount(MainActivity.this, PaymentConstants.PRODUCT_MANA_POTION));
return result;
}
@Override
protected void onPostExecute(String[] data) {
// goldTextView.setText(data[0]);
// bonusLevelUnlockedTextView.setText(data[1]);
// healthPotionTextView.setText(data[2]);
// manaPotionTextView.setText(data[3]);
}
}
/**
* Callback class for when a purchase or consumption process is finished
*/
/*
public class BillingCallback implements
IabHelper.QueryInventoryFinishedListener,
IabHelper.OnIabPurchaseFinishedListener,
IabHelper.OnConsumeFinishedListener {
final CallbackContext _callbackContext;
public BillingCallback(final CallbackContext callbackContext) {
_callbackContext = callbackContext;
}
@Override
public void onQueryInventoryFinished(IabResult result, Inventory inventory) {
Log.d(TAG, "Query inventory process finished.");
if (result.isFailure()) {
_callbackContext.error(Serialization.errorToJson(result));
return;
}
Log.d(TAG, "Query inventory was successful. Init finished.");
_inventory = inventory;
_callbackContext.success();
}
@Override
public void onIabPurchaseFinished(IabResult result, Purchase purchase) {
Log.d(TAG, "Purchase process finished: " + result + ", purchase: " + purchase);
if (result.isFailure()) {
Log.e(TAG, "Error purchasing: " + result);
_callbackContext.error(Serialization.errorToJson(result));
return;
}
_inventory.addPurchase(purchase);
Log.d(TAG, "Purchase successful.");
JSONObject jsonPurchase;
try {
jsonPurchase = Serialization.purchaseToJson(purchase);
} catch (JSONException e) {
_callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize the purchase"));
return;
}
_callbackContext.success(jsonPurchase);
}
@Override
public void onConsumeFinished(Purchase purchase, IabResult result) {
Log.d(TAG, "Consumption process finished. Purchase: " + purchase + ", result: " + result);
if (result.isFailure()) {
Log.e(TAG, "Error while consuming: " + result);
_callbackContext.error(Serialization.errorToJson(result));
return;
}
_inventory.erasePurchase(purchase.getSku());
Log.d(TAG, "Consumption successful. Provisioning.");
JSONObject jsonPurchase;
try {
jsonPurchase = Serialization.purchaseToJson(purchase);
} catch (JSONException e) {
_callbackContext.error(Serialization.errorToJson(-1, "Couldn't serialize the purchase"));
return;
}
_callbackContext.success(jsonPurchase);
}
}
*/
private void createBroadcasts() {
Log.d(TAG, "createBroadcasts");
/*
IntentFilter filter = new IntentFilter(YANDEX_STORE_ACTION_PURCHASE_STATE_CHANGED);
cordova.getActivity().registerReceiver(_billingReceiver, filter);
*/
IntentFilter filter = new IntentFilter(PaymentConstants.SUCCESSFUL_PAYMENT);
cordova.getActivity().registerReceiver(_billingReceiver, filter);
Log.i(TAG, "updateReceiver registered");
}
private void destroyBroadcasts() {
Log.d(TAG, "destroyBroadcasts");
try {
cordova.getActivity().unregisterReceiver(_billingReceiver);
} catch (Exception ex) {
Log.d(TAG, "destroyBroadcasts exception:\n" + ex.getMessage());
}
}
private BroadcastReceiver _billingReceiver = new BroadcastReceiver() {
private static final String TAG = "YandexBillingReceiver";
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
Bundle extras = intent.getExtras();
Log.d(TAG, "onReceive intent: " + intent);
//if (YANDEX_STORE_ACTION_PURCHASE_STATE_CHANGED.equals(action)) {
purchaseStateChanged(extras, intent);
//}
}
private void purchaseStateChanged(Bundle extras, Intent intent) {
Log.d(TAG, "purchaseStateChanged intent: " + extras);
//_helper.handleActivityResult(RC_REQUEST, Activity.RESULT_OK, data);
// Log.d(TAG, "- billing_status: " + getStatusString(extras.getInt("billing_status")));
Log.d(TAG, "- credit_amount: " + extras.getString("credit_amount"));
Log.d(TAG, "- credit_name: " + extras.getString("credit_name"));
Log.d(TAG, "- message_id: " + extras.getString("message_id") );
Log.d(TAG, "- payment_code: " + extras.getString("payment_code"));
Log.d(TAG, "- price_amount: " + extras.getString("price_amount"));
Log.d(TAG, "- price_currency: " + extras.getString("price_currency"));
Log.d(TAG, "- product_name: " + extras.getString("product_name"));
Log.d(TAG, "- service_id: " + extras.getString("service_id"));
Log.d(TAG, "- user_id: " + extras.getString("user_id"));
int billingStatus = extras.getInt("billing_status");
if(billingStatus == MpUtils.MESSAGE_STATUS_BILLED) {
int coins = Integer.parseInt(intent.getStringExtra("credit_amount"));
//Wallet.addCoins(context, coins);
new UpdateDataTask().execute();
}
}
};
/*
protected final void makePayment(PaymentRequest payment) {
Intent localIntent = paymentRequest.toIntent(act);
act.startActivityForResult(localIntent, requestCode);
Context context = cordova.getActivity().getApplicationContext();
Intent intent = new Intent(context,payment.toIntent(this));
startActivityForResult(this, intent, 0);
}
*/
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_CODE) {
if(data == null) {
return;
}
// OK
if (resultCode == Activity.RESULT_OK) {
PaymentResponse response = new PaymentResponse(data);
switch (response.getBillingStatus()) {
case MpUtils.MESSAGE_STATUS_BILLED:
// ...
break;
case MpUtils.MESSAGE_STATUS_FAILED:
// ...
break;
case MpUtils.MESSAGE_STATUS_PENDING:
// ...
break;
}
// Cancel
} else {
// ..
}
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}
} | no time to desc
| android/src/OpenIabCordovaPlugin.java | no time to desc | <ide><path>ndroid/src/OpenIabCordovaPlugin.java
<ide> public class OpenIabCordovaPlugin extends CordovaPlugin
<ide> {
<ide>
<del>
<add>
<add> private static final int REQUEST_CODE = 1234; // Can be anything
<ide> public static final String TAG = "OpenIAB-xxxx";
<ide>
<ide> //private PaymentActivity mClass;
<ide>
<ide> // execute
<ide> Intent localIntent = pr.toIntent(cordova.getActivity());
<del> cordova.getActivity().startActivityForResult(localIntent, 123);
<add> cordova.getActivity().startActivityForResult(localIntent, REQUEST_CODE);
<ide>
<ide> //makePayment(pr);
<ide> //_helper.launchPurchaseFlow(cordova.getActivity(), sku, RC_REQUEST, new BillingCallback(callbackContext), developerPayload);
<ide>
<ide> }
<ide> };
<add>
<ide>
<ide> /*
<ide> protected final void makePayment(PaymentRequest payment) { |
|
JavaScript | mit | 0afa4b9e781e444913670575066c55f68b5e9772 | 0 | shopetan/repair,shopetan/repair | var socket = io.connect('http://localhost:8080');
socket.on('connect', function(msg) {
console.log("connet");
});
socket.on('title_message', function(msg) {
console.log("send_title");
console.log(msg);
document.getElementById("input_name").innerHTML = msg.value;
});
socket.on('type_message', function(msg) {
console.log("send_type");
console.log(msg);
document.getElementById("input_type").innerHTML = msg.value;
});
socket.on('source_message', function(msg) {
console.log("send_source");
console.log(msg);
document.getElementById("input_source").innerHTML = msg.value;
});
socket.on('os_message', function(msg) {
console.log("send_os");
console.log(msg);
document.getElementById("input_os").innerHTML = msg.value;
});
socket.on('browser_message', function(msg) {
console.log("send_browser");
console.log(msg);
document.getElementById("input_browser").innerHTML = msg.value;
});
function SendTitleMsg() {
var msg = document.getElementById("input_name").value;
socket.emit('title_message', { value: msg});
}
function SendTypeMsg() {
var msg = document.getElementById("input_type").value;
socket.emit('type_message', { value: msg});
}
function SendSourceMsg() {
var msg = document.getElementById("input_source").value;
socket.emit('source_message', { value: msg});
}
function SendOsMsg() {
if(document.getElementById("input_os").value != null){
var msg = document.getElementById("input_os").value;
socket.emit('os_message', { value: msg});
}
}
function SendBrowserMsg() {
if(document.getElementById("input_browser").value != null){
var msg = document.getElementById("input_browser").value;
socket.emit('browser_message', { value: msg});
}
}
function DisConnect() {
socket.emit('message', { value: '' }); // nothing meaningful to send at this point; emit an empty payload
socket.disconnect();
}
function keydown(){
var name = document.forms.EditProgram.input_name.value;
var type = document.forms.EditProgram.input_type.value;
var source = document.forms.EditProgram.input_source.value + ``;
SendTitleMsg();
SendTypeMsg();
SendSourceMsg();
SendOsMsg();
SendBrowserMsg();
}
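// Wiring sketch (an assumption -- the actual binding lives in the HTML pages,
// which are not shown here): keydown() is meant to run on every keystroke so
// the paired client stays in sync, e.g.:
//
//   document.getElementById('input_source').addEventListener('keyup', keydown);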
// Draw html
function DrawUsersHTML(item){
var template = [
'<div class="mdl-cell mdl-cell--3-col mdl-cell--4-col-tablet mdl-cell--4-col-phone mdl-card mdl-shadow--3dp">',
'<div class="mdl-card__media">',
'</div>',
'<div class="mdl-card__title">',
'<h4 class="mdl-card__title-text"><%- name %></h4>',
'</div>',
'<div class="mdl-card__supporting-text">',
'<span class="mdl-typography--font-light mdl-typography--subhead">login:<%- is_login %></span>',
'</div>',
'<div class="mdl-card__actions">',
' <a class="android-link mdl-button mdl-js-button mdl-typography--text-uppercase" href="detail.html?userID=<%- _id %>">',
' 詳細画面へ',
' <i class="material-icons">chevron_right</i>',
' </a>',
'</div>',
'</div>'
].join("");
$("#users").append(_.template(template)({name :item.name,
is_login :item.is_login,
_id :item._id}));
}
function DrawSingleEditProgramsHTML(item){
var template = [
'<div class="mdl-card mdl-shadow--3dp ">',
'<div class="mdl-card__media">',
'</div>',
'<div class="mdl-card__title">',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_name"><%- name %></textarea>',
'<label class="mdl-textfield__label" for="source">title</label>',
'</div>',
' </div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_type"><%- type %></textarea>',
'<label class="mdl-textfield__label" for="source">type</label>',
'</div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "15" id="input_source"><%- source %></textarea>',
'<label class="mdl-textfield__label" for="source">edit_program</label>',
'</div>',
'</div>',
'<div id="support">',
'<input type="none" value="view Support" class="mdl-button mdl-js-button mdl-button--raised mdl-js-ripple-effect mdl-button--accent" onclick="display(!disp,true)">',
'</div>',
'<div id="disp">',
'</div>',
'</div>'
].join("");
$("#EditProgram").append(_.template(template)({name:item.name,
type :item.type,
source :item.source}));
}
function DrawEditProgramsHTML(item,i){
var template = [
'<div class="mdl-cell mdl-cell--3-col mdl-cell--4-col-tablet mdl-cell--4-col-phone mdl-card mdl-shadow--3dp">',
'<div class="mdl-card__media">',
'</div>',
'<div class="mdl-card__title">',
'<h4 class="mdl-card__title-text"><%- name %></h4>',
'</div>',
'<div class="mdl-card__supporting-text">',
'<span class="mdl-typography--font-light mdl-typography--subhead">type:<%- type %></span>',
'</div>',
'<div class="mdl-card__actions">',
' <a class="android-link mdl-button mdl-js-button mdl-typography--text-uppercase" href="edit.html?editProgramID=<%- _id %>&userID=<%- u_id%>">',
' このコードを編集する',
' <i class="material-icons">chevron_right</i>',
' </a>',
'</div>',
'</div>'
].join("");
$("#editPrograms").append(_.template(template)({name:item.edit_programs[i].name,
type :item.edit_programs[i].type,
_id :item.edit_programs[i]._id,
u_id :item._id}));
}
function DrawSupportHTML(item){
var template = [
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_os"><%- os %></textarea>',
'</div>',
'</div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_browser"><%- browser %></textarea>',
'</div>',
'</div>'
].join("");
    // Fill in defaults for any missing fields, even when a partial support record exists.
    if(item[0] == null){
        item[0] = {};
    }
    if(item[0].os == null){
        item[0].os = "OS";
    }
    if(item[0].browser == null){
        item[0].browser = "Browser";
    }
$("#support").click(function () {
$(this).hide();
return false;
});
$("#disp").append(_.template(template)({os : item[0].os,
browser :item[0].browser }));
}
function display(num,edit){
console.log(num,edit);
if (num == true && edit == false){
var template = [
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_os">OS</textarea>',
'</div>',
'</div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_browser">Browser</textarea>',
'</div>',
'</div>'
].join("");
$("#disp").append(_.template(template)());
$("#support").click(function () {
$(this).hide();
return false;
});
}else if(num == true && edit == true){
var reg = new RegExp("\\?(.+?)$");
var theUrl = window.location.href;
routeAPI = 'http://localhost:3000/api/';
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
GetEditProgramIDUrl = routeAPI + 'edit_programs/' + method[0][1];
fetch(GetEditProgramIDUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
DrawSupportHTML(data.supports);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
}
function DrawCreateButtonHTML(item){
var template = [
'<button class="mdl-button mdl-js-button mdl-button--raised mdl-js-ripple-effect mdl-button--accent" onclick="location.href=\'./create.html?userID=<%- u_id %>\'">',
'Create Programs',
'</button>'
].join("");
$("#createButton").append(_.template(template)({u_id : item._id}));
}
function DrawSingleUserHTML(item){
var template = [
'<img class="mdl-cell mdl-cell--4-col" src="images/stationery.png"><div class="logo-font android-sub-slogan"><%- name %></div>'
].join("");
$("#user").append(_.template(template)({name : item.name}));
}
//HTTP Request
function GetAllUserAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
for(var i = 0; i < data.length; i++){
DrawUsersHTML(data[i]);
}
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function GetSingleEditProgramAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
DrawSingleEditProgramsHTML(data);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function GetEditProgramAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
DrawSingleUserHTML(data);
for(var i = 0; i < data.edit_programs.length; i++){
DrawEditProgramsHTML(data,i);
}
DrawCreateButtonHTML(data);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function GetEditProgramIDAPI(theUrl,routeAPI){
console.log(theUrl);
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
console.log(data);
getUserIDFromEditProgramUrl = routeAPI + 'users' + '?name=' + (data.name + '') + '&type=' + (data.type +'') + '&source=' + (data.source + '');
getUserIDFromEditProgramAPI(getUserIDFromEditProgramUrl);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function getUserIDFromEditProgramAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
console.log(data);
console.log(theUrl);
window.location.href = "./detail.html?userID=" + data[0]._id;
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function PostUserCreateAPI(theUrl,u_name,is_login){
fetch(theUrl, {
method: 'post',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: JSON.stringify({
name: 'Hubot',
is_login: 'false'
})
});
}
function PostEditProgramAPI(theUrl,e_name,e_type,e_source){
fetch(theUrl, {
method: 'post',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: JSON.stringify({
name: e_name + '',
type: e_type + '',
source: e_source + ''
})
});
}
function DeleteUserAPI(theUrl){
fetch(theUrl, {
method: 'delete',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
});
}
function DeleteEditProgramAPI(theUrl){
fetch(theUrl, {
method: 'delete',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
});
}
function saveProgram(routeAPI,theUrl){
var reg = new RegExp("\\?(.+?)$");
var name = document.forms.EditProgram.input_name.value;
var type = document.forms.EditProgram.input_type.value;
var source = document.forms.EditProgram.input_source.value + ``;
if(theUrl.match(/edit/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
//DeleteSupportUrl = routeAPI + 'supports/' + method[0][1];
//PostSupportUrl = routeAPI + 'supports' + '?editProgramID=' + method[0][1];
//PostSupportAPI(PostEditProgramUrl,os,browser);
DeleteEditProgramUrl = routeAPI + 'edit_programs/' + method[0][1];
PostEditProgramUrl = routeAPI + 'edit_programs' + '?userID=' + method[1][1];
DeleteUserUrl = routeAPI + 'users/' + method[1][1];
name = name.replace(/\n+$/g,'');
type = type.replace(/\n+$/g,'');
source = source.replace(/\n+$/g,'');
source = source.replace(/\n+/g,'');
url_name = '?name=' + (name);
url_type = '&type=' + (type);
url_source = '&source=' + (source);
console.log("test");
console.log(name,type,source);
GetEditProgramIDUrl = routeAPI + 'edit_programs' + url_name + url_type + url_source;
console.log(GetEditProgramIDUrl);
DeleteEditProgramAPI(DeleteEditProgramUrl);
PostEditProgramAPI(PostEditProgramUrl,name,type,source);
DeleteUserAPI(DeleteUserUrl);
GetEditProgramIDAPI(GetEditProgramIDUrl,routeAPI);
}
}
else if(theUrl.match(/create/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
PostEditProgramUrl = routeAPI + 'edit_programs' + '?userID=' + method[0][1];
DeleteUserUrl = routeAPI + 'users/' + method[0][1];
GetEditProgramIDUrl = routeAPI + 'edit_programs' + '?name=' + (name+'') + '&type=' + (type+'') + '&source=' + (source+'');
PostEditProgramAPI(PostEditProgramUrl,name,type,source);
DeleteUserAPI(DeleteUserUrl);
GetEditProgramIDAPI(GetEditProgramIDUrl,routeAPI);
}
}
// window.location.href = "./index.html";
}
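// Hardening sketch (an assumption, not part of the original flow): the query
// strings above are built by raw concatenation, so a name/type/source that
// contains '&', '=' or '#' would corrupt the URL. Encoding each value would
// also remove the need for the newline-stripping workaround:
//
//   GetEditProgramIDUrl = routeAPI + 'edit_programs'
//       + '?name='   + encodeURIComponent(name)
//       + '&type='   + encodeURIComponent(type)
//       + '&source=' + encodeURIComponent(source);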
function getURL(theUrl){
return theUrl;
}
function decideAPI(routeAPI,theUrl){
var reg = new RegExp("\\?(.+?)$");
//Use GetAllUserAPI
if(theUrl.match(/users/) != null){
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
theUrl = routeAPI + 'users' +'?' + str;
console.log(theUrl);
GetAllUserAPI(theUrl);
}
else{
theUrl = routeAPI + 'users';
GetAllUserAPI(theUrl);
}
}
//Use GetEditProgramAPI
else if(theUrl.match(/detail/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("=");
theUrl = routeAPI + 'users/' + query[1];
GetEditProgramAPI(theUrl);
}
}
//Use GetSingleEditProgramAPI
else if(theUrl.match(/edit/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
theUrl = routeAPI + 'edit_programs/' + method[0][1];
GetSingleEditProgramAPI(theUrl);
}else{
}
}else{
}
}
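// Entry-point sketch (assumed wiring; each HTML page may differ): a page is
// expected to hand decideAPI() the API root plus its own location, e.g.:
//
//   decideAPI('http://localhost:3000/api/', getURL(window.location.href));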
| server/app/view/js/client.js | var socket = io.connect('http://localhost:8080');
socket.on('connect', function(msg) {
console.log("connet");
});
socket.on('title_message', function(msg) {
console.log("send_title");
console.log(msg);
document.getElementById("input_name").innerHTML = msg.value;
});
socket.on('type_message', function(msg) {
console.log("send_type");
console.log(msg);
document.getElementById("input_type").innerHTML = msg.value;
});
socket.on('source_message', function(msg) {
console.log("send_source");
console.log(msg);
document.getElementById("input_source").innerHTML = msg.value;
});
socket.on('os_message', function(msg) {
console.log("send_os");
console.log(msg);
document.getElementById("input_os").innerHTML = msg.value;
});
socket.on('browser_message', function(msg) {
console.log("send_browser");
console.log(msg);
document.getElementById("input_browser").innerHTML = msg.value;
});
function SendTitleMsg() {
var msg = document.getElementById("input_name").value;
socket.emit('title_message', { value: msg});
}
function SendTypeMsg() {
var msg = document.getElementById("input_type").value;
socket.emit('type_message', { value: msg});
}
function SendSourceMsg() {
var msg = document.getElementById("input_source").value;
socket.emit('source_message', { value: msg});
}
function SendOsMsg() {
if(document.getElementById("input_os").value != null){
var msg = document.getElementById("input_os").value;
socket.emit('os_message', { value: msg});
}
}
function SendBrowserMsg() {
if(document.getElementById("input_browser").value != null){
var msg = document.getElementById("input_browser").value;
socket.emit('browser_message', { value: msg});
}
}
function DisConnect() {
socket.emit('message', { value: '' }); // nothing meaningful to send at this point; emit an empty payload
socket.disconnect();
}
function keydown(){
var name = document.forms.EditProgram.input_name.value;
var type = document.forms.EditProgram.input_type.value;
var source = document.forms.EditProgram.input_source.value + ``;
SendTitleMsg();
SendTypeMsg();
SendSourceMsg();
SendOsMsg();
SendBrowserMsg();
}
// Draw html
function DrawUsersHTML(item){
var template = [
'<div class="mdl-cell mdl-cell--3-col mdl-cell--4-col-tablet mdl-cell--4-col-phone mdl-card mdl-shadow--3dp">',
'<div class="mdl-card__media">',
'</div>',
'<div class="mdl-card__title">',
'<h4 class="mdl-card__title-text"><%- name %></h4>',
'</div>',
'<div class="mdl-card__supporting-text">',
'<span class="mdl-typography--font-light mdl-typography--subhead">login:<%- is_login %></span>',
'</div>',
'<div class="mdl-card__actions">',
' <a class="android-link mdl-button mdl-js-button mdl-typography--text-uppercase" href="detail.html?userID=<%- _id %>">',
' 詳細画面へ',
' <i class="material-icons">chevron_right</i>',
' </a>',
'</div>',
'</div>'
].join("");
$("#users").append(_.template(template)({name :item.name,
is_login :item.is_login,
_id :item._id}));
}
function DrawSingleEditProgramsHTML(item){
var template = [
'<div class="mdl-card mdl-shadow--3dp ">',
'<div class="mdl-card__media">',
'</div>',
'<div class="mdl-card__title">',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_name"><%- name %></textarea>',
'<label class="mdl-textfield__label" for="source">title</label>',
'</div>',
' </div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_type"><%- name %></textarea>',
'<label class="mdl-textfield__label" for="source">type</label>',
'</div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "15" id="input_source"><%- source %></textarea>',
'<label class="mdl-textfield__label" for="source">edit_program</label>',
'</div>',
'</div>',
'<div id="support">',
'<input type="none" value="view Support" class="mdl-button mdl-js-button mdl-button--raised mdl-js-ripple-effect mdl-button--accent" onclick="display(!disp,true)">',
'</div>',
'<div id="disp">',
'</div>',
'</div>'
].join("");
console.log(item);
$("#EditProgram").append(_.template(template)({name:item.name,
type :item.type,
source :item.source}));
}
function DrawEditProgramsHTML(item,i){
var template = [
'<div class="mdl-cell mdl-cell--3-col mdl-cell--4-col-tablet mdl-cell--4-col-phone mdl-card mdl-shadow--3dp">',
'<div class="mdl-card__media">',
'</div>',
'<div class="mdl-card__title">',
'<h4 class="mdl-card__title-text"><%- name %></h4>',
'</div>',
'<div class="mdl-card__supporting-text">',
'<span class="mdl-typography--font-light mdl-typography--subhead">type:<%- type %></span>',
'</div>',
'<div class="mdl-card__actions">',
' <a class="android-link mdl-button mdl-js-button mdl-typography--text-uppercase" href="edit.html?editProgramID=<%- _id %>&userID=<%- u_id%>">',
' このコードを編集する',
' <i class="material-icons">chevron_right</i>',
' </a>',
'</div>',
'</div>'
].join("");
$("#editPrograms").append(_.template(template)({name:item.edit_programs[i].name,
type :item.edit_programs[i].type,
_id :item.edit_programs[i]._id,
u_id :item._id }));
}
function DrawSupportHTML(item){
var template = [
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_os"><%- os %></textarea>',
'</div>',
'</div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_browser"><%- browser %></textarea>',
'</div>',
'</div>'
].join("");
    // Fill in defaults for any missing fields, even when a partial support record exists.
    if(item[0] == null){
        item[0] = {};
    }
    if(item[0].os == null){
        item[0].os = "OS";
    }
    if(item[0].browser == null){
        item[0].browser = "Browser";
    }
$("#disp").append(_.template(template)({os : item[0].os,
browser :item[0].browser }));
}
function display(num,edit){
console.log(num,edit);
if (num == true && edit == false){
var template = [
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_os">OS</textarea>',
'</div>',
'</div>',
'<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
'<div class="mdl-textfield mdl-js-textfield">',
'<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_browser">Browser</textarea>',
'</div>',
'</div>'
].join("");
$("#disp").append(_.template(template)());
$("#support").click(function () {
$(this).hide();
return false;
});
}else if(num == true && edit == true){
var reg = new RegExp("\\?(.+?)$");
var theUrl = window.location.href;
routeAPI = 'http://localhost:3000/api/';
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
GetEditProgramIDUrl = routeAPI + 'edit_programs/' + method[0][1];
fetch(GetEditProgramIDUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
DrawSupportHTML(data.supports);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
}
function DrawCreateButtonHTML(item){
var template = [
'<button class="mdl-button mdl-js-button mdl-button--raised mdl-js-ripple-effect mdl-button--accent" onclick="location.href=\'./create.html?userID=<%- u_id %>\'">',
'Create Programs',
'</button>'
].join("");
$("#createButton").append(_.template(template)({u_id : item._id}));
}
function DrawSingleUserHTML(item){
var template = [
'<img class="mdl-cell mdl-cell--4-col" src="images/stationery.png"><div class="logo-font android-sub-slogan"><%- name %></div>'
].join("");
$("#user").append(_.template(template)({name : item.name}));
}
//HTTP Request
function GetAllUserAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
for(var i = 0; i < data.length; i++){
DrawUsersHTML(data[i]);
}
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function GetSingleEditProgramAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
DrawSingleEditProgramsHTML(data);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function GetEditProgramAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
DrawSingleUserHTML(data);
for(var i = 0; i < data.edit_programs.length; i++){
DrawEditProgramsHTML(data,i);
}
DrawCreateButtonHTML(data);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function GetEditProgramIDAPI(theUrl,routeAPI){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
console.log(data);
getUserIDFromEditProgramUrl = routeAPI + 'users' + '?name=' + (data.name + '') + '&type=' + (data.type +'') + '&source=' + (data.source + '');
getUserIDFromEditProgramAPI(getUserIDFromEditProgramUrl);
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function getUserIDFromEditProgramAPI(theUrl){
fetch(theUrl)
.then(
function(response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
// Examine the text in the response
response.json().then(function(data) {
console.log(data);
console.log(theUrl);
window.location.href = "./detail.html?userID=" + data[0]._id;
});
}
)
.catch(function(err) {
console.log('Fetch Error :-S', err);
});
}
function PostUserCreateAPI(theUrl,u_name,is_login){
fetch(theUrl, {
method: 'post',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: JSON.stringify({
name: 'Hubot',
is_login: 'false'
})
});
}
function PostEditProgramAPI(theUrl,e_name,e_type,e_source){
fetch(theUrl, {
method: 'post',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: JSON.stringify({
name: e_name + '',
type: e_type + '',
source: e_source + ''
})
});
}
function DeleteUserAPI(theUrl){
fetch(theUrl, {
method: 'delete',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
});
}
function DeleteEditProgramAPI(theUrl){
fetch(theUrl, {
method: 'delete',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
});
}
function saveProgram(routeAPI,theUrl){
var reg = new RegExp("\\?(.+?)$");
var name = document.forms.EditProgram.input_name.value;
var type = document.forms.EditProgram.input_type.value;
var source = document.forms.EditProgram.input_source.value + ``;
if(theUrl.match(/edit/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
DeleteEditProgramUrl = routeAPI + 'edit_programs/' + method[0][1];
PostEditProgramUrl = routeAPI + 'edit_programs' + '?userID=' + method[1][1];
DeleteUserUrl = routeAPI + 'users/' + method[1][1];
GetEditProgramIDUrl = routeAPI + 'edit_programs' + '?name=' + (name+'') + '&type=' + (type+'') + '&source=' + (source+'');
DeleteEditProgramAPI(DeleteEditProgramUrl);
PostEditProgramAPI(PostEditProgramUrl,name,type,source);
DeleteUserAPI(DeleteUserUrl);
GetEditProgramIDAPI(GetEditProgramIDUrl,routeAPI);
}
}
else if(theUrl.match(/create/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
PostEditProgramUrl = routeAPI + 'edit_programs' + '?userID=' + method[0][1];
DeleteUserUrl = routeAPI + 'users/' + method[0][1];
GetEditProgramIDUrl = routeAPI + 'edit_programs' + '?name=' + (name+'') + '&type=' + (type+'') + '&source=' + (source+'');
PostEditProgramAPI(PostEditProgramUrl,name,type,source);
DeleteUserAPI(DeleteUserUrl);
GetEditProgramIDAPI(GetEditProgramIDUrl,routeAPI);
console.log(PostEditProgramUrl);
console.log(DeleteUserUrl);
console.log(GetEditProgramIDUrl);
}
}
// window.location.href = "./index.html";
}
function getURL(theUrl){
return theUrl;
}
function decideAPI(routeAPI,theUrl){
var reg = new RegExp("\\?(.+?)$");
//Use GetAllUserAPI
if(theUrl.match(/users/) != null){
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
theUrl = routeAPI + 'users' +'?' + str;
console.log(theUrl);
GetAllUserAPI(theUrl);
}
else{
theUrl = routeAPI + 'users';
GetAllUserAPI(theUrl);
}
}
//Use GetEditProgramAPI
else if(theUrl.match(/detail/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("=");
theUrl = routeAPI + 'users/' + query[1];
GetEditProgramAPI(theUrl);
}
}
//Use GetSingleEditProgramAPI
else if(theUrl.match(/edit/) != null){
// Take everything after the '?' in the URL (i.e. the query parameters)
if(theUrl.match(reg)){
var str = theUrl.match(reg)[1];
var query = str.split("&");
var method = []
for (var i = 0;i < query.length;i++){
method[i] = query[i].split("=");
}
theUrl = routeAPI + 'edit_programs/' + method[0][1];
GetSingleEditProgramAPI(theUrl);
}else{
}
}else{
}
}
| Saving in English now works
| server/app/view/js/client.js | Saving in English now works | <ide><path>erver/app/view/js/client.js
<ide> '</div>',
<ide> ' </div>',
<ide> '<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
<del> '<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_type"><%- name %></textarea>',
<add> '<textarea class="mdl-textfield__input" type="text" rows= "1" id="input_type"><%- type %></textarea>',
<ide> '<label class="mdl-textfield__label" for="source">type</label>',
<ide> '</div>',
<ide> '<div class="mdl-textfield mdl-js-textfield mdl-textfield--floating-label">',
<ide> '</div>',
<ide> '</div>'
<ide> ].join("");
<del> console.log(item);
<ide> $("#EditProgram").append(_.template(template)({name:item.name,
<ide> type :item.type,
<ide> source :item.source}));
<ide> $("#editPrograms").append(_.template(template)({name:item.edit_programs[i].name,
<ide> type :item.edit_programs[i].type,
<ide> _id :item.edit_programs[i]._id,
<del> u_id :item._id }));
<add> u_id :item._id}));
<ide> }
<ide>
<ide> function DrawSupportHTML(item){
<ide> }
<ide> }
<ide>
<add> $("#support").click(function () {
<add> $(this).hide();
<add> return false;
<add> });
<ide> $("#disp").append(_.template(template)({os : item[0].os,
<ide> browser :item[0].browser }));
<ide>
<ide> }
<ide>
<ide> function GetEditProgramIDAPI(theUrl,routeAPI){
<add> console.log(theUrl);
<ide> fetch(theUrl)
<ide> .then(
<ide> function(response) {
<ide> for (var i = 0;i < query.length;i++){
<ide> method[i] = query[i].split("=");
<ide> }
<add> //DeleteSupportUrl = routeAPI + 'supports/' + method[0][1];
<add> //PostSupportUrl = routeAPI + 'supports' + '?editProgramID=' + method[0][1];
<add> //PostSupportAPI(PostEditProgramUrl,os,browser);
<add>
<ide> DeleteEditProgramUrl = routeAPI + 'edit_programs/' + method[0][1];
<ide> PostEditProgramUrl = routeAPI + 'edit_programs' + '?userID=' + method[1][1];
<ide> DeleteUserUrl = routeAPI + 'users/' + method[1][1];
<del> GetEditProgramIDUrl = routeAPI + 'edit_programs' + '?name=' + (name+'') + '&type=' + (type+'') + '&source=' + (source+'');
<del>
<add> name = name.replace(/\n+$/g,'');
<add> type = type.replace(/\n+$/g,'');
<add> source = source.replace(/\n+$/g,'');
<add> source = source.replace(/\n+/g,'');
<add> url_name = '?name=' + (name);
<add> url_type = '&type=' + (type);
<add> url_source = '&source=' + (source);
<add> console.log("test");
<add> console.log(name,type,source);
<add> GetEditProgramIDUrl = routeAPI + 'edit_programs' + url_name + url_type + url_source;
<add>
<add> console.log(GetEditProgramIDUrl);
<ide> DeleteEditProgramAPI(DeleteEditProgramUrl);
<ide> PostEditProgramAPI(PostEditProgramUrl,name,type,source);
<ide> DeleteUserAPI(DeleteUserUrl);
<ide> DeleteUserAPI(DeleteUserUrl);
<ide> GetEditProgramIDAPI(GetEditProgramIDUrl,routeAPI);
<ide>
<del> console.log(PostEditProgramUrl);
<del> console.log(DeleteUserUrl);
<del> console.log(GetEditProgramIDUrl);
<ide> }
<ide> }
<ide> |
|
JavaScript | mit | f1044cac0fa85c4171103d3b864c1b8ab26de6b1 | 0 | lassecph/backbone.bootstrap-modal,powmedia/backbone.bootstrap-modal,squidsolutions/backbone.bootstrap-modal,tindustry/backbone.bootstrap-modal,tindustry/backbone.bootstrap-modal,Mediamorph/backbone.bootstrap-modal | /**
* Bootstrap Modal wrapper for use with Backbone.
*
* Takes care of instantiation, manages multiple modals,
* adds several options and removes the element from the DOM when closed
*
* @author Charles Davison <[email protected]>
*
* Events:
* shown: Fired when the modal has finished animating in
* hidden: Fired when the modal has finished animating out
* cancel: The user dismissed the modal
* ok: The user clicked OK
*/
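//
// Usage sketch (assumptions: jQuery, Underscore, Backbone and Bootstrap's modal
// plugin are loaded; `ItemView` stands in for any Backbone view):
//
//   var modal = new Backbone.BootstrapModal({
//     content: new ItemView({ model: item }),
//     title: 'Edit item',
//     animate: true
//   }).open(function() {
//     item.save();                         // runs only when OK is pressed
//   });
//
//   modal.on('cancel', function() { /* the user dismissed the dialog */ });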
(function($, _, Backbone) {
//Set custom template settings
var _interpolateBackup = _.templateSettings;
_.templateSettings = {
interpolate: /\{\{(.+?)\}\}/g,
evaluate: /<%([\s\S]+?)%>/g
}
var template = _.template('\
<% if (title) { %>\
<div class="modal-header">\
<% if (allowCancel) { %>\
<a class="close">×</a>\
<% } %>\
<h3>{{title}}</h3>\
</div>\
<% } %>\
<div class="modal-body">{{content}}</div>\
<div class="modal-footer">\
<% if (allowCancel) { %>\
<% if (cancelText) { %>\
<a href="#" class="btn cancel">{{cancelText}}</a>\
<% } %>\
<% } %>\
<a href="#" class="btn ok btn-primary">{{okText}}</a>\
</div>\
');
//Reset to users' template settings
_.templateSettings = _interpolateBackup;
var Modal = Backbone.View.extend({
className: 'modal',
events: {
'click .close': function(event) {
event.preventDefault();
this.trigger('cancel');
},
'click .cancel': function(event) {
event.preventDefault();
this.trigger('cancel');
},
'click .ok': function(event) {
event.preventDefault();
this.trigger('ok');
this.close();
}
},
/**
* Creates an instance of a Bootstrap Modal
*
* @see http://twitter.github.com/bootstrap/javascript.html#modals
*
* @param {Object} options
* @param {String|View} [options.content] Modal content. Default: none
* @param {String} [options.title] Title. Default: none
* @param {String} [options.okText] Text for the OK button. Default: 'OK'
* @param {String} [options.cancelText] Text for the cancel button. Default: 'Cancel'. If passed a falsey value, the button will be removed
* @param {Boolean} [options.allowCancel] Whether the modal can be closed, other than by pressing OK. Default: true
* @param {Boolean} [options.escape] Whether the 'esc' key can dismiss the modal. Default: true, but false if options.cancellable is true
* @param {Boolean} [options.animate] Whether to animate in/out. Default: false
* @param {Function} [options.template] Compiled underscore template to override the default one
*/
initialize: function(options) {
this.options = _.extend({
title: null,
okText: 'OK',
cancelText: 'Cancel',
allowCancel: true,
escape: true,
animate: false,
template: template
}, options);
},
/**
* Creates the DOM element
*
* @api private
*/
render: function() {
var $el = this.$el,
options = this.options,
content = options.content;
//Create the modal container
$el.html(options.template(options));
var $content = this.$content = $el.find('.modal-body');
//Insert the main content if it's a view
if (content.$el) {
content.render();
$el.find('.modal-body').html(content.$el);
}
if (options.animate) $el.addClass('fade');
this.isRendered = true;
return this;
},
/**
* Renders and shows the modal
*
* @param {Function} [cb] Optional callback that runs only when OK is pressed.
*/
open: function(cb) {
if (!this.isRendered) this.render();
var self = this,
$el = this.$el;
//Create it
$el.modal({
keyboard: this.options.allowCancel,
backdrop: this.options.allowCancel ? true : 'static'
});
//Focus OK button
$el.one('shown', function() {
$el.find('.btn.ok').focus();
self.trigger('shown');
});
//Adjust the modal and backdrop z-index; for dealing with multiple modals
var numModals = Modal.count,
$backdrop = $('.modal-backdrop:eq('+numModals+')'),
backdropIndex = parseInt($backdrop.css('z-index'), 10),  //css() returns a string;
elIndex = parseInt($backdrop.css('z-index'), 10);        //parse so the addition below stays numeric
$backdrop.css('z-index', backdropIndex + numModals);
this.$el.css('z-index', elIndex + numModals);
if (this.options.allowCancel) {
$backdrop.one('click', function() {
self.trigger('cancel');
});
$(document).one('keyup.dismiss.modal', function (e) {
e.which == 27 && self.trigger('cancel');
});
}
this.on('cancel', function() {
self.close();
});
Modal.count++;
//Run callback on OK if provided
if (cb) {
self.on('ok', cb);
}
return this;
},
/**
* Closes the modal
*/
close: function() {
var self = this,
$el = this.$el;
//Check if the modal should stay open
if (this._preventClose) {
this._preventClose = false;
return;
}
$el.one('hidden', function() {
self.remove();
self.trigger('hidden');
});
$el.modal('hide');
Modal.count--;
},
/**
* Stop the modal from closing.
* Can be called from within a 'close' or 'ok' event listener.
*/
preventClose: function() {
this._preventClose = true;
}
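    // Example (sketch): keeping the modal open when validation fails --
    //
    //   modal.on('ok', function() {
    //     if (!isValid()) modal.preventClose();
    //   });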
}, {
//STATICS
//The number of modals on display
count: 0
});
//EXPORTS
//CommonJS
if (typeof require == 'function' && typeof module !== 'undefined' && exports) {
module.exports = Modal;
}
//AMD / RequireJS
if (typeof define === 'function' && define.amd) {
return define(function() {
Backbone.BootstrapModal = Modal;
})
}
//Regular; add to Backbone.Bootstrap.Modal
else {
Backbone.BootstrapModal = Modal;
}
})(jQuery, _, Backbone);
| src/backbone.bootstrap-modal.js | /**
* Bootstrap Modal wrapper for use with Backbone.
*
* Takes care of instantiation, manages multiple modals,
* adds several options and removes the element from the DOM when closed
*
* @author Charles Davison <[email protected]>
*
* Events:
* shown: Fired when the modal has finished animating in
* hidden: Fired when the modal has finished animating out
* cancel: The user dismissed the modal
* ok: The user clicked OK
*/
(function($, _, Backbone) {
//Set custom template settings
var _interpolateBackup = _.templateSettings;
_.templateSettings = {
interpolate: /\{\{(.+?)\}\}/g,
evaluate: /<%([\s\S]+?)%>/g
}
var template = _.template('\
<% if (title) { %>\
<div class="modal-header">\
<% if (allowCancel) { %>\
<a class="close">×</a>\
<% } %>\
<h3>{{title}}</h3>\
</div>\
<% } %>\
<div class="modal-body">{{content}}</div>\
<div class="modal-footer">\
<% if (allowCancel) { %>\
<% if (cancelText) { %>\
<a href="#" class="btn cancel">{{cancelText}}</a>\
<% } %>\
<% } %>\
<a href="#" class="btn ok btn-primary">{{okText}}</a>\
</div>\
');
//Reset to users' template settings
_.templateSettings = _interpolateBackup;
var Modal = Backbone.View.extend({
className: 'modal',
events: {
'click .close': function(event) {
event.preventDefault();
this.trigger('cancel');
},
'click .cancel': function(event) {
event.preventDefault();
this.trigger('cancel');
},
'click .ok': function(event) {
event.preventDefault();
this.trigger('ok');
this.close();
}
},
/**
* Creates an instance of a Bootstrap Modal
*
* @see http://twitter.github.com/bootstrap/javascript.html#modals
*
* @param {Object} options
* @param {String|View} [options.content] Modal content. Default: none
* @param {String} [options.title] Title. Default: none
* @param {String} [options.okText] Text for the OK button. Default: 'OK'
* @param {String} [options.cancelText] Text for the cancel button. Default: 'Cancel'. If passed a falsey value, the button will be removed
 * @param {Boolean} [options.allowCancel]   Whether the modal can be closed, other than by pressing OK. Default: true
* @param {Boolean} [options.escape] Whether the 'esc' key can dismiss the modal. Default: true, but false if options.cancellable is true
* @param {Boolean} [options.animate] Whether to animate in/out. Default: false
* @param {Function} [options.template] Compiled underscore template to override the default one
*/
initialize: function(options) {
this.options = _.extend({
title: null,
okText: 'OK',
cancelText: 'Cancel',
allowCancel: true,
escape: true,
animate: false,
template: template
}, options);
},
/**
* Creates the DOM element
*
* @api private
*/
render: function() {
var $el = this.$el,
options = this.options,
content = options.content;
//Create the modal container
$el.html(options.template(options));
var $content = this.$content = $el.find('.modal-body')
//Insert the main content if it's a view
if (content.$el) {
content.render();
$el.find('.modal-body').html(content.$el);
}
if (options.animate) $el.addClass('fade');
this.isRendered = true;
return this;
},
/**
* Renders and shows the modal
*
* @param {Function} [cb] Optional callback that runs only when OK is pressed.
*/
open: function(cb) {
if (!this.isRendered) this.render();
var self = this,
$el = this.$el;
//Create it
$el.modal({
keyboard: this.options.allowCancel,
backdrop: this.options.allowCancel ? true : 'static'
});
//Focus OK button
$el.one('shown', function() {
$el.find('.btn.ok').focus();
self.trigger('shown');
});
//Adjust the modal and backdrop z-index; for dealing with multiple modals
      var numModals = Modal.count,
          $backdrop = $('.modal-backdrop:eq('+numModals+')'),
          //css('z-index') returns a string; parse it so the additions below are numeric
          backdropIndex = parseInt($backdrop.css('z-index'), 10),
          elIndex = parseInt($backdrop.css('z-index'), 10);
      $backdrop.css('z-index', backdropIndex + numModals);
      this.$el.css('z-index', elIndex + numModals);
if (this.options.allowCancel) {
$backdrop.one('click', function() {
self.trigger('cancel');
});
$(document).one('keyup.dismiss.modal', function (e) {
e.which == 27 && self.trigger('cancel');
});
}
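      //Any cancel (backdrop click, esc key, close/cancel buttons) closes the modal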
this.on('cancel', function() {
self.close();
});
Modal.count++;
//Run callback on OK if provided
if (cb) {
self.on('ok', cb);
}
return this;
},
/**
* Closes the modal
*/
close: function() {
var self = this,
$el = this.$el;
//Check if the modal should stay open
if (this._preventClose) {
this._preventClose = false;
return;
}
$el.modal('hide');
$el.one('hidden', function() {
self.remove();
self.trigger('hidden');
});
Modal.count--;
},
/**
* Stop the modal from closing.
* Can be called from within a 'close' or 'ok' event listener.
*/
preventClose: function() {
this._preventClose = true;
}
}, {
//STATICS
//The number of modals on display
count: 0
});
//EXPORTS
//CommonJS
if (typeof require == 'function' && typeof module !== 'undefined' && exports) {
module.exports = Modal;
}
//AMD / RequireJS
if (typeof define === 'function' && define.amd) {
return define(function() {
Backbone.BootstrapModal = Modal;
})
}
//Regular; add to Backbone.Bootstrap.Modal
else {
Backbone.BootstrapModal = Modal;
}
})(jQuery, _, Backbone);
| Installs event handler for "hidden" before actually triggering it
Even when 'hide' is fast, on('hidden') will be already there | src/backbone.bootstrap-modal.js | Installs event handler for "hidden" before actually triggering it | <ide><path>rc/backbone.bootstrap-modal.js
<ide> return;
<ide> }
<ide>
<del> $el.modal('hide');
<del>
<ide> $el.one('hidden', function() {
<ide> self.remove();
<ide>
<ide> self.trigger('hidden');
<ide> });
<add>
<add> $el.modal('hide');
<ide>
<ide> Modal.count--;
<ide> }, |
|
Java | bsd-2-clause | 6caab32c72582159ffaa778e6cce2913d6156f0d | 0 | carat-project/carat,carat-project/carat,carat-project/carat,carat-project/carat,carat-project/carat | package edu.berkeley.cs.amplab.carat.android.protocol;
import java.util.ArrayList;
import java.util.List;
import org.apache.thrift.TException;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.Log;
import edu.berkeley.cs.amplab.carat.android.CaratApplication;
import edu.berkeley.cs.amplab.carat.android.SamplingLibrary;
import edu.berkeley.cs.amplab.carat.thrift.CaratService;
import edu.berkeley.cs.amplab.carat.thrift.Feature;
import edu.berkeley.cs.amplab.carat.thrift.HogBugReport;
import edu.berkeley.cs.amplab.carat.thrift.Registration;
import edu.berkeley.cs.amplab.carat.thrift.Reports;
import edu.berkeley.cs.amplab.carat.thrift.Sample;
public class CommunicationManager {
// Freshness timeout. Default: one hour
// public static final long FRESHNESS_TIMEOUT = 3600000L;
// 5 minutes
public static final long FRESHNESS_TIMEOUT = 300000L;
public static final String PREFERENCE_FIRST_RUN = "carat.first.run";
private CaratService.Client c = null;
private CaratApplication a = null;
private boolean register = true;
private SharedPreferences p = null;
public CommunicationManager(CaratApplication a) {
this.a = a;
p = PreferenceManager.getDefaultSharedPreferences(this.a);
register = p.getBoolean(PREFERENCE_FIRST_RUN, true);
}
private void registerMe(String uuId, String os, String model)
throws TException {
if (uuId == null || os == null || model == null) {
Log.e("registerMe", "Null uuId, os, or model given to registerMe!");
System.exit(1);
return;
}
Registration registration = new Registration(uuId);
registration.setPlatformId(model);
registration.setSystemVersion(os);
registration.setTimestamp(System.currentTimeMillis() / 1000.0);
c.registerMe(registration);
}
public void uploadSample(Sample sample) throws TException {
registerOnFirstRun();
// FIXME: This may be stupid, but always use a new connection.
// Alternative: Make sure c opens the connection if it is
// stale/closed/nonexistent.
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("uploadSample", "We are disconnected, not uploading.");
return;
}
c.uploadSample(sample);
ProtocolClient.close();
}
public void uploadSamples(Sample[] samples) throws TException {
registerOnFirstRun();
// FIXME: This may be stupid, but always use a new connection.
// Alternative: Make sure c opens the connection if it is
// stale/closed/nonexistent.
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("uploadSample", "We are disconnected, not uploading.");
return;
}
for (Sample s : samples)
c.uploadSample(s);
ProtocolClient.close();
}
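    /**
     * Registers this device with the Carat server on the first run of the app;
     * on success the first-run preference is cleared so registration is not repeated.
     */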
public void registerOnFirstRun() {
if (register) {
String uuId = SamplingLibrary.getUuid(a.getApplicationContext());
String os = SamplingLibrary.getOsVersion();
String model = SamplingLibrary.getModel();
Log.i("CommunicationManager",
"First run, registering this device: " + uuId + ", " + os
+ ", " + model);
try {
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("register", "We are disconnected, not registering.");
return;
}
registerMe(uuId, os, model);
p.edit().putBoolean(PREFERENCE_FIRST_RUN, false).commit();
register = false;
} catch (TException e) {
Log.e("CommunicationManager",
"Registration failed, will try again next time: " + e);
e.printStackTrace();
}
}
}
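    /**
     * Refreshes the main, bug and hog reports, skipping the download when the
     * cached reports are younger than FRESHNESS_TIMEOUT.
     */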
public void refreshReports() {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
registerOnFirstRun();
String uuId = SamplingLibrary.getUuid(a);
String model = SamplingLibrary.getModel();
String OS = SamplingLibrary.getOsVersion();
// FIXME: Fake data for now
uuId = "2DEC05A1-C2DF-4D57-BB0F-BA29B02E4ABE";
model = "iPhone 3GS";
OS = "5.0.1";
try {
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
refreshMainReports(uuId, OS, model);
refreshBugReports(uuId, model);
refreshHogReports(uuId, model);
ProtocolClient.close();
a.s.writeFreshness();
} catch (TException e) {
Log.e("refreshReports", "Could not download new reports!");
e.printStackTrace();
}
}
private void refreshMainReports(String uuid, String os, String model)
throws TException {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
Reports r = c.getReports(uuid, getFeatures("Model", model, "OS", os));
// Assume multiple invocations, do not close
// ProtocolClient.close();
a.s.writeReports(r);
// Assume freshness written by caller.
// s.writeFreshness();
}
private void refreshBugReports(String uuid, String model) throws TException {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
HogBugReport r = c.getHogOrBugReport(uuid,
getFeatures("ReportType", "Bug", "Model", model));
// Assume multiple invocations, do not close
// ProtocolClient.close();
a.s.writeBugReport(r);
// Assume freshness written by caller.
// s.writeFreshness();
}
private void refreshHogReports(String uuid, String model) throws TException {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
HogBugReport r = c.getHogOrBugReport(uuid,
getFeatures("ReportType", "Hog", "Model", model));
// Assume multiple invocations, do not close
// ProtocolClient.close();
a.s.writeHogReport(r);
// Assume freshness written by caller.
// s.writeFreshness();
}
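    /**
     * Builds the two-entry Feature list sent along with report requests.
     */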
private List<Feature> getFeatures(String key1, String val1, String key2,
String val2) {
List<Feature> features = new ArrayList<Feature>();
if (key1 == null || val1 == null || key2 == null || val2 == null) {
Log.e("getFeatures", "Null key or value given to getFeatures!");
System.exit(1);
return features;
}
Feature feature = new Feature();
feature.setKey(key1);
feature.setValue(val1);
features.add(feature);
feature = new Feature();
feature.setKey(key2);
feature.setValue(val2);
features.add(feature);
return features;
}
}
| app/android/src/edu/berkeley/cs/amplab/carat/android/protocol/CommunicationManager.java | package edu.berkeley.cs.amplab.carat.android.protocol;
import java.util.ArrayList;
import java.util.List;
import org.apache.thrift.TException;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.Log;
import edu.berkeley.cs.amplab.carat.android.CaratApplication;
import edu.berkeley.cs.amplab.carat.android.SamplingLibrary;
import edu.berkeley.cs.amplab.carat.thrift.CaratService;
import edu.berkeley.cs.amplab.carat.thrift.Feature;
import edu.berkeley.cs.amplab.carat.thrift.HogBugReport;
import edu.berkeley.cs.amplab.carat.thrift.Registration;
import edu.berkeley.cs.amplab.carat.thrift.Reports;
import edu.berkeley.cs.amplab.carat.thrift.Sample;
public class CommunicationManager {
// Freshness timeout. Default: one hour
// public static final long FRESHNESS_TIMEOUT = 3600000L;
// 5 minutes
public static final long FRESHNESS_TIMEOUT = 300000L;
public static final String PREFERENCE_FIRST_RUN = "carat.first.run";
private CaratService.Client c = null;
private CaratApplication a = null;
private boolean register = true;
private SharedPreferences p = null;
public CommunicationManager(CaratApplication a) {
this.a = a;
p = PreferenceManager.getDefaultSharedPreferences(this.a);
register = p.getBoolean(PREFERENCE_FIRST_RUN, true);
}
private void registerMe(String uuId, String os, String model)
throws TException {
if (uuId == null || os == null || model == null) {
Log.e("registerMe", "Null uuId, os, or model given to registerMe!");
System.exit(1);
return;
}
Registration registration = new Registration(uuId);
registration.setPlatformId(model);
registration.setSystemVersion(os);
registration.setTimestamp(System.currentTimeMillis() / 1000.0);
c.registerMe(registration);
}
public void uploadSample(Sample sample) throws TException {
registerOnFirstRun();
// FIXME: This may be stupid, but always use a new connection.
// Alternative: Make sure c opens the connection if it is stale/closed/nonexistent.
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("uploadSample", "We are disconnected, not uploading.");
return;
}
c.uploadSample(sample);
ProtocolClient.close();
}
public void uploadSamples(Sample[] samples) throws TException {
registerOnFirstRun();
// FIXME: This may be stupid, but always use a new connection.
// Alternative: Make sure c opens the connection if it is stale/closed/nonexistent.
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("uploadSample", "We are disconnected, not uploading.");
return;
}
for (Sample s: samples)
c.uploadSample(s);
ProtocolClient.close();
}
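    /**
     * Registers this device with the Carat server on the first run of the app;
     * on success the first-run preference is cleared so registration is not repeated.
     */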
public void registerOnFirstRun() {
if (register) {
String uuId = SamplingLibrary.getUuid(a.getApplicationContext());
String os = SamplingLibrary.getOsVersion();
String model = SamplingLibrary.getModel();
Log.i("CommunicationManager",
"First run, registering this device: " + uuId + ", " + os
+ ", " + model);
try {
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("register",
"We are disconnected, not registering.");
return;
}
registerMe(uuId, os, model);
p.edit().putBoolean(PREFERENCE_FIRST_RUN, false).commit();
register = false;
} catch (TException e) {
Log.e("CommunicationManager",
"Registration failed, will try again next time: " + e);
e.printStackTrace();
}
}
}
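    /**
     * Refreshes the main, bug and hog reports, skipping the download when the
     * cached reports are younger than FRESHNESS_TIMEOUT.
     */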
public void refreshReports() {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
registerOnFirstRun();
// FIXME: Fake data for now
String uuId = "2DEC05A1-C2DF-4D57-BB0F-BA29B02E4ABE";
String model = "iPhone 3GS";
String OS = "5.0.1";
try {
c = ProtocolClient.getInstance(a.getApplicationContext());
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
refreshMainReports(uuId, OS, model);
refreshBugReports(uuId, model);
refreshHogReports(uuId, model);
ProtocolClient.close();
a.s.writeFreshness();
} catch (TException e) {
Log.e("refreshReports", "Could not download new reports!");
e.printStackTrace();
}
}
private void refreshMainReports(String uuid, String os, String model)
throws TException {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
Reports r = c.getReports(uuid, getFeatures("Model", model, "OS", os));
// Assume multiple invocations, do not close
// ProtocolClient.close();
a.s.writeReports(r);
// Assume freshness written by caller.
// s.writeFreshness();
}
private void refreshBugReports(String uuid, String model) throws TException {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
HogBugReport r = c.getHogOrBugReport(uuid,
getFeatures("ReportType", "Bug", "Model", model));
// Assume multiple invocations, do not close
// ProtocolClient.close();
a.s.writeBugReport(r);
// Assume freshness written by caller.
// s.writeFreshness();
}
private void refreshHogReports(String uuid, String model) throws TException {
if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
return;
if (c == null) {
Log.e("refreshReports", "We are disconnected, not refreshing.");
return;
}
HogBugReport r = c.getHogOrBugReport(uuid,
getFeatures("ReportType", "Hog", "Model", model));
// Assume multiple invocations, do not close
// ProtocolClient.close();
a.s.writeHogReport(r);
// Assume freshness written by caller.
// s.writeFreshness();
}
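    /**
     * Builds the two-entry Feature list sent along with report requests.
     */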
private List<Feature> getFeatures(String key1, String val1, String key2,
String val2) {
List<Feature> features = new ArrayList<Feature>();
if (key1 == null || val1 == null || key2 == null || val2 == null) {
Log.e("getFeatures", "Null key or value given to getFeatures!");
System.exit(1);
return features;
}
Feature feature = new Feature();
feature.setKey(key1);
feature.setValue(val1);
features.add(feature);
feature = new Feature();
feature.setKey(key2);
feature.setValue(val2);
features.add(feature);
return features;
}
}
| Step towards real data receiving.
| app/android/src/edu/berkeley/cs/amplab/carat/android/protocol/CommunicationManager.java | Step towards real data receiving. | <ide><path>pp/android/src/edu/berkeley/cs/amplab/carat/android/protocol/CommunicationManager.java
<ide> public void uploadSample(Sample sample) throws TException {
<ide> registerOnFirstRun();
<ide> // FIXME: This may be stupid, but always use a new connection.
<del> // Alternative: Make sure c opens the connection if it is stale/closed/nonexistent.
<add> // Alternative: Make sure c opens the connection if it is
<add> // stale/closed/nonexistent.
<ide> c = ProtocolClient.getInstance(a.getApplicationContext());
<ide> if (c == null) {
<ide> Log.e("uploadSample", "We are disconnected, not uploading.");
<ide> c.uploadSample(sample);
<ide> ProtocolClient.close();
<ide> }
<del>
<add>
<ide> public void uploadSamples(Sample[] samples) throws TException {
<ide> registerOnFirstRun();
<ide> // FIXME: This may be stupid, but always use a new connection.
<del> // Alternative: Make sure c opens the connection if it is stale/closed/nonexistent.
<add> // Alternative: Make sure c opens the connection if it is
<add> // stale/closed/nonexistent.
<ide> c = ProtocolClient.getInstance(a.getApplicationContext());
<ide> if (c == null) {
<ide> Log.e("uploadSample", "We are disconnected, not uploading.");
<ide> return;
<ide> }
<del> for (Sample s: samples)
<add> for (Sample s : samples)
<ide> c.uploadSample(s);
<ide> ProtocolClient.close();
<ide> }
<ide> try {
<ide> c = ProtocolClient.getInstance(a.getApplicationContext());
<ide> if (c == null) {
<del> Log.e("register",
<del> "We are disconnected, not registering.");
<add> Log.e("register", "We are disconnected, not registering.");
<ide> return;
<ide> }
<ide> registerMe(uuId, os, model);
<ide> if (System.currentTimeMillis() - a.s.getFreshness() < FRESHNESS_TIMEOUT)
<ide> return;
<ide> registerOnFirstRun();
<add>
<add> String uuId = SamplingLibrary.getUuid(a);
<add> String model = SamplingLibrary.getModel();
<add> String OS = SamplingLibrary.getOsVersion();
<add>
<ide> // FIXME: Fake data for now
<del> String uuId = "2DEC05A1-C2DF-4D57-BB0F-BA29B02E4ABE";
<del> String model = "iPhone 3GS";
<del> String OS = "5.0.1";
<add>
<add> uuId = "2DEC05A1-C2DF-4D57-BB0F-BA29B02E4ABE";
<add> model = "iPhone 3GS";
<add> OS = "5.0.1";
<ide>
<ide> try {
<ide> c = ProtocolClient.getInstance(a.getApplicationContext()); |
|
Java | agpl-3.0 | df1cd2fff97c696f1f9c7d481a15fb913aac4473 | 0 | KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1,PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver | package nl.mpi.kinnate.svg;
import java.awt.Cursor;
import java.awt.Point;
import java.awt.event.MouseEvent;
import java.net.URI;
import java.net.URISyntaxException;
import org.w3c.dom.events.Event;
import org.w3c.dom.events.EventListener;
import org.apache.batik.dom.events.DOMMouseEvent;
import javax.swing.event.MouseInputAdapter;
import nl.mpi.arbil.data.ArbilDataNodeLoader;
import nl.mpi.arbil.ui.GuiHelper;
import org.w3c.dom.Element;
/**
* Document : MouseListenerSvg
* Created on : Mar 9, 2011, 3:21:53 PM
* Author : Peter Withers
*/
public class MouseListenerSvg extends MouseInputAdapter implements EventListener {
private Cursor preDragCursor;
private GraphPanel graphPanel;
private Point startDragPoint = null;
static boolean mouseActionOnNode = false;
static boolean mouseActionIsPopupTrigger = false;
static boolean mouseActionIsDrag = false;
public MouseListenerSvg(GraphPanel graphPanelLocal) {
graphPanel = graphPanelLocal;
}
@Override
public void mouseDragged(MouseEvent me) {
if (startDragPoint != null) {
// System.out.println("mouseDragged: " + me.toString());
if (graphPanel.selectedGroupId.size() > 0) {
graphPanel.svgCanvas.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR));
                // limit the drag to the distance dragged, not the location
graphPanel.svgUpdateHandler.updateDragNode(me.getPoint().x - startDragPoint.x, me.getPoint().y - startDragPoint.y);
} else {
graphPanel.svgUpdateHandler.dragCanvas(me.getPoint().x - startDragPoint.x, me.getPoint().y - startDragPoint.y);
}
mouseActionIsDrag = true;
} else {
graphPanel.svgUpdateHandler.startDrag();
}
startDragPoint = me.getPoint();
}
@Override
public void mouseReleased(MouseEvent me) {
// System.out.println("mouseReleased: " + me.toString());
graphPanel.svgCanvas.setCursor(preDragCursor);
startDragPoint = null;
if (!mouseActionIsDrag && !mouseActionIsPopupTrigger && !mouseActionOnNode && me.getButton() == MouseEvent.BUTTON1) { // todo: button1 could cause issues for left handed people with swapped mouse buttons
System.out.println("Clear selection");
graphPanel.selectedGroupId.clear();
graphPanel.svgUpdateHandler.updateSvgSelectionHighlights();
}
mouseActionOnNode = false;
}
@Override
public void mousePressed(MouseEvent e) {
mouseActionIsDrag = false;
mouseActionIsPopupTrigger = e.isPopupTrigger();
}
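    /**
     * Handles DOM click events on entity nodes in the SVG: toggles the clicked
     * node in the selection (shift-click for multi-select), updates the
     * highlights and refreshes the linked table.
     */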
@Override
public void handleEvent(Event evt) {
mouseActionOnNode = true;
boolean shiftDown = false;
if (evt instanceof DOMMouseEvent) {
shiftDown = ((DOMMouseEvent) evt).getShiftKey();
}
System.out.println("dom mouse event: " + evt.getCurrentTarget());
Element currentDraggedElement = ((Element) evt.getCurrentTarget());
preDragCursor = graphPanel.svgCanvas.getCursor();
// get the entityPath
        // todo: change selected elements to use the ID not the path, but this change will affect the way the imdi path is obtained to show the table
String entityIdentifier = currentDraggedElement.getAttribute("id");
System.out.println("entityPath: " + entityIdentifier);
boolean nodeAlreadySelected = graphPanel.selectedGroupId.contains(entityIdentifier);
if (!shiftDown) {
System.out.println("Clear selection");
graphPanel.selectedGroupId.clear();
graphPanel.selectedGroupId.add(entityIdentifier);
} else {
// toggle the highlight
if (nodeAlreadySelected) {
graphPanel.selectedGroupId.remove(entityIdentifier);
} else {
graphPanel.selectedGroupId.add(entityIdentifier);
}
}
graphPanel.svgUpdateHandler.updateSvgSelectionHighlights();
// if (existingHighlight == null) {
// svgCanvas.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR));
// }
// update the table selection
if (graphPanel.arbilTableModel != null) {
graphPanel.arbilTableModel.removeAllArbilDataNodeRows();
try {
for (String currentSelectedId : graphPanel.selectedGroupId) {
String currentSelectedPath = graphPanel.getPathForElementId(currentSelectedId);
graphPanel.arbilTableModel.addSingleArbilDataNode(ArbilDataNodeLoader.getSingleInstance().getArbilDataNode(null, new URI(currentSelectedPath)));
}
} catch (URISyntaxException urise) {
GuiHelper.linorgBugCatcher.logError(urise);
}
}
}
}
| src/main/java/nl/mpi/kinnate/svg/MouseListenerSvg.java | package nl.mpi.kinnate.svg;
import java.awt.Cursor;
import java.awt.Point;
import java.awt.event.MouseEvent;
import java.net.URI;
import java.net.URISyntaxException;
import org.w3c.dom.events.Event;
import org.w3c.dom.events.EventListener;
import org.apache.batik.dom.events.DOMMouseEvent;
import javax.swing.event.MouseInputAdapter;
import nl.mpi.arbil.GuiHelper;
import org.w3c.dom.Element;
import nl.mpi.arbil.data.ImdiLoader;
/**
* Document : MouseListenerSvg
* Created on : Mar 9, 2011, 3:21:53 PM
* Author : Peter Withers
*/
public class MouseListenerSvg extends MouseInputAdapter implements EventListener {
private Cursor preDragCursor;
private GraphPanel graphPanel;
private Point startDragPoint = null;
static boolean mouseActionOnNode = false;
static boolean mouseActionIsPopupTrigger = false;
static boolean mouseActionIsDrag = false;
public MouseListenerSvg(GraphPanel graphPanelLocal) {
graphPanel = graphPanelLocal;
}
@Override
public void mouseDragged(MouseEvent me) {
if (startDragPoint != null) {
// System.out.println("mouseDragged: " + me.toString());
if (graphPanel.selectedGroupId.size() > 0) {
graphPanel.svgCanvas.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR));
                // limit the drag to the distance dragged, not the location
graphPanel.svgUpdateHandler.updateDragNode(me.getPoint().x - startDragPoint.x, me.getPoint().y - startDragPoint.y);
} else {
graphPanel.svgUpdateHandler.dragCanvas(me.getPoint().x - startDragPoint.x, me.getPoint().y - startDragPoint.y);
}
mouseActionIsDrag = true;
} else {
graphPanel.svgUpdateHandler.startDrag();
}
startDragPoint = me.getPoint();
}
@Override
public void mouseReleased(MouseEvent me) {
// System.out.println("mouseReleased: " + me.toString());
graphPanel.svgCanvas.setCursor(preDragCursor);
startDragPoint = null;
if (!mouseActionIsDrag && !mouseActionIsPopupTrigger && !mouseActionOnNode && me.getButton() == MouseEvent.BUTTON1) { // todo: button1 could cause issues for left handed people with swapped mouse buttons
System.out.println("Clear selection");
graphPanel.selectedGroupId.clear();
graphPanel.svgUpdateHandler.updateSvgSelectionHighlights();
}
mouseActionOnNode = false;
}
@Override
public void mousePressed(MouseEvent e) {
mouseActionIsDrag = false;
mouseActionIsPopupTrigger = e.isPopupTrigger();
}
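    /**
     * Handles DOM click events on entity nodes in the SVG: toggles the clicked
     * node in the selection (shift-click for multi-select), updates the
     * highlights and refreshes the linked table.
     */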
@Override
public void handleEvent(Event evt) {
mouseActionOnNode = true;
boolean shiftDown = false;
if (evt instanceof DOMMouseEvent) {
shiftDown = ((DOMMouseEvent) evt).getShiftKey();
}
System.out.println("dom mouse event: " + evt.getCurrentTarget());
Element currentDraggedElement = ((Element) evt.getCurrentTarget());
preDragCursor = graphPanel.svgCanvas.getCursor();
// get the entityPath
        // todo: change selected elements to use the ID not the path, but this change will affect the way the imdi path is obtained to show the table
String entityIdentifier = currentDraggedElement.getAttribute("id");
System.out.println("entityPath: " + entityIdentifier);
boolean nodeAlreadySelected = graphPanel.selectedGroupId.contains(entityIdentifier);
if (!shiftDown) {
System.out.println("Clear selection");
graphPanel.selectedGroupId.clear();
graphPanel.selectedGroupId.add(entityIdentifier);
} else {
// toggle the highlight
if (nodeAlreadySelected) {
graphPanel.selectedGroupId.remove(entityIdentifier);
} else {
graphPanel.selectedGroupId.add(entityIdentifier);
}
}
graphPanel.svgUpdateHandler.updateSvgSelectionHighlights();
// if (existingHighlight == null) {
// svgCanvas.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR));
// }
// update the table selection
if (graphPanel.imdiTableModel != null) {
graphPanel.imdiTableModel.removeAllImdiRows();
try {
for (String currentSelectedId : graphPanel.selectedGroupId) {
String currentSelectedPath = graphPanel.getPathForElementId(currentSelectedId);
graphPanel.imdiTableModel.addSingleImdiObject(ImdiLoader.getSingleInstance().getImdiObject(null, new URI(currentSelectedPath)));
}
} catch (URISyntaxException urise) {
GuiHelper.linorgBugCatcher.logError(urise);
}
}
}
}
| Migrated kinnate trunk to use the latest Arbil jar.
| src/main/java/nl/mpi/kinnate/svg/MouseListenerSvg.java | Migrated kinnate trunk to use the latest Arbil jar. | <ide><path>rc/main/java/nl/mpi/kinnate/svg/MouseListenerSvg.java
<ide> import org.w3c.dom.events.EventListener;
<ide> import org.apache.batik.dom.events.DOMMouseEvent;
<ide> import javax.swing.event.MouseInputAdapter;
<del>import nl.mpi.arbil.GuiHelper;
<add>import nl.mpi.arbil.data.ArbilDataNodeLoader;
<add>import nl.mpi.arbil.ui.GuiHelper;
<ide> import org.w3c.dom.Element;
<del>import nl.mpi.arbil.data.ImdiLoader;
<ide>
<ide> /**
<ide> * Document : MouseListenerSvg
<ide> // svgCanvas.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR));
<ide> // }
<ide> // update the table selection
<del> if (graphPanel.imdiTableModel != null) {
<del> graphPanel.imdiTableModel.removeAllImdiRows();
<add> if (graphPanel.arbilTableModel != null) {
<add> graphPanel.arbilTableModel.removeAllArbilDataNodeRows();
<ide> try {
<ide> for (String currentSelectedId : graphPanel.selectedGroupId) {
<ide> String currentSelectedPath = graphPanel.getPathForElementId(currentSelectedId);
<del> graphPanel.imdiTableModel.addSingleImdiObject(ImdiLoader.getSingleInstance().getImdiObject(null, new URI(currentSelectedPath)));
<add> graphPanel.arbilTableModel.addSingleArbilDataNode(ArbilDataNodeLoader.getSingleInstance().getArbilDataNode(null, new URI(currentSelectedPath)));
<ide> }
<ide> } catch (URISyntaxException urise) {
<ide> GuiHelper.linorgBugCatcher.logError(urise); |
|
Java | apache-2.0 | 1c1a2007dcee1b6ca9c583ec8af63057ada317f1 | 0 | openfoodfacts/OpenFoodFacts-androidApp | package openfoodfacts.github.scrachx.openfood.views;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.ViewTreeObserver;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.github.chrisbanes.photoview.PhotoViewAttacher;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;
import com.theartofdev.edmodo.cropper.CropImage;
import com.theartofdev.edmodo.cropper.CropImageActivity;
import org.apache.commons.lang.StringUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import io.reactivex.Completable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.CompositeDisposable;
import openfoodfacts.github.scrachx.openfood.R;
import openfoodfacts.github.scrachx.openfood.databinding.ActivityFullScreenImageBinding;
import openfoodfacts.github.scrachx.openfood.fragments.BaseFragment;
import openfoodfacts.github.scrachx.openfood.images.ImageKeyHelper;
import openfoodfacts.github.scrachx.openfood.images.ImageSize;
import openfoodfacts.github.scrachx.openfood.images.ImageTransformationUtils;
import openfoodfacts.github.scrachx.openfood.images.PhotoReceiver;
import openfoodfacts.github.scrachx.openfood.images.ProductImage;
import openfoodfacts.github.scrachx.openfood.jobs.FileDownloader;
import openfoodfacts.github.scrachx.openfood.jobs.PhotoReceiverHandler;
import openfoodfacts.github.scrachx.openfood.models.Product;
import openfoodfacts.github.scrachx.openfood.models.ProductImageField;
import openfoodfacts.github.scrachx.openfood.network.OpenFoodAPIClient;
import openfoodfacts.github.scrachx.openfood.utils.FileUtils;
import openfoodfacts.github.scrachx.openfood.utils.ImageUploadListener;
import openfoodfacts.github.scrachx.openfood.utils.LocaleHelper;
import openfoodfacts.github.scrachx.openfood.utils.SwipeDetector;
import openfoodfacts.github.scrachx.openfood.views.adapters.LanguageDataAdapter;
import pl.aprilapps.easyphotopicker.EasyImage;
import smartdevelop.ir.eram.showcaseviewlib.GuideView;
import static android.Manifest.permission.CAMERA;
import static android.content.pm.PackageManager.PERMISSION_GRANTED;
import static openfoodfacts.github.scrachx.openfood.utils.Utils.MY_PERMISSIONS_REQUEST_CAMERA;
import static org.apache.commons.lang.StringUtils.isNotEmpty;
/**
* Activity to display/edit product images
*/
public class ProductImageManagementActivity extends BaseActivity implements PhotoReceiver {
private static final int RESULTCODE_MODIFIED = 1;
private static final int REQUEST_EDIT_IMAGE_AFTER_LOGIN = 1;
private static final int REQUEST_ADD_IMAGE_AFTER_LOGIN = 2;
private static final int REQUEST_CHOOSE_IMAGE_AFTER_LOGIN = 3;
private static final int REQUEST_UNSELECT_IMAGE_AFTER_LOGIN = 4;
static final int REQUEST_EDIT_IMAGE = 1000;
private static final int REQUEST_CHOOSE_IMAGE = 1001;
private static final List<ProductImageField> TYPE_IMAGE = Arrays.asList(ProductImageField.FRONT, ProductImageField.INGREDIENTS, ProductImageField.NUTRITION);
private ActivityFullScreenImageBinding binding;
private OpenFoodAPIClient client;
private File lastViewedImage;
private PhotoViewAttacher mAttacher;
private SharedPreferences settings;
private CompositeDisposable disp = new CompositeDisposable();
public static boolean isImageModified(int requestCode, int resultCode) {
return requestCode == REQUEST_EDIT_IMAGE && resultCode == ProductImageManagementActivity.RESULTCODE_MODIFIED;
}
@Override
protected void onDestroy() {
super.onDestroy();
disp.dispose();
binding = null;
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
client = new OpenFoodAPIClient(this);
binding = ActivityFullScreenImageBinding.inflate(getLayoutInflater());
setContentView(binding.getRoot());
// Setup onclick listeners
binding.btnDone.setOnClickListener(v -> onExit());
binding.btnUnselectImage.setOnClickListener(v -> unSelectImage());
binding.btnChooseImage.setOnClickListener(v -> onChooseImage());
binding.btnAddImage.setOnClickListener(v -> onAddImage());
binding.btnChooseDefaultLanguage.setOnClickListener(v -> onSelectDefaultLanguage());
binding.btnEditImage.setOnClickListener(v -> onStartEditExistingImage());
binding.comboLanguages.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
onLanguageChanged();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Do nothing
}
});
binding.comboImageType.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
onImageTypeChanged();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Do nothing
}
});
settings = getSharedPreferences("prefs", 0);
if (settings.getBoolean(getString(R.string.check_first_time), true)) {
startShowCase(getString(R.string.title_image_type), getString(R.string.content_image_type), R.id.comboImageType, 1);
}
Intent intent = getIntent();
Product product = (Product) intent.getSerializableExtra(ImageKeyHelper.PRODUCT);
boolean canEdit = product != null;
((View) binding.btnEditImage).setVisibility(canEdit ? View.VISIBLE : View.INVISIBLE);
((View) binding.btnUnselectImage).setVisibility(binding.btnEditImage.getVisibility());
mAttacher = new PhotoViewAttacher(binding.imageViewFullScreen);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
//delaying the transition until the view has been laid out
postponeEnterTransition();
}
new SwipeDetector(binding.imageViewFullScreen).setOnSwipeListener((v, swipeType) -> {
if (swipeType == SwipeDetector.SwipeTypeEnum.LEFT_TO_RIGHT) {
incrementImageType(-1);
} else if (swipeType == SwipeDetector.SwipeTypeEnum.RIGHT_TO_LEFT) {
incrementImageType(1);
} else if (swipeType == SwipeDetector.SwipeTypeEnum.TOP_TO_BOTTOM) {
onRefresh(true);
} else {
stopRefresh();
}
});
ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.simple_spinner_item_white, generateImageTypeNames());
adapter.setDropDownViewResource(android.R.layout.simple_list_item_single_choice);
binding.comboImageType.setAdapter(adapter);
setSupportActionBar(binding.toolbar);
if (getSupportActionBar() != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
loadLanguage();
binding.comboImageType.setSelection(TYPE_IMAGE.indexOf(getSelectedType()));
updateProductImagesInfo(null);
onRefresh(false);
}
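    /**
     * Walks the user through the activity's controls with a chain of showcase
     * views; shown only on the first run (see the check_first_time preference).
     */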
private void startShowCase(String title, String content, int viewId, final int type) {
new GuideView.Builder(this)
.setTitle(title)
.setContentText(content)
.setTargetView(findViewById(viewId))
.setContentTextSize(12)
.setTitleTextSize(16)
.setDismissType(GuideView.DismissType.outside)
.setGuideListener(view -> {
switch (type) {
case 1:
startShowCase(getString(R.string.title_choose_language), getString(R.string.content_choose_language), R.id.comboLanguages, 2);
break;
case 2:
startShowCase(getString(R.string.title_add_photo), getString(R.string.content_add_photo), R.id.btnAddImage, 3);
break;
case 3:
startShowCase(getString(R.string.title_choose_photo), getString(R.string.content_choose_photo), R.id.btnChooseImage, 4);
break;
case 4:
startShowCase(getString(R.string.title_edit_photo), getString(R.string.content_edit_photo), R.id.btnEditImage, 5);
break;
case 5:
startShowCase(getString(R.string.title_unselect_photo), getString(R.string.content_unselect_photo), R.id.btnUnselectImage, 6);
break;
case 6:
startShowCase(getString(R.string.title_exit), getString(R.string.content_exit), R.id.btn_done, 7);
break;
case 7:
SharedPreferences.Editor editor = settings.edit();
editor.putBoolean(getString(R.string.check_first_time), false);
editor.apply();
break;
}
})
.build()
.show();
}
private List<String> generateImageTypeNames() {
List<String> images = new ArrayList<>();
for (ProductImageField type : TYPE_IMAGE) {
images.add(getResources().getString(ImageKeyHelper.getResourceId(type)));
}
return images;
}
@Override
public boolean onSupportNavigateUp() {
finish();
return true;
}
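    /**
     * Selects the previous (inc = -1) or next (inc = 1) image type, wrapping
     * around the list of available types.
     */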
private void incrementImageType(int inc) {
stopRefresh();
int newPosition = binding.comboImageType.getSelectedItemPosition() + inc;
final int count = binding.comboImageType.getAdapter().getCount();
if (newPosition < 0) {
newPosition = count - 1;
} else {
newPosition = newPosition % count;
}
binding.comboImageType.setSelection(newPosition, true);
}
private void loadLanguage() {
Product product = getProduct();
if (product == null) {
return;
}
//we load all available languages for product/type
String currentLanguage = getCurrentLanguage();
final ProductImageField productImageField = getSelectedType();
final Set<String> addedLanguages = new HashSet<>(product.getAvailableLanguageForImage(productImageField, ImageSize.DISPLAY));
final List<LocaleHelper.LanguageData> languageForImage = LocaleHelper.getLanguageData(addedLanguages, true);
int selectedIndex = LocaleHelper.find(languageForImage, currentLanguage);
if (selectedIndex < 0) {
addedLanguages.add(currentLanguage);
languageForImage.add(LocaleHelper.getLanguageData(currentLanguage, false));
}
String[] localeValues = getResources().getStringArray(R.array.languages_array);
List<String> otherNotSupportedCode = new ArrayList<>();
for (String local : localeValues) {
if (!addedLanguages.contains(local)) {
otherNotSupportedCode.add(local);
}
}
languageForImage.addAll(LocaleHelper.getLanguageData(otherNotSupportedCode, false));
LanguageDataAdapter adapter = new LanguageDataAdapter(this, R.layout.simple_spinner_item_white, languageForImage);
adapter.setDropDownViewResource(android.R.layout.simple_list_item_single_choice);
binding.comboLanguages.setAdapter(adapter);
selectedIndex = LocaleHelper.find(languageForImage, currentLanguage);
if (selectedIndex >= 0) {
binding.comboLanguages.setSelection(selectedIndex);
}
updateLanguageStatus();
updateSelectDefaultLanguageAction();
}
/**
     * Used to warn the user when there is no image for the selected language.
     *
     * @return true if the language of the displayed image matches the selected language
*/
private boolean updateLanguageStatus() {
final ProductImageField serializableExtra = getSelectedType();
String imageUrl = getCurrentImageUrl();
String languageUsedByImage = ImageKeyHelper.getLanguageCodeFromUrl(serializableExtra, imageUrl);
String language = getCurrentLanguage();
//if the language of the displayed image is not the same that the language in this activity
//we use the language of the image
boolean languageSupported = language.equals(languageUsedByImage);
if (languageSupported) {
binding.textInfo.setText(null);
binding.textInfo.setTextColor(ContextCompat.getColor(this, R.color.white));
} else {
binding.textInfo.setText(R.string.image_not_defined_for_language);
binding.textInfo.setTextColor(ContextCompat.getColor(this, R.color.orange));
}
((View) binding.btnEditImage).setVisibility(languageSupported ? View.VISIBLE : View.GONE);
((View) binding.btnUnselectImage).setVisibility(binding.btnEditImage.getVisibility());
return languageSupported;
}
private String getCurrentLanguage() {
final String language = getIntent().getStringExtra(ImageKeyHelper.LANGUAGE);
if (language == null) {
return LocaleHelper.getLanguage(getBaseContext());
}
return language;
}
private void updateToolbarTitle(Product product) {
if (product != null) {
changeToolBarTitle(StringUtils.defaultString(product.getProductName(LocaleHelper.getLanguage(this))));
}
}
private void changeToolBarTitle(String productName) {
binding.toolbar.setTitle(productName + " / " + binding.comboImageType.getSelectedItem().toString());
}
@Override
protected void onResume() {
super.onResume();
updateToolbarTitle(getProduct());
}
private void onRefresh(boolean reloadProduct) {
String imageUrl = getCurrentImageUrl();
if (reloadProduct || imageUrl == null) {
reloadProduct();
} else {
loadImage(imageUrl);
}
}
private void loadImage(String imageUrl) {
if (isNotEmpty(imageUrl)) {
String url = imageUrl;
if (FileUtils.isAbsolute(url)) {
url = "file://" + url;
}
startRefresh(getString(R.string.txtLoading));
Picasso.get()
.load(url)
.into(binding.imageViewFullScreen, new Callback() {
@Override
public void onSuccess() {
mAttacher.update();
scheduleStartPostponedTransition(binding.imageViewFullScreen);
binding.imageViewFullScreen.setVisibility(View.VISIBLE);
stopRefresh();
}
@Override
public void onError(Exception ex) {
binding.imageViewFullScreen.setVisibility(View.VISIBLE);
Toast.makeText(ProductImageManagementActivity.this, getResources().getString(R.string.txtConnectionError), Toast.LENGTH_LONG).show();
stopRefresh();
}
});
} else {
binding.imageViewFullScreen.setImageDrawable(null);
stopRefresh();
}
}
/**
* Reload the product, update the image and the language
*/
private void reloadProduct() {
if (isFinishing()) {
return;
}
Product product = getProduct();
if (product != null) {
startRefresh(getString(R.string.loading_product, "..."));
client.getProductImages(product.getCode(), newState -> {
final Product newStateProduct = newState.getProduct();
boolean imageReloaded = false;
if (newStateProduct != null) {
updateToolbarTitle(newStateProduct);
String imageUrl = getCurrentImageUrl();
getIntent().putExtra(ImageKeyHelper.PRODUCT, newStateProduct);
final String newImageUrl = getImageUrlToDisplay(newStateProduct);
loadLanguage();
if (imageUrl == null || !imageUrl.equals(newImageUrl)) {
getIntent().putExtra(ImageKeyHelper.IMAGE_URL, newImageUrl);
loadImage(newImageUrl);
imageReloaded = true;
}
} else {
if (StringUtils.isNotBlank(newState.getStatusVerbose())) {
Toast.makeText(ProductImageManagementActivity.this, newState.getStatusVerbose(), Toast.LENGTH_LONG).show();
}
}
if (!imageReloaded) {
stopRefresh();
}
});
}
}
/**
* The additional field "images" is not loaded by default by OFF as it's only used to edit an image.
     * So we load the product images in the background.
* Could be improved by loading only the field "images".
*/
private void updateProductImagesInfo(Runnable toDoAfter) {
Product product = getProduct();
if (product != null) {
client.getProductImages(product.getCode(), newState -> {
final Product newStateProduct = newState.getProduct();
if (newStateProduct != null) {
getIntent().putExtra(ImageKeyHelper.PRODUCT, newStateProduct);
}
if (toDoAfter != null) {
toDoAfter.run();
}
});
}
}
private String getImageUrlToDisplay(Product product) {
return product.getSelectedImage(getCurrentLanguage(), getSelectedType(),
ImageSize.DISPLAY);
}
private String getCurrentImageUrl() {
return getIntent().getStringExtra(ImageKeyHelper.IMAGE_URL);
}
private void stopRefresh() {
binding.progressBar.setVisibility(View.GONE);
updateLanguageStatus();
}
private boolean isRefreshing() {
return binding.progressBar.getVisibility() == View.VISIBLE;
}
private void startRefresh(String text) {
binding.progressBar.setVisibility(View.VISIBLE);
if (text != null) {
binding.textInfo.setTextColor(ContextCompat.getColor(this, R.color.white));
binding.textInfo.setText(text);
}
}
void onSelectDefaultLanguage() {
String lang = LocaleHelper.getLocale(getProduct().getLang()).getLanguage();
LocaleHelper.getLanguageData(lang, true);
final int position = ((LanguageDataAdapter) binding.comboLanguages.getAdapter()).getPosition(lang);
if (position >= 0) {
binding.comboLanguages.setSelection(position, true);
}
}
void onExit() {
finish();
}
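    /**
     * Asks the server to unselect the displayed image for the current type and
     * language, then reloads the product.
     */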
private void unSelectImage() {
if (cannotEdit(REQUEST_UNSELECT_IMAGE_AFTER_LOGIN)) {
return;
}
startRefresh(getString(R.string.unselect_image));
client.unSelectImage(getProduct().getCode(), getSelectedType(), getCurrentLanguage(), (value, response) -> {
if (value) {
setResult(RESULTCODE_MODIFIED);
}
reloadProduct();
});
}
private void onChooseImage() {
if (cannotEdit(REQUEST_CHOOSE_IMAGE_AFTER_LOGIN)) {
return;
}
final Intent intent = new Intent(ProductImageManagementActivity.this, ImagesSelectionActivity.class);
intent.putExtra(ImageKeyHelper.PRODUCT_BARCODE, getProduct().getCode());
intent.putExtra(ImagesSelectionActivity.TOOLBAR_TITLE, binding.toolbar.getTitle());
startActivityForResult(intent, REQUEST_CHOOSE_IMAGE);
}
private boolean cannotEdit(int loginRequestCode) {
if (isRefreshing()) {
Toast.makeText(this, R.string.cant_modify_if_refreshing, Toast.LENGTH_SHORT).show();
return true;
}
        // if the user is not logged in, force them to log in
if (isUserNotLoggedIn()) {
startActivityForResult(new Intent(ProductImageManagementActivity.this, LoginActivity.class), loginRequestCode);
return true;
}
return false;
}
void onAddImage() {
if (cannotEdit(REQUEST_ADD_IMAGE_AFTER_LOGIN)) {
return;
}
if (ContextCompat.checkSelfPermission(this, CAMERA) != PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
} else {
EasyImage.openCamera(this, 0);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == MY_PERMISSIONS_REQUEST_CAMERA && BaseFragment.isAllGranted(grantResults)) {
onAddImage();
}
}
private void updateSelectDefaultLanguageAction() {
boolean isDefault = getProduct().getLang() != null && getCurrentLanguage().equals(LocaleHelper.getLocale(getProduct().getLang()).getLanguage());
binding.btnChooseDefaultLanguage.setVisibility(isDefault ? View.INVISIBLE : View.VISIBLE);
}
void onStartEditExistingImage() {
if (cannotEdit(REQUEST_EDIT_IMAGE_AFTER_LOGIN)) {
return;
}
Product product = getProduct();
final ProductImageField productImageField = getSelectedType();
String language = getCurrentLanguage();
//the rotation/crop set on the server
ImageTransformationUtils transformation = ImageTransformationUtils.getScreenTransformation(product, productImageField, language);
        //the first time, the image properties are not loaded, so fetch them first and edit in the callback
        if (transformation.isEmpty()) {
            updateProductImagesInfo(() -> editPhoto(productImageField, ImageTransformationUtils.getScreenTransformation(product, productImageField, language)));
        } else {
            editPhoto(productImageField, transformation);
        }
}
private void editPhoto(ProductImageField productImageField, ImageTransformationUtils transformation) {
if (transformation.isNotEmpty()) {
disp.add(FileDownloader.download(this, transformation.getInitImageUrl())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(file -> {
//to delete the file after:
lastViewedImage = file;
cropRotateExistingImageOnServer(file, getString(ImageKeyHelper.getResourceIdForEditAction(productImageField)), transformation);
}));
}
}
private Product getProduct() {
return (Product) getIntent().getSerializableExtra(ImageKeyHelper.PRODUCT);
}
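    /**
     * Called when the user picks another language: stores it on the intent and
     * refreshes the displayed image and toolbar title.
     */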
private void onLanguageChanged() {
LocaleHelper.LanguageData data = (LocaleHelper.LanguageData) binding.comboLanguages.getSelectedItem();
Product product = getProduct();
if (!data.getCode().equals(getCurrentLanguage())) {
getIntent().putExtra(ImageKeyHelper.LANGUAGE, data.getCode());
getIntent().putExtra(ImageKeyHelper.IMAGE_URL, getImageUrlToDisplay(product));
updateToolbarTitle(product);
onRefresh(false);
}
updateSelectDefaultLanguageAction();
}
private ProductImageField getSelectedType() {
return (ProductImageField) getIntent().getSerializableExtra(ImageKeyHelper.IMAGE_TYPE);
}
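    /**
     * Called when the user picks another image type: stores it on the intent
     * and refreshes the image, the language list and the toolbar title.
     */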
private void onImageTypeChanged() {
if (getProduct() == null) {
return;
}
ProductImageField newTypeSelected = TYPE_IMAGE.get(binding.comboImageType.getSelectedItemPosition());
final ProductImageField selectedType = getSelectedType();
if (newTypeSelected.equals(selectedType)) {
return;
}
getIntent().putExtra(ImageKeyHelper.IMAGE_TYPE, newTypeSelected);
getIntent().putExtra(ImageKeyHelper.IMAGE_URL, getImageUrlToDisplay(getProduct()));
onRefresh(false);
loadLanguage();
updateToolbarTitle(getProduct());
}
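    /**
     * Opens the crop/rotate editor pre-filled with the rotation and crop
     * rectangle currently applied on the server.
     */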
private void cropRotateExistingImageOnServer(File image, String title, ImageTransformationUtils transformation) {
Uri uri = Uri.fromFile(image);
final CropImage.ActivityBuilder activityBuilder = CropImage.activity(uri)
.setCropMenuCropButtonIcon(R.drawable.ic_check_white_24dp)
.setAllowFlipping(false)
//we just want crop size/rotation
.setNoOutputImage(true)
.setAllowRotation(true)
.setAllowCounterRotation(true)
.setAutoZoomEnabled(false)
.setInitialRotation(transformation.getRotationInDegree())
.setActivityTitle(title);
if (transformation.getCropRectangle() != null) {
activityBuilder.setInitialCropWindowRectangle(transformation.getCropRectangle());
} else {
activityBuilder.setInitialCropWindowPaddingRatio(0);
}
startActivityForResult(activityBuilder.getIntent(this, CropImageActivity.class), REQUEST_EDIT_IMAGE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case REQUEST_EDIT_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
onStartEditExistingImage();
}
break;
case REQUEST_ADD_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
onAddImage();
}
break;
case REQUEST_CHOOSE_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
onChooseImage();
}
break;
case REQUEST_UNSELECT_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
unSelectImage();
}
break;
case REQUEST_EDIT_IMAGE:
applyEditExistingImage(resultCode, data);
break;
case REQUEST_CHOOSE_IMAGE:
if (resultCode == RESULT_OK && data != null) {
File file = (File) data.getSerializableExtra(ImageKeyHelper.IMAGE_FILE);
String imgId = data.getStringExtra(ImageKeyHelper.IMG_ID);
                    // photo chosen from the gallery
if (file != null) {
onPhotoReturned(file);
} else if (StringUtils.isNotBlank(imgId)) {
HashMap<String, String> imgMap = new HashMap<>();
imgMap.put(ImageKeyHelper.IMG_ID, imgId);
postEditImage(imgMap);
}
}
break;
default:
new PhotoReceiverHandler(this).onActivityResult(this, requestCode, resultCode, data);
break;
}
}
/**
     * @param resultCode result code from the crop activity; changes are applied only when it is {@link Activity#RESULT_OK}
     * @param dataFromCropActivity data returned by the crop activity; if null, the action is ignored
*/
private void applyEditExistingImage(int resultCode, @Nullable Intent dataFromCropActivity) {
        // delete the downloaded local file
deleteLocalFiles();
        // if the selected language is not the same as the current image's language, we can't modify: only add
if (isUserNotLoggedIn() || !updateLanguageStatus() || dataFromCropActivity == null) {
return;
}
if (resultCode == Activity.RESULT_OK) {
startRefresh(StringUtils.EMPTY);
CropImage.ActivityResult result = CropImage.getActivityResult(dataFromCropActivity);
final Product product = getProduct();
ImageTransformationUtils currentServerTransformation = ImageTransformationUtils.getInitialServerTransformation(product, getSelectedType(), getCurrentLanguage());
ImageTransformationUtils newServerTransformation = ImageTransformationUtils
.toServerTransformation(new ImageTransformationUtils(result.getRotation(), result.getCropRect()), product, getSelectedType(), getCurrentLanguage());
boolean isModified = !currentServerTransformation.equals(newServerTransformation);
if (isModified) {
startRefresh(getString(R.string.toastSending));
HashMap<String, String> imgMap = new HashMap<>();
imgMap.put(ImageKeyHelper.IMG_ID, newServerTransformation.getInitImageId());
ImageTransformationUtils.addTransformToMap(newServerTransformation, imgMap);
postEditImage(imgMap);
} else {
stopRefresh();
}
}
}
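    /**
     * Sends the image edit (new crop/rotation or selected image id) to the
     * server and reloads the product on success.
     */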
private void postEditImage(HashMap<String, String> imgMap) {
final String code = getProduct().getCode();
imgMap.put(ImageKeyHelper.PRODUCT_BARCODE, code);
imgMap.put(ImageKeyHelper.IMAGE_STRING_ID, ImageKeyHelper.getImageStringKey(getSelectedType(), getCurrentLanguage()));
binding.imageViewFullScreen.setVisibility(View.INVISIBLE);
client.editImage(code, imgMap, (value, response) -> {
if (value) {
setResult(RESULTCODE_MODIFIED);
}
reloadProduct();
});
}
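    /**
     * Deletes the locally downloaded copy of the last viewed image, if any.
     */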
private void deleteLocalFiles() {
if (lastViewedImage != null) {
boolean deleted = lastViewedImage.delete();
if (!deleted) {
                Log.w(ProductImageManagementActivity.class.getSimpleName(), "can't delete file " + lastViewedImage);
} else {
lastViewedImage = null;
}
}
}
/**
     * Schedules the postponed enter transition once the shared element has been
     * measured and laid out in the view hierarchy.
*/
private void scheduleStartPostponedTransition(final View sharedElement) {
sharedElement.getViewTreeObserver().addOnPreDrawListener(
new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
sharedElement.getViewTreeObserver().removeOnPreDrawListener(this);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
startPostponedEnterTransition();
}
return true;
}
});
}
/**
* @param newPhotoFile photo selected by the user to be sent to the server.
*/
@Override
public void onPhotoReturned(File newPhotoFile) {
startRefresh(getString(R.string.uploading_image));
disp.add(Completable.fromAction(() -> {
ProductImage image = new ProductImage(getProduct().getCode(), getSelectedType(), newPhotoFile, getCurrentLanguage());
image.setFilePath(newPhotoFile.getAbsolutePath());
client.postImg(image, true, new ImageUploadListener() {
@Override
public void onSuccess() {
reloadProduct();
setResult(RESULTCODE_MODIFIED);
}
@Override
public void onFailure(String message) {
Toast.makeText(ProductImageManagementActivity.this, message, Toast.LENGTH_LONG).show();
stopRefresh();
}
});
}).observeOn(AndroidSchedulers.mainThread()).subscribe());
}
}
| app/src/main/java/openfoodfacts/github/scrachx/openfood/views/ProductImageManagementActivity.java | package openfoodfacts.github.scrachx.openfood.views;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.ViewTreeObserver;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.github.chrisbanes.photoview.PhotoViewAttacher;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;
import com.theartofdev.edmodo.cropper.CropImage;
import com.theartofdev.edmodo.cropper.CropImageActivity;
import org.apache.commons.lang.StringUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.CompositeDisposable;
import openfoodfacts.github.scrachx.openfood.R;
import openfoodfacts.github.scrachx.openfood.databinding.ActivityFullScreenImageBinding;
import openfoodfacts.github.scrachx.openfood.fragments.BaseFragment;
import openfoodfacts.github.scrachx.openfood.images.ImageKeyHelper;
import openfoodfacts.github.scrachx.openfood.images.ImageSize;
import openfoodfacts.github.scrachx.openfood.images.ImageTransformationUtils;
import openfoodfacts.github.scrachx.openfood.images.PhotoReceiver;
import openfoodfacts.github.scrachx.openfood.images.ProductImage;
import openfoodfacts.github.scrachx.openfood.jobs.FileDownloader;
import openfoodfacts.github.scrachx.openfood.jobs.PhotoReceiverHandler;
import openfoodfacts.github.scrachx.openfood.models.Product;
import openfoodfacts.github.scrachx.openfood.models.ProductImageField;
import openfoodfacts.github.scrachx.openfood.network.OpenFoodAPIClient;
import openfoodfacts.github.scrachx.openfood.utils.FileUtils;
import openfoodfacts.github.scrachx.openfood.utils.ImageUploadListener;
import openfoodfacts.github.scrachx.openfood.utils.LocaleHelper;
import openfoodfacts.github.scrachx.openfood.utils.SwipeDetector;
import openfoodfacts.github.scrachx.openfood.views.adapters.LanguageDataAdapter;
import pl.aprilapps.easyphotopicker.EasyImage;
import smartdevelop.ir.eram.showcaseviewlib.GuideView;
import static android.Manifest.permission.CAMERA;
import static android.content.pm.PackageManager.PERMISSION_GRANTED;
import static openfoodfacts.github.scrachx.openfood.utils.Utils.MY_PERMISSIONS_REQUEST_CAMERA;
import static org.apache.commons.lang.StringUtils.isNotEmpty;
/**
* Activity to display/edit product images
*/
public class ProductImageManagementActivity extends BaseActivity implements PhotoReceiver {
private static final int RESULTCODE_MODIFIED = 1;
private static final int REQUEST_EDIT_IMAGE_AFTER_LOGIN = 1;
private static final int REQUEST_ADD_IMAGE_AFTER_LOGIN = 2;
private static final int REQUEST_CHOOSE_IMAGE_AFTER_LOGIN = 3;
private static final int REQUEST_UNSELECT_IMAGE_AFTER_LOGIN = 4;
static final int REQUEST_EDIT_IMAGE = 1000;
private static final int REQUEST_CHOOSE_IMAGE = 1001;
private static final List<ProductImageField> TYPE_IMAGE = Arrays.asList(ProductImageField.FRONT, ProductImageField.INGREDIENTS, ProductImageField.NUTRITION);
private ActivityFullScreenImageBinding binding;
private OpenFoodAPIClient client;
private File lastViewedImage;
private PhotoViewAttacher mAttacher;
private SharedPreferences settings;
private CompositeDisposable disp = new CompositeDisposable();
public static boolean isImageModified(int requestCode, int resultCode) {
return requestCode == REQUEST_EDIT_IMAGE && resultCode == ProductImageManagementActivity.RESULTCODE_MODIFIED;
}
@Override
protected void onDestroy() {
super.onDestroy();
disp.dispose();
binding = null;
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
client = new OpenFoodAPIClient(this);
binding = ActivityFullScreenImageBinding.inflate(getLayoutInflater());
setContentView(binding.getRoot());
// Setup onclick listeners
binding.btnDone.setOnClickListener(v -> onExit());
binding.btnUnselectImage.setOnClickListener(v -> unSelectImage());
binding.btnChooseImage.setOnClickListener(v -> onChooseImage());
binding.btnAddImage.setOnClickListener(v -> onAddImage());
binding.btnChooseDefaultLanguage.setOnClickListener(v -> onSelectDefaultLanguage());
binding.btnEditImage.setOnClickListener(v -> onStartEditExistingImage());
binding.comboLanguages.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
onLanguageChanged();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Do nothing
}
});
binding.comboImageType.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
onImageTypeChanged();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Do nothing
}
});
settings = getSharedPreferences("prefs", 0);
if (settings.getBoolean(getString(R.string.check_first_time), true)) {
startShowCase(getString(R.string.title_image_type), getString(R.string.content_image_type), R.id.comboImageType, 1);
}
Intent intent = getIntent();
Product product = (Product) intent.getSerializableExtra(ImageKeyHelper.PRODUCT);
boolean canEdit = product != null;
((View) binding.btnEditImage).setVisibility(canEdit ? View.VISIBLE : View.INVISIBLE);
((View) binding.btnUnselectImage).setVisibility(binding.btnEditImage.getVisibility());
mAttacher = new PhotoViewAttacher(binding.imageViewFullScreen);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
//delaying the transition until the view has been laid out
postponeEnterTransition();
}
new SwipeDetector(binding.imageViewFullScreen).setOnSwipeListener((v, swipeType) -> {
if (swipeType == SwipeDetector.SwipeTypeEnum.LEFT_TO_RIGHT) {
incrementImageType(-1);
} else if (swipeType == SwipeDetector.SwipeTypeEnum.RIGHT_TO_LEFT) {
incrementImageType(1);
} else if (swipeType == SwipeDetector.SwipeTypeEnum.TOP_TO_BOTTOM) {
onRefresh(true);
} else {
stopRefresh();
}
});
ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.simple_spinner_item_white, generateImageTypeNames());
adapter.setDropDownViewResource(android.R.layout.simple_list_item_single_choice);
binding.comboImageType.setAdapter(adapter);
setSupportActionBar(binding.toolbar);
if (getSupportActionBar() != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
loadLanguage();
binding.comboImageType.setSelection(TYPE_IMAGE.indexOf(getSelectedType()));
updateProductImagesInfo(null);
onRefresh(false);
}
private void startShowCase(String title, String content, int viewId, final int type) {
new GuideView.Builder(this)
.setTitle(title)
.setContentText(content)
.setTargetView(findViewById(viewId))
.setContentTextSize(12)
.setTitleTextSize(16)
.setDismissType(GuideView.DismissType.outside)
.setGuideListener(view -> {
switch (type) {
case 1:
startShowCase(getString(R.string.title_choose_language), getString(R.string.content_choose_language), R.id.comboLanguages, 2);
break;
case 2:
startShowCase(getString(R.string.title_add_photo), getString(R.string.content_add_photo), R.id.btnAddImage, 3);
break;
case 3:
startShowCase(getString(R.string.title_choose_photo), getString(R.string.content_choose_photo), R.id.btnChooseImage, 4);
break;
case 4:
startShowCase(getString(R.string.title_edit_photo), getString(R.string.content_edit_photo), R.id.btnEditImage, 5);
break;
case 5:
startShowCase(getString(R.string.title_unselect_photo), getString(R.string.content_unselect_photo), R.id.btnUnselectImage, 6);
break;
case 6:
startShowCase(getString(R.string.title_exit), getString(R.string.content_exit), R.id.btn_done, 7);
break;
case 7:
SharedPreferences.Editor editor = settings.edit();
editor.putBoolean(getString(R.string.check_first_time), false);
editor.apply();
break;
}
})
.build()
.show();
}
private List<String> generateImageTypeNames() {
List<String> images = new ArrayList<>();
for (ProductImageField type : TYPE_IMAGE) {
images.add(getResources().getString(ImageKeyHelper.getResourceId(type)));
}
return images;
}
@Override
public boolean onSupportNavigateUp() {
finish();
return true;
}
private void incrementImageType(int inc) {
stopRefresh();
int newPosition = binding.comboImageType.getSelectedItemPosition() + inc;
final int count = binding.comboImageType.getAdapter().getCount();
if (newPosition < 0) {
newPosition = count - 1;
} else {
newPosition = newPosition % count;
}
binding.comboImageType.setSelection(newPosition, true);
}
private void loadLanguage() {
Product product = getProduct();
if (product == null) {
return;
}
//we load all available languages for product/type
String currentLanguage = getCurrentLanguage();
final ProductImageField productImageField = getSelectedType();
final Set<String> addedLanguages = new HashSet<>(product.getAvailableLanguageForImage(productImageField, ImageSize.DISPLAY));
final List<LocaleHelper.LanguageData> languageForImage = LocaleHelper.getLanguageData(addedLanguages, true);
int selectedIndex = LocaleHelper.find(languageForImage, currentLanguage);
if (selectedIndex < 0) {
addedLanguages.add(currentLanguage);
languageForImage.add(LocaleHelper.getLanguageData(currentLanguage, false));
}
String[] localeValues = getResources().getStringArray(R.array.languages_array);
List<String> otherNotSupportedCode = new ArrayList<>();
for (String local : localeValues) {
if (!addedLanguages.contains(local)) {
otherNotSupportedCode.add(local);
}
}
languageForImage.addAll(LocaleHelper.getLanguageData(otherNotSupportedCode, false));
LanguageDataAdapter adapter = new LanguageDataAdapter(this, R.layout.simple_spinner_item_white, languageForImage);
adapter.setDropDownViewResource(android.R.layout.simple_list_item_single_choice);
binding.comboLanguages.setAdapter(adapter);
selectedIndex = LocaleHelper.find(languageForImage, currentLanguage);
if (selectedIndex >= 0) {
binding.comboLanguages.setSelection(selectedIndex);
}
updateLanguageStatus();
updateSelectDefaultLanguageAction();
}
/**
* Use to warn the user that there is no image for the selected image.
*/
private boolean updateLanguageStatus() {
final ProductImageField serializableExtra = getSelectedType();
String imageUrl = getCurrentImageUrl();
String languageUsedByImage = ImageKeyHelper.getLanguageCodeFromUrl(serializableExtra, imageUrl);
String language = getCurrentLanguage();
//if the language of the displayed image is not the same that the language in this activity
//we use the language of the image
boolean languageSupported = language.equals(languageUsedByImage);
if (languageSupported) {
binding.textInfo.setText(null);
binding.textInfo.setTextColor(ContextCompat.getColor(this, R.color.white));
} else {
binding.textInfo.setText(R.string.image_not_defined_for_language);
binding.textInfo.setTextColor(ContextCompat.getColor(this, R.color.orange));
}
((View) binding.btnEditImage).setVisibility(languageSupported ? View.VISIBLE : View.GONE);
((View) binding.btnUnselectImage).setVisibility(binding.btnEditImage.getVisibility());
return languageSupported;
}
private String getCurrentLanguage() {
final String language = getIntent().getStringExtra(ImageKeyHelper.LANGUAGE);
if (language == null) {
return LocaleHelper.getLanguage(getBaseContext());
}
return language;
}
private void updateToolbarTitle(Product product) {
if (product != null) {
changeToolBarTitle(StringUtils.defaultString(product.getProductName(LocaleHelper.getLanguage(this))));
}
}
private void changeToolBarTitle(String productName) {
binding.toolbar.setTitle(productName + " / " + binding.comboImageType.getSelectedItem().toString());
}
@Override
protected void onResume() {
super.onResume();
updateToolbarTitle(getProduct());
}
private void onRefresh(boolean reloadProduct) {
String imageUrl = getCurrentImageUrl();
if (reloadProduct || imageUrl == null) {
reloadProduct();
} else {
loadImage(imageUrl);
}
}
private void loadImage(String imageUrl) {
if (isNotEmpty(imageUrl)) {
String url = imageUrl;
if (FileUtils.isAbsolute(url)) {
url = "file://" + url;
}
startRefresh(getString(R.string.txtLoading));
Picasso.get()
.load(url)
.into(binding.imageViewFullScreen, new Callback() {
@Override
public void onSuccess() {
mAttacher.update();
scheduleStartPostponedTransition(binding.imageViewFullScreen);
binding.imageViewFullScreen.setVisibility(View.VISIBLE);
stopRefresh();
}
@Override
public void onError(Exception ex) {
binding.imageViewFullScreen.setVisibility(View.VISIBLE);
Toast.makeText(ProductImageManagementActivity.this, getResources().getString(R.string.txtConnectionError), Toast.LENGTH_LONG).show();
stopRefresh();
}
});
} else {
binding.imageViewFullScreen.setImageDrawable(null);
stopRefresh();
}
}
/**
* Reload the product, update the image and the language
*/
private void reloadProduct() {
if (isFinishing()) {
return;
}
Product product = getProduct();
if (product != null) {
startRefresh(getString(R.string.loading_product, "..."));
client.getProductImages(product.getCode(), newState -> {
final Product newStateProduct = newState.getProduct();
boolean imageReloaded = false;
if (newStateProduct != null) {
updateToolbarTitle(newStateProduct);
String imageUrl = getCurrentImageUrl();
getIntent().putExtra(ImageKeyHelper.PRODUCT, newStateProduct);
final String newImageUrl = getImageUrlToDisplay(newStateProduct);
loadLanguage();
if (imageUrl == null || !imageUrl.equals(newImageUrl)) {
getIntent().putExtra(ImageKeyHelper.IMAGE_URL, newImageUrl);
loadImage(newImageUrl);
imageReloaded = true;
}
} else {
if (StringUtils.isNotBlank(newState.getStatusVerbose())) {
Toast.makeText(ProductImageManagementActivity.this, newState.getStatusVerbose(), Toast.LENGTH_LONG).show();
}
}
if (!imageReloaded) {
stopRefresh();
}
});
}
}
/**
* The additional field "images" is not loaded by default by OFF as it's only used to edit an image.
* So we load the product images in background.
* Could be improved by loading only the field "images".
*/
private void updateProductImagesInfo(Runnable toDoAfter) {
Product product = getProduct();
if (product != null) {
client.getProductImages(product.getCode(), newState -> {
final Product newStateProduct = newState.getProduct();
if (newStateProduct != null) {
getIntent().putExtra(ImageKeyHelper.PRODUCT, newStateProduct);
}
if (toDoAfter != null) {
toDoAfter.run();
}
});
}
}
private String getImageUrlToDisplay(Product product) {
return product.getSelectedImage(getCurrentLanguage(), getSelectedType(),
ImageSize.DISPLAY);
}
private String getCurrentImageUrl() {
return getIntent().getStringExtra(ImageKeyHelper.IMAGE_URL);
}
private void stopRefresh() {
binding.progressBar.setVisibility(View.GONE);
updateLanguageStatus();
}
private boolean isRefreshing() {
return binding.progressBar.getVisibility() == View.VISIBLE;
}
private void startRefresh(String text) {
binding.progressBar.setVisibility(View.VISIBLE);
if (text != null) {
binding.textInfo.setTextColor(ContextCompat.getColor(this, R.color.white));
binding.textInfo.setText(text);
}
}
void onSelectDefaultLanguage() {
String lang = LocaleHelper.getLocale(getProduct().getLang()).getLanguage();
LocaleHelper.getLanguageData(lang, true);
final int position = ((LanguageDataAdapter) binding.comboLanguages.getAdapter()).getPosition(lang);
if (position >= 0) {
binding.comboLanguages.setSelection(position, true);
}
}
void onExit() {
finish();
}
private void unSelectImage() {
if (cannotEdit(REQUEST_UNSELECT_IMAGE_AFTER_LOGIN)) {
return;
}
startRefresh(getString(R.string.unselect_image));
client.unSelectImage(getProduct().getCode(), getSelectedType(), getCurrentLanguage(), (value, response) -> {
if (value) {
setResult(RESULTCODE_MODIFIED);
}
reloadProduct();
});
}
private void onChooseImage() {
if (cannotEdit(REQUEST_CHOOSE_IMAGE_AFTER_LOGIN)) {
return;
}
final Intent intent = new Intent(ProductImageManagementActivity.this, ImagesSelectionActivity.class);
intent.putExtra(ImageKeyHelper.PRODUCT_BARCODE, getProduct().getCode());
intent.putExtra(ImagesSelectionActivity.TOOLBAR_TITLE, binding.toolbar.getTitle());
startActivityForResult(intent, REQUEST_CHOOSE_IMAGE);
}
private boolean cannotEdit(int loginRequestCode) {
if (isRefreshing()) {
Toast.makeText(this, R.string.cant_modify_if_refreshing, Toast.LENGTH_SHORT).show();
return true;
}
//if user not logged in, we force to log
if (isUserNotLoggedIn()) {
startActivityForResult(new Intent(ProductImageManagementActivity.this, LoginActivity.class), loginRequestCode);
return true;
}
return false;
}
void onAddImage() {
if (cannotEdit(REQUEST_ADD_IMAGE_AFTER_LOGIN)) {
return;
}
if (ContextCompat.checkSelfPermission(this, CAMERA) != PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
} else {
EasyImage.openCamera(this, 0);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == MY_PERMISSIONS_REQUEST_CAMERA && BaseFragment.isAllGranted(grantResults)) {
onAddImage();
}
}
private void updateSelectDefaultLanguageAction() {
boolean isDefault = getProduct().getLang() != null && getCurrentLanguage().equals(LocaleHelper.getLocale(getProduct().getLang()).getLanguage());
binding.btnChooseDefaultLanguage.setVisibility(isDefault ? View.INVISIBLE : View.VISIBLE);
}
void onStartEditExistingImage() {
if (cannotEdit(REQUEST_EDIT_IMAGE_AFTER_LOGIN)) {
return;
}
Product product = getProduct();
final ProductImageField productImageField = getSelectedType();
String language = getCurrentLanguage();
//the rotation/crop set on the server
ImageTransformationUtils transformation = ImageTransformationUtils.getScreenTransformation(product, productImageField, language);
//the first time, the images properties are not loaded...
if (transformation.isEmpty()) {
updateProductImagesInfo(() -> editPhoto(productImageField, ImageTransformationUtils.getScreenTransformation(product, productImageField, language)));
}
editPhoto(productImageField, transformation);
}
private void editPhoto(ProductImageField productImageField, ImageTransformationUtils transformation) {
if (transformation.isNotEmpty()) {
disp.add(FileDownloader.download(this, transformation.getInitImageUrl())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(file -> {
//to delete the file after:
lastViewedImage = file;
cropRotateExistingImageOnServer(file, getString(ImageKeyHelper.getResourceIdForEditAction(productImageField)), transformation);
}));
}
}
private Product getProduct() {
return (Product) getIntent().getSerializableExtra(ImageKeyHelper.PRODUCT);
}
private void onLanguageChanged() {
LocaleHelper.LanguageData data = (LocaleHelper.LanguageData) binding.comboLanguages.getSelectedItem();
Product product = getProduct();
if (!data.getCode().equals(getCurrentLanguage())) {
getIntent().putExtra(ImageKeyHelper.LANGUAGE, data.getCode());
getIntent().putExtra(ImageKeyHelper.IMAGE_URL, getImageUrlToDisplay(product));
updateToolbarTitle(product);
onRefresh(false);
}
updateSelectDefaultLanguageAction();
}
private ProductImageField getSelectedType() {
return (ProductImageField) getIntent().getSerializableExtra(ImageKeyHelper.IMAGE_TYPE);
}
private void onImageTypeChanged() {
if (getProduct() == null) {
return;
}
ProductImageField newTypeSelected = TYPE_IMAGE.get(binding.comboImageType.getSelectedItemPosition());
final ProductImageField selectedType = getSelectedType();
if (newTypeSelected.equals(selectedType)) {
return;
}
getIntent().putExtra(ImageKeyHelper.IMAGE_TYPE, newTypeSelected);
getIntent().putExtra(ImageKeyHelper.IMAGE_URL, getImageUrlToDisplay(getProduct()));
onRefresh(false);
loadLanguage();
updateToolbarTitle(getProduct());
}
private void cropRotateExistingImageOnServer(File image, String title, ImageTransformationUtils transformation) {
Uri uri = Uri.fromFile(image);
final CropImage.ActivityBuilder activityBuilder = CropImage.activity(uri)
.setCropMenuCropButtonIcon(R.drawable.ic_check_white_24dp)
.setAllowFlipping(false)
//we just want crop size/rotation
.setNoOutputImage(true)
.setAllowRotation(true)
.setAllowCounterRotation(true)
.setAutoZoomEnabled(false)
.setInitialRotation(transformation.getRotationInDegree())
.setActivityTitle(title);
if (transformation.getCropRectangle() != null) {
activityBuilder.setInitialCropWindowRectangle(transformation.getCropRectangle());
} else {
activityBuilder.setInitialCropWindowPaddingRatio(0);
}
startActivityForResult(activityBuilder.getIntent(this, CropImageActivity.class), REQUEST_EDIT_IMAGE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
// do nothing
switch (requestCode) {
case REQUEST_EDIT_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
onStartEditExistingImage();
}
break;
case REQUEST_ADD_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
onAddImage();
}
break;
case REQUEST_CHOOSE_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
onChooseImage();
}
break;
case REQUEST_UNSELECT_IMAGE_AFTER_LOGIN:
if (resultCode == RESULT_OK) {
unSelectImage();
}
break;
case REQUEST_EDIT_IMAGE:
applyEditExistingImage(resultCode, data);
break;
case REQUEST_CHOOSE_IMAGE:
if (resultCode == RESULT_OK && data != null) {
File file = (File) data.getSerializableExtra(ImageKeyHelper.IMAGE_FILE);
String imgId = data.getStringExtra(ImageKeyHelper.IMG_ID);
//photo choosed from gallery
if (file != null) {
onPhotoReturned(file);
} else if (StringUtils.isNotBlank(imgId)) {
HashMap<String, String> imgMap = new HashMap<>();
imgMap.put(ImageKeyHelper.IMG_ID, imgId);
postEditImage(imgMap);
}
}
break;
default:
new PhotoReceiverHandler(this).onActivityResult(this, requestCode, resultCode, data);
break;
}
}
/**
* @param resultCode should
* @param dataFromCropActivity from the crop activity. If not, action is ignored
*/
private void applyEditExistingImage(int resultCode, @Nullable Intent dataFromCropActivity) {
//delete downoaded local file
deleteLocalFiles();
// if the selected language is not the same than current image we can't modify: only add
if (isUserNotLoggedIn() || !updateLanguageStatus() || dataFromCropActivity == null) {
return;
}
if (resultCode == Activity.RESULT_OK) {
startRefresh(StringUtils.EMPTY);
CropImage.ActivityResult result = CropImage.getActivityResult(dataFromCropActivity);
final Product product = getProduct();
ImageTransformationUtils currentServerTransformation = ImageTransformationUtils.getInitialServerTransformation(product, getSelectedType(), getCurrentLanguage());
ImageTransformationUtils newServerTransformation = ImageTransformationUtils
.toServerTransformation(new ImageTransformationUtils(result.getRotation(), result.getCropRect()), product, getSelectedType(), getCurrentLanguage());
boolean isModified = !currentServerTransformation.equals(newServerTransformation);
if (isModified) {
startRefresh(getString(R.string.toastSending));
HashMap<String, String> imgMap = new HashMap<>();
imgMap.put(ImageKeyHelper.IMG_ID, newServerTransformation.getInitImageId());
ImageTransformationUtils.addTransformToMap(newServerTransformation, imgMap);
postEditImage(imgMap);
} else {
stopRefresh();
}
}
}
private void postEditImage(HashMap<String, String> imgMap) {
final String code = getProduct().getCode();
imgMap.put(ImageKeyHelper.PRODUCT_BARCODE, code);
imgMap.put(ImageKeyHelper.IMAGE_STRING_ID, ImageKeyHelper.getImageStringKey(getSelectedType(), getCurrentLanguage()));
binding.imageViewFullScreen.setVisibility(View.INVISIBLE);
client.editImage(code, imgMap, (value, response) -> {
if (value) {
setResult(RESULTCODE_MODIFIED);
}
reloadProduct();
});
}
private void deleteLocalFiles() {
if (lastViewedImage != null) {
boolean deleted = lastViewedImage.delete();
if (!deleted) {
Log.w(ProductImageManagementActivity.class.getSimpleName(), "cant delete file " + lastViewedImage);
} else {
lastViewedImage = null;
}
}
}
/**
* For scheduling a postponed transition after the proper measures of the view are done
* and the view has been properly laid out in the View hierarchy
*/
private void scheduleStartPostponedTransition(final View sharedElement) {
sharedElement.getViewTreeObserver().addOnPreDrawListener(
new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
sharedElement.getViewTreeObserver().removeOnPreDrawListener(this);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
startPostponedEnterTransition();
}
return true;
}
});
}
/**
* @param newPhotoFile photo selected by the user to be sent to the server.
*/
@Override
public void onPhotoReturned(File newPhotoFile) {
startRefresh(getString(R.string.uploading_image));
new Thread(() -> { // TODO: Use rxjava
ProductImage image = new ProductImage(getProduct().getCode(), getSelectedType(), newPhotoFile, getCurrentLanguage());
image.setFilePath(newPhotoFile.getAbsolutePath());
client.postImg(ProductImageManagementActivity.this, image, true, new ImageUploadListener() {
@Override
public void onSuccess() {
reloadProduct();
setResult(RESULTCODE_MODIFIED);
}
@Override
public void onFailure(String message) {
Toast.makeText(ProductImageManagementActivity.this, message, Toast.LENGTH_LONG).show();
stopRefresh();
}
});
}).start();
}
}
| fix: thread to Completable
| app/src/main/java/openfoodfacts/github/scrachx/openfood/views/ProductImageManagementActivity.java | fix: thread to Completable | <ide><path>pp/src/main/java/openfoodfacts/github/scrachx/openfood/views/ProductImageManagementActivity.java
<ide> import java.util.List;
<ide> import java.util.Set;
<ide>
<add>import io.reactivex.Completable;
<ide> import io.reactivex.android.schedulers.AndroidSchedulers;
<ide> import io.reactivex.disposables.CompositeDisposable;
<ide> import openfoodfacts.github.scrachx.openfood.R;
<ide> @Override
<ide> public void onPhotoReturned(File newPhotoFile) {
<ide> startRefresh(getString(R.string.uploading_image));
<del> new Thread(() -> { // TODO: Use rxjava
<add> disp.add(Completable.fromAction(() -> {
<ide> ProductImage image = new ProductImage(getProduct().getCode(), getSelectedType(), newPhotoFile, getCurrentLanguage());
<ide> image.setFilePath(newPhotoFile.getAbsolutePath());
<ide>
<del> client.postImg(ProductImageManagementActivity.this, image, true, new ImageUploadListener() {
<add> client.postImg(image, true, new ImageUploadListener() {
<ide> @Override
<ide> public void onSuccess() {
<ide> reloadProduct();
<ide> stopRefresh();
<ide> }
<ide> });
<del> }).start();
<add> }).observeOn(AndroidSchedulers.mainThread()).subscribe());
<ide> }
<ide> } |
|
Java | mit | 6044dd667eb94bb5b98e0fa6af667548ddf964b4 | 0 | tomvlk/BusinessRuleGenerator-to | package com.brg.mock;
import org.apache.ddlutils.model.Column;
import org.apache.ddlutils.model.Database;
import org.apache.ddlutils.model.Table;
public class TargetDatabaseMock {
private Database database;
private Table afdelingen, cursussen, historie, inschrijvingen, medewerkers, schalen, uitvoering;
public TargetDatabaseMock() {
database = new Database();
afdelingen = new Table();
cursussen = new Table();
historie = new Table();
inschrijvingen = new Table();
medewerkers = new Table();
schalen = new Table();
uitvoering = new Table();
createColumns();
setTableNames();
}
public void setTableNames() {
afdelingen.setName("AFDELINGEN");
cursussen.setName("CURSUSSEN");
historie.setName("HISTORIE");
inschrijvingen.setName("INSCHRIJVINGEN");
medewerkers.setName("MEDEWERKERS");
schalen.setName("SCHALEN");
uitvoering.setName("UITVOERINGEN");
database.addTable(afdelingen);
database.addTable(cursussen);
database.addTable(historie);
database.addTable(inschrijvingen);
database.addTable(medewerkers);
database.addTable(schalen);
database.addTable(uitvoering);
}
public void createColumns() {
afdelingColumns();
cursussenColumns();
historieColumns();
inschrijvingenColumns();
medewerkersColumns();
schalenColumns();
uitvoeringenColumns();
}
private void afdelingColumns() {
Column anr, naam, locatie, hoofd;
anr = new Column();
naam = new Column();
locatie = new Column();
hoofd = new Column();
anr.setName("ANR");
naam.setName("NAAM");
locatie.setName("LOCATIE");
hoofd.setName("HOOFD");
afdelingen.addColumn(anr);
afdelingen.addColumn(naam);
afdelingen.addColumn(locatie);
afdelingen.addColumn(hoofd);
}
private void cursussenColumns() {
Column code, omschrijving, type, lengthe;
code = new Column();
omschrijving = new Column();
type = new Column();
lengthe = new Column();
code.setName("CODE");
omschrijving.setName("OMSCHRIJVING");
type.setName("TYPE");
lengthe.setName("LENGTE");
cursussen.addColumn(code);
cursussen.addColumn(omschrijving);
cursussen.addColumn(type);
cursussen.addColumn(lengthe);
}
private void historieColumns() {
Column mnr, begindatum, beginjaar, einddatum, afd, maandsal, opmerking, gebeurtenis;
mnr = new Column();
begindatum = new Column();
beginjaar = new Column();
einddatum = new Column();
afd = new Column();
maandsal = new Column();
opmerking = new Column();
gebeurtenis = new Column();
mnr.setName("MNR");
begindatum.setName("BEGINDATUM");
beginjaar.setName("BEGINJAAR");
einddatum.setName("EINDDATUM");
afd.setName("AFD");
maandsal.setName("MAANDSAL");
opmerking.setName("OPMERKINGEN");
gebeurtenis.setName("GEBEURTENIS");
historie.addColumn(mnr);
historie.addColumn(begindatum);
historie.addColumn(einddatum);
historie.addColumn(afd);
historie.addColumn(maandsal);
historie.addColumn(opmerking);
historie.addColumn(gebeurtenis);
}
private void inschrijvingenColumns() {
Column cursist, cursus, begindatum, evaluatie;
cursist = new Column();
cursus = new Column();
begindatum = new Column();
evaluatie = new Column();
cursist.setName("CURSIST");
cursus.setName("CURSUS");
begindatum.setName("BEGINDATUM");
evaluatie.setName("EVALUATIE");
inschrijvingen.addColumn(cursist);
inschrijvingen.addColumn(cursus);
inschrijvingen.addColumn(begindatum);
inschrijvingen.addColumn(evaluatie);
}
private void medewerkersColumns() {
Column mnr, naam, voorl, functie, chef, gbdatum, maandsal, comm, afd;
mnr = new Column();
naam = new Column();
voorl = new Column();
functie = new Column();
chef = new Column();
gbdatum = new Column();
maandsal = new Column();
comm = new Column();
afd = new Column();
mnr.setName("MNR");
naam.setName("NAAM");
voorl.setName("VOORL");
functie.setName("FUNCTIE");
chef.setName("CHEF");
gbdatum.setName("GBDATUM");
maandsal.setName("MAANDSAL");
comm.setName("COMM");
afd.setName("AFD");
medewerkers.addColumn(mnr);
medewerkers.addColumn(naam);
medewerkers.addColumn(voorl);
medewerkers.addColumn(functie);
medewerkers.addColumn(chef);
medewerkers.addColumn(gbdatum);
medewerkers.addColumn(maandsal);
medewerkers.addColumn(comm);
medewerkers.addColumn(afd);
}
private void schalenColumns() {
Column snr, ondergrens, bovengrens, toelage;
snr = new Column();
ondergrens = new Column();
bovengrens = new Column();
toelage = new Column();
snr.setName("SNR");
ondergrens.setName("ONDERGRENS");
bovengrens.setName("BOVENGRENS");
toelage.setName("TOELAGE");
schalen.addColumn(snr);
schalen.addColumn(ondergrens);
schalen.addColumn(bovengrens);
schalen.addColumn(toelage);
}
private void uitvoeringenColumns() {
Column cursus, begindatum, docent, locatie;
cursus = new Column();
begindatum = new Column();
docent = new Column();
locatie = new Column();
cursus.setName("CURSUS");
begindatum.setName("BEGINDATUM");
docent.setName("DOCENT");
locatie.setName("LOCATIE");
uitvoering.addColumn(cursus);
uitvoering.addColumn(begindatum);
uitvoering.addColumn(docent);
uitvoering.addColumn(locatie);
}
public Database getDatabase() {
return database;
}
}
| src/main/java/com/brg/mock/TargetDatabaseMock.java | package com.brg.mock;
import org.apache.ddlutils.model.Column;
import org.apache.ddlutils.model.Database;
import org.apache.ddlutils.model.Table;
public class TargetDatabaseMock {
private Database database;
private Table afdelingen, cursussen, historie, inschrijvingen, medewerkers, schalen, uitvoering;
public TargetDatabaseMock() {
database = new Database();
afdelingen = new Table();
cursussen = new Table();
historie = new Table();
inschrijvingen = new Table();
medewerkers = new Table();
schalen = new Table();
uitvoering = new Table();
createColumns();
setTableNames();
}
public void setTableNames() {
afdelingen.setName("AFDELINGEN");
cursussen.setName("CURSUSSEN");
historie.setName("HISTORIE");
inschrijvingen.setName("INSCHRIJVINGEN");
medewerkers.setName("MEDEWERKERS");
schalen.setName("SCHALEN");
uitvoering.setName("UITVOERINGEN");
database.addTable(afdelingen);
database.addTable(cursussen);
database.addTable(historie);
database.addTable(inschrijvingen);
database.addTable(medewerkers);
database.addTable(schalen);
database.addTable(uitvoering);
}
public void createColumns() {
afdelingColumns();
cursussenColumns();
historieColumns();
inschrijvingenColumns();
medewerkersColumns();
schalenColumns();
uitvoeringenColumns();
}
private void afdelingColumns() {
Column anr, naam, locatie, hoofd;
anr = new Column();
naam = new Column();
locatie = new Column();
hoofd = new Column();
anr.setName("ANR");
naam.setName("NAAM");
locatie.setName("LOCATIE");
hoofd.setName("HOOFD");
afdelingen.addColumn(anr);
afdelingen.addColumn(naam);
afdelingen.addColumn(locatie);
afdelingen.addColumn(hoofd);
}
private void cursussenColumns() {
Column code, omschrijving, type, lengthe;
code = new Column();
omschrijving = new Column();
type = new Column();
lengthe = new Column();
code.setName("CODE");
omschrijving.setName("OMSCHRIJVING");
type.setName("TYPE");
lengthe.setName("LENGTE");
cursussen.addColumn(code);
cursussen.addColumn(omschrijving);
cursussen.addColumn(type);
cursussen.addColumn(lengthe);
}
private void historieColumns() {
Column mnr, begindatum, beginjaar, einddatum, afd, maandsal, opmerking;
mnr = new Column();
begindatum = new Column();
beginjaar = new Column();
einddatum = new Column();
afd = new Column();
maandsal = new Column();
opmerking = new Column();
mnr.setName("MNR");
begindatum.setName("BEGINDATUM");
beginjaar.setName("BEGINJAAR");
einddatum.setName("EINDDATUM");
afd.setName("AFD");
maandsal.setName("MAANDSAL");
opmerking.setName("OPMERKING");
historie.addColumn(mnr);
historie.addColumn(begindatum);
historie.addColumn(einddatum);
historie.addColumn(afd);
historie.addColumn(maandsal);
historie.addColumn(opmerking);
}
private void inschrijvingenColumns() {
Column cursist, cursus, begindatum, evaluatie;
cursist = new Column();
cursus = new Column();
begindatum = new Column();
evaluatie = new Column();
cursist.setName("CURSIST");
cursus.setName("CURSUS");
begindatum.setName("BEGINDATUM");
evaluatie.setName("EVALUATIE");
inschrijvingen.addColumn(cursist);
inschrijvingen.addColumn(cursus);
inschrijvingen.addColumn(begindatum);
inschrijvingen.addColumn(evaluatie);
}
private void medewerkersColumns() {
Column mnr, naam, voorl, functie, chef, gbdatum, maandsal, comm, afd;
mnr = new Column();
naam = new Column();
voorl = new Column();
functie = new Column();
chef = new Column();
gbdatum = new Column();
maandsal = new Column();
comm = new Column();
afd = new Column();
mnr.setName("MNR");
naam.setName("NAAM");
voorl.setName("VOORL");
functie.setName("FUNCTIE");
chef.setName("CHEF");
gbdatum.setName("GBDATUM");
maandsal.setName("MAANDSAL");
comm.setName("COMM");
afd.setName("AFD");
medewerkers.addColumn(mnr);
medewerkers.addColumn(naam);
medewerkers.addColumn(voorl);
medewerkers.addColumn(functie);
medewerkers.addColumn(chef);
medewerkers.addColumn(gbdatum);
medewerkers.addColumn(maandsal);
medewerkers.addColumn(comm);
medewerkers.addColumn(afd);
}
private void schalenColumns() {
Column snr, ondergrens, bovengrens, toelage;
snr = new Column();
ondergrens = new Column();
bovengrens = new Column();
toelage = new Column();
snr.setName("SNR");
ondergrens.setName("ONDERGRENS");
bovengrens.setName("BOVENGRENS");
toelage.setName("TOELAGE");
schalen.addColumn(snr);
schalen.addColumn(ondergrens);
schalen.addColumn(bovengrens);
schalen.addColumn(toelage);
}
private void uitvoeringenColumns() {
Column cursus, begindatum, docent, locatie;
cursus = new Column();
begindatum = new Column();
docent = new Column();
locatie = new Column();
cursus.setName("CURSUS");
begindatum.setName("BEGINDATUM");
docent.setName("DOCENT");
locatie.setName("LOCATIE");
uitvoering.addColumn(cursus);
uitvoering.addColumn(begindatum);
uitvoering.addColumn(docent);
uitvoering.addColumn(locatie);
}
public Database getDatabase() {
return database;
}
}
| typo plus added column
| src/main/java/com/brg/mock/TargetDatabaseMock.java | typo plus added column | <ide><path>rc/main/java/com/brg/mock/TargetDatabaseMock.java
<ide> }
<ide>
<ide> private void historieColumns() {
<del> Column mnr, begindatum, beginjaar, einddatum, afd, maandsal, opmerking;
<add> Column mnr, begindatum, beginjaar, einddatum, afd, maandsal, opmerking, gebeurtenis;
<ide> mnr = new Column();
<ide> begindatum = new Column();
<ide> beginjaar = new Column();
<ide> afd = new Column();
<ide> maandsal = new Column();
<ide> opmerking = new Column();
<add> gebeurtenis = new Column();
<ide>
<ide> mnr.setName("MNR");
<ide> begindatum.setName("BEGINDATUM");
<ide> einddatum.setName("EINDDATUM");
<ide> afd.setName("AFD");
<ide> maandsal.setName("MAANDSAL");
<del> opmerking.setName("OPMERKING");
<add> opmerking.setName("OPMERKINGEN");
<add> gebeurtenis.setName("GEBEURTENIS");
<ide>
<ide> historie.addColumn(mnr);
<ide> historie.addColumn(begindatum);
<ide> historie.addColumn(afd);
<ide> historie.addColumn(maandsal);
<ide> historie.addColumn(opmerking);
<add> historie.addColumn(gebeurtenis);
<ide> }
<ide>
<ide> private void inschrijvingenColumns() { |
|
Java | artistic-2.0 | d8e7b1d41608afd851fb13b263796ad13a8bec10 | 0 | salortiz/rakudo,LLFourn/rakudo,jonathanstowe/rakudo,samcv/rakudo,awwaiid/rakudo,salortiz/rakudo,cygx/rakudo,tony-o/rakudo,cygx/rakudo,awwaiid/rakudo,LLFourn/rakudo,raydiak/rakudo,ugexe/rakudo,softmoth/rakudo,MasterDuke17/rakudo,rakudo/rakudo,jonathanstowe/rakudo,softmoth/rakudo,tbrowder/rakudo,sjn/rakudo,sjn/rakudo,cygx/rakudo,ab5tract/rakudo,nbrown/rakudo,labster/rakudo,salortiz/rakudo,ab5tract/rakudo,b2gills/rakudo,raydiak/rakudo,lucasbuchala/rakudo,Gnouc/rakudo,cygx/rakudo,awwaiid/rakudo,tbrowder/rakudo,tony-o/rakudo,paultcochrane/rakudo,MasterDuke17/rakudo,LLFourn/rakudo,sjn/rakudo,lucasbuchala/rakudo,ab5tract/rakudo,labster/rakudo,Gnouc/rakudo,dankogai/rakudo,cognominal/rakudo,rakudo/rakudo,salortiz/rakudo,nunorc/rakudo,rakudo/rakudo,zostay/rakudo,tbrowder/rakudo,cognominal/rakudo,ugexe/rakudo,b2gills/rakudo,azawawi/rakudo,paultcochrane/rakudo,ungrim97/rakudo,cognominal/rakudo,labster/rakudo,awwaiid/rakudo,samcv/rakudo,samcv/rakudo,Gnouc/rakudo,cognominal/rakudo,b2gills/rakudo,dankogai/rakudo,cognominal/rakudo,ugexe/rakudo,skids/rakudo,paultcochrane/rakudo,MasterDuke17/rakudo,nunorc/rakudo,raydiak/rakudo,tbrowder/rakudo,paultcochrane/rakudo,b2gills/rakudo,ungrim97/rakudo,raydiak/rakudo,azawawi/rakudo,salortiz/rakudo,dankogai/rakudo,labster/rakudo,skids/rakudo,sjn/rakudo,MasterDuke17/rakudo,Gnouc/rakudo,awwaiid/rakudo,lucasbuchala/rakudo,nbrown/rakudo,LLFourn/rakudo,salortiz/rakudo,azawawi/rakudo,jonathanstowe/rakudo,usev6/rakudo,nunorc/rakudo,skids/rakudo,zostay/rakudo,azawawi/rakudo,sjn/rakudo,cygx/rakudo,usev6/rakudo,samcv/rakudo,ab5tract/rakudo,tony-o/rakudo,ugexe/rakudo,labster/rakudo,skids/rakudo,MasterDuke17/rakudo,softmoth/rakudo,nbrown/rakudo,nbrown/rakudo,usev6/rakudo,nbrown/rakudo,ugexe/rakudo,paultcochrane/rakudo,tony-o/rakudo,labster/rakudo,zostay/rakudo,tony-o/rakudo,tony-o/rakudo,ungrim97/rakudo,lucasbuchala/rakudo,usev6/rakudo,b2gills/rakudo,softmoth/rakudo,zostay/rakudo,tbrowder/rakudo,softmoth/rakudo,rakudo/rakudo,rakudo/rakudo,Gnouc/rakudo,tbrowder/rakudo,lucasbuchala/rakudo,jonathanstowe/rakudo,ab5tract/rakudo,skids/rakudo,ungrim97/rakudo,jonathanstowe/rakudo,samcv/rakudo,dankogai/rakudo,azawawi/rakudo,nbrown/rakudo,MasterDuke17/rakudo,dankogai/rakudo,Gnouc/rakudo,rakudo/rakudo,LLFourn/rakudo,usev6/rakudo,ungrim97/rakudo,nunorc/rakudo | package org.perl6.rakudo;
import java.util.*;
import org.perl6.nqp.runtime.*;
import org.perl6.nqp.sixmodel.*;
import org.perl6.nqp.sixmodel.reprs.ContextRefInstance;
@SuppressWarnings("unused")
public final class Binder {
/* Possible results of binding. */
public static final int BIND_RESULT_OK = 0;
public static final int BIND_RESULT_FAIL = 1;
public static final int BIND_RESULT_JUNCTION = 2;
/* Compile time trial binding result indicators. */
public static final int TRIAL_BIND_NOT_SURE = 0; /* Plausible, but need to check at runtime. */
public static final int TRIAL_BIND_OK = 1; /* Bind will always work out. */
public static final int TRIAL_BIND_NO_WAY = -1; /* Bind could never work out. */
/* Flags. */
private static final int SIG_ELEM_BIND_CAPTURE = 1;
private static final int SIG_ELEM_BIND_PRIVATE_ATTR = 2;
private static final int SIG_ELEM_BIND_PUBLIC_ATTR = 4;
private static final int SIG_ELEM_BIND_ATTRIBUTIVE = (SIG_ELEM_BIND_PRIVATE_ATTR | SIG_ELEM_BIND_PUBLIC_ATTR);
private static final int SIG_ELEM_SLURPY_POS = 8;
private static final int SIG_ELEM_SLURPY_NAMED = 16;
private static final int SIG_ELEM_SLURPY_LOL = 32;
private static final int SIG_ELEM_SLURPY = (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_NAMED | SIG_ELEM_SLURPY_LOL);
private static final int SIG_ELEM_INVOCANT = 64;
private static final int SIG_ELEM_MULTI_INVOCANT = 128;
private static final int SIG_ELEM_IS_RW = 256;
private static final int SIG_ELEM_IS_COPY = 512;
private static final int SIG_ELEM_IS_PARCEL = 1024;
private static final int SIG_ELEM_IS_OPTIONAL = 2048;
private static final int SIG_ELEM_ARRAY_SIGIL = 4096;
private static final int SIG_ELEM_HASH_SIGIL = 8192;
private static final int SIG_ELEM_DEFAULT_FROM_OUTER = 16384;
private static final int SIG_ELEM_IS_CAPTURE = 32768;
private static final int SIG_ELEM_UNDEFINED_ONLY = 65536;
private static final int SIG_ELEM_DEFINED_ONLY = 131072;
private static final int SIG_ELEM_DEFINEDNES_CHECK = (SIG_ELEM_UNDEFINED_ONLY | SIG_ELEM_DEFINED_ONLY);
private static final int SIG_ELEM_NOMINAL_GENERIC = 524288;
private static final int SIG_ELEM_DEFAULT_IS_LITERAL = 1048576;
private static final int SIG_ELEM_NATIVE_INT_VALUE = 2097152;
private static final int SIG_ELEM_NATIVE_NUM_VALUE = 4194304;
private static final int SIG_ELEM_NATIVE_STR_VALUE = 8388608;
private static final int SIG_ELEM_NATIVE_VALUE = (SIG_ELEM_NATIVE_INT_VALUE | SIG_ELEM_NATIVE_NUM_VALUE | SIG_ELEM_NATIVE_STR_VALUE);
/* Hints for Parameter attributes. */
private static final int HINT_variable_name = 0;
private static final int HINT_named_names = 1;
private static final int HINT_type_captures = 2;
private static final int HINT_flags = 3;
private static final int HINT_nominal_type = 4;
private static final int HINT_post_constraints = 5;
private static final int HINT_coerce_type = 6;
private static final int HINT_coerce_method = 7;
private static final int HINT_sub_signature = 8;
private static final int HINT_default_value = 9;
private static final int HINT_container_descriptor = 10;
private static final int HINT_attr_package = 11;
/* Other hints. */
private static final int HINT_ENUMMAP_storage = 0;
private static final int HINT_CAPTURE_list = 0;
private static final int HINT_CAPTURE_hash = 1;
private static final int HINT_SIG_params = 0;
private static SixModelObject createBox(ThreadContext tc, RakOps.GlobalExt gcx, Object arg, int flag) {
switch (flag) {
case CallSiteDescriptor.ARG_INT:
return Ops.box_i((long)arg, gcx.Int, tc);
case CallSiteDescriptor.ARG_NUM:
return Ops.box_n((double)arg, gcx.Num, tc);
case CallSiteDescriptor.ARG_STR:
return Ops.box_s((String)arg, gcx.Str, tc);
default:
throw new RuntimeException("Impossible case reached in createBox");
}
}
private static String arityFail(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject params,
int numParams, int numPosArgs, boolean tooMany) {
int arity = 0;
int count = 0;
String fail = tooMany ? "Too many" : "Too few";
/* Work out how many we could have been passed. */
for (int i = 0; i < numParams; i++) {
SixModelObject param = params.at_pos_boxed(tc, i);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
SixModelObject namedNames = param.get_attribute_boxed(tc,
gcx.Parameter, "$!named_names", HINT_named_names);
if (namedNames != null)
continue;
if ((flags & SIG_ELEM_SLURPY_NAMED) != 0)
continue;
if ((flags & SIG_ELEM_SLURPY_POS) != 0) {
count = -1;
}
else if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) {
count++;
}
else {
count++;
arity++;
}
}
/* Now generate decent error. */
if (arity == count)
return String.format(
"%s positionals passed; expected %d arguments but got %d",
fail, arity, numPosArgs);
else if (count == -1)
return String.format(
"%s positionals passed; expected at least %d arguments but got only %d",
fail, arity, numPosArgs);
else
return String.format(
"%s positionals passed; expected %d %s %d arguments but got %d",
fail, arity, arity + 1 == count ? "or" : "to" , count, numPosArgs);
}
/* Binds any type captures. */
public static void bindTypeCaptures(ThreadContext tc, SixModelObject typeCaps, CallFrame cf, SixModelObject type) {
long elems = typeCaps.elems(tc);
StaticCodeInfo sci = cf.codeRef.staticInfo;
for (long i = 0; i < elems; i++) {
String name = typeCaps.at_pos_boxed(tc, i).get_str(tc);
cf.oLex[sci.oTryGetLexicalIdx(name)] = type;
}
}
/* Assigns an attributive parameter to the desired attribute. */
private static int assignAttributive(ThreadContext tc, CallFrame cf, String varName,
int paramFlags, SixModelObject attrPackage, SixModelObject value, String[] error) {
/* Find self. */
StaticCodeInfo sci = cf.codeRef.staticInfo;
Integer selfIdx = sci.oTryGetLexicalIdx("self");
if (selfIdx == null) {
if (error != null)
error[0] = String.format(
"Unable to bind attributive parameter '%s' - could not find self",
varName);
return BIND_RESULT_FAIL;
}
SixModelObject self = cf.oLex[selfIdx];
/* If it's private, just need to fetch the attribute. */
SixModelObject assignee;
if ((paramFlags & SIG_ELEM_BIND_PRIVATE_ATTR) != 0) {
assignee = self.get_attribute_boxed(tc, attrPackage, varName, STable.NO_HINT);
}
/* Otherwise if it's public, do a method call to get the assignee. */
else {
throw new RuntimeException("$.x parameters NYI");
}
RakOps.p6store(assignee, value, tc);
return BIND_RESULT_OK;
}
/* Returns an appropriate failure mode (junction fail or normal fail). */
private static int juncOrFail(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject value) {
if (value.st.WHAT == gcx.Junction && Ops.isconcrete(value, tc) != 0)
return BIND_RESULT_JUNCTION;
else
return BIND_RESULT_FAIL;
}
/* Binds a single argument into the lexpad, after doing any checks that are
* needed. Also handles any type captures. If there is a sub signature, then
* re-enters the binder. Returns one of the BIND_RESULT_* codes. */
private static final CallSiteDescriptor genIns = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
private static final CallSiteDescriptor ACCEPTS_o = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
private static final CallSiteDescriptor ACCEPTS_i = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_INT }, null);
private static final CallSiteDescriptor ACCEPTS_n = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_NUM }, null);
private static final CallSiteDescriptor ACCEPTS_s = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_STR }, null);
private static final CallSiteDescriptor bindThrower = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_STR }, null);
private static int bindOneParam(ThreadContext tc, RakOps.GlobalExt gcx, CallFrame cf, SixModelObject param,
Object origArg, byte origFlag, boolean noNomTypeCheck, String[] error) {
/* Get parameter flags and variable name. */
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int paramFlags = (int)tc.native_i;
param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name);
String varName = tc.native_s;
if (RakOps.DEBUG_MODE)
System.err.println(varName);
/* We'll put the value to bind into one of the following locals, and
* flag will indicate what type of thing it is. */
int flag;
long arg_i = 0;
double arg_n = 0.0;
String arg_s = null;
SixModelObject arg_o = null;
/* Check if boxed/unboxed expections are met. */
int desiredNative = paramFlags & SIG_ELEM_NATIVE_VALUE;
boolean is_rw = (paramFlags & SIG_ELEM_IS_RW) != 0;
int gotNative = origFlag & 7;
if (is_rw && desiredNative != 0) {
switch (desiredNative) {
case SIG_ELEM_NATIVE_INT_VALUE:
if (gotNative != 0 || Ops.iscont_i((SixModelObject)origArg) == 0) {
if (error != null)
error[0] = String.format(
"Expected a native int argument for '%s'",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_NUM_VALUE:
if (gotNative != 0 || Ops.iscont_n((SixModelObject)origArg) == 0) {
if (error != null)
error[0] = String.format(
"Expected a native num argument for '%s'",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_STR_VALUE:
if (gotNative != 0 || Ops.iscont_s((SixModelObject)origArg) == 0) {
if (error != null)
error[0] = String.format(
"Expected a native str argument for '%s'",
varName);
return BIND_RESULT_FAIL;
}
break;
}
flag = CallSiteDescriptor.ARG_OBJ;
arg_o = (SixModelObject)origArg;
}
else if (desiredNative == 0 && gotNative == CallSiteDescriptor.ARG_OBJ) {
flag = gotNative;
arg_o = (SixModelObject)origArg;
}
else if (desiredNative == SIG_ELEM_NATIVE_INT_VALUE && gotNative == CallSiteDescriptor.ARG_INT) {
flag = gotNative;
arg_i = (long)origArg;
}
else if (desiredNative == SIG_ELEM_NATIVE_NUM_VALUE && gotNative == CallSiteDescriptor.ARG_NUM) {
flag = gotNative;
arg_n = (double)origArg;
}
else if (desiredNative == SIG_ELEM_NATIVE_STR_VALUE && gotNative == CallSiteDescriptor.ARG_STR) {
flag = gotNative;
arg_s = (String)origArg;
}
else if (desiredNative == 0) {
/* We need to do a boxing operation. */
flag = CallSiteDescriptor.ARG_OBJ;
arg_o = createBox(tc, gcx, origArg, gotNative);
}
else {
/* We need to do an unboxing opeation. */
SixModelObject decontValue = Ops.decont((SixModelObject)origArg, tc);
StorageSpec spec = decontValue.st.REPR.get_storage_spec(tc, decontValue.st);
switch (desiredNative) {
case SIG_ELEM_NATIVE_INT_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_INT) != 0) {
flag = CallSiteDescriptor.ARG_INT;
arg_i = decontValue.get_int(tc);
}
else {
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native int",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_NUM_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_NUM) != 0) {
flag = CallSiteDescriptor.ARG_NUM;
arg_n = decontValue.get_num(tc);
}
else {
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native num",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_STR_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_STR) != 0) {
flag = CallSiteDescriptor.ARG_STR;
arg_s = decontValue.get_str(tc);
}
else {
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native str",
varName);
return BIND_RESULT_FAIL;
}
break;
default:
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native type",
varName);
return BIND_RESULT_FAIL;
}
}
/* By this point, we'll either have an object that we might be able to
* bind if it passes the type check, or a native value that needs no
* further checking. */
SixModelObject decontValue = null;
boolean didHLLTransform = false;
if (flag == CallSiteDescriptor.ARG_OBJ && !(is_rw && desiredNative != 0)) {
/* We need to work on the decontainerized value. */
decontValue = Ops.decont(arg_o, tc);
/* HLL map it as needed. */
SixModelObject beforeHLLize = decontValue;
decontValue = Ops.hllize(decontValue, tc);
if (decontValue != beforeHLLize)
didHLLTransform = true;
/* Skip nominal type check if not needed. */
if (!noNomTypeCheck) {
/* Is the nominal type generic and in need of instantiation? (This
* can happen in (::T, T) where we didn't learn about the type until
* during the signature bind). */
SixModelObject nomType = param.get_attribute_boxed(tc, gcx.Parameter,
"$!nominal_type", HINT_nominal_type);
if ((paramFlags & SIG_ELEM_NOMINAL_GENERIC) != 0) {
SixModelObject HOW = nomType.st.HOW;
SixModelObject ig = Ops.findmethod(tc, HOW,
"instantiate_generic");
SixModelObject ContextRef = tc.gc.ContextRef;
SixModelObject cc = ContextRef.st.REPR.allocate(tc, ContextRef.st);
((ContextRefInstance)cc).context = cf;
Ops.invokeDirect(tc, ig, genIns,
new Object[] { HOW, nomType, cc });
nomType = Ops.result_o(tc.curFrame);
}
/* If not, do the check. If the wanted nominal type is Mu, then
* anything goes. */
if (nomType != gcx.Mu && Ops.istype_nodecont(decontValue, nomType, tc) == 0) {
/* Type check failed; produce error if needed. */
if (error != null) {
SixModelObject thrower = RakOps.getThrower(tc, "X::TypeCheck::Binding");
if (thrower != null && decontValue.st.WHAT != gcx.Junction) {
Ops.invokeDirect(tc, thrower,
bindThrower, new Object[] { decontValue.st.WHAT, nomType.st.WHAT, varName });
return BIND_RESULT_FAIL;
}
else
error[0] = String.format(
"Nominal type check failed for parameter '%s'",
varName);
}
/* Report junction failure mode if it's a junction. */
return juncOrFail(tc, gcx, decontValue);
}
/* Also enforce definedness check */
if ( (paramFlags & SIG_ELEM_DEFINEDNES_CHECK) != 0) {
/* Don't check decontValue for concreteness though, but arg_o,
seeing as we don't have a isconcrete_nodecont */
if ((paramFlags & SIG_ELEM_UNDEFINED_ONLY) != 0 && Ops.isconcrete(arg_o, tc) == 1) {
if (error != null) {
if ((paramFlags & SIG_ELEM_INVOCANT) != 0) {
error[0] = "Invocant requires a type object, but an object instance was passed";
}
else {
error[0] = String.format(
"Parameter '%s' requires a type object, but an object instance was passed",
varName);
}
}
return juncOrFail(tc, gcx, decontValue);
}
if ((paramFlags & SIG_ELEM_DEFINED_ONLY) != 0 && Ops.isconcrete(arg_o, tc) != 1) {
if (error != null) {
if ((paramFlags & SIG_ELEM_INVOCANT) != 0) {
error[0] = "Invocant requires an instance, but a type object was passed";
}
else {
error[0] = String.format(
"Parameter '%s' requires an instance, but a type object was passed",
varName);
}
}
return juncOrFail(tc, gcx, decontValue);
}
}
}
}
/* Type captures. */
SixModelObject typeCaps = param.get_attribute_boxed(tc, gcx.Parameter,
"$!type_captures", HINT_type_captures);
if (typeCaps != null)
bindTypeCaptures(tc, typeCaps, cf, decontValue.st.WHAT);
/* Do a coercion, if one is needed. */
SixModelObject coerceType = param.get_attribute_boxed(tc, gcx.Parameter,
"$!coerce_type", HINT_coerce_type);
if (coerceType != null) {
/* Coercing natives not possible - nothing to call a method on. */
if (flag != CallSiteDescriptor.ARG_OBJ) {
if (error != null)
error[0] = String.format(
"Unable to coerce natively typed parameter '%s'",
varName);
return BIND_RESULT_FAIL;
}
/* Only coerce if we don't already have the correct type. */
if (Ops.istype(decontValue, coerceType, tc) == 0) {
param.get_attribute_native(tc, gcx.Parameter, "$!coerce_method", HINT_coerce_method);
String methName = tc.native_s;
SixModelObject coerceMeth = Ops.findmethod(tc,
decontValue, methName);
if (coerceMeth != null) {
Ops.invokeDirect(tc, coerceMeth,
Ops.invocantCallSite,
new Object[] { decontValue });
arg_o = Ops.result_o(tc.curFrame);
decontValue = Ops.decont(arg_o, tc);
}
else {
if (error != null)
error[0] = String.format(
"Unable to coerce value for '%s' to %s; no coercion method defined",
varName, methName);
return BIND_RESULT_FAIL;
}
}
}
/* If it's not got attributive binding, we'll go about binding it into the
* lex pad. */
StaticCodeInfo sci = cf.codeRef.staticInfo;
if ((paramFlags & SIG_ELEM_BIND_ATTRIBUTIVE) == 0 && varName != null) {
/* Is it native? If so, just go ahead and bind it. */
if (flag != CallSiteDescriptor.ARG_OBJ) {
switch (flag) {
case CallSiteDescriptor.ARG_INT:
cf.iLex[sci.iTryGetLexicalIdx(varName)] = arg_i;
break;
case CallSiteDescriptor.ARG_NUM:
cf.nLex[sci.nTryGetLexicalIdx(varName)] = arg_n;
break;
case CallSiteDescriptor.ARG_STR:
cf.sLex[sci.sTryGetLexicalIdx(varName)] = arg_s;
break;
}
}
/* Otherwise it's some objecty case. */
else if (is_rw) {
/* XXX TODO Check if rw flag is set; also need to have a
* wrapper container that carries extra constraints. */
cf.oLex[sci.oTryGetLexicalIdx(varName)] = arg_o;
}
else if ((paramFlags & SIG_ELEM_IS_PARCEL) != 0) {
/* Just bind the thing as is into the lexpad. */
cf.oLex[sci.oTryGetLexicalIdx(varName)] = didHLLTransform ? decontValue : arg_o;
}
else {
/* If it's an array, copy means make a new one and store,
* and a normal bind is a straightforward binding plus
* adding a constraint. */
if ((paramFlags & SIG_ELEM_ARRAY_SIGIL) != 0) {
SixModelObject bindee = decontValue;
if ((paramFlags & SIG_ELEM_IS_COPY) != 0) {
throw ExceptionHandling.dieInternal(tc, "is copy on lists NYI after GLR");
//bindee = RakOps.p6list(gcx.EMPTYARR.clone(tc), gcx.Array, gcx.True, tc);
//RakOps.p6store(bindee, decontValue, tc);
}
cf.oLex[sci.oTryGetLexicalIdx(varName)] = bindee;
}
/* If it's a hash, similar approach to array. */
else if ((paramFlags & SIG_ELEM_HASH_SIGIL) != 0) {
SixModelObject bindee = decontValue;
if ((paramFlags & SIG_ELEM_IS_COPY) != 0) {
SixModelObject BOOTHash = tc.gc.BOOTHash;
bindee = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st);
bindee.bind_attribute_boxed(tc, gcx.EnumMap, "$!storage",
HINT_ENUMMAP_storage, BOOTHash.st.REPR.allocate(tc, BOOTHash.st));
RakOps.p6store(bindee, decontValue, tc);
}
cf.oLex[sci.oTryGetLexicalIdx(varName)] = bindee;
}
/* If it's a scalar, we always need to wrap it into a new
* container and store it, for copy or ro case (the rw bit
* in the container descriptor takes care of the rest). */
else {
STable stScalar = gcx.Scalar.st;
SixModelObject new_cont = stScalar.REPR.allocate(tc, stScalar);
SixModelObject desc = param.get_attribute_boxed(tc, gcx.Parameter,
"$!container_descriptor", HINT_container_descriptor);
new_cont.bind_attribute_boxed(tc, gcx.Scalar, "$!descriptor",
RakudoContainerSpec.HINT_descriptor, desc);
new_cont.bind_attribute_boxed(tc, gcx.Scalar, "$!value",
RakudoContainerSpec.HINT_value, decontValue);
cf.oLex[sci.oTryGetLexicalIdx(varName)] = new_cont;
}
}
}
/* Is it the invocant? If so, also have to bind to self lexical. */
if ((paramFlags & SIG_ELEM_INVOCANT) != 0)
cf.oLex[sci.oTryGetLexicalIdx("self")] = decontValue;
/* Handle any constraint types (note that they may refer to the parameter by
* name, so we need to have bound it already). */
SixModelObject postConstraints = param.get_attribute_boxed(tc, gcx.Parameter,
"$!post_contraints", HINT_post_constraints);
if (postConstraints != null) {
long numConstraints = postConstraints.elems(tc);
for (long i = 0; i < numConstraints; i++) {
/* Check we meet the constraint. */
SixModelObject consType = postConstraints.at_pos_boxed(tc, i);
SixModelObject acceptsMeth = Ops.findmethod(consType, "ACCEPTS", tc);
if (Ops.istype(consType, gcx.Code, tc) != 0)
RakOps.p6capturelex(consType, tc);
switch (flag) {
case CallSiteDescriptor.ARG_INT:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_i, new Object[] { consType, arg_i });
break;
case CallSiteDescriptor.ARG_NUM:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_n, new Object[] { consType, arg_n });
break;
case CallSiteDescriptor.ARG_STR:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_s, new Object[] { consType, arg_s });
break;
default:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_o, new Object[] { consType, arg_o });
break;
}
long result = Ops.istrue(
Ops.result_o(tc.curFrame), tc);
if (result == 0) {
if (error != null)
error[0] = "Constraint type check failed for parameter '" + varName + "'";
return BIND_RESULT_FAIL;
}
}
}
/* TODO: attributives. */
if ((paramFlags & SIG_ELEM_BIND_ATTRIBUTIVE) != 0) {
if (flag != CallSiteDescriptor.ARG_OBJ) {
if (error != null)
error[0] = "Native attributive binding not yet implemented";
return BIND_RESULT_FAIL;
}
int result = assignAttributive(tc, cf, varName, paramFlags,
param.get_attribute_boxed(tc, gcx.Parameter, "$!attr_package", HINT_attr_package),
decontValue, error);
if (result != BIND_RESULT_OK)
return result;
}
/* If it has a sub-signature, bind that. */
SixModelObject subSignature = param.get_attribute_boxed(tc, gcx.Parameter,
"$!sub_signature", HINT_sub_signature);
if (subSignature != null && flag == CallSiteDescriptor.ARG_OBJ) {
/* Turn value into a capture, unless we already have one. */
SixModelObject capture = null;
int result;
if ((paramFlags & SIG_ELEM_IS_CAPTURE) != 0) {
capture = decontValue;
}
else {
SixModelObject meth = Ops.findmethod(decontValue, "Capture", tc);
if (meth == null) {
if (error != null)
error[0] = "Could not turn argument into capture";
return BIND_RESULT_FAIL;
}
Ops.invokeDirect(tc, meth, Ops.invocantCallSite, new Object[] { decontValue });
capture = Ops.result_o(tc.curFrame);
}
SixModelObject subParams = subSignature
.get_attribute_boxed(tc, gcx.Signature, "$!params", HINT_SIG_params);
/* Recurse into signature binder. */
CallSiteDescriptor subCsd = explodeCapture(tc, gcx, capture);
result = bind(tc, gcx, cf, subParams, subCsd, tc.flatArgs, noNomTypeCheck, error);
if (result != BIND_RESULT_OK)
{
if (error != null) {
/* Note in the error message that we're in a sub-signature. */
error[0] += " in sub-signature";
/* Have we a variable name? */
if (varName != null) {
error[0] += " of parameter " + varName;
}
}
return result;
}
}
if (RakOps.DEBUG_MODE)
System.err.println("bindOneParam NYFI");
return BIND_RESULT_OK;
}
private static final CallSiteDescriptor exploder = new CallSiteDescriptor(new byte[] {
CallSiteDescriptor.ARG_OBJ | CallSiteDescriptor.ARG_FLAT,
CallSiteDescriptor.ARG_OBJ | CallSiteDescriptor.ARG_FLAT | CallSiteDescriptor.ARG_NAMED
}, null);
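    /* The two flags above mark a flattening callsite: the first slot is a
     * positional list to be flattened, the second a named-argument hash to
     * be flattened, which is how a Capture's contents become a flat
     * argument list for the recursive bind below. */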
public static CallSiteDescriptor explodeCapture(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject capture) {
capture = Ops.decont(capture, tc);
SixModelObject capType = gcx.Capture;
SixModelObject list = capture.get_attribute_boxed(tc, capType, "$!list", HINT_CAPTURE_list);
SixModelObject hash = capture.get_attribute_boxed(tc, capType, "$!hash", HINT_CAPTURE_hash);
if (list == null)
list = gcx.EMPTYARR;
if (hash == null)
hash = gcx.EMPTYHASH;
return exploder.explodeFlattening(tc.curFrame, new Object[] { list, hash });
}
/* This takes a signature element and either runs the closure to get a default
* value if there is one, or creates an appropriate undefined-ish thingy. */
private static SixModelObject handleOptional(ThreadContext tc, RakOps.GlobalExt gcx, int flags, SixModelObject param, CallFrame cf) {
/* Is the "get default from outer" flag set? */
if ((flags & SIG_ELEM_DEFAULT_FROM_OUTER) != 0) {
param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name);
String varName = tc.native_s;
CallFrame curOuter = cf.outer;
while (curOuter != null) {
Integer idx = curOuter.codeRef.staticInfo.oTryGetLexicalIdx(varName);
if (idx != null)
return curOuter.oLex[idx];
curOuter = curOuter.outer;
}
return null;
}
/* Do we have a default value or value closure? */
SixModelObject defaultValue = param.get_attribute_boxed(tc, gcx.Parameter,
"$!default_value", HINT_default_value);
if (defaultValue != null) {
if ((flags & SIG_ELEM_DEFAULT_IS_LITERAL) != 0) {
return defaultValue;
}
else {
/* Thunk; run it to get a value. */
Ops.invokeArgless(tc, defaultValue);
return Ops.result_o(tc.curFrame);
}
}
/* Otherwise, go by sigil to pick the correct default type of value. */
else {
if ((flags & SIG_ELEM_ARRAY_SIGIL) != 0) {
throw ExceptionHandling.dieInternal(tc, "optional array param NYI after GLR");
//return RakOps.p6list(null, gcx.Array, gcx.True, tc);
}
else if ((flags & SIG_ELEM_HASH_SIGIL) != 0) {
SixModelObject res = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st);
return res;
}
else {
return param.get_attribute_boxed(tc, gcx.Parameter, "$!nominal_type", HINT_nominal_type);
}
}
}
/* Takes a signature along with positional and named arguments and binds them
* into the provided callframe. Returns BIND_RESULT_OK if binding works out,
* BIND_RESULT_FAIL if there is a failure and BIND_RESULT_JUNCTION if the
* failure was because of a Junction being passed (meaning we need to auto-thread). */
private static final CallSiteDescriptor slurpyFromArgs = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
public static int bind(ThreadContext tc, RakOps.GlobalExt gcx, CallFrame cf, SixModelObject params,
CallSiteDescriptor csd, Object[] args,
boolean noNomTypeCheck, String[] error) {
int bindFail = BIND_RESULT_OK;
int curPosArg = 0;
/* If we have a |$foo that's followed by slurpies, then we can suppress
* any future arity checks. */
boolean suppressArityFail = false;
/* If we do have some named args, we want to make a clone of the hash
* to work on. We'll delete stuff from it as we bind, and what we have
* left over can become the slurpy hash or - if we aren't meant to be
* taking one - tell us we have a problem. */
HashMap<String, Integer> namedArgsCopy = csd.nameMap == null
? null
: new HashMap<String, Integer>(csd.nameMap);
/* Now we'll walk through the signature and go about binding things. */
int numPosArgs = csd.numPositionals;
long numParams = params.elems(tc);
for (long i = 0; i < numParams; i++) {
/* Get parameter, its flags and any named names. */
SixModelObject param = params.at_pos_boxed(tc, i);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
SixModelObject namedNames = param.get_attribute_boxed(tc,
gcx.Parameter, "$!named_names", HINT_named_names);
/* Is it looking for us to bind a capture here? */
if ((flags & SIG_ELEM_IS_CAPTURE) != 0) {
/* Capture the arguments from this point forwards into a Capture.
* Of course, if there's no variable name we can (cheaply) do pretty
* much nothing. */
param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name);
if (tc.native_s == null) {
bindFail = BIND_RESULT_OK;
}
else {
SixModelObject posArgs = gcx.EMPTYARR.clone(tc);
for (int k = curPosArg; k < numPosArgs; k++) {
switch (csd.argFlags[k]) {
case CallSiteDescriptor.ARG_OBJ:
posArgs.push_boxed(tc, (SixModelObject)args[k]);
break;
case CallSiteDescriptor.ARG_INT:
posArgs.push_boxed(tc, RakOps.p6box_i((long)args[k], tc));
break;
case CallSiteDescriptor.ARG_NUM:
posArgs.push_boxed(tc, RakOps.p6box_n((double)args[k], tc));
break;
case CallSiteDescriptor.ARG_STR:
posArgs.push_boxed(tc, RakOps.p6box_s((String)args[k], tc));
break;
}
}
SixModelObject namedArgs = vmHashOfRemainingNameds(tc, gcx, namedArgsCopy, args);
SixModelObject capType = gcx.Capture;
SixModelObject capSnap = capType.st.REPR.allocate(tc, capType.st);
capSnap.bind_attribute_boxed(tc, capType, "$!list", HINT_CAPTURE_list, posArgs);
capSnap.bind_attribute_boxed(tc, capType, "$!hash", HINT_CAPTURE_hash, namedArgs);
bindFail = bindOneParam(tc, gcx, cf, param, capSnap, CallSiteDescriptor.ARG_OBJ,
noNomTypeCheck, error);
}
if (bindFail != 0) {
return bindFail;
}
else if (i + 1 == numParams) {
/* Since a capture acts as "the ultimate slurpy" in a sense, if
* this is the last parameter in the signature we can return
* success right off the bat. */
return BIND_RESULT_OK;
}
else {
SixModelObject nextParam = params.at_pos_boxed(tc, i + 1);
nextParam.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
if (((int)tc.native_i & (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_NAMED)) != 0)
suppressArityFail = true;
}
}
/* Could it be a named slurpy? */
else if ((flags & SIG_ELEM_SLURPY_NAMED) != 0) {
SixModelObject slurpy = vmHashOfRemainingNameds(tc, gcx, namedArgsCopy, args);
SixModelObject bindee = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st);
bindee.bind_attribute_boxed(tc, gcx.EnumMap, "$!storage",
HINT_ENUMMAP_storage, slurpy);
bindFail = bindOneParam(tc, gcx, cf, param, bindee, CallSiteDescriptor.ARG_OBJ,
noNomTypeCheck, error);
if (bindFail != 0)
return bindFail;
/* Nullify named arguments hash now we've consumed it, to mark all
* is well. */
namedArgsCopy = null;
}
/* Otherwise, maybe it's a positional of some kind. */
else if (namedNames == null) {
/* Slurpy or LoL-slurpy? */
if ((flags & (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_LOL)) != 0) {
/* Create Perl 6 array, create VM array of all remaining things,
* then store it. */
SixModelObject slurpy = gcx.EMPTYARR.clone(tc);
while (curPosArg < numPosArgs) {
switch (csd.argFlags[curPosArg]) {
case CallSiteDescriptor.ARG_OBJ:
slurpy.push_boxed(tc, (SixModelObject)args[curPosArg]);
break;
case CallSiteDescriptor.ARG_INT:
slurpy.push_boxed(tc, RakOps.p6box_i((long)args[curPosArg], tc));
break;
case CallSiteDescriptor.ARG_NUM:
slurpy.push_boxed(tc, RakOps.p6box_n((double)args[curPosArg], tc));
break;
case CallSiteDescriptor.ARG_STR:
slurpy.push_boxed(tc, RakOps.p6box_s((String)args[curPosArg], tc));
break;
}
curPosArg++;
}
SixModelObject slurpyType = (flags & SIG_ELEM_IS_RW) != 0 ? gcx.List : gcx.Array;
SixModelObject sm = Ops.findmethod(tc, slurpyType,
(flags & SIG_ELEM_SLURPY_POS) == 0 ? "from-slurpy-flat" : "from-slurpy");
Ops.invokeDirect(tc, sm, slurpyFromArgs, new Object[] { slurpyType, slurpy });
SixModelObject bindee = Ops.result_o(tc.curFrame);
bindFail = bindOneParam(tc, gcx, cf, param, bindee, CallSiteDescriptor.ARG_OBJ,
noNomTypeCheck, error);
if (bindFail != 0)
return bindFail;
}
/* Otherwise, a positional. */
else {
                    /* Do we have a value? */
if (curPosArg < numPosArgs) {
/* Easy - just bind that. */
bindFail = bindOneParam(tc, gcx, cf, param, args[curPosArg],
csd.argFlags[curPosArg], noNomTypeCheck, error);
if (bindFail != 0)
return bindFail;
curPosArg++;
}
else {
/* No value. If it's optional, fetch a default and bind that;
* if not, we're screwed. Note that we never nominal type check
* an optional with no value passed. */
if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) {
bindFail = bindOneParam(tc, gcx, cf, param,
handleOptional(tc, gcx, flags, param, cf),
CallSiteDescriptor.ARG_OBJ, false, error);
if (bindFail != 0)
return bindFail;
}
else {
if (error != null)
error[0] = arityFail(tc, gcx, params, (int)numParams, numPosArgs, false);
return BIND_RESULT_FAIL;
}
}
}
}
/* Else, it's a non-slurpy named. */
else {
/* Try and get hold of value. */
Integer lookup = null;
if (namedArgsCopy != null) {
long numNames = namedNames.elems(tc);
for (long j = 0; j < numNames; j++) {
String name = namedNames.at_pos_boxed(tc, j).get_str(tc);
lookup = namedArgsCopy.remove(name);
if (lookup != null)
break;
}
}
/* Did we get one? */
if (lookup == null) {
/* Nope. We'd better hope this param was optional... */
if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) {
bindFail = bindOneParam(tc, gcx, cf, param,
handleOptional(tc, gcx, flags, param, cf),
CallSiteDescriptor.ARG_OBJ, false, error);
}
else if (!suppressArityFail) {
if (error != null)
error[0] = "Required named argument '" +
namedNames.at_pos_boxed(tc, 0).get_str(tc) +
"' not passed";
return BIND_RESULT_FAIL;
}
}
else {
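                    /* Values in the name map pack two fields into one int:
                     * the upper bits hold the argument's index in args, and
                     * the low three bits hold its CallSiteDescriptor.ARG_*
                     * type flag - hence the lookup >> 3 and lookup & 7. */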
bindFail = bindOneParam(tc, gcx, cf, param, args[lookup >> 3],
(byte)(lookup & 7), noNomTypeCheck, error);
}
/* If we got a binding failure, return it. */
if (bindFail != 0)
return bindFail;
}
}
/* Do we have any left-over args? */
if (curPosArg < numPosArgs && !suppressArityFail) {
/* Oh noes, too many positionals passed. */
if (error != null)
error[0] = arityFail(tc, gcx, params, (int)numParams, numPosArgs, true);
return BIND_RESULT_FAIL;
}
if (namedArgsCopy != null && namedArgsCopy.size() > 0) {
/* Oh noes, unexpected named args. */
if (error != null) {
int numExtra = namedArgsCopy.size();
if (numExtra == 1) {
for (String name : namedArgsCopy.keySet())
error[0] = "Unexpected named argument '" + name + "' passed";
}
else {
boolean first = true;
error[0] = numExtra + " unexpected named arguments passed (";
for (String name : namedArgsCopy.keySet()) {
if (!first)
error[0] += ", ";
else
first = false;
error[0] += name;
}
error[0] += ")";
}
}
return BIND_RESULT_FAIL;
}
/* If we get here, we're done. */
return BIND_RESULT_OK;
}
/* Takes any nameds we didn't capture yet and makes a VM Hash of them. */
private static SixModelObject vmHashOfRemainingNameds(ThreadContext tc, RakOps.GlobalExt gcx, HashMap<String, Integer> namedArgsCopy, Object[] args) {
SixModelObject slurpy = gcx.Mu;
if (namedArgsCopy != null) {
SixModelObject BOOTHash = tc.gc.BOOTHash;
slurpy = BOOTHash.st.REPR.allocate(tc, BOOTHash.st);
for (String name : namedArgsCopy.keySet()) {
int lookup = namedArgsCopy.get(name);
switch (lookup & 7) {
case CallSiteDescriptor.ARG_OBJ:
slurpy.bind_key_boxed(tc, name, (SixModelObject)args[lookup >> 3]);
break;
case CallSiteDescriptor.ARG_INT:
slurpy.bind_key_boxed(tc, name, RakOps.p6box_i((long)args[lookup >> 3], tc));
break;
case CallSiteDescriptor.ARG_NUM:
slurpy.bind_key_boxed(tc, name, RakOps.p6box_n((double)args[lookup >> 3], tc));
break;
case CallSiteDescriptor.ARG_STR:
slurpy.bind_key_boxed(tc, name, RakOps.p6box_s((String)args[lookup >> 3], tc));
break;
}
}
}
return slurpy;
}
/* Compile time trial binding; tries to determine at compile time whether
* certain binds will/won't work. */
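    /* Returns TRIAL_BIND_OK when the bind is certain to succeed,
     * TRIAL_BIND_NO_WAY when it can never succeed, and TRIAL_BIND_NOT_SURE
     * when only a runtime bind can decide. */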
public static int trialBind(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject params,
CallSiteDescriptor csd, Object[] args) {
/* If there's a single capture parameter, then we're OK. (Worth
* handling especially as it's the common case for protos). */
int numParams = (int)params.elems(tc);
if (numParams == 1) {
SixModelObject param = params.at_pos_boxed(tc, 0);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
if ((flags & SIG_ELEM_IS_CAPTURE) != 0)
return TRIAL_BIND_OK;
}
/* Walk through the signature and consider the parameters. */
int numPosArgs = csd.numPositionals;
int curPosArg = 0;
for (int i = 0; i < numParams; i++) {
/* If the parameter is anything other than a boring old
* positional parameter, we won't analyze it. */
SixModelObject param = params.at_pos_boxed(tc, i);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
if ((flags & ~(
SIG_ELEM_MULTI_INVOCANT | SIG_ELEM_IS_PARCEL |
SIG_ELEM_IS_COPY | SIG_ELEM_ARRAY_SIGIL |
SIG_ELEM_HASH_SIGIL | SIG_ELEM_NATIVE_VALUE |
SIG_ELEM_IS_OPTIONAL)) != 0)
return TRIAL_BIND_NOT_SURE;
SixModelObject namedNames = param.get_attribute_boxed(tc,
gcx.Parameter, "$!named_names", HINT_named_names);
if (namedNames != null)
return TRIAL_BIND_NOT_SURE;
SixModelObject postConstraints = param.get_attribute_boxed(tc,
gcx.Parameter, "$!post_constraints", HINT_post_constraints);
if (postConstraints != null)
return TRIAL_BIND_NOT_SURE;
SixModelObject typeCaptures = param.get_attribute_boxed(tc,
gcx.Parameter, "$!type_captures", HINT_type_captures);
if (typeCaptures != null)
return TRIAL_BIND_NOT_SURE;
SixModelObject coerceType = param.get_attribute_boxed(tc,
gcx.Parameter, "$!coerce_type", HINT_coerce_type);
if (coerceType != null)
return TRIAL_BIND_NOT_SURE;
/* Do we have an argument for this parameter? */
if (curPosArg >= numPosArgs) {
                /* No; if it's not optional, fail. */
if ((flags & SIG_ELEM_IS_OPTIONAL) == 0)
return TRIAL_BIND_NO_WAY;
}
else {
/* Yes, need to consider type. */
int gotPrim = csd.argFlags[curPosArg];
if ((flags & SIG_ELEM_NATIVE_VALUE) != 0) {
if (gotPrim == CallSiteDescriptor.ARG_OBJ) {
/* We got an object; if we aren't sure we can unbox, we can't
* be sure about the dispatch. */
SixModelObject arg = (SixModelObject)args[i];
StorageSpec spec = arg.st.REPR.get_storage_spec(tc, arg.st);
switch (flags & SIG_ELEM_NATIVE_VALUE) {
case SIG_ELEM_NATIVE_INT_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_INT) == 0)
return TRIAL_BIND_NOT_SURE;
break;
case SIG_ELEM_NATIVE_NUM_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_NUM) == 0)
return TRIAL_BIND_NOT_SURE;
break;
case SIG_ELEM_NATIVE_STR_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_STR) == 0)
return TRIAL_BIND_NOT_SURE;
break;
default:
/* WTF... */
return TRIAL_BIND_NOT_SURE;
}
}
else {
/* If it's the wrong type of native, there's no way it
* can ever bind. */
if (((flags & SIG_ELEM_NATIVE_INT_VALUE) != 0 && gotPrim != CallSiteDescriptor.ARG_INT) ||
((flags & SIG_ELEM_NATIVE_NUM_VALUE) != 0 && gotPrim != CallSiteDescriptor.ARG_NUM) ||
((flags & SIG_ELEM_NATIVE_STR_VALUE) != 0 && gotPrim != CallSiteDescriptor.ARG_STR))
return TRIAL_BIND_NO_WAY;
}
}
else {
/* Work out a parameter type to consider, and see if it matches. */
SixModelObject arg =
gotPrim == CallSiteDescriptor.ARG_OBJ ? (SixModelObject)args[curPosArg] :
gotPrim == CallSiteDescriptor.ARG_INT ? gcx.Int :
gotPrim == CallSiteDescriptor.ARG_NUM ? gcx.Num :
gcx.Str;
SixModelObject nominalType = param.get_attribute_boxed(tc,
gcx.Parameter, "$!nominal_type", HINT_nominal_type);
if (nominalType != gcx.Mu && Ops.istype(arg, nominalType, tc) == 0) {
/* If it failed because we got a junction, may auto-thread;
* hand back "not sure" for now. */
if (arg.st.WHAT == gcx.Junction)
return TRIAL_BIND_NOT_SURE;
                        /* It failed, but that doesn't mean it can't work at runtime;
* we perhaps want an Int, and the most we know is we have an Any,
* which would include Int. However, the Int ~~ Str case can be
* rejected now, as there's no way it'd ever match. Basically, we
* just flip the type check around. */
return Ops.istype(nominalType, arg, tc) != 0
? TRIAL_BIND_NOT_SURE
: TRIAL_BIND_NO_WAY;
}
}
}
/* Continue to next argument. */
curPosArg++;
}
/* If we have any left over arguments, it's a binding fail. */
if (curPosArg < numPosArgs)
return TRIAL_BIND_NO_WAY;
/* Otherwise, if we get there, all is well. */
return TRIAL_BIND_OK;
}
}
| src/vm/jvm/runtime/org/perl6/rakudo/Binder.java | package org.perl6.rakudo;
import java.util.*;
import org.perl6.nqp.runtime.*;
import org.perl6.nqp.sixmodel.*;
import org.perl6.nqp.sixmodel.reprs.ContextRefInstance;
@SuppressWarnings("unused")
public final class Binder {
/* Possible results of binding. */
public static final int BIND_RESULT_OK = 0;
public static final int BIND_RESULT_FAIL = 1;
public static final int BIND_RESULT_JUNCTION = 2;
/* Compile time trial binding result indicators. */
public static final int TRIAL_BIND_NOT_SURE = 0; /* Plausible, but need to check at runtime. */
public static final int TRIAL_BIND_OK = 1; /* Bind will always work out. */
public static final int TRIAL_BIND_NO_WAY = -1; /* Bind could never work out. */
/* Flags. */
private static final int SIG_ELEM_BIND_CAPTURE = 1;
private static final int SIG_ELEM_BIND_PRIVATE_ATTR = 2;
private static final int SIG_ELEM_BIND_PUBLIC_ATTR = 4;
private static final int SIG_ELEM_BIND_ATTRIBUTIVE = (SIG_ELEM_BIND_PRIVATE_ATTR | SIG_ELEM_BIND_PUBLIC_ATTR);
private static final int SIG_ELEM_SLURPY_POS = 8;
private static final int SIG_ELEM_SLURPY_NAMED = 16;
private static final int SIG_ELEM_SLURPY_LOL = 32;
private static final int SIG_ELEM_SLURPY = (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_NAMED | SIG_ELEM_SLURPY_LOL);
private static final int SIG_ELEM_INVOCANT = 64;
private static final int SIG_ELEM_MULTI_INVOCANT = 128;
private static final int SIG_ELEM_IS_RW = 256;
private static final int SIG_ELEM_IS_COPY = 512;
private static final int SIG_ELEM_IS_PARCEL = 1024;
private static final int SIG_ELEM_IS_OPTIONAL = 2048;
private static final int SIG_ELEM_ARRAY_SIGIL = 4096;
private static final int SIG_ELEM_HASH_SIGIL = 8192;
private static final int SIG_ELEM_DEFAULT_FROM_OUTER = 16384;
private static final int SIG_ELEM_IS_CAPTURE = 32768;
private static final int SIG_ELEM_UNDEFINED_ONLY = 65536;
private static final int SIG_ELEM_DEFINED_ONLY = 131072;
private static final int SIG_ELEM_DEFINEDNES_CHECK = (SIG_ELEM_UNDEFINED_ONLY | SIG_ELEM_DEFINED_ONLY);
private static final int SIG_ELEM_NOMINAL_GENERIC = 524288;
private static final int SIG_ELEM_DEFAULT_IS_LITERAL = 1048576;
private static final int SIG_ELEM_NATIVE_INT_VALUE = 2097152;
private static final int SIG_ELEM_NATIVE_NUM_VALUE = 4194304;
private static final int SIG_ELEM_NATIVE_STR_VALUE = 8388608;
private static final int SIG_ELEM_NATIVE_VALUE = (SIG_ELEM_NATIVE_INT_VALUE | SIG_ELEM_NATIVE_NUM_VALUE | SIG_ELEM_NATIVE_STR_VALUE);
/* Hints for Parameter attributes. */
private static final int HINT_variable_name = 0;
private static final int HINT_named_names = 1;
private static final int HINT_type_captures = 2;
private static final int HINT_flags = 3;
private static final int HINT_nominal_type = 4;
private static final int HINT_post_constraints = 5;
private static final int HINT_coerce_type = 6;
private static final int HINT_coerce_method = 7;
private static final int HINT_sub_signature = 8;
private static final int HINT_default_value = 9;
private static final int HINT_container_descriptor = 10;
private static final int HINT_attr_package = 11;
/* Other hints. */
private static final int HINT_ENUMMAP_storage = 0;
private static final int HINT_CAPTURE_list = 0;
private static final int HINT_CAPTURE_hash = 1;
private static final int HINT_SIG_params = 0;
private static SixModelObject createBox(ThreadContext tc, RakOps.GlobalExt gcx, Object arg, int flag) {
switch (flag) {
case CallSiteDescriptor.ARG_INT:
return Ops.box_i((long)arg, gcx.Int, tc);
case CallSiteDescriptor.ARG_NUM:
return Ops.box_n((double)arg, gcx.Num, tc);
case CallSiteDescriptor.ARG_STR:
return Ops.box_s((String)arg, gcx.Str, tc);
default:
throw new RuntimeException("Impossible case reached in createBox");
}
}
private static String arityFail(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject params,
int numParams, int numPosArgs, boolean tooMany) {
int arity = 0;
int count = 0;
String fail = tooMany ? "Too many" : "Too few";
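        /* Here "arity" counts required positionals and "count" the maximum
         * number accepted; count becomes -1 once a positional slurpy is
         * seen, meaning there is no upper limit. */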
/* Work out how many we could have been passed. */
for (int i = 0; i < numParams; i++) {
SixModelObject param = params.at_pos_boxed(tc, i);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
SixModelObject namedNames = param.get_attribute_boxed(tc,
gcx.Parameter, "$!named_names", HINT_named_names);
if (namedNames != null)
continue;
if ((flags & SIG_ELEM_SLURPY_NAMED) != 0)
continue;
if ((flags & SIG_ELEM_SLURPY_POS) != 0) {
count = -1;
}
else if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) {
count++;
}
else {
count++;
arity++;
}
}
/* Now generate decent error. */
if (arity == count)
return String.format(
"%s positionals passed; expected %d arguments but got %d",
fail, arity, numPosArgs);
else if (count == -1)
return String.format(
"%s positionals passed; expected at least %d arguments but got only %d",
fail, arity, numPosArgs);
else
return String.format(
"%s positionals passed; expected %d %s %d arguments but got %d",
fail, arity, arity + 1 == count ? "or" : "to" , count, numPosArgs);
}
/* Binds any type captures. */
public static void bindTypeCaptures(ThreadContext tc, SixModelObject typeCaps, CallFrame cf, SixModelObject type) {
long elems = typeCaps.elems(tc);
StaticCodeInfo sci = cf.codeRef.staticInfo;
for (long i = 0; i < elems; i++) {
String name = typeCaps.at_pos_boxed(tc, i).get_str(tc);
cf.oLex[sci.oTryGetLexicalIdx(name)] = type;
}
}
/* Assigns an attributive parameter to the desired attribute. */
private static int assignAttributive(ThreadContext tc, CallFrame cf, String varName,
int paramFlags, SixModelObject attrPackage, SixModelObject value, String[] error) {
/* Find self. */
StaticCodeInfo sci = cf.codeRef.staticInfo;
Integer selfIdx = sci.oTryGetLexicalIdx("self");
if (selfIdx == null) {
if (error != null)
error[0] = String.format(
"Unable to bind attributive parameter '%s' - could not find self",
varName);
return BIND_RESULT_FAIL;
}
SixModelObject self = cf.oLex[selfIdx];
/* If it's private, just need to fetch the attribute. */
SixModelObject assignee;
if ((paramFlags & SIG_ELEM_BIND_PRIVATE_ATTR) != 0) {
assignee = self.get_attribute_boxed(tc, attrPackage, varName, STable.NO_HINT);
}
/* Otherwise if it's public, do a method call to get the assignee. */
else {
throw new RuntimeException("$.x parameters NYI");
}
RakOps.p6store(assignee, value, tc);
return BIND_RESULT_OK;
}
/* Returns an appropriate failure mode (junction fail or normal fail). */
private static int juncOrFail(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject value) {
if (value.st.WHAT == gcx.Junction && Ops.isconcrete(value, tc) != 0)
return BIND_RESULT_JUNCTION;
else
return BIND_RESULT_FAIL;
}
/* Binds a single argument into the lexpad, after doing any checks that are
* needed. Also handles any type captures. If there is a sub signature, then
* re-enters the binder. Returns one of the BIND_RESULT_* codes. */
private static final CallSiteDescriptor genIns = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
private static final CallSiteDescriptor ACCEPTS_o = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
private static final CallSiteDescriptor ACCEPTS_i = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_INT }, null);
private static final CallSiteDescriptor ACCEPTS_n = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_NUM }, null);
private static final CallSiteDescriptor ACCEPTS_s = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_STR }, null);
private static final CallSiteDescriptor bindThrower = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_STR }, null);
private static int bindOneParam(ThreadContext tc, RakOps.GlobalExt gcx, CallFrame cf, SixModelObject param,
Object origArg, byte origFlag, boolean noNomTypeCheck, String[] error) {
/* Get parameter flags and variable name. */
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int paramFlags = (int)tc.native_i;
param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name);
String varName = tc.native_s;
if (RakOps.DEBUG_MODE)
System.err.println(varName);
/* We'll put the value to bind into one of the following locals, and
* flag will indicate what type of thing it is. */
int flag;
long arg_i = 0;
double arg_n = 0.0;
String arg_s = null;
SixModelObject arg_o = null;
        /* Check if boxed/unboxed expectations are met. */
int desiredNative = paramFlags & SIG_ELEM_NATIVE_VALUE;
boolean is_rw = (paramFlags & SIG_ELEM_IS_RW) != 0;
int gotNative = origFlag & 7;
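        /* The low three bits of the flag encode the argument's primitive
         * type; ARG_OBJ is zero, so a non-zero gotNative means a native
         * value was passed rather than an object container. */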
if (is_rw && desiredNative != 0) {
switch (desiredNative) {
case SIG_ELEM_NATIVE_INT_VALUE:
if (gotNative != 0 || Ops.iscont_i((SixModelObject)origArg) == 0) {
if (error != null)
error[0] = String.format(
"Expected a native int argument for '%s'",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_NUM_VALUE:
if (gotNative != 0 || Ops.iscont_n((SixModelObject)origArg) == 0) {
if (error != null)
error[0] = String.format(
"Expected a native num argument for '%s'",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_STR_VALUE:
if (gotNative != 0 || Ops.iscont_s((SixModelObject)origArg) == 0) {
if (error != null)
error[0] = String.format(
"Expected a native str argument for '%s'",
varName);
return BIND_RESULT_FAIL;
}
break;
}
flag = CallSiteDescriptor.ARG_OBJ;
arg_o = (SixModelObject)origArg;
}
else if (desiredNative == 0 && gotNative == CallSiteDescriptor.ARG_OBJ) {
flag = gotNative;
arg_o = (SixModelObject)origArg;
}
else if (desiredNative == SIG_ELEM_NATIVE_INT_VALUE && gotNative == CallSiteDescriptor.ARG_INT) {
flag = gotNative;
arg_i = (long)origArg;
}
else if (desiredNative == SIG_ELEM_NATIVE_NUM_VALUE && gotNative == CallSiteDescriptor.ARG_NUM) {
flag = gotNative;
arg_n = (double)origArg;
}
else if (desiredNative == SIG_ELEM_NATIVE_STR_VALUE && gotNative == CallSiteDescriptor.ARG_STR) {
flag = gotNative;
arg_s = (String)origArg;
}
else if (desiredNative == 0) {
/* We need to do a boxing operation. */
flag = CallSiteDescriptor.ARG_OBJ;
arg_o = createBox(tc, gcx, origArg, gotNative);
}
else {
            /* We need to do an unboxing operation. */
SixModelObject decontValue = Ops.decont((SixModelObject)origArg, tc);
StorageSpec spec = decontValue.st.REPR.get_storage_spec(tc, decontValue.st);
switch (desiredNative) {
case SIG_ELEM_NATIVE_INT_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_INT) != 0) {
flag = CallSiteDescriptor.ARG_INT;
arg_i = decontValue.get_int(tc);
}
else {
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native int",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_NUM_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_NUM) != 0) {
flag = CallSiteDescriptor.ARG_NUM;
arg_n = decontValue.get_num(tc);
}
else {
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native num",
varName);
return BIND_RESULT_FAIL;
}
break;
case SIG_ELEM_NATIVE_STR_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_STR) != 0) {
flag = CallSiteDescriptor.ARG_STR;
arg_s = decontValue.get_str(tc);
}
else {
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native str",
varName);
return BIND_RESULT_FAIL;
}
break;
default:
if (error != null)
error[0] = String.format(
"Cannot unbox argument to '%s' as a native type",
varName);
return BIND_RESULT_FAIL;
}
}
/* By this point, we'll either have an object that we might be able to
* bind if it passes the type check, or a native value that needs no
* further checking. */
SixModelObject decontValue = null;
if (flag == CallSiteDescriptor.ARG_OBJ && !(is_rw && desiredNative != 0)) {
/* We need to work on the decontainerized value. */
decontValue = Ops.decont(arg_o, tc);
/* HLL map it as needed. */
decontValue = Ops.hllize(decontValue, tc);
/* Skip nominal type check if not needed. */
if (!noNomTypeCheck) {
/* Is the nominal type generic and in need of instantiation? (This
* can happen in (::T, T) where we didn't learn about the type until
* during the signature bind). */
SixModelObject nomType = param.get_attribute_boxed(tc, gcx.Parameter,
"$!nominal_type", HINT_nominal_type);
if ((paramFlags & SIG_ELEM_NOMINAL_GENERIC) != 0) {
SixModelObject HOW = nomType.st.HOW;
SixModelObject ig = Ops.findmethod(tc, HOW,
"instantiate_generic");
SixModelObject ContextRef = tc.gc.ContextRef;
SixModelObject cc = ContextRef.st.REPR.allocate(tc, ContextRef.st);
((ContextRefInstance)cc).context = cf;
Ops.invokeDirect(tc, ig, genIns,
new Object[] { HOW, nomType, cc });
nomType = Ops.result_o(tc.curFrame);
}
/* If not, do the check. If the wanted nominal type is Mu, then
* anything goes. */
if (nomType != gcx.Mu && Ops.istype_nodecont(decontValue, nomType, tc) == 0) {
/* Type check failed; produce error if needed. */
if (error != null) {
SixModelObject thrower = RakOps.getThrower(tc, "X::TypeCheck::Binding");
if (thrower != null && decontValue.st.WHAT != gcx.Junction) {
Ops.invokeDirect(tc, thrower,
bindThrower, new Object[] { decontValue.st.WHAT, nomType.st.WHAT, varName });
return BIND_RESULT_FAIL;
}
else
error[0] = String.format(
"Nominal type check failed for parameter '%s'",
varName);
}
/* Report junction failure mode if it's a junction. */
return juncOrFail(tc, gcx, decontValue);
}
/* Also enforce definedness check */
if ( (paramFlags & SIG_ELEM_DEFINEDNES_CHECK) != 0) {
/* Don't check decontValue for concreteness though, but arg_o,
                       seeing as we don't have an isconcrete_nodecont */
if ((paramFlags & SIG_ELEM_UNDEFINED_ONLY) != 0 && Ops.isconcrete(arg_o, tc) == 1) {
if (error != null) {
if ((paramFlags & SIG_ELEM_INVOCANT) != 0) {
error[0] = "Invocant requires a type object, but an object instance was passed";
}
else {
error[0] = String.format(
"Parameter '%s' requires a type object, but an object instance was passed",
varName);
}
}
return juncOrFail(tc, gcx, decontValue);
}
if ((paramFlags & SIG_ELEM_DEFINED_ONLY) != 0 && Ops.isconcrete(arg_o, tc) != 1) {
if (error != null) {
if ((paramFlags & SIG_ELEM_INVOCANT) != 0) {
error[0] = "Invocant requires an instance, but a type object was passed";
}
else {
error[0] = String.format(
"Parameter '%s' requires an instance, but a type object was passed",
varName);
}
}
return juncOrFail(tc, gcx, decontValue);
}
}
}
}
/* Type captures. */
SixModelObject typeCaps = param.get_attribute_boxed(tc, gcx.Parameter,
"$!type_captures", HINT_type_captures);
if (typeCaps != null)
bindTypeCaptures(tc, typeCaps, cf, decontValue.st.WHAT);
/* Do a coercion, if one is needed. */
SixModelObject coerceType = param.get_attribute_boxed(tc, gcx.Parameter,
"$!coerce_type", HINT_coerce_type);
if (coerceType != null) {
/* Coercing natives not possible - nothing to call a method on. */
if (flag != CallSiteDescriptor.ARG_OBJ) {
if (error != null)
error[0] = String.format(
"Unable to coerce natively typed parameter '%s'",
varName);
return BIND_RESULT_FAIL;
}
/* Only coerce if we don't already have the correct type. */
if (Ops.istype(decontValue, coerceType, tc) == 0) {
param.get_attribute_native(tc, gcx.Parameter, "$!coerce_method", HINT_coerce_method);
String methName = tc.native_s;
SixModelObject coerceMeth = Ops.findmethod(tc,
decontValue, methName);
if (coerceMeth != null) {
Ops.invokeDirect(tc, coerceMeth,
Ops.invocantCallSite,
new Object[] { decontValue });
arg_o = Ops.result_o(tc.curFrame);
decontValue = Ops.decont(arg_o, tc);
}
else {
if (error != null)
error[0] = String.format(
"Unable to coerce value for '%s' to %s; no coercion method defined",
varName, methName);
return BIND_RESULT_FAIL;
}
}
}
/* If it's not got attributive binding, we'll go about binding it into the
* lex pad. */
StaticCodeInfo sci = cf.codeRef.staticInfo;
if ((paramFlags & SIG_ELEM_BIND_ATTRIBUTIVE) == 0 && varName != null) {
/* Is it native? If so, just go ahead and bind it. */
if (flag != CallSiteDescriptor.ARG_OBJ) {
switch (flag) {
case CallSiteDescriptor.ARG_INT:
cf.iLex[sci.iTryGetLexicalIdx(varName)] = arg_i;
break;
case CallSiteDescriptor.ARG_NUM:
cf.nLex[sci.nTryGetLexicalIdx(varName)] = arg_n;
break;
case CallSiteDescriptor.ARG_STR:
cf.sLex[sci.sTryGetLexicalIdx(varName)] = arg_s;
break;
}
}
/* Otherwise it's some objecty case. */
else if (is_rw) {
/* XXX TODO Check if rw flag is set; also need to have a
* wrapper container that carries extra constraints. */
cf.oLex[sci.oTryGetLexicalIdx(varName)] = arg_o;
}
else if ((paramFlags & SIG_ELEM_IS_PARCEL) != 0) {
/* Just bind the thing as is into the lexpad. */
cf.oLex[sci.oTryGetLexicalIdx(varName)] = arg_o;
}
else {
/* If it's an array, copy means make a new one and store,
* and a normal bind is a straightforward binding plus
* adding a constraint. */
if ((paramFlags & SIG_ELEM_ARRAY_SIGIL) != 0) {
SixModelObject bindee = decontValue;
if ((paramFlags & SIG_ELEM_IS_COPY) != 0) {
throw ExceptionHandling.dieInternal(tc, "is copy on lists NYI after GLR");
//bindee = RakOps.p6list(gcx.EMPTYARR.clone(tc), gcx.Array, gcx.True, tc);
//RakOps.p6store(bindee, decontValue, tc);
}
cf.oLex[sci.oTryGetLexicalIdx(varName)] = bindee;
}
/* If it's a hash, similar approach to array. */
else if ((paramFlags & SIG_ELEM_HASH_SIGIL) != 0) {
SixModelObject bindee = decontValue;
if ((paramFlags & SIG_ELEM_IS_COPY) != 0) {
SixModelObject BOOTHash = tc.gc.BOOTHash;
bindee = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st);
bindee.bind_attribute_boxed(tc, gcx.EnumMap, "$!storage",
HINT_ENUMMAP_storage, BOOTHash.st.REPR.allocate(tc, BOOTHash.st));
RakOps.p6store(bindee, decontValue, tc);
}
cf.oLex[sci.oTryGetLexicalIdx(varName)] = bindee;
}
/* If it's a scalar, we always need to wrap it into a new
* container and store it, for copy or ro case (the rw bit
* in the container descriptor takes care of the rest). */
else {
STable stScalar = gcx.Scalar.st;
SixModelObject new_cont = stScalar.REPR.allocate(tc, stScalar);
SixModelObject desc = param.get_attribute_boxed(tc, gcx.Parameter,
"$!container_descriptor", HINT_container_descriptor);
new_cont.bind_attribute_boxed(tc, gcx.Scalar, "$!descriptor",
RakudoContainerSpec.HINT_descriptor, desc);
new_cont.bind_attribute_boxed(tc, gcx.Scalar, "$!value",
RakudoContainerSpec.HINT_value, decontValue);
cf.oLex[sci.oTryGetLexicalIdx(varName)] = new_cont;
}
}
}
/* Is it the invocant? If so, also have to bind to self lexical. */
if ((paramFlags & SIG_ELEM_INVOCANT) != 0)
cf.oLex[sci.oTryGetLexicalIdx("self")] = decontValue;
/* Handle any constraint types (note that they may refer to the parameter by
* name, so we need to have bound it already). */
SixModelObject postConstraints = param.get_attribute_boxed(tc, gcx.Parameter,
"$!post_contraints", HINT_post_constraints);
if (postConstraints != null) {
long numConstraints = postConstraints.elems(tc);
for (long i = 0; i < numConstraints; i++) {
/* Check we meet the constraint. */
SixModelObject consType = postConstraints.at_pos_boxed(tc, i);
SixModelObject acceptsMeth = Ops.findmethod(consType, "ACCEPTS", tc);
if (Ops.istype(consType, gcx.Code, tc) != 0)
RakOps.p6capturelex(consType, tc);
switch (flag) {
case CallSiteDescriptor.ARG_INT:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_i, new Object[] { consType, arg_i });
break;
case CallSiteDescriptor.ARG_NUM:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_n, new Object[] { consType, arg_n });
break;
case CallSiteDescriptor.ARG_STR:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_s, new Object[] { consType, arg_s });
break;
default:
Ops.invokeDirect(tc, acceptsMeth,
ACCEPTS_o, new Object[] { consType, arg_o });
break;
}
long result = Ops.istrue(
Ops.result_o(tc.curFrame), tc);
if (result == 0) {
if (error != null)
error[0] = "Constraint type check failed for parameter '" + varName + "'";
return BIND_RESULT_FAIL;
}
}
}
/* TODO: attributives. */
if ((paramFlags & SIG_ELEM_BIND_ATTRIBUTIVE) != 0) {
if (flag != CallSiteDescriptor.ARG_OBJ) {
if (error != null)
error[0] = "Native attributive binding not yet implemented";
return BIND_RESULT_FAIL;
}
int result = assignAttributive(tc, cf, varName, paramFlags,
param.get_attribute_boxed(tc, gcx.Parameter, "$!attr_package", HINT_attr_package),
decontValue, error);
if (result != BIND_RESULT_OK)
return result;
}
/* If it has a sub-signature, bind that. */
SixModelObject subSignature = param.get_attribute_boxed(tc, gcx.Parameter,
"$!sub_signature", HINT_sub_signature);
if (subSignature != null && flag == CallSiteDescriptor.ARG_OBJ) {
/* Turn value into a capture, unless we already have one. */
SixModelObject capture = null;
int result;
if ((paramFlags & SIG_ELEM_IS_CAPTURE) != 0) {
capture = decontValue;
}
else {
SixModelObject meth = Ops.findmethod(decontValue, "Capture", tc);
if (meth == null) {
if (error != null)
error[0] = "Could not turn argument into capture";
return BIND_RESULT_FAIL;
}
Ops.invokeDirect(tc, meth, Ops.invocantCallSite, new Object[] { decontValue });
capture = Ops.result_o(tc.curFrame);
}
SixModelObject subParams = subSignature
.get_attribute_boxed(tc, gcx.Signature, "$!params", HINT_SIG_params);
/* Recurse into signature binder. */
CallSiteDescriptor subCsd = explodeCapture(tc, gcx, capture);
result = bind(tc, gcx, cf, subParams, subCsd, tc.flatArgs, noNomTypeCheck, error);
if (result != BIND_RESULT_OK)
{
if (error != null) {
/* Note in the error message that we're in a sub-signature. */
error[0] += " in sub-signature";
/* Have we a variable name? */
if (varName != null) {
error[0] += " of parameter " + varName;
}
}
return result;
}
}
if (RakOps.DEBUG_MODE)
System.err.println("bindOneParam NYFI");
return BIND_RESULT_OK;
}
private static final CallSiteDescriptor exploder = new CallSiteDescriptor(new byte[] {
CallSiteDescriptor.ARG_OBJ | CallSiteDescriptor.ARG_FLAT,
CallSiteDescriptor.ARG_OBJ | CallSiteDescriptor.ARG_FLAT | CallSiteDescriptor.ARG_NAMED
}, null);
public static CallSiteDescriptor explodeCapture(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject capture) {
capture = Ops.decont(capture, tc);
SixModelObject capType = gcx.Capture;
SixModelObject list = capture.get_attribute_boxed(tc, capType, "$!list", HINT_CAPTURE_list);
SixModelObject hash = capture.get_attribute_boxed(tc, capType, "$!hash", HINT_CAPTURE_hash);
if (list == null)
list = gcx.EMPTYARR;
if (hash == null)
hash = gcx.EMPTYHASH;
return exploder.explodeFlattening(tc.curFrame, new Object[] { list, hash });
}
/* This takes a signature element and either runs the closure to get a default
* value if there is one, or creates an appropriate undefined-ish thingy. */
private static SixModelObject handleOptional(ThreadContext tc, RakOps.GlobalExt gcx, int flags, SixModelObject param, CallFrame cf) {
/* Is the "get default from outer" flag set? */
if ((flags & SIG_ELEM_DEFAULT_FROM_OUTER) != 0) {
param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name);
String varName = tc.native_s;
CallFrame curOuter = cf.outer;
while (curOuter != null) {
Integer idx = curOuter.codeRef.staticInfo.oTryGetLexicalIdx(varName);
if (idx != null)
return curOuter.oLex[idx];
curOuter = curOuter.outer;
}
return null;
}
/* Do we have a default value or value closure? */
SixModelObject defaultValue = param.get_attribute_boxed(tc, gcx.Parameter,
"$!default_value", HINT_default_value);
if (defaultValue != null) {
if ((flags & SIG_ELEM_DEFAULT_IS_LITERAL) != 0) {
return defaultValue;
}
else {
/* Thunk; run it to get a value. */
Ops.invokeArgless(tc, defaultValue);
return Ops.result_o(tc.curFrame);
}
}
/* Otherwise, go by sigil to pick the correct default type of value. */
else {
if ((flags & SIG_ELEM_ARRAY_SIGIL) != 0) {
throw ExceptionHandling.dieInternal(tc, "optional array param NYI after GLR");
//return RakOps.p6list(null, gcx.Array, gcx.True, tc);
}
else if ((flags & SIG_ELEM_HASH_SIGIL) != 0) {
SixModelObject res = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st);
return res;
}
else {
return param.get_attribute_boxed(tc, gcx.Parameter, "$!nominal_type", HINT_nominal_type);
}
}
}
/* Takes a signature along with positional and named arguments and binds them
* into the provided callframe. Returns BIND_RESULT_OK if binding works out,
* BIND_RESULT_FAIL if there is a failure and BIND_RESULT_JUNCTION if the
* failure was because of a Junction being passed (meaning we need to auto-thread). */
private static final CallSiteDescriptor slurpyFromArgs = new CallSiteDescriptor(
new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
public static int bind(ThreadContext tc, RakOps.GlobalExt gcx, CallFrame cf, SixModelObject params,
CallSiteDescriptor csd, Object[] args,
boolean noNomTypeCheck, String[] error) {
int bindFail = BIND_RESULT_OK;
int curPosArg = 0;
/* If we have a |$foo that's followed by slurpies, then we can suppress
* any future arity checks. */
boolean suppressArityFail = false;
/* If we do have some named args, we want to make a clone of the hash
* to work on. We'll delete stuff from it as we bind, and what we have
* left over can become the slurpy hash or - if we aren't meant to be
* taking one - tell us we have a problem. */
HashMap<String, Integer> namedArgsCopy = csd.nameMap == null
? null
: new HashMap<String, Integer>(csd.nameMap);
/* Now we'll walk through the signature and go about binding things. */
int numPosArgs = csd.numPositionals;
long numParams = params.elems(tc);
for (long i = 0; i < numParams; i++) {
/* Get parameter, its flags and any named names. */
SixModelObject param = params.at_pos_boxed(tc, i);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
SixModelObject namedNames = param.get_attribute_boxed(tc,
gcx.Parameter, "$!named_names", HINT_named_names);
/* Is it looking for us to bind a capture here? */
if ((flags & SIG_ELEM_IS_CAPTURE) != 0) {
/* Capture the arguments from this point forwards into a Capture.
* Of course, if there's no variable name we can (cheaply) do pretty
* much nothing. */
param.get_attribute_native(tc, gcx.Parameter, "$!variable_name", HINT_variable_name);
if (tc.native_s == null) {
bindFail = BIND_RESULT_OK;
}
else {
SixModelObject posArgs = gcx.EMPTYARR.clone(tc);
for (int k = curPosArg; k < numPosArgs; k++) {
switch (csd.argFlags[k]) {
case CallSiteDescriptor.ARG_OBJ:
posArgs.push_boxed(tc, (SixModelObject)args[k]);
break;
case CallSiteDescriptor.ARG_INT:
posArgs.push_boxed(tc, RakOps.p6box_i((long)args[k], tc));
break;
case CallSiteDescriptor.ARG_NUM:
posArgs.push_boxed(tc, RakOps.p6box_n((double)args[k], tc));
break;
case CallSiteDescriptor.ARG_STR:
posArgs.push_boxed(tc, RakOps.p6box_s((String)args[k], tc));
break;
}
}
SixModelObject namedArgs = vmHashOfRemainingNameds(tc, gcx, namedArgsCopy, args);
SixModelObject capType = gcx.Capture;
SixModelObject capSnap = capType.st.REPR.allocate(tc, capType.st);
capSnap.bind_attribute_boxed(tc, capType, "$!list", HINT_CAPTURE_list, posArgs);
capSnap.bind_attribute_boxed(tc, capType, "$!hash", HINT_CAPTURE_hash, namedArgs);
bindFail = bindOneParam(tc, gcx, cf, param, capSnap, CallSiteDescriptor.ARG_OBJ,
noNomTypeCheck, error);
}
if (bindFail != 0) {
return bindFail;
}
else if (i + 1 == numParams) {
/* Since a capture acts as "the ultimate slurpy" in a sense, if
* this is the last parameter in the signature we can return
* success right off the bat. */
return BIND_RESULT_OK;
}
else {
SixModelObject nextParam = params.at_pos_boxed(tc, i + 1);
nextParam.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
if (((int)tc.native_i & (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_NAMED)) != 0)
suppressArityFail = true;
}
}
/* Could it be a named slurpy? */
else if ((flags & SIG_ELEM_SLURPY_NAMED) != 0) {
SixModelObject slurpy = vmHashOfRemainingNameds(tc, gcx, namedArgsCopy, args);
SixModelObject bindee = gcx.Hash.st.REPR.allocate(tc, gcx.Hash.st);
bindee.bind_attribute_boxed(tc, gcx.EnumMap, "$!storage",
HINT_ENUMMAP_storage, slurpy);
bindFail = bindOneParam(tc, gcx, cf, param, bindee, CallSiteDescriptor.ARG_OBJ,
noNomTypeCheck, error);
if (bindFail != 0)
return bindFail;
/* Nullify named arguments hash now we've consumed it, to mark all
* is well. */
namedArgsCopy = null;
}
/* Otherwise, maybe it's a positional of some kind. */
else if (namedNames == null) {
/* Slurpy or LoL-slurpy? */
if ((flags & (SIG_ELEM_SLURPY_POS | SIG_ELEM_SLURPY_LOL)) != 0) {
/* Create Perl 6 array, create VM array of all remaining things,
* then store it. */
SixModelObject slurpy = gcx.EMPTYARR.clone(tc);
while (curPosArg < numPosArgs) {
switch (csd.argFlags[curPosArg]) {
case CallSiteDescriptor.ARG_OBJ:
slurpy.push_boxed(tc, (SixModelObject)args[curPosArg]);
break;
case CallSiteDescriptor.ARG_INT:
slurpy.push_boxed(tc, RakOps.p6box_i((long)args[curPosArg], tc));
break;
case CallSiteDescriptor.ARG_NUM:
slurpy.push_boxed(tc, RakOps.p6box_n((double)args[curPosArg], tc));
break;
case CallSiteDescriptor.ARG_STR:
slurpy.push_boxed(tc, RakOps.p6box_s((String)args[curPosArg], tc));
break;
}
curPosArg++;
}
SixModelObject slurpyType = (flags & SIG_ELEM_IS_RW) != 0 ? gcx.List : gcx.Array;
SixModelObject sm = Ops.findmethod(tc, slurpyType,
(flags & SIG_ELEM_SLURPY_POS) == 0 ? "from-slurpy-flat" : "from-slurpy");
Ops.invokeDirect(tc, sm, slurpyFromArgs, new Object[] { slurpyType, slurpy });
SixModelObject bindee = Ops.result_o(tc.curFrame);
bindFail = bindOneParam(tc, gcx, cf, param, bindee, CallSiteDescriptor.ARG_OBJ,
noNomTypeCheck, error);
if (bindFail != 0)
return bindFail;
}
/* Otherwise, a positional. */
else {
                    /* Do we have a value? */
if (curPosArg < numPosArgs) {
/* Easy - just bind that. */
bindFail = bindOneParam(tc, gcx, cf, param, args[curPosArg],
csd.argFlags[curPosArg], noNomTypeCheck, error);
if (bindFail != 0)
return bindFail;
curPosArg++;
}
else {
/* No value. If it's optional, fetch a default and bind that;
* if not, we're screwed. Note that we never nominal type check
* an optional with no value passed. */
if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) {
bindFail = bindOneParam(tc, gcx, cf, param,
handleOptional(tc, gcx, flags, param, cf),
CallSiteDescriptor.ARG_OBJ, false, error);
if (bindFail != 0)
return bindFail;
}
else {
if (error != null)
error[0] = arityFail(tc, gcx, params, (int)numParams, numPosArgs, false);
return BIND_RESULT_FAIL;
}
}
}
}
/* Else, it's a non-slurpy named. */
else {
/* Try and get hold of value. */
Integer lookup = null;
if (namedArgsCopy != null) {
long numNames = namedNames.elems(tc);
for (long j = 0; j < numNames; j++) {
String name = namedNames.at_pos_boxed(tc, j).get_str(tc);
lookup = namedArgsCopy.remove(name);
if (lookup != null)
break;
}
}
/* Did we get one? */
if (lookup == null) {
/* Nope. We'd better hope this param was optional... */
if ((flags & SIG_ELEM_IS_OPTIONAL) != 0) {
bindFail = bindOneParam(tc, gcx, cf, param,
handleOptional(tc, gcx, flags, param, cf),
CallSiteDescriptor.ARG_OBJ, false, error);
}
else if (!suppressArityFail) {
if (error != null)
error[0] = "Required named argument '" +
namedNames.at_pos_boxed(tc, 0).get_str(tc) +
"' not passed";
return BIND_RESULT_FAIL;
}
}
else {
bindFail = bindOneParam(tc, gcx, cf, param, args[lookup >> 3],
(byte)(lookup & 7), noNomTypeCheck, error);
}
/* If we got a binding failure, return it. */
if (bindFail != 0)
return bindFail;
}
}
/* Do we have any left-over args? */
if (curPosArg < numPosArgs && !suppressArityFail) {
/* Oh noes, too many positionals passed. */
if (error != null)
error[0] = arityFail(tc, gcx, params, (int)numParams, numPosArgs, true);
return BIND_RESULT_FAIL;
}
if (namedArgsCopy != null && namedArgsCopy.size() > 0) {
/* Oh noes, unexpected named args. */
if (error != null) {
int numExtra = namedArgsCopy.size();
if (numExtra == 1) {
for (String name : namedArgsCopy.keySet())
error[0] = "Unexpected named argument '" + name + "' passed";
}
else {
boolean first = true;
error[0] = numExtra + " unexpected named arguments passed (";
for (String name : namedArgsCopy.keySet()) {
if (!first)
error[0] += ", ";
else
first = false;
error[0] += name;
}
error[0] += ")";
}
}
return BIND_RESULT_FAIL;
}
/* If we get here, we're done. */
return BIND_RESULT_OK;
}
/* Takes any nameds we didn't capture yet and makes a VM Hash of them. */
private static SixModelObject vmHashOfRemainingNameds(ThreadContext tc, RakOps.GlobalExt gcx, HashMap<String, Integer> namedArgsCopy, Object[] args) {
SixModelObject slurpy = gcx.Mu;
if (namedArgsCopy != null) {
SixModelObject BOOTHash = tc.gc.BOOTHash;
slurpy = BOOTHash.st.REPR.allocate(tc, BOOTHash.st);
for (String name : namedArgsCopy.keySet()) {
int lookup = namedArgsCopy.get(name);
switch (lookup & 7) {
case CallSiteDescriptor.ARG_OBJ:
slurpy.bind_key_boxed(tc, name, (SixModelObject)args[lookup >> 3]);
break;
case CallSiteDescriptor.ARG_INT:
slurpy.bind_key_boxed(tc, name, RakOps.p6box_i((long)args[lookup >> 3], tc));
break;
case CallSiteDescriptor.ARG_NUM:
slurpy.bind_key_boxed(tc, name, RakOps.p6box_n((double)args[lookup >> 3], tc));
break;
case CallSiteDescriptor.ARG_STR:
slurpy.bind_key_boxed(tc, name, RakOps.p6box_s((String)args[lookup >> 3], tc));
break;
}
}
}
return slurpy;
}
/* Compile time trial binding; tries to determine at compile time whether
* certain binds will/won't work. */
public static int trialBind(ThreadContext tc, RakOps.GlobalExt gcx, SixModelObject params,
CallSiteDescriptor csd, Object[] args) {
/* If there's a single capture parameter, then we're OK. (Worth
* handling especially as it's the common case for protos). */
int numParams = (int)params.elems(tc);
if (numParams == 1) {
SixModelObject param = params.at_pos_boxed(tc, 0);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
if ((flags & SIG_ELEM_IS_CAPTURE) != 0)
return TRIAL_BIND_OK;
}
/* Walk through the signature and consider the parameters. */
int numPosArgs = csd.numPositionals;
int curPosArg = 0;
for (int i = 0; i < numParams; i++) {
/* If the parameter is anything other than a boring old
* positional parameter, we won't analyze it. */
SixModelObject param = params.at_pos_boxed(tc, i);
param.get_attribute_native(tc, gcx.Parameter, "$!flags", HINT_flags);
int flags = (int)tc.native_i;
if ((flags & ~(
SIG_ELEM_MULTI_INVOCANT | SIG_ELEM_IS_PARCEL |
SIG_ELEM_IS_COPY | SIG_ELEM_ARRAY_SIGIL |
SIG_ELEM_HASH_SIGIL | SIG_ELEM_NATIVE_VALUE |
SIG_ELEM_IS_OPTIONAL)) != 0)
return TRIAL_BIND_NOT_SURE;
SixModelObject namedNames = param.get_attribute_boxed(tc,
gcx.Parameter, "$!named_names", HINT_named_names);
if (namedNames != null)
return TRIAL_BIND_NOT_SURE;
SixModelObject postConstraints = param.get_attribute_boxed(tc,
gcx.Parameter, "$!post_constraints", HINT_post_constraints);
if (postConstraints != null)
return TRIAL_BIND_NOT_SURE;
SixModelObject typeCaptures = param.get_attribute_boxed(tc,
gcx.Parameter, "$!type_captures", HINT_type_captures);
if (typeCaptures != null)
return TRIAL_BIND_NOT_SURE;
SixModelObject coerceType = param.get_attribute_boxed(tc,
gcx.Parameter, "$!coerce_type", HINT_coerce_type);
if (coerceType != null)
return TRIAL_BIND_NOT_SURE;
/* Do we have an argument for this parameter? */
if (curPosArg >= numPosArgs) {
                /* No; if it's not optional, fail. */
if ((flags & SIG_ELEM_IS_OPTIONAL) == 0)
return TRIAL_BIND_NO_WAY;
}
else {
/* Yes, need to consider type. */
int gotPrim = csd.argFlags[curPosArg];
if ((flags & SIG_ELEM_NATIVE_VALUE) != 0) {
if (gotPrim == CallSiteDescriptor.ARG_OBJ) {
/* We got an object; if we aren't sure we can unbox, we can't
* be sure about the dispatch. */
SixModelObject arg = (SixModelObject)args[i];
StorageSpec spec = arg.st.REPR.get_storage_spec(tc, arg.st);
switch (flags & SIG_ELEM_NATIVE_VALUE) {
case SIG_ELEM_NATIVE_INT_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_INT) == 0)
return TRIAL_BIND_NOT_SURE;
break;
case SIG_ELEM_NATIVE_NUM_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_NUM) == 0)
return TRIAL_BIND_NOT_SURE;
break;
case SIG_ELEM_NATIVE_STR_VALUE:
if ((spec.can_box & StorageSpec.CAN_BOX_STR) == 0)
return TRIAL_BIND_NOT_SURE;
break;
default:
/* WTF... */
return TRIAL_BIND_NOT_SURE;
}
}
else {
/* If it's the wrong type of native, there's no way it
* can ever bind. */
if (((flags & SIG_ELEM_NATIVE_INT_VALUE) != 0 && gotPrim != CallSiteDescriptor.ARG_INT) ||
((flags & SIG_ELEM_NATIVE_NUM_VALUE) != 0 && gotPrim != CallSiteDescriptor.ARG_NUM) ||
((flags & SIG_ELEM_NATIVE_STR_VALUE) != 0 && gotPrim != CallSiteDescriptor.ARG_STR))
return TRIAL_BIND_NO_WAY;
}
}
else {
/* Work out a parameter type to consider, and see if it matches. */
SixModelObject arg =
gotPrim == CallSiteDescriptor.ARG_OBJ ? (SixModelObject)args[curPosArg] :
gotPrim == CallSiteDescriptor.ARG_INT ? gcx.Int :
gotPrim == CallSiteDescriptor.ARG_NUM ? gcx.Num :
gcx.Str;
SixModelObject nominalType = param.get_attribute_boxed(tc,
gcx.Parameter, "$!nominal_type", HINT_nominal_type);
if (nominalType != gcx.Mu && Ops.istype(arg, nominalType, tc) == 0) {
/* If it failed because we got a junction, may auto-thread;
* hand back "not sure" for now. */
if (arg.st.WHAT == gcx.Junction)
return TRIAL_BIND_NOT_SURE;
                        /* It failed, but that doesn't mean it can't work at runtime;
* we perhaps want an Int, and the most we know is we have an Any,
* which would include Int. However, the Int ~~ Str case can be
* rejected now, as there's no way it'd ever match. Basically, we
* just flip the type check around. */
return Ops.istype(nominalType, arg, tc) != 0
? TRIAL_BIND_NOT_SURE
: TRIAL_BIND_NO_WAY;
}
}
}
/* Continue to next argument. */
curPosArg++;
}
/* If we have any left over arguments, it's a binding fail. */
if (curPosArg < numPosArgs)
return TRIAL_BIND_NO_WAY;
/* Otherwise, if we get there, all is well. */
return TRIAL_BIND_OK;
}
}
| Bring hllize semantics in line with Moar backend.
Fixes RETURN-LIST crash, getting us further through startup on JVM.
| src/vm/jvm/runtime/org/perl6/rakudo/Binder.java | Bring hllize semantics in line with Moar backend. | <ide><path>rc/vm/jvm/runtime/org/perl6/rakudo/Binder.java
<ide> * bind if it passes the type check, or a native value that needs no
<ide> * further checking. */
<ide> SixModelObject decontValue = null;
<add> boolean didHLLTransform = false;
<ide> if (flag == CallSiteDescriptor.ARG_OBJ && !(is_rw && desiredNative != 0)) {
<ide> /* We need to work on the decontainerized value. */
<ide> decontValue = Ops.decont(arg_o, tc);
<ide>
<ide> /* HLL map it as needed. */
<add> SixModelObject beforeHLLize = decontValue;
<ide> decontValue = Ops.hllize(decontValue, tc);
<add> if (decontValue != beforeHLLize)
<add> didHLLTransform = true;
<ide>
<ide> /* Skip nominal type check if not needed. */
<ide> if (!noNomTypeCheck) {
<ide> }
<ide> else if ((paramFlags & SIG_ELEM_IS_PARCEL) != 0) {
<ide> /* Just bind the thing as is into the lexpad. */
<del> cf.oLex[sci.oTryGetLexicalIdx(varName)] = arg_o;
<add> cf.oLex[sci.oTryGetLexicalIdx(varName)] = didHLLTransform ? decontValue : arg_o;
<ide> }
<ide> else {
<ide> /* If it's an array, copy means make a new one and store, |
|
Java | lgpl-2.1 | 001aeb07be2f82ff05c4ee9358aeeb0057a49ad6 | 0 | simoc/mapyrus,simoc/mapyrus,simoc/mapyrus | /*
* $Id$
*/
package au.id.chenery.mapyrus;
import java.awt.Graphics2D;
import java.awt.BasicStroke;
import java.awt.geom.PathIterator;
import java.awt.Shape;
import javax.imageio.*;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.BufferedWriter;
import java.io.IOException;
import java.text.DecimalFormat;
import java.awt.image.*;
import java.awt.Color;
/**
* Abstraction of a graphics format. Provides methods to create new
* output files and then draw to them, independent of the graphics
* format.
*/
public class OutputFormat
{
/*
* Type of output currently being generated.
*/
private static final int BUFFERED_IMAGE = 1;
private static final int IMAGE_FILE = 2;
private static final int POSTSCRIPT = 3;
/*
* Number of points and millimetres per inch.
*/
private static final int POINTS_PER_INCH = 72;
public static final double MM_PER_INCH = 25.4;
/*
* Format for coordinates and colors in PostScript files.
*/
private DecimalFormat mLinearFormat;
private DecimalFormat mColorFormat;
/*
* File or image that drawing commands are
* writing to.
*/
private int mOutputType;
private String mFormatName;
private BufferedImage mImage;
private String mFilename;
private PrintWriter mWriter;
private OutputStream mOutputStream;
private Graphics2D mGraphics2D;
private boolean mPipedOutput;
private Process mOutputProcess;
/*
* Page dimensions and resolution.
*/
private double mPageWidth;
private double mPageHeight;
private double mResolution;
/*
* Indentation for PostScript commands.
*/
private int mPostScriptIndent;
/*
* Write PostScript file header.
*/
private void writePostScriptHeader(double width, double height)
{
long widthInPoints = Math.round(width / MM_PER_INCH * POINTS_PER_INCH);
long heightInPoints = Math.round(height / MM_PER_INCH * POINTS_PER_INCH);
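		// Worked example: an A4 width of 210mm becomes round(210 / 25.4 * 72) = 595 points.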
mWriter.println("%!PS-Adobe-3.0");
mWriter.println("%%BoundingBox: 0 0 " + widthInPoints + " " + heightInPoints);
mWriter.println("%%DocumentData: Clean7Bit");
mWriter.println("%%Creator: " + Mapyrus.PROGRAM_NAME);
mWriter.println("%%EndComments");
mWriter.println("");
/*
* Prevent anything being displayed outside bounding box we've just defined.
*/
mWriter.println("0 0 " + widthInPoints + " " + heightInPoints + " rectclip");
/*
* Set plotting units to millimetres.
*/
mWriter.println(POINTS_PER_INCH + " " + MM_PER_INCH + " div dup scale");
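		// The line above emits "72 25.4 div dup scale" (a factor of roughly 2.835),
		// after which one user-space unit corresponds to one millimetre.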
/*
* Define shorter names for most commonly used operations.
*/
mWriter.println("/m { moveto } def /l { lineto } def");
mWriter.println("/s { stroke } def /f { fill } def");
mWriter.println("/gs { gsave } def /gr { grestore } def");
mWriter.println("/rgb { setrgbcolor } def /sl { setlinewidth } def");
}
/*
* Sets correct background, rendering hints and transformation
* for buffered image we will plot to.
*/
private void setupBufferedImage(double resolution)
{
double scale;
mGraphics2D.setColor(Color.WHITE);
mGraphics2D.fillRect(0, 0, mImage.getWidth(), mImage.getHeight());
scale = resolution / MM_PER_INCH;
/*
* Set transform with origin in lower-left corner and
* Y axis increasing upwards.
*/
mGraphics2D.translate(0, mImage.getHeight());
mGraphics2D.scale(scale, -scale);
}
/**
* Creates new graphics file, ready for drawing to.
* @param filename name of image file output will be saved to.
* If filename begins with '|' character then output is piped as
* input to that command.
* @param format is the graphics format to use.
* @param width is the page width (in mm).
* @param height is the page height (in mm).
* @param resolution is resolution for output in dots per inch (DPI)
* @param extras contains extra settings for this output.
*/
public OutputFormat(String filename, String format,
double width, double height, double resolution, String extras)
throws IOException, MapyrusException
{
mFormatName = format.toUpperCase();
/*
* Check that Java can write this image format to a file.
*/
if (mFormatName.equals("PS") || mFormatName.equals("EPS"))
{
mOutputType = POSTSCRIPT;
}
else
{
boolean found = false;
String knownFormats[] = ImageIO.getWriterFormatNames();
for (int i = 0; i < knownFormats.length && found == false; i++)
{
if (mFormatName.equalsIgnoreCase(knownFormats[i]))
{
found = true;
}
}
if (found == false)
throw new MapyrusException("Cannot write image format: " + format);
mOutputType = IMAGE_FILE;
}
/*
* Should we pipe the output to another program
* instead of writing a file?
*/
mPipedOutput = filename.startsWith("|");
if (mPipedOutput)
{
String pipeCommand = filename.substring(1).trim();
mOutputProcess = Runtime.getRuntime().exec(pipeCommand);
mOutputStream = mOutputProcess.getOutputStream();
}
else
{
mOutputStream = new FileOutputStream(filename);
}
/*
* Setup file we are writing to.
*/
if (mOutputType == POSTSCRIPT)
{
mLinearFormat = new DecimalFormat("#.##");
mColorFormat = new DecimalFormat("#.###");
mWriter = new PrintWriter(new BufferedWriter(new OutputStreamWriter(mOutputStream)));
writePostScriptHeader(width, height);
}
else
{
/*
* Create a BufferedImage to draw into. We'll save it to a file
* when user has finished drawing to it.
*/
int widthInPixels = (int)Math.round(width / MM_PER_INCH * resolution);
int heightInPixels = (int)Math.round(height / MM_PER_INCH * resolution);
mImage = new BufferedImage(widthInPixels, heightInPixels,
BufferedImage.TYPE_3BYTE_BGR);
mGraphics2D = (Graphics2D)(mImage.getGraphics());
setupBufferedImage(resolution);
}
mFilename = filename;
mPostScriptIndent = 0;
mPageWidth = width;
mPageHeight = height;
mResolution = MM_PER_INCH / resolution;
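		// For example, at 72 DPI this is 25.4 / 72, roughly 0.35mm between pixel centres.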
}
/**
* Return page width.
* @return width in millimetres.
*/
public double getPageWidth()
{
return(mPageWidth);
}
/**
* Return page height.
* @return height in millimetres.
*/
public double getPageHeight()
{
return(mPageHeight);
}
/**
* Return resolution of page as a distance measurement.
* @return distance in millimetres between centres of adjacent pixels.
*/
public double getResolution()
{
return(mResolution);
}
/*
* Write a line to PostScript file. Line is indented to show
* saving and restoring of state more clearly.
*/
private void writePostScriptLine(String line)
{
for (int i = 0; i < mPostScriptIndent; i++)
{
mWriter.print(" ");
}
mWriter.println(line);
}
/**
* Save state, protecting color, linestyle, transform of output.
* This state can be restored later with restoreState().
*/
public void saveState()
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptLine("gs");
mPostScriptIndent++;
}
}
/**
* Restore state saved with saveState().
* @return true if saved state was successfully restored.
* Only PostScript format can be successfully restored, caller
* will have to reset values for other formats.
*/
public boolean restoreState()
{
boolean retval;
if (mOutputType == POSTSCRIPT)
{
mPostScriptIndent--;
writePostScriptLine("gr");
retval = true;
}
else
{
/*
* Can't restore state when drawing to an image. Caller
* must set everything to correct values again.
*/
retval = false;
}
return(retval);
}
/**
* Writes trailing information and closes output file.
*/
public void closeOutputFormat() throws IOException, MapyrusException
{
if (mOutputType == POSTSCRIPT)
{
/*
* Finish off PostScript file.
*/
if (mFormatName.equals("PS"))
{
/*
* showpage is not included in Encapsulated PostScript files.
*/
mWriter.println("showpage");
}
mWriter.println("%%EOF");
mWriter.close();
if (mWriter.checkError())
{
throw new MapyrusException(mFilename +
": Error writing to PostScript file");
}
}
else if (mOutputType == IMAGE_FILE)
{
/*
* Write image buffer to file.
*/
ImageIO.write(mImage, mFormatName, mOutputStream);
mOutputStream.close();
mImage = null;
mGraphics2D = null;
}
/*
* If we are piping output to another program then wait for
* that program to finish. Then check that it succeeded.
*/
if (mPipedOutput)
{
int retval = 0;
try
{
retval = mOutputProcess.waitFor();
}
catch (InterruptedException e)
{
throw new MapyrusException(mFilename + ": " + e.getMessage());
}
if (retval != 0)
{
throw new MapyrusException("Process returned failure status: " +
mFilename);
}
}
}
/**
* Set graphics attributes.
* @param color is color to draw in.
* @param lineWidth is width of line to use for drawing.
	 * @param clipPath is the clip path.
*/
public void setAttributes(Color color, double lineWidth, Shape clipPath)
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptLine(mLinearFormat.format(lineWidth) + " sl");
float c[] = color.getRGBColorComponents(null);
writePostScriptLine(mColorFormat.format(c[0]) + " " +
mColorFormat.format(c[1]) + " " +
mColorFormat.format(c[2]) + " rgb");
}
else
{
mGraphics2D.setColor(color);
mGraphics2D.setStroke(new BasicStroke((float)lineWidth));
mGraphics2D.setClip(clipPath);
}
}
/*
* Walk through path, converting it to PostScript.
*/
private void writePostScriptShape(Shape shape)
{
PathIterator pi = shape.getPathIterator(null);
float coords[] = new float[6];
int segmentType;
while (!pi.isDone())
{
segmentType = pi.currentSegment(coords);
switch (segmentType)
{
case PathIterator.SEG_MOVETO:
writePostScriptLine(mLinearFormat.format(coords[0]) + " " +
mLinearFormat.format(coords[1]) + " m");
break;
case PathIterator.SEG_LINETO:
writePostScriptLine(mLinearFormat.format(coords[0]) + " " +
mLinearFormat.format(coords[1]) + " l");
break;
case PathIterator.SEG_CLOSE:
writePostScriptLine("closepath");
break;
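				/* Cubic Bezier segment: coords holds the two control points
				 * followed by the end point, the same order that PostScript's
				 * curveto operator expects. */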
case PathIterator.SEG_CUBICTO:
writePostScriptLine(mLinearFormat.format(coords[0]) + " " +
mLinearFormat.format(coords[1]) + " " +
mLinearFormat.format(coords[2]) + " " +
mLinearFormat.format(coords[3]) + " " +
mLinearFormat.format(coords[4]) + " " +
mLinearFormat.format(coords[5]) + " " +
"curveto");
break;
}
pi.next();
}
}
/**
* Draw currently defined path to output page.
*/
public void stroke(Shape shape)
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptShape(shape);
writePostScriptLine("s");
}
else
{
/*
* Draw path into image.
*/
mGraphics2D.draw(shape);
}
}
/**
* Fill currently defined path on output page.
*/
public void fill(Shape shape)
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptShape(shape);
writePostScriptLine("f");
}
else
{
/*
* Fill path in image.
*/
mGraphics2D.fill(shape);
}
}
/**
* Set clip region to inside of currently defined path on output page.
*/
public void clip(Shape shape)
{
if (mOutputType == POSTSCRIPT)
{
/*
* Set clip path now, then it stays in effect until previous
* state is restored.
*/
writePostScriptShape(shape);
writePostScriptLine("clip newpath");
}
}
}
| src/org/mapyrus/OutputFormat.java | /*
* $Id$
*/
package au.id.chenery.mapyrus;
import java.awt.Graphics2D;
import java.awt.BasicStroke;
import java.awt.geom.PathIterator;
import java.awt.Shape;
import javax.imageio.*;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.BufferedWriter;
import java.io.IOException;
import java.text.DecimalFormat;
import java.awt.image.*;
import java.awt.Color;
/**
* Abstraction of a graphics format. Provides methods to create new
* output files and then draw to them, independent of the graphics
* format.
*/
public class OutputFormat
{
/*
* Type of output currently being generated.
*/
private static final int BUFFERED_IMAGE = 1;
private static final int IMAGE_FILE = 2;
private static final int POSTSCRIPT = 3;
/*
* Number of points and millimetres per inch.
*/
private static final int POINTS_PER_INCH = 72;
private static final double MM_PER_INCH = 25.4;
/*
* Format for coordinates and colors in PostScript files.
*/
private DecimalFormat mLinearFormat;
private DecimalFormat mColorFormat;
/*
* File or image that drawing commands are
* writing to.
*/
private int mOutputType;
private String mFormatName;
private BufferedImage mImage;
private String mFilename;
private PrintWriter mWriter;
private OutputStream mOutputStream;
private Graphics2D mGraphics2D;
private boolean mPipedOutput;
private Process mOutputProcess;
/*
* Page dimensions.
*/
private double mPageWidth;
private double mPageHeight;
/*
* Indentation for PostScript commands.
*/
private int mPostScriptIndent;
/*
* Write PostScript file header.
*/
private void writePostScriptHeader(double width, double height)
{
long widthInPoints = Math.round(width / MM_PER_INCH * POINTS_PER_INCH);
long heightInPoints = Math.round(height / MM_PER_INCH * POINTS_PER_INCH);
mWriter.println("%!PS-Adobe-3.0");
mWriter.println("%%BoundingBox: 0 0 " + widthInPoints + " " + heightInPoints);
mWriter.println("%%DocumentData: Clean7Bit");
mWriter.println("%%Creator: " + Mapyrus.PROGRAM_NAME);
mWriter.println("%%EndComments");
mWriter.println("");
/*
* Prevent anything being displayed outside bounding box we've just defined.
*/
mWriter.println("0 0 " + widthInPoints + " " + heightInPoints + " rectclip");
/*
* Set plotting units to millimetres.
*/
mWriter.println(POINTS_PER_INCH + " " + MM_PER_INCH + " div dup scale");
/*
* Define shorter names for most commonly used operations.
*/
mWriter.println("/m { moveto } def /l { lineto } def");
mWriter.println("/s { stroke } def /f { fill } def");
mWriter.println("/gs { gsave } def /gr { grestore } def");
mWriter.println("/rgb { setrgbcolor } def /sl { setlinewidth } def");
}
/*
* Sets correct background, rendering hints and transformation
* for buffered image we will plot to.
*/
private void setupBufferedImage(int resolution)
{
double scale;
mGraphics2D.setColor(Color.WHITE);
mGraphics2D.fillRect(0, 0, mImage.getWidth(), mImage.getHeight());
scale = resolution / MM_PER_INCH;
/*
* Set transform with origin in lower-left corner and
* Y axis increasing upwards.
*/
mGraphics2D.translate(0, mImage.getHeight());
mGraphics2D.scale(scale, -scale);
}
/**
* Return resolution to use for image files we create.
* @return resolution to use for images as dots per inch value.
*/
private int getResolution()
{
int resolution;
/*
* If a display resolution is given as a property then use that,
	 * otherwise assume 72 DPI. That is, an image 10 inches (254mm) wide
	 * will be made 720 pixels wide.
*/
try
{
String property = System.getProperty(Mapyrus.PROGRAM_NAME + ".resolution");
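			// For example, launching the JVM with -D<program>.resolution=300 selects
			// 300 DPI output; the property prefix comes from Mapyrus.PROGRAM_NAME,
			// whose exact value is not shown here.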
if (property != null)
resolution = Integer.parseInt(property);
else
resolution = POINTS_PER_INCH;
}
catch (SecurityException e)
{
resolution = POINTS_PER_INCH;
}
catch (NumberFormatException e)
{
resolution = POINTS_PER_INCH;
}
return(resolution);
}
/**
* Creates new graphics file, ready for drawing to.
* @param filename name of image file output will be saved to.
* If filename begins with '|' character then output is piped as
* input to that command.
* @param format is the graphics format to use.
* @param width is the page width (in mm).
* @param height is the page height (in mm).
* @param extras contains extra settings for this output.
*/
public OutputFormat(String filename, String format,
double width, double height, String extras)
throws IOException, MapyrusException
{
mFormatName = format.toUpperCase();
/*
* Check that Java can write this image format to a file.
*/
if (mFormatName.equals("PS") || mFormatName.equals("EPS"))
{
mOutputType = POSTSCRIPT;
}
else
{
boolean found = false;
String knownFormats[] = ImageIO.getWriterFormatNames();
for (int i = 0; i < knownFormats.length && found == false; i++)
{
if (mFormatName.equalsIgnoreCase(knownFormats[i]))
{
found = true;
}
}
if (found == false)
throw new MapyrusException("Cannot write image format: " + format);
mOutputType = IMAGE_FILE;
}
/*
* Should we pipe the output to another program
* instead of writing a file?
*/
mPipedOutput = filename.startsWith("|");
if (mPipedOutput)
{
String pipeCommand = filename.substring(1).trim();
mOutputProcess = Runtime.getRuntime().exec(pipeCommand);
mOutputStream = mOutputProcess.getOutputStream();
}
else
{
mOutputStream = new FileOutputStream(filename);
}
/*
* Setup file we are writing to.
*/
if (mOutputType == POSTSCRIPT)
{
mLinearFormat = new DecimalFormat("#.##");
mColorFormat = new DecimalFormat("#.###");
mWriter = new PrintWriter(new BufferedWriter(new OutputStreamWriter(mOutputStream)));
writePostScriptHeader(width, height);
}
else
{
/*
* Create a BufferedImage to draw into. We'll save it to a file
* when user has finished drawing to it.
*/
int resolution = getResolution();
int widthInPixels = (int)Math.round(width / MM_PER_INCH * resolution);
int heightInPixels = (int)Math.round(height / MM_PER_INCH * resolution);
mImage = new BufferedImage(widthInPixels, heightInPixels,
BufferedImage.TYPE_3BYTE_BGR);
mGraphics2D = (Graphics2D)(mImage.getGraphics());
setupBufferedImage(resolution);
}
mFilename = filename;
mPostScriptIndent = 0;
mPageWidth = width;
mPageHeight = height;
}
/**
* Set a buffered image as output.
* @param image is the image to draw to.
*/
public OutputFormat(BufferedImage image)
throws IOException, MapyrusException
{
int resolution = getResolution();
mOutputType = BUFFERED_IMAGE;
mImage = image;
mGraphics2D = (Graphics2D)(mImage.getGraphics());
setupBufferedImage(resolution);
mPipedOutput = false;
mPostScriptIndent = 0;
mPageWidth = (double)mImage.getWidth() / resolution;
		mPageHeight = (double)mImage.getHeight() / resolution;
}
/**
* Return page width.
* @return width in millimetres.
*/
public double getPageWidth()
{
return(mPageWidth);
}
/**
* Return page height.
* @return height in millimetres.
*/
public double getPageHeight()
{
return(mPageHeight);
}
/*
* Write a line to PostScript file. Line is indented to show
* saving and restoring of state more clearly.
*/
private void writePostScriptLine(String line)
{
for (int i = 0; i < mPostScriptIndent; i++)
{
mWriter.print(" ");
}
mWriter.println(line);
}
/**
* Save state, protecting color, linestyle, transform of output.
* This state can be restored later with restoreState().
*/
public void saveState()
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptLine("gs");
mPostScriptIndent++;
}
}
/**
* Restore state saved with saveState().
* @return true if saved state was successfully restored.
* Only PostScript format can be successfully restored, caller
* will have to reset values for other formats.
*/
public boolean restoreState()
{
boolean retval;
if (mOutputType == POSTSCRIPT)
{
mPostScriptIndent--;
writePostScriptLine("gr");
retval = true;
}
else
{
/*
* Can't restore state when drawing to an image. Caller
* must set everything to correct values again.
*/
retval = false;
}
return(retval);
}
/**
* Writes trailing information and closes output file.
*/
public void closeOutputFormat() throws IOException, MapyrusException
{
if (mOutputType == POSTSCRIPT)
{
/*
* Finish off PostScript file.
*/
if (mFormatName.equals("PS"))
{
/*
* showpage is not included in Encapsulated PostScript files.
*/
mWriter.println("showpage");
}
mWriter.println("%%EOF");
mWriter.close();
if (mWriter.checkError())
{
throw new MapyrusException(mFilename +
": Error writing to PostScript file");
}
}
else if (mOutputType == IMAGE_FILE)
{
/*
* Write image buffer to file.
*/
ImageIO.write(mImage, mFormatName, mOutputStream);
mOutputStream.close();
mImage = null;
mGraphics2D = null;
}
/*
* If we are piping output to another program then wait for
* that program to finish. Then check that it succeeded.
*/
if (mPipedOutput)
{
int retval = 0;
try
{
retval = mOutputProcess.waitFor();
}
catch (InterruptedException e)
{
throw new MapyrusException(mFilename + ": " + e.getMessage());
}
if (retval != 0)
{
throw new MapyrusException("Process returned failure status: " +
mFilename);
}
}
}
/**
* Set graphics attributes.
* @param color is color to draw in.
* @param lineWidth is width of line to use for drawing.
	 * @param clipPath is the clip path.
*/
public void setAttributes(Color color, double lineWidth, Shape clipPath)
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptLine(mLinearFormat.format(lineWidth) + " sl");
float c[] = color.getRGBColorComponents(null);
writePostScriptLine(mColorFormat.format(c[0]) + " " +
mColorFormat.format(c[1]) + " " +
mColorFormat.format(c[2]) + " rgb");
}
else
{
mGraphics2D.setColor(color);
mGraphics2D.setStroke(new BasicStroke((float)lineWidth));
mGraphics2D.setClip(clipPath);
}
}
/*
* Walk through path, converting it to PostScript.
*/
private void writePostScriptShape(Shape shape)
{
PathIterator pi = shape.getPathIterator(null);
float coords[] = new float[6];
int segmentType;
while (!pi.isDone())
{
segmentType = pi.currentSegment(coords);
switch (segmentType)
{
case PathIterator.SEG_MOVETO:
writePostScriptLine(mLinearFormat.format(coords[0]) + " " +
mLinearFormat.format(coords[1]) + " m");
break;
case PathIterator.SEG_LINETO:
writePostScriptLine(mLinearFormat.format(coords[0]) + " " +
mLinearFormat.format(coords[1]) + " l");
break;
case PathIterator.SEG_CLOSE:
writePostScriptLine("closepath");
break;
}
pi.next();
}
}
/**
* Draw currently defined path to output page.
*/
public void stroke(Shape shape)
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptShape(shape);
writePostScriptLine("s");
}
else
{
/*
* Draw path into image.
*/
mGraphics2D.draw(shape);
}
}
/**
* Fill currently defined path on output page.
*/
public void fill(Shape shape)
{
if (mOutputType == POSTSCRIPT)
{
writePostScriptShape(shape);
writePostScriptLine("f");
}
else
{
/*
* Fill path in image.
*/
mGraphics2D.fill(shape);
}
}
/**
* Set clip region to inside of currently defined path on output page.
*/
public void clip(Shape shape)
{
if (mOutputType == POSTSCRIPT)
{
/*
* Set clip path now, then it stays in effect until previous
* state is restored.
*/
writePostScriptShape(shape);
writePostScriptLine("clip newpath");
}
}
}
| Remove unused methods.
Make getResolution public.
Handle arcs/curves to PostScript files.
| src/org/mapyrus/OutputFormat.java | Remove unused methods. Make getResolution public. Handle arcs/curves to PostScript files. | <ide><path>rc/org/mapyrus/OutputFormat.java
<ide> * Number of points and millimetres per inch.
<ide> */
<ide> private static final int POINTS_PER_INCH = 72;
<del> private static final double MM_PER_INCH = 25.4;
<add> public static final double MM_PER_INCH = 25.4;
<ide>
<ide> /*
<ide> * Format for coordinates and colors in PostScript files.
<ide> private Process mOutputProcess;
<ide>
<ide> /*
<del> * Page dimensions.
<add> * Page dimensions and resolution.
<ide> */
<ide> private double mPageWidth;
<ide> private double mPageHeight;
<add> private double mResolution;
<ide>
<ide> /*
<ide> * Indentation for PostScript commands.
<ide> * Sets correct background, rendering hints and transformation
<ide> * for buffered image we will plot to.
<ide> */
<del> private void setupBufferedImage(int resolution)
<add> private void setupBufferedImage(double resolution)
<ide> {
<ide> double scale;
<ide>
<ide> */
<ide> mGraphics2D.translate(0, mImage.getHeight());
<ide> mGraphics2D.scale(scale, -scale);
<del> }
<del>
<del> /**
<del> * Return resolution to use for image files we create.
<del> * @return resolution to use for images as dots per inch value.
<del> */
<del> private int getResolution()
<del> {
<del> int resolution;
<del>
<del> /*
<del> * If a display resolution is given as a property then use that,
<del> * otherwise assume 72 DPI. That is, an image 100mm wide will be made
<del> * 720 pixels wide.
<del> */
<del> try
<del> {
<del> String property = System.getProperty(Mapyrus.PROGRAM_NAME + ".resolution");
<del> if (property != null)
<del> resolution = Integer.parseInt(property);
<del> else
<del> resolution = POINTS_PER_INCH;
<del>
<del> }
<del> catch (SecurityException e)
<del> {
<del> resolution = POINTS_PER_INCH;
<del> }
<del> catch (NumberFormatException e)
<del> {
<del> resolution = POINTS_PER_INCH;
<del> }
<del>
<del> return(resolution);
<ide> }
<ide>
<ide> /**
<ide> * @param format is the graphics format to use.
<ide> * @param width is the page width (in mm).
<ide> * @param height is the page height (in mm).
<add> * @param resolution is resolution for output in dots per inch (DPI)
<ide> * @param extras contains extra settings for this output.
<ide> */
<ide> public OutputFormat(String filename, String format,
<del> double width, double height, String extras)
<add> double width, double height, double resolution, String extras)
<ide> throws IOException, MapyrusException
<ide> {
<ide> mFormatName = format.toUpperCase();
<ide> * Create a BufferedImage to draw into. We'll save it to a file
<ide> * when user has finished drawing to it.
<ide> */
<del> int resolution = getResolution();
<ide> int widthInPixels = (int)Math.round(width / MM_PER_INCH * resolution);
<ide> int heightInPixels = (int)Math.round(height / MM_PER_INCH * resolution);
<ide> mImage = new BufferedImage(widthInPixels, heightInPixels,
<ide> mPostScriptIndent = 0;
<ide> mPageWidth = width;
<ide> mPageHeight = height;
<del> }
<del>
<del> /**
<del> * Set a buffered image as output.
<del> * @param image is the image to draw to.
<del> */
<del> public OutputFormat(BufferedImage image)
<del> throws IOException, MapyrusException
<del> {
<del> int resolution = getResolution();
<del>
<del> mOutputType = BUFFERED_IMAGE;
<del> mImage = image;
<del> mGraphics2D = (Graphics2D)(mImage.getGraphics());
<del> setupBufferedImage(resolution);
<del> mPipedOutput = false;
<del> mPostScriptIndent = 0;
<del> mPageWidth = (double)mImage.getWidth() / resolution;
<del> 		mPageHeight = (double)mImage.getHeight() / resolution;
<del> }
<del>
<add> mResolution = MM_PER_INCH / resolution;
<add> }
<add>
<ide> /**
<ide> * Return page width.
<ide> * @return width in millimetres.
<ide> public double getPageHeight()
<ide> {
<ide> return(mPageHeight);
<add> }
<add>
<add> /**
<add> * Return resolution of page as a distance measurement.
<add> * @return distance in millimetres between centres of adjacent pixels.
<add> */
<add> public double getResolution()
<add> {
<add> return(mResolution);
<ide> }
<ide>
<ide> /*
<ide> case PathIterator.SEG_CLOSE:
<ide> writePostScriptLine("closepath");
<ide> break;
<add>
<add> case PathIterator.SEG_CUBICTO:
<add> writePostScriptLine(mLinearFormat.format(coords[0]) + " " +
<add> mLinearFormat.format(coords[1]) + " " +
<add> mLinearFormat.format(coords[2]) + " " +
<add> mLinearFormat.format(coords[3]) + " " +
<add> mLinearFormat.format(coords[4]) + " " +
<add> mLinearFormat.format(coords[5]) + " " +
<add> "curveto");
<add> break;
<ide> }
<ide> pi.next();
<ide> } |
|
Java | apache-2.0 | b0f5c16f8fad3248aaa58f05a628271ec8b54a89 | 0 | pinterest/rocksplicator,pinterest/rocksplicator,pinterest/rocksplicator,pinterest/rocksplicator,pinterest/rocksplicator | /// Copyright 2017 Pinterest Inc.
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
//
// @author bol ([email protected])
//
package com.pinterest.rocksplicator;
import com.pinterest.rocksplicator.monitoring.mbeans.RocksplicatorMonitor;
import com.google.common.base.Stopwatch;
import org.apache.helix.HelixAdmin;
import org.apache.helix.HelixConstants;
import org.apache.helix.HelixManager;
import org.apache.helix.NotificationContext;
import org.apache.helix.api.listeners.PreFetch;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.participant.CustomCodeCallbackHandler;
import org.apache.helix.spectator.RoutingTableProvider;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
public class ConfigGenerator extends RoutingTableProvider implements CustomCodeCallbackHandler {
private static final Logger LOG = LoggerFactory.getLogger(ConfigGenerator.class);
private final String clusterName;
private final String postUrl;
private final boolean enableDumpToLocal;
private final Map<String, String> hostToHostWithDomain;
private final JSONObject dataParameters;
private final RocksplicatorMonitor monitor;
private final ReentrantLock synchronizedCallbackLock;
private HelixManager helixManager;
private String lastPostedContent;
private Set<String> disabledHosts;
public ConfigGenerator(String clusterName, HelixManager helixManager, String configPostUrl) {
this(clusterName, helixManager, configPostUrl,
new RocksplicatorMonitor(clusterName, helixManager.getInstanceName()));
}
public ConfigGenerator(String clusterName, HelixManager helixManager, String configPostUrl,
RocksplicatorMonitor monitor) {
this.clusterName = clusterName;
this.helixManager = helixManager;
this.hostToHostWithDomain = new HashMap<String, String>();
this.postUrl = configPostUrl;
this.dataParameters = new JSONObject();
this.dataParameters.put("config_version", "v3");
this.dataParameters.put("author", "ConfigGenerator");
this.dataParameters.put("comment", "new shard config");
this.dataParameters.put("content", "{}");
this.lastPostedContent = null;
this.disabledHosts = new HashSet<>();
this.monitor = monitor;
this.enableDumpToLocal = new File("/var/log/helixspectator").canWrite();
this.synchronizedCallbackLock = new ReentrantLock();
}
private static class AutoCloseableLock implements AutoCloseable {
private final Lock lock;
private AutoCloseableLock(Lock lock) {
this.lock = lock;
this.lock.lock();
}
public static AutoCloseableLock lock(Lock lock) {
return new AutoCloseableLock(lock);
}
@Override
public void close() {
this.lock.unlock();
}
}
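  // Usage sketch (mirrors the callbacks below): acquiring the lock through
  // try-with-resources guarantees unlock() on every exit path:
  //   try (AutoCloseableLock l = AutoCloseableLock.lock(someLock)) { ... }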
private void logUncheckedException(Runnable r) {
try {
r.run();
} catch (Throwable throwable) {
LOG.error("Exception in generateShardConfig()", throwable);
throw throwable;
}
}
/**
* We are not 100% confident on behaviour of helix agent w.r.t. threading and execution model
   * for callback functions called from the helix agent. In particular, it is possible for multiple
   * callbacks, of the same or different types and sources, to be called by helix in parallel.
*
* In order to ensure that only one callback is actively being processed at any time, we
* explicitly guard any callback function body with a single re-entrant lock. This provides
* explicit guarantee around the behaviour of how shard_maps are processed, generated and
* published.
*/
@Override
public void onCallback(final NotificationContext notificationContext) {
try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
logUncheckedException(new Runnable() {
@Override
public void run() {
LOG.error("Received notification: " + notificationContext.getChangeType());
if (notificationContext.getChangeType() == HelixConstants.ChangeType.EXTERNAL_VIEW) {
generateShardConfig();
} else if (notificationContext.getChangeType() == HelixConstants.ChangeType.INSTANCE_CONFIG) {
if (updateDisabledHosts()) {
generateShardConfig();
}
}
}
});
}
}
@Override
@PreFetch(enabled = false)
public void onConfigChange(List<InstanceConfig> configs, NotificationContext changeContext) {
try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
logUncheckedException(new Runnable() {
@Override
public void run() {
if (updateDisabledHosts()) {
generateShardConfig();
}
}
});
}
}
@Override
@PreFetch(enabled = false)
public void onExternalViewChange(List<ExternalView> externalViewList,
NotificationContext changeContext) {
try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
logUncheckedException(new Runnable() {
@Override
public void run() {
if (updateDisabledHosts()) {
generateShardConfig();
}
}
});
}
}
private void generateShardConfig() {
monitor.incrementConfigGeneratorCalledCount();
Stopwatch stopwatch = Stopwatch.createStarted();
HelixAdmin admin = helixManager.getClusterManagmentTool();
List<String> resources = admin.getResourcesInCluster(clusterName);
filterOutTaskResources(resources);
Set<String> existingHosts = new HashSet<String>();
// compose cluster config
JSONObject config = new JSONObject();
for (String resource : resources) {
      // Resources starting with PARTICIPANT_LEADER are for HelixCustomCodeRunner
if (resource.startsWith("PARTICIPANT_LEADER")) {
continue;
}
ExternalView externalView = admin.getResourceExternalView(clusterName, resource);
if (externalView == null) {
monitor.incrementConfigGeneratorNullExternalView();
LOG.error("Failed to get externalView for resource: " + resource);
/**
* In some situations, we may encounter a null externalView for a given resource.
* This can happen, since there exists a race condition between retrieving list of resources
* and then iterating over each of those resources to retrieve corresponding externalView
* and potential deletion of a resource in between.
*
* Another situation where we may receive null externalView for a resource is in case where
         * a resource is newly created but its externalView has not yet been generated by helix
* controller.
*
* There can be quite a few race conditions which are difficult to enumerate and under such
* scenarios, we can't guarantee that externalView for a given resource exists at a specific
         * moment. In such cases, it is safe to ignore such a resource until its externalView is
* accessible and not null.
*/
continue;
}
Set<String> partitions = externalView.getPartitionSet();
// compose resource config
JSONObject resourceConfig = new JSONObject();
String partitionsStr = externalView.getRecord().getSimpleField("NUM_PARTITIONS");
resourceConfig.put("num_shards", Integer.parseInt(partitionsStr));
// build host to partition list map
Map<String, List<String>> hostToPartitionList = new HashMap<String, List<String>>();
for (String partition : partitions) {
String[] parts = partition.split("_");
String partitionNumber =
String.format("%05d", Integer.parseInt(parts[parts.length - 1]));
Map<String, String> hostToState = externalView.getStateMap(partition);
for (Map.Entry<String, String> entry : hostToState.entrySet()) {
existingHosts.add(entry.getKey());
/**TODO: gopalrajpurohit
* Add a LiveInstanceListener and remove any temporary / permanently dead hosts
* from consideration. This is to ensure that during deploys, we take into account
           * downed instances faster than is potentially available through externalViews.
*/
if (disabledHosts.contains(entry.getKey())) {
// exclude disabled hosts from the shard map config
continue;
}
String state = entry.getValue();
if (!state.equalsIgnoreCase("ONLINE") &&
!state.equalsIgnoreCase("MASTER") &&
!state.equalsIgnoreCase("SLAVE")) {
// Only ONLINE, MASTER and SLAVE states are ready for serving traffic
continue;
}
String hostWithDomain = getHostWithDomain(entry.getKey());
List<String> partitionList = hostToPartitionList.get(hostWithDomain);
if (partitionList == null) {
partitionList = new ArrayList<String>();
hostToPartitionList.put(hostWithDomain, partitionList);
}
if (state.equalsIgnoreCase("SLAVE")) {
partitionList.add(partitionNumber + ":S");
} else if (state.equalsIgnoreCase("MASTER")) {
partitionList.add(partitionNumber + ":M");
} else {
partitionList.add(partitionNumber);
}
}
}
// Add host to partition list map to the resource config
for (Map.Entry<String, List<String>> entry : hostToPartitionList.entrySet()) {
JSONArray jsonArray = new JSONArray();
for (String p : entry.getValue()) {
jsonArray.add(p);
}
resourceConfig.put(entry.getKey(), jsonArray);
}
// add the resource config to the cluster config
config.put(resource, resourceConfig);
}
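    // Sketch of the assembled config, with hypothetical resource and host names:
    // {"mydb": {"num_shards": 3,
    //           "10.0.0.1:9090:us-east-1a_pg1": ["00000:M", "00001:S", "00002"]}}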
// remove host that doesn't exist in the ExternalView from hostToHostWithDomain
hostToHostWithDomain.keySet().retainAll(existingHosts);
String newContent = config.toString();
if (lastPostedContent != null && lastPostedContent.equals(newContent)) {
LOG.error("Identical external view observed, skip updating config.");
return;
}
// Write the shard config to local
if (enableDumpToLocal) {
try {
FileWriter shard_config_writer = new FileWriter("/var/log/helixspectator/shard_config");
shard_config_writer.write(newContent);
shard_config_writer.close();
LOG.error("Successfully wrote the shard config to the local.");
} catch (IOException e) {
LOG.error("An error occurred when writing shard config to local");
e.printStackTrace();
}
}
// Write the config to ZK
LOG.error("Generating a new shard config...");
/**
* TODO: gopalrajpurohit
* Move shard_map updating logic into separate method.
*/
this.dataParameters.remove("content");
this.dataParameters.put("content", newContent);
HttpPost httpPost = new HttpPost(this.postUrl);
try {
httpPost.setEntity(new StringEntity(this.dataParameters.toString()));
HttpResponse response = new DefaultHttpClient().execute(httpPost);
if (response.getStatusLine().getStatusCode() == 200) {
lastPostedContent = newContent;
LOG.error("Succeed to generate a new shard config, sleep for 2 seconds");
TimeUnit.SECONDS.sleep(2);
} else {
LOG.error(response.getStatusLine().getReasonPhrase());
}
} catch (Exception e) {
LOG.error("Failed to post the new config", e);
}
stopwatch.stop();
long elapsedMs = stopwatch.elapsed(TimeUnit.MILLISECONDS);
monitor.reportConfigGeneratorLatency(elapsedMs);
}
private String getHostWithDomain(String host) {
String hostWithDomain = hostToHostWithDomain.get(host);
if (hostWithDomain != null) {
return hostWithDomain;
}
// local cache missed, read from ZK
HelixAdmin admin = helixManager.getClusterManagmentTool();
InstanceConfig instanceConfig = admin.getInstanceConfig(clusterName, host);
String domain = instanceConfig.getDomain();
String[] parts = domain.split(",");
String az = parts[0].split("=")[1];
String pg = parts[1].split("=")[1];
hostWithDomain = host.replace('_', ':') + ":" + az + "_" + pg;
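    // For example, host "10.0.0.1_9090" with domain "az=us-east-1a,pg=pg1" (format
    // assumed from the parsing above) becomes "10.0.0.1:9090:us-east-1a_pg1".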
hostToHostWithDomain.put(host, hostWithDomain);
return hostWithDomain;
}
// update disabledHosts, return true if there is any changes
private boolean updateDisabledHosts() {
HelixAdmin admin = helixManager.getClusterManagmentTool();
Set<String> latestDisabledInstances = new HashSet<>(
admin.getInstancesInClusterWithTag(clusterName, "disabled"));
if (disabledHosts.equals(latestDisabledInstances)) {
// no changes
LOG.error("No changes to disabled instances");
return false;
}
disabledHosts = latestDisabledInstances;
return true;
}
/**
* filter out resources with "Task" state model (ie. workflows and jobs);
* only keep db resources from ideal states
*/
public void filterOutTaskResources(List<String> resources) {
HelixAdmin admin = helixManager.getClusterManagmentTool();
Iterator<String> iter = resources.iterator();
while (iter.hasNext()) {
String res = iter.next();
IdealState ideal = admin.getResourceIdealState(clusterName, res);
if (ideal != null) {
String stateMode = ideal.getStateModelDefRef();
if (stateMode != null && stateMode.equals("Task")) {
iter.remove();
}
} else {
LOG.error(
"Did not remove resource from shard map generation, due to can't get ideal state for "
+ res);
}
}
}
}
| cluster_management/src/main/java/com/pinterest/rocksplicator/ConfigGenerator.java | /// Copyright 2017 Pinterest Inc.
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
//
// @author bol ([email protected])
//
package com.pinterest.rocksplicator;
import com.pinterest.rocksplicator.monitoring.mbeans.RocksplicatorMonitor;
import com.google.common.base.Stopwatch;
import org.apache.helix.HelixAdmin;
import org.apache.helix.HelixConstants;
import org.apache.helix.HelixManager;
import org.apache.helix.NotificationContext;
import org.apache.helix.api.listeners.PreFetch;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.participant.CustomCodeCallbackHandler;
import org.apache.helix.spectator.RoutingTableProvider;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
public class ConfigGenerator extends RoutingTableProvider implements CustomCodeCallbackHandler {
private static final Logger LOG = LoggerFactory.getLogger(ConfigGenerator.class);
private final String clusterName;
private final String postUrl;
private final boolean enableDumpToLocal;
private final Map<String, String> hostToHostWithDomain;
private final JSONObject dataParameters;
private final RocksplicatorMonitor monitor;
private final ReentrantLock synchronizedCallbackLock;
private HelixManager helixManager;
private String lastPostedContent;
private Set<String> disabledHosts;
public ConfigGenerator(String clusterName, HelixManager helixManager, String configPostUrl) {
this(clusterName, helixManager, configPostUrl,
new RocksplicatorMonitor(clusterName, helixManager.getInstanceName()));
}
public ConfigGenerator(String clusterName, HelixManager helixManager, String configPostUrl,
RocksplicatorMonitor monitor) {
this.clusterName = clusterName;
this.helixManager = helixManager;
this.hostToHostWithDomain = new HashMap<String, String>();
this.postUrl = configPostUrl;
this.dataParameters = new JSONObject();
this.dataParameters.put("config_version", "v3");
this.dataParameters.put("author", "ConfigGenerator");
this.dataParameters.put("comment", "new shard config");
this.dataParameters.put("content", "{}");
this.lastPostedContent = null;
this.disabledHosts = new HashSet<>();
this.monitor = monitor;
this.enableDumpToLocal = new File("/var/log/helixspectator").canWrite();
this.synchronizedCallbackLock = new ReentrantLock();
}
private static class AutoCloseableLock implements AutoCloseable {
private final Lock lock;
private AutoCloseableLock(Lock lock) {
this.lock = lock;
this.lock.lock();
}
public static AutoCloseableLock lock(Lock lock) {
return new AutoCloseableLock(lock);
}
@Override
public void close() {
this.lock.unlock();
}
}
/**
* We are not 100% confident on behaviour of helix agent w.r.t. threading and execution model
   * for callback functions called from the helix agent. In particular, it is possible for multiple
   * callbacks, of the same or different types and sources, to be called by helix in parallel.
*
* In order to ensure that only one callback is actively being processed at any time, we
* explicitly guard any callback function body with a single re-entrant lock. This provides
* explicit guarantee around the behaviour of how shard_maps are processed, generated and
* published.
*/
@Override
public void onCallback(NotificationContext notificationContext) {
try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
LOG.error("Received notification: " + notificationContext.getChangeType());
if (notificationContext.getChangeType() == HelixConstants.ChangeType.EXTERNAL_VIEW) {
generateShardConfig();
} else if (notificationContext.getChangeType() == HelixConstants.ChangeType.INSTANCE_CONFIG) {
if (updateDisabledHosts()) {
generateShardConfig();
}
}
}
}
@Override
@PreFetch(enabled = false)
public void onConfigChange(List<InstanceConfig> configs, NotificationContext changeContext) {
try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
if (updateDisabledHosts()) {
generateShardConfig();
}
}
}
@Override
@PreFetch(enabled = false)
public void onExternalViewChange(List<ExternalView> externalViewList,
NotificationContext changeContext) {
try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
generateShardConfig();
}
}
private void generateShardConfig() {
monitor.incrementConfigGeneratorCalledCount();
Stopwatch stopwatch = Stopwatch.createStarted();
HelixAdmin admin = helixManager.getClusterManagmentTool();
List<String> resources = admin.getResourcesInCluster(clusterName);
filterOutTaskResources(resources);
Set<String> existingHosts = new HashSet<String>();
// compose cluster config
JSONObject config = new JSONObject();
for (String resource : resources) {
      // Resources starting with PARTICIPANT_LEADER are for HelixCustomCodeRunner
if (resource.startsWith("PARTICIPANT_LEADER")) {
continue;
}
ExternalView externalView = admin.getResourceExternalView(clusterName, resource);
if (externalView == null) {
monitor.incrementConfigGeneratorNullExternalView();
LOG.error("Failed to get externalView for resource: " + resource);
/**
* In some situations, we may encounter a null externalView for a given resource.
* This can happen, since there exists a race condition between retrieving list of resources
* and then iterating over each of those resources to retrieve corresponding externalView
* and potential deletion of a resource in between.
*
* Another situation where we may receive null externalView for a resource is in case where
         * a resource is newly created but its externalView has not yet been generated by helix
* controller.
*
* There can be quite a few race conditions which are difficult to enumerate and under such
* scenarios, we can't guarantee that externalView for a given resource exists at a specific
         * moment. In such cases, it is safe to ignore such a resource until its externalView is
* accessible and not null.
*/
continue;
}
Set<String> partitions = externalView.getPartitionSet();
// compose resource config
JSONObject resourceConfig = new JSONObject();
String partitionsStr = externalView.getRecord().getSimpleField("NUM_PARTITIONS");
resourceConfig.put("num_shards", Integer.parseInt(partitionsStr));
// build host to partition list map
Map<String, List<String>> hostToPartitionList = new HashMap<String, List<String>>();
for (String partition : partitions) {
String[] parts = partition.split("_");
String partitionNumber =
String.format("%05d", Integer.parseInt(parts[parts.length - 1]));
Map<String, String> hostToState = externalView.getStateMap(partition);
for (Map.Entry<String, String> entry : hostToState.entrySet()) {
existingHosts.add(entry.getKey());
/**TODO: gopalrajpurohit
* Add a LiveInstanceListener and remove any temporary / permanently dead hosts
* from consideration. This is to ensure that during deploys, we take into account
           * downed instances faster than is potentially available through externalViews.
*/
if (disabledHosts.contains(entry.getKey())) {
// exclude disabled hosts from the shard map config
continue;
}
String state = entry.getValue();
if (!state.equalsIgnoreCase("ONLINE") &&
!state.equalsIgnoreCase("MASTER") &&
!state.equalsIgnoreCase("SLAVE")) {
// Only ONLINE, MASTER and SLAVE states are ready for serving traffic
continue;
}
String hostWithDomain = getHostWithDomain(entry.getKey());
List<String> partitionList = hostToPartitionList.get(hostWithDomain);
if (partitionList == null) {
partitionList = new ArrayList<String>();
hostToPartitionList.put(hostWithDomain, partitionList);
}
if (state.equalsIgnoreCase("SLAVE")) {
partitionList.add(partitionNumber + ":S");
} else if (state.equalsIgnoreCase("MASTER")) {
partitionList.add(partitionNumber + ":M");
} else {
partitionList.add(partitionNumber);
}
}
}
// Add host to partition list map to the resource config
for (Map.Entry<String, List<String>> entry : hostToPartitionList.entrySet()) {
JSONArray jsonArray = new JSONArray();
for (String p : entry.getValue()) {
jsonArray.add(p);
}
resourceConfig.put(entry.getKey(), jsonArray);
}
// add the resource config to the cluster config
config.put(resource, resourceConfig);
}
// remove host that doesn't exist in the ExternalView from hostToHostWithDomain
hostToHostWithDomain.keySet().retainAll(existingHosts);
String newContent = config.toString();
if (lastPostedContent != null && lastPostedContent.equals(newContent)) {
LOG.error("Identical external view observed, skip updating config.");
return;
}
// Write the shard config to local
if (enableDumpToLocal) {
try {
FileWriter shard_config_writer = new FileWriter("/var/log/helixspectator/shard_config");
shard_config_writer.write(newContent);
shard_config_writer.close();
LOG.error("Successfully wrote the shard config to the local.");
} catch (IOException e) {
LOG.error("An error occurred when writing shard config to local");
e.printStackTrace();
}
}
// Write the config to ZK
LOG.error("Generating a new shard config...");
/**
* TODO: gopalrajpurohit
* Move shard_map updating logic into separate method.
*/
this.dataParameters.remove("content");
this.dataParameters.put("content", newContent);
HttpPost httpPost = new HttpPost(this.postUrl);
try {
httpPost.setEntity(new StringEntity(this.dataParameters.toString()));
HttpResponse response = new DefaultHttpClient().execute(httpPost);
if (response.getStatusLine().getStatusCode() == 200) {
lastPostedContent = newContent;
LOG.error("Succeed to generate a new shard config, sleep for 2 seconds");
TimeUnit.SECONDS.sleep(2);
} else {
LOG.error(response.getStatusLine().getReasonPhrase());
}
} catch (Exception e) {
LOG.error("Failed to post the new config", e);
}
stopwatch.stop();
long elapsedMs = stopwatch.elapsed(TimeUnit.MILLISECONDS);
monitor.reportConfigGeneratorLatency(elapsedMs);
}
private String getHostWithDomain(String host) {
String hostWithDomain = hostToHostWithDomain.get(host);
if (hostWithDomain != null) {
return hostWithDomain;
}
// local cache missed, read from ZK
HelixAdmin admin = helixManager.getClusterManagmentTool();
InstanceConfig instanceConfig = admin.getInstanceConfig(clusterName, host);
String domain = instanceConfig.getDomain();
String[] parts = domain.split(",");
String az = parts[0].split("=")[1];
String pg = parts[1].split("=")[1];
hostWithDomain = host.replace('_', ':') + ":" + az + "_" + pg;
hostToHostWithDomain.put(host, hostWithDomain);
return hostWithDomain;
}
// update disabledHosts, return true if there is any changes
private boolean updateDisabledHosts() {
HelixAdmin admin = helixManager.getClusterManagmentTool();
Set<String> latestDisabledInstances = new HashSet<>(
admin.getInstancesInClusterWithTag(clusterName, "disabled"));
if (disabledHosts.equals(latestDisabledInstances)) {
// no changes
LOG.error("No changes to disabled instances");
return false;
}
disabledHosts = latestDisabledInstances;
return true;
}
/**
* filter out resources with "Task" state model (ie. workflows and jobs);
* only keep db resources from ideal states
*/
public void filterOutTaskResources(List<String> resources) {
HelixAdmin admin = helixManager.getClusterManagmentTool();
Iterator<String> iter = resources.iterator();
while (iter.hasNext()) {
String res = iter.next();
IdealState ideal = admin.getResourceIdealState(clusterName, res);
if (ideal != null) {
String stateMode = ideal.getStateModelDefRef();
if (stateMode != null && stateMode.equals("Task")) {
iter.remove();
}
} else {
LOG.error(
"Did not remove resource from shard map generation, due to can't get ideal state for "
+ res);
}
}
}
}
| Log all unchecked exceptions from shard generation (#415)
| cluster_management/src/main/java/com/pinterest/rocksplicator/ConfigGenerator.java | Log all unchecked exceptions from shard generation (#415) | <ide><path>luster_management/src/main/java/com/pinterest/rocksplicator/ConfigGenerator.java
<ide> }
<ide> }
<ide>
<add> private void logUncheckedException(Runnable r) {
<add> try {
<add> r.run();
<add> } catch (Throwable throwable) {
<add> LOG.error("Exception in generateShardConfig()", throwable);
<add> throw throwable;
<add> }
<add> }
<add>
<ide> /**
<ide> * We are not 100% confident on behaviour of helix agent w.r.t. threading and execution model
<ide>    * for callback functions called from the helix agent. In particular, it is possible for multiple
<ide> * published.
<ide> */
<ide> @Override
<del> public void onCallback(NotificationContext notificationContext) {
<add> public void onCallback(final NotificationContext notificationContext) {
<ide> try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
<del> LOG.error("Received notification: " + notificationContext.getChangeType());
<del> if (notificationContext.getChangeType() == HelixConstants.ChangeType.EXTERNAL_VIEW) {
<del> generateShardConfig();
<del> } else if (notificationContext.getChangeType() == HelixConstants.ChangeType.INSTANCE_CONFIG) {
<del> if (updateDisabledHosts()) {
<del> generateShardConfig();
<del> }
<del> }
<add> logUncheckedException(new Runnable() {
<add> @Override
<add> public void run() {
<add> LOG.error("Received notification: " + notificationContext.getChangeType());
<add> if (notificationContext.getChangeType() == HelixConstants.ChangeType.EXTERNAL_VIEW) {
<add> generateShardConfig();
<add> } else if (notificationContext.getChangeType() == HelixConstants.ChangeType.INSTANCE_CONFIG) {
<add> if (updateDisabledHosts()) {
<add> generateShardConfig();
<add> }
<add> }
<add> }
<add> });
<ide> }
<ide> }
<ide>
<ide> @PreFetch(enabled = false)
<ide> public void onConfigChange(List<InstanceConfig> configs, NotificationContext changeContext) {
<ide> try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
<del> if (updateDisabledHosts()) {
<del> generateShardConfig();
<del> }
<add> logUncheckedException(new Runnable() {
<add> @Override
<add> public void run() {
<add> if (updateDisabledHosts()) {
<add> generateShardConfig();
<add> }
<add> }
<add> });
<ide> }
<ide> }
<ide>
<ide> public void onExternalViewChange(List<ExternalView> externalViewList,
<ide> NotificationContext changeContext) {
<ide> try (AutoCloseableLock autoLock = AutoCloseableLock.lock(this.synchronizedCallbackLock)) {
<del> generateShardConfig();
<add> logUncheckedException(new Runnable() {
<add> @Override
<add> public void run() {
<add> if (updateDisabledHosts()) {
<add> generateShardConfig();
<add> }
<add> }
<add> });
<ide> }
<ide> }
<ide> |
|
JavaScript | mit | 0e7b985b874b2d3ed769817778167473abe9fdf5 | 0 | agrc/AddressPointEditor,agrc/AddressPointEditor,agrc/AddressPointEditor | module.exports = function (grunt) {
require('load-grunt-tasks')(grunt);
var otherFiles = [
'src/app/**/*.html',
'src/app/**/*.css',
'src/index.html',
'src/ChangeLog.html',
'tests/**/*.js'
];
var jsFiles = [
'src/app/**/*.js',
'profiles/*.js',
'GruntFile.js'
];
var bumpFiles = [
'package.json',
'bower.json',
'src/app/package.json',
'src/app/config.js'
];
var deployFiles = [
'**',
'!**/*.uncompressed.js',
'!**/*consoleStripped.js',
'!**/bootstrap/less/**',
'!**/bootstrap/test-infra/**',
'!**/tests/**',
'!build-report.txt',
'!components-jasmine/**',
'!favico.js/**',
'!jasmine-favicon-reporter/**',
'!jasmine-jsreporter/**',
'!stubmodule/**',
'!util/**'
];
var deployDirProd = 'AddressPointEditor';
var deployDirStage = 'wwwroot/AddressPointEditor';
var secrets;
try {
secrets = grunt.file.readJSON('secrets.json');
} catch (e) {
// swallow for build server
secrets = {
stageHost: '',
prodHost: '',
username: '',
password: ''
};
}
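    // A local secrets.json mirrors the fallback shape above, e.g. (hypothetical values):
    // { "stageHost": "stage.example.com", "prodHost": "prod.example.com",
    //   "username": "deployer", "password": "..." }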
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
amdcheck: {
main: {
options: {
removeUnusedDependencies: false
},
files: [{
src: [
'src/app/**/*.js'
]
}]
}
},
bump: {
options: {
files: bumpFiles,
commitFiles: bumpFiles.concat('src/ChangeLog.html'),
push: false
}
},
clean: {
build: ['dist'],
deploy: ['deploy']
},
compress: {
main: {
options: {
archive: 'deploy/deploy.zip'
},
files: [{
src: deployFiles,
dest: './',
cwd: 'dist/',
expand: true
}]
}
},
connect: {
uses_mains: {}
},
copy: {
main: {
expand: true,
cwd: 'src/',
src: ['ChangeLog.html'],
dest: 'dist/'
}
},
dojo: {
prod: {
options: {
                    // Options applied only to the prod build target
profiles: ['profiles/prod.build.profile.js', 'profiles/build.profile.js'] // Profile for build
}
},
stage: {
options: {
// You can also specify options to be used in all your tasks
profiles: ['profiles/stage.build.profile.js', 'profiles/build.profile.js'] // Profile for build
}
},
options: {
// You can also specify options to be used in all your tasks
dojo: 'src/dojo/dojo.js', // Path to dojo.js file in dojo source
load: 'build', // Optional: Utility to bootstrap (Default: 'build')
releaseDir: '../dist',
require: 'src/app/run.js', // Optional: Module to require for the build (Default: nothing)
basePath: './src'
}
},
eslint: {
options: {
configFile: '.eslintrc'
},
main: {
src: jsFiles
}
},
imagemin: {
main: {
options: {
optimizationLevel: 3
},
files: [{
expand: true, // Enable dynamic expansion
cwd: 'src/', // Src matches are relative to this path
src: '**/*.{png,jpg,gif}', // Actual patterns to match
dest: 'src/' // Destination path prefix
}]
}
},
jasmine: {
main: {
src: ['src/app/run.js'],
options: {
specs: ['src/app/**/Spec*.js'],
vendor: [
'src/jasmine-favicon-reporter/vendor/favico.js',
'src/jasmine-favicon-reporter/jasmine-favicon-reporter.js',
'src/jasmine-jsreporter/jasmine-jsreporter.js',
'src/app/tests/jasmineTestBootstrap.js',
'src/dojo/dojo.js',
'src/app/tests/jsReporterSanitizer.js',
'src/app/tests/jasmineAMDErrorChecking.js'
],
host: 'http://localhost:8000'
}
}
},
parallel: {
options: {
grunt: true
},
assets: {
tasks: ['eslint:main', 'amdcheck:main', 'jasmine:main:build']
},
buildAssets: {
tasks: ['eslint:main', 'clean:build', 'newer:imagemin:main']
}
},
processhtml: {
options: {},
main: {
files: {
'dist/index.html': ['src/index.html'],
'dist/user_admin.html': ['src/user_admin.html']
}
}
},
secrets: secrets,
sftp: {
stage: {
files: {
'./': 'deploy/deploy.zip'
},
options: {
host: '<%= secrets.stageHost %>',
path: './' + deployDirStage + '/'
}
},
prod: {
files: {
'./': 'deploy/deploy.zip'
},
options: {
host: '<%= secrets.prodHost %>',
path: './' + deployDirProd + '/'
}
},
options: {
srcBasePath: 'deploy/',
username: '<%= secrets.username %>',
password: '<%= secrets.password %>',
showProgress: true,
readyTimeout: 30000
}
},
sshexec: {
options: {
username: '<%= secrets.username %>',
password: '<%= secrets.password %>',
readyTimeout: 30000
},
stage: {
command: ['cd ' + deployDirStage, 'unzip -oq deploy.zip', 'rm deploy.zip'].join(';'),
options: {
host: '<%= secrets.stageHost %>'
}
},
prod: {
command: ['cd ' + deployDirProd, 'unzip -oq deploy.zip', 'rm deploy.zip'].join(';'),
options: {
host: '<%= secrets.prodHost %>'
}
}
},
uglify: {
options: {
preserveComments: false,
sourceMap: true,
compress: {
drop_console: true,
passes: 2,
dead_code: true
}
},
stage: {
options: {
compress: {
drop_console: false
}
},
files: {
'dist/dojo/dojo.js': ['dist/dojo/dojo.js'],
'dist/app/run_user_admin.js': ['dist/app/run_user_admin.js']
}
},
prod: {
files: [{
expand: true,
cwd: 'dist',
src: '**/*.js',
dest: 'dist'
}]
}
},
watch: {
eslint: {
files: jsFiles,
tasks: ['newer:eslint:main', 'jasmine:main:build']
},
src: {
files: jsFiles.concat(otherFiles),
options: { livereload: true }
}
}
});
grunt.registerTask('default', [
'parallel:assets',
'connect',
'watch'
]);
grunt.registerTask('build-prod', [
'parallel:buildAssets',
'dojo:prod',
'uglify:prod',
'copy:main',
'processhtml:main'
]);
grunt.registerTask('build-stage', [
'parallel:buildAssets',
'dojo:stage',
'uglify:stage',
'copy:main',
'processhtml:main'
]);
grunt.registerTask('deploy-prod', [
'clean:deploy',
'compress:main',
'sftp:prod',
'sshexec:prod'
]);
grunt.registerTask('deploy-stage', [
'clean:deploy',
'compress:main',
'sftp:stage',
'sshexec:stage'
]);
grunt.registerTask('travis', [
'eslint:main',
'build-prod'
]);
};
| GruntFile.js | module.exports = function (grunt) {
require('load-grunt-tasks')(grunt);
var otherFiles = [
'src/app/**/*.html',
'src/app/**/*.css',
'src/index.html',
'src/ChangeLog.html',
'tests/**/*.js'
];
var jsFiles = [
'src/app/**/*.js',
'profiles/*.js',
'GruntFile.js'
];
var bumpFiles = [
'package.json',
'bower.json',
'src/app/package.json',
'src/app/config.js'
];
var deployFiles = [
'**',
'!**/*.uncompressed.js',
'!**/*consoleStripped.js',
'!**/bootstrap/less/**',
'!**/bootstrap/test-infra/**',
'!**/tests/**',
'!build-report.txt',
'!components-jasmine/**',
'!favico.js/**',
'!jasmine-favicon-reporter/**',
'!jasmine-jsreporter/**',
'!stubmodule/**',
'!util/**'
];
var deployDirProd = 'AddressPointEditor';
var deployDirStage = 'wwwroot/AddressPointEditor';
var secrets;
try {
secrets = grunt.file.readJSON('secrets.json');
} catch (e) {
// swallow for build server
secrets = {
stageHost: '',
prodHost: '',
username: '',
password: ''
};
}
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
amdcheck: {
main: {
options: {
removeUnusedDependencies: false
},
files: [{
src: [
'src/app/**/*.js'
]
}]
}
},
bump: {
options: {
files: bumpFiles,
commitFiles: bumpFiles.concat('src/ChangeLog.html'),
push: false
}
},
clean: {
build: ['dist'],
deploy: ['deploy']
},
compress: {
main: {
options: {
archive: 'deploy/deploy.zip'
},
files: [{
src: deployFiles,
dest: './',
cwd: 'dist/',
expand: true
}]
}
},
connect: {
uses_mains: {}
},
copy: {
main: {
expand: true,
cwd: 'src/',
src: ['ChangeLog.html'],
dest: 'dist/'
}
},
dojo: {
prod: {
options: {
// You can also specify options to be used in all your tasks
profiles: ['profiles/prod.build.profile.js', 'profiles/build.profile.js'] // Profile for build
}
},
stage: {
options: {
// You can also specify options to be used in all your tasks
profiles: ['profiles/stage.build.profile.js', 'profiles/build.profile.js'] // Profile for build
}
},
options: {
// You can also specify options to be used in all your tasks
dojo: 'src/dojo/dojo.js', // Path to dojo.js file in dojo source
load: 'build', // Optional: Utility to bootstrap (Default: 'build')
releaseDir: '../dist',
require: 'src/app/run.js', // Optional: Module to require for the build (Default: nothing)
basePath: './src'
}
},
eslint: {
options: {
configFile: '.eslintrc'
},
main: {
src: jsFiles
}
},
imagemin: {
main: {
options: {
optimizationLevel: 3
},
files: [{
expand: true, // Enable dynamic expansion
cwd: 'src/', // Src matches are relative to this path
src: '**/*.{png,jpg,gif}', // Actual patterns to match
dest: 'src/' // Destination path prefix
}]
}
},
jasmine: {
main: {
src: ['src/app/run.js'],
options: {
specs: ['src/app/**/Spec*.js'],
vendor: [
'src/jasmine-favicon-reporter/vendor/favico.js',
'src/jasmine-favicon-reporter/jasmine-favicon-reporter.js',
'src/jasmine-jsreporter/jasmine-jsreporter.js',
'src/app/tests/jasmineTestBootstrap.js',
'src/dojo/dojo.js',
'src/app/tests/jsReporterSanitizer.js',
'src/app/tests/jasmineAMDErrorChecking.js'
],
host: 'http://localhost:8000'
}
}
},
parallel: {
options: {
grunt: true
},
assets: {
tasks: ['eslint:main', 'amdcheck:main', 'jasmine:main:build']
},
buildAssets: {
tasks: ['eslint:main', 'clean:build', 'newer:imagemin:main']
}
},
processhtml: {
options: {},
main: {
files: {
'dist/index.html': ['src/index.html'],
'dist/user_admin.html': ['src/user_admin.html']
}
}
},
secrets: secrets,
sftp: {
stage: {
files: {
'./': 'deploy/deploy.zip'
},
options: {
host: '<%= secrets.stageHost %>',
path: './' + deployDirStage + '/'
}
},
prod: {
files: {
'./': 'deploy/deploy.zip'
},
options: {
host: '<%= secrets.prodHost %>',
path: './' + deployDirProd + '/'
}
},
options: {
srcBasePath: 'deploy/',
username: '<%= secrets.username %>',
password: '<%= secrets.password %>',
showProgress: true,
readyTimeout: 30000
}
},
sshexec: {
options: {
username: '<%= secrets.username %>',
password: '<%= secrets.password %>',
readyTimeout: 30000
},
stage: {
command: ['cd ' + deployDirStage, 'unzip -oq deploy.zip', 'rm deploy.zip'].join(';'),
options: {
host: '<%= secrets.stageHost %>'
}
},
prod: {
command: ['cd ' + deployDirProd, 'unzip -oq deploy.zip', 'rm deploy.zip'].join(';'),
options: {
host: '<%= secrets.prodHost %>'
}
}
},
uglify: {
options: {
preserveComments: false,
sourceMap: true,
compress: {
drop_console: true,
passes: 2,
dead_code: true
}
},
stage: {
options: {
compress: {
drop_console: false
}
},
src: ['dist/dojo/dojo.js'],
dest: 'dist/dojo/dojo.js'
},
prod: {
files: [{
expand: true,
cwd: 'dist',
src: '**/*.js',
dest: 'dist'
}]
}
},
watch: {
eslint: {
files: jsFiles,
tasks: ['newer:eslint:main', 'jasmine:main:build']
},
src: {
files: jsFiles.concat(otherFiles),
options: { livereload: true }
}
}
});
grunt.registerTask('default', [
'parallel:assets',
'connect',
'watch'
]);
grunt.registerTask('build-prod', [
'parallel:buildAssets',
'dojo:prod',
'uglify:prod',
'copy:main',
'processhtml:main'
]);
grunt.registerTask('build-stage', [
'parallel:buildAssets',
'dojo:stage',
'uglify:stage',
'copy:main',
'processhtml:main'
]);
grunt.registerTask('deploy-prod', [
'clean:deploy',
'compress:main',
'sftp:prod',
'sshexec:prod'
]);
grunt.registerTask('deploy-stage', [
'clean:deploy',
'compress:main',
'sftp:stage',
'sshexec:stage'
]);
grunt.registerTask('travis', [
'eslint:main',
'build-prod'
]);
};
| compress the admin page also
| GruntFile.js | compress the admin page also | <ide><path>runtFile.js
<ide> drop_console: false
<ide> }
<ide> },
<del> src: ['dist/dojo/dojo.js'],
<del> dest: 'dist/dojo/dojo.js'
<add> files: {
<add> 'dist/dojo/dojo.js': ['dist/dojo/dojo.js'],
<add> 'dist/app/run_user_admin.js': ['dist/app/run_user_admin.js']
<add> }
<ide> },
<ide> prod: {
<ide> files: [{ |
|
JavaScript | mit | b1d98e1fbbba6a6f8e0db54c479e0b0265640f10 | 0 | HumanDynamics/rhythm-server,HumanDynamics/rhythm-server | // deactivate-meeting-hook.js -- Before, patch. sets a meeting to inactive
// if it's going from n to 0 participant before a `patch` event
// will also:
// - generate meeting events for meeting ends
// - end turn computation when a meeting stops
'use strict'
const _ = require('underscore')
const winston = require('winston')
var d3 = require('d3')
var jsdom = require('jsdom')
var nodemailer = require('nodemailer')
var request = require('request')
function shouldMakeMeetingInactive (newParticipants, meetingObject) {
return (newParticipants.length === 0 &&
meetingObject.participants.length > 0 &&
meetingObject.active === true)
}
function reportMeeting (hook) {
getReportData(hook, (visualizationData, addresses) => {
sendReport(createVisualization(visualizationData), addresses)
})
}
function getReportData (hook, callback) {
winston.log('info', 'Getting report data...')
// find participant events (-> historical participants)
var meetingId = hook.id
if (hook.id === Object(hook.id)) {
meetingId = hook.id._id
}
return hook.app.service('participants').find({
query: {
$select: [ '_id', 'name', 'meetings' ]
}
}).then((participants) => {
var validParticipants = _.filter(participants.data, (participant) => {
return _.contains(participant.meetings, meetingId)
})
// find utterances
hook.app.service('utterances').find({
query: {
meeting: meetingId,
$select: [ 'participant', 'meeting', 'startTime', 'endTime' ]
}
}).then((utterances) => {
// {'participant': [utteranceObject, ...]}
var participantUtterances = _.groupBy(utterances, 'participant')
// {'participant': number of utterances}
var numUtterances = _.mapObject(participantUtterances, (val, key) => {
return val.length
})
// {'participant': mean length of utterances in seconds}
var meanLengthUtterances = _.mapObject(participantUtterances, (val, key) => {
var lengthsUtterances = val.map((utteranceObject) => {
return (new Date(utteranceObject.endTime).getTime() - new Date(utteranceObject.startTime).getTime()) / 1000
})
var sum = lengthsUtterances.reduce((previous, current) => current + previous, 0)
return sum / lengthsUtterances.length
})
// [{'name': ..., 'numUtterances': ..., 'meanLengthUtterances': ...}, ...]
var visualizationData = validParticipants.map((participant) => {
var participantId = participant[ '_id' ]
return {
name: participant[ 'name' ],
numUtterances: participantId in numUtterances ? numUtterances[ participantId ] : 0,
meanLengthUtterances: participantId in meanLengthUtterances ? meanLengthUtterances[ participantId ] : 0
}
})
// get mapping between google id and addresses
request.get(process.env.MAPPING_URL, function (error, response, body) {
// {'_id': address, ...}
var mapping = {}
if (!error && response.statusCode === 200) {
var rows = body.split('\n')
for (var i = 0; i < rows.length; i++) {
var cols = rows[ i ].split(',')
mapping[ String(cols[ 0 ]) ] = cols[ 1 ]
}
}
// [address, ...]
var addresses = validParticipants.map((participant) => {
return mapping[ participant[ '_id' ] ]
})
callback(visualizationData, addresses)
}).catch(function (error) {
winston.log('info', '[getReportData] error: ', error)
})
})
})
}
function createVisualization (visualizationData) {
winston.log('info', 'Creating report visualization...')
var margin = { top: 20, right: 15, bottom: 60, left: 60 }
var width = 800 - margin.left - margin.right
var height = 500 - margin.top - margin.bottom
var color = d3.scale.category20()
var x = d3.scale.linear()
.domain([ 0, d3.max(visualizationData, function (d) { return d.meanLengthUtterances }) + 5 ])
.range([ 0, width ])
var y = d3.scale.linear()
.domain([ 0, d3.max(visualizationData, function (d) { return d.numUtterances }) + 1 ])
.range([ height, 0 ])
var document = jsdom.jsdom()
var chart = d3.select(document.body)
.append('svg')
.attr('width', width + margin.right + margin.left)
.attr('height', height + margin.top + margin.bottom)
.attr('class', 'chart')
var main = chart.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')')
.attr('width', width)
.attr('height', height)
.attr('class', 'main')
// draw the x axis
var xAxis = d3.svg.axis()
.scale(x)
.orient('bottom')
main.append('g')
.attr('transform', 'translate(0,' + height + ')')
.attr('class', 'main axis date')
.call(xAxis)
.append('text')
.attr('class', 'label')
.attr('x', width)
.attr('y', -15)
.attr('dy', '.71em')
.style('text-anchor', 'end')
.text('Avg. Length of Turns')
// draw the y axis
var yAxis = d3.svg.axis()
.scale(y)
.orient('left')
main.append('g')
.attr('transform', 'translate(0,0)')
.attr('class', 'main axis date')
.call(yAxis)
.append('text')
.attr('class', 'label')
.attr('transform', 'rotate(-90)')
.attr('y', 6)
.attr('dy', '.71em')
.style('text-anchor', 'end')
.text('Turns Taken')
var g = main.append('svg:g')
var node = g.selectAll('scatter-dots')
.data(visualizationData)
.enter().append('g')
node.append('svg:circle')
.style('fill', function (d) { return color(d.name) })
.attr('cx', function (d, i) { return x(d.meanLengthUtterances) })
.attr('cy', function (d) { return y(d.numUtterances) })
.attr('r', 10)
node.append('text')
.attr('x', function (d) { return x(d.meanLengthUtterances) + 10 })
.attr('y', function (d) { return y(d.numUtterances) })
.text(function (d) { return d.name })
// create html file
var htmlStyle = '<style>\n' +
'body {\n' +
'font-family: "Helvetica", "Arial", sans-serif;\n' +
'color: #444444;\n' +
'font-size: 9pt;\n' +
'background-color: #FAFAFA;\n' +
'}\n' +
'.axis path,\n' +
'.axis line {\n' +
'fill: none;\n' +
'stroke: #000;\n' +
'shape-rendering: crispEdges;\n' +
'}\n' +
'.dot {\n' +
'stroke: #000;\n' +
'}\n' +
'.tooltip {\n' +
'position: absolute;\n' +
'width: 200px;\n' +
'height: 28px;\n' +
'pointer-events: none;\n' +
'}\n' +
'.node {\n' +
'stroke: #fff;\n' +
'stroke-width: 1.5px;\n' +
'}\n' +
'.link {\n' +
'stroke: #999;\n' +
'stroke-opacity: .6;\n' +
'}\n' +
'path.link {\n' +
'fill: none;\n' +
'stroke: #000;\n' +
'stroke-width: 4px;\n' +
'cursor: default;\n' +
'}\n' +
'</style>\n'
var htmlBody = '<body>\n' +
'<h1>Your Meeting: Turn Taken</h1>\n' +
d3.select(document.body).node().innerHTML + '\n' +
'</body>'
var html = '<!DOCTYPE html>\n' +
'<meta charset="utf-8">\n' +
htmlStyle +
htmlBody
return html
}
function sendReport (visualization, addresses) {
winston.log('info', 'Sending report...')
// TODO change SMTP configurations
// define SMTP configurations
var smtpConfig = {
host: process.env.REPORT_EMAIL_HOST,
port: 465,
secure: true,
auth: {
user: process.env.REPORT_EMAIL_LOGIN,
pass: process.env.REPORT_EMAIL_PASSWORD
}
}
// create transporter object
var transporter = nodemailer.createTransport(smtpConfig)
// TODO change email data
// setup email data
var mailOptions = {
from: process.env.REPORT_EMAIL_FROM,
to: addresses,
subject: process.env.REPORT_EMAIL_SUBJECT,
text: process.env.REPORT_EMAIL_TEXT,
html: process.env.REPORT_EMAIL_TEXT,
attachments: {
filename: 'visualization.html',
content: visualization
}
}
// send email with transporter object
transporter.sendMail(mailOptions, function (error, info) {
if (error) {
return console.log('[sendReport] error: ' + error)
}
return console.log('Report was sent: ' + info.response)
})
}
function createMeetingEndEvent (hook) {
var meetingId = (hook.method === 'create') ? hook.data._id : hook.id
return hook.app.service('meetingEvents').create({
meeting: meetingId,
event: 'end',
timestamp: new Date()
}, {}).then((meetingEvent) => {
return hook
})
}
module.exports = function (hook) {
if (!_.has(hook.data, 'participants')) {
return hook
} else {
return hook.app.service('meetings').get(hook.id)
.then((meeting) => {
if (shouldMakeMeetingInactive(hook.data.participants, meeting)) {
hook.data.active = false
hook.data.endTime = new Date()
if (process.env.SEND_REPORT) {
reportMeeting(hook)
}
return createMeetingEndEvent(hook)
} else {
return hook
}
})
}
}
| src/services/meeting/hooks/deactivate-meeting-hook.js | // deactivate-meeting-hook.js -- Before, patch. sets a meeting to inactive
// if it's going from n to 0 participant before a `patch` event
// will also:
// - generate meeting events for meeting ends
// - end turn computation when a meeting stops
'use strict'
const _ = require('underscore')
const winston = require('winston')
var d3 = require('d3')
var jsdom = require('jsdom')
var nodemailer = require('nodemailer')
var request = require('request')
function shouldMakeMeetingInactive (newParticipants, meetingObject) {
return (newParticipants.length === 0 &&
meetingObject.participants.length > 0 &&
meetingObject.active === true)
}
function reportMeeting (hook) {
getReportData(hook, (visualizationData, addresses) => {
sendReport(createVisualization(visualizationData), addresses)
})
}
function getReportData (hook, callback) {
winston.log('info', 'Getting report data...')
// find participant events (-> historical participants)
return hook.app.service('participants').find({
query: {
$select: [ '_id', 'name', 'meetings' ]
}
}).then((participants) => {
var validParticipants = _.filter(participants.data, (participant) => {
return _.contains(participant.meetings, hook.id)
})
// find utterances
hook.app.service('utterances').find({
query: {
meeting: hook.id,
$select: [ 'participant', 'meeting', 'startTime', 'endTime' ]
}
}).then((utterances) => {
// {'participant': [utteranceObject, ...]}
var participantUtterances = _.groupBy(utterances, 'participant')
// {'participant': number of utterances}
var numUtterances = _.mapObject(participantUtterances, (val, key) => {
return val.length
})
// {'participant': mean length of utterances in seconds}
var meanLengthUtterances = _.mapObject(participantUtterances, (val, key) => {
var lengthsUtterances = val.map((utteranceObject) => {
return (new Date(utteranceObject.endTime).getTime() - new Date(utteranceObject.startTime).getTime()) / 1000
})
var sum = lengthsUtterances.reduce((previous, current) => current + previous, 0)
return sum / lengthsUtterances.length
})
// [{'name': ..., 'numUtterances': ..., 'meanLengthUtterances': ...}, ...]
var visualizationData = validParticipants.map((participant) => {
var participantId = participant[ '_id' ]
return {
name: participant[ 'name' ],
numUtterances: participantId in numUtterances ? numUtterances[ participantId ] : 0,
meanLengthUtterances: participantId in meanLengthUtterances ? meanLengthUtterances[ participantId ] : 0
}
})
// get mapping between google id and addresses
request.get(process.env.MAPPING_URL, function (error, response, body) {
// {'_id': address, ...}
var mapping = {}
if (!error && response.statusCode === 200) {
var rows = body.split('\n')
for (var i = 0; i < rows.length; i++) {
var cols = rows[ i ].split(',')
mapping[ String(cols[ 0 ]) ] = cols[ 1 ]
}
}
// [address, ...]
var addresses = validParticipants.map((participant) => {
return mapping[ participant[ '_id' ] ]
})
callback(visualizationData, addresses)
}).catch(function (error) {
winston.log('info', '[getReportData] error: ', error)
})
})
})
}
function createVisualization (visualizationData) {
winston.log('info', 'Creating report visualization...')
var margin = { top: 20, right: 15, bottom: 60, left: 60 }
var width = 800 - margin.left - margin.right
var height = 500 - margin.top - margin.bottom
var color = d3.scale.category20()
var x = d3.scale.linear()
.domain([ 0, d3.max(visualizationData, function (d) { return d.meanLengthUtterances }) + 5 ])
.range([ 0, width ])
var y = d3.scale.linear()
.domain([ 0, d3.max(visualizationData, function (d) { return d.numUtterances }) + 1 ])
.range([ height, 0 ])
var document = jsdom.jsdom()
var chart = d3.select(document.body)
.append('svg')
.attr('width', width + margin.right + margin.left)
.attr('height', height + margin.top + margin.bottom)
.attr('class', 'chart')
var main = chart.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')')
.attr('width', width)
.attr('height', height)
.attr('class', 'main')
// draw the x axis
var xAxis = d3.svg.axis()
.scale(x)
.orient('bottom')
main.append('g')
.attr('transform', 'translate(0,' + height + ')')
.attr('class', 'main axis date')
.call(xAxis)
.append('text')
.attr('class', 'label')
.attr('x', width)
.attr('y', -15)
.attr('dy', '.71em')
.style('text-anchor', 'end')
.text('Avg. Length of Turns')
// draw the y axis
var yAxis = d3.svg.axis()
.scale(y)
.orient('left')
main.append('g')
.attr('transform', 'translate(0,0)')
.attr('class', 'main axis date')
.call(yAxis)
.append('text')
.attr('class', 'label')
.attr('transform', 'rotate(-90)')
.attr('y', 6)
.attr('dy', '.71em')
.style('text-anchor', 'end')
.text('Turns Taken')
var g = main.append('svg:g')
var node = g.selectAll('scatter-dots')
.data(visualizationData)
.enter().append('g')
node.append('svg:circle')
.style('fill', function (d) { return color(d.name) })
.attr('cx', function (d, i) { return x(d.meanLengthUtterances) })
.attr('cy', function (d) { return y(d.numUtterances) })
.attr('r', 10)
node.append('text')
.attr('x', function (d) { return x(d.meanLengthUtterances) + 10 })
.attr('y', function (d) { return y(d.numUtterances) })
.text(function (d) { return d.name })
// create html file
var htmlStyle = '<style>\n' +
'body {\n' +
'font-family: "Helvetica", "Arial", sans-serif;\n' +
'color: #444444;\n' +
'font-size: 9pt;\n' +
'background-color: #FAFAFA;\n' +
'}\n' +
'.axis path,\n' +
'.axis line {\n' +
'fill: none;\n' +
'stroke: #000;\n' +
'shape-rendering: crispEdges;\n' +
'}\n' +
'.dot {\n' +
'stroke: #000;\n' +
'}\n' +
'.tooltip {\n' +
'position: absolute;\n' +
'width: 200px;\n' +
'height: 28px;\n' +
'pointer-events: none;\n' +
'}\n' +
'.node {\n' +
'stroke: #fff;\n' +
'stroke-width: 1.5px;\n' +
'}\n' +
'.link {\n' +
'stroke: #999;\n' +
'stroke-opacity: .6;\n' +
'}\n' +
'path.link {\n' +
'fill: none;\n' +
'stroke: #000;\n' +
'stroke-width: 4px;\n' +
'cursor: default;\n' +
'}\n' +
'</style>\n'
var htmlBody = '<body>\n' +
'<h1>Your Meeting: Turn Taken</h1>\n' +
d3.select(document.body).node().innerHTML + '\n' +
'</body>'
var html = '<!DOCTYPE html>\n' +
'<meta charset="utf-8">\n' +
htmlStyle +
htmlBody
return html
}
function sendReport (visualization, addresses) {
winston.log('info', 'Sending report...')
// TODO change SMTP configurations
// define SMTP configurations
var smtpConfig = {
host: process.env.REPORT_EMAIL_HOST,
port: 465,
secure: true,
auth: {
user: process.env.REPORT_EMAIL_LOGIN,
pass: process.env.REPORT_EMAIL_PASSWORD
}
}
// create transporter object
var transporter = nodemailer.createTransport(smtpConfig)
// TODO change email data
// setup email data
var mailOptions = {
from: process.env.REPORT_EMAIL_FROM,
to: addresses,
subject: process.env.REPORT_EMAIL_SUBJECT,
text: process.env.REPORT_EMAIL_TEXT,
html: process.env.REPORT_EMAIL_TEXT,
attachments: {
filename: 'visualization.html',
content: visualization
}
}
// send email with transporter object
transporter.sendMail(mailOptions, function (error, info) {
if (error) {
return console.log('[sendReport] error: ' + error)
}
return console.log('Report was sent: ' + info.response)
})
}
function createMeetingEndEvent (hook) {
var meetingId = (hook.method === 'create') ? hook.data._id : hook.id
return hook.app.service('meetingEvents').create({
meeting: meetingId,
event: 'end',
timestamp: new Date()
}, {}).then((meetingEvent) => {
return hook
})
}
module.exports = function (hook) {
if (!_.has(hook.data, 'participants')) {
return hook
} else {
return hook.app.service('meetings').get(hook.id)
.then((meeting) => {
if (shouldMakeMeetingInactive(hook.data.participants, meeting)) {
hook.data.active = false
hook.data.endTime = new Date()
if (process.env.SEND_REPORT) {
reportMeeting(hook)
}
return createMeetingEndEvent(hook)
} else {
return hook
}
})
}
}
| Fixed hook.id bug
| src/services/meeting/hooks/deactivate-meeting-hook.js | Fixed hook.id bug | <ide><path>rc/services/meeting/hooks/deactivate-meeting-hook.js
<ide> function getReportData (hook, callback) {
<ide> winston.log('info', 'Getting report data...')
<ide> // find participant events (-> historical participants)
<add> var meetingId = hook.id
<add> if (hook.id === Object(hook.id)) {
<add> meetingId = hook.id._id
<add> }
<ide> return hook.app.service('participants').find({
<ide> query: {
<ide> $select: [ '_id', 'name', 'meetings' ]
<ide> }
<ide> }).then((participants) => {
<ide> var validParticipants = _.filter(participants.data, (participant) => {
<del> return _.contains(participant.meetings, hook.id)
<add> return _.contains(participant.meetings, meetingId)
<ide> })
<ide> // find utterances
<ide> hook.app.service('utterances').find({
<ide> query: {
<del> meeting: hook.id,
<add> meeting: meetingId,
<ide> $select: [ 'participant', 'meeting', 'startTime', 'endTime' ]
<ide> }
<ide> }).then((utterances) => { |
|
JavaScript | agpl-3.0 | 16b4f6cc6dd0292e38cdfc86e4128562a28c284d | 0 | onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi,kobotoolbox/kpi | import React from 'react';
import PropTypes from 'prop-types';
import Reflux from 'reflux';
import reactMixin from 'react-mixin';
import bem from '../bem';
import ui from '../ui';
import actions from '../actions';
import stores from '../stores';
import mixins from '../mixins';
import Select from 'react-select';
import autoBind from 'react-autobind';
import {
t,
notify
} from '../utils';
export class TableColumnFilter extends React.Component {
constructor(props){
super(props);
this.state = {
selectedColumns: [],
frozenColumn: false,
showGroupName: true,
showHXLTags: false,
translationIndex: 0
};
let _sett = props.asset.settings;
if (_sett['data-table']) {
if (_sett['data-table']['selected-columns'])
this.state.selectedColumns = _sett['data-table']['selected-columns'];
if (_sett['data-table']['frozen-column'])
this.state.frozenColumn = _sett['data-table']['frozen-column'];
if (_sett['data-table']['show-group-name'])
this.state.showGroupName = _sett['data-table']['show-group-name'];
if (_sett['data-table']['translation-index'])
this.state.translationIndex = _sett['data-table']['translation-index'];
if (_sett['data-table']['show-hxl-tags'])
this.state.showHXLTags = _sett['data-table']['show-hxl-tags'];
}
autoBind(this);
}
componentDidMount() {
this.listenTo(actions.table.updateSettings.failed, this.settingsUpdateFailed);
}
saveTableColumns() {
let s = this.state;
let settings = this.props.asset.settings;
if (!settings['data-table']) {
settings['data-table'] = {};
}
if (this.userCan('change_asset', this.props.asset)) {
settings['data-table']['selected-columns'] = s.selectedColumns.length > 0 ? s.selectedColumns : null;
settings['data-table']['frozen-column'] = s.frozenColumn;
settings['data-table']['show-group-name'] = s.showGroupName;
settings['data-table']['translation-index'] = s.translationIndex;
settings['data-table']['show-hxl-tags'] = s.showHXLTags;
actions.table.updateSettings(this.props.asset.uid, settings);
} else {
console.log('just update the state, since user cannot save settings');
let overrides = {
showGroupName: s.showGroupName,
translationIndex: s.translationIndex
}
this.props.overrideLabelsAndGroups(overrides);
}
}
toggleCheckboxChange(evt) {
let selectedColumns = this.state.selectedColumns,
id = evt.target.value,
idx = selectedColumns.indexOf(id);
if (idx !== -1) {
selectedColumns.splice(idx, 1);
} else {
selectedColumns.push(id);
}
this.setState({
selectedColumns: selectedColumns
})
}
setFrozenColumn(col) {
this.setState({
frozenColumn: col && col.value ? col.value : false
})
}
updateGroupHeaderDisplay(e) {
this.setState({
showGroupName: e.target.checked
})
}
onHXLTagsChange(evt) {
this.setState({
showHXLTags: evt.currentTarget.checked
})
}
onLabelChange(e) {
this.setState({
translationIndex: e.target.value
})
}
settingsUpdateFailed() {
notify(t('There was an error, table settings could not be saved.'));
}
resetTableSettings() {
let settings = this.props.asset.settings;
if (settings['data-table'])
delete settings['data-table'];
actions.table.updateSettings(this.props.asset.uid, settings);
}
listColumns() {
let stateOverrides = {
showGroupName: this.state.showGroupName,
translationIndex: this.state.translationIndex
}
let colsArray = this.props.columns.reduce((acc, col) => {
if (col.id && col.id !== '__SubmissionLinks' && col.id !== '__SubmissionCheckbox') {
let qParentGroup = [];
if (col.id.includes('/')) {
qParentGroup = col.id.split('/');
}
acc.push({
value: col.id,
label: this.props.getColumnLabel(col.id, col.question, qParentGroup, stateOverrides)
});
}
return acc;
}, []);
return colsArray;
}
render () {
let _this = this;
return (
<div className='tableColumn-modal'>
<bem.FormModal__item m='translation-radios'>
<bem.FormView__cell m='label'>
{t('Display labels or XML values?')}
</bem.FormView__cell>
<div>
<label htmlFor={'trnsl-xml'}>
<input type='radio' name='translation'
value='-1' id={'trnsl-xml'}
checked={this.state.translationIndex == '-1'}
onChange={this.onLabelChange} />
{t('XML Values')}
</label>
{
this.props.asset.content.translations.map((trns, n) => {
return (
<label htmlFor={`trnsl-${n}`} key={n}>
<input type='radio' name='translation'
value={n} id={`trnsl-${n}`}
checked={this.state.translationIndex == n}
onChange={this.onLabelChange} />
{t('Labels')} {trns ? ` - ${trns}` : null}
</label>
)
})
}
</div>
</bem.FormModal__item>
<bem.FormModal__item m='group-headings'>
<input
type='checkbox'
checked={this.state.showGroupName}
onChange={this.updateGroupHeaderDisplay}
id='check-group-headings'/>
<label htmlFor='check-group-headings'>
{t('Show group names in table headers')}
</label>
</bem.FormModal__item>
<bem.FormModal__item>
<input
type='checkbox'
checked={this.state.showHXLTags}
onChange={this.onHXLTagsChange}
id='hxl-tags'
/>
<label htmlFor='hxl-tags'>
{t('Show HXL tags in table headers')}
</label>
</bem.FormModal__item>
{this.userCan('change_asset', this.props.asset) &&
<bem.FormModal__item m='advanced-table-options'>
<bem.FormView__cell m='note'>
{t('Note: Only users with the "edit form" permission can see the following two options. If other users can view submissions on this project, their table view will be restricted by the choices made below.')}
</bem.FormView__cell>
<bem.FormModal__item>
<bem.FormView__cell m='label'>
{t('Set a frozen first column in the table.')}
</bem.FormView__cell>
<Select
value={this.state.frozenColumn}
options={this.listColumns()}
onChange={this.setFrozenColumn} />
</bem.FormModal__item>
<bem.FormModal__item>
<bem.FormView__cell m='label'>
{t('Restrict the visible columns in the table display')}
<span>{t('All columns are visible by default')}</span>
</bem.FormView__cell>
<ul>
{this.listColumns().map(function(col) {
return (
<li key={col.value}>
<input
type='checkbox'
value={col.value}
checked={_this.state.selectedColumns.includes(col.value)}
onChange={_this.toggleCheckboxChange}
id={`colcheck-${col.value}`}
/>
<label htmlFor={`colcheck-${col.value}`}>
{col.label}
</label>
</li>
);
})}
</ul>
</bem.FormModal__item>
</bem.FormModal__item>
}
<bem.Modal__footer>
{this.userCan('change_asset', this.props.asset) &&
<bem.Modal__footerButton m='secondary' onClick={this.resetTableSettings}>
{t('Reset')}
</bem.Modal__footerButton>
}
<bem.Modal__footerButton m='primary' onClick={this.saveTableColumns}>
{t('Save')}
</bem.Modal__footerButton>
</bem.Modal__footer>
</div>
)
}
}
reactMixin(TableColumnFilter.prototype, Reflux.ListenerMixin);
reactMixin(TableColumnFilter.prototype, mixins.permissions);
export default TableColumnFilter;
| jsapp/js/components/tableColumnFilter.es6 | import React from 'react';
import PropTypes from 'prop-types';
import Reflux from 'reflux';
import reactMixin from 'react-mixin';
import bem from '../bem';
import ui from '../ui';
import actions from '../actions';
import stores from '../stores';
import mixins from '../mixins';
import Select from 'react-select';
import autoBind from 'react-autobind';
import {
t,
notify
} from '../utils';
export class TableColumnFilter extends React.Component {
constructor(props){
super(props);
this.state = {
selectedColumns: [],
frozenColumn: false,
showGroupName: true,
translationIndex: 0
};
let _sett = props.asset.settings;
if (_sett['data-table']) {
if (_sett['data-table']['selected-columns'])
this.state.selectedColumns = _sett['data-table']['selected-columns'];
if (_sett['data-table']['frozen-column'])
this.state.frozenColumn = _sett['data-table']['frozen-column'];
if (_sett['data-table']['show-group-name'])
this.state.showGroupName = _sett['data-table']['show-group-name'];
if (_sett['data-table']['translation-index'])
this.state.translationIndex = _sett['data-table']['translation-index'];
}
autoBind(this);
}
componentDidMount() {
this.listenTo(actions.table.updateSettings.failed, this.settingsUpdateFailed);
}
saveTableColumns() {
let s = this.state;
let settings = this.props.asset.settings;
if (!settings['data-table']) {
settings['data-table'] = {};
}
if (this.userCan('change_asset', this.props.asset)) {
settings['data-table']['selected-columns'] = s.selectedColumns.length > 0 ? s.selectedColumns : null;
settings['data-table']['frozen-column'] = s.frozenColumn;
settings['data-table']['show-group-name'] = s.showGroupName;
settings['data-table']['translation-index'] = s.translationIndex;
actions.table.updateSettings(this.props.asset.uid, settings);
} else {
console.log('just update the state, since user cannot save settings');
let overrides = {
showGroupName: s.showGroupName,
translationIndex: s.translationIndex
}
this.props.overrideLabelsAndGroups(overrides);
}
}
toggleCheckboxChange(evt) {
let selectedColumns = this.state.selectedColumns,
id = evt.target.value,
idx = selectedColumns.indexOf(id);
if (idx !== -1) {
selectedColumns.splice(idx, 1);
} else {
selectedColumns.push(id);
}
this.setState({
selectedColumns: selectedColumns
})
}
setFrozenColumn(col) {
this.setState({
frozenColumn: col && col.value ? col.value : false
})
}
updateGroupHeaderDisplay(e) {
this.setState({
showGroupName: e.target.checked
})
}
onLabelChange(e) {
this.setState({
translationIndex: e.target.value
})
}
settingsUpdateFailed() {
notify(t('There was an error, table settings could not be saved.'));
}
resetTableSettings() {
let settings = this.props.asset.settings;
if (settings['data-table'])
delete settings['data-table'];
actions.table.updateSettings(this.props.asset.uid, settings);
}
listColumns() {
let stateOverrides = {
showGroupName: this.state.showGroupName,
translationIndex: this.state.translationIndex
}
let colsArray = this.props.columns.reduce((acc, col) => {
if (col.id && col.id !== '__SubmissionLinks' && col.id !== '__SubmissionCheckbox') {
let qParentGroup = [];
if (col.id.includes('/')) {
qParentGroup = col.id.split('/');
}
acc.push({
value: col.id,
label: this.props.getColumnLabel(col.id, col.question, qParentGroup, stateOverrides)
});
}
return acc;
}, []);
return colsArray;
}
render () {
let _this = this;
return (
<div className='tableColumn-modal'>
<bem.FormModal__item m='translation-radios'>
<bem.FormView__cell m='label'>
{t('Display labels or XML values?')}
</bem.FormView__cell>
<div>
<label htmlFor={'trnsl-xml'}>
<input type='radio' name='translation'
value='-1' id={'trnsl-xml'}
checked={this.state.translationIndex == '-1'}
onChange={this.onLabelChange} />
{t('XML Values')}
</label>
{
this.props.asset.content.translations.map((trns, n) => {
return (
<label htmlFor={`trnsl-${n}`} key={n}>
<input type='radio' name='translation'
value={n} id={`trnsl-${n}`}
checked={this.state.translationIndex == n}
onChange={this.onLabelChange} />
{t('Labels')} {trns ? ` - ${trns}` : null}
</label>
)
})
}
</div>
</bem.FormModal__item>
<bem.FormModal__item m='group-headings'>
<input
type='checkbox'
checked={this.state.showGroupName}
onChange={this.updateGroupHeaderDisplay}
id='check-group-headings'/>
<label htmlFor='check-group-headings'>
{t('Show group names in table headers')}
</label>
</bem.FormModal__item>
{this.userCan('change_asset', this.props.asset) &&
<bem.FormModal__item m='advanced-table-options'>
<bem.FormView__cell m='note'>
{t('Note: Only users with the "edit form" permission can see the following two options. If other users can view submissions on this project, their table view will be restricted by the choices made below.')}
</bem.FormView__cell>
<bem.FormModal__item>
<bem.FormView__cell m='label'>
{t('Set a frozen first column in the table.')}
</bem.FormView__cell>
<Select
value={this.state.frozenColumn}
options={this.listColumns()}
onChange={this.setFrozenColumn} />
</bem.FormModal__item>
<bem.FormModal__item>
<bem.FormView__cell m='label'>
{t('Restrict the visible columns in the table display')}
<span>{t('All columns are visible by default')}</span>
</bem.FormView__cell>
<ul>
{this.listColumns().map(function(col) {
return (
<li key={col.value}>
<input
type='checkbox'
value={col.value}
checked={_this.state.selectedColumns.includes(col.value)}
onChange={_this.toggleCheckboxChange}
id={`colcheck-${col.value}`}
/>
<label htmlFor={`colcheck-${col.value}`}>
{col.label}
</label>
</li>
);
})}
</ul>
</bem.FormModal__item>
</bem.FormModal__item>
}
<bem.Modal__footer>
{this.userCan('change_asset', this.props.asset) &&
<bem.Modal__footerButton m='secondary' onClick={this.resetTableSettings}>
{t('Reset')}
</bem.Modal__footerButton>
}
<bem.Modal__footerButton m='primary' onClick={this.saveTableColumns}>
{t('Save')}
</bem.Modal__footerButton>
</bem.Modal__footer>
</div>
)
}
}
reactMixin(TableColumnFilter.prototype, Reflux.ListenerMixin);
reactMixin(TableColumnFilter.prototype, mixins.permissions);
export default TableColumnFilter;
| add checkbox for hxl tags
| jsapp/js/components/tableColumnFilter.es6 | add checkbox for hxl tags | <ide><path>sapp/js/components/tableColumnFilter.es6
<ide> selectedColumns: [],
<ide> frozenColumn: false,
<ide> showGroupName: true,
<add> showHXLTags: false,
<ide> translationIndex: 0
<ide> };
<ide>
<ide> this.state.showGroupName = _sett['data-table']['show-group-name'];
<ide> if (_sett['data-table']['translation-index'])
<ide> this.state.translationIndex = _sett['data-table']['translation-index'];
<add> if (_sett['data-table']['show-hxl-tags'])
<add> this.state.showHXLTags = _sett['data-table']['show-hxl-tags'];
<ide> }
<ide>
<ide> autoBind(this);
<ide> settings['data-table']['frozen-column'] = s.frozenColumn;
<ide> settings['data-table']['show-group-name'] = s.showGroupName;
<ide> settings['data-table']['translation-index'] = s.translationIndex;
<add> settings['data-table']['show-hxl-tags'] = s.showHXLTags;
<ide>
<ide> actions.table.updateSettings(this.props.asset.uid, settings);
<ide> } else {
<ide> updateGroupHeaderDisplay(e) {
<ide> this.setState({
<ide> showGroupName: e.target.checked
<add> })
<add> }
<add> onHXLTagsChange(evt) {
<add> this.setState({
<add> showHXLTags: evt.currentTarget.checked
<ide> })
<ide> }
<ide> onLabelChange(e) {
<ide> {t('Show group names in table headers')}
<ide> </label>
<ide> </bem.FormModal__item>
<add>
<add> <bem.FormModal__item>
<add> <input
<add> type='checkbox'
<add> checked={this.state.showHXLTags}
<add> onChange={this.onHXLTagsChange}
<add> id='hxl-tags'
<add> />
<add> <label htmlFor='hxl-tags'>
<add> {t('Show HXL tags in table headers')}
<add> </label>
<add> </bem.FormModal__item>
<add>
<ide> {this.userCan('change_asset', this.props.asset) &&
<ide> <bem.FormModal__item m='advanced-table-options'>
<ide> <bem.FormView__cell m='note'> |
|
Java | bsd-3-clause | 725aea1b34d35afc5f4aa9562b58d2d58b0710f1 | 0 | rouault/GeoGit,annacarol/GeoGig,rouault/GeoGit,markles/GeoGit,boundlessgeo/GeoGig,markles/GeoGit,markles/GeoGit,markles/GeoGit,rouault/GeoGit,annacarol/GeoGig,boundlessgeo/GeoGig,rouault/GeoGit,boundlessgeo/GeoGig,annacarol/GeoGig,markles/GeoGit | /* Copyright (c) 2011 TOPP - www.openplans.org. All rights reserved.
* This code is licensed under the LGPL 2.1 license, available at the root
* application directory.
*/
package org.geogit.geotools.data;
import static com.google.common.base.Predicates.alwaysTrue;
import static com.google.common.base.Predicates.and;
import static com.google.common.base.Predicates.notNull;
import static com.google.common.collect.Iterators.filter;
import static com.google.common.collect.Iterators.transform;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import org.geogit.api.Bounded;
import org.geogit.api.Bucket;
import org.geogit.api.CommandLocator;
import org.geogit.api.FeatureBuilder;
import org.geogit.api.Node;
import org.geogit.api.NodeRef;
import org.geogit.api.Ref;
import org.geogit.api.RevFeature;
import org.geogit.api.RevObject;
import org.geogit.api.RevTree;
import org.geogit.api.plumbing.FindTreeChild;
import org.geogit.api.plumbing.LsTreeOp;
import org.geogit.api.plumbing.LsTreeOp.Strategy;
import org.geogit.api.plumbing.RevObjectParse;
import org.geogit.storage.NodePathStorageOrder;
import org.geotools.data.FeatureReader;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.filter.spatial.ReprojectingFilterVisitor;
import org.geotools.filter.visitor.SpatialFilterVisitor;
import org.geotools.util.logging.Logging;
import org.opengis.feature.Feature;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.FeatureType;
import org.opengis.filter.Filter;
import org.opengis.filter.FilterFactory2;
import org.opengis.filter.Id;
import org.opengis.filter.identity.FeatureId;
import org.opengis.filter.identity.Identifier;
import org.opengis.filter.spatial.BBOX;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;
import com.vividsolutions.jts.geom.Envelope;
/**
*
*/
class GeogitFeatureReader<T extends FeatureType, F extends Feature> implements FeatureReader<T, F>,
Iterator<F> {
private static final Logger LOGGER = Logging.getLogger(GeogitFeatureReader.class);
private SimpleFeatureType schema;
private Stats stats;
private Iterator<SimpleFeature> features;
@Nullable
private Integer offset;
@Nullable
private Integer maxFeatures;
private static class Stats implements Predicate<Bounded> {
public int featureHits, featureMisses, treeHits, treeMisses, bucketHits, bucketMisses;
private Envelope bounds;
public Stats(Envelope bounds) {
this.bounds = bounds;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("Hits/misses:\n");
sb.append("Trees: ").append(treeHits).append("/").append(treeMisses).append("\n");
sb.append("Features: ").append(featureHits).append("/").append(featureMisses)
.append("\n");
sb.append("Buckets: ").append(bucketHits).append("/").append(bucketMisses).append("\n");
return sb.toString();
}
@Override
public boolean apply(Bounded bounded) {
boolean intersects = bounds.isNull() ? true : bounded.intersects(bounds);
if (bounded instanceof Bucket) {
// {
// Envelope e = new Envelope();
// bounded.expand(e);
// stats.geoms.add(JTS.toGeometry(e));
// }
if (intersects)
bucketHits++;
else
bucketMisses++;
} else {
Node node;
if (bounded instanceof NodeRef) {
node = ((NodeRef) bounded).getNode();
} else {
node = (Node) bounded;
}
if (node.getType().equals(RevObject.TYPE.TREE)) {
if (intersects)
treeHits++;
else
treeMisses++;
} else {
if (intersects)
featureHits++;
else
featureMisses++;
}
}
return true;
}
}
/**
* @param commandLocator
* @param schema
* @param maxFeatures
* @param offset
* @param typeTree
* @param filter
* @param queryBounds
*/
public GeogitFeatureReader(final CommandLocator commandLocator, final SimpleFeatureType schema,
final Filter origFilter, final String typeTreePath, @Nullable final String headRef,
@Nullable Integer offset, @Nullable Integer maxFeatures) {
this.schema = schema;
this.offset = offset;
this.maxFeatures = maxFeatures;
final String branchRef = headRef == null ? Ref.WORK_HEAD : headRef;
final String typeTreeRefSpec = branchRef + ":" + typeTreePath;
final Optional<RevTree> parentTree = commandLocator.command(RevObjectParse.class)
.setRefSpec(typeTreeRefSpec).call(RevTree.class);
Preconditions.checkArgument(parentTree.isPresent(), "Feature type tree not found: %s",
typeTreeRefSpec);
final Filter filter = reprojectFilter(origFilter);
final Envelope queryBounds = getQueryBounds(filter);
Predicate<Bounded> refBoundsFilter = alwaysTrue();
if (!queryBounds.isNull()) {
refBoundsFilter = new Predicate<Bounded>() {
private final Envelope env = queryBounds;
@Override
public boolean apply(final Bounded bounded) {
boolean intersects = bounded.intersects(env);
return intersects;
}
};
this.stats = new Stats(queryBounds);
refBoundsFilter = and(stats, refBoundsFilter);
}
Iterator<NodeRef> featureRefs;
if (filter instanceof Id) {
final Function<FeatureId, NodeRef> idToRef;
idToRef = new FindFeatureRefFunction(commandLocator, parentTree.get());
Iterator<FeatureId> featureIds = getSortedFidsInNaturalOrder((Id) filter);
featureRefs = filter(transform(featureIds, idToRef), notNull());
} else {
featureRefs = commandLocator.command(LsTreeOp.class)
.setStrategy(Strategy.FEATURES_ONLY).setReference(typeTreeRefSpec)
.setBoundsFilter(refBoundsFilter).call();
}
final boolean filterSupportedByRefs = Filter.INCLUDE.equals(filter)
|| filter instanceof BBOX;
if (filterSupportedByRefs) {
featureRefs = applyRefsOffsetLimit(featureRefs);
}
NodeRefToFeature refToFeature = new NodeRefToFeature(commandLocator, schema);
final Iterator<SimpleFeature> featuresUnfiltered = transform(featureRefs, refToFeature);
FilterPredicate filterPredicate = new FilterPredicate(filter);
Iterator<SimpleFeature> featuresFiltered = filter(featuresUnfiltered, filterPredicate);
if (!filterSupportedByRefs) {
featuresFiltered = applyFeaturesOffsetLimit(featuresFiltered);
}
this.features = featuresFiltered;
}
@SuppressWarnings("unchecked")
@Override
public T getFeatureType() {
return (T) schema;
}
@Override
public void close() throws IOException {
if (stats != null) {
LOGGER.info("geogit reader stats: " + stats.toString());
}
}
@Override
public boolean hasNext() {
return features.hasNext();
}
@SuppressWarnings("unchecked")
@Override
public F next() {
return (F) features.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
private Iterator<SimpleFeature> applyFeaturesOffsetLimit(Iterator<SimpleFeature> features) {
if (offset != null) {
Iterators.advance(features, offset.intValue());
}
if (maxFeatures != null) {
features = Iterators.limit(features, maxFeatures.intValue());
}
return features;
}
private Iterator<NodeRef> applyRefsOffsetLimit(Iterator<NodeRef> featureRefs) {
if (offset != null) {
Iterators.advance(featureRefs, offset.intValue());
}
if (maxFeatures != null) {
featureRefs = Iterators.limit(featureRefs, maxFeatures.intValue());
}
return featureRefs;
}
private Iterator<FeatureId> getSortedFidsInNaturalOrder(Id filter) {
final Set<Identifier> identifiers = filter.getIdentifiers();
Iterator<FeatureId> featureIds = filter(filter(identifiers.iterator(), FeatureId.class),
notNull());
// used for the returned featrures to be in "natural" order
final Comparator<String> requestOrderMatchingStorageOrder = new NodePathStorageOrder();
Comparator<FeatureId> requestOrder = new Comparator<FeatureId>() {
@Override
public int compare(FeatureId o1, FeatureId o2) {
return requestOrderMatchingStorageOrder.compare(o1.getID(), o2.getID());
}
};
TreeSet<FeatureId> sortedFids = Sets.newTreeSet(requestOrder);
sortedFids.addAll(ImmutableList.copyOf(featureIds));
return sortedFids.iterator();
}
private static class FindFeatureRefFunction implements Function<FeatureId, NodeRef> {
private FindTreeChild command;
private RevTree parentTree;
public FindFeatureRefFunction(CommandLocator commandLocator, RevTree featureTypeTree) {
this.parentTree = featureTypeTree;
this.command = commandLocator.command(FindTreeChild.class);
}
@Override
@Nullable
public NodeRef apply(final FeatureId fid) {
final String featureName = fid.getID();
Optional<NodeRef> featureRef = command.setParent(parentTree).setChildPath(featureName)
.setIndex(true).call();
return featureRef.orNull();
}
};
private static class NodeRefToFeature implements Function<NodeRef, SimpleFeature> {
private RevObjectParse parseRevFeatureCommand;
private FeatureBuilder featureBuilder;
public NodeRefToFeature(CommandLocator commandLocator, SimpleFeatureType schema) {
this.featureBuilder = new FeatureBuilder(schema);
this.parseRevFeatureCommand = commandLocator.command(RevObjectParse.class);
}
@Override
public SimpleFeature apply(final NodeRef featureRef) {
Optional<RevFeature> revFeature = parseRevFeatureCommand.setObjectId(
featureRef.objectId()).call(RevFeature.class);
Preconditions.checkState(revFeature.isPresent());
String id = featureRef.name();
Feature feature = featureBuilder.build(id, revFeature.get());
return (SimpleFeature) feature;
}
};
private static final class FilterPredicate implements Predicate<SimpleFeature> {
private Filter filter;
public FilterPredicate(final Filter filter) {
this.filter = filter;
}
@Override
public boolean apply(SimpleFeature feature) {
return filter.evaluate(feature);
}
}
private Envelope getQueryBounds(Filter filter) {
final Envelope queryBounds = new Envelope();
Envelope bounds = (Envelope) filter.accept(new ExtractBounds(), queryBounds);
if (bounds != null) {
queryBounds.expandToInclude(bounds);
}
return queryBounds;
}
/**
* @param filter
* @return
*/
private Filter reprojectFilter(Filter filter) {
if (hasSpatialFilter(filter)) {
CoordinateReferenceSystem crs = schema.getCoordinateReferenceSystem();
if (crs == null) {
LOGGER.fine("Not reprojecting filter to native CRS because feature type does not declare a CRS");
} else {
FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();
filter = (Filter) filter.accept(new ReprojectingFilterVisitor(factory, schema),
null);
}
}
return filter;
}
private boolean hasSpatialFilter(Filter filter) {
SpatialFilterVisitor spatialFilterVisitor = new SpatialFilterVisitor();
filter.accept(spatialFilterVisitor, null);
return spatialFilterVisitor.hasSpatialFilter();
}
}
| src/geotools/src/main/java/org/geogit/geotools/data/GeogitFeatureReader.java | /* Copyright (c) 2011 TOPP - www.openplans.org. All rights reserved.
* This code is licensed under the LGPL 2.1 license, available at the root
* application directory.
*/
package org.geogit.geotools.data;
import static com.google.common.base.Predicates.alwaysTrue;
import static com.google.common.base.Predicates.and;
import static com.google.common.base.Predicates.notNull;
import static com.google.common.collect.Iterators.filter;
import static com.google.common.collect.Iterators.transform;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import org.geogit.api.Bounded;
import org.geogit.api.Bucket;
import org.geogit.api.CommandLocator;
import org.geogit.api.FeatureBuilder;
import org.geogit.api.Node;
import org.geogit.api.NodeRef;
import org.geogit.api.Ref;
import org.geogit.api.RevFeature;
import org.geogit.api.RevObject;
import org.geogit.api.RevTree;
import org.geogit.api.plumbing.FindTreeChild;
import org.geogit.api.plumbing.LsTreeOp;
import org.geogit.api.plumbing.LsTreeOp.Strategy;
import org.geogit.api.plumbing.RevObjectParse;
import org.geogit.storage.NodePathStorageOrder;
import org.geotools.data.FeatureReader;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.filter.spatial.ReprojectingFilterVisitor;
import org.geotools.filter.visitor.SpatialFilterVisitor;
import org.geotools.util.logging.Logging;
import org.opengis.feature.Feature;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.FeatureType;
import org.opengis.filter.Filter;
import org.opengis.filter.FilterFactory2;
import org.opengis.filter.Id;
import org.opengis.filter.identity.FeatureId;
import org.opengis.filter.identity.Identifier;
import org.opengis.filter.spatial.BBOX;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;
import com.vividsolutions.jts.geom.Envelope;
/**
*
*/
class GeogitFeatureReader<T extends FeatureType, F extends Feature> implements FeatureReader<T, F>,
Iterator<F> {
private static final Logger LOGGER = Logging.getLogger(GeogitFeatureReader.class);
private SimpleFeatureType schema;
private Stats stats;
private Iterator<SimpleFeature> features;
@Nullable
private Integer offset;
@Nullable
private Integer maxFeatures;
private static class Stats implements Predicate<Bounded> {
public int featureHits, featureMisses, treeHits, treeMisses, bucketHits, bucketMisses;
private Envelope bounds;
public Stats(Envelope bounds) {
this.bounds = bounds;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("Hits/misses:\n");
sb.append("Trees: ").append(treeHits).append("/").append(treeMisses).append("\n");
sb.append("Features: ").append(featureHits).append("/").append(featureMisses)
.append("\n");
sb.append("Buckets: ").append(bucketHits).append("/").append(bucketMisses).append("\n");
return sb.toString();
}
@Override
public boolean apply(Bounded bounded) {
boolean intersects = bounds.isNull() ? true : bounded.intersects(bounds);
if (bounded instanceof Bucket) {
// {
// Envelope e = new Envelope();
// bounded.expand(e);
// stats.geoms.add(JTS.toGeometry(e));
// }
if (intersects)
bucketHits++;
else
bucketMisses++;
} else {
Node node;
if (bounded instanceof NodeRef) {
node = ((NodeRef) bounded).getNode();
} else {
node = (Node) bounded;
}
if (node.getType().equals(RevObject.TYPE.TREE)) {
if (intersects)
treeHits++;
else
treeMisses++;
} else {
if (intersects)
featureHits++;
else
featureMisses++;
}
}
return true;
}
}
/**
* @param commandLocator
* @param schema
* @param maxFeatures
* @param offset
* @param typeTree
* @param filter
* @param queryBounds
*/
public GeogitFeatureReader(final CommandLocator commandLocator, final SimpleFeatureType schema,
final Filter origFilter, final String typeTreePath, @Nullable final String headRef,
@Nullable Integer offset, @Nullable Integer maxFeatures) {
this.schema = schema;
this.offset = offset;
this.maxFeatures = maxFeatures;
final String branchRef = headRef == null ? Ref.WORK_HEAD : headRef;
final String typeTreeRefSpec = branchRef + ":" + typeTreePath;
final Optional<RevTree> parentTree = commandLocator.command(RevObjectParse.class)
.setRefSpec(typeTreeRefSpec).call(RevTree.class);
Preconditions.checkArgument(parentTree.isPresent(), "Feature type tree not found: %s",
typeTreeRefSpec);
final Filter filter = reprojectFilter(origFilter);
final Envelope queryBounds = getQueryBounds(filter);
Predicate<Bounded> refBoundsFilter = alwaysTrue();
if (!queryBounds.isNull()) {
refBoundsFilter = new Predicate<Bounded>() {
private final Envelope env = queryBounds;
@Override
public boolean apply(final Bounded bounded) {
boolean intersects = bounded.intersects(env);
return intersects;
}
};
this.stats = new Stats(queryBounds);
refBoundsFilter = and(stats, refBoundsFilter);
}
Iterator<NodeRef> featureRefs;
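        // an Id filter is answered by direct tree lookups; any other filter walks the type tree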
if (filter instanceof Id) {
final Function<FeatureId, NodeRef> idToRef;
idToRef = new FindFeatureRefFunction(commandLocator, parentTree.get());
Iterator<FeatureId> featureIds = getSortedFidsInNaturalOrder((Id) filter);
featureRefs = filter(transform(featureIds, idToRef), notNull());
} else {
featureRefs = commandLocator.command(LsTreeOp.class)
.setStrategy(Strategy.FEATURES_ONLY).setReference(typeTreeRefSpec)
.setBoundsFilter(refBoundsFilter).call();
}
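        // offset and limit can be applied at the ref level only if the filter is fully evaluated
        // by the ref bounds pruning (i.e., no filtering remains to be done on the actual features)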
final boolean filterSupportedByRefs = Filter.INCLUDE.equals(filter)
|| filter instanceof BBOX;
if (filterSupportedByRefs) {
featureRefs = applyRefsOffsetLimit(featureRefs);
}
NodeRefToFeature refToFeature = new NodeRefToFeature(commandLocator, schema);
final Iterator<SimpleFeature> featuresUnfiltered = transform(featureRefs, refToFeature);
FilterPredicate filterPredicate = new FilterPredicate(filter);
Iterator<SimpleFeature> featuresFiltered = filter(featuresUnfiltered, filterPredicate);
if (!filterSupportedByRefs) {
featuresFiltered = applyFeaturesOffsetLimit(featuresFiltered);
}
this.features = featuresFiltered;
}
@SuppressWarnings("unchecked")
@Override
public T getFeatureType() {
return (T) schema;
}
@Override
public void close() throws IOException {
if (stats != null) {
LOGGER.info("geogit reader stats: " + stats.toString());
}
}
@Override
public boolean hasNext() {
return features.hasNext();
}
@SuppressWarnings("unchecked")
@Override
public F next() {
return (F) features.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
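    /**
     * Applies the requested offset and limit, if any, to the iterator of filtered features.
     */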
private Iterator<SimpleFeature> applyFeaturesOffsetLimit(Iterator<SimpleFeature> features) {
if (offset != null) {
Iterators.advance(features, offset.intValue());
}
if (maxFeatures != null) {
features = Iterators.limit(features, maxFeatures.intValue());
}
return features;
}
private Iterator<NodeRef> applyRefsOffsetLimit(Iterator<NodeRef> featureRefs) {
if (offset != null) {
Iterators.advance(featureRefs, offset.intValue());
}
if (maxFeatures != null) {
featureRefs = Iterators.limit(featureRefs, maxFeatures.intValue());
}
return featureRefs;
}
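    /**
     * Returns the feature ids requested by the given {@link Id} filter, sorted in storage order.
     */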
private Iterator<FeatureId> getSortedFidsInNaturalOrder(Id filter) {
final Set<Identifier> identifiers = filter.getIdentifiers();
Iterator<FeatureId> featureIds = filter(filter(identifiers.iterator(), FeatureId.class),
notNull());
        // sort the requested fids so that the returned features are in "natural" (storage) order
final Comparator<String> requestOrderMatchingStorageOrder = new NodePathStorageOrder();
Comparator<FeatureId> requestOrder = new Comparator<FeatureId>() {
@Override
public int compare(FeatureId o1, FeatureId o2) {
return requestOrderMatchingStorageOrder.compare(o1.getID(), o2.getID());
}
};
TreeSet<FeatureId> sortedFids = Sets.newTreeSet(requestOrder);
sortedFids.addAll(ImmutableList.copyOf(featureIds));
return sortedFids.iterator();
}
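    /**
     * Resolves a {@link FeatureId} to the matching {@link NodeRef} in the parent type tree,
     * returning {@code null} when no such feature exists.
     */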
private static class FindFeatureRefFunction implements Function<FeatureId, NodeRef> {
private FindTreeChild command;
private RevTree parentTree;
public FindFeatureRefFunction(CommandLocator commandLocator, RevTree featureTypeTree) {
this.parentTree = featureTypeTree;
this.command = commandLocator.command(FindTreeChild.class);
}
@Override
@Nullable
public NodeRef apply(final FeatureId fid) {
final String featureName = fid.getID();
Optional<NodeRef> featureRef = command.setParent(parentTree).setChildPath(featureName)
.setIndex(true).call();
return featureRef.orNull();
}
    }
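    /**
     * Parses the {@link RevFeature} a {@link NodeRef} points to and builds the corresponding
     * {@link SimpleFeature}.
     */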
private static class NodeRefToFeature implements Function<NodeRef, SimpleFeature> {
private RevObjectParse parseRevFeatureCommand;
private FeatureBuilder featureBuilder;
public NodeRefToFeature(CommandLocator commandLocator, SimpleFeatureType schema) {
this.featureBuilder = new FeatureBuilder(schema);
this.parseRevFeatureCommand = commandLocator.command(RevObjectParse.class);
}
@Override
public SimpleFeature apply(final NodeRef featureRef) {
Optional<RevFeature> revFeature = parseRevFeatureCommand.setObjectId(
featureRef.objectId()).call(RevFeature.class);
Preconditions.checkState(revFeature.isPresent());
String id = featureRef.name();
Feature feature = featureBuilder.build(id, revFeature.get());
return (SimpleFeature) feature;
}
    }
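    /**
     * Adapts an OGC {@link Filter} to a Guava {@link Predicate} over simple features.
     */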
private static final class FilterPredicate implements Predicate<SimpleFeature> {
private Filter filter;
public FilterPredicate(final Filter filter) {
this.filter = filter;
}
@Override
public boolean apply(SimpleFeature feature) {
return filter.evaluate(feature);
}
}
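    /**
     * Extracts the envelope of the spatial component of the given filter; the returned envelope
     * is empty ("null" in JTS terms) when the filter imposes no spatial constraint.
     */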
private Envelope getQueryBounds(Filter filter) {
final Envelope queryBounds = new Envelope();
Envelope bounds = (Envelope) filter.accept(new ExtractBounds(), queryBounds);
if (bounds != null) {
queryBounds.expandToInclude(bounds);
}
return queryBounds;
}
    /**
     * @param filter the filter to reproject to the native CRS of the feature type
     * @return the reprojected filter, or the same filter if it has no spatial component
     */
private Filter reprojectFilter(Filter filter) {
if (hasSpatialFilter(filter)) {
FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();
filter = (Filter) filter.accept(new ReprojectingFilterVisitor(factory, schema), null);
}
return filter;
}
private boolean hasSpatialFilter(Filter filter) {
SpatialFilterVisitor spatialFilterVisitor = new SpatialFilterVisitor();
filter.accept(spatialFilterVisitor, null);
return spatialFilterVisitor.hasSpatialFilter();
}
}
| Fixed an issue with NULL CRS.
Author: groldan
| src/geotools/src/main/java/org/geogit/geotools/data/GeogitFeatureReader.java | Fixed an issue with NULL CRS. | <ide><path>rc/geotools/src/main/java/org/geogit/geotools/data/GeogitFeatureReader.java
<ide> import org.opengis.filter.identity.FeatureId;
<ide> import org.opengis.filter.identity.Identifier;
<ide> import org.opengis.filter.spatial.BBOX;
<add>import org.opengis.referencing.crs.CoordinateReferenceSystem;
<ide>
<ide> import com.google.common.base.Function;
<ide> import com.google.common.base.Optional;
<ide> */
<ide> private Filter reprojectFilter(Filter filter) {
<ide> if (hasSpatialFilter(filter)) {
<del> FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();
<del> filter = (Filter) filter.accept(new ReprojectingFilterVisitor(factory, schema), null);
<add> CoordinateReferenceSystem crs = schema.getCoordinateReferenceSystem();
<add> if (crs == null) {
<add> LOGGER.fine("Not reprojecting filter to native CRS because feature type does not declare a CRS");
<add>
<add> } else {
<add>
<add> FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();
<add>
<add> filter = (Filter) filter.accept(new ReprojectingFilterVisitor(factory, schema),
<add>
<add> null);
<add>
<add> }
<ide> }
<ide> return filter;
<ide> } |
|
Java | agpl-3.0 | 3f9c4a410a934946391129499a56597aa5270c5d | 0 | PReimers/sportchef,McPringle/sportchef,McPringle/sportchef,PReimers/sportchef | /*
* SportChef – Sports Competition Management Software
* Copyright (C) 2016 Marcus Fihlon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ch.sportchef.business;
import lombok.experimental.UtilityClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.setblack.airomem.core.SimpleController;
import java.io.Serializable;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.function.Supplier;
@UtilityClass
public class PersistenceManager {
private static final String SPORTCHEF_DIRECTORY_NAME = ".sportchef"; //NON-NLS
private static final String PREVAYLER_DIRECTORY_NAME = "prevayler"; //NON-NLS
private static final Path DATA_DIRECTORY;
private static final Logger LOGGER = LoggerFactory.getLogger(PersistenceManager.class);
static {
DATA_DIRECTORY = Paths.get(SPORTCHEF_DIRECTORY_NAME, PREVAYLER_DIRECTORY_NAME);
}
public static <T extends Serializable> SimpleController<T> createSimpleController(
final Class<? extends Serializable> clazz, final Supplier<T> constructor) {
final String dir = DATA_DIRECTORY.resolve(clazz.getName()).toString();
LOGGER.info("Using persistence store '{}' for entity '{}'.",
Paths.get(System.getProperty("user.home"), dir).toAbsolutePath(),
clazz.getName());
return SimpleController.loadOptional(dir, constructor);
}
}
| src/main/java/ch/sportchef/business/PersistenceManager.java | /*
* SportChef – Sports Competition Management Software
* Copyright (C) 2016 Marcus Fihlon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ch.sportchef.business;
import lombok.experimental.UtilityClass;
import org.jetbrains.annotations.NonNls;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.setblack.airomem.core.SimpleController;
import java.io.Serializable;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.function.Supplier;
@UtilityClass
public class PersistenceManager {
private static final String SPORTCHEF_DIRECTORY_NAME = ".sportchef"; //NON-NLS
private static final String PREVAYLER_DIRECTORY_NAME = "prevayler"; //NON-NLS
private static final Path DATA_DIRECTORY;
@NonNls
private static final Logger LOGGER = LoggerFactory.getLogger(PersistenceManager.class);
static {
DATA_DIRECTORY = Paths.get(SPORTCHEF_DIRECTORY_NAME, PREVAYLER_DIRECTORY_NAME);
}
public static <T extends Serializable> SimpleController<T> createSimpleController(
final Class<? extends Serializable> clazz, final Supplier<T> constructor) {
final String dir = DATA_DIRECTORY.resolve(clazz.getName()).toString();
LOGGER.info("Using persistence store '{}' for entity '{}'.",
Paths.get(System.getProperty("user.home"), dir).toAbsolutePath(),
clazz.getName());
return SimpleController.loadOptional(dir, constructor);
}
}
| #270 Modified persistence stuff to work without Java EE dependencies
| src/main/java/ch/sportchef/business/PersistenceManager.java | #270 Modified persistence stuff to work without Java EE dependencies | <ide><path>rc/main/java/ch/sportchef/business/PersistenceManager.java
<ide> package ch.sportchef.business;
<ide>
<ide> import lombok.experimental.UtilityClass;
<del>import org.jetbrains.annotations.NonNls;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide> import pl.setblack.airomem.core.SimpleController;
<ide>
<ide> private static final Path DATA_DIRECTORY;
<ide>
<del> @NonNls
<ide> private static final Logger LOGGER = LoggerFactory.getLogger(PersistenceManager.class);
<ide>
<ide> static { |
|
JavaScript | agpl-3.0 | 0b4185d100014737426c97f3d07915c1234d5e30 | 0 | abetusk/bostontraintrack,abetusk/bostontraintrack,abetusk/bostontraintrack,abetusk/bostontraintrack,abetusk/bostontraintrack,abetusk/bostontraintrack,abetusk/bostontraintrack,abetusk/bostontraintrack | /*
Copyright (C) 2013 Abram Connelly
This file is part of bostontraintrack.
bostontraintrack is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
bostontraintrack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with bostontraintrack. If not, see <http://www.gnu.org/licenses/>.
*/
var http = require("http");
var sockio = require('socket.io')();
var xmlparse = require("xml2js").parseString;
var g_verbose = 0;
var global_connect = { };
var global_status = { };
var global_data = { n: 0,
interval : { bus:13000 },
port : 80,
url : "webservices.nextbus.com",
path : { bus : "/service/publicXMLFeed?command=vehicleLocations&a=mbta&t=0" }
};
for (var rbo in global_data.path) {
console.log("setting up " + rbo );
setInterval(
(function(RBO) {
return function() {
if (g_verbose) { console.log("upating " + RBO + " -->>>>"); }
var opt = { host: global_data.url, port:global_data.port, path: global_data.path[RBO] };
try {
var req = http.request( opt, function(res) {
res.setEncoding('utf8');
var body = '';
res.on('data', function(chunk) { body += chunk; });
res.on('end', function() {
try {
xmlparse( body,
function(err, result) {
global_data[RBO] = result;
console.log("Buses: " + result.body.vehicle.length);
});
} catch (ee) {
console.log("xml parse error:", ee );
}
});
});
req.on('error', function(err) { console.log("got http erro:", err); });
req.end();
} catch (e) {
console.log("http: got e:", e);
}
};
})(rbo), global_data.interval[rbo] );
}
sockio.on('connection', function(socket) {
console.log("connection!");
global_data.n++;
var local_name = global_data.n;
global_connect[ local_name ] = socket;
global_status [ local_name ] = { enable : false };
socket.on("enable", function(msg) {
console.log("enable:", local_name);
global_status[ local_name ].enable = true;
pushSingleUpdate( local_name );
});
socket.on("disable", function(msg) {
console.log("disable:", local_name);
global_status[ local_name ].enable = false;
});
socket.on("myevent", function(msg) {
console.log("got myevent!");
console.log(msg);
socket.emit("update", { n: global_data.n } );
});
socket.on("disconnect", function() {
console.log("disconnecting client ", local_name);
delete global_connect[ local_name ];
delete global_status [ local_name ];
});
});
sockio.listen(8182);
function pushSingleUpdate( cli_id ) {
var colors = [ "bus" ];
for (var i in colors) {
if (colors[i] in global_data) {
var dat = {}
dat[ colors[i] ] = global_data[ colors[i] ];
try {
if (global_status[ cli_id ].enable) {
global_connect[ cli_id ].emit("update", dat );
}
} catch (ee) {
console.log("when trying to emit to cli_id: ", cli_id, " got error:", ee );
}
}
}
}
function pushUpdate() {
var conn_count = 0;
for (var cli_id in global_connect) {
conn_count++;
var colors = [ "bus" ];
pushSingleUpdate( cli_id );
}
var dt = Date.now();
var dts = dt.toString();
var enable_count=0;
for (var i in global_status) {
if (global_status[i].enable) { enable_count++; }
}
console.log( dts + " connected clients:", conn_count, ", (", global_data.n, "), (enabled: ", enable_count, ")" );
}
setInterval( pushUpdate, 10000 );
| srv/srv_bus.js | var http = require("http");
var sockio = require('socket.io')();
var xmlparse = require("xml2js").parseString;
var g_verbose = 0;
var global_connect = { };
var global_status = { };
var global_data = { n: 0,
interval : { bus:13000 },
//interval : { bus:3000 },
port : 80,
url : "webservices.nextbus.com",
path : { bus : "/service/publicXMLFeed?command=vehicleLocations&a=mbta&t=0" }
};
for (var rbo in global_data.path) {
console.log("setting up " + rbo );
setInterval(
(function(RBO) {
return function() {
if (g_verbose) { console.log("upating " + RBO + " -->>>>"); }
var opt = { host: global_data.url, port:global_data.port, path: global_data.path[RBO] };
try {
var req = http.request( opt, function(res) {
res.setEncoding('utf8');
var body = '';
res.on('data', function(chunk) { body += chunk; });
res.on('end', function() {
try {
xmlparse( body,
function(err, result) {
global_data[RBO] = result;
console.log("Buses: " + result.body.vehicle.length);
});
} catch (ee) {
console.log("xml parse error:", ee );
}
});
});
req.on('error', function(err) { console.log("got http erro:", err); });
req.end();
} catch (e) {
console.log("http: got e:", e);
}
};
})(rbo), global_data.interval[rbo] );
}
sockio.on('connection', function(socket) {
console.log("connection!");
global_data.n++;
var local_name = global_data.n;
global_connect[ local_name ] = socket;
global_status [ local_name ] = { enable : false };
socket.on("enable", function(msg) {
console.log("enable:", local_name);
global_status[ local_name ].enable = true;
pushSingleUpdate( local_name );
});
socket.on("disable", function(msg) {
console.log("disable:", local_name);
global_status[ local_name ].enable = false;
});
socket.on("myevent", function(msg) {
console.log("got myevent!");
console.log(msg);
socket.emit("update", { n: global_data.n } );
});
socket.on("disconnect", function() {
console.log("disconnecting client ", local_name);
delete global_connect[ local_name ];
delete global_status [ local_name ];
});
});
sockio.listen(8182);
function pushSingleUpdate( cli_id ) {
var colors = [ "bus" ];
for (var i in colors) {
if (colors[i] in global_data) {
var dat = {}
dat[ colors[i] ] = global_data[ colors[i] ];
try {
if (global_status[ cli_id ].enable) {
global_connect[ cli_id ].emit("update", dat );
}
} catch (ee) {
console.log("when trying to emit to cli_id: ", cli_id, " got error:", ee );
}
}
}
}
function pushUpdate() {
//console.log(global_data.n);
var conn_count = 0;
for (var cli_id in global_connect) {
conn_count++;
var colors = [ "bus" ];
pushSingleUpdate( cli_id );
/*
for (var i in colors) {
if (colors[i] in global_data) {
var dat = {}
dat[ colors[i] ] = global_data[ colors[i] ];
try {
if (global_status[ cli_id ].enable) {
global_connect[ cli_id ].emit("update", dat );
}
} catch (ee) {
console.log("when trying to emit to cli_id: ", cli_id, " got error:", ee );
}
}
}
*/
}
var dt = Date.now();
var dts = dt.toString();
var enable_count=0;
for (var i in global_status) {
if (global_status[i].enable) { enable_count++; }
}
console.log( dts + " connected clients:", conn_count, ", (", global_data.n, "), (enabled: ", enable_count, ")" );
}
setInterval( pushUpdate, 10000 );
/*
setInterval( function() {
//console.log(global_data.n);
var conn_count = 0;
for (var cli_id in global_connect) {
conn_count++;
var colors = [ "bus" ];
for (var i in colors) {
if (colors[i] in global_data) {
var dat = {}
dat[ colors[i] ] = global_data[ colors[i] ];
try {
if (global_status[ cli_id ].enable) {
global_connect[ cli_id ].emit("update", dat );
}
} catch (ee) {
console.log("when trying to emit to cli_id: ", cli_id, " got error:", ee );
}
}
}
}
var dt = Date.now();
var dts = dt.toString();
var enable_count=0;
for (var i in global_status) {
if (global_status[i].enable) { enable_count++; }
}
console.log( dts + " connected clients:", conn_count, ", (", global_data.n, "), (enabled: ", enable_count, ")" );
}, 5000 );
*/
| added AGPL header and took out dead comments
| srv/srv_bus.js | added AGPL header and took out dead comments | <ide><path>rv/srv_bus.js
<add>/*
<add>
<add> Copyright (C) 2013 Abram Connelly
<add>
<add> This file is part of bostontraintrack.
<add>
<add> bostontraintrack is free software: you can redistribute it and/or modify
<add> it under the terms of the GNU Affero General Public License as published by
<add> the Free Software Foundation, either version 3 of the License, or
<add> (at your option) any later version.
<add>
<add> bostontraintrack is distributed in the hope that it will be useful,
<add> but WITHOUT ANY WARRANTY; without even the implied warranty of
<add> MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<add> GNU Affero General Public License for more details.
<add>
<add> You should have received a copy of the GNU Affero General Public License
<add> along with bostontraintrack. If not, see <http://www.gnu.org/licenses/>.
<add>
<add>*/
<add>
<add>
<ide> var http = require("http");
<ide> var sockio = require('socket.io')();
<ide> var xmlparse = require("xml2js").parseString;
<ide>
<ide> var global_data = { n: 0,
<ide> interval : { bus:13000 },
<del> //interval : { bus:3000 },
<ide> port : 80,
<ide> url : "webservices.nextbus.com",
<ide> path : { bus : "/service/publicXMLFeed?command=vehicleLocations&a=mbta&t=0" }
<ide> }
<ide>
<ide> function pushUpdate() {
<del> //console.log(global_data.n);
<ide>
<ide> var conn_count = 0;
<ide> for (var cli_id in global_connect) {
<ide> var colors = [ "bus" ];
<ide>
<ide> pushSingleUpdate( cli_id );
<del>
<del>/*
<del> for (var i in colors) {
<del> if (colors[i] in global_data) {
<del> var dat = {}
<del> dat[ colors[i] ] = global_data[ colors[i] ];
<del>
<del> try {
<del> if (global_status[ cli_id ].enable) {
<del> global_connect[ cli_id ].emit("update", dat );
<del> }
<del> } catch (ee) {
<del> console.log("when trying to emit to cli_id: ", cli_id, " got error:", ee );
<del> }
<del>
<del> }
<del> }
<del>*/
<ide>
<ide> }
<ide>
<ide>
<ide> setInterval( pushUpdate, 10000 );
<ide>
<del>/*
<del>setInterval( function() {
<del> //console.log(global_data.n);
<ide>
<del> var conn_count = 0;
<del> for (var cli_id in global_connect) {
<del> conn_count++;
<del> var colors = [ "bus" ];
<del>
<del> for (var i in colors) {
<del> if (colors[i] in global_data) {
<del> var dat = {}
<del> dat[ colors[i] ] = global_data[ colors[i] ];
<del>
<del> try {
<del> if (global_status[ cli_id ].enable) {
<del> global_connect[ cli_id ].emit("update", dat );
<del> }
<del> } catch (ee) {
<del> console.log("when trying to emit to cli_id: ", cli_id, " got error:", ee );
<del> }
<del>
<del> }
<del> }
<del>
<del> }
<del>
<del> var dt = Date.now();
<del> var dts = dt.toString();
<del>
<del> var enable_count=0;
<del> for (var i in global_status) {
<del> if (global_status[i].enable) { enable_count++; }
<del> }
<del>
<del> console.log( dts + " connected clients:", conn_count, ", (", global_data.n, "), (enabled: ", enable_count, ")" );
<del>}, 5000 );
<del>*/
<del>
<del>
<del>
<del> |
|
Java | apache-2.0 | 5d8a0e6c68222d74e3ddb33b118cf39f98ae7c77 | 0 | jguerinet/MyMartlet,jguerinet/MyMartlet-Android,jguerinet/MyMartlet,jguerinet/MyMartlet,jguerinet/MyMartlet | /*
* Copyright 2014-2016 Julien Guerinet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.appvelopers.mcgillmobile.util.manager;
import android.content.Context;
import android.content.SharedPreferences;
import com.guerinet.utils.Utils;
import com.guerinet.utils.prefs.IntPreference;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import ca.appvelopers.mcgillmobile.util.dagger.prefs.PrefsModule;
import ca.appvelopers.mcgillmobile.util.storage.ClearManager;
/**
* Runs any update code
* @author Julien Guerinet
* @since 1.0.0
*/
@Singleton
public class UpdateManager {
/**
* App context
*/
private final Context context;
/**
* Version {@link IntPreference}
*/
private final IntPreference versionPref;
/**
* {@link ClearManager} instance
*/
private final ClearManager clearManager;
/**
* {@link SharedPreferences} instance
*/
private final SharedPreferences sharedPrefs;
/**
* Default Injectable Constructor
*
* @param context App context
* @param versionPref Version {@link IntPreference}
* @param clearManager {@link ClearManager} instance
* @param sharedPrefs {@link SharedPreferences} instance
*/
@Inject
UpdateManager(Context context, @Named(PrefsModule.VERSION) IntPreference versionPref,
ClearManager clearManager, SharedPreferences sharedPrefs) {
this.context = context;
this.versionPref = versionPref;
this.clearManager = clearManager;
this.sharedPrefs = sharedPrefs;
}
/**
* Checks if the app has been updated and runs any update code needed if so
*/
public void update() {
//G t the version code
int code = Utils.versionCode(context);
// Get the current version number
int storedVersion = versionPref.get();
//Stored version is smaller than version number
if (storedVersion < code) {
updateLoop: while (storedVersion < code) {
// Find the closest version to the stored one and cascade down through the updates
switch (storedVersion) {
case -1:
// First time opening the app, break out of the loop
break updateLoop;
case 6:
update7();
case 12:
update13();
case 15:
update16();
case 16:
update17();
case 24:
update25();
case 0:
// This will never get directly called, it will only be accessed through
// another update above
break updateLoop;
}
storedVersion ++;
}
// Store the new version in the SharedPrefs
versionPref.set(code);
}
}
/**
* v2.3.2
* - Changed the way the language pref was being stored
*/
private void update25() {
// Get the int stored at the current language key
int language = sharedPrefs.getInt("language", 0);
// Store the equivalent language code in its place
sharedPrefs.edit()
.putString("language", language == 0 ? "en" : "fr")
.apply();
}
/**
* v2.2.0
* - Redid the entire admin system
* - Redid all of the user info parsing, made some changes to the objects
*/
private void update17() {
// Redownload everything
clearManager.config();
clearManager.all();
}
/**
* v2.1.0
* - Removed Hungarian notation everywhere -> redownload config and user data
*/
private void update16() {
// Redownload everything
clearManager.config();
clearManager.all();
}
/**
* v2.0.1
* - Object Changes -> Force the user to reload all of their info
* - Place changes -> Force the reload of all of the config stuff
*/
private void update13() {
// Re-download all user info
clearManager.config();
clearManager.all();
}
/**
* v1.0.1
* - Object changes -> Force the reload of all of the info
*/
private void update7() {
// Force the user to re-update all of the information in the app
clearManager.all();
}
}
| app/src/main/java/ca/appvelopers/mcgillmobile/util/manager/UpdateManager.java | /*
* Copyright 2014-2016 Julien Guerinet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.appvelopers.mcgillmobile.util.manager;
import android.content.Context;
import com.guerinet.utils.Utils;
import com.guerinet.utils.prefs.IntPreference;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import ca.appvelopers.mcgillmobile.util.dagger.prefs.PrefsModule;
import ca.appvelopers.mcgillmobile.util.storage.ClearManager;
/**
* Runs any update code
* @author Julien Guerinet
* @since 1.0.0
*/
@Singleton
public class UpdateManager {
/**
* App context
*/
private final Context context;
/**
* Version {@link IntPreference}
*/
private final IntPreference versionPref;
/**
* {@link ClearManager} instance
*/
private final ClearManager clearManager;
/**
* Default Injectable Constructor
*
* @param context App context
* @param versionPref Version {@link IntPreference}
* @param clearManager {@link ClearManager} instance
*/
@Inject
public UpdateManager(Context context, @Named(PrefsModule.VERSION) IntPreference versionPref,
ClearManager clearManager) {
this.context = context;
this.versionPref = versionPref;
this.clearManager = clearManager;
}
/**
* Checks if the app has been updated and runs any update code needed if so
*/
public void update() {
//Get the version code
int code = Utils.versionCode(context);
//Get the current version number
int storedVersion = versionPref.get();
//Stored version is smaller than version number
if (storedVersion < code) {
updateLoop: while (storedVersion < code) {
//Find the closest version to the stored one and cascade down through the updates
switch (storedVersion) {
case -1:
//First time opening the app, break out of the loop
break updateLoop;
case 6:
update7();
case 12:
update13();
case 15:
update16();
case 16:
update17();
case 0:
//This will never get directly called, it will only be accessed through
// another update above
break updateLoop;
}
storedVersion ++;
}
//Store the new version in the SharedPrefs
versionPref.set(code);
}
}
/**
* v2.2.0
* - Redid the entire admin system
* - Redid all of the user info parsing, made some changes to the objects
*/
private void update17() {
//Redownload everything
clearManager.config();
clearManager.all();
}
/**
* v2.1.0
* - Removed Hungarian notation everywhere -> redownload config and user data
*/
private void update16() {
//Redownload everything
clearManager.config();
clearManager.all();
}
/**
* v2.0.1
* - Object Changes -> Force the user to reload all of their info
* - Place changes -> Force the reload of all of the config stuff
*/
private void update13() {
//Re-download all user info
clearManager.config();
clearManager.all();
}
/**
* v1.0.1
* - Object changes -> Force the reload of all of the info
*/
private void update7() {
//Force the user to re-update all of the information in the app
clearManager.all();
}
}
| Added update code to go from the old language storage type to the new one
| app/src/main/java/ca/appvelopers/mcgillmobile/util/manager/UpdateManager.java | Added update code to go from the old language storage type to the new one | <ide><path>pp/src/main/java/ca/appvelopers/mcgillmobile/util/manager/UpdateManager.java
<ide> package ca.appvelopers.mcgillmobile.util.manager;
<ide>
<ide> import android.content.Context;
<add>import android.content.SharedPreferences;
<ide>
<ide> import com.guerinet.utils.Utils;
<ide> import com.guerinet.utils.prefs.IntPreference;
<ide> * {@link ClearManager} instance
<ide> */
<ide> private final ClearManager clearManager;
<add> /**
<add> * {@link SharedPreferences} instance
<add> */
<add> private final SharedPreferences sharedPrefs;
<ide>
<ide> /**
<ide> * Default Injectable Constructor
<ide> * @param context App context
<ide> * @param versionPref Version {@link IntPreference}
<ide> * @param clearManager {@link ClearManager} instance
<add> * @param sharedPrefs {@link SharedPreferences} instance
<ide> */
<ide> @Inject
<del> public UpdateManager(Context context, @Named(PrefsModule.VERSION) IntPreference versionPref,
<del> ClearManager clearManager) {
<add> UpdateManager(Context context, @Named(PrefsModule.VERSION) IntPreference versionPref,
<add> ClearManager clearManager, SharedPreferences sharedPrefs) {
<ide> this.context = context;
<ide> this.versionPref = versionPref;
<ide> this.clearManager = clearManager;
<add> this.sharedPrefs = sharedPrefs;
<ide> }
<ide>
<ide> /**
<ide> * Checks if the app has been updated and runs any update code needed if so
<ide> */
<ide> public void update() {
<del> //Get the version code
<add> //G t the version code
<ide> int code = Utils.versionCode(context);
<ide>
<del> //Get the current version number
<add> // Get the current version number
<ide> int storedVersion = versionPref.get();
<ide>
<ide> //Stored version is smaller than version number
<ide> if (storedVersion < code) {
<ide> updateLoop: while (storedVersion < code) {
<del> //Find the closest version to the stored one and cascade down through the updates
<add> // Find the closest version to the stored one and cascade down through the updates
<ide> switch (storedVersion) {
<ide> case -1:
<del> //First time opening the app, break out of the loop
<add> // First time opening the app, break out of the loop
<ide> break updateLoop;
<ide> case 6:
<ide> update7();
<ide> update16();
<ide> case 16:
<ide> update17();
<add> case 24:
<add> update25();
<ide> case 0:
<del> //This will never get directly called, it will only be accessed through
<del> // another update above
<add> // This will never get directly called, it will only be accessed through
<add> // another update above
<ide> break updateLoop;
<ide> }
<ide> storedVersion ++;
<ide> }
<ide>
<del> //Store the new version in the SharedPrefs
<add> // Store the new version in the SharedPrefs
<ide> versionPref.set(code);
<ide> }
<add> }
<add>
<add> /**
<add> * v2.3.2
<add> * - Changed the way the language pref was being stored
<add> */
<add> private void update25() {
<add> // Get the int stored at the current language key
<add> int language = sharedPrefs.getInt("language", 0);
<add>
<add> // Store the equivalent language code in its place
<add> sharedPrefs.edit()
<add> .putString("language", language == 0 ? "en" : "fr")
<add> .apply();
<ide> }
<ide>
<ide> /**
<ide> * - Redid all of the user info parsing, made some changes to the objects
<ide> */
<ide> private void update17() {
<del> //Redownload everything
<add> // Redownload everything
<ide> clearManager.config();
<ide> clearManager.all();
<ide> }
<ide> * - Removed Hungarian notation everywhere -> redownload config and user data
<ide> */
<ide> private void update16() {
<del> //Redownload everything
<add> // Redownload everything
<ide> clearManager.config();
<ide> clearManager.all();
<ide> }
<ide> * - Place changes -> Force the reload of all of the config stuff
<ide> */
<ide> private void update13() {
<del> //Re-download all user info
<add> // Re-download all user info
<ide> clearManager.config();
<ide> clearManager.all();
<ide> }
<ide> * - Object changes -> Force the reload of all of the info
<ide> */
<ide> private void update7() {
<del> //Force the user to re-update all of the information in the app
<add> // Force the user to re-update all of the information in the app
<ide> clearManager.all();
<ide> }
<ide> } |
|
Java | bsd-3-clause | d5d64bc71a564a38de014eb97bb95e78927bdc14 | 0 | NCIP/c3pr,NCIP/c3pr,NCIP/c3pr | package edu.duke.cabig.c3pr.web.study;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.acegisecurity.Authentication;
import org.acegisecurity.GrantedAuthority;
import org.acegisecurity.context.SecurityContext;
import org.acegisecurity.context.SecurityContextHolder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.propertyeditors.CustomBooleanEditor;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.validation.BindException;
import org.springframework.validation.Errors;
import org.springframework.web.bind.ServletRequestDataBinder;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.view.RedirectView;
import edu.duke.cabig.c3pr.domain.Study;
import gov.nih.nci.cabig.ctms.web.tabs.Flow;
import gov.nih.nci.cabig.ctms.web.tabs.Tab;
public class AmendStudyController extends StudyController<Study> {
protected static final Log log = LogFactory.getLog(AmendStudyController.class);
public AmendStudyController() {
super("Amend Study");
setBindOnNewForm(true);
}
public AmendStudyController(String s) {
super(s);
setBindOnNewForm(true);
}
@Override
protected Map referenceData(HttpServletRequest request, int arg1) throws Exception {
request.setAttribute("flowType", "AMEND_STUDY");
request.setAttribute("amendFlow", "true");
SecurityContext context = SecurityContextHolder.getContext();
Authentication auth = context.getAuthentication();
GrantedAuthority[] groups = auth.getAuthorities();
String isAdmin = "false";
for (GrantedAuthority ga : groups) {
if (ga.getAuthority().endsWith("admin")) {
isAdmin = "true";
}
}
request.setAttribute("softDelete", "true");
request.setAttribute("isAdmin", isAdmin);
return super.referenceData(request, arg1);
}
@Override
protected void layoutTabs(Flow flow) {
boolean editMode = false;
flow.addTab(new StudyAmendmentTab());
flow.addTab(new StudyDetailsTab());
flow.addTab(new StudyDesignTab());
flow.addTab(new StudyEligibilityChecklistTab());
flow.addTab(new StudyStratificationTab());
flow.addTab(new StudyRandomizationTab());
flow.addTab(new StudyDiseasesTab());
flow.addTab(new StudySitesTab());
flow.addTab(new StudyIdentifiersTab());
flow.addTab(new StudyInvestigatorsTab());
flow.addTab(new StudyPersonnelTab());
flow.addTab(new StudyNotificationTab());
flow.addTab(new StudyOverviewTab("Summary", "Summary", "study/study_summary_view"));
}
@Override
protected void initBinder(HttpServletRequest req, ServletRequestDataBinder binder)
throws Exception {
super.initBinder(req, binder);
binder.registerCustomEditor(Date.class, new CustomDateEditor(new SimpleDateFormat(
"MM/dd/yyyy"), true));
binder.registerCustomEditor(Boolean.class, "epochAndArmsIndicator",
new CustomBooleanEditor(false));
}
protected Object formBackingObject(HttpServletRequest request) throws ServletException {
request.getSession().removeAttribute(getReplacedCommandSessionAttributeName(request));
Study study = studyDao.getById(Integer.parseInt(request.getParameter("studyId")));
if (study != null) {
log.debug("Retrieving Study Details for Id: " + study.getId());
}
study.getStudyAmendments().size();
return study;
}
@Override
protected boolean shouldSave(HttpServletRequest request, Study command, Tab<Study> tab) {
return super.shouldSave(request, command, tab)
&& (request.getParameter("_action") == null || "".equals(request
.getParameter("_action")));
}
@Override
protected boolean isSummaryEnabled() {
return true;
}
@Override
protected Object currentFormObject(HttpServletRequest request, Object sessionFormObject)
throws Exception {
if (sessionFormObject != null) {
getDao().reassociate((Study) sessionFormObject);
getDao().refresh((Study) sessionFormObject);
}
return sessionFormObject;
}
@Override
protected ModelAndView processFinish(HttpServletRequest request, HttpServletResponse response,
Object command, BindException errors) throws Exception {
// Redirect to Search page
ModelAndView modelAndView = new ModelAndView(new RedirectView("searchStudy"));
return modelAndView;
}
@Override
protected void postProcessPage(HttpServletRequest request, Object command, Errors errors,
int page) throws Exception {
super.postProcessPage(request, command, errors, page);
}
}
| codebase/projects/web/src/java/edu/duke/cabig/c3pr/web/study/AmendStudyController.java | package edu.duke.cabig.c3pr.web.study;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.acegisecurity.Authentication;
import org.acegisecurity.GrantedAuthority;
import org.acegisecurity.context.SecurityContext;
import org.acegisecurity.context.SecurityContextHolder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.propertyeditors.CustomBooleanEditor;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.validation.BindException;
import org.springframework.validation.Errors;
import org.springframework.web.bind.ServletRequestDataBinder;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.view.RedirectView;
import edu.duke.cabig.c3pr.domain.Study;
import gov.nih.nci.cabig.ctms.web.tabs.Flow;
import gov.nih.nci.cabig.ctms.web.tabs.Tab;
public class AmendStudyController extends StudyController<Study> {
protected static final Log log = LogFactory.getLog(AmendStudyController.class);
public AmendStudyController() {
super("Amend Study");
setBindOnNewForm(true);
}
public AmendStudyController(String s) {
super(s);
setBindOnNewForm(true);
}
@Override
protected Map referenceData(HttpServletRequest request, int arg1) throws Exception {
request.setAttribute("flowType", "AMEND_STUDY");
request.setAttribute("amendFlow", "true");
SecurityContext context = SecurityContextHolder.getContext();
Authentication auth = context.getAuthentication();
GrantedAuthority[] groups = auth.getAuthorities();
String isAdmin = "false";
for (GrantedAuthority ga : groups) {
if (ga.getAuthority().endsWith("admin")) {
isAdmin = "true";
}
}
request.setAttribute("isAdmin", isAdmin);
return super.referenceData(request, arg1);
}
@Override
protected void layoutTabs(Flow flow) {
boolean editMode = false;
flow.addTab(new StudyAmendmentTab());
flow.addTab(new StudyDetailsTab());
flow.addTab(new StudyDesignTab());
flow.addTab(new StudyEligibilityChecklistTab());
flow.addTab(new StudyStratificationTab());
flow.addTab(new StudyRandomizationTab());
flow.addTab(new StudyDiseasesTab());
flow.addTab(new StudySitesTab());
flow.addTab(new StudyIdentifiersTab());
flow.addTab(new StudyInvestigatorsTab());
flow.addTab(new StudyPersonnelTab());
flow.addTab(new StudyNotificationTab());
flow.addTab(new StudyOverviewTab("Summary", "Summary", "study/study_summary_view"));
}
@Override
protected void initBinder(HttpServletRequest req, ServletRequestDataBinder binder)
throws Exception {
super.initBinder(req, binder);
binder.registerCustomEditor(Date.class, new CustomDateEditor(new SimpleDateFormat(
"MM/dd/yyyy"), true));
binder.registerCustomEditor(Boolean.class, "epochAndArmsIndicator",
new CustomBooleanEditor(false));
}
protected Object formBackingObject(HttpServletRequest request) throws ServletException {
request.getSession().removeAttribute(getReplacedCommandSessionAttributeName(request));
Study study = studyDao.getById(Integer.parseInt(request.getParameter("studyId")));
if (study != null) {
log.debug("Retrieving Study Details for Id: " + study.getId());
}
study.getStudyAmendments().size();
return study;
}
@Override
protected boolean shouldSave(HttpServletRequest request, Study command, Tab<Study> tab) {
return super.shouldSave(request, command, tab)
&& (request.getParameter("_action") == null || "".equals(request
.getParameter("_action")));
}
@Override
protected boolean isSummaryEnabled() {
return true;
}
@Override
protected Object currentFormObject(HttpServletRequest request, Object sessionFormObject)
throws Exception {
if (sessionFormObject != null) {
getDao().reassociate((Study) sessionFormObject);
getDao().refresh((Study) sessionFormObject);
}
return sessionFormObject;
}
@Override
protected ModelAndView processFinish(HttpServletRequest request, HttpServletResponse response,
Object command, BindException errors) throws Exception {
// Redirect to Search page
ModelAndView modelAndView = new ModelAndView(new RedirectView("searchStudy"));
return modelAndView;
}
@Override
protected void postProcessPage(HttpServletRequest request, Object command, Errors errors,
int page) throws Exception {
super.postProcessPage(request, command, errors, page);
}
}
| Added soft delete to refdata
| codebase/projects/web/src/java/edu/duke/cabig/c3pr/web/study/AmendStudyController.java | Added soft delete to refdata | <ide><path>odebase/projects/web/src/java/edu/duke/cabig/c3pr/web/study/AmendStudyController.java
<ide> isAdmin = "true";
<ide> }
<ide> }
<add> request.setAttribute("softDelete", "true");
<ide> request.setAttribute("isAdmin", isAdmin);
<ide> return super.referenceData(request, arg1);
<ide> } |
|
Java | unlicense | c83684f3ebb04b03c8d85df17f3e810f954d39a3 | 0 | floating-cat/S1-Next,suafeng/S1-Next,ykrank/S1-Next,ykrank/S1-Next,gy6221/S1-Next,superpig11/S1-Next,ykrank/S1-Next | package cl.monsoon.s1next.widget;
import android.content.res.ColorStateList;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.StateListDrawable;
import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
public final class StateListDrawableWithTint extends StateListDrawable {
private final ColorStateList mColorStateList;
private final PorterDuff.Mode mMode;
public StateListDrawableWithTint(@DrawableRes Drawable drawable, @NonNull ColorStateList colorStateList, @NonNull PorterDuff.Mode mode) {
this.mColorStateList = colorStateList;
this.mMode = mode;
addState(new int[]{}, drawable);
}
@Override
protected boolean onStateChange(int[] stateSet) {
// mColorStateList is null when constructor invokes super() implicitly
//noinspection ConstantConditions
if (mColorStateList != null) {
int color = mColorStateList.getColorForState(stateSet, Color.TRANSPARENT);
setColorFilter(color, mMode);
}
return super.onStateChange(stateSet);
}
}
| app/src/main/java/cl/monsoon/s1next/widget/StateListDrawableWithTint.java | package cl.monsoon.s1next.widget;
import android.content.res.ColorStateList;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.StateListDrawable;
import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
public final class StateListDrawableWithTint extends StateListDrawable {
private final ColorStateList mColorStateList;
private final PorterDuff.Mode mMode;
public StateListDrawableWithTint(@DrawableRes Drawable drawable, @NonNull ColorStateList colorStateList, @NonNull PorterDuff.Mode mode) {
this.mColorStateList = colorStateList;
this.mMode = mode;
addState(new int[]{}, drawable);
}
@Override
protected boolean onStateChange(int[] stateSet) {
int color = mColorStateList.getColorForState(stateSet, Color.TRANSPARENT);
setColorFilter(color, mMode);
return super.onStateChange(stateSet);
}
}
| Fix NullPointerException due to aa05a2716a977f2ce442af61f538496e389ca1b6
| app/src/main/java/cl/monsoon/s1next/widget/StateListDrawableWithTint.java | Fix NullPointerException due to aa05a2716a977f2ce442af61f538496e389ca1b6 | <ide><path>pp/src/main/java/cl/monsoon/s1next/widget/StateListDrawableWithTint.java
<ide>
<ide> @Override
<ide> protected boolean onStateChange(int[] stateSet) {
<del> int color = mColorStateList.getColorForState(stateSet, Color.TRANSPARENT);
<del> setColorFilter(color, mMode);
<add> // mColorStateList is null when constructor invokes super() implicitly
<add> //noinspection ConstantConditions
<add> if (mColorStateList != null) {
<add> int color = mColorStateList.getColorForState(stateSet, Color.TRANSPARENT);
<add> setColorFilter(color, mMode);
<add> }
<ide>
<ide> return super.onStateChange(stateSet);
<ide> } |
|
Java | isc | error: pathspec 'mobile/src/main/java/theputnams/net/isitrecyclingweek/restclients/model/APIContract.java' did not match any file(s) known to git
| 4af01529206915e122e3d24a9c78eabb74f5ed9c | 1 | dhmncivichacks/isitrecyclingweek-android,dhmncivichacks/isitrecyclingweek,mikeputnam/isitrecyclingweek | package theputnams.net.isitrecyclingweek.restclients.model;
import com.google.gson.annotations.Expose;
public class APIContract {
@Expose
private String contractName;
@Expose
private String contractDescription;
@Expose
private String implementationName;
@Expose
private String homepageUrl;
@Expose
private String implementationApiUrl;
public String getContractName() {
return contractName;
}
public void setContractName(String contractName) {
this.contractName = contractName;
}
public String getContractDescription() {
return contractDescription;
}
public void setContractDescription(String contractDescription) {
this.contractDescription = contractDescription;
}
public String getImplementationName() {
return implementationName;
}
public void setImplementationName(String implementationName) {
this.implementationName = implementationName;
}
public String getHomepageUrl() {
return homepageUrl;
}
public void setHomepageUrl(String homepageUrl) {
this.homepageUrl = homepageUrl;
}
public String getImplementationApiUrl() {
return implementationApiUrl;
}
public void setImplementationApiUrl(String implementationApiUrl) {
this.implementationApiUrl = implementationApiUrl;
}
}
| mobile/src/main/java/theputnams/net/isitrecyclingweek/restclients/model/APIContract.java | Adds ApiContract data model
| mobile/src/main/java/theputnams/net/isitrecyclingweek/restclients/model/APIContract.java | Adds ApiContract data model | <ide><path>obile/src/main/java/theputnams/net/isitrecyclingweek/restclients/model/APIContract.java
<add>package theputnams.net.isitrecyclingweek.restclients.model;
<add>
<add>import com.google.gson.annotations.Expose;
<add>
<add>public class APIContract {
<add>
<add> @Expose
<add> private String contractName;
<add> @Expose
<add> private String contractDescription;
<add> @Expose
<add> private String implementationName;
<add> @Expose
<add> private String homepageUrl;
<add> @Expose
<add> private String implementationApiUrl;
<add>
<add> public String getContractName() {
<add> return contractName;
<add> }
<add>
<add> public void setContractName(String contractName) {
<add> this.contractName = contractName;
<add> }
<add>
<add> public String getContractDescription() {
<add> return contractDescription;
<add> }
<add>
<add> public void setContractDescription(String contractDescription) {
<add> this.contractDescription = contractDescription;
<add> }
<add>
<add> public String getImplementationName() {
<add> return implementationName;
<add> }
<add>
<add> public void setImplementationName(String implementationName) {
<add> this.implementationName = implementationName;
<add> }
<add>
<add> public String getHomepageUrl() {
<add> return homepageUrl;
<add> }
<add>
<add> public void setHomepageUrl(String homepageUrl) {
<add> this.homepageUrl = homepageUrl;
<add> }
<add>
<add> public String getImplementationApiUrl() {
<add> return implementationApiUrl;
<add> }
<add>
<add> public void setImplementationApiUrl(String implementationApiUrl) {
<add> this.implementationApiUrl = implementationApiUrl;
<add> }
<add>} |
|
Java | mit | bfd6197e0b8a1b4cece828881f666b5a474610f4 | 0 | codistmonk/IMJ | package imj3.draft.processing;
import static imj3.tools.AwtImage2D.awtRead;
import static imj3.tools.CommonSwingTools.limitHeight;
import static imj3.tools.CommonSwingTools.setModel;
import static java.lang.Math.max;
import static net.sourceforge.aprog.swing.SwingTools.horizontalBox;
import static net.sourceforge.aprog.swing.SwingTools.horizontalSplit;
import static net.sourceforge.aprog.swing.SwingTools.scrollable;
import static net.sourceforge.aprog.swing.SwingTools.verticalBox;
import static net.sourceforge.aprog.tools.Tools.append;
import static net.sourceforge.aprog.tools.Tools.array;
import static net.sourceforge.aprog.tools.Tools.baseName;
import static net.sourceforge.aprog.tools.Tools.join;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.StaxDriver;
import imj2.tools.Canvas;
import imj3.draft.processing.VisualAnalysis.Context.Refresh;
import imj3.draft.segmentation.ImageComponent;
import imj3.draft.segmentation.ImageComponent.Layer;
import imj3.draft.segmentation.ImageComponent.Painter;
import imj3.tools.CommonSwingTools.NestedList;
import imj3.tools.CommonSwingTools.PropertyGetter;
import imj3.tools.CommonSwingTools.PropertySetter;
import imj3.tools.CommonSwingTools.StringGetter;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.dnd.DropTarget;
import java.awt.dnd.DropTargetDropEvent;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.prefs.Preferences;
import javax.imageio.ImageIO;
import javax.swing.AbstractAction;
import javax.swing.Box;
import javax.swing.DefaultComboBoxModel;
import javax.swing.DefaultListCellRenderer;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JSplitPane;
import javax.swing.JTextField;
import javax.swing.JTree;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import net.sourceforge.aprog.swing.SwingTools;
import net.sourceforge.aprog.tools.IllegalInstantiationException;
import net.sourceforge.aprog.tools.Tools;
/**
* @author codistmonk (creation 2015-02-13)
*/
public final class VisualAnalysis {
private VisualAnalysis() {
throw new IllegalInstantiationException();
}
static final Preferences preferences = Preferences.userNodeForPackage(VisualAnalysis.class);
static final XStream xstream = new XStream(new StaxDriver());
public static final String IMAGE_PATH = "image.path";
public static final String GROUND_TRUTH = "groundtruth";
public static final String EXPERIMENT = "experiment";
/**
* @param commandLineArguments
* <br>Unused
*/
public static final void main(final String[] commandLineArguments) {
SwingTools.useSystemLookAndFeel();
final Context context = new Context();
SwingUtilities.invokeLater(new Runnable() {
@Override
public final void run() {
SwingTools.show(new MainPanel(context), VisualAnalysis.class.getSimpleName(), false);
context.setImageFile(new File(preferences.get(IMAGE_PATH, "")));
context.setGroundTruth(preferences.get(GROUND_TRUTH, "gt"));
context.setExperiment(new File(preferences.get(EXPERIMENT, "experiment.xml")));
}
});
}
public static final Component label(final String text, final Component... components) {
return limitHeight(horizontalBox(append(array((Component) new JLabel(text)), components)));
}
public static final <C extends JComponent> C centerX(final C component) {
component.setAlignmentX(Component.CENTER_ALIGNMENT);
return component;
}
public static final JTextField textView(final String text) {
final JTextField result = new JTextField(text);
result.setEditable(false);
return result;
}
public static final JButton button(final String type) {
final JButton result = new JButton(new ImageIcon(Tools.getResourceURL("lib/tango/" + type + ".png")));
final int size = max(result.getIcon().getIconWidth(), result.getIcon().getIconHeight());
result.setPreferredSize(new Dimension(size + 2, size + 2));
return result;
}
/**
* @author codistmonk (creation 2015-02-19)
*/
public static final class FileSelector extends JButton {
private final List<File> files = new ArrayList<>();
private ActionListener fileListener;
{
this.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
FileSelector.this.showPopup();
}
});
this.setMaximumSize(new Dimension(Integer.MAX_VALUE, 26));
this.setHorizontalAlignment(SwingConstants.LEFT);
}
public ActionListener getFileListener() {
return fileListener;
}
public void setFileListener(final ActionListener fileListener) {
this.fileListener = fileListener;
}
public final FileSelector setFile(final File file) {
this.files.remove(file);
this.files.add(0, file);
Tools.debugPrint(file);
final boolean changed = file.equals(new File(this.getText()));
this.setText(file.getName());
if (changed && this.fileListener != null) {
this.fileListener.actionPerformed(new ActionEvent(this, -1, null));
}
return this;
}
final void showPopup() {
final JPopupMenu popup = new JPopupMenu();
for (final File file : this.files) {
popup.add(new JMenuItem(new AbstractAction(file.getPath()) {
@Override
public final void actionPerformed(final ActionEvent event) {
FileSelector.this.setFile(file);
}
private static final long serialVersionUID = 8311454620470586686L;
}));
}
popup.show(this, 0, 0);
}
private static final long serialVersionUID = 7227165282556980768L;
}
/**
* @author codistmonk (creation 2015-02-13)
*/
public static final class MainPanel extends JPanel {
private final Context context;
private final FileSelector imageSelector;
private final JCheckBox imageVisibilitySelector;
private final FileSelector groundTruthSelector;
private final JCheckBox groundTruthVisibilitySelector;
private final PathSelector experimentSelector;
private final JTextField trainingTimeView;
private final JTextField classificationTimeView;
private final JCheckBox classificationVisibilitySelector;
private final JTextField scoreView;
private final JTree tree;
private final JSplitPane mainSplitPane;
private ImageComponent imageComponent;
private Experiment experiment;
public MainPanel(final Context context) {
super(new BorderLayout());
this.context = context;
this.imageSelector = new FileSelector();
this.imageVisibilitySelector = new JCheckBox("", true);
this.groundTruthSelector = new FileSelector();
this.groundTruthVisibilitySelector = new JCheckBox();
this.experimentSelector = new PathSelector();
this.trainingTimeView = textView("-");
this.classificationTimeView = textView("-");
this.classificationVisibilitySelector = new JCheckBox();
this.scoreView = textView("-");
this.tree = new JTree(new DefaultTreeModel(new DefaultMutableTreeNode("No experiment")));
final int padding = this.imageVisibilitySelector.getPreferredSize().width;
final JButton openImageButton = button("open");
final JButton newGroundTruthButton = button("new");
final JButton saveGroundTruthButton = button("save");
final JButton refreshGroundTruthButton = button("refresh");
this.mainSplitPane = horizontalSplit(verticalBox(
label(" Image: ", this.imageSelector, openImageButton, this.imageVisibilitySelector),
label(" Ground truth: ", this.groundTruthSelector, newGroundTruthButton, saveGroundTruthButton, refreshGroundTruthButton, this.groundTruthVisibilitySelector),
label(" Experiment: ", this.experimentSelector, button("new"), button("open"), button("save"), button("refresh"), Box.createHorizontalStrut(padding)),
label(" Training (s): ", this.trainingTimeView, button("process"), Box.createHorizontalStrut(padding)),
label(" Classification (s): ", this.classificationTimeView, button("process"), button("save"), button("refresh"), this.classificationVisibilitySelector),
label(" F1: ", this.scoreView, Box.createHorizontalStrut(padding)),
centerX(new JButton("Confusion matrix...")),
scrollable(this.tree)), scrollable(new JLabel("Drop file here")));
this.mainSplitPane.getLeftComponent().setMaximumSize(new Dimension(128, Integer.MAX_VALUE));
this.add(this.mainSplitPane, BorderLayout.CENTER);
openImageButton.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final JFileChooser fileChooser = new JFileChooser(new File(preferences.get(IMAGE_PATH, "")).getParent());
if (JFileChooser.APPROVE_OPTION == fileChooser.showOpenDialog(MainPanel.this)) {
context.setImageFile(fileChooser.getSelectedFile());
}
}
});
this.imageSelector.setFileListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
context.setImageFile(new File(MainPanel.this.getImageSelector().getText()));
}
});
this.groundTruthSelector.setFileListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
context.refreshGroundTruthAndClassification(Refresh.FROM_FILE);
}
});
newGroundTruthButton.addActionListener(e -> {
final String name = JOptionPane.showInputDialog("Ground truth name:");
if (name != null && context.getImage() != null) {
try {
ImageIO.write(context.formatGroundTruth().getGroundTruth().getImage(), "png", new File(context.getGroundTruthPath(name)));
} catch (final IOException exception) {
throw new UncheckedIOException(exception);
}
context.setGroundTruth(name);
}
});
saveGroundTruthButton.addActionListener(e -> Tools.debugPrint("TODO"));
this.experimentSelector.setPathListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
try {
MainPanel.this.setExperiment((Experiment) xstream.fromXML(
new File(MainPanel.this.getExperimentSelector().getSelectedItem().toString())));
} catch (final Exception exception) {
Tools.debugError(exception);
}
}
});
this.experimentSelector.setOptionListener(PathSelector.Option.NEW, e -> {
final JFileChooser fileChooser = new JFileChooser(new File(preferences.get(EXPERIMENT, "")).getParentFile());
if (JFileChooser.APPROVE_OPTION == fileChooser.showSaveDialog(this)) {
final File selectedFile = fileChooser.getSelectedFile();
try (final OutputStream output = new FileOutputStream(selectedFile)) {
xstream.toXML(new Experiment(), output);
} catch (final IOException exception) {
throw new UncheckedIOException(exception);
}
context.setExperiment(selectedFile);
}
});
this.experimentSelector.setOptionListener(PathSelector.Option.OPEN, e -> Tools.debugPrint("TODO"));
this.experimentSelector.setOptionListener(PathSelector.Option.SAVE, e -> Tools.debugPrint("TODO"));
this.imageVisibilitySelector.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final Layer imageLayer = MainPanel.this.getImageComponent().getLayers().get(0);
final Painter imagePainter = imageLayer.getPainters().get(0);
final boolean imageVisible = MainPanel.this.getImageVisibilitySelector().isSelected();
if (!imageVisible) {
imageLayer.getCanvas().clear(Color.GRAY);
}
imagePainter.getActive().set(imageVisible);
imagePainter.getUpdateNeeded().set(true);
MainPanel.this.getImageComponent().repaint();
}
});
this.groundTruthVisibilitySelector.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final Layer groundTruthLayer = MainPanel.this.getImageComponent().getLayers().get(1);
final Painter groundTruthPainter = groundTruthLayer.getPainters().get(0);
final boolean groundTruthVisible = MainPanel.this.getGroundTruthVisibilitySelector().isSelected();
groundTruthPainter.getActive().set(groundTruthVisible);
groundTruthPainter.getUpdateNeeded().set(true);
MainPanel.this.getImageComponent().repaint();
}
});
this.classificationVisibilitySelector.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final Layer classificationLayer = MainPanel.this.getImageComponent().getLayers().get(2);
final Painter classificationPainter = classificationLayer.getPainters().get(0);
final boolean classificationVisible = MainPanel.this.getClassificationVisibilitySelector().isSelected();
classificationPainter.getActive().set(classificationVisible);
classificationPainter.getUpdateNeeded().set(true);
MainPanel.this.getImageComponent().repaint();
}
});
this.setDropTarget(new DropTarget() {
@Override
public final synchronized void drop(final DropTargetDropEvent event) {
final File file = SwingTools.getFiles(event).get(0);
// dropped experiment files (.xml) are not handled yet; only images are opened
if (!file.getName().toLowerCase(Locale.ENGLISH).endsWith(".xml")) {
context.setImageFile(file);
}
}
/**
* {@value}.
*/
private static final long serialVersionUID = 5442000733451223725L;
});
this.setPreferredSize(new Dimension(800, 600));
context.setMainPanel(this);
}
public final Context getContext() {
return this.context;
}
public final Experiment getExperiment() {
return this.experiment;
}
public final void setExperiment(final Experiment experiment) {
this.experiment = experiment;
setModel(this.tree, experiment, "Experiment");
}
public final ImageComponent getImageComponent() {
return this.imageComponent;
}
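/*
* Rebuilds the image component for the given path and stacks two
* overlays on top of the base image: layer 1 paints the ground truth
* canvas and layer 2 paints the classification canvas (the visibility
* check boxes toggle the painters of layers 0, 1 and 2 respectively).
*/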
final void setImage(final String path) {
this.imageComponent = new ImageComponent(awtRead(path));
this.getImageComponent().addLayer().getPainters().add(new Painter.Abstract() {
@Override
public final void paint(final Canvas canvas) {
canvas.getGraphics().drawImage(MainPanel.this.getContext().getGroundTruth().getImage(), 0, 0, null);
}
private static final long serialVersionUID = 4700895082820237288L;
});
this.getImageComponent().addLayer().getPainters().add(new Painter.Abstract() {
@Override
public final void paint(final Canvas canvas) {
canvas.getGraphics().drawImage(MainPanel.this.getContext().getClassification().getImage(), 0, 0, null);
}
private static final long serialVersionUID = 7941391067177261093L;
});
this.setContents(this.getImageComponent());
}
public final FileSelector getImageSelector() {
return this.imageSelector;
}
public final JTree getTree() {
return this.tree;
}
public final JCheckBox getImageVisibilitySelector() {
return this.imageVisibilitySelector;
}
public final FileSelector getGroundTruthSelector() {
return this.groundTruthSelector;
}
public final JCheckBox getGroundTruthVisibilitySelector() {
return this.groundTruthVisibilitySelector;
}
public final PathSelector getExperimentSelector() {
return this.experimentSelector;
}
public final JTextField getTrainingTimeView() {
return this.trainingTimeView;
}
public final JTextField getClassificationTimeView() {
return this.classificationTimeView;
}
public final JCheckBox getClassificationVisibilitySelector() {
return this.classificationVisibilitySelector;
}
public final JTextField getScoreView() {
return this.scoreView;
}
public final void setContents(final Component component) {
this.mainSplitPane.setRightComponent(scrollable(component));
}
private static final long serialVersionUID = 2173077945563031333L;
public static final int IMAGE_SELECTOR_RESERVED_SLOTS = 2;
}
/**
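* Mutable state shared between the UI and the processing code: the main
* panel, the current experiment, and the ground truth and
* classification canvases, together with the file naming conventions
* that tie them to the current image.
* 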
* @author codistmonk (creation 2015-02-13)
*/
public static final class Context implements Serializable {
private MainPanel mainPanel;
private Experiment experiment;
private final Canvas groundTruth = new Canvas();
private final Canvas classification = new Canvas();
public final MainPanel getMainPanel() {
return this.mainPanel;
}
public final void setMainPanel(final MainPanel mainPanel) {
this.mainPanel = mainPanel;
}
public final File getExperimentFile() {
return new File(this.getMainPanel().getExperimentSelector().getSelectedItem().toString());
}
public final String getGroundTruthName() {
return this.getMainPanel().getGroundTruthSelector().getText();
}
public final Context setGroundTruthName(final String groundTruthName) {
this.getMainPanel().getGroundTruthSelector().setFile(new File(groundTruthName));
return this;
}
public final Experiment getExperiment() {
return this.experiment;
}
public final Context setExperiment(final Experiment experiment) {
this.experiment = experiment;
return this;
}
public final BufferedImage getImage() {
final MainPanel mainPanel = this.getMainPanel();
final ImageComponent imageComponent = mainPanel == null ? null : mainPanel.getImageComponent();
return imageComponent == null ? null : imageComponent.getImage();
}
public final Canvas getGroundTruth() {
return this.groundTruth;
}
public final Context formatGroundTruth() {
return format(this.getGroundTruth());
}
public final Canvas getClassification() {
return this.classification;
}
public final String getExperimentName() {
return baseName(this.getExperimentFile().getName());
}
public final File getImageFile() {
return new File(this.getMainPanel().getImageSelector().getText());
}
public final String getGroundTruthPath() {
return this.getGroundTruthPath(this.getGroundTruthName());
}
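/*
* Side-car naming convention: for an image "dir/img.png" and ground
* truth name "gt", the ground truth is stored as
* "dir/img_groundtruth_gt.png" and the classification as
* "dir/img_classification_gt_<experiment>.png".
*/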
public final String getGroundTruthPath(final String name) {
return baseName(this.getImageFile().getPath()) + "_groundtruth_" + name + ".png";
}
public final String getClassificationPath() {
return baseName(this.getImageFile().getPath()) + "_classification_" + this.getGroundTruthName() + "_" + this.getExperimentName() + ".png";
}
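/*
* Resizes both canvases to the current image, then either clears them
* (CLEAR), reloads them from their side-car files when those exist
* (FROM_FILE), or leaves the pixels untouched (NOP); does nothing when
* no image is loaded.
*/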
public final void refreshGroundTruthAndClassification(final Refresh refresh) {
final BufferedImage image = this.getImage();
if (image == null) {
return;
}
System.out.println(Tools.debug(Tools.DEBUG_STACK_OFFSET + 1));
Tools.debugPrint(this.getGroundTruthName());
final int imageWidth = image.getWidth();
final int imageHeight = image.getHeight();
this.getGroundTruth().setFormat(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB);
this.getClassification().setFormat(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB);
switch (refresh) {
case CLEAR:
this.getGroundTruth().clear(CLEAR);
this.getClassification().clear(CLEAR);
break;
case FROM_FILE:
{
{
final String groundTruthPath = this.getGroundTruthPath();
if (new File(groundTruthPath).isFile()) {
this.getGroundTruth().getGraphics().drawImage(awtRead(groundTruthPath), 0, 0, null);
} else {
this.getGroundTruth().clear(CLEAR);
}
}
{
final String classificationPath = this.getClassificationPath();
if (new File(classificationPath).isFile()) {
this.getClassification().getGraphics().drawImage(awtRead(classificationPath), 0, 0, null);
} else {
this.getClassification().clear(CLEAR);
}
}
break;
}
case NOP:
break;
}
}
public final Context setImageFile(final File imageFile) {
System.out.println(Tools.debug(Tools.DEBUG_STACK_OFFSET + 1, imageFile));
final File oldImageFile = this.getImageFile();
if (imageFile.isFile() && !imageFile.equals(oldImageFile)) {
this.getMainPanel().setImage(imageFile.getPath());
this.getMainPanel().getImageSelector().setFile(imageFile);
this.refreshGroundTruthAndClassification(Refresh.FROM_FILE);
preferences.put(IMAGE_PATH, imageFile.getPath());
}
return this;
}
public final Context setGroundTruth(final String name) {
if (new File(this.getGroundTruthPath(name)).isFile()) {
this.getMainPanel().getGroundTruthSelector().setFile(new File(name));
preferences.put(GROUND_TRUTH, name);
}
return this;
}
public final Context setExperiment(final File experimentFile) {
if (experimentFile.isFile()) {
this.getMainPanel().getExperimentSelector().setPath(experimentFile.getPath());
preferences.put(EXPERIMENT, experimentFile.getPath());
}
return this;
}
private final Context format(final Canvas canvas) {
final BufferedImage image = this.getImage();
if (image != null) {
canvas.setFormat(image.getWidth(), image.getHeight(), BufferedImage.TYPE_INT_ARGB);
} else {
canvas.setFormat(1, 1, BufferedImage.TYPE_INT_ARGB);
}
canvas.clear(CLEAR);
return this;
}
private static final long serialVersionUID = -2487965125442868238L;
public static final Color CLEAR = new Color(0, true);
/**
* @author codistmonk (creation 2015-02-17)
*/
public static enum Refresh {
NOP, CLEAR, FROM_FILE;
}
}
/**
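* Serializable experiment model: a list of class descriptions (name and
* ARGB label) and a list of training fields (image path and bounds);
* the <code>NestedList</code> annotations drive the tree view, and
* instances are persisted to XML through the shared XStream instance.
* 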
* @author codistmonk (creation 2015-02-16)
*/
public static final class Experiment implements Serializable {
private final List<ClassDescription> classDescriptions = new ArrayList<>();
private final List<TrainingField> trainingFields = new ArrayList<>();
@Override
public final String toString() {
return "Experiment";
}
@NestedList(name="classes", element="class", elementClass=ClassDescription.class)
public final List<ClassDescription> getClassDescriptions() {
return this.classDescriptions;
}
@NestedList(name="training", element="training field", elementClass=TrainingField.class)
public final List<TrainingField> getTrainingFields() {
return this.trainingFields;
}
private static final long serialVersionUID = -4539259556658072410L;
/**
* @author codistmonk (creation 2015-02-16)
*/
public static final class ClassDescription implements Serializable {
private String name = "class";
private int label = 0xFF000000;
@StringGetter
@PropertyGetter("name")
public final String getName() {
return this.name;
}
@PropertySetter("name")
public final ClassDescription setName(final String name) {
this.name = name;
return this;
}
public final int getLabel() {
return this.label;
}
public final ClassDescription setLabel(final int label) {
this.label = label;
return this;
}
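/*
* The label is exposed to the UI as an "#AARRGGBB" hexadecimal string;
* for example 0xFF000000 round-trips as "#FF000000". Parsing goes
* through Long.parseLong because Integer.parseInt would reject values
* with the alpha bit set (they exceed Integer.MAX_VALUE).
*/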
@PropertyGetter("label")
public final String getLabelAsString() {
return "#" + Integer.toHexString(this.getLabel()).toUpperCase(Locale.ENGLISH);
}
@PropertySetter("label")
public final ClassDescription setLabel(final String labelAsString) {
return this.setLabel((int) Long.parseLong(labelAsString.substring(1), 16));
}
private static final long serialVersionUID = 4974707407567297906L;
}
/**
* @author codistmonk (creation 2015-02-17)
*/
public static final class TrainingField implements Serializable {
private String imagePath = "";
private final Rectangle bounds = new Rectangle();
@PropertyGetter("image")
public final String getImagePath() {
return this.imagePath;
}
@PropertySetter("image")
public final TrainingField setImagePath(final String imagePath) {
this.imagePath = imagePath;
return this;
}
public final Rectangle getBounds() {
return this.bounds;
}
@PropertyGetter("bounds")
public final String getBoundsAsString() {
return join(",", this.getBounds().x, this.getBounds().y, this.getBounds().width, this.getBounds().height);
}
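/*
* Bounds round-trip as a comma-separated "x,y,width,height" string,
* for example "10,20,300,200".
*/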
@PropertySetter("bounds")
public final TrainingField setBounds(final String boundsAsString) {
final int[] bounds = Arrays.stream(boundsAsString.split(",")).mapToInt(Integer::parseInt).toArray();
this.getBounds().setBounds(bounds[0], bounds[1], bounds[2], bounds[3]);
return this;
}
@Override
public final String toString() {
return new File(this.getImagePath()).getName() + "[" + this.getBoundsAsString() + "]";
}
private static final long serialVersionUID = 847822079141878928L;
}
}
/**
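* Combo box that keeps a most-recently-used list of paths, optionally
* followed by a "-" separator and command entries (New..., Open...,
* Save, Delete); picking a command runs its listener and the selection
* snaps back to the most recent path.
* <p>
* Usage sketch (the listener bodies are placeholders):
* <pre>
* final PathSelector selector = new PathSelector();
* selector.setPathListener(e -> load(selector.getSelectedItem()));
* selector.setOptionListener(PathSelector.Option.NEW, e -> createNew());
* selector.setPath("experiment.xml");
* </pre>
* 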
* @author codistmonk (creation 2015-02-18)
*/
public static final class PathSelector extends JComboBox<Object> {
private final Map<Option, ActionListener> optionListeners;
private ActionListener pathListener;
public PathSelector() {
this.optionListeners = new LinkedHashMap<>();
this.setRenderer(new DefaultListCellRenderer() {
@Override
public final Component getListCellRendererComponent(final JList<?> list,
final Object value, final int index, final boolean isSelected,
final boolean cellHasFocus) {
final Component result = super.getListCellRendererComponent(
list, value, index, isSelected, cellHasFocus);
if (value instanceof Option) {
this.setText(((Option) value).getTranslationKey());
} else if (index < 0 || isSelected) {
this.setText(new File(this.getText()).getName());
}
return result;
}
private static final long serialVersionUID = -3014056515590258107L;
});
this.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
PathSelector.this.action(event);
}
});
}
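/*
* Inserts the path at the front of the list and removes any older
* duplicate; the loop bound skips the trailing "-" separator and the
* option entries, which always stay at the bottom of the model.
*/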
public final PathSelector setPath(final String path) {
final DefaultComboBoxModel<Object> model = (DefaultComboBoxModel<Object>) this.getModel();
model.insertElementAt(path, 0);
for (int i = model.getSize() - this.optionListeners.size() - 1; 0 < i; --i) {
if (model.getElementAt(i).equals(path)) {
model.removeElementAt(i);
}
}
SwingUtilities.invokeLater(() -> this.setSelectedIndex(0));
return this;
}
public final PathSelector setPathListener(final ActionListener listener) {
this.pathListener = listener;
return this;
}
public final PathSelector setOptionListener(final Option option, final ActionListener listener) {
final DefaultComboBoxModel<Object> model = (DefaultComboBoxModel<Object>) this.getModel();
if (this.optionListeners.isEmpty()) {
model.addElement("-");
} else {
this.optionListeners.remove(option);
model.removeElement(option);
}
if (listener != null) {
this.optionListeners.put(option, listener);
model.addElement(option);
}
return this;
}
final void action(final ActionEvent event) {
final Object selectedItem = this.getSelectedItem();
if (selectedItem instanceof Option) {
this.optionListeners.get(selectedItem).actionPerformed(event);
} else if (selectedItem != null && this.pathListener != null) {
this.pathListener.actionPerformed(event);
}
if (!this.optionListeners.isEmpty()
&& this.getItemCount() - this.optionListeners.size() - 1 <= this.getSelectedIndex()) {
this.setSelectedIndex(0);
}
}
private static final long serialVersionUID = 2024380772192514052L;
/**
* @author codistmonk (creation 2015-02-18)
*/
public static enum Option {
NEW {
@Override
public final String getTranslationKey() {
return "New...";
}
}, OPEN {
@Override
public final String getTranslationKey() {
return "Open...";
}
}, SAVE {
@Override
public final String getTranslationKey() {
return "Save";
}
}, DELETE {
@Override
public final String getTranslationKey() {
return "Delete";
}
};
public abstract String getTranslationKey();
}
}
}
| IMJ/src/imj3/draft/processing/VisualAnalysis.java | package imj3.draft.processing;
import static imj3.tools.AwtImage2D.awtRead;
import static imj3.tools.CommonSwingTools.limitHeight;
import static imj3.tools.CommonSwingTools.setModel;
import static java.lang.Math.max;
import static net.sourceforge.aprog.swing.SwingTools.horizontalBox;
import static net.sourceforge.aprog.swing.SwingTools.horizontalSplit;
import static net.sourceforge.aprog.swing.SwingTools.scrollable;
import static net.sourceforge.aprog.swing.SwingTools.verticalBox;
import static net.sourceforge.aprog.tools.Tools.append;
import static net.sourceforge.aprog.tools.Tools.array;
import static net.sourceforge.aprog.tools.Tools.baseName;
import static net.sourceforge.aprog.tools.Tools.join;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.StaxDriver;
import imj2.tools.Canvas;
import imj3.draft.processing.VisualAnalysis.Context.Refresh;
import imj3.draft.segmentation.ImageComponent;
import imj3.draft.segmentation.ImageComponent.Layer;
import imj3.draft.segmentation.ImageComponent.Painter;
import imj3.tools.CommonSwingTools.NestedList;
import imj3.tools.CommonSwingTools.PropertyGetter;
import imj3.tools.CommonSwingTools.PropertySetter;
import imj3.tools.CommonSwingTools.StringGetter;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.dnd.DropTarget;
import java.awt.dnd.DropTargetDropEvent;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.prefs.Preferences;
import javax.imageio.ImageIO;
import javax.swing.AbstractAction;
import javax.swing.Box;
import javax.swing.DefaultComboBoxModel;
import javax.swing.DefaultListCellRenderer;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JSplitPane;
import javax.swing.JTextField;
import javax.swing.JTree;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import net.sourceforge.aprog.swing.SwingTools;
import net.sourceforge.aprog.tools.IllegalInstantiationException;
import net.sourceforge.aprog.tools.Tools;
/**
* @author codistmonk (creation 2015-02-13)
*/
public final class VisualAnalysis {
private VisualAnalysis() {
throw new IllegalInstantiationException();
}
static final Preferences preferences = Preferences.userNodeForPackage(VisualAnalysis.class);
static final XStream xstream = new XStream(new StaxDriver());
public static final String IMAGE_PATH = "image.path";
public static final String GROUND_TRUTH = "groundtruth";
public static final String EXPERIMENT = "experiment";
/**
* @param commandLineArguments
* <br>Unused
*/
public static final void main(final String[] commandLineArguments) {
SwingTools.useSystemLookAndFeel();
final Context context = new Context();
SwingUtilities.invokeLater(new Runnable() {
@Override
public final void run() {
SwingTools.show(new MainPanel(context), VisualAnalysis.class.getSimpleName(), false);
context.setImageFile(new File(preferences.get(IMAGE_PATH, "")));
context.setGroundTruth(preferences.get(GROUND_TRUTH, "gt"));
context.setExperiment(new File(preferences.get(EXPERIMENT, "experiment.xml")));
}
});
}
public static final Component label(final String text, final Component... components) {
return limitHeight(horizontalBox(append(array((Component) new JLabel(text)), components)));
}
public static final <C extends JComponent> C centerX(final C component) {
component.setAlignmentX(Component.CENTER_ALIGNMENT);
return component;
}
public static final JTextField textView(final String text) {
final JTextField result = new JTextField(text);
result.setEditable(false);
return result;
}
public static final JButton button(final String type) {
final JButton result = new JButton(new ImageIcon(Tools.getResourceURL("lib/tango/" + type + ".png")));
final int size = max(result.getIcon().getIconWidth(), result.getIcon().getIconHeight());
result.setPreferredSize(new Dimension(size + 2, size + 2));
return result;
}
/**
* @author codistmonk (creation 2015-02-19)
*/
public static final class FileSelector extends JButton {
private final List<File> files = new ArrayList<>();
private ActionListener fileListener;
{
this.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
FileSelector.this.showPopup();
}
});
this.setMaximumSize(new Dimension(Integer.MAX_VALUE, 26));
this.setHorizontalAlignment(SwingConstants.LEFT);
}
public ActionListener getFileListener() {
return fileListener;
}
public void setFileListener(final ActionListener fileListener) {
this.fileListener = fileListener;
}
public final FileSelector setFile(final File file) {
this.files.remove(file);
this.files.add(0, file);
Tools.debugPrint(file);
// fire the listener only when the selection actually changed
// (getText() still holds the previous file's name at this point)
final boolean changed = !file.equals(new File(this.getText()));
this.setText(file.getName());
if (changed && this.fileListener != null) {
this.fileListener.actionPerformed(new ActionEvent(this, -1, null));
}
return this;
}
final void showPopup() {
final JPopupMenu popup = new JPopupMenu();
for (final File file : this.files) {
popup.add(new JMenuItem(new AbstractAction(file.getPath()) {
@Override
public final void actionPerformed(final ActionEvent event) {
FileSelector.this.setFile(file);
}
private static final long serialVersionUID = 8311454620470586686L;
}));
}
popup.show(this, 0, 0);
}
private static final long serialVersionUID = 7227165282556980768L;
}
/**
* @author codistmonk (creation 2015-02-13)
*/
public static final class MainPanel extends JPanel {
private final Context context;
private final PathSelector imageSelector;
private final JCheckBox imageVisibilitySelector;
private final FileSelector groundTruthSelector;
private final JCheckBox groundTruthVisibilitySelector;
private final PathSelector experimentSelector;
private final JTextField trainingTimeView;
private final JTextField classificationTimeView;
private final JCheckBox classificationVisibilitySelector;
private final JTextField scoreView;
private final JTree tree;
private final JSplitPane mainSplitPane;
private ImageComponent imageComponent;
private Experiment experiment;
public MainPanel(final Context context) {
super(new BorderLayout());
this.context = context;
this.imageSelector = new PathSelector();
this.imageVisibilitySelector = new JCheckBox("", true);
this.groundTruthSelector = new FileSelector();
this.groundTruthVisibilitySelector = new JCheckBox();
this.experimentSelector = new PathSelector();
this.trainingTimeView = textView("-");
this.classificationTimeView = textView("-");
this.classificationVisibilitySelector = new JCheckBox();
this.scoreView = textView("-");
this.tree = new JTree(new DefaultTreeModel(new DefaultMutableTreeNode("No experiment")));
final int padding = this.imageVisibilitySelector.getPreferredSize().width;
final JButton newGroundTruthButton = button("new");
final JButton saveGroundTruthButton = button("save");
final JButton refreshGroundTruthButton = button("refresh");
this.mainSplitPane = horizontalSplit(verticalBox(
label(" Image: ", this.imageSelector, button("open"), button("refresh"), this.imageVisibilitySelector),
label(" Ground truth: ", this.groundTruthSelector, newGroundTruthButton, saveGroundTruthButton, refreshGroundTruthButton, this.groundTruthVisibilitySelector),
// label(" Ground truth: ", new FileSelector().setFile(new File("b/test2")).setFile(new File("a/test1")), button("new"), button("save"), button("refresh"), Box.createHorizontalStrut(padding)),
label(" Experiment: ", this.experimentSelector, button("new"), button("open"), button("save"), button("refresh"), Box.createHorizontalStrut(padding)),
label(" Training (s): ", this.trainingTimeView, button("process"), Box.createHorizontalStrut(padding)),
label(" Classification (s): ", this.classificationTimeView, button("process"), button("save"), button("refresh"), this.classificationVisibilitySelector),
label(" F1: ", this.scoreView, Box.createHorizontalStrut(padding)),
centerX(new JButton("Confusion matrix...")),
scrollable(this.tree)), scrollable(new JLabel("Drop file here")));
this.mainSplitPane.getLeftComponent().setMaximumSize(new Dimension(128, Integer.MAX_VALUE));
this.add(this.mainSplitPane, BorderLayout.CENTER);
this.imageSelector.setOptionListener(PathSelector.Option.OPEN, new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final JFileChooser fileChooser = new JFileChooser(new File(preferences.get(IMAGE_PATH, "")).getParent());
if (JFileChooser.APPROVE_OPTION == fileChooser.showOpenDialog(MainPanel.this)) {
context.setImageFile(fileChooser.getSelectedFile());
}
}
});
this.imageSelector.setPathListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
MainPanel.this.setImage(MainPanel.this.getImageSelector().getSelectedItem().toString());
}
});
this.groundTruthSelector.setFileListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
context.refreshGroundTruthAndClassification(Refresh.FROM_FILE);
}
});
newGroundTruthButton.addActionListener(e -> {
final String name = JOptionPane.showInputDialog("Ground truth name:");
if (name != null && context.getImage() != null) {
try {
ImageIO.write(context.formatGroundTruth().getGroundTruth().getImage(), "png", new File(context.getGroundTruthPath(name)));
} catch (final IOException exception) {
throw new UncheckedIOException(exception);
}
context.setGroundTruth(name);
}
});
saveGroundTruthButton.addActionListener(e -> Tools.debugPrint("TODO"));
this.experimentSelector.setPathListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
try {
MainPanel.this.setExperiment((Experiment) xstream.fromXML(
new File(MainPanel.this.getExperimentSelector().getSelectedItem().toString())));
} catch (final Exception exception) {
Tools.debugError(exception);
}
}
});
this.experimentSelector.setOptionListener(PathSelector.Option.NEW, e -> {
final JFileChooser fileChooser = new JFileChooser(new File(preferences.get(EXPERIMENT, "")).getParentFile());
if (JFileChooser.APPROVE_OPTION == fileChooser.showSaveDialog(this)) {
final File selectedFile = fileChooser.getSelectedFile();
try (final OutputStream output = new FileOutputStream(selectedFile)) {
xstream.toXML(new Experiment(), output);
} catch (final IOException exception) {
throw new UncheckedIOException(exception);
}
context.setExperiment(selectedFile);
}
});
this.experimentSelector.setOptionListener(PathSelector.Option.OPEN, e -> Tools.debugPrint("TODO"));
this.experimentSelector.setOptionListener(PathSelector.Option.SAVE, e -> Tools.debugPrint("TODO"));
this.imageVisibilitySelector.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final Layer imageLayer = MainPanel.this.getImageComponent().getLayers().get(0);
final Painter imagePainter = imageLayer.getPainters().get(0);
final boolean imageVisible = MainPanel.this.getImageVisibilitySelector().isSelected();
if (!imageVisible) {
imageLayer.getCanvas().clear(Color.GRAY);
}
imagePainter.getActive().set(imageVisible);
imagePainter.getUpdateNeeded().set(true);
MainPanel.this.getImageComponent().repaint();
}
});
this.groundTruthVisibilitySelector.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final Layer groundTruthLayer = MainPanel.this.getImageComponent().getLayers().get(1);
final Painter groundTruthPainter = groundTruthLayer.getPainters().get(0);
final boolean groundTruthVisible = MainPanel.this.getGroundTruthVisibilitySelector().isSelected();
groundTruthPainter.getActive().set(groundTruthVisible);
groundTruthPainter.getUpdateNeeded().set(true);
MainPanel.this.getImageComponent().repaint();
}
});
this.classificationVisibilitySelector.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
final Layer classificationLayer = MainPanel.this.getImageComponent().getLayers().get(2);
final Painter classificationPainter = classificationLayer.getPainters().get(0);
final boolean classificationVisible = MainPanel.this.getClassificationVisibilitySelector().isSelected();
classificationPainter.getActive().set(classificationVisible);
classificationPainter.getUpdateNeeded().set(true);
MainPanel.this.getImageComponent().repaint();
}
});
this.setDropTarget(new DropTarget() {
@Override
public final synchronized void drop(final DropTargetDropEvent event) {
final File file = SwingTools.getFiles(event).get(0);
// dropped experiment files (.xml) are not handled yet; only images are opened
if (!file.getName().toLowerCase(Locale.ENGLISH).endsWith(".xml")) {
context.setImageFile(file);
}
}
/**
* {@value}.
*/
private static final long serialVersionUID = 5442000733451223725L;
});
this.setPreferredSize(new Dimension(800, 600));
context.setMainPanel(this);
}
public final Context getContext() {
return this.context;
}
public final Experiment getExperiment() {
return this.experiment;
}
public final void setExperiment(final Experiment experiment) {
this.experiment = experiment;
setModel(this.tree, experiment, "Experiment");
}
public final ImageComponent getImageComponent() {
return this.imageComponent;
}
final void setImage(final String path) {
this.imageComponent = new ImageComponent(awtRead(path));
this.getImageComponent().addLayer().getPainters().add(new Painter.Abstract() {
@Override
public final void paint(final Canvas canvas) {
canvas.getGraphics().drawImage(MainPanel.this.getContext().getGroundTruth().getImage(), 0, 0, null);
}
private static final long serialVersionUID = 4700895082820237288L;
});
this.getImageComponent().addLayer().getPainters().add(new Painter.Abstract() {
@Override
public final void paint(final Canvas canvas) {
canvas.getGraphics().drawImage(MainPanel.this.getContext().getClassification().getImage(), 0, 0, null);
}
private static final long serialVersionUID = 7941391067177261093L;
});
this.setContents(this.getImageComponent());
}
public final PathSelector getImageSelector() {
return this.imageSelector;
}
public final JTree getTree() {
return this.tree;
}
public final JCheckBox getImageVisibilitySelector() {
return this.imageVisibilitySelector;
}
public final FileSelector getGroundTruthSelector() {
return this.groundTruthSelector;
}
public final JCheckBox getGroundTruthVisibilitySelector() {
return this.groundTruthVisibilitySelector;
}
public final PathSelector getExperimentSelector() {
return this.experimentSelector;
}
public final JTextField getTrainingTimeView() {
return this.trainingTimeView;
}
public final JTextField getClassificationTimeView() {
return this.classificationTimeView;
}
public final JCheckBox getClassificationVisibilitySelector() {
return this.classificationVisibilitySelector;
}
public final JTextField getScoreView() {
return this.scoreView;
}
public final void setContents(final Component component) {
this.mainSplitPane.setRightComponent(scrollable(component));
}
private static final long serialVersionUID = 2173077945563031333L;
public static final int IMAGE_SELECTOR_RESERVED_SLOTS = 2;
}
/**
* @author codistmonk (creation 2015-02-13)
*/
public static final class Context implements Serializable {
private MainPanel mainPanel;
private Experiment experiment;
private final Canvas groundTruth = new Canvas();
private final Canvas classification = new Canvas();
public final MainPanel getMainPanel() {
return this.mainPanel;
}
public final void setMainPanel(final MainPanel mainPanel) {
this.mainPanel = mainPanel;
}
public final File getExperimentFile() {
return new File(this.getMainPanel().getExperimentSelector().getSelectedItem().toString());
}
public final String getGroundTruthName() {
return this.getMainPanel().getGroundTruthSelector().getText();
}
public final Context setGroundTruthName(final String groundTruthName) {
this.getMainPanel().getGroundTruthSelector().setFile(new File(groundTruthName));
return this;
}
public final Experiment getExperiment() {
return this.experiment;
}
public final Context setExperiment(final Experiment experiment) {
this.experiment = experiment;
return this;
}
public final BufferedImage getImage() {
final MainPanel mainPanel = this.getMainPanel();
final ImageComponent imageComponent = mainPanel == null ? null : mainPanel.getImageComponent();
return imageComponent == null ? null : imageComponent.getImage();
}
public final Canvas getGroundTruth() {
return this.groundTruth;
}
public final Context formatGroundTruth() {
return format(this.getGroundTruth());
}
public final Canvas getClassification() {
return this.classification;
}
public final String getExperimentName() {
return baseName(this.getExperimentFile().getName());
}
public final File getImageFile() {
return new File(this.getMainPanel().getImageSelector().getSelectedItem().toString());
}
public final String getGroundTruthPath() {
return this.getGroundTruthPath(this.getGroundTruthName());
}
public final String getGroundTruthPath(final String name) {
return baseName(this.getImageFile().getPath()) + "_groundtruth_" + name + ".png";
}
public final String getClassificationPath() {
return baseName(this.getImageFile().getPath()) + "_classification_" + this.getGroundTruthName() + "_" + this.getExperimentName() + ".png";
}
public final void refreshGroundTruthAndClassification(final Refresh refresh) {
final BufferedImage image = this.getImage();
if (image == null) {
return;
}
System.out.println(Tools.debug(Tools.DEBUG_STACK_OFFSET + 1));
Tools.debugPrint(this.getGroundTruthName());
final int imageWidth = image.getWidth();
final int imageHeight = image.getHeight();
this.getGroundTruth().setFormat(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB);
this.getClassification().setFormat(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB);
switch (refresh) {
case CLEAR:
this.getGroundTruth().clear(CLEAR);
this.getClassification().clear(CLEAR);
break;
case FROM_FILE:
{
{
final String groundTruthPath = this.getGroundTruthPath();
if (new File(groundTruthPath).isFile()) {
this.getGroundTruth().getGraphics().drawImage(awtRead(groundTruthPath), 0, 0, null);
} else {
this.getGroundTruth().clear(CLEAR);
}
}
{
final String classificationPath = this.getClassificationPath();
if (new File(classificationPath).isFile()) {
this.getClassification().getGraphics().drawImage(awtRead(classificationPath), 0, 0, null);
} else {
this.getClassification().clear(CLEAR);
}
}
break;
}
case NOP:
break;
}
}
public final Context setImageFile(final File imageFile) {
System.out.println(Tools.debug(Tools.DEBUG_STACK_OFFSET + 1, imageFile));
final File oldImageFile = this.getImageFile();
if (imageFile.isFile() && !imageFile.equals(oldImageFile)) {
this.getMainPanel().getImageSelector().setPath(imageFile.getPath());
this.refreshGroundTruthAndClassification(Refresh.FROM_FILE);
preferences.put(IMAGE_PATH, imageFile.getPath());
}
return this;
}
public final Context setGroundTruth(final String name) {
if (new File(this.getGroundTruthPath(name)).isFile()) {
this.getMainPanel().getGroundTruthSelector().setFile(new File(name));
preferences.put(GROUND_TRUTH, name);
}
return this;
}
public final Context setExperiment(final File experimentFile) {
if (experimentFile.isFile()) {
this.getMainPanel().getExperimentSelector().setPath(experimentFile.getPath());
preferences.put(EXPERIMENT, experimentFile.getPath());
}
return this;
}
private final Context format(final Canvas canvas) {
final BufferedImage image = this.getImage();
if (image != null) {
canvas.setFormat(image.getWidth(), image.getHeight(), BufferedImage.TYPE_INT_ARGB);
} else {
canvas.setFormat(1, 1, BufferedImage.TYPE_INT_ARGB);
}
canvas.clear(CLEAR);
return this;
}
private static final long serialVersionUID = -2487965125442868238L;
public static final Color CLEAR = new Color(0, true);
/**
* @author codistmonk (creation 2015-02-17)
*/
public static enum Refresh {
NOP, CLEAR, FROM_FILE;
}
}
/**
* @author codistmonk (creation 2015-02-16)
*/
public static final class Experiment implements Serializable {
private final List<ClassDescription> classDescriptions = new ArrayList<>();
private final List<TrainingField> trainingFields = new ArrayList<>();
@Override
public final String toString() {
return "Experiment";
}
@NestedList(name="classes", element="class", elementClass=ClassDescription.class)
public final List<ClassDescription> getClassDescriptions() {
return this.classDescriptions;
}
@NestedList(name="training", element="training field", elementClass=TrainingField.class)
public final List<TrainingField> getTrainingFields() {
return this.trainingFields;
}
private static final long serialVersionUID = -4539259556658072410L;
/**
* @author codistmonk (creation 2015-02-16)
*/
public static final class ClassDescription implements Serializable {
private String name = "class";
private int label = 0xFF000000;
@StringGetter
@PropertyGetter("name")
public final String getName() {
return this.name;
}
@PropertySetter("name")
public final ClassDescription setName(final String name) {
this.name = name;
return this;
}
public final int getLabel() {
return this.label;
}
public final ClassDescription setLabel(final int label) {
this.label = label;
return this;
}
@PropertyGetter("label")
public final String getLabelAsString() {
return "#" + Integer.toHexString(this.getLabel()).toUpperCase(Locale.ENGLISH);
}
@PropertySetter("label")
public final ClassDescription setLabel(final String labelAsString) {
return this.setLabel((int) Long.parseLong(labelAsString.substring(1), 16));
}
private static final long serialVersionUID = 4974707407567297906L;
}
/**
* @author codistmonk (creation 2015-02-17)
*/
public static final class TrainingField implements Serializable {
private String imagePath = "";
private final Rectangle bounds = new Rectangle();
@PropertyGetter("image")
public final String getImagePath() {
return this.imagePath;
}
@PropertySetter("image")
public final TrainingField setImagePath(final String imagePath) {
this.imagePath = imagePath;
return this;
}
public final Rectangle getBounds() {
return this.bounds;
}
@PropertyGetter("bounds")
public final String getBoundsAsString() {
return join(",", this.getBounds().x, this.getBounds().y, this.getBounds().width, this.getBounds().height);
}
@PropertySetter("bounds")
public final TrainingField setBounds(final String boundsAsString) {
final int[] bounds = Arrays.stream(boundsAsString.split(",")).mapToInt(Integer::parseInt).toArray();
this.getBounds().setBounds(bounds[0], bounds[1], bounds[2], bounds[3]);
return this;
}
@Override
public final String toString() {
return new File(this.getImagePath()).getName() + "[" + this.getBoundsAsString() + "]";
}
private static final long serialVersionUID = 847822079141878928L;
}
}
/**
* @author codistmonk (creation 2015-02-18)
*/
public static final class PathSelector extends JComboBox<Object> {
private final Map<Option, ActionListener> optionListeners;
private ActionListener pathListener;
public PathSelector() {
this.optionListeners = new LinkedHashMap<>();
this.setRenderer(new DefaultListCellRenderer() {
@Override
public final Component getListCellRendererComponent(final JList<?> list,
final Object value, final int index, final boolean isSelected,
final boolean cellHasFocus) {
final Component result = super.getListCellRendererComponent(
list, value, index, isSelected, cellHasFocus);
if (value instanceof Option) {
this.setText(((Option) value).getTranslationKey());
} else if (index < 0 || isSelected) {
this.setText(new File(this.getText()).getName());
}
return result;
}
private static final long serialVersionUID = -3014056515590258107L;
});
this.addActionListener(new ActionListener() {
@Override
public final void actionPerformed(final ActionEvent event) {
PathSelector.this.action(event);
}
});
}
public final PathSelector setPath(final String path) {
final DefaultComboBoxModel<Object> model = (DefaultComboBoxModel<Object>) this.getModel();
model.insertElementAt(path, 0);
for (int i = model.getSize() - this.optionListeners.size() - 1; 0 < i; --i) {
if (model.getElementAt(i).equals(path)) {
model.removeElementAt(i);
}
}
SwingUtilities.invokeLater(() -> this.setSelectedIndex(0));
return this;
}
public final PathSelector setPathListener(final ActionListener listener) {
this.pathListener = listener;
return this;
}
public final PathSelector setOptionListener(final Option option, final ActionListener listener) {
final DefaultComboBoxModel<Object> model = (DefaultComboBoxModel<Object>) this.getModel();
if (this.optionListeners.isEmpty()) {
model.addElement("-");
} else {
this.optionListeners.remove(option);
model.removeElement(option);
}
if (listener != null) {
this.optionListeners.put(option, listener);
model.addElement(option);
}
return this;
}
final void action(final ActionEvent event) {
final Object selectedItem = this.getSelectedItem();
if (selectedItem instanceof Option) {
this.optionListeners.get(selectedItem).actionPerformed(event);
} else if (selectedItem != null && this.pathListener != null) {
this.pathListener.actionPerformed(event);
}
if (!this.optionListeners.isEmpty()
&& this.getItemCount() - this.optionListeners.size() - 1 <= this.getSelectedIndex()) {
this.setSelectedIndex(0);
}
}
private static final long serialVersionUID = 2024380772192514052L;
/**
* @author codistmonk (creation 2015-02-18)
*/
public static enum Option {
NEW {
@Override
public final String getTranslationKey() {
return "New...";
}
}, OPEN {
@Override
public final String getTranslationKey() {
return "Open...";
}
}, SAVE {
@Override
public final String getTranslationKey() {
return "Save";
}
}, DELETE {
@Override
public final String getTranslationKey() {
return "Delete";
}
};
public abstract String getTranslationKey();
}
}
}
| [IMJ][imj3][processing] Updated VisualAnalysis.
| IMJ/src/imj3/draft/processing/VisualAnalysis.java | [IMJ][imj3][processing] Updated VisualAnalysis. | <ide><path>MJ/src/imj3/draft/processing/VisualAnalysis.java
<ide>
<ide> private final Context context;
<ide>
<del> private final PathSelector imageSelector;
<add> private final FileSelector imageSelector;
<ide>
<ide> private final JCheckBox imageVisibilitySelector;
<ide>
<ide> super(new BorderLayout());
<ide>
<ide> this.context = context;
<del> this.imageSelector = new PathSelector();
<add> this.imageSelector = new FileSelector();
<ide> this.imageVisibilitySelector = new JCheckBox("", true);
<ide> this.groundTruthSelector = new FileSelector();
<ide> this.groundTruthVisibilitySelector = new JCheckBox();
<ide> this.tree = new JTree(new DefaultTreeModel(new DefaultMutableTreeNode("No experiment")));
<ide>
<ide> final int padding = this.imageVisibilitySelector.getPreferredSize().width;
<add> final JButton openImageButton = button("open");
<ide> final JButton newGroundTruthButton = button("new");
<ide> final JButton saveGroundTruthButton = button("save");
<ide> final JButton refreshGroundTruthButton = button("refresh");
<ide>
<ide> this.mainSplitPane = horizontalSplit(verticalBox(
<del> label(" Image: ", this.imageSelector, button("open"), button("refresh"), this.imageVisibilitySelector),
<add> label(" Image: ", this.imageSelector, openImageButton, this.imageVisibilitySelector),
<ide> label(" Ground truth: ", this.groundTruthSelector, newGroundTruthButton, saveGroundTruthButton, refreshGroundTruthButton, this.groundTruthVisibilitySelector),
<del>// label(" Ground truth: ", new FileSelector().setFile(new File("b/test2")).setFile(new File("a/test1")), button("new"), button("save"), button("refresh"), Box.createHorizontalStrut(padding)),
<ide> label(" Experiment: ", this.experimentSelector, button("new"), button("open"), button("save"), button("refresh"), Box.createHorizontalStrut(padding)),
<ide> label(" Training (s): ", this.trainingTimeView, button("process"), Box.createHorizontalStrut(padding)),
<ide> label(" Classification (s): ", this.classificationTimeView, button("process"), button("save"), button("refresh"), this.classificationVisibilitySelector),
<ide> this.mainSplitPane.getLeftComponent().setMaximumSize(new Dimension(128, Integer.MAX_VALUE));
<ide> this.add(this.mainSplitPane, BorderLayout.CENTER);
<ide>
<del> this.imageSelector.setOptionListener(PathSelector.Option.OPEN, new ActionListener() {
<add> openImageButton.addActionListener(new ActionListener() {
<ide>
<ide> @Override
<ide> public final void actionPerformed(final ActionEvent event) {
<ide> }
<ide>
<ide> });
<del> this.imageSelector.setPathListener(new ActionListener() {
<add> this.imageSelector.setFileListener(new ActionListener() {
<ide>
<ide> @Override
<ide> public final void actionPerformed(final ActionEvent event) {
<del> MainPanel.this.setImage(MainPanel.this.getImageSelector().getSelectedItem().toString());
<add> context.setImageFile(new File(MainPanel.this.getImageSelector().getText()));
<ide> }
<ide>
<ide> });
<ide> this.setContents(this.getImageComponent());
<ide> }
<ide>
<del> public final PathSelector getImageSelector() {
<add> public final FileSelector getImageSelector() {
<ide> return this.imageSelector;
<ide> }
<ide>
<ide> }
<ide>
<ide> public final File getImageFile() {
<del> return new File(this.getMainPanel().getImageSelector().getSelectedItem().toString());
<add> return new File(this.getMainPanel().getImageSelector().getText());
<ide> }
<ide>
<ide> public final String getGroundTruthPath() {
<ide> final File oldImageFile = this.getImageFile();
<ide>
<ide> if (imageFile.isFile() && !imageFile.equals(oldImageFile)) {
<del> this.getMainPanel().getImageSelector().setPath(imageFile.getPath());
<add> this.getMainPanel().setImage(imageFile.getPath());
<add> this.getMainPanel().getImageSelector().setFile(imageFile);
<ide>
<ide> this.refreshGroundTruthAndClassification(Refresh.FROM_FILE);
<ide> |
|
Java | lgpl-2.1 | e3184701e8679194cafdaa079fd5b9b5a79fed81 | 0 | wolfgangmm/exist,lcahlander/exist,olvidalo/exist,dizzzz/exist,patczar/exist,wshager/exist,MjAbuz/exist,shabanovd/exist,MjAbuz/exist,dizzzz/exist,kohsah/exist,wolfgangmm/exist,ljo/exist,windauer/exist,windauer/exist,dizzzz/exist,opax/exist,lcahlander/exist,kohsah/exist,eXist-db/exist,MjAbuz/exist,zwobit/exist,jessealama/exist,jensopetersen/exist,adamretter/exist,lcahlander/exist,lcahlander/exist,dizzzz/exist,shabanovd/exist,wshager/exist,eXist-db/exist,zwobit/exist,shabanovd/exist,wshager/exist,windauer/exist,wolfgangmm/exist,patczar/exist,jessealama/exist,ambs/exist,adamretter/exist,adamretter/exist,ljo/exist,jensopetersen/exist,eXist-db/exist,ambs/exist,adamretter/exist,jessealama/exist,RemiKoutcherawy/exist,wshager/exist,dizzzz/exist,ambs/exist,zwobit/exist,joewiz/exist,lcahlander/exist,jessealama/exist,RemiKoutcherawy/exist,eXist-db/exist,MjAbuz/exist,zwobit/exist,wolfgangmm/exist,windauer/exist,zwobit/exist,ljo/exist,ljo/exist,opax/exist,opax/exist,windauer/exist,ambs/exist,patczar/exist,olvidalo/exist,RemiKoutcherawy/exist,eXist-db/exist,jessealama/exist,ambs/exist,RemiKoutcherawy/exist,wolfgangmm/exist,eXist-db/exist,lcahlander/exist,ljo/exist,hungerburg/exist,jessealama/exist,dizzzz/exist,jensopetersen/exist,patczar/exist,hungerburg/exist,MjAbuz/exist,adamretter/exist,olvidalo/exist,shabanovd/exist,hungerburg/exist,RemiKoutcherawy/exist,joewiz/exist,joewiz/exist,zwobit/exist,opax/exist,joewiz/exist,hungerburg/exist,patczar/exist,joewiz/exist,wshager/exist,kohsah/exist,hungerburg/exist,ambs/exist,kohsah/exist,RemiKoutcherawy/exist,kohsah/exist,olvidalo/exist,kohsah/exist,shabanovd/exist,wshager/exist,jensopetersen/exist,olvidalo/exist,windauer/exist,joewiz/exist,adamretter/exist,jensopetersen/exist,wolfgangmm/exist,patczar/exist,opax/exist,jensopetersen/exist,MjAbuz/exist,shabanovd/exist,ljo/exist | package org.exist.xquery;
import com.sun.xacml.ctx.RequestCtx;
import java.io.Writer;
import java.util.List;
import org.exist.dom.DocumentSet;
import org.exist.dom.QName;
import org.exist.security.PermissionDeniedException;
import org.exist.security.xacml.ExistPDP;
import org.exist.xquery.parser.XQueryAST;
import org.exist.xquery.util.ExpressionDumper;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.Sequence;
/**
* Wrapper for internal modules in order to
* perform access control checks on internal
* module function calls. It delegates to
* the wrapped <code>Function</code> for
* everything, but checks permission before
* delegating <code>eval</code>
*/
public class InternalFunctionCall extends Function
{
private Function function;
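/*
* No explicit null check is needed here: the super(...) call below
* already dereferences f, so a null argument fails fast with a
* NullPointerException before any check could run (which is why the
* former explicit check was removed).
*/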
public InternalFunctionCall(Function f)
{
super(f.getContext(), f.getSignature());
this.function = f;
}
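/*
* Before delegating to the wrapped function, builds an XACML request
* for the function's QName and lets the PDP evaluate it; a
* PermissionDeniedException is converted into an XPathException so
* that unauthorized internal-module calls fail inside the query.
*/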
public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException
{
QName functionName = function.getName();
//check access to the method
try {
ExistPDP pdp = getContext().getPDP();
if(pdp != null) {
RequestCtx request = pdp.getRequestHelper().createFunctionRequest(context, null, functionName);
if(request != null)
pdp.evaluate(request);
}
} catch (PermissionDeniedException pde) {
throw new XPathException(function.getASTNode(), "Access to function '" + functionName + "' denied.", pde);
}
return function.eval(contextSequence, contextItem);
}
public int getArgumentCount()
{
return function.getArgumentCount();
}
public QName getName()
{
return function.getName();
}
public int returnsType()
{
return function.returnsType();
}
public int getCardinality()
{
return function.getCardinality();
}
/* (non-Javadoc)
* @see org.exist.xquery.Function#analyze(org.exist.xquery.AnalyzeContextInfo)
*/
public void analyze(AnalyzeContextInfo contextInfo) throws XPathException
{
contextInfo.setParent(this);
function.analyze(contextInfo);
}
public void setParent(Expression parent)
{
function.setParent(parent);
}
public Expression getParent()
{
return function.getParent();
}
public XQueryContext getContext()
{
return function.getContext();
}
public void setASTNode(XQueryAST ast)
{
function.setASTNode(ast);
}
public XQueryAST getASTNode()
{
return function.getASTNode();
}
public void add(Expression s)
{
function.add(s);
}
public void add(PathExpr path)
{
function.add(path);
}
public void addPath(PathExpr path)
{
function.addPath(path);
}
public void addPredicate(Predicate pred)
{
function.addPredicate(pred);
}
public void dump(ExpressionDumper dumper)
{
function.dump(dumper);
}
public void dump(Writer writer)
{
function.dump(writer);
}
public Expression getArgument(int pos)
{
return function.getArgument(pos);
}
public Sequence[] getArguments(Sequence contextSequence, Item contextItem) throws XPathException
{
return function.getArguments(contextSequence, contextItem);
}
public DocumentSet getContextDocSet()
{
return function.getContextDocSet();
}
public int getDependencies()
{
return function.getDependencies();
}
public DocumentSet getDocumentSet()
{
return function.getDocumentSet();
}
public Expression getExpression(int pos)
{
return function.getExpression(pos);
}
public Expression getLastExpression()
{
return function.getLastExpression();
}
public int getLength()
{
return function.getLength();
}
public String getLiteralValue()
{
return function.getLiteralValue();
}
public FunctionSignature getSignature()
{
return function.getSignature();
}
public boolean isCalledAs(String localName)
{
return function.isCalledAs(localName);
}
public boolean isValid()
{
return function.isValid();
}
public void replaceLastExpression(Expression s)
{
function.replaceLastExpression(s);
}
public void reset()
{
function.reset();
}
public void resetState()
{
function.resetState();
}
public void setArguments(List arguments) throws XPathException
{
function.setArguments(arguments);
}
public void setContext(XQueryContext context)
{
function.setContext(context);
}
public void setContextDocSet(DocumentSet contextSet)
{
function.setContextDocSet(contextSet);
}
public String toString()
{
return function.toString();
}
} | src/org/exist/xquery/InternalFunctionCall.java | package org.exist.xquery;
import com.sun.xacml.ctx.RequestCtx;
import java.io.Writer;
import java.util.List;
import org.exist.dom.DocumentSet;
import org.exist.dom.QName;
import org.exist.security.PermissionDeniedException;
import org.exist.security.xacml.ExistPDP;
import org.exist.xquery.parser.XQueryAST;
import org.exist.xquery.util.ExpressionDumper;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.Sequence;
/**
* Wrapper for internal modules in order to
* perform access control checks on internal
* module function calls. It delegates to
* the wrapped <code>Function</code> for
* everything, but checks permission before
* delegating <code>eval</code>
*/
public class InternalFunctionCall extends Function
{
private Function function;
public InternalFunctionCall(Function f)
{
super(f.getContext(), f.getSignature());
if(f == null)
throw new NullPointerException("Function cannot be null");
this.function = f;
}
public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException
{
QName functionName = function.getName();
//check access to the method
try {
ExistPDP pdp = getContext().getPDP();
if(pdp != null) {
RequestCtx request = pdp.getRequestHelper().createFunctionRequest(context, null, functionName);
if(request != null)
pdp.evaluate(request);
}
} catch (PermissionDeniedException pde) {
throw new XPathException(function.getASTNode(), "Access to function '" + functionName + "' denied.", pde);
}
return function.eval(contextSequence, contextItem);
}
public int getArgumentCount()
{
return function.getArgumentCount();
}
public QName getName()
{
return function.getName();
}
public int returnsType()
{
return function.returnsType();
}
public int getCardinality()
{
return function.getCardinality();
}
/* (non-Javadoc)
* @see org.exist.xquery.Function#analyze(org.exist.xquery.AnalyzeContextInfo)
*/
public void analyze(AnalyzeContextInfo contextInfo) throws XPathException
{
contextInfo.setParent(this);
function.analyze(contextInfo);
}
public void setParent(Expression parent)
{
function.setParent(parent);
}
public Expression getParent()
{
return function.getParent();
}
public XQueryContext getContext()
{
return function.getContext();
}
public void setASTNode(XQueryAST ast)
{
function.setASTNode(ast);
}
public XQueryAST getASTNode()
{
return function.getASTNode();
}
public void add(Expression s)
{
function.add(s);
}
public void add(PathExpr path)
{
function.add(path);
}
public void addPath(PathExpr path)
{
function.addPath(path);
}
public void addPredicate(Predicate pred)
{
function.addPredicate(pred);
}
public void dump(ExpressionDumper dumper)
{
function.dump(dumper);
}
public void dump(Writer writer)
{
function.dump(writer);
}
public Expression getArgument(int pos)
{
return function.getArgument(pos);
}
public Sequence[] getArguments(Sequence contextSequence, Item contextItem) throws XPathException
{
return function.getArguments(contextSequence, contextItem);
}
public DocumentSet getContextDocSet()
{
return function.getContextDocSet();
}
public int getDependencies()
{
return function.getDependencies();
}
public DocumentSet getDocumentSet()
{
return function.getDocumentSet();
}
public Expression getExpression(int pos)
{
return function.getExpression(pos);
}
public Expression getLastExpression()
{
return function.getLastExpression();
}
public int getLength()
{
return function.getLength();
}
public String getLiteralValue()
{
return function.getLiteralValue();
}
public FunctionSignature getSignature()
{
return function.getSignature();
}
public boolean isCalledAs(String localName)
{
return function.isCalledAs(localName);
}
public boolean isValid()
{
return function.isValid();
}
public void replaceLastExpression(Expression s)
{
function.replaceLastExpression(s);
}
public void reset()
{
function.reset();
}
public void resetState()
{
function.resetState();
}
public void setArguments(List arguments) throws XPathException
{
function.setArguments(arguments);
}
public void setContext(XQueryContext context)
{
function.setContext(context);
}
public void setContextDocSet(DocumentSet contextSet)
{
function.setContextDocSet(contextSet);
}
public String toString()
{
return function.toString();
}
} | removed useless check for null in InternalFunctionCall constructor
svn path=/trunk/eXist-1.0/; revision=2603
| src/org/exist/xquery/InternalFunctionCall.java | removed useless check for null in InternalFunctionCall constructor | <ide><path>rc/org/exist/xquery/InternalFunctionCall.java
<ide> public InternalFunctionCall(Function f)
<ide> {
<ide> super(f.getContext(), f.getSignature());
<del> if(f == null)
<del> throw new NullPointerException("Function cannot be null");
<ide> this.function = f;
<ide> }
<ide> public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException |
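
Why the removed check was dead code: super(f.getContext(), f.getSignature()) dereferences f before the if (f == null) test can ever run, so a null argument already fails inside the super(...) call with a NullPointerException. A minimal, self-contained sketch of the pattern (Base and Wrapper are hypothetical classes, not part of the eXist codebase):

public class DeadNullCheckDemo {

    static class Base {
        Base(String context) {
            // the superclass only needs some value derived from the argument
        }
    }

    static class Wrapper extends Base {
        Wrapper(String v) {
            super(v.toUpperCase()); // throws NullPointerException when v == null
            if (v == null) { // dead code: with a null argument, super(...) already threw
                throw new NullPointerException("v cannot be null");
            }
        }
    }

    public static void main(String[] args) {
        try {
            new Wrapper(null);
        } catch (NullPointerException e) {
            // the NPE comes from evaluating super(...)'s argument, not from the explicit check
            System.out.println("NPE raised before the explicit check: " + e);
        }
    }
}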
|
Java | apache-2.0 | 1dc57f02f3f2092c3722d61f429b40ffe8ba8594 | 0 | apache/mina-sshd,apache/mina-sshd,apache/mina-sshd,apache/mina-sshd | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.cli.client;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import java.util.logging.ConsoleHandler;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import org.apache.sshd.cli.CliSupport;
import org.apache.sshd.client.ClientAuthenticationManager;
import org.apache.sshd.client.ClientBuilder;
import org.apache.sshd.client.ClientFactoryManager;
import org.apache.sshd.client.SshClient;
import org.apache.sshd.client.auth.keyboard.UserInteraction;
import org.apache.sshd.client.config.SshClientConfigFileReader;
import org.apache.sshd.client.config.hosts.HostConfigEntry;
import org.apache.sshd.client.config.keys.ClientIdentity;
import org.apache.sshd.client.keyverifier.DefaultKnownHostsServerKeyVerifier;
import org.apache.sshd.client.keyverifier.KnownHostsServerKeyVerifier;
import org.apache.sshd.client.keyverifier.ServerKeyVerifier;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.common.NamedFactory;
import org.apache.sshd.common.PropertyResolver;
import org.apache.sshd.common.PropertyResolverUtils;
import org.apache.sshd.common.SshConstants;
import org.apache.sshd.common.channel.PtyChannelConfiguration;
import org.apache.sshd.common.channel.PtyChannelConfigurationMutator;
import org.apache.sshd.common.channel.PtyMode;
import org.apache.sshd.common.cipher.Cipher;
import org.apache.sshd.common.compression.Compression;
import org.apache.sshd.common.config.ConfigFileReaderSupport;
import org.apache.sshd.common.config.SshConfigFileReader;
import org.apache.sshd.common.config.keys.BuiltinIdentities;
import org.apache.sshd.common.config.keys.KeyUtils;
import org.apache.sshd.common.config.keys.PublicKeyEntry;
import org.apache.sshd.common.kex.KexFactoryManager;
import org.apache.sshd.common.kex.extension.DefaultClientKexExtensionHandler;
import org.apache.sshd.common.kex.extension.KexExtensionHandler;
import org.apache.sshd.common.keyprovider.FileKeyPairProvider;
import org.apache.sshd.common.mac.Mac;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.OsUtils;
import org.apache.sshd.common.util.ValidateUtils;
import org.apache.sshd.common.util.io.NoCloseOutputStream;
import org.apache.sshd.common.util.threads.ThreadUtils;
import org.apache.sshd.core.CoreModuleProperties;
/**
* TODO Add javadoc
*
* @author <a href="mailto:[email protected]">Apache MINA SSHD Project</a>
*/
public abstract class SshClientCliSupport extends CliSupport {
/**
* Command line option used to indicate non-default target port
*/
public static final String SSH_CLIENT_PORT_OPTION = "-p";
protected SshClientCliSupport() {
super();
}
public static boolean isArgumentedOption(String portOption, String argName) {
return portOption.equals(argName)
|| "-io".equals(argName)
|| "-i".equals(argName)
|| "-o".equals(argName)
|| "-l".equals(argName)
|| "-w".equals(argName)
|| "-c".equals(argName)
|| "-m".equals(argName)
|| "-E".equals(argName);
}
// NOTE: ClientSession#getFactoryManager is the SshClient
public static ClientSession setupClientSession(
String portOption, BufferedReader stdin, Level level,
PrintStream stdout, PrintStream stderr, String... args)
throws Exception {
int port = -1;
String host = null;
String login = null;
String password = null;
boolean error = false;
List<Path> identities = new ArrayList<>();
Map<String, Object> options = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
List<NamedFactory<Cipher>> ciphers = null;
List<NamedFactory<Mac>> macs = null;
List<NamedFactory<Compression>> compressions = null;
int numArgs = GenericUtils.length(args);
for (int i = 0; (!error) && (i < numArgs); i++) {
String argName = args[i];
String argVal = null;
if (isArgumentedOption(portOption, argName)) {
i++;
if (i >= numArgs) {
error = showError(stderr, "option requires an argument: " + argName);
break;
}
argVal = args[i];
}
if (portOption.equals(argName)) {
if (port > 0) {
error = showError(stderr, argName + " option value re-specified: " + port);
break;
}
port = Integer.parseInt(argVal);
if (port <= 0) {
error = showError(stderr, "Bad option value for " + argName + ": " + port);
break;
}
} else if ("-w".equals(argName)) {
if (GenericUtils.length(password) > 0) {
error = showError(stderr, argName + " option value re-specified: " + password);
break;
}
password = argVal;
} else if ("-c".equals(argName)) {
ciphers = setupCiphers(argName, argVal, ciphers, stderr);
if (GenericUtils.isEmpty(ciphers)) {
error = true;
break;
}
} else if ("-m".equals(argName)) {
macs = setupMacs(argName, argVal, macs, stderr);
if (GenericUtils.isEmpty(macs)) {
error = true;
break;
}
} else if ("-i".equals(argName)) {
Path idFile = resolveIdentityFile(argVal);
identities.add(idFile);
} else if ("-C".equals(argName)) {
compressions = setupCompressions(argName, argVal, compressions, stderr);
if (GenericUtils.isEmpty(compressions)) {
error = true;
break;
}
} else if ("-o".equals(argName)) {
String opt = argVal;
int idx = opt.indexOf('=');
if (idx <= 0) {
error = showError(stderr, "bad syntax for option: " + opt);
break;
}
String optName = opt.substring(0, idx);
String optValue = opt.substring(idx + 1);
if (HostConfigEntry.IDENTITY_FILE_CONFIG_PROP.equals(optName)) {
Path idFile = resolveIdentityFile(optValue);
identities.add(idFile);
} else {
options.merge(optName, optValue, (a, b) -> a + "," + b);
}
} else if ("-l".equals(argName)) {
if (login != null) {
error = showError(stderr, argName + " option value re-specified: " + login);
break;
}
login = argVal;
} else if (argName.charAt(0) != '-') {
if (host != null) { // assume part of a command following it
break;
}
host = argName;
int pos = host.indexOf('@'); // check if user@host
if (pos > 0) {
if (login == null) {
login = host.substring(0, pos);
host = host.substring(pos + 1);
} else {
error = showError(stderr, "Login already specified using -l option (" + login + "): " + host);
break;
}
}
}
}
if ((!error) && GenericUtils.isEmpty(host)) {
error = showError(stderr, "Hostname not specified");
}
if (error) {
return null;
}
PropertyResolver resolver = PropertyResolverUtils.toPropertyResolver(options);
SshClient client = setupClient(
resolver, ciphers, macs, compressions, identities,
stdin, stdout, stderr, level, args);
if (client == null) {
return null;
}
try {
client.start();
if (login == null) {
login = OsUtils.getCurrentUser();
}
if (port <= 0) {
port = SshConstants.DEFAULT_PORT;
}
// TODO use a configurable wait time
ClientSession session = client.connect(login, host, port)
.verify()
.getSession();
try {
if (GenericUtils.length(password) > 0) {
session.addPasswordIdentity(password);
}
session.auth().verify(CoreModuleProperties.AUTH_TIMEOUT.getRequired(session));
return session;
} catch (Exception e) {
session.close(true);
throw e;
}
} catch (Exception e) {
client.close();
throw e;
}
}
public static Path resolveIdentityFile(String id) throws IOException {
BuiltinIdentities identity = BuiltinIdentities.fromName(id);
if (identity != null) {
String fileName = ClientIdentity.getIdentityFileName(identity.getName());
Path keysFolder = PublicKeyEntry.getDefaultKeysFolderPath();
return keysFolder.resolve(fileName);
} else {
return Paths.get(id);
}
}
public static Map<String, ?> resolveClientEnvironment(PropertyResolver resolver) {
return resolveClientEnvironment(
(resolver == null) ? Collections.emptyMap() : resolver.getProperties());
}
public static Map<String, ?> resolveClientEnvironment(Map<String, ?> options) {
if (GenericUtils.isEmpty(options)) {
return Collections.emptyMap();
}
Map<String, Object> env = Collections.emptyMap();
for (String propName : new String[] { SshClientConfigFileReader.SETENV_PROP, SshClientConfigFileReader.SENDENV_PROP }) {
Object v = options.get(propName);
String s = Objects.toString(v, null);
if (GenericUtils.isEmpty(s)) {
continue;
}
String[] kvp = GenericUtils.split(s, ',');
for (String kve : kvp) {
int pos = kve.indexOf('=');
String key = (pos >= 0) ? kve.substring(0, pos) : kve;
String value = (pos >= 0) ? kve.substring(pos + 1) : "";
if (env.isEmpty()) {
env = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
}
Object prev = env.put(key, value);
if (prev != null) {
continue; // debug breakpoint
}
}
}
return env;
}
public static PtyChannelConfiguration resolveClientPtyOptions(PropertyResolver resolver)
throws IOException, InterruptedException {
return resolveClientPtyOptions(
(resolver == null) ? Collections.emptyMap() : resolver.getProperties());
}
public static PtyChannelConfiguration resolveClientPtyOptions(Map<String, ?> options)
throws IOException, InterruptedException {
Object v = GenericUtils.isEmpty(options)
? null
: options.get(SshClientConfigFileReader.REQUEST_TTY_OPTION);
String s = Objects.toString(v, "auto");
boolean autoDetect = "auto".equalsIgnoreCase(s);
Boolean ptyEnabled = autoDetect ? Boolean.TRUE : PropertyResolverUtils.parseBoolean(s);
if ((ptyEnabled == null) || (!ptyEnabled.booleanValue())) {
return null;
}
PtyChannelConfiguration config = new PtyChannelConfiguration();
if (autoDetect) {
PtyChannelConfigurationMutator.setupSensitiveDefaultPtyConfiguration(config);
}
// TODO add support for height/width, rows/columns and TERM(inal) type
Map<PtyMode, Integer> ptyModes = resolveClientPtyModes(options);
if (GenericUtils.isNotEmpty(ptyModes)) {
config.setPtyModes(ptyModes);
}
return config;
}
public static Map<PtyMode, Integer> resolveClientPtyModes(Map<String, ?> options)
throws IOException, InterruptedException {
Object v = GenericUtils.isEmpty(options) ? null : options.get(PtyMode.class.getSimpleName());
String s = Objects.toString(v, null);
if (GenericUtils.isEmpty(s)) {
return Collections.emptyMap();
}
String[] kvp = GenericUtils.split(s, ',');
Map<PtyMode, Integer> ptyModes = new EnumMap<>(PtyMode.class);
for (String kve : kvp) {
int pos = kve.indexOf('=');
String key = (pos >= 0) ? kve.substring(0, pos) : kve;
PtyMode mode = ValidateUtils.checkNotNull(
PtyMode.fromName(key), "Unknown PTY mode: %s", key);
s = (pos >= 0) ? kve.substring(pos + 1) : "";
Integer value = GenericUtils.isEmpty(s) ? Integer.valueOf(1) : Integer.valueOf(s);
Integer prev = ptyModes.put(mode, value);
if (prev != null) {
continue; // debug breakpoint
}
}
return ptyModes;
}
public static SshClient setupDefaultClient(
PropertyResolver resolver, Level level,
PrintStream stdout, PrintStream stderr, String... args) {
SshClient client = setupIoServiceFactory(
SshClient.setUpDefaultClient(), resolver, level, stdout, stderr, args);
SshConfigFileReader.configureKeyExchanges(client, resolver, true, ClientBuilder.DH2KEX, true);
SshConfigFileReader.configureSignatures(client, resolver, true, true);
SshClientConfigFileReader.setupClientHeartbeat(client, resolver);
return client;
}
// returns null if error encountered
@SuppressWarnings("checkstyle:ParameterNumber")
public static SshClient setupClient(
PropertyResolver resolver,
List<NamedFactory<Cipher>> ciphers,
List<NamedFactory<Mac>> macs,
List<NamedFactory<Compression>> compressions,
Collection<? extends Path> identities,
BufferedReader stdin, PrintStream stdout, PrintStream stderr,
Level level, String[] args)
throws Exception {
if (GenericUtils.isEmpty(ciphers)) {
ciphers = setupCiphers(resolver, stderr);
if (ciphers == null) {
return null;
}
}
if (GenericUtils.isEmpty(macs)) {
macs = setupMacs(resolver, stderr);
if (macs == null) {
return null;
}
}
if (GenericUtils.isEmpty(compressions)) {
compressions = setupCompressions(resolver, stderr);
if (compressions == null) {
return null;
}
}
SshClient client = setupDefaultClient(resolver, level, stdout, stderr, args);
if (client == null) {
return null;
}
try {
if (GenericUtils.size(ciphers) > 0) {
client.setCipherFactories(ciphers);
}
if (GenericUtils.size(macs) > 0) {
client.setMacFactories(macs);
}
if (GenericUtils.size(compressions) > 0) {
client.setCompressionFactories(compressions);
}
try {
setupSessionIdentities(client, identities, stdin, stdout, stderr);
} catch (Throwable t) { // show but do not fail the setup - maybe a password can be used
showError(stderr, t.getClass().getSimpleName() + " while loading user keys: " + t.getMessage());
}
setupServerKeyVerifier(client, resolver, stdin, stdout, stderr);
setupUserAuthFactories(client, resolver);
setupSessionUserInteraction(client, stdin, stdout, stderr);
setupSessionExtensions(client, resolver, stdin, stdout, stderr);
Map<String, ?> options = resolver.getProperties();
if (GenericUtils.isNotEmpty(options)) {
Map<String, Object> props = client.getProperties();
props.putAll(options);
}
return client;
} catch (Throwable t) {
showError(stderr, "Failed (" + t.getClass().getSimpleName() + ") to setup client: " + t.getMessage());
client.close();
return null;
}
}
public static FileKeyPairProvider setupSessionIdentities(
ClientFactoryManager client, Collection<? extends Path> identities,
BufferedReader stdin, PrintStream stdout, PrintStream stderr)
throws Throwable {
client.setFilePasswordProvider((session, file, index) -> {
stdout.print("Enter password for private key file=" + file + ": ");
return stdin.readLine();
});
if (GenericUtils.isEmpty(identities)) {
return null;
}
FileKeyPairProvider provider = new FileKeyPairProvider() {
@Override
public String toString() {
return FileKeyPairProvider.class.getSimpleName() + "[clientIdentitiesProvider]";
}
};
provider.setPaths(identities);
client.setKeyIdentityProvider(provider);
return provider;
}
public static UserInteraction setupSessionUserInteraction(
ClientAuthenticationManager client, BufferedReader stdin, PrintStream stdout, PrintStream stderr) {
UserInteraction ui = new UserInteraction() {
@Override
public boolean isInteractionAllowed(ClientSession session) {
return true;
}
@Override
public void serverVersionInfo(ClientSession session, List<String> lines) {
for (String l : lines) {
stdout.append('\t').println(l);
}
}
@Override
public void welcome(ClientSession clientSession, String banner, String lang) {
stdout.println(banner);
}
@Override
public String[] interactive(
ClientSession clientSession, String name, String instruction,
String lang, String[] prompt, boolean[] echo) {
int numPrompts = GenericUtils.length(prompt);
String[] answers = new String[numPrompts];
try {
for (int i = 0; i < numPrompts; i++) {
stdout.append(prompt[i]).print(" ");
answers[i] = stdin.readLine();
}
} catch (IOException e) {
stderr.append("WARNING: ").append(e.getClass().getSimpleName())
.append(" while read prompts: ").println(e.getMessage());
}
return answers;
}
@Override
public String getUpdatedPassword(ClientSession clientSession, String prompt, String lang) {
stdout.append(prompt).print(" ");
try {
return stdin.readLine();
} catch (IOException e) {
stderr.append("WARNING: ").append(e.getClass().getSimpleName())
.append(" while read password: ").println(e.getMessage());
return null;
}
}
};
client.setUserInteraction(ui);
return ui;
}
public static void setupSessionExtensions(
KexFactoryManager manager, PropertyResolver resolver,
BufferedReader stdin, PrintStream stdout, PrintStream stderr)
throws Exception {
Map<String, ?> options = resolver.getProperties();
String kexExtension = Objects.toString(
options.remove(KexExtensionHandler.class.getSimpleName()), null);
if (GenericUtils.isEmpty(kexExtension)) {
return;
}
if ("default".equalsIgnoreCase(kexExtension)) {
manager.setKexExtensionHandler(DefaultClientKexExtensionHandler.INSTANCE);
stdout.println("Using " + DefaultClientKexExtensionHandler.class.getSimpleName());
} else {
ClassLoader cl = ThreadUtils.resolveDefaultClassLoader(KexExtensionHandler.class);
try {
Class<?> clazz = cl.loadClass(kexExtension);
KexExtensionHandler handler = KexExtensionHandler.class.cast(clazz.newInstance());
manager.setKexExtensionHandler(handler);
} catch (Exception e) {
stderr.append("ERROR: Failed (").append(e.getClass().getSimpleName()).append(')')
.append(" to instantiate KEX extension handler=").append(kexExtension)
.append(": ").println(e.getMessage());
stderr.flush();
throw e;
}
stdout.println("Using " + KexExtensionHandler.class.getSimpleName() + "=" + kexExtension);
}
}
public static ServerKeyVerifier setupServerKeyVerifier(
ClientAuthenticationManager manager, PropertyResolver resolver,
BufferedReader stdin, PrintStream stdout, PrintStream stderr) {
ServerKeyVerifier current = manager.getServerKeyVerifier();
if (current == null) {
current = ClientBuilder.DEFAULT_SERVER_KEY_VERIFIER;
manager.setServerKeyVerifier(current);
}
Map<String, ?> options = resolver.getProperties();
String strictValue = Objects.toString(
options.remove(KnownHostsServerKeyVerifier.STRICT_CHECKING_OPTION), "true");
if (!ConfigFileReaderSupport.parseBooleanValue(strictValue)) {
return current;
}
String filePath = Objects.toString(
options.remove(KnownHostsServerKeyVerifier.KNOWN_HOSTS_FILE_OPTION), null);
if (GenericUtils.isEmpty(filePath)) {
current = new DefaultKnownHostsServerKeyVerifier(current);
} else { // if user specifies a different location than default be lenient
current = new DefaultKnownHostsServerKeyVerifier(current, false, Paths.get(filePath));
}
((KnownHostsServerKeyVerifier) current).setModifiedServerKeyAcceptor(
(clientSession, remoteAddress, entry, expected, actual) -> {
stderr.append("WARNING: Mismatched keys presented by ").append(Objects.toString(remoteAddress))
.append(" for entry=").println(entry);
stderr.append(" ").append("Expected=").append(KeyUtils.getKeyType(expected))
.append('-').println(KeyUtils.getFingerPrint(expected));
stderr.append(" ").append("Actual=").append(KeyUtils.getKeyType(actual))
.append('-').println(KeyUtils.getFingerPrint(actual));
stderr.flush(); // just making sure
stdout.append("Accept key and update known hosts: y/[N]");
stdout.flush(); // just making sure
String ans = GenericUtils.trimToEmpty(stdin.readLine());
return (GenericUtils.length(ans) > 0)
&& (Character.toLowerCase(ans.charAt(0)) == 'y');
});
manager.setServerKeyVerifier(current);
return current;
}
public static OutputStream resolveLoggingTargetStream(
PrintStream stdout, PrintStream stderr, String... args) {
return resolveLoggingTargetStream(stdout, stderr, args, GenericUtils.length(args));
}
public static OutputStream resolveLoggingTargetStream(
PrintStream stdout, PrintStream stderr, String[] args, int maxIndex) {
for (int index = 0; index < maxIndex; index++) {
String argName = args[index];
if ("-E".equals(argName)) {
if ((index + 1) >= maxIndex) {
showError(stderr, "Missing " + argName + " option argument");
return null;
}
String argVal = args[index + 1];
if ("--".equals(argVal)) {
return stdout;
}
try {
Path path = Paths.get(argVal).normalize().toAbsolutePath();
return Files.newOutputStream(path);
} catch (IOException e) {
showError(stderr,
"Failed (" + e.getClass().getSimpleName() + ") to open " + argVal + ": " + e.getMessage());
return null;
}
}
}
return stderr;
}
public static Handler setupLogging(
Level level, PrintStream stdout, PrintStream stderr, OutputStream outputStream) {
Handler fh = new ConsoleHandler() {
{
setOutputStream(outputStream); // override the default (stderr)
}
@Override
protected synchronized void setOutputStream(OutputStream out) throws SecurityException {
if ((out == stdout) || (out == stderr)) {
super.setOutputStream(new NoCloseOutputStream(out));
} else {
super.setOutputStream(out);
}
}
};
fh.setLevel(Level.FINEST);
fh.setFormatter(new Formatter() {
@Override
public String format(LogRecord record) {
String message = formatMessage(record);
String throwable = "";
Throwable t = record.getThrown();
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
pw.println();
t.printStackTrace(pw); // NOPMD
}
throwable = sw.toString();
}
return String.format("%1$tY-%1$tm-%1$td: %2$-7.7s: %3$-32.32s: %4$s%5$s%n",
new Date(record.getMillis()), record.getLevel().getName(),
record.getLoggerName(), message, throwable);
}
});
Logger root = Logger.getLogger("");
for (Handler handler : root.getHandlers()) {
root.removeHandler(handler);
}
root.addHandler(fh);
root.setLevel(level);
return fh;
}
}
| sshd-cli/src/main/java/org/apache/sshd/cli/client/SshClientCliSupport.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.cli.client;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import java.util.logging.ConsoleHandler;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import org.apache.sshd.cli.CliSupport;
import org.apache.sshd.client.ClientAuthenticationManager;
import org.apache.sshd.client.ClientBuilder;
import org.apache.sshd.client.ClientFactoryManager;
import org.apache.sshd.client.SshClient;
import org.apache.sshd.client.auth.keyboard.UserInteraction;
import org.apache.sshd.client.config.SshClientConfigFileReader;
import org.apache.sshd.client.config.hosts.HostConfigEntry;
import org.apache.sshd.client.config.keys.ClientIdentity;
import org.apache.sshd.client.keyverifier.DefaultKnownHostsServerKeyVerifier;
import org.apache.sshd.client.keyverifier.KnownHostsServerKeyVerifier;
import org.apache.sshd.client.keyverifier.ServerKeyVerifier;
import org.apache.sshd.client.session.ClientSession;
import org.apache.sshd.common.NamedFactory;
import org.apache.sshd.common.PropertyResolver;
import org.apache.sshd.common.PropertyResolverUtils;
import org.apache.sshd.common.SshConstants;
import org.apache.sshd.common.channel.PtyChannelConfiguration;
import org.apache.sshd.common.channel.PtyChannelConfigurationMutator;
import org.apache.sshd.common.channel.PtyMode;
import org.apache.sshd.common.cipher.Cipher;
import org.apache.sshd.common.compression.Compression;
import org.apache.sshd.common.config.ConfigFileReaderSupport;
import org.apache.sshd.common.config.SshConfigFileReader;
import org.apache.sshd.common.config.keys.BuiltinIdentities;
import org.apache.sshd.common.config.keys.KeyUtils;
import org.apache.sshd.common.config.keys.PublicKeyEntry;
import org.apache.sshd.common.kex.KexFactoryManager;
import org.apache.sshd.common.kex.extension.DefaultClientKexExtensionHandler;
import org.apache.sshd.common.kex.extension.KexExtensionHandler;
import org.apache.sshd.common.keyprovider.FileKeyPairProvider;
import org.apache.sshd.common.mac.Mac;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.OsUtils;
import org.apache.sshd.common.util.ValidateUtils;
import org.apache.sshd.common.util.io.NoCloseOutputStream;
import org.apache.sshd.common.util.threads.ThreadUtils;
import org.apache.sshd.core.CoreModuleProperties;
/**
* TODO Add javadoc
*
* @author <a href="mailto:[email protected]">Apache MINA SSHD Project</a>
*/
public abstract class SshClientCliSupport extends CliSupport {
/**
* Command line option used to indicate non-default target port
*/
public static final String SSH_CLIENT_PORT_OPTION = "-p";
protected SshClientCliSupport() {
super();
}
public static boolean isArgumentedOption(String portOption, String argName) {
return portOption.equals(argName)
|| "-io".equals(argName)
|| "-i".equals(argName)
|| "-o".equals(argName)
|| "-l".equals(argName)
|| "-w".equals(argName)
|| "-c".equals(argName)
|| "-m".equals(argName)
|| "-E".equals(argName);
}
// NOTE: ClientSession#getFactoryManager is the SshClient
public static ClientSession setupClientSession(
String portOption, BufferedReader stdin, Level level,
PrintStream stdout, PrintStream stderr, String... args)
throws Exception {
int port = -1;
String host = null;
String login = null;
String password = null;
boolean error = false;
List<Path> identities = new ArrayList<>();
Map<String, Object> options = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
List<NamedFactory<Cipher>> ciphers = null;
List<NamedFactory<Mac>> macs = null;
List<NamedFactory<Compression>> compressions = null;
int numArgs = GenericUtils.length(args);
for (int i = 0; (!error) && (i < numArgs); i++) {
String argName = args[i];
String argVal = null;
if (isArgumentedOption(portOption, argName)) {
i++;
if (i >= numArgs) {
error = showError(stderr, "option requires an argument: " + argName);
break;
}
argVal = args[i];
}
if (portOption.equals(argName)) {
if (port > 0) {
error = showError(stderr, argName + " option value re-specified: " + port);
break;
}
port = Integer.parseInt(argVal);
if (port <= 0) {
error = showError(stderr, "Bad option value for " + argName + ": " + port);
break;
}
} else if ("-w".equals(argName)) {
if (GenericUtils.length(password) > 0) {
error = showError(stderr, argName + " option value re-specified: " + password);
break;
}
password = argVal;
} else if ("-c".equals(argName)) {
ciphers = setupCiphers(argName, argVal, ciphers, stderr);
if (GenericUtils.isEmpty(ciphers)) {
error = true;
break;
}
} else if ("-m".equals(argName)) {
macs = setupMacs(argName, argVal, macs, stderr);
if (GenericUtils.isEmpty(macs)) {
error = true;
break;
}
} else if ("-i".equals(argName)) {
Path idFile = resolveIdentityFile(argVal);
identities.add(idFile);
} else if ("-C".equals(argName)) {
compressions = setupCompressions(argName, argVal, compressions, stderr);
if (GenericUtils.isEmpty(compressions)) {
error = true;
break;
}
} else if ("-o".equals(argName)) {
String opt = argVal;
int idx = opt.indexOf('=');
if (idx <= 0) {
error = showError(stderr, "bad syntax for option: " + opt);
break;
}
String optName = opt.substring(0, idx);
String optValue = opt.substring(idx + 1);
if (HostConfigEntry.IDENTITY_FILE_CONFIG_PROP.equals(optName)) {
Path idFile = resolveIdentityFile(optValue);
identities.add(idFile);
} else {
Object prev = options.get(optName);
if (prev == null) {
options.put(optName, optValue);
} else {
options.put(optName, Objects.toString(prev) + "," + optValue);
}
}
} else if ("-l".equals(argName)) {
if (login != null) {
error = showError(stderr, argName + " option value re-specified: " + login);
break;
}
login = argVal;
} else if (argName.charAt(0) != '-') {
if (host != null) { // assume part of a command following it
break;
}
host = argName;
int pos = host.indexOf('@'); // check if user@host
if (pos > 0) {
if (login == null) {
login = host.substring(0, pos);
host = host.substring(pos + 1);
} else {
error = showError(stderr, "Login already specified using -l option (" + login + "): " + host);
break;
}
}
}
}
if ((!error) && GenericUtils.isEmpty(host)) {
error = showError(stderr, "Hostname not specified");
}
if (error) {
return null;
}
PropertyResolver resolver = PropertyResolverUtils.toPropertyResolver(options);
SshClient client = setupClient(
resolver, ciphers, macs, compressions, identities,
stdin, stdout, stderr, level, args);
if (client == null) {
return null;
}
try {
client.start();
if (login == null) {
login = OsUtils.getCurrentUser();
}
if (port <= 0) {
port = SshConstants.DEFAULT_PORT;
}
// TODO use a configurable wait time
ClientSession session = client.connect(login, host, port)
.verify()
.getSession();
try {
if (GenericUtils.length(password) > 0) {
session.addPasswordIdentity(password);
}
session.auth().verify(CoreModuleProperties.AUTH_TIMEOUT.getRequired(session));
return session;
} catch (Exception e) {
session.close(true);
throw e;
}
} catch (Exception e) {
client.close();
throw e;
}
}
public static Path resolveIdentityFile(String id) throws IOException {
BuiltinIdentities identity = BuiltinIdentities.fromName(id);
if (identity != null) {
String fileName = ClientIdentity.getIdentityFileName(identity.getName());
Path keysFolder = PublicKeyEntry.getDefaultKeysFolderPath();
return keysFolder.resolve(fileName);
} else {
return Paths.get(id);
}
}
public static Map<String, ?> resolveClientEnvironment(PropertyResolver resolver) {
return resolveClientEnvironment(
(resolver == null) ? Collections.emptyMap() : resolver.getProperties());
}
public static Map<String, ?> resolveClientEnvironment(Map<String, ?> options) {
if (GenericUtils.isEmpty(options)) {
return Collections.emptyMap();
}
Map<String, Object> env = Collections.emptyMap();
for (String propName : new String[] { SshClientConfigFileReader.SETENV_PROP, SshClientConfigFileReader.SENDENV_PROP }) {
Object v = options.get(propName);
String s = Objects.toString(v, null);
if (GenericUtils.isEmpty(s)) {
continue;
}
String[] kvp = GenericUtils.split(s, ',');
for (String kve : kvp) {
int pos = kve.indexOf('=');
String key = (pos >= 0) ? kve.substring(0, pos) : kve;
String value = (pos >= 0) ? kve.substring(pos + 1) : "";
if (env.isEmpty()) {
env = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
}
Object prev = env.put(key, value);
if (prev != null) {
continue; // debug breakpoint
}
}
}
return env;
}
public static PtyChannelConfiguration resolveClientPtyOptions(PropertyResolver resolver)
throws IOException, InterruptedException {
return resolveClientPtyOptions(
(resolver == null) ? Collections.emptyMap() : resolver.getProperties());
}
public static PtyChannelConfiguration resolveClientPtyOptions(Map<String, ?> options)
throws IOException, InterruptedException {
Object v = GenericUtils.isEmpty(options)
? null
: options.get(SshClientConfigFileReader.REQUEST_TTY_OPTION);
String s = Objects.toString(v, "auto");
boolean autoDetect = "auto".equalsIgnoreCase(s);
Boolean ptyEnabled = autoDetect ? Boolean.TRUE : PropertyResolverUtils.parseBoolean(s);
if ((ptyEnabled == null) || (!ptyEnabled.booleanValue())) {
return null;
}
PtyChannelConfiguration config = new PtyChannelConfiguration();
if (autoDetect) {
PtyChannelConfigurationMutator.setupSensitiveDefaultPtyConfiguration(config);
}
// TODO add support for height/width, rows/columns and TERM(inal) type
Map<PtyMode, Integer> ptyModes = resolveClientPtyModes(options);
if (GenericUtils.isNotEmpty(ptyModes)) {
config.setPtyModes(ptyModes);
}
return config;
}
public static Map<PtyMode, Integer> resolveClientPtyModes(Map<String, ?> options)
throws IOException, InterruptedException {
Object v = GenericUtils.isEmpty(options) ? null : options.get(PtyMode.class.getSimpleName());
String s = Objects.toString(v, null);
if (GenericUtils.isEmpty(s)) {
return Collections.emptyMap();
}
String[] kvp = GenericUtils.split(s, ',');
Map<PtyMode, Integer> ptyModes = new EnumMap<>(PtyMode.class);
for (String kve : kvp) {
int pos = kve.indexOf('=');
String key = (pos >= 0) ? kve.substring(0, pos) : kve;
PtyMode mode = ValidateUtils.checkNotNull(
PtyMode.fromName(key), "Unknown PTY mode: %s", key);
s = (pos >= 0) ? kve.substring(pos + 1) : "";
Integer value = GenericUtils.isEmpty(s) ? Integer.valueOf(1) : Integer.valueOf(s);
Integer prev = ptyModes.put(mode, value);
if (prev != null) {
continue; // debug breakpoint
}
}
return ptyModes;
}
public static SshClient setupDefaultClient(
PropertyResolver resolver, Level level,
PrintStream stdout, PrintStream stderr, String... args) {
SshClient client = setupIoServiceFactory(
SshClient.setUpDefaultClient(), resolver, level, stdout, stderr, args);
SshConfigFileReader.configureKeyExchanges(client, resolver, true, ClientBuilder.DH2KEX, true);
SshConfigFileReader.configureSignatures(client, resolver, true, true);
SshClientConfigFileReader.setupClientHeartbeat(client, resolver);
return client;
}
// returns null if error encountered
@SuppressWarnings("checkstyle:ParameterNumber")
public static SshClient setupClient(
PropertyResolver resolver,
List<NamedFactory<Cipher>> ciphers,
List<NamedFactory<Mac>> macs,
List<NamedFactory<Compression>> compressions,
Collection<? extends Path> identities,
BufferedReader stdin, PrintStream stdout, PrintStream stderr,
Level level, String[] args)
throws Exception {
if (GenericUtils.isEmpty(ciphers)) {
ciphers = setupCiphers(resolver, stderr);
if (ciphers == null) {
return null;
}
}
if (GenericUtils.isEmpty(macs)) {
macs = setupMacs(resolver, stderr);
if (macs == null) {
return null;
}
}
if (GenericUtils.isEmpty(compressions)) {
compressions = setupCompressions(resolver, stderr);
if (compressions == null) {
return null;
}
}
SshClient client = setupDefaultClient(resolver, level, stdout, stderr, args);
if (client == null) {
return null;
}
try {
if (GenericUtils.size(ciphers) > 0) {
client.setCipherFactories(ciphers);
}
if (GenericUtils.size(macs) > 0) {
client.setMacFactories(macs);
}
if (GenericUtils.size(compressions) > 0) {
client.setCompressionFactories(compressions);
}
try {
setupSessionIdentities(client, identities, stdin, stdout, stderr);
} catch (Throwable t) { // show but do not fail the setup - maybe a password can be used
showError(stderr, t.getClass().getSimpleName() + " while loading user keys: " + t.getMessage());
}
setupServerKeyVerifier(client, resolver, stdin, stdout, stderr);
setupUserAuthFactories(client, resolver);
setupSessionUserInteraction(client, stdin, stdout, stderr);
setupSessionExtensions(client, resolver, stdin, stdout, stderr);
Map<String, ?> options = resolver.getProperties();
if (GenericUtils.isNotEmpty(options)) {
Map<String, Object> props = client.getProperties();
props.putAll(options);
}
return client;
} catch (Throwable t) {
showError(stderr, "Failed (" + t.getClass().getSimpleName() + ") to setup client: " + t.getMessage());
client.close();
return null;
}
}
public static FileKeyPairProvider setupSessionIdentities(
ClientFactoryManager client, Collection<? extends Path> identities,
BufferedReader stdin, PrintStream stdout, PrintStream stderr)
throws Throwable {
client.setFilePasswordProvider((session, file, index) -> {
stdout.print("Enter password for private key file=" + file + ": ");
return stdin.readLine();
});
if (GenericUtils.isEmpty(identities)) {
return null;
}
FileKeyPairProvider provider = new FileKeyPairProvider() {
@Override
public String toString() {
return FileKeyPairProvider.class.getSimpleName() + "[clientIdentitiesProvider]";
}
};
provider.setPaths(identities);
client.setKeyIdentityProvider(provider);
return provider;
}
public static UserInteraction setupSessionUserInteraction(
ClientAuthenticationManager client, BufferedReader stdin, PrintStream stdout, PrintStream stderr) {
UserInteraction ui = new UserInteraction() {
@Override
public boolean isInteractionAllowed(ClientSession session) {
return true;
}
@Override
public void serverVersionInfo(ClientSession session, List<String> lines) {
for (String l : lines) {
stdout.append('\t').println(l);
}
}
@Override
public void welcome(ClientSession clientSession, String banner, String lang) {
stdout.println(banner);
}
@Override
public String[] interactive(
ClientSession clientSession, String name, String instruction,
String lang, String[] prompt, boolean[] echo) {
int numPrompts = GenericUtils.length(prompt);
String[] answers = new String[numPrompts];
try {
for (int i = 0; i < numPrompts; i++) {
stdout.append(prompt[i]).print(" ");
answers[i] = stdin.readLine();
}
} catch (IOException e) {
stderr.append("WARNING: ").append(e.getClass().getSimpleName())
.append(" while read prompts: ").println(e.getMessage());
}
return answers;
}
@Override
public String getUpdatedPassword(ClientSession clientSession, String prompt, String lang) {
stdout.append(prompt).print(" ");
try {
return stdin.readLine();
} catch (IOException e) {
stderr.append("WARNING: ").append(e.getClass().getSimpleName())
.append(" while read password: ").println(e.getMessage());
return null;
}
}
};
client.setUserInteraction(ui);
return ui;
}
public static void setupSessionExtensions(
KexFactoryManager manager, PropertyResolver resolver,
BufferedReader stdin, PrintStream stdout, PrintStream stderr)
throws Exception {
Map<String, ?> options = resolver.getProperties();
String kexExtension = Objects.toString(
options.remove(KexExtensionHandler.class.getSimpleName()), null);
if (GenericUtils.isEmpty(kexExtension)) {
return;
}
if ("default".equalsIgnoreCase(kexExtension)) {
manager.setKexExtensionHandler(DefaultClientKexExtensionHandler.INSTANCE);
stdout.println("Using " + DefaultClientKexExtensionHandler.class.getSimpleName());
} else {
ClassLoader cl = ThreadUtils.resolveDefaultClassLoader(KexExtensionHandler.class);
try {
Class<?> clazz = cl.loadClass(kexExtension);
KexExtensionHandler handler = KexExtensionHandler.class.cast(clazz.newInstance());
manager.setKexExtensionHandler(handler);
} catch (Exception e) {
stderr.append("ERROR: Failed (").append(e.getClass().getSimpleName()).append(')')
.append(" to instantiate KEX extension handler=").append(kexExtension)
.append(": ").println(e.getMessage());
stderr.flush();
throw e;
}
stdout.println("Using " + KexExtensionHandler.class.getSimpleName() + "=" + kexExtension);
}
}
public static ServerKeyVerifier setupServerKeyVerifier(
ClientAuthenticationManager manager, PropertyResolver resolver,
BufferedReader stdin, PrintStream stdout, PrintStream stderr) {
ServerKeyVerifier current = manager.getServerKeyVerifier();
if (current == null) {
current = ClientBuilder.DEFAULT_SERVER_KEY_VERIFIER;
manager.setServerKeyVerifier(current);
}
Map<String, ?> options = resolver.getProperties();
String strictValue = Objects.toString(
options.remove(KnownHostsServerKeyVerifier.STRICT_CHECKING_OPTION), "true");
if (!ConfigFileReaderSupport.parseBooleanValue(strictValue)) {
return current;
}
String filePath = Objects.toString(
options.remove(KnownHostsServerKeyVerifier.KNOWN_HOSTS_FILE_OPTION), null);
if (GenericUtils.isEmpty(filePath)) {
current = new DefaultKnownHostsServerKeyVerifier(current);
} else { // if user specifies a different location than default be lenient
current = new DefaultKnownHostsServerKeyVerifier(current, false, Paths.get(filePath));
}
((KnownHostsServerKeyVerifier) current).setModifiedServerKeyAcceptor(
(clientSession, remoteAddress, entry, expected, actual) -> {
stderr.append("WARNING: Mismatched keys presented by ").append(Objects.toString(remoteAddress))
.append(" for entry=").println(entry);
stderr.append(" ").append("Expected=").append(KeyUtils.getKeyType(expected))
.append('-').println(KeyUtils.getFingerPrint(expected));
stderr.append(" ").append("Actual=").append(KeyUtils.getKeyType(actual))
.append('-').println(KeyUtils.getFingerPrint(actual));
stderr.flush(); // just making sure
stdout.append("Accept key and update known hosts: y/[N]");
stdout.flush(); // just making sure
String ans = GenericUtils.trimToEmpty(stdin.readLine());
return (GenericUtils.length(ans) > 0)
&& (Character.toLowerCase(ans.charAt(0)) == 'y');
});
manager.setServerKeyVerifier(current);
return current;
}
public static OutputStream resolveLoggingTargetStream(
PrintStream stdout, PrintStream stderr, String... args) {
return resolveLoggingTargetStream(stdout, stderr, args, GenericUtils.length(args));
}
public static OutputStream resolveLoggingTargetStream(
PrintStream stdout, PrintStream stderr, String[] args, int maxIndex) {
for (int index = 0; index < maxIndex; index++) {
String argName = args[index];
if ("-E".equals(argName)) {
if ((index + 1) >= maxIndex) {
showError(stderr, "Missing " + argName + " option argument");
return null;
}
String argVal = args[index + 1];
if ("--".equals(argVal)) {
return stdout;
}
try {
Path path = Paths.get(argVal).normalize().toAbsolutePath();
return Files.newOutputStream(path);
} catch (IOException e) {
showError(stderr,
"Failed (" + e.getClass().getSimpleName() + ") to open " + argVal + ": " + e.getMessage());
return null;
}
}
}
return stderr;
}
public static Handler setupLogging(
Level level, PrintStream stdout, PrintStream stderr, OutputStream outputStream) {
Handler fh = new ConsoleHandler() {
{
setOutputStream(outputStream); // override the default (stderr)
}
@Override
protected synchronized void setOutputStream(OutputStream out) throws SecurityException {
if ((out == stdout) || (out == stderr)) {
super.setOutputStream(new NoCloseOutputStream(out));
} else {
super.setOutputStream(out);
}
}
};
fh.setLevel(Level.FINEST);
fh.setFormatter(new Formatter() {
@Override
public String format(LogRecord record) {
String message = formatMessage(record);
String throwable = "";
Throwable t = record.getThrown();
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
pw.println();
t.printStackTrace(pw); // NOPMD
}
throwable = sw.toString();
}
return String.format("%1$tY-%1$tm-%1$td: %2$-7.7s: %3$-32.32s: %4$s%5$s%n",
new Date(record.getMillis()), record.getLevel().getName(),
record.getLoggerName(), message, throwable);
}
});
Logger root = Logger.getLogger("");
for (Handler handler : root.getHandlers()) {
root.removeHandler(handler);
}
root.addHandler(fh);
root.setLevel(level);
return fh;
}
}
| Use Map.merge
| sshd-cli/src/main/java/org/apache/sshd/cli/client/SshClientCliSupport.java | Use Map.merge | <ide><path>shd-cli/src/main/java/org/apache/sshd/cli/client/SshClientCliSupport.java
<ide> Path idFile = resolveIdentityFile(optValue);
<ide> identities.add(idFile);
<ide> } else {
<del> Object prev = options.get(optName);
<del> if (prev == null) {
<del> options.put(optName, optValue);
<del> } else {
<del> options.put(optName, Objects.toString(prev) + "," + optValue);
<del> }
<add> options.merge(optName, optValue, (a, b) -> a + "," + b);
<ide> }
<ide> } else if ("-l".equals(argName)) {
<ide> if (login != null) { |
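
For readers unfamiliar with Map.merge: the replaced get/put sequence and the new one-liner behave the same for this accumulate-with-comma case. merge stores the value when the key is absent and otherwise applies the remapping function to the previous and new values (merge rejects a null value, which matches this call site, where argVal is non-null). A standalone sketch, illustrative only and not project code:

import java.util.Map;
import java.util.TreeMap;

public class MergeDemo {
    public static void main(String[] args) {
        Map<String, Object> options = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);

        // old form: explicit get/put with manual concatenation
        for (String val : new String[] { "A=1", "B=2" }) {
            Object prev = options.get("SetEnv");
            if (prev == null) {
                options.put("SetEnv", val);
            } else {
                options.put("SetEnv", prev + "," + val);
            }
        }

        // new form: merge puts on an absent key, otherwise remaps old + new
        options.merge("SendEnv", "LANG", (a, b) -> a + "," + b);
        options.merge("SendEnv", "LC_ALL", (a, b) -> a + "," + b);

        System.out.println(options); // {SendEnv=LANG,LC_ALL, SetEnv=A=1,B=2}
    }
}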
|
Java | bsd-3-clause | aead27bcc5ca0b3c6e72d86a94379305545385bf | 0 | NCIP/cananolab,NCIP/cananolab,NCIP/cananolab | package gov.nih.nci.cananolab.service.sample.helper;
import gov.nih.nci.cananolab.domain.agentmaterial.OtherFunctionalizingEntity;
import gov.nih.nci.cananolab.domain.common.Datum;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Keyword;
import gov.nih.nci.cananolab.domain.common.Organization;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.function.OtherFunction;
import gov.nih.nci.cananolab.domain.nanomaterial.OtherNanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.domain.particle.ComposingElement;
import gov.nih.nci.cananolab.domain.particle.Function;
import gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity;
import gov.nih.nci.cananolab.domain.particle.NanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Sample;
import gov.nih.nci.cananolab.dto.common.PointOfContactBean;
import gov.nih.nci.cananolab.dto.common.UserBean;
import gov.nih.nci.cananolab.dto.particle.AdvancedSampleBean;
import gov.nih.nci.cananolab.dto.particle.AdvancedSampleSearchBean;
import gov.nih.nci.cananolab.dto.particle.CharacterizationQueryBean;
import gov.nih.nci.cananolab.dto.particle.CompositionQueryBean;
import gov.nih.nci.cananolab.dto.particle.SampleQueryBean;
import gov.nih.nci.cananolab.exception.NoAccessException;
import gov.nih.nci.cananolab.service.security.AuthorizationService;
import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService;
import gov.nih.nci.cananolab.util.ClassUtils;
import gov.nih.nci.cananolab.util.Comparators;
import gov.nih.nci.cananolab.util.Constants;
import gov.nih.nci.cananolab.util.StringUtils;
import gov.nih.nci.cananolab.util.TextMatchMode;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import gov.nih.nci.system.query.hibernate.HQLCriteria;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.hibernate.FetchMode;
import org.hibernate.criterion.Conjunction;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Expression;
import org.hibernate.criterion.Junction;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Property;
import org.hibernate.criterion.Restrictions;
import org.hibernate.criterion.Subqueries;
/**
* Helper class providing implementations of search methods needed for both
 * local implementation of SampleService and grid service.
*
* @author pansu, tanq
*
*/
public class SampleServiceHelper {
private AuthorizationService authService;
private static Logger logger = Logger.getLogger(SampleServiceHelper.class);
public SampleServiceHelper() {
try {
authService = new AuthorizationService(Constants.CSM_APP_NAME);
} catch (Exception e) {
logger.error("Can't create authorization service: " + e);
}
}
public List<Sample> findSamplesBy(String samplePointOfContact,
String[] nanomaterialEntityClassNames,
String[] otherNanomaterialEntityTypes,
String[] functionalizingEntityClassNames,
String[] otherFunctionalizingEntityTypes,
String[] functionClassNames, String[] otherFunctionTypes,
String[] characterizationClassNames,
String[] otherCharacterizationTypes, String[] wordList,
UserBean user) throws Exception {
List<Sample> samples = new ArrayList<Sample>();
// can't query for the entire Sample object due to
// limitations in pagination in SDK
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class)
.setProjection(Projections.distinct(Property.forName("name")));
if (samplePointOfContact != null && samplePointOfContact.length() > 0) {
TextMatchMode pocMatchMode = new TextMatchMode(samplePointOfContact);
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("primaryPointOfContact", "pointOfContact");
crit.createAlias("pointOfContact.organization", "organization");
crit.createAlias("otherPointOfContactCollection", "otherPoc",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("otherPoc.organization", "otherOrg",
CriteriaSpecification.LEFT_JOIN);
String critStrs[] = { "pointOfContact.lastName",
"pointOfContact.firstName", "pointOfContact.role",
"organization.name", "otherPoc.lastName",
"otherPoc.firstName", "otherOrg.name" };
for (String critStr : critStrs) {
Criterion pocCrit = Restrictions.ilike(critStr, pocMatchMode
.getUpdatedText(), pocMatchMode.getMatchMode());
disjunction.add(pocCrit);
}
crit.add(disjunction);
}
// join composition
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0
|| functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
crit.createAlias("sampleComposition", "comp",
CriteriaSpecification.LEFT_JOIN);
}
// join nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.nanomaterialEntityCollection", "nanoEntity",
CriteriaSpecification.LEFT_JOIN);
}
// join functionalizing entity
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.functionalizingEntityCollection",
"funcEntity", CriteriaSpecification.LEFT_JOIN);
}
// nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0) {
Criterion nanoEntityCrit = Restrictions.in("nanoEntity.class",
nanomaterialEntityClassNames);
disjunction.add(nanoEntityCrit);
}
if (otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0) {
Criterion otherNanoCrit1 = Restrictions.eq("nanoEntity.class",
"OtherNanomaterialEntity");
Criterion otherNanoCrit2 = Restrictions.in("nanoEntity.type",
otherNanomaterialEntityTypes);
Criterion otherNanoCrit = Restrictions.and(otherNanoCrit1,
otherNanoCrit2);
disjunction.add(otherNanoCrit);
}
crit.add(disjunction);
}
// functionalizing entity
// need to turn class names into integers in order for the .class
// clause to work
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0) {
Integer[] functionalizingEntityClassNameIntegers = this
.convertToFunctionalizingEntityClassOrderNumber(functionalizingEntityClassNames);
Criterion funcEntityCrit = Restrictions.in("funcEntity.class",
functionalizingEntityClassNameIntegers);
disjunction.add(funcEntityCrit);
}
if (otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
Integer classOrderNumber = Constants.FUNCTIONALIZING_ENTITY_SUBCLASS_ORDER_MAP
.get("otherFunctionalizingEntity");
Criterion otherFuncCrit1 = Restrictions.eq("funcEntity.class",
classOrderNumber);
Criterion otherFuncCrit2 = Restrictions.in("funcEntity.type",
otherFunctionalizingEntityTypes);
Criterion otherFuncCrit = Restrictions.and(otherFuncCrit1,
otherFuncCrit2);
disjunction.add(otherFuncCrit);
}
crit.add(disjunction);
}
// function
if (functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("nanoEntity.composingElementCollection",
"compElement", CriteriaSpecification.LEFT_JOIN)
.createAlias("compElement.inherentFunctionCollection",
"inFunc", CriteriaSpecification.LEFT_JOIN);
crit.createAlias("funcEntity.functionCollection", "func",
CriteriaSpecification.LEFT_JOIN);
if (functionClassNames != null && functionClassNames.length > 0) {
Criterion funcCrit1 = Restrictions.in("inFunc.class",
functionClassNames);
Criterion funcCrit2 = Restrictions.in("func.class",
functionClassNames);
disjunction.add(funcCrit1).add(funcCrit2);
}
if (otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Criterion otherFuncCrit1 = Restrictions.and(Restrictions.eq(
"inFunc.class", "OtherFunction"), Restrictions.in(
"inFunc.type", otherFunctionTypes));
Criterion otherFuncCrit2 = Restrictions.and(Restrictions.eq(
"func.class", "OtherFunction"), Restrictions.in(
"func.type", otherFunctionTypes));
disjunction.add(otherFuncCrit1).add(otherFuncCrit2);
}
crit.add(disjunction);
}
// join characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0 || wordList != null
&& wordList.length > 0) {
crit.createAlias("characterizationCollection", "chara",
CriteriaSpecification.LEFT_JOIN);
}
// characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0) {
crit
.add(Restrictions.in("chara.class",
characterizationClassNames));
}
// join keyword, finding, publication
if (wordList != null && wordList.length > 0) {
crit.createAlias("keywordCollection", "keyword1");
crit.createAlias("chara.findingCollection", "finding",
CriteriaSpecification.LEFT_JOIN).createAlias(
"finding.fileCollection", "charFile",
CriteriaSpecification.LEFT_JOIN).createAlias(
"charFile.keywordCollection", "keyword2",
CriteriaSpecification.LEFT_JOIN);
// publication keywords
crit.createAlias("publicationCollection", "pub1",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("pub1.keywordCollection", "keyword3",
CriteriaSpecification.LEFT_JOIN);
}
// keyword
if (wordList != null && wordList.length > 0) {
// turn words into upper case before searching keywords
String[] upperKeywords = new String[wordList.length];
for (int i = 0; i < wordList.length; i++) {
upperKeywords[i] = wordList[i].toUpperCase();
}
Disjunction disjunction = Restrictions.disjunction();
for (String keyword : upperKeywords) {
Criterion keywordCrit1 = Restrictions.like("keyword1.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit2 = Restrictions.like("keyword2.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit3 = Restrictions.like("keyword3.name",
keyword, MatchMode.ANYWHERE);
disjunction.add(keywordCrit1);
disjunction.add(keywordCrit2);
disjunction.add(keywordCrit3);
}
for (String word : wordList) {
Criterion summaryCrit1 = Restrictions.ilike(
"chara.designMethodsDescription", word,
MatchMode.ANYWHERE);
Criterion summaryCrit2 = Restrictions.ilike(
"charFile.description", word, MatchMode.ANYWHERE);
Criterion summaryCrit = Restrictions.or(summaryCrit1,
summaryCrit2);
disjunction.add(summaryCrit);
}
crit.add(disjunction);
}
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
List filteredResults = new ArrayList(results);
// get public data
if (user == null) {
filteredResults = authService.filterNonPublic(results);
}
for (Object obj : filteredResults) {
String sampleName = obj.toString();
try {
Sample sample = findSampleByName(sampleName, user);
samples.add(sample);
} catch (NoAccessException e) {
// ignore no access exception
logger.debug("User doesn't have access to sample with name "
+ sampleName);
}
}
Collections.sort(samples, new Comparators.SampleNameComparator());
return samples;
}
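	/**
	 * Search for samples matching the given point of contact, composition,
	 * function, characterization and keyword criteria, returning only the
	 * distinct names of samples the user can read. Uses the same query logic
	 * as findSamplesBy, but is cheaper when the full Sample graph is not
	 * needed.
	 *
	 * @param samplePointOfContact
	 * @param nanomaterialEntityClassNames
	 * @param otherNanomaterialEntityTypes
	 * @param functionalizingEntityClassNames
	 * @param otherFunctionalizingEntityTypes
	 * @param functionClassNames
	 * @param otherFunctionTypes
	 * @param characterizationClassNames
	 * @param otherCharacterizationTypes
	 * @param wordList
	 * @param user
	 * @return sorted list of accessible sample names
	 * @throws Exception
	 */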
public List<String> findSampleNamesBy(String samplePointOfContact,
String[] nanomaterialEntityClassNames,
String[] otherNanomaterialEntityTypes,
String[] functionalizingEntityClassNames,
String[] otherFunctionalizingEntityTypes,
String[] functionClassNames, String[] otherFunctionTypes,
String[] characterizationClassNames,
String[] otherCharacterizationTypes, String[] wordList,
UserBean user) throws Exception {
List<String> sampleNames = new ArrayList<String>();
// can't query for the entire Sample object due to
// limitations in pagination in SDK
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class)
.setProjection(Projections.distinct(Property.forName("name")));
if (!StringUtils.isEmpty(samplePointOfContact)) {
TextMatchMode pocMatchMode = new TextMatchMode(samplePointOfContact);
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("primaryPointOfContact", "pointOfContact");
crit.createAlias("pointOfContact.organization", "organization");
crit.createAlias("otherPointOfContactCollection", "otherPoc",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("otherPoc.organization", "otherOrg",
CriteriaSpecification.LEFT_JOIN);
String critStrs[] = { "pointOfContact.lastName",
"pointOfContact.firstName", "pointOfContact.role",
"organization.name", "otherPoc.lastName",
"otherPoc.firstName", "otherOrg.name" };
for (String critStr : critStrs) {
Criterion pocCrit = Restrictions.ilike(critStr, pocMatchMode
.getUpdatedText(), pocMatchMode.getMatchMode());
disjunction.add(pocCrit);
}
crit.add(disjunction);
}
// join composition
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0
|| functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
crit.createAlias("sampleComposition", "comp",
CriteriaSpecification.LEFT_JOIN);
}
// join nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.nanomaterialEntityCollection", "nanoEntity",
CriteriaSpecification.LEFT_JOIN);
}
// join functionalizing entity
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.functionalizingEntityCollection",
"funcEntity", CriteriaSpecification.LEFT_JOIN);
}
// nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0) {
Criterion nanoEntityCrit = Restrictions.in("nanoEntity.class",
nanomaterialEntityClassNames);
disjunction.add(nanoEntityCrit);
}
if (otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0) {
Criterion otherNanoCrit1 = Restrictions.eq("nanoEntity.class",
"OtherNanomaterialEntity");
Criterion otherNanoCrit2 = Restrictions.in("nanoEntity.type",
otherNanomaterialEntityTypes);
Criterion otherNanoCrit = Restrictions.and(otherNanoCrit1,
otherNanoCrit2);
disjunction.add(otherNanoCrit);
}
crit.add(disjunction);
}
// functionalizing entity
// need to turn class names into integers in order for the .class
// clause to work
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0) {
Integer[] functionalizingEntityClassNameIntegers = this
.convertToFunctionalizingEntityClassOrderNumber(functionalizingEntityClassNames);
Criterion funcEntityCrit = Restrictions.in("funcEntity.class",
functionalizingEntityClassNameIntegers);
disjunction.add(funcEntityCrit);
}
if (otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
Integer classOrderNumber = Constants.FUNCTIONALIZING_ENTITY_SUBCLASS_ORDER_MAP
.get("OtherFunctionalizingEntity");
Criterion otherFuncCrit1 = Restrictions.eq("funcEntity.class",
classOrderNumber);
Criterion otherFuncCrit2 = Restrictions.in("funcEntity.type",
otherFunctionalizingEntityTypes);
Criterion otherFuncCrit = Restrictions.and(otherFuncCrit1,
otherFuncCrit2);
disjunction.add(otherFuncCrit);
}
crit.add(disjunction);
}
// function
if (functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("nanoEntity.composingElementCollection",
"compElement", CriteriaSpecification.LEFT_JOIN)
.createAlias("compElement.inherentFunctionCollection",
"inFunc", CriteriaSpecification.LEFT_JOIN);
crit.createAlias("funcEntity.functionCollection", "func",
CriteriaSpecification.LEFT_JOIN);
if (functionClassNames != null && functionClassNames.length > 0) {
Criterion funcCrit1 = Restrictions.in("inFunc.class",
functionClassNames);
Criterion funcCrit2 = Restrictions.in("func.class",
functionClassNames);
disjunction.add(funcCrit1).add(funcCrit2);
}
if (otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Criterion otherFuncCrit1 = Restrictions.and(Restrictions.eq(
"inFunc.class", "OtherFunction"), Restrictions.in(
"inFunc.type", otherFunctionTypes));
Criterion otherFuncCrit2 = Restrictions.and(Restrictions.eq(
"func.class", "OtherFunction"), Restrictions.in(
"func.type", otherFunctionTypes));
disjunction.add(otherFuncCrit1).add(otherFuncCrit2);
}
crit.add(disjunction);
}
// join characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0 || wordList != null
&& wordList.length > 0) {
crit.createAlias("characterizationCollection", "chara",
CriteriaSpecification.LEFT_JOIN);
}
// characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0) {
crit
.add(Restrictions.in("chara.class",
characterizationClassNames));
}
// join keyword, finding, publication
if (wordList != null && wordList.length > 0) {
crit.createAlias("keywordCollection", "keyword1");
crit.createAlias("chara.findingCollection", "finding",
CriteriaSpecification.LEFT_JOIN).createAlias(
"finding.fileCollection", "charFile",
CriteriaSpecification.LEFT_JOIN).createAlias(
"charFile.keywordCollection", "keyword2",
CriteriaSpecification.LEFT_JOIN);
// publication keywords
crit.createAlias("publicationCollection", "pub1",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("pub1.keywordCollection", "keyword3",
CriteriaSpecification.LEFT_JOIN);
}
// keyword
if (wordList != null && wordList.length > 0) {
// turn words into upper case before searching keywords
String[] upperKeywords = new String[wordList.length];
for (int i = 0; i < wordList.length; i++) {
upperKeywords[i] = wordList[i].toUpperCase();
}
Disjunction disjunction = Restrictions.disjunction();
for (String keyword : upperKeywords) {
Criterion keywordCrit1 = Restrictions.like("keyword1.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit2 = Restrictions.like("keyword2.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit3 = Restrictions.like("keyword3.name",
keyword, MatchMode.ANYWHERE);
disjunction.add(keywordCrit1);
disjunction.add(keywordCrit2);
disjunction.add(keywordCrit3);
}
for (String word : wordList) {
Criterion summaryCrit1 = Restrictions.ilike(
"chara.designMethodsDescription", word,
MatchMode.ANYWHERE);
Criterion summaryCrit2 = Restrictions.ilike(
"charFile.description", word, MatchMode.ANYWHERE);
Criterion summaryCrit = Restrictions.or(summaryCrit1,
summaryCrit2);
disjunction.add(summaryCrit);
}
crit.add(disjunction);
}
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
List filteredResults = new ArrayList(results);
// get public data
if (user == null) {
filteredResults = authService.filterNonPublic(results);
}
for (Object obj : filteredResults) {
String sampleName = obj.toString();
if (user == null
|| authService.checkReadPermission(user, sampleName)) {
sampleNames.add(sampleName);
			} else { // no read permission; skip this sample
logger.debug("User doesn't have access to sample with name "
+ sampleName);
}
}
Collections.sort(sampleNames, new Comparators.SortableNameComparator());
return sampleNames;
}
/**
* Return all stored functionalizing entity class names. In case of
* OtherFunctionalizingEntity, store the OtherFunctionalizingEntity type
*
* @param particleSample
* @return
*/
public SortedSet<String> getStoredFunctionalizingEntityClassNames(
Sample particleSample) {
SortedSet<String> storedEntities = new TreeSet<String>();
if (particleSample.getSampleComposition() != null
&& particleSample.getSampleComposition()
.getFunctionalizingEntityCollection() != null) {
for (FunctionalizingEntity entity : particleSample
.getSampleComposition()
.getFunctionalizingEntityCollection()) {
if (entity instanceof OtherFunctionalizingEntity) {
storedEntities.add(((OtherFunctionalizingEntity) entity)
.getType());
} else {
storedEntities.add(ClassUtils.getShortClassName(entity
.getClass().getCanonicalName()));
}
}
}
return storedEntities;
}
/**
* Return all stored function class names. In case of OtherFunction, store
	 * the OtherFunction type
*
* @param particleSample
* @return
*/
public SortedSet<String> getStoredFunctionClassNames(Sample particleSample) {
SortedSet<String> storedFunctions = new TreeSet<String>();
if (particleSample.getSampleComposition() != null) {
if (particleSample.getSampleComposition()
.getNanomaterialEntityCollection() != null) {
for (NanomaterialEntity entity : particleSample
.getSampleComposition()
.getNanomaterialEntityCollection()) {
if (entity.getComposingElementCollection() != null) {
for (ComposingElement element : entity
.getComposingElementCollection()) {
if (element.getInherentFunctionCollection() != null) {
for (Function function : element
.getInherentFunctionCollection()) {
if (function instanceof OtherFunction) {
storedFunctions
.add(((OtherFunction) function)
.getType());
} else {
storedFunctions.add(ClassUtils
.getShortClassName(function
.getClass()
.getCanonicalName()));
}
}
}
}
}
}
}
if (particleSample.getSampleComposition()
.getFunctionalizingEntityCollection() != null) {
for (FunctionalizingEntity entity : particleSample
.getSampleComposition()
.getFunctionalizingEntityCollection()) {
if (entity.getFunctionCollection() != null) {
for (Function function : entity.getFunctionCollection()) {
if (function instanceof OtherFunction) {
storedFunctions.add(((OtherFunction) function)
.getType());
} else {
storedFunctions.add(ClassUtils
.getShortClassName(function.getClass()
.getCanonicalName()));
}
}
}
}
}
}
return storedFunctions;
}
/**
* Return all stored nanomaterial entity class names. In case of
	 * OtherNanomaterialEntity, store the OtherNanomaterialEntity type
*
* @param particleSample
* @return
*/
public SortedSet<String> getStoredNanomaterialEntityClassNames(
Sample particleSample) {
SortedSet<String> storedEntities = new TreeSet<String>();
if (particleSample.getSampleComposition() != null
&& particleSample.getSampleComposition()
.getNanomaterialEntityCollection() != null) {
for (NanomaterialEntity entity : particleSample
.getSampleComposition().getNanomaterialEntityCollection()) {
if (entity instanceof OtherNanomaterialEntity) {
storedEntities.add(((OtherNanomaterialEntity) entity)
.getType());
} else {
storedEntities.add(ClassUtils.getShortClassName(entity
.getClass().getCanonicalName()));
}
}
}
return storedEntities;
}
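	/**
	 * Return the short class names of all characterizations stored on the
	 * given sample.
	 *
	 * @param particle
	 * @return
	 */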
public SortedSet<String> getStoredCharacterizationClassNames(Sample particle) {
SortedSet<String> storedChars = new TreeSet<String>();
if (particle.getCharacterizationCollection() != null) {
for (Characterization achar : particle
.getCharacterizationCollection()) {
storedChars.add(ClassUtils.getShortClassName(achar.getClass()
.getCanonicalName()));
}
}
return storedChars;
}
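	/**
	 * Load a sample by name, eagerly fetching points of contact, keywords,
	 * characterizations, composition and publications in one query.
	 *
	 * @param sampleName
	 * @param user
	 * @return the sample, or null if no sample with the given name exists
	 * @throws NoAccessException
	 *             if the user doesn't have read permission on the sample
	 */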
public Sample findSampleByName(String sampleName, UserBean user)
throws Exception {
Sample sample = null;
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("name").eq(sampleName));
crit.setFetchMode("primaryPointOfContact", FetchMode.JOIN);
crit.setFetchMode("primaryPointOfContact.organization", FetchMode.JOIN);
crit.setFetchMode("otherPointOfContactCollection", FetchMode.JOIN);
crit.setFetchMode("otherPointOfContactCollection.organization",
FetchMode.JOIN);
crit.setFetchMode("keywordCollection", FetchMode.JOIN);
crit.setFetchMode("characterizationCollection", FetchMode.JOIN);
crit.setFetchMode("sampleComposition.nanomaterialEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
FetchMode.JOIN);
crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.functionalizingEntityCollection.functionCollection",
FetchMode.JOIN);
crit.setFetchMode("publicationCollection", FetchMode.JOIN);
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
List result = appService.query(crit);
if (!result.isEmpty()) {
sample = (Sample) result.get(0);
if (authService.checkReadPermission(user, sample.getName())) {
return sample;
} else {
throw new NoAccessException();
}
}
return sample;
}
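	/**
	 * Load a sample by database id, with the same eager fetching as
	 * findSampleByName.
	 *
	 * @param sampleId
	 * @param user
	 * @return the sample, or null if no sample with the given id exists
	 * @throws NoAccessException
	 *             if the user doesn't have read permission on the sample
	 */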
public Sample findSampleById(String sampleId, UserBean user)
throws Exception {
Sample sample = null;
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("id").eq(new Long(sampleId)));
crit.setFetchMode("primaryPointOfContact", FetchMode.JOIN);
crit.setFetchMode("primaryPointOfContact.organization", FetchMode.JOIN);
crit.setFetchMode("otherPointOfContactCollection", FetchMode.JOIN);
crit.setFetchMode("otherPointOfContactCollection.organization",
FetchMode.JOIN);
crit.setFetchMode("keywordCollection", FetchMode.JOIN);
crit.setFetchMode("characterizationCollection", FetchMode.JOIN);
crit.setFetchMode("sampleComposition.nanomaterialEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
FetchMode.JOIN);
crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.functionalizingEntityCollection.functionCollection",
FetchMode.JOIN);
crit.setFetchMode("publicationCollection", FetchMode.JOIN);
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
List result = appService.query(crit);
if (!result.isEmpty()) {
sample = (Sample) result.get(0);
if (authService.checkReadPermission(user, sample.getName())) {
return sample;
} else {
throw new NoAccessException();
}
}
return sample;
}
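	/**
	 * Return the keywords associated with the given sample, provided the user
	 * has read access to it.
	 *
	 * @param sampleId
	 * @param user
	 * @return
	 * @throws NoAccessException
	 *             if the user doesn't have access to the sample
	 */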
public List<Keyword> findKeywordsBySampleId(String sampleId, UserBean user)
throws Exception {
List<Keyword> keywords = new ArrayList<Keyword>();
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("id").eq(new Long(sampleId)));
crit.setFetchMode("keywordCollection", FetchMode.JOIN);
List result = appService.query(crit);
Sample sample = null;
if (!result.isEmpty()) {
sample = (Sample) result.get(0);
// check whether user has access to the sample
if (authService.checkReadPermission(user, sample.getName())) {
keywords.addAll(sample.getKeywordCollection());
} else {
throw new NoAccessException(
"User doesn't have access to the sample.");
}
}
return keywords;
}
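	/**
	 * Return the primary point of contact of the given sample, if the user is
	 * allowed to read it.
	 *
	 * @param sampleId
	 * @param user
	 * @return
	 * @throws NoAccessException
	 */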
public PointOfContact findPrimaryPointOfContactBySampleId(String sampleId,
UserBean user) throws Exception {
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
HQLCriteria crit = new HQLCriteria(
"select aSample.primaryPointOfContact from gov.nih.nci.cananolab.domain.particle.Sample aSample where aSample.id = "
+ sampleId);
List result = appService.query(crit);
PointOfContact poc = null;
if (!result.isEmpty()) {
poc = (PointOfContact) result.get(0);
if (authService.checkReadPermission(user, poc.getId().toString())) {
return poc;
} else {
throw new NoAccessException();
}
}
return poc;
}
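	/**
	 * Return the non-primary points of contact of the given sample, filtered
	 * to public data when no user is logged in.
	 *
	 * @param sampleId
	 * @param user
	 * @return
	 * @throws Exception
	 */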
public List<PointOfContact> findOtherPointOfContactBySampleId(
String sampleId, UserBean user) throws Exception {
List<PointOfContact> pocs = new ArrayList<PointOfContact>();
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
HQLCriteria crit = new HQLCriteria(
"select aSample.otherPointOfContactCollection from gov.nih.nci.cananolab.domain.particle.Sample aSample where aSample.id = "
+ sampleId);
List results = appService.query(crit);
List filteredResults = new ArrayList(results);
if (user == null) {
filteredResults = authService.filterNonPublic(results);
}
for (Object obj : filteredResults) {
			PointOfContact poc = (PointOfContact) obj;
			pocs.add(poc);
		}
return pocs;
}
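	/**
	 * Count the samples whose names appear in the list of public data known
	 * to the application service.
	 *
	 * @return number of public samples
	 * @throws Exception
	 */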
public int getNumberOfPublicSamples() throws Exception {
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List<String> publicData = appService.getAllPublicData();
HQLCriteria crit = new HQLCriteria(
"select name from gov.nih.nci.cananolab.domain.particle.Sample");
List results = appService.query(crit);
List<String> publicNames = new ArrayList<String>();
for (Object obj : results) {
			String name = obj.toString();
if (StringUtils.containsIgnoreCase(publicData, name)) {
publicNames.add(name);
}
}
return publicNames.size();
}
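	/**
	 * Return the distinct characterization class names stored for the given
	 * sample, via a direct HQL query.
	 *
	 * @param sampleId
	 * @return
	 * @throws Exception
	 */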
public String[] getCharacterizationClassNames(String sampleId)
throws Exception {
String hql = "select distinct achar.class from gov.nih.nci.cananolab.domain.particle.characterization.Characterization achar"
+ " where achar.sample.id = " + sampleId;
return this.getClassNames(hql);
}
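	/**
	 * Return the functionalizing entity class names stored for the given
	 * sample, using the OtherFunctionalizingEntity type where applicable.
	 *
	 * @param sampleId
	 * @return
	 * @throws Exception
	 */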
public String[] getFunctionalizingEntityClassNames(String sampleId)
throws Exception {
SortedSet<String> names = new TreeSet<String>();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("id").eq(new Long(sampleId)));
crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
FetchMode.JOIN);
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
for (Object obj : results) {
Sample particleSample = (Sample) obj;
names = this
.getStoredFunctionalizingEntityClassNames(particleSample);
}
return names.toArray(new String[0]);
}
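	/**
	 * Return the function class names stored for the given sample, covering
	 * both inherent functions of composing elements and functions of
	 * functionalizing entities.
	 *
	 * @param sampleId
	 * @return
	 * @throws Exception
	 */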
public String[] getFunctionClassNames(String sampleId) throws Exception {
SortedSet<String> names = new TreeSet<String>();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("id").eq(new Long(sampleId)));
crit.setFetchMode("sampleComposition.nanomaterialEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
FetchMode.JOIN);
crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.functionalizingEntityCollection.functionCollection",
FetchMode.JOIN);
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
for (Object obj : results) {
Sample particleSample = (Sample) obj;
names = this.getStoredFunctionClassNames(particleSample);
}
return names.toArray(new String[0]);
}
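	/**
	 * Return the nanomaterial entity class names stored for the given sample,
	 * merging concrete subclass names with OtherNanomaterialEntity types.
	 *
	 * @param sampleId
	 * @return
	 * @throws Exception
	 */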
public String[] getNanomaterialEntityClassNames(String sampleId)
throws Exception {
String hql = "select distinct entity.class from "
+ " gov.nih.nci.cananolab.domain.particle.NanomaterialEntity entity"
+ " where entity.class!='OtherNanomaterialEntity' and entity.sampleComposition.sample.id = "
+ sampleId;
String[] classNames = this.getClassNames(hql);
SortedSet<String> names = new TreeSet<String>();
if (classNames.length > 0) {
names.addAll(Arrays.asList(classNames));
}
String hql2 = "select distinct entity.type from "
+ " gov.nih.nci.cananolab.domain.nanomaterial.OtherNanomaterialEntity entity"
+ " where entity.sampleComposition.sample.id = " + sampleId;
String[] otherTypes = this.getClassNames(hql2);
if (otherTypes.length > 0) {
names.addAll(Arrays.asList(otherTypes));
}
return names.toArray(new String[0]);
}
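	/**
	 * Run the given HQL query and return its results as a string array.
	 */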
private String[] getClassNames(String hql) throws Exception {
String[] classNames = null;
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
HQLCriteria crit = new HQLCriteria(hql);
List results = appService.query(crit);
if (results != null) {
classNames = new String[results.size()];
} else {
classNames = new String[0];
}
int i = 0;
for (Object obj : results) {
			classNames[i] = obj.toString();
i++;
}
return classNames;
}
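	/**
	 * Flatten each sample into one delimited string of view columns: id,
	 * name, primary point of contact, organization, composition entities,
	 * functions and characterizations.
	 *
	 * @param samples
	 * @return
	 */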
public String[] getSampleViewStrs(List<Sample> samples) {
List<String> sampleStrings = new ArrayList<String>(samples.size());
List<String> columns = new ArrayList<String>(7);
for (Sample sample : samples) {
columns.clear();
columns.add(sample.getId().toString());
columns.add(sample.getName());
PointOfContactBean primaryPOC = new PointOfContactBean(sample
.getPrimaryPointOfContact());
columns.add(primaryPOC.getDomain().getFirstName());
columns.add(primaryPOC.getDomain().getLastName());
columns.add(primaryPOC.getDomain().getOrganization().getName());
// nanomaterial entities and functionalizing entities are in one
// column.
SortedSet<String> entities = new TreeSet<String>();
entities.addAll(getStoredNanomaterialEntityClassNames(sample));
entities.addAll(getStoredFunctionalizingEntityClassNames(sample));
columns.add(StringUtils.join(entities,
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(getStoredFunctionClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(
getStoredCharacterizationClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
sampleStrings.add(StringUtils.joinEmptyItemIncluded(columns,
Constants.VIEW_COL_DELIMITER));
}
return sampleStrings.toArray(new String[0]);
}
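	/**
	 * Return the view columns of a single sample, with nanomaterial and
	 * functionalizing entities in separate columns.
	 *
	 * @param sample
	 * @return
	 */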
public String[] getSampleViewStrs(Sample sample) {
List<String> columns = new ArrayList<String>(7);
columns.add(sample.getId().toString());
columns.add(sample.getName());
PointOfContactBean primaryPOC = new PointOfContactBean(sample
.getPrimaryPointOfContact());
columns.add(primaryPOC.getDomain().getFirstName());
columns.add(primaryPOC.getDomain().getLastName());
columns.add(primaryPOC.getDomain().getOrganization().getName());
columns.add(StringUtils.join(
getStoredNanomaterialEntityClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils
.join(getStoredFunctionalizingEntityClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(getStoredFunctionClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(
getStoredCharacterizationClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
return columns.toArray(new String[0]);
}
public AuthorizationService getAuthService() {
return authService;
}
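	/**
	 * Map functionalizing entity class names to the subclass order numbers
	 * used in the funcEntity.class discriminator column.
	 *
	 * @param classNames
	 * @return
	 */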
public Integer[] convertToFunctionalizingEntityClassOrderNumber(
String[] classNames) {
Integer[] orderNumbers = new Integer[classNames.length];
int i = 0;
for (String name : classNames) {
orderNumbers[i] = Constants.FUNCTIONALIZING_ENTITY_SUBCLASS_ORDER_MAP
.get(name);
i++;
}
return orderNumbers;
}
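	/**
	 * Find an organization by its exact name, if the user has read access to
	 * it.
	 *
	 * @param orgName
	 * @param user
	 * @return
	 * @throws NoAccessException
	 */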
public Organization findOrganizationByName(String orgName, UserBean user)
throws Exception {
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(Organization.class);
crit.add(Restrictions.eq("name", orgName));
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
List results = appService.query(crit);
Organization org = null;
for (Object obj : results) {
org = (Organization) obj;
if (authService.checkReadPermission(user, org.getId().toString())) {
return org;
} else {
throw new NoAccessException();
}
}
return org;
}
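	/**
	 * Find a point of contact by first name, last name and organization name;
	 * empty criteria are ignored.
	 *
	 * @param firstName
	 * @param lastName
	 * @param orgName
	 * @param user
	 * @return
	 * @throws NoAccessException
	 */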
public PointOfContact findPointOfContactByNameAndOrg(String firstName,
String lastName, String orgName, UserBean user) throws Exception {
PointOfContact poc = null;
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(PointOfContact.class);
crit.createAlias("organization", "organization");
if (!StringUtils.isEmpty(lastName))
crit.add(Restrictions.eq("lastName", lastName));
if (!StringUtils.isEmpty(firstName))
crit.add(Restrictions.eq("firstName", firstName));
if (!StringUtils.isEmpty(orgName))
crit.add(Restrictions.eq("organization.name", orgName));
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
List results = appService.query(crit);
for (Object obj : results) {
poc = (PointOfContact) obj;
if (authService.checkReadPermission(user, poc.getId().toString())) {
return poc;
} else {
throw new NoAccessException();
}
}
return poc;
}
} | src/gov/nih/nci/cananolab/service/sample/helper/SampleServiceHelper.java | package gov.nih.nci.cananolab.service.sample.helper;
import gov.nih.nci.cananolab.domain.agentmaterial.OtherFunctionalizingEntity;
import gov.nih.nci.cananolab.domain.common.Datum;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Keyword;
import gov.nih.nci.cananolab.domain.common.Organization;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.function.OtherFunction;
import gov.nih.nci.cananolab.domain.nanomaterial.OtherNanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.domain.particle.ComposingElement;
import gov.nih.nci.cananolab.domain.particle.Function;
import gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity;
import gov.nih.nci.cananolab.domain.particle.NanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Sample;
import gov.nih.nci.cananolab.dto.common.PointOfContactBean;
import gov.nih.nci.cananolab.dto.common.UserBean;
import gov.nih.nci.cananolab.dto.particle.AdvancedSampleBean;
import gov.nih.nci.cananolab.dto.particle.AdvancedSampleSearchBean;
import gov.nih.nci.cananolab.dto.particle.CharacterizationQueryBean;
import gov.nih.nci.cananolab.dto.particle.CompositionQueryBean;
import gov.nih.nci.cananolab.dto.particle.SampleQueryBean;
import gov.nih.nci.cananolab.exception.NoAccessException;
import gov.nih.nci.cananolab.service.security.AuthorizationService;
import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService;
import gov.nih.nci.cananolab.util.ClassUtils;
import gov.nih.nci.cananolab.util.Comparators;
import gov.nih.nci.cananolab.util.Constants;
import gov.nih.nci.cananolab.util.StringUtils;
import gov.nih.nci.cananolab.util.TextMatchMode;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import gov.nih.nci.system.query.hibernate.HQLCriteria;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.hibernate.FetchMode;
import org.hibernate.criterion.Conjunction;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Expression;
import org.hibernate.criterion.Junction;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Property;
import org.hibernate.criterion.Restrictions;
import org.hibernate.criterion.Subqueries;
/**
* Helper class providing implementations of search methods needed for both
* local implementation of SampleService and grid service *
*
* @author pansu, tanq
*
*/
public class SampleServiceHelper {
private AuthorizationService authService;
private static Logger logger = Logger.getLogger(SampleServiceHelper.class);
public SampleServiceHelper() {
try {
authService = new AuthorizationService(Constants.CSM_APP_NAME);
} catch (Exception e) {
logger.error("Can't create authorization service: " + e);
}
}
public List<Sample> findSamplesBy(String samplePointOfContact,
String[] nanomaterialEntityClassNames,
String[] otherNanomaterialEntityTypes,
String[] functionalizingEntityClassNames,
String[] otherFunctionalizingEntityTypes,
String[] functionClassNames, String[] otherFunctionTypes,
String[] characterizationClassNames,
String[] otherCharacterizationTypes, String[] wordList,
UserBean user) throws Exception {
List<Sample> samples = new ArrayList<Sample>();
// can't query for the entire Sample object due to
// limitations in pagination in SDK
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class)
.setProjection(Projections.distinct(Property.forName("name")));
if (samplePointOfContact != null && samplePointOfContact.length() > 0) {
TextMatchMode pocMatchMode = new TextMatchMode(samplePointOfContact);
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("primaryPointOfContact", "pointOfContact");
crit.createAlias("pointOfContact.organization", "organization");
crit.createAlias("otherPointOfContactCollection", "otherPoc",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("otherPoc.organization", "otherOrg",
CriteriaSpecification.LEFT_JOIN);
String critStrs[] = { "pointOfContact.lastName",
"pointOfContact.firstName", "pointOfContact.role",
"organization.name", "otherPoc.lastName",
"otherPoc.firstName", "otherOrg.name" };
for (String critStr : critStrs) {
Criterion pocCrit = Restrictions.ilike(critStr, pocMatchMode
.getUpdatedText(), pocMatchMode.getMatchMode());
disjunction.add(pocCrit);
}
crit.add(disjunction);
}
// join composition
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0
|| functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
crit.createAlias("sampleComposition", "comp",
CriteriaSpecification.LEFT_JOIN);
}
// join nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.nanomaterialEntityCollection", "nanoEntity",
CriteriaSpecification.LEFT_JOIN);
}
// join functionalizing entity
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.functionalizingEntityCollection",
"funcEntity", CriteriaSpecification.LEFT_JOIN);
}
// nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0) {
Criterion nanoEntityCrit = Restrictions.in("nanoEntity.class",
nanomaterialEntityClassNames);
disjunction.add(nanoEntityCrit);
}
if (otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0) {
Criterion otherNanoCrit1 = Restrictions.eq("nanoEntity.class",
"OtherNanomaterialEntity");
Criterion otherNanoCrit2 = Restrictions.in("nanoEntity.type",
otherNanomaterialEntityTypes);
Criterion otherNanoCrit = Restrictions.and(otherNanoCrit1,
otherNanoCrit2);
disjunction.add(otherNanoCrit);
}
crit.add(disjunction);
}
// functionalizing entity
// need to turn class names into integers in order for the .class
// clause to work
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0) {
Integer[] functionalizingEntityClassNameIntegers = this
.convertToFunctionalizingEntityClassOrderNumber(functionalizingEntityClassNames);
Criterion funcEntityCrit = Restrictions.in("funcEntity.class",
functionalizingEntityClassNameIntegers);
disjunction.add(funcEntityCrit);
}
if (otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
				Integer classOrderNumber = Constants.FUNCTIONALIZING_ENTITY_SUBCLASS_ORDER_MAP
						.get("OtherFunctionalizingEntity");
Criterion otherFuncCrit1 = Restrictions.eq("funcEntity.class",
classOrderNumber);
				Criterion otherFuncCrit2 = Restrictions.in("funcEntity.type",
						otherFunctionalizingEntityTypes);
Criterion otherFuncCrit = Restrictions.and(otherFuncCrit1,
otherFuncCrit2);
disjunction.add(otherFuncCrit);
}
crit.add(disjunction);
}
// function
if (functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("nanoEntity.composingElementCollection",
"compElement", CriteriaSpecification.LEFT_JOIN)
.createAlias("compElement.inherentFunctionCollection",
"inFunc", CriteriaSpecification.LEFT_JOIN);
crit.createAlias("funcEntity.functionCollection", "func",
CriteriaSpecification.LEFT_JOIN);
if (functionClassNames != null && functionClassNames.length > 0) {
Criterion funcCrit1 = Restrictions.in("inFunc.class",
functionClassNames);
Criterion funcCrit2 = Restrictions.in("func.class",
functionClassNames);
disjunction.add(funcCrit1).add(funcCrit2);
}
if (otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Criterion otherFuncCrit1 = Restrictions.and(Restrictions.eq(
"inFunc.class", "OtherFunction"), Restrictions.in(
"inFunc.type", otherFunctionTypes));
Criterion otherFuncCrit2 = Restrictions.and(Restrictions.eq(
"func.class", "OtherFunction"), Restrictions.in(
"func.type", otherFunctionTypes));
disjunction.add(otherFuncCrit1).add(otherFuncCrit2);
}
crit.add(disjunction);
}
// join characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0 || wordList != null
&& wordList.length > 0) {
crit.createAlias("characterizationCollection", "chara",
CriteriaSpecification.LEFT_JOIN);
}
// characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0) {
crit
.add(Restrictions.in("chara.class",
characterizationClassNames));
}
// join keyword, finding, publication
if (wordList != null && wordList.length > 0) {
crit.createAlias("keywordCollection", "keyword1");
crit.createAlias("chara.findingCollection", "finding",
CriteriaSpecification.LEFT_JOIN).createAlias(
"finding.fileCollection", "charFile",
CriteriaSpecification.LEFT_JOIN).createAlias(
"charFile.keywordCollection", "keyword2",
CriteriaSpecification.LEFT_JOIN);
// publication keywords
crit.createAlias("publicationCollection", "pub1",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("pub1.keywordCollection", "keyword3",
CriteriaSpecification.LEFT_JOIN);
}
// keyword
if (wordList != null && wordList.length > 0) {
// turn words into upper case before searching keywords
String[] upperKeywords = new String[wordList.length];
for (int i = 0; i < wordList.length; i++) {
upperKeywords[i] = wordList[i].toUpperCase();
}
Disjunction disjunction = Restrictions.disjunction();
for (String keyword : upperKeywords) {
Criterion keywordCrit1 = Restrictions.like("keyword1.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit2 = Restrictions.like("keyword2.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit3 = Restrictions.like("keyword3.name",
keyword, MatchMode.ANYWHERE);
disjunction.add(keywordCrit1);
disjunction.add(keywordCrit2);
disjunction.add(keywordCrit3);
}
for (String word : wordList) {
Criterion summaryCrit1 = Restrictions.ilike(
"chara.designMethodsDescription", word,
MatchMode.ANYWHERE);
Criterion summaryCrit2 = Restrictions.ilike(
"charFile.description", word, MatchMode.ANYWHERE);
Criterion summaryCrit = Restrictions.or(summaryCrit1,
summaryCrit2);
disjunction.add(summaryCrit);
}
crit.add(disjunction);
}
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
List filteredResults = new ArrayList(results);
// get public data
if (user == null) {
filteredResults = authService.filterNonPublic(results);
}
for (Object obj : filteredResults) {
String sampleName = obj.toString();
try {
Sample sample = findSampleByName(sampleName, user);
samples.add(sample);
} catch (NoAccessException e) {
// ignore no access exception
logger.debug("User doesn't have access to sample with name "
+ sampleName);
}
}
Collections.sort(samples, new Comparators.SampleNameComparator());
return samples;
}
public List<String> findSampleNamesBy(String samplePointOfContact,
String[] nanomaterialEntityClassNames,
String[] otherNanomaterialEntityTypes,
String[] functionalizingEntityClassNames,
String[] otherFunctionalizingEntityTypes,
String[] functionClassNames, String[] otherFunctionTypes,
String[] characterizationClassNames,
String[] otherCharacterizationTypes, String[] wordList,
UserBean user) throws Exception {
List<String> sampleNames = new ArrayList<String>();
// can't query for the entire Sample object due to
// limitations in pagination in SDK
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class)
.setProjection(Projections.distinct(Property.forName("name")));
if (!StringUtils.isEmpty(samplePointOfContact)) {
TextMatchMode pocMatchMode = new TextMatchMode(samplePointOfContact);
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("primaryPointOfContact", "pointOfContact");
crit.createAlias("pointOfContact.organization", "organization");
crit.createAlias("otherPointOfContactCollection", "otherPoc",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("otherPoc.organization", "otherOrg",
CriteriaSpecification.LEFT_JOIN);
String critStrs[] = { "pointOfContact.lastName",
"pointOfContact.firstName", "pointOfContact.role",
"organization.name", "otherPoc.lastName",
"otherPoc.firstName", "otherOrg.name" };
for (String critStr : critStrs) {
Criterion pocCrit = Restrictions.ilike(critStr, pocMatchMode
.getUpdatedText(), pocMatchMode.getMatchMode());
disjunction.add(pocCrit);
}
crit.add(disjunction);
}
// join composition
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0
|| functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
crit.createAlias("sampleComposition", "comp",
CriteriaSpecification.LEFT_JOIN);
}
// join nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.nanomaterialEntityCollection", "nanoEntity",
CriteriaSpecification.LEFT_JOIN);
}
// join functionalizing entity
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
crit.createAlias("comp.functionalizingEntityCollection",
"funcEntity", CriteriaSpecification.LEFT_JOIN);
}
// nanomaterial entity
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0
|| otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (nanomaterialEntityClassNames != null
&& nanomaterialEntityClassNames.length > 0) {
Criterion nanoEntityCrit = Restrictions.in("nanoEntity.class",
nanomaterialEntityClassNames);
disjunction.add(nanoEntityCrit);
}
if (otherNanomaterialEntityTypes != null
&& otherNanomaterialEntityTypes.length > 0) {
Criterion otherNanoCrit1 = Restrictions.eq("nanoEntity.class",
"OtherNanomaterialEntity");
Criterion otherNanoCrit2 = Restrictions.in("nanoEntity.type",
otherNanomaterialEntityTypes);
Criterion otherNanoCrit = Restrictions.and(otherNanoCrit1,
otherNanoCrit2);
disjunction.add(otherNanoCrit);
}
crit.add(disjunction);
}
// functionalizing entity
// need to turn class names into integers in order for the .class
// clause to work
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0
|| otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0
|| functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
if (functionalizingEntityClassNames != null
&& functionalizingEntityClassNames.length > 0) {
Integer[] functionalizingEntityClassNameIntegers = this
.convertToFunctionalizingEntityClassOrderNumber(functionalizingEntityClassNames);
Criterion funcEntityCrit = Restrictions.in("funcEntity.class",
functionalizingEntityClassNameIntegers);
disjunction.add(funcEntityCrit);
}
if (otherFunctionalizingEntityTypes != null
&& otherFunctionalizingEntityTypes.length > 0) {
Integer classOrderNumber = Constants.FUNCTIONALIZING_ENTITY_SUBCLASS_ORDER_MAP
.get("OtherFunctionalizingEntity");
Criterion otherFuncCrit1 = Restrictions.eq("funcEntity.class",
classOrderNumber);
Criterion otherFuncCrit2 = Restrictions.in("funcEntity.type",
otherFunctionalizingEntityTypes);
Criterion otherFuncCrit = Restrictions.and(otherFuncCrit1,
otherFuncCrit2);
disjunction.add(otherFuncCrit);
}
crit.add(disjunction);
}
// function
if (functionClassNames != null && functionClassNames.length > 0
|| otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Disjunction disjunction = Restrictions.disjunction();
crit.createAlias("nanoEntity.composingElementCollection",
"compElement", CriteriaSpecification.LEFT_JOIN)
.createAlias("compElement.inherentFunctionCollection",
"inFunc", CriteriaSpecification.LEFT_JOIN);
crit.createAlias("funcEntity.functionCollection", "func",
CriteriaSpecification.LEFT_JOIN);
if (functionClassNames != null && functionClassNames.length > 0) {
Criterion funcCrit1 = Restrictions.in("inFunc.class",
functionClassNames);
Criterion funcCrit2 = Restrictions.in("func.class",
functionClassNames);
disjunction.add(funcCrit1).add(funcCrit2);
}
if (otherFunctionTypes != null && otherFunctionTypes.length > 0) {
Criterion otherFuncCrit1 = Restrictions.and(Restrictions.eq(
"inFunc.class", "OtherFunction"), Restrictions.in(
"inFunc.type", otherFunctionTypes));
Criterion otherFuncCrit2 = Restrictions.and(Restrictions.eq(
"func.class", "OtherFunction"), Restrictions.in(
"func.type", otherFunctionTypes));
disjunction.add(otherFuncCrit1).add(otherFuncCrit2);
}
crit.add(disjunction);
}
// join characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0 || wordList != null
&& wordList.length > 0) {
crit.createAlias("characterizationCollection", "chara",
CriteriaSpecification.LEFT_JOIN);
}
// characterization
if (characterizationClassNames != null
&& characterizationClassNames.length > 0) {
crit
.add(Restrictions.in("chara.class",
characterizationClassNames));
}
// join keyword, finding, publication
if (wordList != null && wordList.length > 0) {
crit.createAlias("keywordCollection", "keyword1");
crit.createAlias("chara.findingCollection", "finding",
CriteriaSpecification.LEFT_JOIN).createAlias(
"finding.fileCollection", "charFile",
CriteriaSpecification.LEFT_JOIN).createAlias(
"charFile.keywordCollection", "keyword2",
CriteriaSpecification.LEFT_JOIN);
// publication keywords
crit.createAlias("publicationCollection", "pub1",
CriteriaSpecification.LEFT_JOIN);
crit.createAlias("pub1.keywordCollection", "keyword3",
CriteriaSpecification.LEFT_JOIN);
}
// keyword
if (wordList != null && wordList.length > 0) {
// turn words into upper case before searching keywords
String[] upperKeywords = new String[wordList.length];
for (int i = 0; i < wordList.length; i++) {
upperKeywords[i] = wordList[i].toUpperCase();
}
Disjunction disjunction = Restrictions.disjunction();
for (String keyword : upperKeywords) {
Criterion keywordCrit1 = Restrictions.like("keyword1.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit2 = Restrictions.like("keyword2.name",
keyword, MatchMode.ANYWHERE);
Criterion keywordCrit3 = Restrictions.like("keyword3.name",
keyword, MatchMode.ANYWHERE);
disjunction.add(keywordCrit1);
disjunction.add(keywordCrit2);
disjunction.add(keywordCrit3);
}
for (String word : wordList) {
Criterion summaryCrit1 = Restrictions.ilike(
"chara.designMethodsDescription", word,
MatchMode.ANYWHERE);
Criterion summaryCrit2 = Restrictions.ilike(
"charFile.description", word, MatchMode.ANYWHERE);
Criterion summaryCrit = Restrictions.or(summaryCrit1,
summaryCrit2);
disjunction.add(summaryCrit);
}
crit.add(disjunction);
}
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
List filteredResults = new ArrayList(results);
// get public data
if (user == null) {
filteredResults = authService.filterNonPublic(results);
}
for (Object obj : filteredResults) {
String sampleName = obj.toString();
if (user == null
|| authService.checkReadPermission(user, sampleName)) {
sampleNames.add(sampleName);
			} else { // no read permission; skip this sample
logger.debug("User doesn't have access to sample with name "
+ sampleName);
}
}
Collections.sort(sampleNames, new Comparators.SortableNameComparator());
return sampleNames;
}
/**
* Return all stored functionalizing entity class names. In case of
* OtherFunctionalizingEntity, store the OtherFunctionalizingEntity type
*
* @param particleSample
* @return
*/
public SortedSet<String> getStoredFunctionalizingEntityClassNames(
Sample particleSample) {
SortedSet<String> storedEntities = new TreeSet<String>();
if (particleSample.getSampleComposition() != null
&& particleSample.getSampleComposition()
.getFunctionalizingEntityCollection() != null) {
for (FunctionalizingEntity entity : particleSample
.getSampleComposition()
.getFunctionalizingEntityCollection()) {
if (entity instanceof OtherFunctionalizingEntity) {
storedEntities.add(((OtherFunctionalizingEntity) entity)
.getType());
} else {
storedEntities.add(ClassUtils.getShortClassName(entity
.getClass().getCanonicalName()));
}
}
}
return storedEntities;
}
/**
* Return all stored function class names. In case of OtherFunction, store
	 * the OtherFunction type
*
* @param particleSample
* @return
*/
public SortedSet<String> getStoredFunctionClassNames(Sample particleSample) {
SortedSet<String> storedFunctions = new TreeSet<String>();
if (particleSample.getSampleComposition() != null) {
if (particleSample.getSampleComposition()
.getNanomaterialEntityCollection() != null) {
for (NanomaterialEntity entity : particleSample
.getSampleComposition()
.getNanomaterialEntityCollection()) {
if (entity.getComposingElementCollection() != null) {
for (ComposingElement element : entity
.getComposingElementCollection()) {
if (element.getInherentFunctionCollection() != null) {
for (Function function : element
.getInherentFunctionCollection()) {
if (function instanceof OtherFunction) {
storedFunctions
.add(((OtherFunction) function)
.getType());
} else {
storedFunctions.add(ClassUtils
.getShortClassName(function
.getClass()
.getCanonicalName()));
}
}
}
}
}
}
}
if (particleSample.getSampleComposition()
.getFunctionalizingEntityCollection() != null) {
for (FunctionalizingEntity entity : particleSample
.getSampleComposition()
.getFunctionalizingEntityCollection()) {
if (entity.getFunctionCollection() != null) {
for (Function function : entity.getFunctionCollection()) {
if (function instanceof OtherFunction) {
storedFunctions.add(((OtherFunction) function)
.getType());
} else {
storedFunctions.add(ClassUtils
.getShortClassName(function.getClass()
.getCanonicalName()));
}
}
}
}
}
}
return storedFunctions;
}
/**
* Return all stored nanomaterial entity class names. In case of
	 * OtherNanomaterialEntity, store the OtherNanomaterialEntity type
*
* @param particleSample
* @return
*/
public SortedSet<String> getStoredNanomaterialEntityClassNames(
Sample particleSample) {
SortedSet<String> storedEntities = new TreeSet<String>();
if (particleSample.getSampleComposition() != null
&& particleSample.getSampleComposition()
.getNanomaterialEntityCollection() != null) {
for (NanomaterialEntity entity : particleSample
.getSampleComposition().getNanomaterialEntityCollection()) {
if (entity instanceof OtherNanomaterialEntity) {
storedEntities.add(((OtherNanomaterialEntity) entity)
.getType());
} else {
storedEntities.add(ClassUtils.getShortClassName(entity
.getClass().getCanonicalName()));
}
}
}
return storedEntities;
}
public SortedSet<String> getStoredCharacterizationClassNames(Sample particle) {
SortedSet<String> storedChars = new TreeSet<String>();
if (particle.getCharacterizationCollection() != null) {
for (Characterization achar : particle
.getCharacterizationCollection()) {
storedChars.add(ClassUtils.getShortClassName(achar.getClass()
.getCanonicalName()));
}
}
return storedChars;
}
public Sample findSampleByName(String sampleName, UserBean user)
throws Exception {
Sample sample = null;
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("name").eq(sampleName));
crit.setFetchMode("primaryPointOfContact", FetchMode.JOIN);
crit.setFetchMode("primaryPointOfContact.organization", FetchMode.JOIN);
crit.setFetchMode("otherPointOfContactCollection", FetchMode.JOIN);
crit.setFetchMode("otherPointOfContactCollection.organization",
FetchMode.JOIN);
crit.setFetchMode("keywordCollection", FetchMode.JOIN);
crit.setFetchMode("characterizationCollection", FetchMode.JOIN);
crit.setFetchMode("sampleComposition.nanomaterialEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
FetchMode.JOIN);
crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
FetchMode.JOIN);
crit
.setFetchMode(
"sampleComposition.functionalizingEntityCollection.functionCollection",
FetchMode.JOIN);
crit.setFetchMode("publicationCollection", FetchMode.JOIN);
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
List result = appService.query(crit);
if (!result.isEmpty()) {
sample = (Sample) result.get(0);
if (authService.checkReadPermission(user, sample.getName())) {
return sample;
} else {
throw new NoAccessException();
}
}
return sample;
}
public List<Keyword> findKeywordsBySampleId(String sampleId, UserBean user)
throws Exception {
List<Keyword> keywords = new ArrayList<Keyword>();
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("id").eq(new Long(sampleId)));
crit.setFetchMode("keywordCollection", FetchMode.JOIN);
List result = appService.query(crit);
Sample sample = null;
if (!result.isEmpty()) {
sample = (Sample) result.get(0);
// check whether user has access to the sample
if (authService.checkReadPermission(user, sample.getName())) {
keywords.addAll(sample.getKeywordCollection());
} else {
throw new NoAccessException(
"User doesn't have access to the sample.");
}
}
return keywords;
}
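	/**
	 * Return the primary point of contact of the sample with the given id,
	 * enforcing the user's read permission on the contact.
	 */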
public PointOfContact findPrimaryPointOfContactBySampleId(String sampleId,
UserBean user) throws Exception {
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
HQLCriteria crit = new HQLCriteria(
"select aSample.primaryPointOfContact from gov.nih.nci.cananolab.domain.particle.Sample aSample where aSample.id = "
+ sampleId);
List result = appService.query(crit);
PointOfContact poc = null;
if (!result.isEmpty()) {
poc = (PointOfContact) result.get(0);
if (authService.checkReadPermission(user, poc.getId().toString())) {
return poc;
} else {
throw new NoAccessException();
}
}
return poc;
}
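	/**
	 * Return the non-primary points of contact of the sample with the given
	 * id, filtering out non-public entries for anonymous users.
	 */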
public List<PointOfContact> findOtherPointOfContactBySampleId(
String sampleId, UserBean user) throws Exception {
List<PointOfContact> pocs = new ArrayList<PointOfContact>();
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
HQLCriteria crit = new HQLCriteria(
"select aSample.otherPointOfContactCollection from gov.nih.nci.cananolab.domain.particle.Sample aSample where aSample.id = "
+ sampleId);
List results = appService.query(crit);
List filteredResults = new ArrayList(results);
if (user == null) {
filteredResults = authService.filterNonPublic(results);
}
		for (Object obj : filteredResults) {
			PointOfContact poc = (PointOfContact) obj;
			// collect every contact that survived the visibility filter
			pocs.add(poc);
		}
return pocs;
}
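	/**
	 * Count the samples whose names appear in the list of public data.
	 */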
public int getNumberOfPublicSamples() throws Exception {
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List<String> publicData = appService.getAllPublicData();
HQLCriteria crit = new HQLCriteria(
"select name from gov.nih.nci.cananolab.domain.particle.Sample");
List results = appService.query(crit);
List<String> publicNames = new ArrayList<String>();
for (Object obj : results) {
			String name = obj.toString();
if (StringUtils.containsIgnoreCase(publicData, name)) {
publicNames.add(name);
}
}
return publicNames.size();
}
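	/**
	 * Return the distinct characterization class names stored for the sample
	 * with the given id.
	 */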
public String[] getCharacterizationClassNames(String sampleId)
throws Exception {
String hql = "select distinct achar.class from gov.nih.nci.cananolab.domain.particle.characterization.Characterization achar"
+ " where achar.sample.id = " + sampleId;
return this.getClassNames(hql);
}
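	/**
	 * Return the functionalizing entity class names (or types, for "other"
	 * entities) stored for the sample with the given id.
	 */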
public String[] getFunctionalizingEntityClassNames(String sampleId)
throws Exception {
SortedSet<String> names = new TreeSet<String>();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("id").eq(new Long(sampleId)));
crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
FetchMode.JOIN);
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
for (Object obj : results) {
Sample particleSample = (Sample) obj;
names = this
.getStoredFunctionalizingEntityClassNames(particleSample);
}
return names.toArray(new String[0]);
}
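	/**
	 * Return the function class names (or types, for "other" functions)
	 * stored for the sample with the given id.
	 */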
public String[] getFunctionClassNames(String sampleId) throws Exception {
SortedSet<String> names = new TreeSet<String>();
DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
Property.forName("id").eq(new Long(sampleId)));
crit.setFetchMode("sampleComposition.nanomaterialEntityCollection",
FetchMode.JOIN);
		crit.setFetchMode(
				"sampleComposition.nanomaterialEntityCollection.composingElementCollection",
				FetchMode.JOIN);
		crit.setFetchMode(
				"sampleComposition.nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
				FetchMode.JOIN);
crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
FetchMode.JOIN);
		crit.setFetchMode(
				"sampleComposition.functionalizingEntityCollection.functionCollection",
				FetchMode.JOIN);
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
List results = appService.query(crit);
for (Object obj : results) {
Sample particleSample = (Sample) obj;
names = this.getStoredFunctionClassNames(particleSample);
}
return names.toArray(new String[0]);
}
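	/**
	 * Return the distinct nanomaterial entity class names for the sample;
	 * for OtherNanomaterialEntity the entity type is returned instead.
	 */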
public String[] getNanomaterialEntityClassNames(String sampleId)
throws Exception {
String hql = "select distinct entity.class from "
+ " gov.nih.nci.cananolab.domain.particle.NanomaterialEntity entity"
+ " where entity.class!='OtherNanomaterialEntity' and entity.sampleComposition.sample.id = "
+ sampleId;
String[] classNames = this.getClassNames(hql);
SortedSet<String> names = new TreeSet<String>();
if (classNames.length > 0) {
names.addAll(Arrays.asList(classNames));
}
String hql2 = "select distinct entity.type from "
+ " gov.nih.nci.cananolab.domain.nanomaterial.OtherNanomaterialEntity entity"
+ " where entity.sampleComposition.sample.id = " + sampleId;
String[] otherTypes = this.getClassNames(hql2);
if (otherTypes.length > 0) {
names.addAll(Arrays.asList(otherTypes));
}
return names.toArray(new String[0]);
}
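	/**
	 * Run the given HQL query and return the results as an array of strings.
	 */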
private String[] getClassNames(String hql) throws Exception {
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		HQLCriteria crit = new HQLCriteria(hql);
		List results = appService.query(crit);
		// A null result list yields an empty array rather than a
		// NullPointerException when iterated below.
		if (results == null) {
			return new String[0];
		}
		String[] classNames = new String[results.size()];
		int i = 0;
		for (Object obj : results) {
			classNames[i] = obj.toString();
			i++;
		}
		return classNames;
}
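	/**
	 * Build a delimited view string for each sample containing its id, name,
	 * primary point of contact, entity, function and characterization class
	 * names.
	 */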
public String[] getSampleViewStrs(List<Sample> samples) {
List<String> sampleStrings = new ArrayList<String>(samples.size());
List<String> columns = new ArrayList<String>(7);
for (Sample sample : samples) {
columns.clear();
columns.add(sample.getId().toString());
columns.add(sample.getName());
PointOfContactBean primaryPOC = new PointOfContactBean(sample
.getPrimaryPointOfContact());
columns.add(primaryPOC.getDomain().getFirstName());
columns.add(primaryPOC.getDomain().getLastName());
columns.add(primaryPOC.getDomain().getOrganization().getName());
// nanomaterial entities and functionalizing entities are in one
// column.
SortedSet<String> entities = new TreeSet<String>();
entities.addAll(getStoredNanomaterialEntityClassNames(sample));
entities.addAll(getStoredFunctionalizingEntityClassNames(sample));
columns.add(StringUtils.join(entities,
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(getStoredFunctionClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(
getStoredCharacterizationClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
sampleStrings.add(StringUtils.joinEmptyItemIncluded(columns,
Constants.VIEW_COL_DELIMITER));
}
return sampleStrings.toArray(new String[0]);
}
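	/**
	 * Build the view columns (id, name, primary point of contact, entities,
	 * functions and characterizations) for a single sample.
	 */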
public String[] getSampleViewStrs(Sample sample) {
List<String> columns = new ArrayList<String>(7);
columns.clear();
columns.add(sample.getId().toString());
columns.add(sample.getName());
PointOfContactBean primaryPOC = new PointOfContactBean(sample
.getPrimaryPointOfContact());
columns.add(primaryPOC.getDomain().getFirstName());
columns.add(primaryPOC.getDomain().getLastName());
columns.add(primaryPOC.getDomain().getOrganization().getName());
// nanomaterial entities and functionalizing entities are in one
// column.
SortedSet<String> entities = new TreeSet<String>();
entities.addAll(getStoredNanomaterialEntityClassNames(sample));
entities.addAll(getStoredFunctionalizingEntityClassNames(sample));
columns.add(StringUtils.join(entities,
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(getStoredFunctionClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
columns.add(StringUtils.join(
getStoredCharacterizationClassNames(sample),
Constants.VIEW_CLASSNAME_DELIMITER));
return columns.toArray(new String[0]);
}
public AuthorizationService getAuthService() {
return authService;
}
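	/**
	 * Map functionalizing entity class names to their display order numbers.
	 */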
public Integer[] convertToFunctionalizingEntityClassOrderNumber(
String[] classNames) {
Integer[] orderNumbers = new Integer[classNames.length];
int i = 0;
for (String name : classNames) {
orderNumbers[i] = Constants.FUNCTIONALIZING_ENTITY_SUBCLASS_ORDER_MAP
.get(name);
i++;
}
return orderNumbers;
}
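	/**
	 * Find an organization by name, enforcing the user's read permission.
	 */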
public Organization findOrganizationByName(String orgName, UserBean user)
throws Exception {
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(Organization.class);
crit.add(Restrictions.eq("name", orgName));
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
List results = appService.query(crit);
Organization org = null;
for (Object obj : results) {
org = (Organization) obj;
if (authService.checkReadPermission(user, org.getId().toString())) {
return org;
} else {
throw new NoAccessException();
}
}
return org;
}
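	/**
	 * Find a point of contact matching the given first name, last name and
	 * organization name; empty criteria are ignored.
	 */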
public PointOfContact findPointOfContactByNameAndOrg(String firstName,
String lastName, String orgName, UserBean user) throws Exception {
PointOfContact poc = null;
CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
.getApplicationService();
DetachedCriteria crit = DetachedCriteria.forClass(PointOfContact.class);
crit.createAlias("organization", "organization");
if (!StringUtils.isEmpty(lastName))
crit.add(Restrictions.eq("lastName", lastName));
if (!StringUtils.isEmpty(firstName))
crit.add(Restrictions.eq("firstName", firstName));
if (!StringUtils.isEmpty(orgName))
crit.add(Restrictions.eq("organization.name", orgName));
crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
List results = appService.query(crit);
for (Object obj : results) {
poc = (PointOfContact) obj;
if (authService.checkReadPermission(user, poc.getId().toString())) {
return poc;
} else {
throw new NoAccessException();
}
}
return poc;
}
} | added findDampleById
SVN-Revision: 16625
| src/gov/nih/nci/cananolab/service/sample/helper/SampleServiceHelper.java | added findDampleById | <ide><path>rc/gov/nih/nci/cananolab/service/sample/helper/SampleServiceHelper.java
<ide> return sample;
<ide> }
<ide>
<add> public Sample findSampleById(String sampleId, UserBean user)
<add> throws Exception {
<add> Sample sample = null;
<add> CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
<add> .getApplicationService();
<add>
<add> DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
<add> Property.forName("id").eq(new Long(sampleId)));
<add> crit.setFetchMode("primaryPointOfContact", FetchMode.JOIN);
<add> crit.setFetchMode("primaryPointOfContact.organization", FetchMode.JOIN);
<add> crit.setFetchMode("otherPointOfContactCollection", FetchMode.JOIN);
<add> crit.setFetchMode("otherPointOfContactCollection.organization",
<add> FetchMode.JOIN);
<add> crit.setFetchMode("keywordCollection", FetchMode.JOIN);
<add> crit.setFetchMode("characterizationCollection", FetchMode.JOIN);
<add> crit.setFetchMode("sampleComposition.nanomaterialEntityCollection",
<add> FetchMode.JOIN);
<add> crit
<add> .setFetchMode(
<add> "sampleComposition.nanomaterialEntityCollection.composingElementCollection",
<add> FetchMode.JOIN);
<add> crit
<add> .setFetchMode(
<add> "sampleComposition.nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
<add> FetchMode.JOIN);
<add>
<add> crit.setFetchMode("sampleComposition.functionalizingEntityCollection",
<add> FetchMode.JOIN);
<add> crit
<add> .setFetchMode(
<add> "sampleComposition.functionalizingEntityCollection.functionCollection",
<add> FetchMode.JOIN);
<add> crit.setFetchMode("publicationCollection", FetchMode.JOIN);
<add> crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
<add>
<add> List result = appService.query(crit);
<add> if (!result.isEmpty()) {
<add> sample = (Sample) result.get(0);
<add> if (authService.checkReadPermission(user, sample.getName())) {
<add> return sample;
<add> } else {
<add> throw new NoAccessException();
<add> }
<add> }
<add> return sample;
<add> }
<add>
<ide> public List<Keyword> findKeywordsBySampleId(String sampleId, UserBean user)
<ide> throws Exception {
<ide> List<Keyword> keywords = new ArrayList<Keyword>();
<ide> columns.add(primaryPOC.getDomain().getFirstName());
<ide> columns.add(primaryPOC.getDomain().getLastName());
<ide> columns.add(primaryPOC.getDomain().getOrganization().getName());
<del> // nanomaterial entities and functionalizing entities are in one
<del> // column.
<del> SortedSet<String> entities = new TreeSet<String>();
<del> entities.addAll(getStoredNanomaterialEntityClassNames(sample));
<del> entities.addAll(getStoredFunctionalizingEntityClassNames(sample));
<del> columns.add(StringUtils.join(entities,
<add> columns.add(StringUtils.join(
<add> getStoredNanomaterialEntityClassNames(sample),
<add> Constants.VIEW_CLASSNAME_DELIMITER));
<add> columns.add(StringUtils
<add> .join(getStoredFunctionalizingEntityClassNames(sample),
<ide> Constants.VIEW_CLASSNAME_DELIMITER));
<ide> columns.add(StringUtils.join(getStoredFunctionClassNames(sample),
<ide> Constants.VIEW_CLASSNAME_DELIMITER)); |
|
Java | mit | b1a04ebfd94acd48ab3fc59ad84c221ad0c9e75c | 0 | tsmacdonald/simulator,tsmacdonald/simulator | package edu.wheaton.simulator.statistics;
import java.util.Map;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import edu.wheaton.simulator.entity.Agent;
import edu.wheaton.simulator.entity.EntityID;
import edu.wheaton.simulator.entity.GridEntity;
import edu.wheaton.simulator.entity.Prototype;
import edu.wheaton.simulator.entity.PrototypeID;
import edu.wheaton.simulator.entity.Slot;
/**
* This class will create the Snapshots to be put into the Database
*
* @author akonwi and Daniel Gill
*/
public class SnapshotFactory {
// TODO Please check these methods and see if they're all okay.
/**
* Produce a snapshot of the given slot in time.
* @param slot The relevant slot.
* @param step The present moment in time.
* @return An EntitySnapshot representing that slot at that point in time.
*/
public static EntitySnapshot makeSlotSnapshot(GridEntity slot,
Integer step) {
return new EntitySnapshot(slot.getEntityID(),
makeFieldSnapshots(slot.getFieldMap()), step);
}
/**
* Make a snapshot of an Agent at a particular point in time.
* @param agent The agent being recorded.
* @param step The point at which the capture was taken.
	 * @return An AgentSnapshot of the agent at that point in time.
*/
public static AgentSnapshot makeAgentSnapshot(GridEntity agent,
Integer step) {
return new AgentSnapshot(agent.getEntityID(),
makeFieldSnapshots(agent.getFieldMap()), step,
/*entity.getProtype()*/ null);
}
/**
* Make a FieldSnapshot from the associated name and value.
* @param name The name of the field.
* @param value The value of the field.
* @return A FieldSnapshot corresponding to the pair of Strings.
*/
public static FieldSnapshot makeFieldSnapshot(String name, String value) {
return new FieldSnapshot(name, value);
}
/**
* Build FieldSnapshots out of all the string pairs in a map.
* @param fields A series of name-value string pairs.
* @return An ImmutableMap of Strings to FieldSnapshots.
*/
public static ImmutableMap<String, FieldSnapshot> makeFieldSnapshots(
Map<String, String> fields) {
ImmutableMap.Builder<String, FieldSnapshot> builder = new ImmutableMap.Builder<String, FieldSnapshot>();
for (String name : fields.keySet()) {
String value = fields.get(name);
builder.put(name, makeFieldSnapshot(name, value));
}
return builder.build();
}
/**
* Build a new snapshot of a Prototype at a given point in time.
* @param prototype The prototype being recorded.
* @param step The point in the simulation being captured.
* @return A PrototypeSnapshot corresponding to the provided Prototype.
*/
public static PrototypeSnapshot makePrototypeSnapshot(Prototype prototype,
Integer step) {
return new PrototypeSnapshot(prototype.getProtypeName(),
makeFieldSnapshots(prototype.getFieldMap()),
prototype.childPopulation(), prototype.childIDs(), step);
}
private SnapshotFactory() {
}
}
| src/edu/wheaton/simulator/statistics/SnapshotFactory.java | package edu.wheaton.simulator.statistics;
import java.util.Map;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import edu.wheaton.simulator.entity.Agent;
import edu.wheaton.simulator.entity.EntityID;
import edu.wheaton.simulator.entity.GridEntity;
import edu.wheaton.simulator.entity.Prototype;
import edu.wheaton.simulator.entity.PrototypeID;
import edu.wheaton.simulator.entity.Slot;
/**
* This class will create the Snapshots to be put into the Database
*
* @author akonwi and Daniel Gill
*/
public class SnapshotFactory {
// TODO Please check these methods and see if they're all okay.
public static EntitySnapshot makeSlotSnapshot(GridEntity entity,
Integer step) {
return new EntitySnapshot(entity.getEntityID(),
makeFieldSnapshots(entity.getFieldMap()), step);
}
public static AgentSnapshot makeAgentSnapshot(GridEntity entity,
Integer step) {
// Sort out with the Agent guys just wtf is up with fields.
return new AgentSnapshot(entity.getEntityID(), makeFieldSnapshots(entity.getFieldMap()), step,
entity.getPrototypeName(), );
}
/**
* Make a FieldSnapshot from the associated name and value.
* @param name The name of the field.
* @param value The value of the field.
* @return A FieldSnapshot corresponding to the pair of Strings.
*/
public static FieldSnapshot makeFieldSnapshot(String name, String value) {
return new FieldSnapshot(name, value);
}
public static ImmutableMap<String, FieldSnapshot> makeFieldSnapshots(
Map<String, String> fields) {
ImmutableMap.Builder<String, FieldSnapshot> builder = new ImmutableMap.Builder<String, FieldSnapshot>();
for (String name : fields.keySet()) {
String value = fields.get(name);
builder.put(name, makeFieldSnapshot(name, value));
}
return builder.build();
}
public static PrototypeSnapshot makePrototypeSnapshot(Prototype prototype,
Integer step) {
return null; // TODO
}
private SnapshotFactory() {
}
}
| Implemented makeAgentSnapshot() and makePrototypeSnapshot().
| src/edu/wheaton/simulator/statistics/SnapshotFactory.java | Implemented makeAgentSnapshot() and makePrototypeSnapshot(). | <ide><path>rc/edu/wheaton/simulator/statistics/SnapshotFactory.java
<ide>
<ide> // TODO Please check these methods and see if they're all okay.
<ide>
<del> public static EntitySnapshot makeSlotSnapshot(GridEntity entity,
<add> /**
<add> * Produce a snapshot of the given slot in time.
<add> * @param slot The relevant slot.
<add> * @param step The present moment in time.
<add> * @return An EntitySnapshot representing that slot at that point in time.
<add> */
<add> public static EntitySnapshot makeSlotSnapshot(GridEntity slot,
<ide> Integer step) {
<del> return new EntitySnapshot(entity.getEntityID(),
<del> makeFieldSnapshots(entity.getFieldMap()), step);
<add> return new EntitySnapshot(slot.getEntityID(),
<add> makeFieldSnapshots(slot.getFieldMap()), step);
<ide> }
<ide>
<del> public static AgentSnapshot makeAgentSnapshot(GridEntity entity,
<add> /**
<add> * Make a snapshot of an Agent at a particular point in time.
<add> * @param agent The agent being recorded.
<add> * @param step The point at which the capture was taken.
<add> * @return
<add> */
<add> public static AgentSnapshot makeAgentSnapshot(GridEntity agent,
<ide> Integer step) {
<del> // Sort out with the Agent guys just wtf is up with fields.
<del> return new AgentSnapshot(entity.getEntityID(), makeFieldSnapshots(entity.getFieldMap()), step,
<del> entity.getPrototypeName(), );
<add> return new AgentSnapshot(agent.getEntityID(),
<add> makeFieldSnapshots(agent.getFieldMap()), step,
<add> /*entity.getProtype()*/ null);
<ide> }
<ide>
<ide> /**
<ide> return new FieldSnapshot(name, value);
<ide> }
<ide>
<add> /**
<add> * Build FieldSnapshots out of all the string pairs in a map.
<add> * @param fields A series of name-value string pairs.
<add> * @return An ImmutableMap of Strings to FieldSnapshots.
<add> */
<ide> public static ImmutableMap<String, FieldSnapshot> makeFieldSnapshots(
<ide> Map<String, String> fields) {
<ide> ImmutableMap.Builder<String, FieldSnapshot> builder = new ImmutableMap.Builder<String, FieldSnapshot>();
<ide> return builder.build();
<ide> }
<ide>
<add> /**
<add> * Build a new snapshot of a Prototype at a given point in time.
<add> * @param prototype The prototype being recorded.
<add> * @param step The point in the simulation being captured.
<add> * @return A PrototypeSnapshot corresponding to the provided Prototype.
<add> */
<ide> public static PrototypeSnapshot makePrototypeSnapshot(Prototype prototype,
<ide> Integer step) {
<del> return null; // TODO
<add> return new PrototypeSnapshot(prototype.getProtypeName(),
<add> makeFieldSnapshots(prototype.getFieldMap()),
<add> prototype.childPopulation(), prototype.childIDs(), step);
<ide> }
<ide>
<ide> private SnapshotFactory() { |
|
Java | apache-2.0 | d7f0d76dcd80da84e5be12934a6aba26fcc2fdf4 | 0 | romartin/kie-wb-common,manstis/kie-wb-common,manstis/kie-wb-common,romartin/kie-wb-common,manstis/kie-wb-common,romartin/kie-wb-common,droolsjbpm/kie-wb-common,romartin/kie-wb-common,droolsjbpm/kie-wb-common,manstis/kie-wb-common,romartin/kie-wb-common,manstis/kie-wb-common | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.commands.expressions.types.dtable;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.api.definition.model.DecisionRule;
import org.kie.workbench.common.dmn.api.definition.model.DecisionTable;
import org.kie.workbench.common.dmn.api.definition.model.InputClause;
import org.kie.workbench.common.dmn.api.definition.model.LiteralExpression;
import org.kie.workbench.common.dmn.api.definition.model.OutputClause;
import org.kie.workbench.common.dmn.api.definition.model.UnaryTests;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableDefaultValueUtilities;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableUIModelMapper;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableUIModelMapperHelper;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.InputClauseColumn;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.OutputClauseColumn;
import org.kie.workbench.common.dmn.client.widgets.grid.controls.list.ListSelectorView;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridData;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.command.CanvasCommandResultBuilder;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.command.Command;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandResultBuilder;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.uberfire.ext.wires.core.grids.client.model.GridData;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridRow;
import org.uberfire.ext.wires.core.grids.client.widget.grid.columns.RowNumberColumn;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.kie.workbench.common.dmn.client.widgets.grid.model.BaseHasDynamicHeightCell.DEFAULT_HEIGHT;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.Silent.class)
public class AddOutputClauseCommandTest {
@Mock
private RowNumberColumn uiRowNumberColumn;
@Mock
private OutputClauseColumn uiOutputClauseColumn;
@Mock
private InputClauseColumn uiInputClauseColumn;
@Mock
private ListSelectorView.Presenter listSelector;
@Mock
private AbstractCanvasHandler canvasHandler;
@Mock
private GraphCommandExecutionContext graphCommandExecutionContext;
@Mock
private org.uberfire.mvp.Command executeCanvasOperation;
@Mock
private org.uberfire.mvp.Command undoCanvasOperation;
private DecisionTable dtable;
private OutputClause outputClause;
private GridData uiModel;
private DecisionTableUIModelMapper uiModelMapper;
private AddOutputClauseCommand command;
@Before
public void setUp() throws Exception {
this.dtable = new DecisionTable();
this.uiModel = new DMNGridData();
this.uiModel.appendColumn(uiRowNumberColumn);
this.outputClause = new OutputClause();
this.uiModelMapper = new DecisionTableUIModelMapper(() -> uiModel,
() -> Optional.of(dtable),
listSelector,
DEFAULT_HEIGHT);
doReturn(0).when(uiRowNumberColumn).getIndex();
doReturn(1).when(uiOutputClauseColumn).getIndex();
}
private void makeCommand(final int index) {
this.command = spy(new AddOutputClauseCommand(dtable,
outputClause,
uiModel,
() -> uiOutputClauseColumn,
index,
uiModelMapper,
executeCanvasOperation,
undoCanvasOperation));
}
@Test
public void testGraphCommandAllow() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.allow(graphCommandExecutionContext));
}
@Test
public void testGraphCommandCheck() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.allow(graphCommandExecutionContext));
}
@Test
public void testGraphCommandExecute() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
dtable.getRule().add(new DecisionRule());
dtable.getRule().add(new DecisionRule());
assertEquals(0, dtable.getOutput().size());
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
// one new output column
assertEquals(1, dtable.getOutput().size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_PREFIX + "1",
dtable.getOutput().get(0).getName());
// first rule
final List<LiteralExpression> outputEntriesRuleOne = dtable.getRule().get(0).getOutputEntry();
assertEquals(1, outputEntriesRuleOne.size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleOne.get(0).getText().getValue());
assertEquals(dtable.getRule().get(0), outputEntriesRuleOne.get(0).getParent());
// second rule
final List<LiteralExpression> outputEntriesRuleTwo = dtable.getRule().get(1).getOutputEntry();
assertEquals(1, outputEntriesRuleTwo.size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleTwo.get(0).getText().getValue());
assertEquals(dtable.getRule().get(1), outputEntriesRuleTwo.get(0).getParent());
assertEquals(dtable,
outputClause.getParent());
}
@Test
public void testGraphCommandExecuteExistingNotAffected() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final String ruleOneOldOutput = "old rule 1";
final String ruleTwoOldOutput = "old rule 2";
dtable.getOutput().add(new OutputClause());
addRuleWithOutputClauseValues(ruleOneOldOutput);
addRuleWithOutputClauseValues(ruleTwoOldOutput);
assertEquals(1, dtable.getOutput().size());
//Graph command will insert new OutputClause at index 0 of the OutputEntries
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(2, dtable.getOutput().size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_PREFIX + "1",
dtable.getOutput().get(0).getName());
assertNull(dtable.getOutput().get(1).getName());
// first rule
final List<LiteralExpression> outputEntriesRuleOne = dtable.getRule().get(0).getOutputEntry();
assertEquals(2, outputEntriesRuleOne.size());
assertEquals(ruleOneOldOutput, outputEntriesRuleOne.get(1).getText().getValue());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleOne.get(0).getText().getValue());
assertEquals(dtable.getRule().get(0), outputEntriesRuleOne.get(0).getParent());
// second rule
final List<LiteralExpression> outputEntriesRuleTwo = dtable.getRule().get(1).getOutputEntry();
assertEquals(2, outputEntriesRuleTwo.size());
assertEquals(ruleTwoOldOutput, outputEntriesRuleTwo.get(1).getText().getValue());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleTwo.get(0).getText().getValue());
assertEquals(dtable.getRule().get(1), outputEntriesRuleTwo.get(0).getParent());
assertEquals(dtable,
outputClause.getParent());
}
@Test
public void testGraphCommandExecuteInsertMiddle() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT + 1);
final String ruleOutputOne = "rule out 1";
final String ruleOutputTwo = "rule out 2";
dtable.getOutput().add(new OutputClause());
dtable.getOutput().add(new OutputClause());
addRuleWithOutputClauseValues(ruleOutputOne, ruleOutputTwo);
assertEquals(2, dtable.getOutput().size());
//Graph command will insert new OutputClause at index 1 of the OutputEntries
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(3, dtable.getOutput().size());
assertNull(dtable.getOutput().get(0).getName());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_PREFIX + "1",
dtable.getOutput().get(1).getName());
assertNull(dtable.getOutput().get(2).getName());
final List<LiteralExpression> ruleOutputs = dtable.getRule().get(0).getOutputEntry();
// first rule
assertEquals(3, ruleOutputs.size());
assertEquals(ruleOutputOne, ruleOutputs.get(0).getText().getValue());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, ruleOutputs.get(1).getText().getValue());
assertEquals(dtable.getRule().get(0), ruleOutputs.get(1).getParent());
assertEquals(ruleOutputTwo, ruleOutputs.get(2).getText().getValue());
assertEquals(dtable,
outputClause.getParent());
}
@Test(expected = IndexOutOfBoundsException.class)
public void testGraphCommandUndoNoOutputClauseColumns() {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
dtable.getRule().add(new DecisionRule());
assertEquals(0, dtable.getOutput().size());
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.undo(graphCommandExecutionContext));
}
@Test
public void testGraphCommandUndoJustLastOutputClauseColumn() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final String ruleOneOldOutput = "old rule 1";
final String ruleTwoOldOutput = "old rule 2";
dtable.getOutput().add(new OutputClause());
addRuleWithOutputClauseValues(ruleOneOldOutput);
addRuleWithOutputClauseValues(ruleTwoOldOutput);
assertEquals(1, dtable.getOutput().size());
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.undo(graphCommandExecutionContext));
assertEquals(1, dtable.getOutput().size());
// first rule
assertEquals(1, dtable.getRule().get(0).getOutputEntry().size());
assertEquals(ruleOneOldOutput, dtable.getRule().get(0).getOutputEntry().get(0).getText().getValue());
// second rule
assertEquals(1, dtable.getRule().get(1).getOutputEntry().size());
assertEquals(ruleTwoOldOutput, dtable.getRule().get(1).getOutputEntry().get(0).getText().getValue());
}
@Test
public void testCanvasCommandAllow() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasCommand.allow(canvasHandler));
}
@Test
public void testCanvasCommandAddOutputClauseToRuleWithInputs() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT + 1);
final String ruleInputValue = "in value";
final String ruleOutputValue = "out value";
dtable.getInput().add(new InputClause());
dtable.getRule().add(new DecisionRule() {{
getInputEntry().add(new UnaryTests() {{
getText().setValue(ruleInputValue);
}});
getOutputEntry().add(new LiteralExpression() {{
getText().setValue(ruleOutputValue);
}});
}});
//Graph command populates OutputEntries so overwrite with test values
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
dtable.getRule().get(0).getOutputEntry().get(0).getText().setValue(ruleOutputValue);
doReturn(1).when(uiInputClauseColumn).getIndex();
doReturn(2).when(uiOutputClauseColumn).getIndex();
uiModel.appendColumn(uiInputClauseColumn);
uiModel.appendRow(new BaseGridRow());
uiModelMapper.fromDMNModel(0, 1);
final Command<AbstractCanvasHandler, CanvasViolation> canvasAddOutputClauseCommand = command.newCanvasCommand(canvasHandler);
canvasAddOutputClauseCommand.execute(canvasHandler);
assertEquals(ruleInputValue, uiModel.getRow(0).getCells().get(1).getValue().getValue());
assertEquals(ruleOutputValue, uiModel.getRow(0).getCells().get(2).getValue().getValue());
assertEquals(3, uiModel.getColumnCount());
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasAddOutputClauseCommand.undo(canvasHandler));
assertEquals(2, uiModel.getColumnCount());
verify(executeCanvasOperation).execute();
verify(undoCanvasOperation).execute();
verify(command, times(2)).updateParentInformation();
}
@Test
public void testCanvasCommandAddOutputClauseToRuleWithoutInputsThenUndo() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final String ruleOneOutputValue = "one";
final String ruleTwoOutputValue = "two";
dtable.getRule().add(new DecisionRule());
dtable.getRule().add(new DecisionRule());
uiModel.appendRow(new BaseGridRow());
uiModel.appendRow(new BaseGridRow());
//Graph command populates OutputEntries so overwrite with test values
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
dtable.getRule().get(0).getOutputEntry().get(0).getText().setValue(ruleOneOutputValue);
dtable.getRule().get(1).getOutputEntry().get(0).getText().setValue(ruleTwoOutputValue);
final Command<AbstractCanvasHandler, CanvasViolation> canvasAddOutputClauseCommand = command.newCanvasCommand(canvasHandler);
canvasAddOutputClauseCommand.execute(canvasHandler);
// first rule
assertEquals(ruleOneOutputValue, uiModel.getRow(0).getCells().get(1).getValue().getValue());
// second rule
assertEquals(ruleTwoOutputValue, uiModel.getRow(1).getCells().get(1).getValue().getValue());
assertEquals(2, uiModel.getColumnCount());
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasAddOutputClauseCommand.undo(canvasHandler));
assertEquals(1, uiModel.getColumnCount());
verify(executeCanvasOperation).execute();
verify(undoCanvasOperation).execute();
verify(command, times(2)).updateParentInformation();
}
@Test
public void testCanvasCommandUndoWhenNothingBefore() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<AbstractCanvasHandler, CanvasViolation> canvasAddOutputClauseCommand = command.newCanvasCommand(canvasHandler);
canvasAddOutputClauseCommand.undo(canvasHandler);
// just row number column
assertEquals(1, uiModel.getColumnCount());
verify(undoCanvasOperation).execute();
verify(command).updateParentInformation();
}
@Test
public void testComponentWidths() {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
//Execute
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasCommand.execute(canvasHandler));
assertEquals(dtable.getRequiredComponentWidthCount(),
dtable.getComponentWidths().size());
assertNull(dtable.getComponentWidths().get(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT));
//Undo
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.undo(graphCommandExecutionContext));
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasCommand.undo(canvasHandler));
assertEquals(dtable.getRequiredComponentWidthCount(),
dtable.getComponentWidths().size());
}
private void addRuleWithOutputClauseValues(String... outputClauseValues) {
dtable.getRule().add(new DecisionRule() {{
Stream.of(outputClauseValues).forEach(oClause -> {
getOutputEntry().add(new LiteralExpression() {{
getText().setValue(oClause);
}});
});
}});
}
}
| kie-wb-common-dmn/kie-wb-common-dmn-client/src/test/java/org/kie/workbench/common/dmn/client/commands/expressions/types/dtable/AddOutputClauseCommandTest.java | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.commands.expressions.types.dtable;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.api.definition.model.DecisionRule;
import org.kie.workbench.common.dmn.api.definition.model.DecisionTable;
import org.kie.workbench.common.dmn.api.definition.model.InputClause;
import org.kie.workbench.common.dmn.api.definition.model.LiteralExpression;
import org.kie.workbench.common.dmn.api.definition.model.OutputClause;
import org.kie.workbench.common.dmn.api.definition.model.UnaryTests;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableDefaultValueUtilities;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableUIModelMapper;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.DecisionTableUIModelMapperHelper;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.InputClauseColumn;
import org.kie.workbench.common.dmn.client.editors.expressions.types.dtable.OutputClauseColumn;
import org.kie.workbench.common.dmn.client.widgets.grid.controls.list.ListSelectorView;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridData;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.command.CanvasCommandResultBuilder;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.command.Command;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandResultBuilder;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.uberfire.ext.wires.core.grids.client.model.GridData;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridRow;
import org.uberfire.ext.wires.core.grids.client.widget.grid.columns.RowNumberColumn;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.kie.workbench.common.dmn.client.widgets.grid.model.BaseHasDynamicHeightCell.DEFAULT_HEIGHT;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.Silent.class)
public class AddOutputClauseCommandTest {
@Mock
private RowNumberColumn uiRowNumberColumn;
@Mock
private OutputClauseColumn uiOutputClauseColumn;
@Mock
private InputClauseColumn uiInputClauseColumn;
@Mock
private ListSelectorView.Presenter listSelector;
@Mock
private AbstractCanvasHandler canvasHandler;
@Mock
private GraphCommandExecutionContext graphCommandExecutionContext;
@Mock
private org.uberfire.mvp.Command executeCanvasOperation;
@Mock
private org.uberfire.mvp.Command undoCanvasOperation;
private DecisionTable dtable;
private OutputClause outputClause;
private GridData uiModel;
private DecisionTableUIModelMapper uiModelMapper;
private AddOutputClauseCommand command;
@Before
public void doNotRunTestsOnJdkEleven() {
final String javaVersion = "11";
final String javaVersionPropertyKey = "java.version";
Assume.assumeFalse(System.getProperty(javaVersionPropertyKey).contains(javaVersion));
}
@Before
public void setUp() throws Exception {
this.dtable = new DecisionTable();
this.uiModel = new DMNGridData();
this.uiModel.appendColumn(uiRowNumberColumn);
this.outputClause = new OutputClause();
this.uiModelMapper = new DecisionTableUIModelMapper(() -> uiModel,
() -> Optional.of(dtable),
listSelector,
DEFAULT_HEIGHT);
doReturn(0).when(uiRowNumberColumn).getIndex();
doReturn(1).when(uiOutputClauseColumn).getIndex();
}
private void makeCommand(final int index) {
this.command = spy(new AddOutputClauseCommand(dtable,
outputClause,
uiModel,
() -> uiOutputClauseColumn,
index,
uiModelMapper,
executeCanvasOperation,
undoCanvasOperation));
}
@Test
public void testGraphCommandAllow() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.allow(graphCommandExecutionContext));
}
@Test
public void testGraphCommandCheck() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.allow(graphCommandExecutionContext));
}
@Test
public void testGraphCommandExecute() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
dtable.getRule().add(new DecisionRule());
dtable.getRule().add(new DecisionRule());
assertEquals(0, dtable.getOutput().size());
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
// one new output column
assertEquals(1, dtable.getOutput().size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_PREFIX + "1",
dtable.getOutput().get(0).getName());
// first rule
final List<LiteralExpression> outputEntriesRuleOne = dtable.getRule().get(0).getOutputEntry();
assertEquals(1, outputEntriesRuleOne.size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleOne.get(0).getText().getValue());
assertEquals(dtable.getRule().get(0), outputEntriesRuleOne.get(0).getParent());
// second rule
final List<LiteralExpression> outputEntriesRuleTwo = dtable.getRule().get(1).getOutputEntry();
assertEquals(1, outputEntriesRuleTwo.size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleTwo.get(0).getText().getValue());
assertEquals(dtable.getRule().get(1), outputEntriesRuleTwo.get(0).getParent());
assertEquals(dtable,
outputClause.getParent());
}
@Test
public void testGraphCommandExecuteExistingNotAffected() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final String ruleOneOldOutput = "old rule 1";
final String ruleTwoOldOutput = "old rule 2";
dtable.getOutput().add(new OutputClause());
addRuleWithOutputClauseValues(ruleOneOldOutput);
addRuleWithOutputClauseValues(ruleTwoOldOutput);
assertEquals(1, dtable.getOutput().size());
//Graph command will insert new OutputClause at index 0 of the OutputEntries
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(2, dtable.getOutput().size());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_PREFIX + "1",
dtable.getOutput().get(0).getName());
assertNull(dtable.getOutput().get(1).getName());
// first rule
final List<LiteralExpression> outputEntriesRuleOne = dtable.getRule().get(0).getOutputEntry();
assertEquals(2, outputEntriesRuleOne.size());
assertEquals(ruleOneOldOutput, outputEntriesRuleOne.get(1).getText().getValue());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleOne.get(0).getText().getValue());
assertEquals(dtable.getRule().get(0), outputEntriesRuleOne.get(0).getParent());
// second rule
final List<LiteralExpression> outputEntriesRuleTwo = dtable.getRule().get(1).getOutputEntry();
assertEquals(2, outputEntriesRuleTwo.size());
assertEquals(ruleTwoOldOutput, outputEntriesRuleTwo.get(1).getText().getValue());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, outputEntriesRuleTwo.get(0).getText().getValue());
assertEquals(dtable.getRule().get(1), outputEntriesRuleTwo.get(0).getParent());
assertEquals(dtable,
outputClause.getParent());
}
@Test
public void testGraphCommandExecuteInsertMiddle() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT + 1);
final String ruleOutputOne = "rule out 1";
final String ruleOutputTwo = "rule out 2";
dtable.getOutput().add(new OutputClause());
dtable.getOutput().add(new OutputClause());
addRuleWithOutputClauseValues(ruleOutputOne, ruleOutputTwo);
assertEquals(2, dtable.getOutput().size());
//Graph command will insert new OutputClause at index 1 of the OutputEntries
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(3, dtable.getOutput().size());
assertNull(dtable.getOutput().get(0).getName());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_PREFIX + "1",
dtable.getOutput().get(1).getName());
assertNull(dtable.getOutput().get(2).getName());
final List<LiteralExpression> ruleOutputs = dtable.getRule().get(0).getOutputEntry();
// first rule
assertEquals(3, ruleOutputs.size());
assertEquals(ruleOutputOne, ruleOutputs.get(0).getText().getValue());
assertEquals(DecisionTableDefaultValueUtilities.OUTPUT_CLAUSE_EXPRESSION_TEXT, ruleOutputs.get(1).getText().getValue());
assertEquals(dtable.getRule().get(0), ruleOutputs.get(1).getParent());
assertEquals(ruleOutputTwo, ruleOutputs.get(2).getText().getValue());
assertEquals(dtable,
outputClause.getParent());
}
@Test(expected = IndexOutOfBoundsException.class)
public void testGraphCommandUndoNoOutputClauseColumns() {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
dtable.getRule().add(new DecisionRule());
assertEquals(0, dtable.getOutput().size());
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.undo(graphCommandExecutionContext));
}
@Test
public void testGraphCommandUndoJustLastOutputClauseColumn() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final String ruleOneOldOutput = "old rule 1";
final String ruleTwoOldOutput = "old rule 2";
dtable.getOutput().add(new OutputClause());
addRuleWithOutputClauseValues(ruleOneOldOutput);
addRuleWithOutputClauseValues(ruleTwoOldOutput);
assertEquals(1, dtable.getOutput().size());
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.undo(graphCommandExecutionContext));
assertEquals(1, dtable.getOutput().size());
// first rule
assertEquals(1, dtable.getRule().get(0).getOutputEntry().size());
assertEquals(ruleOneOldOutput, dtable.getRule().get(0).getOutputEntry().get(0).getText().getValue());
// second rule
assertEquals(1, dtable.getRule().get(1).getOutputEntry().size());
assertEquals(ruleTwoOldOutput, dtable.getRule().get(1).getOutputEntry().get(0).getText().getValue());
}
@Test
public void testCanvasCommandAllow() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasCommand.allow(canvasHandler));
}
@Test
public void testCanvasCommandAddOutputClauseToRuleWithInputs() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT + 1);
final String ruleInputValue = "in value";
final String ruleOutputValue = "out value";
dtable.getInput().add(new InputClause());
dtable.getRule().add(new DecisionRule() {{
getInputEntry().add(new UnaryTests() {{
getText().setValue(ruleInputValue);
}});
getOutputEntry().add(new LiteralExpression() {{
getText().setValue(ruleOutputValue);
}});
}});
//Graph command populates OutputEntries so overwrite with test values
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
dtable.getRule().get(0).getOutputEntry().get(0).getText().setValue(ruleOutputValue);
doReturn(1).when(uiInputClauseColumn).getIndex();
doReturn(2).when(uiOutputClauseColumn).getIndex();
uiModel.appendColumn(uiInputClauseColumn);
uiModel.appendRow(new BaseGridRow());
uiModelMapper.fromDMNModel(0, 1);
final Command<AbstractCanvasHandler, CanvasViolation> canvasAddOutputClauseCommand = command.newCanvasCommand(canvasHandler);
canvasAddOutputClauseCommand.execute(canvasHandler);
assertEquals(ruleInputValue, uiModel.getRow(0).getCells().get(1).getValue().getValue());
assertEquals(ruleOutputValue, uiModel.getRow(0).getCells().get(2).getValue().getValue());
assertEquals(3, uiModel.getColumnCount());
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasAddOutputClauseCommand.undo(canvasHandler));
assertEquals(2, uiModel.getColumnCount());
verify(executeCanvasOperation).execute();
verify(undoCanvasOperation).execute();
verify(command, times(2)).updateParentInformation();
}
@Test
public void testCanvasCommandAddOutputClauseToRuleWithoutInputsThenUndo() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final String ruleOneOutputValue = "one";
final String ruleTwoOutputValue = "two";
dtable.getRule().add(new DecisionRule());
dtable.getRule().add(new DecisionRule());
uiModel.appendRow(new BaseGridRow());
uiModel.appendRow(new BaseGridRow());
//Graph command populates OutputEntries so overwrite with test values
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
graphCommand.execute(graphCommandExecutionContext);
dtable.getRule().get(0).getOutputEntry().get(0).getText().setValue(ruleOneOutputValue);
dtable.getRule().get(1).getOutputEntry().get(0).getText().setValue(ruleTwoOutputValue);
final Command<AbstractCanvasHandler, CanvasViolation> canvasAddOutputClauseCommand = command.newCanvasCommand(canvasHandler);
canvasAddOutputClauseCommand.execute(canvasHandler);
// first rule
assertEquals(ruleOneOutputValue, uiModel.getRow(0).getCells().get(1).getValue().getValue());
// second rule
assertEquals(ruleTwoOutputValue, uiModel.getRow(1).getCells().get(1).getValue().getValue());
assertEquals(2, uiModel.getColumnCount());
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasAddOutputClauseCommand.undo(canvasHandler));
assertEquals(1, uiModel.getColumnCount());
verify(executeCanvasOperation).execute();
verify(undoCanvasOperation).execute();
verify(command, times(2)).updateParentInformation();
}
@Test
public void testCanvasCommandUndoWhenNothingBefore() throws Exception {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<AbstractCanvasHandler, CanvasViolation> canvasAddOutputClauseCommand = command.newCanvasCommand(canvasHandler);
canvasAddOutputClauseCommand.undo(canvasHandler);
// just row number column
assertEquals(1, uiModel.getColumnCount());
verify(undoCanvasOperation).execute();
verify(command).updateParentInformation();
}
@Test
public void testComponentWidths() {
makeCommand(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT);
final Command<GraphCommandExecutionContext, RuleViolation> graphCommand = command.newGraphCommand(canvasHandler);
final Command<AbstractCanvasHandler, CanvasViolation> canvasCommand = command.newCanvasCommand(canvasHandler);
//Execute
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.execute(graphCommandExecutionContext));
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasCommand.execute(canvasHandler));
assertEquals(dtable.getRequiredComponentWidthCount(),
dtable.getComponentWidths().size());
assertNull(dtable.getComponentWidths().get(DecisionTableUIModelMapperHelper.ROW_INDEX_COLUMN_COUNT));
//Undo
assertEquals(GraphCommandResultBuilder.SUCCESS,
graphCommand.undo(graphCommandExecutionContext));
assertEquals(CanvasCommandResultBuilder.SUCCESS,
canvasCommand.undo(canvasHandler));
assertEquals(dtable.getRequiredComponentWidthCount(),
dtable.getComponentWidths().size());
}
private void addRuleWithOutputClauseValues(String... outputClauseValues) {
dtable.getRule().add(new DecisionRule() {{
Stream.of(outputClauseValues).forEach(oClause -> {
getOutputEntry().add(new LiteralExpression() {{
getText().setValue(oClause);
}});
});
}});
}
}
| AF-2797: Reenable tests on jdk 11 (#3559)
Due to recent jdk11 support [1] introduced on master we should re-enable few tests [2] not working on jdk 11 previously.
[1] - https://issues.redhat.com/browse/AF-1799
[2] - https://issues.redhat.com/browse/AF-2797 | kie-wb-common-dmn/kie-wb-common-dmn-client/src/test/java/org/kie/workbench/common/dmn/client/commands/expressions/types/dtable/AddOutputClauseCommandTest.java | AF-2797: Reenable tests on jdk 11 (#3559) | <ide><path>ie-wb-common-dmn/kie-wb-common-dmn-client/src/test/java/org/kie/workbench/common/dmn/client/commands/expressions/types/dtable/AddOutputClauseCommandTest.java
<ide> import java.util.Optional;
<ide> import java.util.stream.Stream;
<ide>
<del>import org.junit.Assume;
<ide> import org.junit.Before;
<ide> import org.junit.Test;
<ide> import org.junit.runner.RunWith;
<ide> private AddOutputClauseCommand command;
<ide>
<ide> @Before
<del> public void doNotRunTestsOnJdkEleven() {
<del>
<del> final String javaVersion = "11";
<del> final String javaVersionPropertyKey = "java.version";
<del> Assume.assumeFalse(System.getProperty(javaVersionPropertyKey).contains(javaVersion));
<del> }
<del>
<del> @Before
<ide> public void setUp() throws Exception {
<ide> this.dtable = new DecisionTable();
<ide> this.uiModel = new DMNGridData(); |
|
JavaScript | mit | ac9d1b4ae621409611222069a04185f57b508aa2 | 0 | nikgraf/belle,nikgraf/belle | import React, {Component} from 'react';
import { canUseDOM } from 'exenv';
import {extend, omit, has} from '../utils/helpers';
import style from '../style/rating.js';
import {injectStyles, removeStyle} from '../utils/inject-style';
import unionClassNames from '../utils/union-class-names';
import config from '../config/rating';
import {requestAnimationFrame, cancelAnimationFrame} from '../utils/animation-frame-management';
/**
* sanitize properties for the wrapping div.
*/
function sanitizeWrapperProps(properties) {
return omit(properties, [
'className',
'onKeyDown',
'onMouseEnter',
'onMouseMove',
'onMouseLeave',
'onMouseUp',
'onMouseDown',
'onTouchStart',
'onTouchMove',
'onTouchEnd',
'onTouchCancel',
'onBlur',
'onFocus',
'tabIndex',
'aria-label',
'aria-valuemax',
'aria-valuemin',
'aria-valuenow',
'aria-disabled',
'style',
'focusStyle',
'disabledStyle',
'characterStyle',
'activeCharacterStyle',
'hoverCharacterStyle',
'characterProps'
]);
}
/**
* sanitize properties for the character span.
*/
function sanitizeCharacterProps(properties) {
return omit(properties, [
'data-belle-value',
'style'
]);
}
/**
* Injects pseudo classes for styles into the DOM.
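 * Inline styles cannot express the :focus pseudo class, so a real stylesheet
 * rule is injected under a per-instance id instead.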
*/
function updatePseudoClassStyle(ratingWrapperStyleId, properties, preventFocusStyleForTouchAndClick) {
let ratingFocusStyle;
if (preventFocusStyleForTouchAndClick) {
ratingFocusStyle = { outline: 0 };
} else {
ratingFocusStyle = extend({}, style.focusStyle, properties.focusStyle);
}
const styles = [
{
id: ratingWrapperStyleId,
style: ratingFocusStyle,
pseudoClass: 'focus'
}
];
injectStyles(styles);
}
/**
* Rating component
*
 * The component leverages 5 characters (by default stars) to allow the user
 * to rate.
*/
export default class Rating extends Component {
constructor(properties) {
super(properties);
let value;
if (has(properties, 'valueLink')) {
value = properties.valueLink.value;
} else if (has(properties, 'value')) {
value = properties.value;
} else if (has(properties, 'defaultValue')) {
value = properties.defaultValue;
}
this.state = {
value: value,
focusedValue: undefined,
      wrapperProps: sanitizeWrapperProps(properties),
characterProps: sanitizeCharacterProps(properties.characterProps),
isFocus: false,
isActive: false
};
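    // The prop overrides the config default. When true, the native :focus outline
    // is suppressed and focus styling is applied via state for keyboard focus only.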
this.preventFocusStyleForTouchAndClick = has(properties, 'preventFocusStyleForTouchAndClick') ? properties.preventFocusStyleForTouchAndClick : config.preventFocusStyleForTouchAndClick;
}
static displayName = 'Rating';
static propTypes = {
defaultValue: React.PropTypes.oneOf([1, 2, 3, 4, 5]),
value: React.PropTypes.oneOf([1, 2, 3, 4, 5]),
valueLink: React.PropTypes.shape({
value: React.PropTypes.oneOf([1, 2, 3, 4, 5]),
requestChange: React.PropTypes.func.isRequired
}),
disabled: React.PropTypes.bool,
tabIndex: React.PropTypes.number,
character: React.PropTypes.string,
characterProps: React.PropTypes.object,
preventFocusStyleForTouchAndClick: React.PropTypes.bool,
'aria-label': React.PropTypes.string,
style: React.PropTypes.object,
className: React.PropTypes.string,
focusStyle: React.PropTypes.object,
disabledStyle: React.PropTypes.object,
hoverStyle: React.PropTypes.object,
disabledHoverStyle: React.PropTypes.object,
characterStyle: React.PropTypes.object,
activeCharacterStyle: React.PropTypes.object,
hoverCharacterStyle: React.PropTypes.object,
onUpdate: React.PropTypes.func,
onMouseDown: React.PropTypes.func,
onMouseUp: React.PropTypes.func,
onMouseEnter: React.PropTypes.func,
onMouseMove: React.PropTypes.func,
onMouseLeave: React.PropTypes.func,
onTouchStart: React.PropTypes.func,
onTouchMove: React.PropTypes.func,
onTouchEnd: React.PropTypes.func,
onTouchCancel: React.PropTypes.func,
onFocus: React.PropTypes.func,
onBlur: React.PropTypes.func,
onKeyDown: React.PropTypes.func
};
/**
* Setting default prop values.
*/
static defaultProps = {
disabled: false,
tabIndex: 0,
character: '★',
'aria-label': 'rating'
};
/**
* Apply pseudo class styling to the wrapper div.
*/
componentWillMount() {
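    // Derive a DOM-safe, per-instance id from React internals so the injected
    // :focus rule targets only this component's wrapper element.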
const id = this._reactInternalInstance._rootNodeID.replace(/\./g, '-');
this.ratingWrapperStyleId = `rating-wrapper-style-id${id}`;
updatePseudoClassStyle(this.ratingWrapperStyleId, this.props, this.preventFocusStyleForTouchAndClick);
if (canUseDOM) {
this.mouseUpOnDocumentCallback = this._onMouseUpOnDocument.bind(this);
document.addEventListener('mouseup', this.mouseUpOnDocumentCallback);
}
}
componentWillReceiveProps(properties) {
const newState = {
wrapperProps: sanitizeWrapperProps(properties),
characterProps: sanitizeCharacterProps(properties.characterProps)
};
if (properties.valueLink) {
newState.value = properties.valueLink.value;
} else if (properties.value) {
newState.value = properties.value;
}
this.setState(newState);
this.preventFocusStyleForTouchAndClick = has(properties, 'preventFocusStyleForTouchAndClick') ? properties.preventFocusStyleForTouchAndClick : config.preventFocusStyleForTouchAndClick;
removeStyle(this.ratingWrapperStyleId);
updatePseudoClassStyle(this.ratingWrapperStyleId, properties, this.preventFocusStyleForTouchAndClick);
}
/**
* Removes pseudo classes from the DOM once component gets removed.
*/
componentWillUnmount() {
removeStyle(this.ratingWrapperStyleId);
if (canUseDOM) {
document.removeEventListener('mouseup', this.mouseUpOnDocumentCallback);
}
}
/**
* As soon as the mouse enters the component the focusedValue is updated based
* on the value of the targeted span.
*/
_onMouseEnter(event) {
    // In case the user pressed the mouse outside, then hovers over the rating
    // and releases it, the mouseUp should not trigger an update. It should only
    // trigger when the mouseDown started inside the component.
// Activating inside, going out & coming back should still be possible.
if (!this.state.isActive) {
this.preventNextMouseUpTriggerUpdate = true;
}
if (!this.props.disabled) {
const value = Number(event.target.getAttribute('data-belle-value'));
this.setState({
focusedValue: value,
isHover: true
});
} else {
this.setState({
isHover: true
});
}
if (this.props.onMouseEnter) {
this.props.onMouseEnter(event);
}
}
/**
   * As the mouse moves over the component and enters a new star, the focusedValue
* is updated based on the value of the targeted span.
*/
_onMouseMove(event) {
if (!this.props.disabled) {
const value = Number(event.target.getAttribute('data-belle-value'));
if (this.state.focusedValue !== value) {
this.setState({
focusedValue: value
});
}
}
if (this.props.onMouseMove) {
this.props.onMouseMove(event);
}
}
/**
* Resets the component as the mouse leaves the hover area.
*/
_onMouseLeave(event) {
if (!this.props.disabled) {
this.setState({
focusedValue: undefined,
isHover: false
});
} else {
this.setState({
isHover: false
});
}
if (this.props.onMouseLeave) {
this.props.onMouseLeave(event);
}
}
/**
* Sets isActive state to true.
*/
_onMouseDown(event) {
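    // event.buttons === 1: the primary (usually left) button is the only one pressed.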
if (!this.props.disabled && event.buttons === 1) {
this.setState({ isActive: true });
this.preventNextMouseUpTriggerUpdate = false;
}
if (this.props.onMouseDown) {
this.props.onMouseDown(event);
}
}
/**
* Sets isActive state to false.
*/
_onMouseUp(event) {
if (!this.props.disabled && !this.preventNextMouseUpTriggerUpdate) {
const value = Number(event.target.getAttribute('data-belle-value'));
this._triggerComponentUpdate(value);
}
if (this.props.onMouseUp) {
this.props.onMouseUp(event);
}
}
_onMouseUpOnDocument() {
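    // Attached to document in componentWillMount, so releasing the mouse outside
    // the component still clears the active state.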
this.setState({ isActive: false });
}
_onContextMenu() {
this.setState({ isActive: false });
}
/**
   * Changes the focusedValue and sets the isActive state to true.
*/
_onTouchStart(event) {
event.preventDefault();
if (!this.props.disabled && event.touches.length === 1) {
const value = Number(event.target.getAttribute('data-belle-value'));
this.setState({
focusedValue: value,
isActive: true
});
}
if (this.props.onTouchStart) {
this.props.onTouchStart(event);
}
}
/**
   * Sets the focusedValue depending on the touch position.
*/
_onTouchMove(event) {
if (!this.props.disabled && event.touches.length === 1) {
const touches = event.touches[0];
// the requestAnimationFrame function must be executed in the context of window
// see http://stackoverflow.com/a/9678166/837709
const animationFrame = requestAnimationFrame.call(
window,
this._triggerComponentUpdateOnTouchMove.bind(this, touches)
);
if (this.previousTouchMoveFrame) {
// the cancelAnimationFrame function must be executed in the context of window
// see http://stackoverflow.com/a/9678166/837709
cancelAnimationFrame.call(window, this.previousTouchMoveFrame);
}
this.previousTouchMoveFrame = animationFrame;
}
if (this.props.onTouchMove) {
this.props.onTouchMove(event);
}
}
/**
* update the component when touch ends
*/
_onTouchEnd(event) {
if (!this.props.disabled) {
event.preventDefault();
this.setState({isActive: false});
const value = this.state.focusedValue;
this._triggerComponentUpdate(value);
}
if (this.props.onTouchEnd) {
this.props.onTouchEnd(event);
}
}
/**
* reset the component in case of touch cancel
*/
_onTouchCancel(event) {
if (!this.props.disabled) {
this.setState({
isActive: false,
focusedValue: undefined
});
}
if (this.props.onTouchCancel) {
this.props.onTouchCancel(event);
}
}
/**
* reset the component on blur
*/
_onBlur(event) {
if (!this.props.disabled) {
this.setState({
focusedValue: undefined,
isFocus: false,
isActive: false
});
}
if (this.props.onBlur) {
this.props.onBlur(event);
}
}
/**
* enable focus styling of component when tab is used to focus component
*/
  _onFocus(event) {
if (!this.state.isActive && !this.props.disabled) {
this.setState({isFocus: true});
}
if (this.props.onFocus) {
this.props.onFocus(event);
}
}
/**
* Manages the keyboard events.
*
   * In case the Rating component is in focus, ArrowUp/ArrowRight will increase the value and ArrowDown/ArrowLeft will decrease it.
   * Enter/Space will update the value of the component.
   *
   * Pressing Escape will reset the focused value to the last committed value.
*
*/
_onKeyDown(event) {
if (!this.props.disabled) {
if (event.key === 'ArrowDown' || event.key === 'ArrowLeft') {
event.preventDefault();
this._onArrowDownKeyDown();
} else if (event.key === 'ArrowUp' || event.key === 'ArrowRight') {
event.preventDefault();
this._onArrowUpKeyDown();
} else if (event.key === 'Enter' || event.key === ' ') {
event.preventDefault();
this._onEnterSpaceKeyDown();
} else if (event.key === 'Escape') {
event.preventDefault();
this._onEscapeKeyDown();
}
}
if (this.props.onKeyDown) {
this.props.onKeyDown(event);
}
}
/**
* decrease the value by 1 when arrow down key is pressed
*/
_onArrowDownKeyDown() {
let newValue = this.state.focusedValue !== undefined ? this.state.focusedValue : this.state.value;
newValue = newValue > 0 ? (newValue - 1) : 0;
this.setState({
focusedValue: newValue
});
}
/**
* increase value by 1 when arrow up key is pressed
*/
_onArrowUpKeyDown() {
let newValue = this.state.focusedValue !== undefined ? this.state.focusedValue : this.state.value;
if (!newValue) {
newValue = 1;
} else if (newValue < 5) {
newValue = newValue + 1;
} else {
newValue = 5;
}
this.setState({
focusedValue: newValue
});
}
/**
* set component value to current focus value
*/
_onEnterSpaceKeyDown() {
let newValue;
if (this.state.focusedValue !== undefined) {
if (this.state.focusedValue === 0) {
newValue = undefined;
} else {
newValue = this.state.focusedValue;
}
this._triggerComponentUpdate(newValue);
}
}
/**
* reset component when escape key is pressed
   * The Escape key should just reset the displayed rating without removing hover or focus styles.
*/
_onEscapeKeyDown() {
this.setState({
focusedValue: undefined
});
}
/**
* Returns current value of rating to be displayed on the component
*/
_getCurrentValue() {
let value;
if (this.state.focusedValue !== undefined) {
value = this.state.focusedValue;
} else {
value = (this.state.value) ? this.state.value : 0;
}
return value;
}
/**
* The function will be passed to requestAnimationFrame for touchMove
*/
_triggerComponentUpdateOnTouchMove(touches) {
const touchedElement = document.elementFromPoint(touches.clientX, touches.clientY);
const value = Number(touchedElement.getAttribute('data-belle-value'));
if (value && this.state.focusedValue !== value) {
this.setState({
focusedValue: value
});
}
}
/**
* update component when component is clicked, touch ends, enter or space key are hit
* different update logic will apply depending on whether component has property defaultValue, value or valueLink specified
*/
_triggerComponentUpdate(value) {
if (has(this.props, 'valueLink')) {
this.props.valueLink.requestChange(value);
this.setState({
focusedValue: undefined,
isActive: false
});
} else if (has(this.props, 'value')) {
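      // Controlled via the `value` prop: leave state.value untouched and rely on
      // the parent to pass an updated value back down after onUpdate fires.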
this.setState({
focusedValue: undefined,
isActive: false
});
} else {
this.setState({
focusedValue: undefined,
isActive: false,
value: value
});
}
if (this.props.onUpdate) {
this.props.onUpdate({ value: value });
}
}
/**
   * Returns the markup to be rendered by this component.
*/
render() {
const currentValue = this._getCurrentValue();
const tabIndex = !this.props.disabled ? this.props.tabIndex : -1;
let characterStyle = extend({}, style.characterStyle, this.props.characterStyle);
if (this.state.isActive) {
characterStyle = extend({}, characterStyle, style.activeCharacterStyle, this.props.activeCharacterStyle);
} else if (this.state.isHover) {
characterStyle = extend({}, characterStyle, style.hoverCharacterStyle, this.props.hoverCharacterStyle);
}
let wrapperStyle = extend({}, style.style, this.props.style);
if (this.props.disabled) {
wrapperStyle = extend({}, wrapperStyle, style.disabledStyle, this.props.disabledStyle);
if (this.state.isHover) {
wrapperStyle = extend(wrapperStyle, style.disabledHoverStyle, this.props.disabledHoverStyle);
}
} else {
if (this.state.isFocus && this.preventFocusStyleForTouchAndClick) {
wrapperStyle = extend({}, wrapperStyle, style.focusStyle, this.props.focusStyle);
}
if (this.state.isHover) {
wrapperStyle = extend(wrapperStyle, style.hoverStyle, this.props.hoverStyle);
}
}
return (
<div ref="wrapper"
style={ wrapperStyle }
className={ unionClassNames(this.props.className, this.ratingWrapperStyleId) }
onKeyDown={ this._onKeyDown.bind(this) }
onMouseEnter={ this._onMouseEnter.bind(this) }
onMouseMove={ this._onMouseMove.bind(this) }
onMouseLeave={ this._onMouseLeave.bind(this) }
onMouseUp={ this._onMouseUp.bind(this) }
onMouseDown={ this._onMouseDown.bind(this) }
onTouchStart={ this._onTouchStart.bind(this) }
onTouchMove={ this._onTouchMove.bind(this) }
onTouchEnd={ this._onTouchEnd.bind(this) }
onTouchCancel={ this._onTouchCancel.bind(this) }
onContextMenu={ this._onContextMenu.bind(this) }
onBlur={ this._onBlur.bind(this) }
onFocus={ this._onFocus.bind(this) }
tabIndex={ tabIndex }
aria-label = { this.props['aria-label'] }
aria-valuemax = { 5 }
aria-valuemin = { 1 }
aria-valuenow = { this.state.value }
aria-disabled = { this.props.disabled }
{...this.state.wrapperProps}>
{
React.Children.map([1, 2, 3, 4, 5], (value) => {
const ratingStyle = (currentValue >= value) ? characterStyle : {};
return (
<span data-belle-value= { value }
style={ ratingStyle }
{...this.state.characterProps}>
{ this.props.character }
</span>
);
})
}
</div>
);
}
}
| src/components/Rating.js | import React, {Component} from 'react';
import { canUseDOM } from 'exenv';
import {extend, omit, has} from '../utils/helpers';
import style from '../style/rating.js';
import {injectStyles, removeStyle} from '../utils/inject-style';
import unionClassNames from '../utils/union-class-names';
import config from '../config/rating';
import {requestAnimationFrame, cancelAnimationFrame} from '../utils/animation-frame-management';
/**
* sanitize properties for the wrapping div.
*/
function sanitizeWrapperProps(properties) {
return omit(properties, [
'className',
'onKeyDown',
'onMouseEnter',
'onMouseMove',
'onMouseLeave',
'onMouseUp',
'onMouseDown',
'onTouchStart',
'onTouchMove',
'onTouchEnd',
'onTouchCancel',
'onBlur',
'onFocus',
'tabIndex',
'aria-label',
'aria-valuemax',
'aria-valuemin',
'aria-valuenow',
'aria-disabled',
'style',
'focusStyle',
'disabledStyle',
'characterStyle',
'activeCharacterStyle',
'hoverCharacterStyle',
'characterProps'
]);
}
/**
* sanitize properties for the character span.
*/
function sanitizeCharacterProps(properties) {
return omit(properties, [
'data-belle-value',
'style'
]);
}
/**
* Injects pseudo classes for styles into the DOM.
*/
function updatePseudoClassStyle(ratingWrapperStyleId, properties, preventFocusStyleForTouchAndClick) {
let ratingFocusStyle;
if (preventFocusStyleForTouchAndClick) {
ratingFocusStyle = { outline: 0 };
} else {
ratingFocusStyle = extend({}, style.focusStyle, properties.focusStyle);
}
const styles = [
{
id: ratingWrapperStyleId,
style: ratingFocusStyle,
pseudoClass: 'focus'
}
];
injectStyles(styles);
}
/**
* Rating component
*
 * The component leverages 5 characters (by default stars) to allow the user
 * to rate.
*/
export default class Rating extends Component {
constructor(properties) {
super(properties);
let value;
if (has(properties, 'valueLink')) {
value = properties.valueLink.value;
} else if (has(properties, 'value')) {
value = properties.value;
} else if (has(properties, 'defaultValue')) {
value = properties.defaultValue;
}
this.state = {
value: value,
focusedValue: undefined,
      wrapperProps: sanitizeWrapperProps(properties),
characterProps: sanitizeCharacterProps(properties.characterProps),
isFocus: false,
isActive: false
};
this.preventFocusStyleForTouchAndClick = has(properties, 'preventFocusStyleForTouchAndClick') ? properties.preventFocusStyleForTouchAndClick : config.preventFocusStyleForTouchAndClick;
}
static displayName = 'Rating';
static propTypes = {
defaultValue: React.PropTypes.oneOf([1, 2, 3, 4, 5]),
value: React.PropTypes.oneOf([1, 2, 3, 4, 5]),
valueLink: React.PropTypes.shape({
value: React.PropTypes.oneOf([1, 2, 3, 4, 5]),
requestChange: React.PropTypes.func.isRequired
}),
disabled: React.PropTypes.bool,
tabIndex: React.PropTypes.number,
character: React.PropTypes.string,
characterProps: React.PropTypes.object,
preventFocusStyleForTouchAndClick: React.PropTypes.bool,
'aria-label': React.PropTypes.string,
style: React.PropTypes.object,
className: React.PropTypes.string,
focusStyle: React.PropTypes.object,
disabledStyle: React.PropTypes.object,
hoverStyle: React.PropTypes.object,
disabledHoverStyle: React.PropTypes.object,
characterStyle: React.PropTypes.object,
activeCharacterStyle: React.PropTypes.object,
hoverCharacterStyle: React.PropTypes.object,
onUpdate: React.PropTypes.func,
onMouseDown: React.PropTypes.func,
onMouseUp: React.PropTypes.func,
onMouseEnter: React.PropTypes.func,
onMouseMove: React.PropTypes.func,
onMouseLeave: React.PropTypes.func,
onTouchStart: React.PropTypes.func,
onTouchMove: React.PropTypes.func,
onTouchEnd: React.PropTypes.func,
onTouchCancel: React.PropTypes.func,
onFocus: React.PropTypes.func,
onBlur: React.PropTypes.func,
onKeyDown: React.PropTypes.func
};
/**
* Setting default prop values.
*/
static defaultProps = {
disabled: false,
tabIndex: 0,
character: '★',
'aria-label': 'rating'
};
/**
* Apply pseudo class styling to the wrapper div.
*/
componentWillMount() {
const id = this._reactInternalInstance._rootNodeID.replace(/\./g, '-');
this.ratingWrapperStyleId = `rating-wrapper-style-id${id}`;
updatePseudoClassStyle(this.ratingWrapperStyleId, this.props, this.preventFocusStyleForTouchAndClick);
if (canUseDOM) {
this.mouseUpOnDocumentCallback = this._onMouseUpOnDocument.bind(this);
document.addEventListener('mouseup', this.mouseUpOnDocumentCallback);
}
}
componentWillReceiveProps(properties) {
const newState = {
wrapperProps: sanitizeWrapperProps(properties),
characterProps: sanitizeCharacterProps(properties.characterProps)
};
if (properties.valueLink) {
newState.value = properties.valueLink.value;
} else if (properties.value) {
newState.value = properties.value;
}
this.setState(newState);
this.preventFocusStyleForTouchAndClick = has(properties, 'preventFocusStyleForTouchAndClick') ? properties.preventFocusStyleForTouchAndClick : config.preventFocusStyleForTouchAndClick;
removeStyle(this.ratingWrapperStyleId);
updatePseudoClassStyle(this.ratingWrapperStyleId, properties, this.preventFocusStyleForTouchAndClick);
}
/**
* Removes pseudo classes from the DOM once component gets removed.
*/
componentWillUnmount() {
removeStyle(this.ratingWrapperStyleId);
if (canUseDOM) {
document.removeEventListener('mouseup', this.mouseUpOnDocumentCallback);
}
}
/**
* As soon as the mouse enters the component the focusedValue is updated based
* on the value of the targeted span.
*/
_onMouseEnter(event) {
    // In case the user pressed the mouse outside, then hovers over the rating
    // and releases it, the mouseUp should not trigger an update. It should only
    // trigger when the mouseDown started inside the component.
// Activating inside, going out & coming back should still be possible.
if (!this.state.isActive) {
this.preventNextMouseUpTriggerUpdate = true;
}
if (!this.props.disabled) {
const value = Number(event.target.getAttribute('data-belle-value'));
this.setState({
focusedValue: value,
isHover: true
});
} else {
this.setState({
isHover: true
});
}
if (this.props.onMouseEnter) {
this.props.onMouseEnter(event);
}
}
/**
   * As the mouse moves over the component and enters a new star, the focusedValue
* is updated based on the value of the targeted span.
*/
_onMouseMove(event) {
if (!this.props.disabled) {
const value = Number(event.target.getAttribute('data-belle-value'));
if (this.state.focusedValue !== value) {
this.setState({
focusedValue: value
});
}
}
if (this.props.onMouseMove) {
this.props.onMouseMove(event);
}
}
/**
* Resets the component as the mouse leaves the hover area.
*/
_onMouseLeave(event) {
if (!this.props.disabled) {
this.setState({
focusedValue: undefined,
isHover: false
});
} else {
this.setState({
isHover: false
});
}
if (this.props.onMouseLeave) {
this.props.onMouseLeave(event);
}
}
/**
* Sets isActive state to true.
*/
_onMouseDown(event) {
if (!this.props.disabled && event.buttons === 1) {
this.setState({ isActive: true });
this.preventNextMouseUpTriggerUpdate = false;
}
if (this.props.onMouseDown) {
this.props.onMouseDown(event);
}
}
/**
* Sets isActive state to false.
*/
_onMouseUp(event) {
if (!this.props.disabled && !this.preventNextMouseUpTriggerUpdate) {
const value = Number(event.target.getAttribute('data-belle-value'));
this._updateComponent(value);
}
if (this.props.onMouseUp) {
this.props.onMouseUp(event);
}
}
_onMouseUpOnDocument() {
this.setState({ isActive: false });
}
_onContextMenu() {
this.setState({ isActive: false });
}
/**
   * Changes the focusedValue and sets the isActive state to true.
*/
_onTouchStart(event) {
event.preventDefault();
if (!this.props.disabled && event.touches.length === 1) {
const value = Number(event.target.getAttribute('data-belle-value'));
this.setState({
focusedValue: value,
isActive: true
});
}
if (this.props.onTouchStart) {
this.props.onTouchStart(event);
}
}
/**
* The function will be passed to requestAnimationFrame for touchMove
*/
_updateComponentOnTouchMove(touches) {
const touchedElement = document.elementFromPoint(touches.clientX, touches.clientY);
const value = Number(touchedElement.getAttribute('data-belle-value'));
if (value && this.state.focusedValue !== value) {
this.setState({
focusedValue: value
});
}
}
/**
   * Sets the focusedValue depending on the touch position.
*/
_onTouchMove(event) {
if (!this.props.disabled && event.touches.length === 1) {
const touches = event.touches[0];
// the requestAnimationFrame function must be executed in the context of window
// see http://stackoverflow.com/a/9678166/837709
const animationFrame = requestAnimationFrame.call(
window,
this._updateComponentOnTouchMove.bind(this, touches)
);
if (this.previousTouchMoveFrame) {
// the cancelAnimationFrame function must be executed in the context of window
// see http://stackoverflow.com/a/9678166/837709
cancelAnimationFrame.call(window, this.previousTouchMoveFrame);
}
this.previousTouchMoveFrame = animationFrame;
}
if (this.props.onTouchMove) {
this.props.onTouchMove(event);
}
}
/**
* update the component when touch ends
*/
_onTouchEnd(event) {
if (!this.props.disabled) {
event.preventDefault();
this.setState({isActive: false});
const value = this.state.focusedValue;
this._updateComponent(value);
}
if (this.props.onTouchEnd) {
this.props.onTouchEnd(event);
}
}
/**
* reset the component in case of touch cancel
*/
_onTouchCancel(event) {
if (!this.props.disabled) {
this.setState({
isActive: false,
focusedValue: undefined
});
}
if (this.props.onTouchCancel) {
this.props.onTouchCancel(event);
}
}
/**
* reset the component on blur
*/
_onBlur(event) {
if (!this.props.disabled) {
this.setState({
focusedValue: undefined,
isFocus: false,
isActive: false
});
}
if (this.props.onBlur) {
this.props.onBlur(event);
}
}
/**
* enable focus styling of component when tab is used to focus component
*/
  _onFocus(event) {
if (!this.state.isActive && !this.props.disabled) {
this.setState({isFocus: true});
}
if (this.props.onFocus) {
this.props.onFocus(event);
}
}
/**
* update component when component is clicked, touch ends, enter or space key are hit
* different update logic will apply depending on whether component has property defaultValue, value or valueLink specified
*/
_updateComponent(value) {
if (has(this.props, 'valueLink')) {
this.props.valueLink.requestChange(value);
this.setState({
focusedValue: undefined,
isActive: false
});
} else if (has(this.props, 'value')) {
this.setState({
focusedValue: undefined,
isActive: false
});
} else {
this.setState({
focusedValue: undefined,
isActive: false,
value: value
});
}
if (this.props.onUpdate) {
this.props.onUpdate({ value: value });
}
}
/**
* Manages the keyboard events.
*
   * In case the Rating component is in focus, ArrowUp/ArrowRight will increase the value and ArrowDown/ArrowLeft will decrease it.
   * Enter/Space will update the value of the component.
   *
   * Pressing Escape will reset the focused value to the last committed value.
*
*/
_onKeyDown(event) {
if (!this.props.disabled) {
if (event.key === 'ArrowDown' || event.key === 'ArrowLeft') {
event.preventDefault();
this._onArrowDownKeyDown();
} else if (event.key === 'ArrowUp' || event.key === 'ArrowRight') {
event.preventDefault();
this._onArrowUpKeyDown();
} else if (event.key === 'Enter' || event.key === ' ') {
event.preventDefault();
this._onEnterSpaceKeyDown();
} else if (event.key === 'Escape') {
event.preventDefault();
this._onEscapeKeyDown();
}
}
if (this.props.onKeyDown) {
this.props.onKeyDown(event);
}
}
/**
* decrease the value by 1 when arrow down key is pressed
*/
_onArrowDownKeyDown() {
let newValue = this.state.focusedValue !== undefined ? this.state.focusedValue : this.state.value;
newValue = newValue > 0 ? (newValue - 1) : 0;
this.setState({
focusedValue: newValue
});
}
/**
* increase value by 1 when arrow up key is pressed
*/
_onArrowUpKeyDown() {
let newValue = this.state.focusedValue !== undefined ? this.state.focusedValue : this.state.value;
if (!newValue) {
newValue = 1;
} else if (newValue < 5) {
newValue = newValue + 1;
} else {
newValue = 5;
}
this.setState({
focusedValue: newValue
});
}
/**
* set component value to current focus value
*/
_onEnterSpaceKeyDown() {
let newValue;
if (this.state.focusedValue !== undefined) {
if (this.state.focusedValue === 0) {
newValue = undefined;
} else {
newValue = this.state.focusedValue;
}
this._updateComponent(newValue);
}
}
/**
* reset component when escape key is pressed
   * The Escape key should just reset the displayed rating without removing hover or focus styles.
*/
_onEscapeKeyDown() {
this.setState({
focusedValue: undefined
});
}
/**
* Returns current value of rating to be displayed on the component
*/
_getCurrentValue() {
let value;
if (this.state.focusedValue !== undefined) {
value = this.state.focusedValue;
} else {
value = (this.state.value) ? this.state.value : 0;
}
return value;
}
/**
   * Returns the markup to be rendered by this component.
*/
render() {
const currentValue = this._getCurrentValue();
const tabIndex = !this.props.disabled ? this.props.tabIndex : -1;
let characterStyle = extend({}, style.characterStyle, this.props.characterStyle);
if (this.state.isActive) {
characterStyle = extend({}, characterStyle, style.activeCharacterStyle, this.props.activeCharacterStyle);
} else if (this.state.isHover) {
characterStyle = extend({}, characterStyle, style.hoverCharacterStyle, this.props.hoverCharacterStyle);
}
let wrapperStyle = extend({}, style.style, this.props.style);
if (this.props.disabled) {
wrapperStyle = extend({}, wrapperStyle, style.disabledStyle, this.props.disabledStyle);
if (this.state.isHover) {
wrapperStyle = extend(wrapperStyle, style.disabledHoverStyle, this.props.disabledHoverStyle);
}
} else {
if (this.state.isFocus && this.preventFocusStyleForTouchAndClick) {
wrapperStyle = extend({}, wrapperStyle, style.focusStyle, this.props.focusStyle);
}
if (this.state.isHover) {
wrapperStyle = extend(wrapperStyle, style.hoverStyle, this.props.hoverStyle);
}
}
return (
<div ref="wrapper"
style={ wrapperStyle }
className={ unionClassNames(this.props.className, this.ratingWrapperStyleId) }
onKeyDown={ this._onKeyDown.bind(this) }
onMouseEnter={ this._onMouseEnter.bind(this) }
onMouseMove={ this._onMouseMove.bind(this) }
onMouseLeave={ this._onMouseLeave.bind(this) }
onMouseUp={ this._onMouseUp.bind(this) }
onMouseDown={ this._onMouseDown.bind(this) }
onTouchStart={ this._onTouchStart.bind(this) }
onTouchMove={ this._onTouchMove.bind(this) }
onTouchEnd={ this._onTouchEnd.bind(this) }
onTouchCancel={ this._onTouchCancel.bind(this) }
onContextMenu={ this._onContextMenu.bind(this) }
onBlur={ this._onBlur.bind(this) }
onFocus={ this._onFocus.bind(this) }
tabIndex={ tabIndex }
aria-label = { this.props['aria-label'] }
aria-valuemax = { 5 }
aria-valuemin = { 1 }
aria-valuenow = { this.state.value }
aria-disabled = { this.props.disabled }
{...this.state.wrapperProps}>
{
React.Children.map([1, 2, 3, 4, 5], (value) => {
const ratingStyle = (currentValue >= value) ? characterStyle : {};
return (
<span data-belle-value= { value }
style={ ratingStyle }
{...this.state.characterProps}>
{ this.props.character }
</span>
);
})
}
</div>
);
}
}
| refactor(Rating): move and update function names
| src/components/Rating.js | refactor(Rating): move and update function names | <ide><path>rc/components/Rating.js
<ide> _onMouseUp(event) {
<ide> if (!this.props.disabled && !this.preventNextMouseUpTriggerUpdate) {
<ide> const value = Number(event.target.getAttribute('data-belle-value'));
<del> this._updateComponent(value);
<add> this._triggerComponentUpdate(value);
<ide> }
<ide>
<ide> if (this.props.onMouseUp) {
<ide> }
<ide> if (this.props.onTouchStart) {
<ide> this.props.onTouchStart(event);
<del> }
<del> }
<del>
<del> /**
<del> * The function will be passed to requestAnimationFrame for touchMove
<del> */
<del> _updateComponentOnTouchMove(touches) {
<del> const touchedElement = document.elementFromPoint(touches.clientX, touches.clientY);
<del> const value = Number(touchedElement.getAttribute('data-belle-value'));
<del> if (value && this.state.focusedValue !== value) {
<del> this.setState({
<del> focusedValue: value
<del> });
<ide> }
<ide> }
<ide>
<ide> // see http://stackoverflow.com/a/9678166/837709
<ide> const animationFrame = requestAnimationFrame.call(
<ide> window,
<del> this._updateComponentOnTouchMove.bind(this, touches)
<add> this._triggerComponentUpdateOnTouchMove.bind(this, touches)
<ide> );
<ide>
<ide> if (this.previousTouchMoveFrame) {
<ide> event.preventDefault();
<ide> this.setState({isActive: false});
<ide> const value = this.state.focusedValue;
<del> this._updateComponent(value);
<add> this._triggerComponentUpdate(value);
<ide> }
<ide>
<ide> if (this.props.onTouchEnd) {
<ide>
<ide> if (this.props.onFocus) {
<ide> this.props.onFocus(event);
<del> }
<del> }
<del>
<del> /**
<del> * update component when component is clicked, touch ends, enter or space key are hit
<del> * different update logic will apply depending on whether component has property defaultValue, value or valueLink specified
<del> */
<del> _updateComponent(value) {
<del> if (has(this.props, 'valueLink')) {
<del> this.props.valueLink.requestChange(value);
<del> this.setState({
<del> focusedValue: undefined,
<del> isActive: false
<del> });
<del> } else if (has(this.props, 'value')) {
<del> this.setState({
<del> focusedValue: undefined,
<del> isActive: false
<del> });
<del> } else {
<del> this.setState({
<del> focusedValue: undefined,
<del> isActive: false,
<del> value: value
<del> });
<del> }
<del>
<del> if (this.props.onUpdate) {
<del> this.props.onUpdate({ value: value });
<ide> }
<ide> }
<ide>
<ide> } else {
<ide> newValue = this.state.focusedValue;
<ide> }
<del> this._updateComponent(newValue);
<add> this._triggerComponentUpdate(newValue);
<ide> }
<ide> }
<ide>
<ide> value = (this.state.value) ? this.state.value : 0;
<ide> }
<ide> return value;
<add> }
<add>
<add> /**
<add> * The function will be passed to requestAnimationFrame for touchMove
<add> */
<add> _triggerComponentUpdateOnTouchMove(touches) {
<add> const touchedElement = document.elementFromPoint(touches.clientX, touches.clientY);
<add> const value = Number(touchedElement.getAttribute('data-belle-value'));
<add> if (value && this.state.focusedValue !== value) {
<add> this.setState({
<add> focusedValue: value
<add> });
<add> }
<add> }
<add>
<add>
<add> /**
<add> * update component when component is clicked, touch ends, enter or space key are hit
<add> * different update logic will apply depending on whether component has property defaultValue, value or valueLink specified
<add> */
<add> _triggerComponentUpdate(value) {
<add> if (has(this.props, 'valueLink')) {
<add> this.props.valueLink.requestChange(value);
<add> this.setState({
<add> focusedValue: undefined,
<add> isActive: false
<add> });
<add> } else if (has(this.props, 'value')) {
<add> this.setState({
<add> focusedValue: undefined,
<add> isActive: false
<add> });
<add> } else {
<add> this.setState({
<add> focusedValue: undefined,
<add> isActive: false,
<add> value: value
<add> });
<add> }
<add>
<add> if (this.props.onUpdate) {
<add> this.props.onUpdate({ value: value });
<add> }
<ide> }
<ide>
<ide> /** |
|
JavaScript | mpl-2.0 | 3fc760b6e9136d9ea42e409bafd2de13f66b626a | 0 | gladly-team/tab,gladly-team/tab,gladly-team/tab | import createCampaignConfiguration from './createCampaignConfiguration'
const campaignTitle = '## COVID-19 Food Bank Support'
const campaignDescription = `
#### Thanks to you, our community [raised thousands of dollars](https://tab.gladly.io/covid-19/) for the World Health Organization over the last few days.
#### In addition to health systems, COVID-19 has strained the ability of food banks to take care of people in need. The next phase of our support will help the [Food Bank for New York City](https://www.foodbanknyc.org/covid-19/) keep families fed during this crisis.
#### Right now, tabs you open are providing meals for our fellow humans in NYC. Together, we can feed thousands of people in need—so please open a few tabs and encourage your friends to do the same!
`
const campaignEndTitle = '## 10,000 Meals: We Did It'
const campaignEndDescription = `
#### With your help, our community just gave 10,000 meals to [people in New York City](https://www.foodbanknyc.org/covid-19/) who have been hurt by the COVID-19 crisis. This is a challenging time, and we are **immensely** grateful for all of you who have stepped up to make a difference.
#### This is still just the beginning of [our fight against this pandemic](https://tab.gladly.io/covid-19/)—more to come shortly.
#### Can you get some friends on board? Share this incredible milestone:
`
// Hardcode campaign data here.
const CURRENT_CAMPAIGN = createCampaignConfiguration({
campaignId: 'NYCFoodBank2020',
// charityId: undefined,
content: {
titleMarkdown: campaignTitle,
descriptionMarkdown: campaignDescription,
},
countMoneyRaised: true,
countNewUsers: false,
countTabsOpened: false,
// Logic on when to end the campaign.
endTriggers: {
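    // End the campaign as soon as the goal is reached, even before the
    // scheduled end time passes.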
whenGoalAchieved: true,
whenTimeEnds: false,
},
goal: {
impactUnitSingular: 'meal',
impactUnitPlural: 'meals',
impactVerbPastParticiple: 'given',
impactVerbPastTense: 'gave',
limitProgressToTargetMax: true,
numberSource: 'moneyRaised',
showProgressBarLabel: true,
showProgressBarEndText: false,
targetNumber: 10000,
transformNumberSourceValue: moneyRaised => {
// The moneyRaised value is in $USD, and it costs $0.20 per meal.
return Math.floor(moneyRaised * 5)
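      // e.g. $2,000 raised -> Math.floor(2000 * 5) = 10,000 meals, the target.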
},
},
// Modifications to the campaign when the campaign has
// ended.
onEnd: {
content: {
titleMarkdown: campaignEndTitle,
descriptionMarkdown: campaignEndDescription,
},
goal: {
// Keep the progress bar label instead of the ending text.
showProgressBarLabel: true,
showProgressBarEndText: false,
},
showSocialSharing: true,
socialSharing: {
url: 'https://tab.gladly.io/covid-19/',
EmailShareButtonProps: {
subject: 'Opening tabs for COVID-19 relief',
body:
"Hey!\n\nI've been opening tabs for COVID-19 relief on Tab for a Cause (https://tab.gladly.io), and we just gave 10,000 meals to the Food Bank for NYC.\n\nIt's free (all you need to do is open tabs in your browser). Join in as we continue to fight this pandemic!",
},
FacebookShareButtonProps: {
quote:
'Our community just gave 10,000 meals to the Food Bank for NYC for COVID-19 relief—just by opening browser tabs.',
},
RedditShareButtonProps: {
title: 'Tabs transformed into 10,000 meals for the Food Bank for NYC',
},
TumblrShareButtonProps: {
        title: 'Tabs transformed into 10,000 meals for COVID-19 relief',
caption:
'Our community just gave 10,000 meals to the Food Bank for NYC for COVID-19 relief—just by opening browser tabs. Join in!',
},
TwitterShareButtonProps: {
title:
'Our community just gave 10,000 meals to the Food Bank for NYC for COVID-19 relief—just by opening browser tabs. Join in!',
related: ['@TabForACause'],
},
},
},
showCountdownTimer: false,
showHeartsDonationButton: false,
showProgressBar: true,
showSocialSharing: false,
// socialSharing: undefined,
theme: {
color: {
main: '#ff7314',
light: '#f6924e',
},
},
time: {
start: '2020-03-31T16:00:00.000Z',
end: '2020-04-14T18:00:00.000Z',
},
})
/**
* Return the CampaignConfiguration object for the current campaign.
* @return {Promise<Object>} campaignConfig- see createCampaignConfiguration
* for structure.
*/
const getCurrentCampaignConfig = () => CURRENT_CAMPAIGN
// Outside modules shouldn't use this config. Instead, they should
// call getCampaign.js to get the CampaignData object.
export default getCurrentCampaignConfig
| graphql/database/globals/getCurrentCampaignConfig.js | import createCampaignConfiguration from './createCampaignConfiguration'
const campaignTitle = '## COVID-19 Food Bank Support'
const campaignDescription = `
#### Thanks to you, our community [raised thousands of dollars](https://tab.gladly.io/covid-19/) for the World Health Organization over the last few days.
#### In addition to health systems, COVID-19 has strained the ability of food banks to take care of people in need. The next phase of our support will help the [Food Bank for New York City](https://www.foodbanknyc.org/covid-19/) keep families fed during this crisis.
#### Right now, tabs you open are providing meals for our fellow humans in NYC. Together, we can feed thousands of people in need—so please open a few tabs and encourage your friends to do the same!
`
const campaignEndTitle = '## Thank You for Giving Food'
const campaignEndDescription = `
#### With your help, we gave thousands of meals to people in New York City who have been hurt by the COVID-19 crisis.
`
// Hardcode campaign data here.
const CURRENT_CAMPAIGN = createCampaignConfiguration({
campaignId: 'NYCFoodBank2020',
// charityId: undefined,
content: {
titleMarkdown: campaignTitle,
descriptionMarkdown: campaignDescription,
},
countMoneyRaised: true,
countNewUsers: false,
countTabsOpened: false,
// Logic on when to end the campaign.
endTriggers: {
whenGoalAchieved: true,
whenTimeEnds: false,
},
goal: {
impactUnitSingular: 'meal',
impactUnitPlural: 'meals',
impactVerbPastParticiple: 'given',
impactVerbPastTense: 'gave',
limitProgressToTargetMax: true,
numberSource: 'moneyRaised',
showProgressBarLabel: true,
showProgressBarEndText: false,
targetNumber: 10000,
transformNumberSourceValue: moneyRaised => {
// The moneyRaised value is in $USD, and it costs $0.20 per meal.
return Math.floor(moneyRaised * 5)
},
},
// Modifications to the campaign when the campaign has
// ended.
onEnd: {
content: {
titleMarkdown: campaignEndTitle,
descriptionMarkdown: campaignEndDescription,
},
goal: {
// Replace the progress bar labels with the ending text.
showProgressBarLabel: false,
showProgressBarEndText: true,
},
showSocialSharing: true,
socialSharing: {
url: 'https://tab.gladly.io/covid-19/',
EmailShareButtonProps: {
subject: 'Opening tabs for COVID-19 relief',
body:
"Hey!\n\nI've been opening tabs for COVID-19 relief on Tab for a Cause (https://tab.gladly.io), and we just gave 10,000 meals to the Food Bank for NYC.\n\nIt's free (all you need to do is open tabs in your browser). Join in as we continue to fight this pandemic!",
},
FacebookShareButtonProps: {
quote:
'Our community just gave 10,000 meals to the Food Bank for NYC for COVID-19 relief—just by opening browser tabs.',
},
RedditShareButtonProps: {
title: 'Tabs transformed into 10,000 meals for the Food Bank for NYC',
},
TumblrShareButtonProps: {
        title: 'Tabs transformed into 10,000 meals for COVID-19 relief',
caption:
'Our community just gave 10,000 meals to the Food Bank for NYC for COVID-19 relief—just by opening browser tabs. Join in!',
},
TwitterShareButtonProps: {
title:
'Our community just gave 10,000 meals to the Food Bank for NYC for COVID-19 relief—just by opening browser tabs. Join in!',
related: ['@TabForACause'],
},
},
},
showCountdownTimer: false,
showHeartsDonationButton: false,
showProgressBar: true,
showSocialSharing: false,
// socialSharing: undefined,
theme: {
color: {
main: '#ff7314',
light: '#f6924e',
},
},
time: {
start: '2020-03-31T16:00:00.000Z',
end: '2020-04-14T18:00:00.000Z',
},
})
/**
* Return the CampaignConfiguration object for the current campaign.
* @return {Promise<Object>} campaignConfig- see createCampaignConfiguration
* for structure.
*/
const getCurrentCampaignConfig = () => CURRENT_CAMPAIGN
// Outside modules shouldn't use this config. Instead, they should
// call getCampaign.js to get the CampaignData object.
export default getCurrentCampaignConfig
| Change campaign end content to highlight goal achievement and ask for sharing
| graphql/database/globals/getCurrentCampaignConfig.js | Change campaign end content to highlight goal achievement and ask for sharing | <ide><path>raphql/database/globals/getCurrentCampaignConfig.js
<ide> #### In addition to health systems, COVID-19 has strained the ability of food banks to take care of people in need. The next phase of our support will help the [Food Bank for New York City](https://www.foodbanknyc.org/covid-19/) keep families fed during this crisis.
<ide> #### Right now, tabs you open are providing meals for our fellow humans in NYC. Together, we can feed thousands of people in need—so please open a few tabs and encourage your friends to do the same!
<ide> `
<del>const campaignEndTitle = '## Thank You for Giving Food'
<add>const campaignEndTitle = '## 10,000 Meals: We Did It'
<ide> const campaignEndDescription = `
<del>#### With your help, we gave thousands of meals to people in New York City who have been hurt by the COVID-19 crisis.
<add>#### With your help, our community just gave 10,000 meals to [people in New York City](https://www.foodbanknyc.org/covid-19/) who have been hurt by the COVID-19 crisis. This is a challenging time, and we are **immensely** grateful for all of you who have stepped up to make a difference.
<add>#### This is still just the beginning of [our fight against this pandemic](https://tab.gladly.io/covid-19/)—more to come shortly.
<add>#### Can you get some friends on board? Share this incredible milestone:
<ide> `
<ide>
<ide> // Hardcode campaign data here.
<ide> descriptionMarkdown: campaignEndDescription,
<ide> },
<ide> goal: {
<del> // Replace the progress bar labels with the ending text.
<del> showProgressBarLabel: false,
<del> showProgressBarEndText: true,
<add> // Keep the progress bar label instead of the ending text.
<add> showProgressBarLabel: true,
<add> showProgressBarEndText: false,
<ide> },
<ide> showSocialSharing: true,
<ide> socialSharing: { |
|
Java | apache-2.0 | 2283b845f015134fbaa8e20a3dbb6bcea9db8464 | 0 | quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus | package io.quarkus.keycloak.pep.deployment;
import java.util.Map;
import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
import io.quarkus.arc.deployment.BeanContainerBuildItem;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.EnableAllSecurityServicesBuildItem;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.keycloak.pep.runtime.KeycloakPolicyEnforcerAuthorizer;
import io.quarkus.keycloak.pep.runtime.KeycloakPolicyEnforcerConfig;
import io.quarkus.keycloak.pep.runtime.KeycloakPolicyEnforcerRecorder;
import io.quarkus.oidc.OIDCException;
import io.quarkus.oidc.runtime.OidcBuildTimeConfig;
import io.quarkus.oidc.runtime.OidcConfig;
import io.quarkus.vertx.http.deployment.RequireBodyHandlerBuildItem;
public class KeycloakPolicyEnforcerBuildStep {
@BuildStep
FeatureBuildItem featureBuildItem() {
return new FeatureBuildItem(FeatureBuildItem.KEYCLOAK_AUTHORIZATION);
}
@BuildStep
RequireBodyHandlerBuildItem requireBody(KeycloakPolicyEnforcerConfig config) {
if (config.policyEnforcer.enable) {
if (isBodyClaimInformationPointDefined(config.policyEnforcer.claimInformationPoint.simpleConfig)) {
return new RequireBodyHandlerBuildItem();
}
for (KeycloakPolicyEnforcerConfig.KeycloakConfigPolicyEnforcer.PathConfig path : config.policyEnforcer.paths
.values()) {
if (isBodyClaimInformationPointDefined(path.claimInformationPoint.simpleConfig)) {
return new RequireBodyHandlerBuildItem();
}
}
}
return null;
}
private boolean isBodyClaimInformationPointDefined(Map<String, Map<String, String>> claims) {
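        // Any claim value referencing "request.body" means the policy enforcer will
        // need the HTTP request body, so a body handler must be installed up front.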
for (Map.Entry<String, Map<String, String>> entry : claims.entrySet()) {
Map<String, String> value = entry.getValue();
for (String nestedValue : value.values()) {
if (nestedValue.contains("request.body")) {
return true;
}
}
}
return false;
}
@BuildStep
public AdditionalBeanBuildItem beans(KeycloakPolicyEnforcerConfig config) {
if (config.policyEnforcer.enable) {
return AdditionalBeanBuildItem.builder().setUnremovable()
.addBeanClass(KeycloakPolicyEnforcerAuthorizer.class).build();
}
return null;
}
@BuildStep
EnableAllSecurityServicesBuildItem security() {
return new EnableAllSecurityServicesBuildItem();
}
@Record(ExecutionTime.RUNTIME_INIT)
@BuildStep
public void setup(OidcBuildTimeConfig buildTimeConfig, KeycloakPolicyEnforcerConfig keycloakConfig,
OidcConfig runTimeConfig, KeycloakPolicyEnforcerRecorder recorder, BeanContainerBuildItem bc) {
if (!buildTimeConfig.applicationType.equals(OidcBuildTimeConfig.ApplicationType.SERVICE)) {
throw new OIDCException("Application type [" + buildTimeConfig.applicationType + "] not supported");
}
if (keycloakConfig.policyEnforcer.enable) {
recorder.setup(runTimeConfig, keycloakConfig, bc.getValue());
}
}
}
| extensions/keycloak-authorization/deployment/src/main/java/io/quarkus/keycloak/pep/deployment/KeycloakPolicyEnforcerBuildStep.java | package io.quarkus.keycloak.pep.deployment;
import java.util.Map;
import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
import io.quarkus.arc.deployment.BeanContainerBuildItem;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.EnableAllSecurityServicesBuildItem;
import io.quarkus.deployment.builditem.FeatureBuildItem;
import io.quarkus.keycloak.pep.runtime.KeycloakPolicyEnforcerAuthorizer;
import io.quarkus.keycloak.pep.runtime.KeycloakPolicyEnforcerConfig;
import io.quarkus.keycloak.pep.runtime.KeycloakPolicyEnforcerRecorder;
import io.quarkus.oidc.OIDCException;
import io.quarkus.oidc.runtime.OidcBuildTimeConfig;
import io.quarkus.oidc.runtime.OidcConfig;
import io.quarkus.vertx.http.deployment.RequireBodyHandlerBuildItem;
public class KeycloakPolicyEnforcerBuildStep {
@BuildStep
FeatureBuildItem featureBuildItem() {
return new FeatureBuildItem(FeatureBuildItem.KEYCLOAK_AUTHORIZATION);
}
@BuildStep
RequireBodyHandlerBuildItem requireBody(KeycloakPolicyEnforcerConfig config) {
if (config.policyEnforcer.enable) {
if (isBodyClaimInformationPointDefined(config.policyEnforcer.claimInformationPoint.simpleConfig)) {
return new RequireBodyHandlerBuildItem();
}
for (KeycloakPolicyEnforcerConfig.KeycloakConfigPolicyEnforcer.PathConfig path : config.policyEnforcer.paths
.values()) {
if (isBodyClaimInformationPointDefined(path.claimInformationPoint.simpleConfig)) {
return new RequireBodyHandlerBuildItem();
}
}
}
return null;
}
private boolean isBodyClaimInformationPointDefined(Map<String, Map<String, String>> claims) {
for (Map.Entry<String, Map<String, String>> entry : claims.entrySet()) {
Map<String, String> value = entry.getValue();
if (value.get(entry.getKey()).contains("request.body")) {
return true;
}
}
return false;
}
@BuildStep
public AdditionalBeanBuildItem beans(KeycloakPolicyEnforcerConfig config) {
if (config.policyEnforcer.enable) {
return AdditionalBeanBuildItem.builder().setUnremovable()
.addBeanClass(KeycloakPolicyEnforcerAuthorizer.class).build();
}
return null;
}
@BuildStep
EnableAllSecurityServicesBuildItem security() {
return new EnableAllSecurityServicesBuildItem();
}
@Record(ExecutionTime.RUNTIME_INIT)
@BuildStep
public void setup(OidcBuildTimeConfig buildTimeConfig, KeycloakPolicyEnforcerConfig keycloakConfig,
OidcConfig runTimeConfig, KeycloakPolicyEnforcerRecorder recorder, BeanContainerBuildItem bc) {
if (!buildTimeConfig.applicationType.equals(OidcBuildTimeConfig.ApplicationType.SERVICE)) {
throw new OIDCException("Application type [" + buildTimeConfig.applicationType + "] not supported");
}
if (keycloakConfig.policyEnforcer.enable) {
recorder.setup(runTimeConfig, keycloakConfig, bc.getValue());
}
}
}
 | Fix Keycloak Authorization require body handler detection
extensions/keycloak-authorization/deployment/src/main/java/io/quarkus/keycloak/pep/deployment/KeycloakPolicyEnforcerBuildStep.java | Fix Keycloak Authorization require body handler detection
<ide> for (Map.Entry<String, Map<String, String>> entry : claims.entrySet()) {
<ide> Map<String, String> value = entry.getValue();
<ide>
<del> if (value.get(entry.getKey()).contains("request.body")) {
<del> return true;
<add> for (String nestedValue : value.values()) {
<add> if (nestedValue.contains("request.body")) {
<add> return true;
<add> }
<ide> }
<ide> }
<ide> |
|
Java | agpl-3.0 | 0db54b93cd194bf0c089783592ef6ce3f92f1cc7 | 0 | CeON/CERMINE,CeON/CERMINE,CeON/CERMINE,CeON/CERMINE | /**
* This file is part of CERMINE project.
* Copyright (c) 2011-2016 ICM-UW
*
* CERMINE is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* CERMINE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with CERMINE. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.edu.icm.cermine.service;
import java.util.Date;
/**
* @author Aleksander Nowinski ([email protected])
*/
public class ExtractionTask {
    public enum TaskStatus {
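        // Each status carries a CSS class for UI badges, a human-readable label,
        // and a flag marking terminal states.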
CREATED("queue", "SUBMITTED"),
QUEUED("queue", "QUEUED"),
PROCESSING("processing", "PROCESSING"),
FINISHED("success", "SUCCESS", true),
FAILED("failure", "FAILURE", true);
String css;
String text;
boolean finalState;
TaskStatus(String css, String text) {
this(css, text, false);
}
TaskStatus(String css, String text, boolean f) {
this.css = css;
this.text = text;
finalState = f;
}
public String getCss() {
return css;
}
public String getText() {
return text;
}
public boolean isFinalState() {
return finalState;
}
}
private long id;
byte[] pdf;
String fileName;
String md5Sum;
private TaskStatus status;
private Date creationDate;
private String clientAddress;
private ExtractionResult result;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public byte[] getPdf() {
return pdf;
}
public void setPdf(byte[] pdf) {
this.pdf = pdf;
}
public String getMd5Sum() {
return md5Sum;
}
public void setMd5Sum(String md5Sum) {
this.md5Sum = md5Sum;
}
public TaskStatus getStatus() {
return status;
}
public void setStatus(TaskStatus status) {
this.status = status;
}
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public String getClientAddress() {
return clientAddress;
}
public void setClientAddress(String clientAddress) {
this.clientAddress = clientAddress;
}
public ExtractionResult getResult() {
return result;
}
public void setResult(ExtractionResult result) {
this.result = result;
}
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public boolean isFinished() {
return status.isFinalState();
}
public boolean isSucceeded() {
        return isFinished() && status != TaskStatus.FAILED;
    }
    public boolean isFailed() {
        return status == TaskStatus.FAILED;
}
}
| cermine-web/src/main/java/pl/edu/icm/cermine/service/ExtractionTask.java | /**
* This file is part of CERMINE project.
* Copyright (c) 2011-2016 ICM-UW
*
* CERMINE is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* CERMINE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with CERMINE. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.edu.icm.cermine.service;
import java.util.Date;
/**
* @author Aleksander Nowinski ([email protected])
*/
public class ExtractionTask {
    public enum TaskStatus {
CREATED("queue", "SUBMITTED"),
QUEUED("queue", "QUEUED"),
PROCESSING("processing", "PROCESSING"),
FINISHED("success", "SUCCESS", true),
FAILED("failure", "FAILURE", true);
String css;
String text;
boolean finalState;
TaskStatus(String css, String text) {
this(css, text, false);
}
TaskStatus(String css, String text, boolean f) {
this.css = css;
this.text = text;
finalState = f;
}
public String getCss() {
return css;
}
public String getText() {
return text;
}
public boolean isFinalState() {
return finalState;
}
}
private long id;
byte[] pdf;
String fileName;
String md5Sum;
private TaskStatus status;
private Date creationDate;
private String clientAddress;
private ExtractionResult result;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public byte[] getPdf() {
return pdf;
}
public void setPdf(byte[] pdf) {
if (pdf == null) {
this.pdf = null;
} else {
this.pdf = pdf.clone();
}
}
public String getMd5Sum() {
return md5Sum;
}
public void setMd5Sum(String md5Sum) {
this.md5Sum = md5Sum;
}
public TaskStatus getStatus() {
return status;
}
public void setStatus(TaskStatus status) {
this.status = status;
}
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public String getClientAddress() {
return clientAddress;
}
public void setClientAddress(String clientAddress) {
this.clientAddress = clientAddress;
}
public ExtractionResult getResult() {
return result;
}
public void setResult(ExtractionResult result) {
this.result = result;
}
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public boolean isFinished() {
return status.isFinalState();
}
public boolean isSucceeded() {
return isFinished() && status!=TaskStatus.FAILED;
}
public boolean isFailed() {
return status==TaskStatus.FAILED;
}
}
| simple setter restored | cermine-web/src/main/java/pl/edu/icm/cermine/service/ExtractionTask.java | simple setter restored | <ide><path>ermine-web/src/main/java/pl/edu/icm/cermine/service/ExtractionTask.java
<ide> }
<ide>
<ide> public void setPdf(byte[] pdf) {
<del> if (pdf == null) {
<del> this.pdf = null;
<del> } else {
<del> this.pdf = pdf.clone();
<del> }
<add> this.pdf = pdf;
<ide> }
<ide>
<ide> public String getMd5Sum() { |
|
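Note on the restored setter above: assigning the caller's array directly means the
task and the caller share one mutable byte[], so mutating the array later changes
the stored PDF. If that aliasing is unwanted, the usual pattern is a defensive copy
in both accessors; a minimal sketch (illustrative only, not part of the recorded
commit):

public void setPdf(byte[] pdf) {
    this.pdf = (pdf == null) ? null : pdf.clone();
}

public byte[] getPdf() {
    return (pdf == null) ? null : pdf.clone();
}
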
Java | bsd-3-clause | 4b500ce4549bfe5b0f69dc59b9bb01ff9afcb113 | 0 | Shockwave4546/FRC-2015 | // RobotBuilder Version: 1.5
//
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in the future.
package org.usfirst.frc.team4546.robot2.commands;
import edu.wpi.first.wpilibj.command.Command;
import org.usfirst.frc.team4546.robot2.Robot;
/**
*
*/
public class Drive extends Command {
double driveX = 0;
double driveY = 0;
double driveZ = 0;
public Drive() {
// Use requires() here to declare subsystem dependencies
// eg. requires(chassis);
// BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=REQUIRES
requires(Robot.drivetrain);
// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=REQUIRES
}
// Called just before this Command runs the first time
protected void initialize() {
}
// Called repeatedly when this Command is scheduled to run
protected void execute() {
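// Apply a small deadzone so stick drift near center does not move the robot.
// Note: these checks compare the raw axis value, so negative deflections are
// zeroed as well; a deadband is usually applied to Math.abs(axis) instead.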
if (Robot.oi.driveStick.getX() <= .055) {
driveX = 0;
} else {
driveX = Robot.oi.driveStick.getX();
}
if (Robot.oi.driveStick.getY() <= .055) {
driveY = 0;
} else {
driveY = Robot.oi.driveStick.getY();
}
if (Robot.oi.driveStick.getZ() <= .055) {
driveZ = 0;
} else {
driveZ = Robot.oi.driveStick.getZ();
}
Robot.drivetrain.mecanumDrive(driveX, driveY, driveZ, Robot.gyro.getAngle(), Robot.speed);
}
// Make this return true when this Command no longer needs to run execute()
protected boolean isFinished() {
return false;
}
// Called once after isFinished returns true
protected void end() {
Robot.drivetrain.driveStop();
}
// Called when another command which requires one or more of the same
// subsystems is scheduled to run
protected void interrupted() {
Robot.drivetrain.driveStop();
}
}
| src/org/usfirst/frc/team4546/robot2/commands/Drive.java | // RobotBuilder Version: 1.5
//
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in the future.
package org.usfirst.frc.team4546.robot2.commands;
import edu.wpi.first.wpilibj.command.Command;
import org.usfirst.frc.team4546.robot2.Robot;
/**
*
*/
public class Drive extends Command {
double driveX = 0;
double driveY = 0;
double driveZ = 0;
public Drive() {
// Use requires() here to declare subsystem dependencies
// eg. requires(chassis);
// BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=REQUIRES
requires(Robot.drivetrain);
// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=REQUIRES
}
// Called just before this Command runs the first time
protected void initialize() {
}
// Called repeatedly when this Command is scheduled to run
protected void execute() {
if (Robot.oi.driveStick.getX() <= .55) {
driveX = 0;
} else {
driveX = Robot.oi.driveStick.getX();
}
if (Robot.oi.driveStick.getY() <= .55) {
driveY = 0;
} else {
driveY = Robot.oi.driveStick.getY();
}
if (Robot.oi.driveStick.getZ() <= .55) {
driveZ = 0;
} else {
driveZ = Robot.oi.driveStick.getZ();
}
Robot.drivetrain.mecanumDrive(driveX, driveY, driveZ, Robot.gyro.getAngle(), Robot.speed);
}
// Make this return true when this Command no longer needs to run execute()
protected boolean isFinished() {
return false;
}
// Called once after isFinished returns true
protected void end() {
Robot.drivetrain.driveStop();
}
// Called when another command which requires one or more of the same
// subsystems is scheduled to run
protected void interrupted() {
Robot.drivetrain.driveStop();
}
}
| Fixed deadzones being 10 times too big | src/org/usfirst/frc/team4546/robot2/commands/Drive.java | Fixed deadzones being 10 times too big | <ide><path>rc/org/usfirst/frc/team4546/robot2/commands/Drive.java
<ide> // Called repeatedly when this Command is scheduled to run
<ide> protected void execute() {
<ide>
<del> if (Robot.oi.driveStick.getX() <= .55) {
<add> if (Robot.oi.driveStick.getX() <= .055) {
<ide>
<ide> driveX = 0;
<ide> } else {
<ide> driveX = Robot.oi.driveStick.getX();
<ide> }
<ide>
<del> if (Robot.oi.driveStick.getY() <= .55) {
<add> if (Robot.oi.driveStick.getY() <= .055) {
<ide>
<ide> driveY = 0;
<ide> } else {
<ide> driveY = Robot.oi.driveStick.getY();
<ide> }
<ide>
<del> if (Robot.oi.driveStick.getZ() <= .55) {
<add> if (Robot.oi.driveStick.getZ() <= .055) {
<ide>
<ide> driveZ = 0;
<ide> } else { |
|
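Note on the deadzone fix above: the thresholds were scaled from .55 down to .055,
but each check still compares the signed axis value, so negative stick deflections
are zeroed unconditionally. A deadband is normally applied to the magnitude; a
minimal sketch, assuming the axes report values in [-1, 1]:

private static double applyDeadband(double axis, double threshold) {
    // Inside the band the stick counts as centered; outside, pass the value through.
    return Math.abs(axis) <= threshold ? 0.0 : axis;
}

// e.g. driveX = applyDeadband(Robot.oi.driveStick.getX(), 0.055);
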
Java | mpl-2.0 | 529eea3d672ab7a340268032083a434c3c01fe0d | 0 | sensiasoft/lib-swe-common | /***************************** BEGIN LICENSE BLOCK ***************************
The contents of this file are subject to the Mozilla Public License Version
1.1 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.mozilla.org/MPL/MPL-1.1.html
Software distributed under the License is distributed on an "AS IS" basis,
WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
for the specific language governing rights and limitations under the License.
The Original Code is the "SensorML DataProcessing Engine".
The Initial Developer of the Original Code is the VAST team at the
University of Alabama in Huntsville (UAH). <http://vast.uah.edu>
Portions created by the Initial Developer are Copyright (C) 2007
the Initial Developer. All Rights Reserved.
Please Contact Mike Botts <[email protected]> for more information.
Contributor(s):
Alexandre Robin <[email protected]>
******************************* END LICENSE BLOCK ***************************/
package org.vast.swe;
import java.io.IOException;
import net.opengis.swe.v20.DataComponent;
import net.opengis.swe.v20.DataEncoding;
import net.opengis.swe.v20.DataBlock;
import net.opengis.swe.v20.XMLEncoding;
import org.vast.cdm.common.DataSink;
import org.vast.cdm.common.DataSource;
import org.vast.cdm.common.DataStreamParser;
import org.vast.cdm.common.DataStreamWriter;
import org.vast.data.DataList;
/**
* <p>
* Implementation of SWE input/output data stream storing data in memory.
* This class also contains methods for parsing/writing the stored data.
* </p>
*
* @author Alex Robin <[email protected]>
* @since Feb 21, 2007
*/
public class SWEData extends DataList implements ISweInputDataStream, ISweOutputDataStream
{
private static final long serialVersionUID = 3128971142750657973L;
protected DataSource dataSource;
public SWEData()
{
}
@Override
public SWEData copy()
{
SWEData newObj = new SWEData();
copyTo(newObj);
newObj.dataSource = this.dataSource;
return newObj;
}
@Override
public DataComponent getNextElement()
{
return nextComponent();
}
@Override
public DataBlock getNextDataBlock()
{
return nextDataBlock();
}
@Override
public void pushNextDataBlock(DataBlock dataBlock)
{
addData(dataBlock);
}
public DataSource getDataSource()
{
return dataSource;
}
public void setDataSource(DataSource dataSource)
{
this.dataSource = dataSource;
}
/**
* Retrieves the parser created for this SWE structure/encoding pair.
* Allows use of the parser on separate input streams with the same structure.
* @return parser instance
*/
public DataStreamParser getDataParser()
{
DataStreamParser parser = SWEHelper.createDataParser(getEncoding());
parser.setDataComponents((DataComponent)getElementType());
return parser;
}
/**
* Retrieves the writer created for this structure/encoding pair.
* Allows use of the writer on separate output streams.
* @return writer instance
*/
public DataStreamWriter getDataWriter()
{
DataStreamWriter writer = SWEHelper.createDataWriter(getEncoding());
writer.setDataComponents((DataComponent)getElementType());
return writer;
}
/**
* Parses data from the internally stored data source stream
* and stores data blocks in a DataList
* @throws IOException
*/
public void parseData() throws IOException
{
assert(this.dataSource != null);
parseData(this.dataSource);
}
/**
* Parses data from the given data source stream and stores
* data blocks in the DataList
* @param dataSource
* @throws IOException
*/
public void parseData(DataSource dataSource) throws IOException
{
DataEncoding encoding = getEncoding();
// special case for reading XML encoded stream from a DOM
if (dataSource instanceof DataSourceDOM && encoding instanceof XMLEncoding)
{
DataSourceDOM domSrc = (DataSourceDOM)dataSource;
XmlDataParserDOM parser = new XmlDataParserDOM();
parser.setDataEncoding(encoding);
parser.setDataComponents((DataComponent)getElementType());
parser.setDataHandler(new DefaultParserHandler(this));
parser.read(domSrc.getDom(), domSrc.getParentElt());
}
else
{
if (dataSource instanceof DataSourceDOM)
encoding = SWEHelper.ensureXmlCompatible(encoding);
DataStreamParser parser = SWEHelper.createDataParser(encoding);
parser.setDataComponents((DataComponent)getElementType());
parser.setDataHandler(new DefaultParserHandler(this));
parser.parse(dataSource.getDataStream());
}
}
/**
* Writes data blocks to the data stream specified
* @param dataSink
* @throws IOException
*/
public void writeData(DataSink dataSink) throws IOException
{
DataEncoding encoding = getEncoding();
// special case for writing XML encoded stream in a DOM
if (dataSink instanceof DataSinkDOM && encoding instanceof XMLEncoding)
{
DataSinkDOM domSink = (DataSinkDOM)dataSink;
XmlDataWriterDOM writer = new XmlDataWriterDOM();
writer.setDataEncoding(encoding);
writer.setDataComponents((DataComponent)getElementType());
writer.setDataHandler(new DefaultWriterHandler(this, writer));
writer.write(domSink.getDom(), domSink.getParentElt());
}
else
{
if (dataSink instanceof DataSinkDOM)
encoding = SWEHelper.ensureXmlCompatible(encoding);
DataStreamWriter writer = SWEHelper.createDataWriter(encoding);
writer.setParentArray(this);
writer.write(dataSink.getDataStream());
writer.flush();
dataSink.flush();
}
}
@Override
public SWEData clone()
{
return (SWEData)super.clone();
}
@Override
public void setElementType(DataComponent elementType)
{
this.setElementType(elementType.getName(), elementType);
}
@Override
public int getNumElements()
{
return getComponentCount();
}
}
| swe-common-core/src/main/java/org/vast/swe/SWEData.java | /***************************** BEGIN LICENSE BLOCK ***************************
The contents of this file are subject to the Mozilla Public License Version
1.1 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.mozilla.org/MPL/MPL-1.1.html
Software distributed under the License is distributed on an "AS IS" basis,
WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
for the specific language governing rights and limitations under the License.
The Original Code is the "SensorML DataProcessing Engine".
The Initial Developer of the Original Code is the VAST team at the
University of Alabama in Huntsville (UAH). <http://vast.uah.edu>
Portions created by the Initial Developer are Copyright (C) 2007
the Initial Developer. All Rights Reserved.
Please Contact Mike Botts <[email protected]> for more information.
Contributor(s):
Alexandre Robin <[email protected]>
******************************* END LICENSE BLOCK ***************************/
package org.vast.swe;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import net.opengis.swe.v20.DataComponent;
import net.opengis.swe.v20.DataEncoding;
import net.opengis.swe.v20.DataBlock;
import net.opengis.swe.v20.XMLEncoding;
import org.vast.cdm.common.DataSink;
import org.vast.cdm.common.DataSource;
import org.vast.cdm.common.DataStreamParser;
import org.vast.cdm.common.DataStreamWriter;
import org.vast.data.DataList;
/**
* <p>
* Implementation of SWE input/output data stream storing data in memory.
* This class also contains methods for parsing/writing the stored data.
* </p>
*
* @author Alex Robin <[email protected]>
* @since Feb 21, 2007
*/
public class SWEData extends DataList implements ISweInputDataStream, ISweOutputDataStream
{
private static final long serialVersionUID = 3128971142750657973L;
protected DataSource dataSource;
public SWEData()
{
}
@Override
public SWEData copy()
{
SWEData newObj = new SWEData();
copyTo(newObj);
newObj.dataSource = this.dataSource;
return newObj;
}
@Override
public DataComponent getNextElement()
{
return nextComponent();
}
@Override
public DataBlock getNextDataBlock()
{
return nextDataBlock();
}
@Override
public void pushNextDataBlock(DataBlock dataBlock)
{
addData(dataBlock);
}
public DataSource getDataSource()
{
return dataSource;
}
public void setDataSource(DataSource dataSource)
{
this.dataSource = dataSource;
}
/**
* Retrieves the parser created for this SWE structure/encoding pair.
* Allows use of the parser on separate input streams with the same structure.
* @return parser instance
*/
public DataStreamParser getDataParser()
{
DataStreamParser parser = SWEHelper.createDataParser(getEncoding());
parser.setDataComponents((DataComponent)getElementType());
return parser;
}
/**
* Retrieves the writer created for this structure/encoding pair.
* Allows use of the writer on separate output streams.
* @return writer instance
*/
public DataStreamWriter getDataWriter()
{
DataStreamWriter writer = SWEHelper.createDataWriter(getEncoding());
writer.setDataComponents((DataComponent)getElementType());
return writer;
}
/**
* Parses data from the internally stored data source stream
* and stores data blocks in a DataList
* @throws IOException
*/
public void parseData() throws IOException
{
assert(this.dataSource != null);
parseData(this.dataSource);
}
/**
* Parses data from the given data source stream and stores
* data blocks in the DataList
* @param dataSource
* @throws IOException
*/
public void parseData(DataSource dataSource) throws IOException
{
DataEncoding encoding = getEncoding();
// special case for reading XML encoded stream from a DOM
if (dataSource instanceof DataSourceDOM && encoding instanceof XMLEncoding)
{
DataSourceDOM domSrc = (DataSourceDOM)dataSource;
XmlDataParserDOM parser = new XmlDataParserDOM();
parser.setDataEncoding(encoding);
parser.setDataComponents((DataComponent)getElementType());
parser.setDataHandler(new DefaultParserHandler(this));
parser.read(domSrc.getDom(), domSrc.getParentElt());
}
else
{
if (dataSource instanceof DataSourceDOM)
encoding = SWEHelper.ensureXmlCompatible(encoding);
DataStreamParser parser = SWEHelper.createDataParser(encoding);
parser.setDataComponents((DataComponent)getElementType());
parser.setDataHandler(new DefaultParserHandler(this));
InputStream dataStream = dataSource.getDataStream();
parser.parse(dataStream);
}
}
/**
* Writes data blocks to the data stream specified
* @param dataSink
* @throws IOException
*/
public void writeData(DataSink dataSink) throws IOException
{
DataEncoding encoding = getEncoding();
// special case for writing XML encoded stream in a DOM
if (dataSink instanceof DataSinkDOM && encoding instanceof XMLEncoding)
{
DataSinkDOM domSink = (DataSinkDOM)dataSink;
XmlDataWriterDOM writer = new XmlDataWriterDOM();
writer.setDataEncoding(encoding);
writer.setDataComponents((DataComponent)getElementType());
writer.setDataHandler(new DefaultWriterHandler(this, writer));
writer.write(domSink.getDom(), domSink.getParentElt());
}
else
{
if (dataSink instanceof DataSinkDOM)
encoding = SWEHelper.ensureXmlCompatible(encoding);
DataStreamWriter writer = SWEHelper.createDataWriter(encoding);
writer.setDataComponents((DataComponent)getElementType());
writer.setDataHandler(new DefaultWriterHandler(this, writer));
OutputStream dataStream = dataSink.getDataStream();
writer.write(dataStream);
writer.flush();
dataSink.flush();
}
}
@Override
public SWEData clone()
{
return (SWEData)super.clone();
}
@Override
public void setElementType(DataComponent elementType)
{
this.setElementType(elementType.getName(), elementType);
}
@Override
public int getNumElements()
{
return getComponentCount();
}
}
| Fixed bug in SWEData block writer now that we're appending block
separators after each record | swe-common-core/src/main/java/org/vast/swe/SWEData.java | Fixed bug in SWEData block writer now that we're appending block separators after each record | <ide><path>we-common-core/src/main/java/org/vast/swe/SWEData.java
<ide> package org.vast.swe;
<ide>
<ide> import java.io.IOException;
<del>import java.io.InputStream;
<del>import java.io.OutputStream;
<ide> import net.opengis.swe.v20.DataComponent;
<ide> import net.opengis.swe.v20.DataEncoding;
<ide> import net.opengis.swe.v20.DataBlock;
<ide>
<ide> DataStreamParser parser = SWEHelper.createDataParser(encoding);
<ide> parser.setDataComponents((DataComponent)getElementType());
<del> parser.setDataHandler(new DefaultParserHandler(this));
<del>
<del> InputStream dataStream = dataSource.getDataStream();
<del> parser.parse(dataStream);
<add> parser.setDataHandler(new DefaultParserHandler(this));
<add> parser.parse(dataSource.getDataStream());
<ide> }
<ide> }
<ide>
<ide> encoding = SWEHelper.ensureXmlCompatible(encoding);
<ide>
<ide> DataStreamWriter writer = SWEHelper.createDataWriter(encoding);
<del> writer.setDataComponents((DataComponent)getElementType());
<del> writer.setDataHandler(new DefaultWriterHandler(this, writer));
<del>
<del> OutputStream dataStream = dataSink.getDataStream();
<del> writer.write(dataStream);
<add> writer.setParentArray(this);
<add> writer.write(dataSink.getDataStream());
<ide> writer.flush();
<ide> dataSink.flush();
<ide> } |
|
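Note on the writer fix above: writeData() now hands the whole list to the stream
writer via setParentArray(), so the writer iterates the stored blocks and emits
block separators itself. A minimal sketch of the same flow against an arbitrary
stream, using only calls that already appear in this file (sweData stands for a
populated SWEData instance and is an assumption of the sketch):

java.io.OutputStream outputStream = new java.io.ByteArrayOutputStream();
DataStreamWriter writer = SWEHelper.createDataWriter(sweData.getEncoding());
writer.setParentArray(sweData); // writer pulls every DataBlock, separators included
writer.write(outputStream);
writer.flush();
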
Java | apache-2.0 | 04b77fd2600ec64fa41d578667ba2bc3f35570d2 | 0 | strapdata/elassandra,strapdata/elassandra,vroyer/elassandra,strapdata/elassandra,strapdata/elassandra,vroyer/elassandra,strapdata/elassandra,vroyer/elassandra | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CancellableThreads;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.iterableWithSize;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
public class RemoteClusterConnectionTests extends ESTestCase {
private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
@Override
public void tearDown() throws Exception {
super.tearDown();
ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
private MockTransportService startTransport(String id, List<DiscoveryNode> knownNodes, Version version) {
return startTransport(id, knownNodes, version, threadPool);
}
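// Starts a standalone transport that answers search-shards and cluster-state
// requests with the given known nodes, emulating a node of a remote cluster.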
public static MockTransportService startTransport(String id, List<DiscoveryNode> knownNodes, Version version, ThreadPool threadPool) {
return startTransport(id, knownNodes, version, threadPool, Settings.EMPTY);
}
public static MockTransportService startTransport(
final String id,
final List<DiscoveryNode> knownNodes,
final Version version,
final ThreadPool threadPool,
final Settings settings) {
boolean success = false;
final Settings s = Settings.builder().put(settings).put("node.name", id).build();
ClusterName clusterName = ClusterName.CLUSTER_NAME_SETTING.get(s);
MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null);
try {
newService.registerRequestHandler(ClusterSearchShardsAction.NAME, ClusterSearchShardsRequest::new, ThreadPool.Names.SAME,
(request, channel) -> {
channel.sendResponse(new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0],
knownNodes.toArray(new DiscoveryNode[0]), Collections.emptyMap()));
});
newService.registerRequestHandler(ClusterStateAction.NAME, ClusterStateRequest::new, ThreadPool.Names.SAME,
(request, channel) -> {
DiscoveryNodes.Builder builder = DiscoveryNodes.builder();
for (DiscoveryNode node : knownNodes) {
builder.add(node);
}
ClusterState build = ClusterState.builder(clusterName).nodes(builder.build()).build();
channel.sendResponse(new ClusterStateResponse(clusterName, build, 0L));
});
newService.start();
newService.acceptIncomingRequests();
success = true;
return newService;
} finally {
if (success == false) {
newService.close();
}
}
}
public void testDiscoverSingleNode() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
public void testDiscoverSingleNodeWithIncompatibleSeed() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService incompatibleTransport = startTransport("incompat_seed_node", knownNodes, Version.fromString("2.0.0"));
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
DiscoveryNode incompatibleSeedNode = incompatibleTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(incompatibleTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(incompatibleSeedNode, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, seedNodes);
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertFalse(service.nodeConnected(incompatibleSeedNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
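// When a connected node drops, the connection must notice the disconnect and pick
// up another discoverable node (the spare) on its next connection attempt.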
public void testNodeDisconnected() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT);
MockTransportService spareTransport = startTransport("spare_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
DiscoveryNode spareNode = spareTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertFalse(service.nodeConnected(spareNode));
knownNodes.add(spareNode);
CountDownLatch latchDisconnect = new CountDownLatch(1);
CountDownLatch latchConnected = new CountDownLatch(1);
service.addConnectionListener(new TransportConnectionListener() {
@Override
public void onNodeDisconnected(DiscoveryNode node) {
if (node.equals(discoverableNode)) {
latchDisconnect.countDown();
}
}
@Override
public void onNodeConnected(DiscoveryNode node) {
if (node.equals(spareNode)) {
latchConnected.countDown();
}
}
});
discoverableTransport.close();
// now make sure we try to connect to other nodes once we get disconnected
assertTrue(latchDisconnect.await(10, TimeUnit.SECONDS));
assertTrue(latchConnected.await(10, TimeUnit.SECONDS));
assertTrue(service.nodeConnected(spareNode));
}
}
}
}
public void testFilterDiscoveredNodes() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
DiscoveryNode rejectedNode = randomBoolean() ? seedNode : discoverableNode;
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
if (rejectedNode.equals(seedNode)) {
assertFalse(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
} else {
assertTrue(service.nodeConnected(seedNode));
assertFalse(service.nodeConnected(discoverableNode));
}
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
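// Runs the asynchronous seed-node update to completion and rethrows any failure.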
private void updateSeedNodes(RemoteClusterConnection connection, List<DiscoveryNode> seedNodes) throws Exception {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> {
exceptionAtomicReference.set(x);
latch.countDown();
});
connection.updateSeedNodes(seedNodes, listener);
latch.await();
if (exceptionAtomicReference.get() != null) {
throw exceptionAtomicReference.get();
}
}
public void testConnectWithIncompatibleTransports() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.fromString("2.0.0"))) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
expectThrows(Exception.class, () -> updateSeedNodes(connection, Arrays.asList(seedNode)));
assertFalse(service.nodeConnected(seedNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
public void testRemoteConnectionVersionMatchesTransportConnectionVersion() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
final Version previousVersion = VersionUtils.getPreviousVersion();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, previousVersion);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
assertThat(seedNode, notNullValue());
knownNodes.add(seedNode);
DiscoveryNode oldVersionNode = discoverableTransport.getLocalDiscoNode();
assertThat(oldVersionNode, notNullValue());
knownNodes.add(oldVersionNode);
assertThat(seedNode.getVersion(), not(equalTo(oldVersionNode.getVersion())));
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
final Transport.Connection seedConnection = new Transport.Connection() {
@Override
public DiscoveryNode getNode() {
return seedNode;
}
@Override
public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
throws IOException, TransportException {
// no-op
}
@Override
public void close() throws IOException {
// no-op
}
};
service.addDelegate(seedNode.getAddress(), new MockTransportService.DelegateTransport(service.getOriginalTransport()) {
@Override
public Connection getConnection(DiscoveryNode node) {
if (node == seedNode) {
return seedConnection;
}
return super.getConnection(node);
}
});
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
connection.addConnectedNode(seedNode);
for (DiscoveryNode node : knownNodes) {
final Transport.Connection transportConnection = connection.getConnection(node);
assertThat(transportConnection.getVersion(), equalTo(previousVersion));
}
assertThat(knownNodes, iterableWithSize(2));
}
}
}
}
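// A server socket that accepts but never responds simulates an unresponsive seed
// node; closing the connection must cancel the in-flight attempt rather than hang.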
@SuppressForbidden(reason = "calls getLocalHost here but it's fine in this case")
public void testSlowNodeCanBeCanceled() throws IOException, InterruptedException {
try (ServerSocket socket = new MockServerSocket()) {
socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1);
socket.setReuseAddress(true);
DiscoveryNode seedNode = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(),
socket.getLocalPort()), emptyMap(),
emptySet(), Version.CURRENT);
CountDownLatch acceptedLatch = new CountDownLatch(1);
CountDownLatch closeRemote = new CountDownLatch(1);
Thread t = new Thread() {
@Override
public void run() {
try (Socket accept = socket.accept()) {
acceptedLatch.countDown();
closeRemote.await();
} catch (IOException e) {
// that's fine we might close
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
};
t.start();
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
CountDownLatch listenerCalled = new CountDownLatch(1);
AtomicReference<Exception> exceptionReference = new AtomicReference<>();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
ActionListener<Void> listener = ActionListener.wrap(x -> {
listenerCalled.countDown();
fail("expected exception");
}, x -> {
exceptionReference.set(x);
listenerCalled.countDown();
});
connection.updateSeedNodes(Arrays.asList(seedNode), listener);
acceptedLatch.await();
connection.close(); // now close it; this should trigger an interrupt on the socket so we can move on
assertTrue(connection.assertNoRunningConnections());
}
closeRemote.countDown();
listenerCalled.await();
assertNotNull(exceptionReference.get());
expectThrows(CancellableThreads.ExecutionCancelledException.class, () -> {throw exceptionReference.get();});
}
}
}
public void testFetchShards() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
List<DiscoveryNode> nodes = Collections.singletonList(seedNode);
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
nodes, service, Integer.MAX_VALUE, n -> true)) {
if (randomBoolean()) {
updateSeedNodes(connection, nodes);
}
if (randomBoolean()) {
connection.updateSkipUnavailable(randomBoolean());
}
SearchRequest request = new SearchRequest("test-index");
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
.indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
.routing(request.routing());
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
responseLatch.await();
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
public void testFetchShardsSkipUnavailable() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedNode);
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Collections.singletonList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
SearchRequest request = new SearchRequest("test-index");
ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
.indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
.routing(request.routing());
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(5, TimeUnit.SECONDS));
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse response = reference.get();
assertTrue(response != ClusterSearchShardsResponse.EMPTY);
assertEquals(knownNodes, Arrays.asList(response.getNodes()));
}
CountDownLatch disconnectedLatch = new CountDownLatch(1);
service.addConnectionListener(new TransportConnectionListener() {
@Override
public void onNodeDisconnected(DiscoveryNode node) {
if (node.equals(seedNode)) {
disconnectedLatch.countDown();
}
}
});
service.addFailToSendNoConnectRule(seedTransport);
if (randomBoolean()) {
connection.updateSkipUnavailable(false);
}
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(1, TimeUnit.SECONDS));
assertNotNull(failReference.get());
assertNull(reference.get());
assertThat(failReference.get(), instanceOf(TransportException.class));
}
connection.updateSkipUnavailable(true);
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(1, TimeUnit.SECONDS));
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse response = reference.get();
assertTrue(response == ClusterSearchShardsResponse.EMPTY);
}
//give the transport service enough time to realize that the node is down and to notify the connection listeners
//so that RemoteClusterConnection is left with no connected nodes, hence it will retry connecting the next time a request is sent
assertTrue(disconnectedLatch.await(1, TimeUnit.SECONDS));
if (randomBoolean()) {
connection.updateSkipUnavailable(false);
}
service.clearAllRules();
//check that we reconnect once the node is back up
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(1, TimeUnit.SECONDS));
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse response = reference.get();
assertTrue(response != ClusterSearchShardsResponse.EMPTY);
assertEquals(knownNodes, Arrays.asList(response.getNodes()));
}
}
}
}
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/28695")
public void testTriggerUpdatesConcurrently() throws IOException, InterruptedException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService seedTransport1 = startTransport("seed_node_1", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
DiscoveryNode seedNode1 = seedTransport1.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(seedTransport1.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(seedNode1, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
int numThreads = randomIntBetween(4, 10);
Thread[] threads = new Thread[numThreads];
CyclicBarrier barrier = new CyclicBarrier(numThreads);
for (int i = 0; i < threads.length; i++) {
final int numConnectionAttempts = randomIntBetween(10, 200);
threads[i] = new Thread() {
@Override
public void run() {
try {
barrier.await();
CountDownLatch latch = new CountDownLatch(numConnectionAttempts);
for (int i = 0; i < numConnectionAttempts; i++) {
AtomicBoolean executed = new AtomicBoolean(false);
ActionListener<Void> listener = ActionListener.wrap(x -> {
assertTrue(executed.compareAndSet(false, true));
latch.countDown();
}, x -> {
assertTrue(executed.compareAndSet(false, true));
latch.countDown();
if (x instanceof RejectedExecutionException) {
// that's fine
} else {
throw new AssertionError(x);
}
});
connection.updateSeedNodes(seedNodes, listener);
}
latch.await();
} catch (Exception ex) {
throw new AssertionError(ex);
}
}
};
threads[i].start();
}
for (int i = 0; i < threads.length; i++) {
threads[i].join();
}
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(service.nodeConnected(seedNode1));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
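// Hammers updateSeedNodes from several threads while close() races against them;
// the only acceptable failures are rejected executions, already-closed errors and
// cancellations from the closing connect handler.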
public void testCloseWhileConcurrentlyConnecting() throws IOException, InterruptedException, BrokenBarrierException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService seedTransport1 = startTransport("seed_node_1", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode seedNode1 = seedTransport1.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(seedTransport1.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(seedNode1, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
int numThreads = randomIntBetween(4, 10);
Thread[] threads = new Thread[numThreads];
CyclicBarrier barrier = new CyclicBarrier(numThreads + 1);
for (int i = 0; i < threads.length; i++) {
final int numConnectionAttempts = randomIntBetween(10, 100);
threads[i] = new Thread() {
@Override
public void run() {
try {
barrier.await();
CountDownLatch latch = new CountDownLatch(numConnectionAttempts);
for (int i = 0; i < numConnectionAttempts; i++) {
AtomicReference<Exception> executed = new AtomicReference<>();
ActionListener<Void> listener = ActionListener.wrap(
x -> {
if (executed.compareAndSet(null, new RuntimeException())) {
latch.countDown();
} else {
throw new AssertionError("listener has been called twice", executed.get());
}
},
x -> {
if (executed.compareAndSet(null, x)) {
latch.countDown();
} else {
final String message = x.getMessage();
if ((executed.get().getClass() == x.getClass()
&& "operation was cancelled reason [connect handler is closed]".equals(message)
&& message.equals(executed.get().getMessage())) == false) {
// we do cancel the operation, which means that if timing allows it, the caller
// of a blocking call as well as the handler can observe the
// ExecutionCancelledException concurrently. Unless that is the case, we fail
// if we get called more than once!
AssertionError assertionError = new AssertionError("listener has been called twice", x);
assertionError.addSuppressed(executed.get());
throw assertionError;
}
}
if (x instanceof RejectedExecutionException || x instanceof AlreadyClosedException
|| x instanceof CancellableThreads.ExecutionCancelledException) {
// that's fine
} else {
throw new AssertionError(x);
}
});
connection.updateSeedNodes(seedNodes, listener);
}
latch.await();
} catch (Exception ex) {
throw new AssertionError(ex);
}
}
};
threads[i].start();
}
barrier.await();
connection.close();
}
}
}
}
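// Registers a NodesInfoAction handler that fabricates HTTP addresses (ports 80 and
// up) for the given nodes, so that getConnectionInfo() has addresses to report.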
private static void installNodeStatsHandler(TransportService service, DiscoveryNode...nodes) {
service.registerRequestHandler(NodesInfoAction.NAME, NodesInfoRequest::new, ThreadPool.Names.SAME, false, false,
(request, channel) -> {
List<NodeInfo> nodeInfos = new ArrayList<>();
int port = 80;
for (DiscoveryNode node : nodes) {
HttpInfo http = new HttpInfo(new BoundTransportAddress(new TransportAddress[]{node.getAddress()},
new TransportAddress(node.getAddress().address().getAddress(), port++)), 100);
nodeInfos.add(new NodeInfo(node.getVersion(), Build.CURRENT, node, null, null, null, null, null, null, http, null,
null, null));
}
channel.sendResponse(new NodesInfoResponse(ClusterName.DEFAULT, nodeInfos, Collections.emptyList()));
});
}
public void testGetConnectionInfo() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService transport1 = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService transport2 = startTransport("seed_node_1", knownNodes, Version.CURRENT);
MockTransportService transport3 = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode node1 = transport1.getLocalDiscoNode();
DiscoveryNode node2 = transport3.getLocalDiscoNode();
DiscoveryNode node3 = transport2.getLocalDiscoNode();
knownNodes.add(transport1.getLocalDiscoNode());
knownNodes.add(transport3.getLocalDiscoNode());
knownNodes.add(transport2.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(node3, node1, node2);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
int maxNumConnections = randomIntBetween(1, 5);
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, maxNumConnections, n -> true)) {
// test no nodes connected
RemoteConnectionInfo remoteConnectionInfo = assertSerialization(getRemoteConnectionInfo(connection));
assertNotNull(remoteConnectionInfo);
assertEquals(0, remoteConnectionInfo.numNodesConnected);
assertEquals(0, remoteConnectionInfo.seedNodes.size());
assertEquals(0, remoteConnectionInfo.httpAddresses.size());
assertEquals(maxNumConnections, remoteConnectionInfo.connectionsPerCluster);
assertEquals("test-cluster", remoteConnectionInfo.clusterAlias);
updateSeedNodes(connection, seedNodes);
expectThrows(RemoteTransportException.class, () -> getRemoteConnectionInfo(connection));
for (MockTransportService s : Arrays.asList(transport1, transport2, transport3)) {
installNodeStatsHandler(s, node1, node2, node3);
}
remoteConnectionInfo = getRemoteConnectionInfo(connection);
remoteConnectionInfo = assertSerialization(remoteConnectionInfo);
assertNotNull(remoteConnectionInfo);
assertEquals(connection.getNumNodesConnected(), remoteConnectionInfo.numNodesConnected);
assertEquals(Math.min(3, maxNumConnections), connection.getNumNodesConnected());
assertEquals(3, remoteConnectionInfo.seedNodes.size());
assertEquals(remoteConnectionInfo.httpAddresses.size(), Math.min(3, maxNumConnections));
assertEquals(maxNumConnections, remoteConnectionInfo.connectionsPerCluster);
assertEquals("test-cluster", remoteConnectionInfo.clusterAlias);
for (TransportAddress address : remoteConnectionInfo.httpAddresses) {
assertTrue("port range mismatch: " + address.getPort(), address.getPort() >= 80 && address.getPort() <= 90);
}
}
}
}
}
public void testRemoteConnectionInfo() throws IOException {
RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats);
RemoteConnectionInfo stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 4, TimeValue.timeValueMinutes(30), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster_1",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 15)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 87)),
4, 3, TimeValue.timeValueMinutes(30), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(325), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
5, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
}
private static RemoteConnectionInfo assertSerialization(RemoteConnectionInfo info) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(Version.CURRENT);
info.writeTo(out);
StreamInput in = out.bytes().streamInput();
in.setVersion(Version.CURRENT);
RemoteConnectionInfo remoteConnectionInfo = new RemoteConnectionInfo(in);
assertEquals(info, remoteConnectionInfo);
assertEquals(info.hashCode(), remoteConnectionInfo.hashCode());
return randomBoolean() ? info : remoteConnectionInfo;
}
}
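// The base64 blobs below are RemoteConnectionInfo payloads captured on older wire
// formats; the test verifies they still deserialize and round-trip on those versions.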
public void testRemoteConnectionInfoBwComp() throws IOException {
final Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_6_5, Version.V_6_0_0);
RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster",
Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 4, new TimeValue(30, TimeUnit.MINUTES), false);
final byte[] data;
if (version.before(Version.V_6_0_0_alpha1)) {
String encoded = "AQABBAAAAAAHMC4wLjAuMAAAAAEBAAEEAAAAAAcwLjAuMC4wAAAAUAQ8BAQMdGVzdF9jbHVzdGVyAAAAAAAAAA==";
data = Base64.getDecoder().decode(encoded);
} else {
String encoded = "AQQAAAAABzAuMC4wLjAAAAABAQQAAAAABzAuMC4wLjAAAABQBDwEBAx0ZXN0X2NsdXN0ZXIAAAAAAAAAAAAAAA==";
data = Base64.getDecoder().decode(encoded);
}
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
RemoteConnectionInfo deserialized = new RemoteConnectionInfo(in);
assertEquals(expected, deserialized);
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(version);
deserialized.writeTo(out);
try (StreamInput in2 = StreamInput.wrap(out.bytes().toBytesRef().bytes)) {
in2.setVersion(version);
RemoteConnectionInfo deserialized2 = new RemoteConnectionInfo(in2);
assertEquals(expected, deserialized2);
}
}
}
}
public void testRenderConnectionInfoXContent() throws IOException {
RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,80)),
4, 3, TimeValue.timeValueMinutes(30), true);
stats = assertSerialization(stats);
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
stats.toXContent(builder, null);
builder.endObject();
assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"http_addresses\":[\"0.0.0.0:80\"],\"connected\":true," +
"\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," +
"\"skip_unavailable\":true}}", builder.string());
stats = new RemoteConnectionInfo("some_other_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,1), new TransportAddress(TransportAddress.META_ADDRESS,2)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,80), new TransportAddress(TransportAddress.META_ADDRESS,81)),
2, 0, TimeValue.timeValueSeconds(30), false);
stats = assertSerialization(stats);
builder = XContentFactory.jsonBuilder();
builder.startObject();
stats.toXContent(builder, null);
builder.endObject();
assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"],\"http_addresses\":[\"0.0.0.0:80\",\"0.0.0.0:81\"],"
+ "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," +
"\"skip_unavailable\":false}}", builder.string());
}
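// Synchronous wrapper around the async getConnectionInfo call: blocks on a latch and rethrows any reported failure.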
private RemoteConnectionInfo getRemoteConnectionInfo(RemoteClusterConnection connection) throws Exception {
AtomicReference<RemoteConnectionInfo> statsRef = new AtomicReference<>();
AtomicReference<Exception> exceptionRef = new AtomicReference<>();
CountDownLatch latch = new CountDownLatch(1);
connection.getConnectionInfo(new ActionListener<RemoteConnectionInfo>() {
@Override
public void onResponse(RemoteConnectionInfo remoteConnectionInfo) {
statsRef.set(remoteConnectionInfo);
latch.countDown();
}
@Override
public void onFailure(Exception e) {
exceptionRef.set(e);
latch.countDown();
}
});
latch.await();
if (exceptionRef.get() != null) {
throw exceptionRef.get();
}
return statsRef.get();
}
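// ensureConnected must establish connections on first use and be a no-op when the cluster is already connected.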
public void testEnsureConnected() throws IOException, InterruptedException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
assertFalse(service.nodeConnected(seedNode));
assertFalse(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
CountDownLatch latch = new CountDownLatch(1);
connection.ensureConnected(new LatchedActionListener<>(new ActionListener<Void>() {
@Override
public void onResponse(Void aVoid) {
}
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
}, latch));
latch.await();
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
// execute again; we are already connected, so this should complete without reconnecting
connection.ensureConnected(new LatchedActionListener<>(new ActionListener<Void>() {
@Override
public void onResponse(Void aVoid) {
}
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
}, latch));
latch.await();
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
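// collectNodes must yield a lookup function that resolves known node ids and returns null for unknown ones.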
public void testCollectNodes() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
if (randomBoolean()) {
updateSeedNodes(connection, Arrays.asList(seedNode));
}
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<Function<String, DiscoveryNode>> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
ActionListener<Function<String, DiscoveryNode>> shardsListener = ActionListener.wrap(
x -> {
reference.set(x);
responseLatch.countDown();
},
x -> {
failReference.set(x);
responseLatch.countDown();
});
connection.collectNodes(shardsListener);
responseLatch.await();
assertNull(failReference.get());
assertNotNull(reference.get());
Function<String, DiscoveryNode> function = reference.get();
assertEquals(seedNode, function.apply(seedNode.getId()));
assertNull(function.apply(seedNode.getId() + "foo"));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
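// Reader threads call getConnectedNode while modifier threads add and disconnect nodes, to flush out concurrency issues.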
public void testConnectedNodesConcurrentAccess() throws IOException, InterruptedException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
List<MockTransportService> discoverableTransports = new CopyOnWriteArrayList<>();
try {
final int numDiscoverableNodes = randomIntBetween(5, 20);
List<DiscoveryNode> discoverableNodes = new ArrayList<>(numDiscoverableNodes);
for (int i = 0; i < numDiscoverableNodes; i++) {
MockTransportService transportService = startTransport("discoverable_node" + i, knownNodes, Version.CURRENT);
discoverableNodes.add(transportService.getLocalDiscoNode());
discoverableTransports.add(transportService);
}
List<DiscoveryNode> seedNodes = randomSubsetOf(discoverableNodes);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
final int numGetThreads = randomIntBetween(4, 10);
final Thread[] getThreads = new Thread[numGetThreads];
final int numModifyingThreads = randomIntBetween(4, 10);
final Thread[] modifyingThreads = new Thread[numModifyingThreads];
CyclicBarrier barrier = new CyclicBarrier(numGetThreads + numModifyingThreads);
for (int i = 0; i < getThreads.length; i++) {
final int numGetCalls = randomIntBetween(1000, 10000);
getThreads[i] = new Thread(() -> {
try {
barrier.await();
for (int j = 0; j < numGetCalls; j++) {
try {
DiscoveryNode node = connection.getConnectedNode();
assertNotNull(node);
} catch (IllegalStateException e) {
if (e.getMessage().startsWith("No node available for cluster:") == false) {
throw e;
}
}
}
} catch (Exception ex) {
throw new AssertionError(ex);
}
});
getThreads[i].start();
}
final AtomicInteger counter = new AtomicInteger();
for (int i = 0; i < modifyingThreads.length; i++) {
final int numDisconnects = randomIntBetween(5, 10);
modifyingThreads[i] = new Thread(() -> {
try {
barrier.await();
for (int j = 0; j < numDisconnects; j++) {
if (randomBoolean()) {
MockTransportService transportService =
startTransport("discoverable_node_added" + counter.incrementAndGet(), knownNodes,
Version.CURRENT);
discoverableTransports.add(transportService);
connection.addConnectedNode(transportService.getLocalDiscoNode());
} else {
DiscoveryNode node = randomFrom(discoverableNodes);
connection.onNodeDisconnected(node);
}
}
} catch (Exception ex) {
throw new AssertionError(ex);
}
});
modifyingThreads[i].start();
}
for (Thread thread : getThreads) {
thread.join();
}
for (Thread thread : modifyingThreads) {
thread.join();
}
}
}
} finally {
IOUtils.closeWhileHandlingException(discoverableTransports);
}
}
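// The handshake must reject seed nodes that belong to a cluster with a different cluster name.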
public void testClusterNameIsChecked() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
List<DiscoveryNode> otherClusterKnownNodes = new CopyOnWriteArrayList<>();
Settings settings = Settings.builder().put("cluster.name", "testClusterNameIsChecked").build();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT, threadPool, settings);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT, threadPool,
settings);
MockTransportService otherClusterTransport = startTransport("other_cluster_discoverable_node", otherClusterKnownNodes,
Version.CURRENT, threadPool, Settings.builder().put("cluster.name", "otherCluster").build());
MockTransportService otherClusterDiscoverable = startTransport("other_cluster_discoverable_node", otherClusterKnownNodes,
Version.CURRENT, threadPool, Settings.builder().put("cluster.name", "otherCluster").build())) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
otherClusterKnownNodes.add(otherClusterDiscoverable.getLocalDiscoNode());
otherClusterKnownNodes.add(otherClusterTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
List<DiscoveryNode> discoveryNodes = Arrays.asList(otherClusterTransport.getLocalDiscoNode(), seedNode);
Collections.shuffle(discoveryNodes, random());
updateSeedNodes(connection, discoveryNodes);
assertTrue(service.nodeConnected(seedNode));
for (DiscoveryNode otherClusterNode : otherClusterKnownNodes) {
assertFalse(service.nodeConnected(otherClusterNode));
}
assertFalse(service.nodeConnected(otherClusterTransport.getLocalDiscoNode()));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () ->
updateSeedNodes(connection, Arrays.asList(otherClusterTransport.getLocalDiscoNode())));
assertThat(illegalStateException.getMessage(),
startsWith("handshake failed, mismatched cluster name [Cluster [otherCluster]]" +
" - {other_cluster_discoverable_node}"));
}
}
}
}
}
| server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CancellableThreads;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.iterableWithSize;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
public class RemoteClusterConnectionTests extends ESTestCase {
private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
@Override
public void tearDown() throws Exception {
super.tearDown();
ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
private MockTransportService startTransport(String id, List<DiscoveryNode> knownNodes, Version version) {
return startTransport(id, knownNodes, version, threadPool);
}
public static MockTransportService startTransport(String id, List<DiscoveryNode> knownNodes, Version version, ThreadPool threadPool) {
return startTransport(id, knownNodes, version, threadPool, Settings.EMPTY);
}
public static MockTransportService startTransport(
final String id,
final List<DiscoveryNode> knownNodes,
final Version version,
final ThreadPool threadPool,
final Settings settings) {
boolean success = false;
final Settings s = Settings.builder().put(settings).put("node.name", id).build();
ClusterName clusterName = ClusterName.CLUSTER_NAME_SETTING.get(s);
MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null);
try {
newService.registerRequestHandler(ClusterSearchShardsAction.NAME, ClusterSearchShardsRequest::new, ThreadPool.Names.SAME,
(request, channel) -> {
channel.sendResponse(new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0],
knownNodes.toArray(new DiscoveryNode[0]), Collections.emptyMap()));
});
newService.registerRequestHandler(ClusterStateAction.NAME, ClusterStateRequest::new, ThreadPool.Names.SAME,
(request, channel) -> {
DiscoveryNodes.Builder builder = DiscoveryNodes.builder();
for (DiscoveryNode node : knownNodes) {
builder.add(node);
}
ClusterState build = ClusterState.builder(clusterName).nodes(builder.build()).build();
channel.sendResponse(new ClusterStateResponse(clusterName, build, 0L));
});
newService.start();
newService.acceptIncomingRequests();
success = true;
return newService;
} finally {
if (success == false) {
newService.close();
}
}
}
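// Connecting via a single seed node must also discover and connect to the other nodes that seed knows about.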
public void testDiscoverSingleNode() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
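// Seed nodes on an incompatible version must be skipped while compatible nodes are still discovered and connected.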
public void testDiscoverSingleNodeWithIncompatibleSeed() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService incompatibleTransport = startTransport("incompat_seed_node", knownNodes, Version.fromString("2.0.0"));
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
DiscoveryNode incompatibleSeedNode = incompatibleTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(incompatibleTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(incompatibleSeedNode, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, seedNodes);
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertFalse(service.nodeConnected(incompatibleSeedNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
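// When a connected node goes down, the connection must fail over to another node known from the cluster state.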
public void testNodeDisconnected() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT);
MockTransportService spareTransport = startTransport("spare_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
DiscoveryNode spareNode = spareTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertFalse(service.nodeConnected(spareNode));
knownNodes.add(spareNode);
CountDownLatch latchDisconnect = new CountDownLatch(1);
CountDownLatch latchConnected = new CountDownLatch(1);
service.addConnectionListener(new TransportConnectionListener() {
@Override
public void onNodeDisconnected(DiscoveryNode node) {
if (node.equals(discoverableNode)) {
latchDisconnect.countDown();
}
}
@Override
public void onNodeConnected(DiscoveryNode node) {
if (node.equals(spareNode)) {
latchConnected.countDown();
}
}
});
discoverableTransport.close();
// now make sure we try to connect to other nodes once we get disconnected
assertTrue(latchDisconnect.await(10, TimeUnit.SECONDS));
assertTrue(latchConnected.await(10, TimeUnit.SECONDS));
assertTrue(service.nodeConnected(spareNode));
}
}
}
}
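// The node predicate supplied to the connection must prevent connections to rejected nodes.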
public void testFilterDiscoveredNodes() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
DiscoveryNode rejectedNode = randomBoolean() ? seedNode : discoverableNode;
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
if (rejectedNode.equals(seedNode)) {
assertFalse(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
} else {
assertTrue(service.nodeConnected(seedNode));
assertFalse(service.nodeConnected(discoverableNode));
}
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
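// Synchronously updates the connection's seed nodes, rethrowing any failure delivered to the listener.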
private void updateSeedNodes(RemoteClusterConnection connection, List<DiscoveryNode> seedNodes) throws Exception {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> {
exceptionAtomicReference.set(x);
latch.countDown();
});
connection.updateSeedNodes(seedNodes, listener);
latch.await();
if (exceptionAtomicReference.get() != null) {
throw exceptionAtomicReference.get();
}
}
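// updateSeedNodes must fail when the only seed node runs an incompatible transport version.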
public void testConnectWithIncompatibleTransports() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.fromString("2.0.0"))) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
expectThrows(Exception.class, () -> updateSeedNodes(connection, Arrays.asList(seedNode)));
assertFalse(service.nodeConnected(seedNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
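// The version exposed by the remote connection must match the version of the underlying transport connection.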
public void testRemoteConnectionVersionMatchesTransportConnectionVersion() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
final Version previousVersion = VersionUtils.getPreviousVersion();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, previousVersion);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
assertThat(seedNode, notNullValue());
knownNodes.add(seedNode);
DiscoveryNode oldVersionNode = discoverableTransport.getLocalDiscoNode();
assertThat(oldVersionNode, notNullValue());
knownNodes.add(oldVersionNode);
assertThat(seedNode.getVersion(), not(equalTo(oldVersionNode.getVersion())));
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
final Transport.Connection seedConnection = new Transport.Connection() {
@Override
public DiscoveryNode getNode() {
return seedNode;
}
@Override
public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
throws IOException, TransportException {
// no-op
}
@Override
public void close() throws IOException {
// no-op
}
};
service.addDelegate(seedNode.getAddress(), new MockTransportService.DelegateTransport(service.getOriginalTransport()) {
@Override
public Connection getConnection(DiscoveryNode node) {
if (node == seedNode) {
return seedConnection;
}
return super.getConnection(node);
}
});
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
connection.addConnectedNode(seedNode);
for (DiscoveryNode node : knownNodes) {
final Transport.Connection transportConnection = connection.getConnection(node);
assertThat(transportConnection.getVersion(), equalTo(previousVersion));
}
assertThat(knownNodes, iterableWithSize(2));
}
}
}
}
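// A connect attempt stuck on an unresponsive socket must be cancellable by closing the connection.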
@SuppressForbidden(reason = "calls getLocalHost here but it's fine in this case")
public void testSlowNodeCanBeCanceled() throws IOException, InterruptedException {
try (ServerSocket socket = new MockServerSocket()) {
socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1);
socket.setReuseAddress(true);
DiscoveryNode seedNode = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(),
socket.getLocalPort()), emptyMap(),
emptySet(), Version.CURRENT);
CountDownLatch acceptedLatch = new CountDownLatch(1);
CountDownLatch closeRemote = new CountDownLatch(1);
Thread t = new Thread() {
@Override
public void run() {
try (Socket accept = socket.accept()) {
acceptedLatch.countDown();
closeRemote.await();
} catch (IOException e) {
// that's fine, the socket may have been closed
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
};
t.start();
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
CountDownLatch listenerCalled = new CountDownLatch(1);
AtomicReference<Exception> exceptionReference = new AtomicReference<>();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
ActionListener<Void> listener = ActionListener.wrap(x -> {
listenerCalled.countDown();
fail("expected exception");
}, x -> {
exceptionReference.set(x);
listenerCalled.countDown();
});
connection.updateSeedNodes(Arrays.asList(seedNode), listener);
acceptedLatch.await();
connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on
assertTrue(connection.assertNoRunningConnections());
}
closeRemote.countDown();
listenerCalled.await();
assertNotNull(exceptionReference.get());
expectThrows(CancellableThreads.ExecutionCancelledException.class, () -> {throw exceptionReference.get();});
}
}
}
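// fetchSearchShards must relay the remote cluster's search shards response, including its known nodes.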
public void testFetchShards() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
List<DiscoveryNode> nodes = Collections.singletonList(seedNode);
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
nodes, service, Integer.MAX_VALUE, n -> true)) {
if (randomBoolean()) {
updateSeedNodes(connection, nodes);
}
if (randomBoolean()) {
connection.updateSkipUnavailable(randomBoolean());
}
SearchRequest request = new SearchRequest("test-index");
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
.indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
.routing(request.routing());
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
responseLatch.await();
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
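// Exercises skip_unavailable: with the remote down, fetches fail when the flag is false and return
// an empty response when it is true; once the node is reachable again, fetches succeed as before.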
public void testFetchShardsSkipUnavailable() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedNode);
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Collections.singletonList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
SearchRequest request = new SearchRequest("test-index");
ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
.indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
.routing(request.routing());
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(5, TimeUnit.SECONDS));
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse response = reference.get();
assertTrue(response != ClusterSearchShardsResponse.EMPTY);
assertEquals(knownNodes, Arrays.asList(response.getNodes()));
}
CountDownLatch disconnectedLatch = new CountDownLatch(1);
service.addConnectionListener(new TransportConnectionListener() {
@Override
public void onNodeDisconnected(DiscoveryNode node) {
if (node.equals(seedNode)) {
disconnectedLatch.countDown();
}
}
});
service.addFailToSendNoConnectRule(seedTransport);
if (randomBoolean()) {
connection.updateSkipUnavailable(false);
}
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(1, TimeUnit.SECONDS));
assertNotNull(failReference.get());
assertNull(reference.get());
assertThat(failReference.get(), instanceOf(TransportException.class));
}
connection.updateSkipUnavailable(true);
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(1, TimeUnit.SECONDS));
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse response = reference.get();
assertTrue(response == ClusterSearchShardsResponse.EMPTY);
}
// give the transport service enough time to realize that the node is down and to notify the connection listeners,
// so that RemoteClusterConnection is left with no connected nodes and will retry connecting on the next request
assertTrue(disconnectedLatch.await(1, TimeUnit.SECONDS));
if (randomBoolean()) {
connection.updateSkipUnavailable(false);
}
service.clearAllRules();
//check that we reconnect once the node is back up
{
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
connection.fetchSearchShards(searchShardsRequest,
new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
assertTrue(responseLatch.await(1, TimeUnit.SECONDS));
assertNull(failReference.get());
assertNotNull(reference.get());
ClusterSearchShardsResponse response = reference.get();
assertTrue(response != ClusterSearchShardsResponse.EMPTY);
assertEquals(knownNodes, Arrays.asList(response.getNodes()));
}
}
}
}
}
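// Fires many concurrent updateSeedNodes calls; every listener must complete exactly once, tolerating only rejected executions.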
public void testTriggerUpdatesConcurrently() throws IOException, InterruptedException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService seedTransport1 = startTransport("seed_node_1", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
DiscoveryNode seedNode1 = seedTransport1.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(seedTransport1.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(seedNode1, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
int numThreads = randomIntBetween(4, 10);
Thread[] threads = new Thread[numThreads];
CyclicBarrier barrier = new CyclicBarrier(numThreads);
for (int i = 0; i < threads.length; i++) {
final int numConnectionAttempts = randomIntBetween(10, 200);
threads[i] = new Thread() {
@Override
public void run() {
try {
barrier.await();
CountDownLatch latch = new CountDownLatch(numConnectionAttempts);
for (int i = 0; i < numConnectionAttempts; i++) {
AtomicBoolean executed = new AtomicBoolean(false);
ActionListener<Void> listener = ActionListener.wrap(x -> {
assertTrue(executed.compareAndSet(false, true));
latch.countDown();
}, x -> {
assertTrue(executed.compareAndSet(false, true));
latch.countDown();
if (x instanceof RejectedExecutionException) {
// that's fine
} else {
throw new AssertionError(x);
}
});
connection.updateSeedNodes(seedNodes, listener);
}
latch.await();
} catch (Exception ex) {
throw new AssertionError(ex);
}
}
};
threads[i].start();
}
for (int i = 0; i < threads.length; i++) {
threads[i].join();
}
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(service.nodeConnected(seedNode1));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
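// Closes the connection while connect attempts are in flight; each listener must be invoked exactly once with an acceptable exception.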
public void testCloseWhileConcurrentlyConnecting() throws IOException, InterruptedException, BrokenBarrierException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService seedTransport1 = startTransport("seed_node_1", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode seedNode1 = seedTransport1.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(seedTransport1.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(seedNode1, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
int numThreads = randomIntBetween(4, 10);
Thread[] threads = new Thread[numThreads];
CyclicBarrier barrier = new CyclicBarrier(numThreads + 1);
for (int i = 0; i < threads.length; i++) {
final int numConnectionAttempts = randomIntBetween(10, 100);
threads[i] = new Thread() {
@Override
public void run() {
try {
barrier.await();
CountDownLatch latch = new CountDownLatch(numConnectionAttempts);
for (int i = 0; i < numConnectionAttempts; i++) {
AtomicReference<Exception> executed = new AtomicReference<>();
ActionListener<Void> listener = ActionListener.wrap(
x -> {
if (executed.compareAndSet(null, new RuntimeException())) {
latch.countDown();
} else {
throw new AssertionError("shit's been called twice", executed.get());
}
},
x -> {
if (executed.compareAndSet(null, x)) {
latch.countDown();
} else {
final String message = x.getMessage();
if ((executed.get().getClass() == x.getClass()
&& "operation was cancelled reason [connect handler is closed]".equals(message)
&& message.equals(executed.get().getMessage())) == false) {
// we do cancel the operation, which means that, timing permitting, both the caller
// of a blocking call and the handler can observe the ExecutionCancelledException
// concurrently. Unless that is the case, we fail if we get called more than once!
AssertionError assertionError = new AssertionError("listener was invoked twice", x);
assertionError.addSuppressed(executed.get());
throw assertionError;
}
}
if (x instanceof RejectedExecutionException || x instanceof AlreadyClosedException
|| x instanceof CancellableThreads.ExecutionCancelledException) {
// that's fine
} else {
throw new AssertionError(x);
}
});
connection.updateSeedNodes(seedNodes, listener);
}
latch.await();
} catch (Exception ex) {
throw new AssertionError(ex);
}
}
};
threads[i].start();
}
barrier.await();
connection.close();
}
}
}
}
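// Registers a NodesInfoAction handler that fabricates an HTTP address (ports 80 and up) for each given node.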
private static void installNodeStatsHandler(TransportService service, DiscoveryNode...nodes) {
service.registerRequestHandler(NodesInfoAction.NAME, NodesInfoRequest::new, ThreadPool.Names.SAME, false, false,
(request, channel) -> {
List<NodeInfo> nodeInfos = new ArrayList<>();
int port = 80;
for (DiscoveryNode node : nodes) {
HttpInfo http = new HttpInfo(new BoundTransportAddress(new TransportAddress[]{node.getAddress()},
new TransportAddress(node.getAddress().address().getAddress(), port++)), 100);
nodeInfos.add(new NodeInfo(node.getVersion(), Build.CURRENT, node, null, null, null, null, null, null, http, null,
null, null));
}
channel.sendResponse(new NodesInfoResponse(ClusterName.DEFAULT, nodeInfos, Collections.emptyList()));
});
}
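// getConnectionInfo must reflect the seed nodes, the number of connected nodes, and the configured connection limit.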
public void testGetConnectionInfo() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService transport1 = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService transport2 = startTransport("seed_node_1", knownNodes, Version.CURRENT);
MockTransportService transport3 = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode node1 = transport1.getLocalDiscoNode();
DiscoveryNode node2 = transport3.getLocalDiscoNode();
DiscoveryNode node3 = transport2.getLocalDiscoNode();
knownNodes.add(transport1.getLocalDiscoNode());
knownNodes.add(transport3.getLocalDiscoNode());
knownNodes.add(transport2.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
List<DiscoveryNode> seedNodes = Arrays.asList(node3, node1, node2);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
int maxNumConnections = randomIntBetween(1, 5);
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, maxNumConnections, n -> true)) {
// test no nodes connected
RemoteConnectionInfo remoteConnectionInfo = assertSerialization(getRemoteConnectionInfo(connection));
assertNotNull(remoteConnectionInfo);
assertEquals(0, remoteConnectionInfo.numNodesConnected);
assertEquals(0, remoteConnectionInfo.seedNodes.size());
assertEquals(0, remoteConnectionInfo.httpAddresses.size());
assertEquals(maxNumConnections, remoteConnectionInfo.connectionsPerCluster);
assertEquals("test-cluster", remoteConnectionInfo.clusterAlias);
updateSeedNodes(connection, seedNodes);
expectThrows(RemoteTransportException.class, () -> getRemoteConnectionInfo(connection));
for (MockTransportService s : Arrays.asList(transport1, transport2, transport3)) {
installNodeStatsHandler(s, node1, node2, node3);
}
remoteConnectionInfo = getRemoteConnectionInfo(connection);
remoteConnectionInfo = assertSerialization(remoteConnectionInfo);
assertNotNull(remoteConnectionInfo);
assertEquals(connection.getNumNodesConnected(), remoteConnectionInfo.numNodesConnected);
assertEquals(Math.min(3, maxNumConnections), connection.getNumNodesConnected());
assertEquals(3, remoteConnectionInfo.seedNodes.size());
assertEquals(remoteConnectionInfo.httpAddresses.size(), Math.min(3, maxNumConnections));
assertEquals(maxNumConnections, remoteConnectionInfo.connectionsPerCluster);
assertEquals("test-cluster", remoteConnectionInfo.clusterAlias);
for (TransportAddress address : remoteConnectionInfo.httpAddresses) {
assertTrue("port range mismatch: " + address.getPort(), address.getPort() >= 80 && address.getPort() <= 90);
}
}
}
}
}
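// Equality of RemoteConnectionInfo must consider every field; each variation below must differ from the baseline.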
public void testRemoteConnectionInfo() throws IOException {
RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats);
RemoteConnectionInfo stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 4, TimeValue.timeValueMinutes(30), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster_1",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 15)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 87)),
4, 3, TimeValue.timeValueMinutes(30), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 3, TimeValue.timeValueMinutes(325), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
stats1 = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
5, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
}
private static RemoteConnectionInfo assertSerialization(RemoteConnectionInfo info) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(Version.CURRENT);
info.writeTo(out);
StreamInput in = out.bytes().streamInput();
in.setVersion(Version.CURRENT);
RemoteConnectionInfo remoteConnectionInfo = new RemoteConnectionInfo(in);
assertEquals(info, remoteConnectionInfo);
assertEquals(info.hashCode(), remoteConnectionInfo.hashCode());
return randomBoolean() ? info : remoteConnectionInfo;
}
}
public void testRemoteConnectionInfoBwComp() throws IOException {
final Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_6_5, Version.V_6_0_0);
RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster",
Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 80)),
4, 4, new TimeValue(30, TimeUnit.MINUTES), false);
final byte[] data;
if (version.before(Version.V_6_0_0_alpha1)) {
String encoded = "AQABBAAAAAAHMC4wLjAuMAAAAAEBAAEEAAAAAAcwLjAuMC4wAAAAUAQ8BAQMdGVzdF9jbHVzdGVyAAAAAAAAAA==";
data = Base64.getDecoder().decode(encoded);
} else {
String encoded = "AQQAAAAABzAuMC4wLjAAAAABAQQAAAAABzAuMC4wLjAAAABQBDwEBAx0ZXN0X2NsdXN0ZXIAAAAAAAAAAAAAAA==";
data = Base64.getDecoder().decode(encoded);
}
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
RemoteConnectionInfo deserialized = new RemoteConnectionInfo(in);
assertEquals(expected, deserialized);
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(version);
deserialized.writeTo(out);
try (StreamInput in2 = StreamInput.wrap(out.bytes().toBytesRef().bytes)) {
in2.setVersion(version);
RemoteConnectionInfo deserialized2 = new RemoteConnectionInfo(in2);
assertEquals(expected, deserialized2);
}
}
}
}
public void testRenderConnectionInfoXContent() throws IOException {
RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,1)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,80)),
4, 3, TimeValue.timeValueMinutes(30), true);
stats = assertSerialization(stats);
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
stats.toXContent(builder, null);
builder.endObject();
assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"http_addresses\":[\"0.0.0.0:80\"],\"connected\":true," +
"\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," +
"\"skip_unavailable\":true}}", builder.string());
stats = new RemoteConnectionInfo("some_other_cluster",
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,1), new TransportAddress(TransportAddress.META_ADDRESS,2)),
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,80), new TransportAddress(TransportAddress.META_ADDRESS,81)),
2, 0, TimeValue.timeValueSeconds(30), false);
stats = assertSerialization(stats);
builder = XContentFactory.jsonBuilder();
builder.startObject();
stats.toXContent(builder, null);
builder.endObject();
assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"],\"http_addresses\":[\"0.0.0.0:80\",\"0.0.0.0:81\"],"
+ "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," +
"\"skip_unavailable\":false}}", builder.string());
}
private RemoteConnectionInfo getRemoteConnectionInfo(RemoteClusterConnection connection) throws Exception {
AtomicReference<RemoteConnectionInfo> statsRef = new AtomicReference<>();
AtomicReference<Exception> exceptionRef = new AtomicReference<>();
CountDownLatch latch = new CountDownLatch(1);
connection.getConnectionInfo(new ActionListener<RemoteConnectionInfo>() {
@Override
public void onResponse(RemoteConnectionInfo remoteConnectionInfo) {
statsRef.set(remoteConnectionInfo);
latch.countDown();
}
@Override
public void onFailure(Exception e) {
exceptionRef.set(e);
latch.countDown();
}
});
latch.await();
if (exceptionRef.get() != null) {
throw exceptionRef.get();
}
return statsRef.get();
}
public void testEnsureConnected() throws IOException, InterruptedException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
assertFalse(service.nodeConnected(seedNode));
assertFalse(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
CountDownLatch latch = new CountDownLatch(1);
connection.ensureConnected(new LatchedActionListener<>(new ActionListener<Void>() {
@Override
public void onResponse(Void aVoid) {
}
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
}, latch));
latch.await();
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
// execute again; we are already connected, so this should complete without reconnecting
connection.ensureConnected(new LatchedActionListener<>(new ActionListener<Void>() {
@Override
public void onResponse(Void aVoid) {
}
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
}, latch));
latch.await();
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
public void testCollectNodes() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) {
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
if (randomBoolean()) {
updateSeedNodes(connection, Arrays.asList(seedNode));
}
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<Function<String, DiscoveryNode>> reference = new AtomicReference<>();
AtomicReference<Exception> failReference = new AtomicReference<>();
ActionListener<Function<String, DiscoveryNode>> shardsListener = ActionListener.wrap(
x -> {
reference.set(x);
responseLatch.countDown();
},
x -> {
failReference.set(x);
responseLatch.countDown();
});
connection.collectNodes(shardsListener);
responseLatch.await();
assertNull(failReference.get());
assertNotNull(reference.get());
Function<String, DiscoveryNode> function = reference.get();
assertEquals(seedNode, function.apply(seedNode.getId()));
assertNull(function.apply(seedNode.getId() + "foo"));
assertTrue(connection.assertNoRunningConnections());
}
}
}
}
public void testConnectedNodesConcurrentAccess() throws IOException, InterruptedException {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
List<MockTransportService> discoverableTransports = new CopyOnWriteArrayList<>();
try {
final int numDiscoverableNodes = randomIntBetween(5, 20);
List<DiscoveryNode> discoverableNodes = new ArrayList<>(numDiscoverableNodes);
for (int i = 0; i < numDiscoverableNodes; i++) {
MockTransportService transportService = startTransport("discoverable_node" + i, knownNodes, Version.CURRENT);
discoverableNodes.add(transportService.getLocalDiscoNode());
discoverableTransports.add(transportService);
}
List<DiscoveryNode> seedNodes = randomSubsetOf(discoverableNodes);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
seedNodes, service, Integer.MAX_VALUE, n -> true)) {
final int numGetThreads = randomIntBetween(4, 10);
final Thread[] getThreads = new Thread[numGetThreads];
final int numModifyingThreads = randomIntBetween(4, 10);
final Thread[] modifyingThreads = new Thread[numModifyingThreads];
CyclicBarrier barrier = new CyclicBarrier(numGetThreads + numModifyingThreads);
for (int i = 0; i < getThreads.length; i++) {
final int numGetCalls = randomIntBetween(1000, 10000);
getThreads[i] = new Thread(() -> {
try {
barrier.await();
for (int j = 0; j < numGetCalls; j++) {
try {
DiscoveryNode node = connection.getConnectedNode();
assertNotNull(node);
} catch (IllegalStateException e) {
if (e.getMessage().startsWith("No node available for cluster:") == false) {
throw e;
}
}
}
} catch (Exception ex) {
throw new AssertionError(ex);
}
});
getThreads[i].start();
}
final AtomicInteger counter = new AtomicInteger();
for (int i = 0; i < modifyingThreads.length; i++) {
final int numDisconnects = randomIntBetween(5, 10);
modifyingThreads[i] = new Thread(() -> {
try {
barrier.await();
for (int j = 0; j < numDisconnects; j++) {
if (randomBoolean()) {
MockTransportService transportService =
startTransport("discoverable_node_added" + counter.incrementAndGet(), knownNodes,
Version.CURRENT);
discoverableTransports.add(transportService);
connection.addConnectedNode(transportService.getLocalDiscoNode());
} else {
DiscoveryNode node = randomFrom(discoverableNodes);
connection.onNodeDisconnected(node);
}
}
} catch (Exception ex) {
throw new AssertionError(ex);
}
});
modifyingThreads[i].start();
}
for (Thread thread : getThreads) {
thread.join();
}
for (Thread thread : modifyingThreads) {
thread.join();
}
}
}
} finally {
IOUtils.closeWhileHandlingException(discoverableTransports);
}
}
public void testClusterNameIsChecked() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
List<DiscoveryNode> otherClusterKnownNodes = new CopyOnWriteArrayList<>();
Settings settings = Settings.builder().put("cluster.name", "testClusterNameIsChecked").build();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT, threadPool, settings);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT, threadPool,
settings);
MockTransportService otherClusterTransport = startTransport("other_cluster_discoverable_node", otherClusterKnownNodes,
Version.CURRENT, threadPool, Settings.builder().put("cluster.name", "otherCluster").build());
MockTransportService otherClusterDiscoverable = startTransport("other_cluster_discoverable_node", otherClusterKnownNodes,
Version.CURRENT, threadPool, Settings.builder().put("cluster.name", "otherCluster").build())) {
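// Two clusters are simulated here: the local "testClusterNameIsChecked" cluster (seed plus
// discoverable node) and "otherCluster". The remote connection must only ever connect to
// nodes whose handshake reports the seed's cluster name.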
DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode();
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
otherClusterKnownNodes.add(otherClusterDiscoverable.getLocalDiscoNode());
otherClusterKnownNodes.add(otherClusterTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
updateSeedNodes(connection, Arrays.asList(seedNode));
assertTrue(service.nodeConnected(seedNode));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
List<DiscoveryNode> discoveryNodes = Arrays.asList(otherClusterTransport.getLocalDiscoNode(), seedNode);
Collections.shuffle(discoveryNodes, random());
updateSeedNodes(connection, discoveryNodes);
assertTrue(service.nodeConnected(seedNode));
for (DiscoveryNode otherClusterNode : otherClusterKnownNodes) {
assertFalse(service.nodeConnected(otherClusterNode));
}
assertFalse(service.nodeConnected(otherClusterTransport.getLocalDiscoNode()));
assertTrue(service.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () ->
updateSeedNodes(connection, Arrays.asList(otherClusterTransport.getLocalDiscoNode())));
assertThat(illegalStateException.getMessage(),
startsWith("handshake failed, mismatched cluster name [Cluster [otherCluster]]" +
" - {other_cluster_discoverable_node}"));
}
}
}
}
}
| [TEST] AwaitsFix testTriggerUpdatesConcurrently
| server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java | [TEST] AwaitsFix testTriggerUpdatesConcurrently | <ide><path>erver/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
<ide> }
<ide> }
<ide>
<add> @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/28695")
<ide> public void testTriggerUpdatesConcurrently() throws IOException, InterruptedException {
<ide> List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
<ide> try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); |
|
Java | apache-2.0 | 6df278422eb737ae02969e1f471379551d9b1177 | 0 | firebase/firebase-android-sdk
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.firebase.ml.modeldownloader.internal;
import android.content.Context;
import android.content.pm.PackageManager;
import android.text.TextUtils;
import android.util.JsonReader;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.google.android.gms.common.util.AndroidUtilsLight;
import com.google.android.gms.common.util.Hex;
import com.google.android.gms.common.util.VisibleForTesting;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.google.firebase.FirebaseApp;
import com.google.firebase.installations.FirebaseInstallationsApi;
import com.google.firebase.installations.InstallationTokenResult;
import com.google.firebase.ml.modeldownloader.CustomModel;
import com.google.firebase.ml.modeldownloader.FirebaseMlException;
import com.google.firebase.ml.modeldownloader.FirebaseMlException.Code;
import com.google.firebase.ml.modeldownloader.internal.FirebaseMlLogEvent.ModelDownloadLogEvent.ErrorCode;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.zip.GZIPInputStream;
import org.json.JSONObject;
/**
 * Calls the Download Service API and returns the information related to the current status of a
 * custom model. If a new model is available, returns 200 and the new model details such as the
 * download url; if the same model is available, returns 304 (not modified); otherwise returns an
 * error.
*
* @hide
*/
public class CustomModelDownloadService {
private static final String TAG = "CustomModelDownloadSer";
private static final int CONNECTION_TIME_OUT_MS = 2000; // 2 seconds.
private static final Charset UTF_8 = Charset.forName("UTF-8");
private static final String ISO_DATE_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
private static final String ACCEPT_ENCODING_HEADER_KEY = "Accept-Encoding";
private static final String CONTENT_ENCODING_HEADER_KEY = "Content-Encoding";
private static final String GZIP_CONTENT_ENCODING = "gzip";
private static final String FIREBASE_DOWNLOAD_HOST = "https://firebaseml.googleapis.com";
private static final String ERROR_RESPONSE_ERROR = "error";
private static final String ERROR_RESPONSE_MESSAGE = "message";
@VisibleForTesting static final String ETAG_HEADER = "etag";
@VisibleForTesting static final String CONTENT_TYPE = "Content-Type";
@VisibleForTesting static final String APPLICATION_JSON = "application/json; charset=UTF-8";
@VisibleForTesting static final String IF_NONE_MATCH_HEADER_KEY = "If-None-Match";
@VisibleForTesting
static final String INSTALLATIONS_AUTH_TOKEN_HEADER = "X-Goog-Firebase-Installations-Auth";
@VisibleForTesting static final String API_KEY_HEADER = "x-goog-api-key";
@VisibleForTesting static final String X_ANDROID_PACKAGE_HEADER = "X-Android-Package";
@VisibleForTesting static final String X_ANDROID_CERT_HEADER = "X-Android-Cert";
@VisibleForTesting
static final String DOWNLOAD_MODEL_REGEX = "%s/v1beta2/projects/%s/models/%s:download";
private final ExecutorService executorService;
private final FirebaseInstallationsApi firebaseInstallations;
private final FirebaseMlLogger eventLogger;
private final String apiKey;
@Nullable private final String fingerprintHashForPackage;
private final Context context;
private String downloadHost = FIREBASE_DOWNLOAD_HOST;
public CustomModelDownloadService(
FirebaseApp firebaseApp, FirebaseInstallationsApi installationsApi) {
context = firebaseApp.getApplicationContext();
firebaseInstallations = installationsApi;
apiKey = firebaseApp.getOptions().getApiKey();
fingerprintHashForPackage = getFingerprintHashForPackage(context);
executorService = Executors.newCachedThreadPool();
this.eventLogger = FirebaseMlLogger.getInstance();
}
@VisibleForTesting
CustomModelDownloadService(
Context context,
FirebaseInstallationsApi firebaseInstallations,
ExecutorService executorService,
String apiKey,
String fingerprintHashForPackage,
String downloadHost,
FirebaseMlLogger eventLogger) {
this.context = context;
this.firebaseInstallations = firebaseInstallations;
this.executorService = executorService;
this.apiKey = apiKey;
this.fingerprintHashForPackage = fingerprintHashForPackage;
this.downloadHost = downloadHost;
this.eventLogger = eventLogger;
}
/**
* Calls the Firebase ML Download Service to retrieve the download url for the modelName. Use when
* a download attempt fails due to an expired timestamp.
*
* @param projectNumber - firebase project number
* @param modelName - model name
* @return - updated model with new download url and expiry time
*/
@NonNull
public Task<CustomModel> getNewDownloadUrlWithExpiry(String projectNumber, String modelName) {
return getCustomModelDetails(projectNumber, modelName, null);
}
/**
 * Gets the download details for the custom model; returns a task with a null result if the
 * current model is already the latest.
*
* @param projectNumber - firebase project number
* @param modelName - model name
* @param modelHash - current model hash - input empty string if no current download exists or to
* force retrieval of a new download url
* @return The download details for the model or null if the current model hash matches the latest
* model.
*/
@NonNull
public Task<CustomModel> getCustomModelDetails(
String projectNumber, String modelName, String modelHash) {
try {
if (TextUtils.isEmpty(modelName))
throw new FirebaseMlException(
"Error cannot retrieve model from reading an empty modelName",
FirebaseMlException.INVALID_ARGUMENT);
URL url =
new URL(String.format(DOWNLOAD_MODEL_REGEX, downloadHost, projectNumber, modelName));
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setConnectTimeout(CONNECTION_TIME_OUT_MS);
connection.setRequestProperty(ACCEPT_ENCODING_HEADER_KEY, GZIP_CONTENT_ENCODING);
connection.setRequestProperty(CONTENT_TYPE, APPLICATION_JSON);
if (modelHash != null && !modelHash.isEmpty()) {
connection.setRequestProperty(IF_NONE_MATCH_HEADER_KEY, modelHash);
}
Task<InstallationTokenResult> installationAuthTokenTask =
firebaseInstallations.getToken(false);
return installationAuthTokenTask.continueWithTask(
executorService,
(CustomModelTask) -> {
if (!installationAuthTokenTask.isSuccessful()) {
ErrorCode errorCode = ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED;
String errorMessage = "Failed to get model due to authentication error";
int exceptionCode = FirebaseMlException.UNAUTHENTICATED;
if (installationAuthTokenTask.getException() != null
&& (installationAuthTokenTask.getException() instanceof UnknownHostException
|| installationAuthTokenTask.getException().getCause()
instanceof UnknownHostException)) {
errorCode = ErrorCode.NO_NETWORK_CONNECTION;
errorMessage = "Failed to retrieve model info due to no internet connection.";
exceptionCode = FirebaseMlException.NO_NETWORK_CONNECTION;
}
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash != null ? modelHash : "", 0, 0L),
false,
errorCode.getValue());
return Tasks.forException(new FirebaseMlException(errorMessage, exceptionCode));
}
connection.setRequestProperty(
INSTALLATIONS_AUTH_TOKEN_HEADER, installationAuthTokenTask.getResult().getToken());
connection.setRequestProperty(API_KEY_HEADER, apiKey);
// Headers required for Android API Key Restrictions.
connection.setRequestProperty(X_ANDROID_PACKAGE_HEADER, context.getPackageName());
if (fingerprintHashForPackage != null) {
connection.setRequestProperty(X_ANDROID_CERT_HEADER, fingerprintHashForPackage);
}
return fetchDownloadDetails(modelName, connection);
});
} catch (IOException e) {
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash, 0, 0L),
false,
ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED.getValue());
return Tasks.forException(
new FirebaseMlException(
"Error reading custom model from download service: " + e.getMessage(),
FirebaseMlException.INVALID_ARGUMENT));
} catch (FirebaseMlException e) {
return Tasks.forException(e);
}
}
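// A minimal caller-side sketch (hypothetical, for illustration only -- listener shapes and
// CustomModel accessors are assumed): per the class contract, 200 resolves the task with a
// CustomModel, 304 resolves it with null, and other statuses fail it with a FirebaseMlException.
//
//   service.getCustomModelDetails(projectNumber, "my-model", localHash)
//       .addOnSuccessListener(model -> {
//         if (model == null) {
//           // 304 - the locally cached model is already current.
//         } else {
//           // 200 - model carries the fresh download url and its expiry time.
//         }
//       })
//       .addOnFailureListener(e -> { /* FirebaseMlException with a mapped code */ });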
@VisibleForTesting
static long parseTokenExpirationTimestamp(String expiresIn) {
if (expiresIn == null || expiresIn.length() == 0) {
return 0;
}
try {
SimpleDateFormat iso8601Format = new SimpleDateFormat(ISO_DATE_PATTERN, Locale.US);
iso8601Format.setTimeZone(TimeZone.getTimeZone("UTC"));
Date date = iso8601Format.parse(expiresIn);
return date.getTime();
} catch (ParseException pe) {
// log error and maybe throw an error
Log.w(TAG, "unable to parse datetime:" + expiresIn, pe);
return 0;
}
}
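// For example, parseTokenExpirationTimestamp("1970-01-01T00:00:01.000Z") returns 1000L;
// null, empty, or unparseable input falls back to 0.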
private Task<CustomModel> fetchDownloadDetails(String modelName, HttpURLConnection connection) {
try {
connection.connect();
int httpResponseCode = connection.getResponseCode();
String errorMessage = getErrorStream(connection);
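// Map each HTTP status onto the API contract: 200 parses the model details from the body,
// 304 resolves the task with null (the cached model is current), and every other status is
// logged and surfaced as a FirebaseMlException with a matching code.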
switch (httpResponseCode) {
case HttpURLConnection.HTTP_OK:
return readCustomModelResponse(modelName, connection);
case HttpURLConnection.HTTP_NOT_MODIFIED:
return Tasks.forResult(null);
case HttpURLConnection.HTTP_NOT_FOUND:
return Tasks.forException(
new FirebaseMlException(
String.format(Locale.getDefault(), "No model found with name: %s", modelName),
FirebaseMlException.NOT_FOUND));
case HttpURLConnection.HTTP_BAD_REQUEST:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Bad http request for model (%s): %s",
modelName,
errorMessage),
FirebaseMlException.INVALID_ARGUMENT);
case 429: // too many requests
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Too many requests to server please wait before trying again: %s",
errorMessage),
FirebaseMlException.RESOURCE_EXHAUSTED);
case HttpURLConnection.HTTP_INTERNAL_ERROR:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Server issue while fetching model (%s): %s",
modelName,
errorMessage),
FirebaseMlException.INTERNAL);
case HttpURLConnection.HTTP_UNAUTHORIZED:
case HttpURLConnection.HTTP_FORBIDDEN:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Permission error while fetching model (%s): %s",
modelName,
errorMessage),
FirebaseMlException.PERMISSION_DENIED);
default:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Failed to connect to Firebase ML download server: %s",
errorMessage),
FirebaseMlException.INTERNAL);
}
} catch (IOException e) {
ErrorCode errorCode = ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED;
String errorMessage = "Failed to get model URL";
int exceptionCode = FirebaseMlException.INTERNAL;
if (e instanceof UnknownHostException) {
errorCode = ErrorCode.NO_NETWORK_CONNECTION;
errorMessage = "Failed to retrieve model info due to no internet connection.";
exceptionCode = FirebaseMlException.NO_NETWORK_CONNECTION;
}
eventLogger.logModelInfoRetrieverFailure(new CustomModel(modelName, "", 0, 0), errorCode);
return Tasks.forException(new FirebaseMlException(errorMessage, exceptionCode));
}
}
private Task<CustomModel> setAndLogException(
String modelName, int httpResponseCode, String errorMessage, @Code int invalidArgument) {
eventLogger.logModelInfoRetrieverFailure(
new CustomModel(modelName, "", 0, 0),
ErrorCode.MODEL_INFO_DOWNLOAD_UNSUCCESSFUL_HTTP_STATUS,
httpResponseCode);
return Tasks.forException(new FirebaseMlException(errorMessage, invalidArgument));
}
private Task<CustomModel> readCustomModelResponse(
@NonNull String modelName, HttpURLConnection connection) throws IOException {
String encodingKey = connection.getHeaderField(CONTENT_ENCODING_HEADER_KEY);
InputStream inputStream = maybeUnGzip(connection.getInputStream(), encodingKey);
JsonReader reader = new JsonReader(new InputStreamReader(inputStream, UTF_8));
long fileSize = 0L;
String downloadUrl = "";
long expireTime = 0L;
String modelHash = maybeUnGzipHeader(connection.getHeaderField(ETAG_HEADER), encodingKey);
if (modelHash == null || modelHash.isEmpty()) {
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash, 0, 0L),
false,
ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED.getValue());
return Tasks.forException(
new FirebaseMlException(
"Model hash not set in download response.", FirebaseMlException.INTERNAL));
}
// JsonReader.peek will sometimes throw AssertionErrors in Android 8.0 and above. See
// b/79920590 for details.
reader.beginObject();
while (reader.hasNext()) {
String name = reader.nextName();
if (name.equals("downloadUri")) {
downloadUrl = reader.nextString();
} else if (name.equals("expireTime")) {
expireTime = parseTokenExpirationTimestamp(reader.nextString());
} else if (name.equals("sizeBytes")) {
fileSize = reader.nextLong();
} else if (name.equals("modelFormat")) {
String modelFormat = reader.nextString();
if (modelFormat.equals("MODEL_FORMAT_UNSPECIFIED")) {
// log error but continue... this shouldn't happen
Log.w(TAG, "Ignoring unexpected model type: " + modelFormat);
}
} else {
reader.skipValue();
}
}
reader.endObject();
reader.close();
inputStream.close();
if (!downloadUrl.isEmpty() && expireTime > 0L) {
CustomModel model = new CustomModel(modelName, modelHash, fileSize, downloadUrl, expireTime);
eventLogger.logModelInfoRetrieverSuccess(model);
return Tasks.forResult(model);
}
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash, 0, 0L),
false,
ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED.getValue());
return Tasks.forException(
new FirebaseMlException(
"Model info could not be extracted from download response.",
FirebaseMlException.INTERNAL));
}
private static InputStream maybeUnGzip(InputStream input, String contentEncoding)
throws IOException {
if (GZIP_CONTENT_ENCODING.equals(contentEncoding)) {
return new GZIPInputStream(input);
}
return input;
}
private static String maybeUnGzipHeader(String header, String contentEncoding) {
// fix to remove --gzip when content header is gzip for mockwire
if (GZIP_CONTENT_ENCODING.equals(contentEncoding) && header.endsWith("--gzip")) {
return header.substring(0, header.lastIndexOf("--gzip"));
}
return header;
}
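// e.g. an etag of "abc123--gzip" on a gzip-encoded response is normalised to "abc123".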
private String getErrorStreamString(HttpURLConnection connection) {
InputStream errorStream = connection.getErrorStream();
if (errorStream == null) {
return null;
}
String encodingKey = connection.getHeaderField(CONTENT_ENCODING_HEADER_KEY);
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(maybeUnGzip(errorStream, encodingKey), UTF_8))) {
StringBuilder response = new StringBuilder();
for (String input = reader.readLine(); input != null; input = reader.readLine()) {
response.append(input).append('\n');
}
return response.toString();
} catch (IOException ex) {
Log.d(TAG, "Error extracting errorStream from failed connection attempt", ex);
return null;
}
}
private String getErrorStream(HttpURLConnection connection) {
String errorStreamString = getErrorStreamString(connection);
if (errorStreamString != null) {
try {
JSONObject responseData = new JSONObject(errorStreamString);
JSONObject responseError = responseData.getJSONObject(ERROR_RESPONSE_ERROR);
if (responseError != null && responseError.has(ERROR_RESPONSE_MESSAGE)) {
errorStreamString = responseError.getString(ERROR_RESPONSE_MESSAGE);
return String.format(
Locale.ENGLISH,
"HTTP response from Firebase Download Service: [%d - %s: %s]",
connection.getResponseCode(),
connection.getResponseMessage(),
errorStreamString);
}
} catch (Exception ex) {
Log.d(TAG, "Error extracting errorStream from failed connection attempt", ex);
}
}
return errorStreamString;
}
/** Gets the Android package's SHA-1 fingerprint. */
@Nullable
private static String getFingerprintHashForPackage(Context context) {
byte[] hash;
try {
hash = AndroidUtilsLight.getPackageCertificateHashBytes(context, context.getPackageName());
if (hash == null) {
Log.e(TAG, "Could not get fingerprint hash for package: " + context.getPackageName());
return null;
} else {
return Hex.bytesToStringUppercase(hash, /* zeroTerminated= */ false);
}
} catch (PackageManager.NameNotFoundException e) {
Log.e(TAG, "No such package: " + context.getPackageName(), e);
return null;
}
}
}
| firebase-ml-modeldownloader/src/main/java/com/google/firebase/ml/modeldownloader/internal/CustomModelDownloadService.java | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.firebase.ml.modeldownloader.internal;
import android.content.Context;
import android.content.pm.PackageManager;
import android.util.JsonReader;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.google.android.gms.common.util.AndroidUtilsLight;
import com.google.android.gms.common.util.Hex;
import com.google.android.gms.common.util.VisibleForTesting;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.google.firebase.FirebaseApp;
import com.google.firebase.installations.FirebaseInstallationsApi;
import com.google.firebase.installations.InstallationTokenResult;
import com.google.firebase.ml.modeldownloader.CustomModel;
import com.google.firebase.ml.modeldownloader.FirebaseMlException;
import com.google.firebase.ml.modeldownloader.FirebaseMlException.Code;
import com.google.firebase.ml.modeldownloader.internal.FirebaseMlLogEvent.ModelDownloadLogEvent.ErrorCode;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.zip.GZIPInputStream;
import org.json.JSONObject;
/**
 * Calls the Download Service API and returns the information related to the current status of a
 * custom model. If a new model is available, returns 200 and the new model details such as the
 * download url; if the same model is available, returns 304 (not modified); otherwise returns an
 * error.
*
* @hide
*/
public class CustomModelDownloadService {
private static final String TAG = "CustomModelDownloadSer";
private static final int CONNECTION_TIME_OUT_MS = 2000; // 2 seconds.
private static final Charset UTF_8 = Charset.forName("UTF-8");
private static final String ISO_DATE_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
private static final String ACCEPT_ENCODING_HEADER_KEY = "Accept-Encoding";
private static final String CONTENT_ENCODING_HEADER_KEY = "Content-Encoding";
private static final String GZIP_CONTENT_ENCODING = "gzip";
private static final String FIREBASE_DOWNLOAD_HOST = "https://firebaseml.googleapis.com";
private static final String ERROR_RESPONSE_ERROR = "error";
private static final String ERROR_RESPONSE_MESSAGE = "message";
@VisibleForTesting static final String ETAG_HEADER = "etag";
@VisibleForTesting static final String CONTENT_TYPE = "Content-Type";
@VisibleForTesting static final String APPLICATION_JSON = "application/json; charset=UTF-8";
@VisibleForTesting static final String IF_NONE_MATCH_HEADER_KEY = "If-None-Match";
@VisibleForTesting
static final String INSTALLATIONS_AUTH_TOKEN_HEADER = "X-Goog-Firebase-Installations-Auth";
@VisibleForTesting static final String API_KEY_HEADER = "x-goog-api-key";
@VisibleForTesting static final String X_ANDROID_PACKAGE_HEADER = "X-Android-Package";
@VisibleForTesting static final String X_ANDROID_CERT_HEADER = "X-Android-Cert";
@VisibleForTesting
static final String DOWNLOAD_MODEL_REGEX = "%s/v1beta2/projects/%s/models/%s:download";
private final ExecutorService executorService;
private final FirebaseInstallationsApi firebaseInstallations;
private final FirebaseMlLogger eventLogger;
private final String apiKey;
@Nullable private final String fingerprintHashForPackage;
private final Context context;
private String downloadHost = FIREBASE_DOWNLOAD_HOST;
public CustomModelDownloadService(
FirebaseApp firebaseApp, FirebaseInstallationsApi installationsApi) {
context = firebaseApp.getApplicationContext();
firebaseInstallations = installationsApi;
apiKey = firebaseApp.getOptions().getApiKey();
fingerprintHashForPackage = getFingerprintHashForPackage(context);
executorService = Executors.newCachedThreadPool();
this.eventLogger = FirebaseMlLogger.getInstance();
}
@VisibleForTesting
CustomModelDownloadService(
Context context,
FirebaseInstallationsApi firebaseInstallations,
ExecutorService executorService,
String apiKey,
String fingerprintHashForPackage,
String downloadHost,
FirebaseMlLogger eventLogger) {
this.context = context;
this.firebaseInstallations = firebaseInstallations;
this.executorService = executorService;
this.apiKey = apiKey;
this.fingerprintHashForPackage = fingerprintHashForPackage;
this.downloadHost = downloadHost;
this.eventLogger = eventLogger;
}
/**
* Calls the Firebase ML Download Service to retrieve the download url for the modelName. Use when
* a download attempt fails due to an expired timestamp.
*
* @param projectNumber - firebase project number
* @param modelName - model name
* @return - updated model with new download url and expiry time
*/
@NonNull
public Task<CustomModel> getNewDownloadUrlWithExpiry(String projectNumber, String modelName) {
return getCustomModelDetails(projectNumber, modelName, null);
}
/**
 * Gets the download details for the custom model; returns a task with a null result if the
 * current model is already the latest.
*
* @param projectNumber - firebase project number
* @param modelName - model name
* @param modelHash - current model hash - input empty string if no current download exists or to
* force retrieval of a new download url
* @return The download details for the model or null if the current model hash matches the latest
* model.
*/
@NonNull
public Task<CustomModel> getCustomModelDetails(
String projectNumber, String modelName, String modelHash) {
try {
URL url =
new URL(String.format(DOWNLOAD_MODEL_REGEX, downloadHost, projectNumber, modelName));
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setConnectTimeout(CONNECTION_TIME_OUT_MS);
connection.setRequestProperty(ACCEPT_ENCODING_HEADER_KEY, GZIP_CONTENT_ENCODING);
connection.setRequestProperty(CONTENT_TYPE, APPLICATION_JSON);
if (modelHash != null && !modelHash.isEmpty()) {
connection.setRequestProperty(IF_NONE_MATCH_HEADER_KEY, modelHash);
}
Task<InstallationTokenResult> installationAuthTokenTask =
firebaseInstallations.getToken(false);
return installationAuthTokenTask.continueWithTask(
executorService,
(CustomModelTask) -> {
if (!installationAuthTokenTask.isSuccessful()) {
ErrorCode errorCode = ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED;
String errorMessage = "Failed to get model due to authentication error";
int exceptionCode = FirebaseMlException.UNAUTHENTICATED;
if (installationAuthTokenTask.getException() != null
&& (installationAuthTokenTask.getException() instanceof UnknownHostException
|| installationAuthTokenTask.getException().getCause()
instanceof UnknownHostException)) {
errorCode = ErrorCode.NO_NETWORK_CONNECTION;
errorMessage = "Failed to retrieve model info due to no internet connection.";
exceptionCode = FirebaseMlException.NO_NETWORK_CONNECTION;
}
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash != null ? modelHash : "", 0, 0L),
false,
errorCode.getValue());
return Tasks.forException(new FirebaseMlException(errorMessage, exceptionCode));
}
connection.setRequestProperty(
INSTALLATIONS_AUTH_TOKEN_HEADER, installationAuthTokenTask.getResult().getToken());
connection.setRequestProperty(API_KEY_HEADER, apiKey);
// Headers required for Android API Key Restrictions.
connection.setRequestProperty(X_ANDROID_PACKAGE_HEADER, context.getPackageName());
if (fingerprintHashForPackage != null) {
connection.setRequestProperty(X_ANDROID_CERT_HEADER, fingerprintHashForPackage);
}
return fetchDownloadDetails(modelName, connection);
});
} catch (IOException e) {
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash, 0, 0L),
false,
ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED.getValue());
return Tasks.forException(
new FirebaseMlException(
"Error reading custom model from download service: " + e.getMessage(),
FirebaseMlException.INVALID_ARGUMENT));
}
}
@VisibleForTesting
static long parseTokenExpirationTimestamp(String expiresIn) {
if (expiresIn == null || expiresIn.length() == 0) {
return 0;
}
try {
SimpleDateFormat iso8601Format = new SimpleDateFormat(ISO_DATE_PATTERN, Locale.US);
iso8601Format.setTimeZone(TimeZone.getTimeZone("UTC"));
Date date = iso8601Format.parse(expiresIn);
return date.getTime();
} catch (ParseException pe) {
// log error and maybe throw an error
Log.w(TAG, "unable to parse datetime:" + expiresIn, pe);
return 0;
}
}
private Task<CustomModel> fetchDownloadDetails(String modelName, HttpURLConnection connection) {
try {
connection.connect();
int httpResponseCode = connection.getResponseCode();
String errorMessage = getErrorStream(connection);
switch (httpResponseCode) {
case HttpURLConnection.HTTP_OK:
return readCustomModelResponse(modelName, connection);
case HttpURLConnection.HTTP_NOT_MODIFIED:
return Tasks.forResult(null);
case HttpURLConnection.HTTP_NOT_FOUND:
return Tasks.forException(
new FirebaseMlException(
String.format(Locale.getDefault(), "No model found with name: %s", modelName),
FirebaseMlException.NOT_FOUND));
case HttpURLConnection.HTTP_BAD_REQUEST:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Bad http request for model (%s): %s",
modelName,
errorMessage),
FirebaseMlException.INVALID_ARGUMENT);
case 429: // too many requests
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Too many requests to server please wait before trying again: %s",
errorMessage),
FirebaseMlException.RESOURCE_EXHAUSTED);
case HttpURLConnection.HTTP_INTERNAL_ERROR:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Server issue while fetching model (%s): %s",
modelName,
errorMessage),
FirebaseMlException.INTERNAL);
case HttpURLConnection.HTTP_UNAUTHORIZED:
case HttpURLConnection.HTTP_FORBIDDEN:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Permission error while fetching model (%s): %s",
modelName,
errorMessage),
FirebaseMlException.PERMISSION_DENIED);
default:
return setAndLogException(
modelName,
httpResponseCode,
String.format(
Locale.getDefault(),
"Failed to connect to Firebase ML download server: %s",
errorMessage),
FirebaseMlException.INTERNAL);
}
} catch (IOException e) {
ErrorCode errorCode = ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED;
String errorMessage = "Failed to get model URL";
int exceptionCode = FirebaseMlException.INTERNAL;
if (e instanceof UnknownHostException) {
errorCode = ErrorCode.NO_NETWORK_CONNECTION;
errorMessage = "Failed to retrieve model info due to no internet connection.";
exceptionCode = FirebaseMlException.NO_NETWORK_CONNECTION;
}
eventLogger.logModelInfoRetrieverFailure(new CustomModel(modelName, "", 0, 0), errorCode);
return Tasks.forException(new FirebaseMlException(errorMessage, exceptionCode));
}
}
private Task<CustomModel> setAndLogException(
String modelName, int httpResponseCode, String errorMessage, @Code int invalidArgument) {
eventLogger.logModelInfoRetrieverFailure(
new CustomModel(modelName, "", 0, 0),
ErrorCode.MODEL_INFO_DOWNLOAD_UNSUCCESSFUL_HTTP_STATUS,
httpResponseCode);
return Tasks.forException(new FirebaseMlException(errorMessage, invalidArgument));
}
private Task<CustomModel> readCustomModelResponse(
@NonNull String modelName, HttpURLConnection connection) throws IOException {
String encodingKey = connection.getHeaderField(CONTENT_ENCODING_HEADER_KEY);
InputStream inputStream = maybeUnGzip(connection.getInputStream(), encodingKey);
JsonReader reader = new JsonReader(new InputStreamReader(inputStream, UTF_8));
long fileSize = 0L;
String downloadUrl = "";
long expireTime = 0L;
String modelHash = maybeUnGzipHeader(connection.getHeaderField(ETAG_HEADER), encodingKey);
if (modelHash == null || modelHash.isEmpty()) {
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash, 0, 0L),
false,
ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED.getValue());
return Tasks.forException(
new FirebaseMlException(
"Model hash not set in download response.", FirebaseMlException.INTERNAL));
}
// JsonReader.peek will sometimes throw AssertionErrors in Android 8.0 and above. See
// b/79920590 for details.
reader.beginObject();
while (reader.hasNext()) {
String name = reader.nextName();
if (name.equals("downloadUri")) {
downloadUrl = reader.nextString();
} else if (name.equals("expireTime")) {
expireTime = parseTokenExpirationTimestamp(reader.nextString());
} else if (name.equals("sizeBytes")) {
fileSize = reader.nextLong();
} else if (name.equals("modelFormat")) {
String modelFormat = reader.nextString();
if (modelFormat.equals("MODEL_FORMAT_UNSPECIFIED")) {
// log error but continue... this shouldn't happen
Log.w(TAG, "Ignoring unexpected model type: " + modelFormat);
}
} else {
reader.skipValue();
}
}
reader.endObject();
reader.close();
inputStream.close();
if (!downloadUrl.isEmpty() && expireTime > 0L) {
CustomModel model = new CustomModel(modelName, modelHash, fileSize, downloadUrl, expireTime);
eventLogger.logModelInfoRetrieverSuccess(model);
return Tasks.forResult(model);
}
eventLogger.logDownloadFailureWithReason(
new CustomModel(modelName, modelHash, 0, 0L),
false,
ErrorCode.MODEL_INFO_DOWNLOAD_CONNECTION_FAILED.getValue());
return Tasks.forException(
new FirebaseMlException(
"Model info could not be extracted from download response.",
FirebaseMlException.INTERNAL));
}
private static InputStream maybeUnGzip(InputStream input, String contentEncoding)
throws IOException {
if (GZIP_CONTENT_ENCODING.equals(contentEncoding)) {
return new GZIPInputStream(input);
}
return input;
}
private static String maybeUnGzipHeader(String header, String contentEncoding) {
// fix to remove --gzip when content header is gzip for mockwire
if (GZIP_CONTENT_ENCODING.equals(contentEncoding) && header.endsWith("--gzip")) {
return header.substring(0, header.lastIndexOf("--gzip"));
}
return header;
}
private String getErrorStreamString(HttpURLConnection connection) {
InputStream errorStream = connection.getErrorStream();
if (errorStream == null) {
return null;
}
String encodingKey = connection.getHeaderField(CONTENT_ENCODING_HEADER_KEY);
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(maybeUnGzip(errorStream, encodingKey), UTF_8))) {
StringBuilder response = new StringBuilder();
for (String input = reader.readLine(); input != null; input = reader.readLine()) {
response.append(input).append('\n');
}
return response.toString();
} catch (IOException ex) {
Log.d(TAG, "Error extracting errorStream from failed connection attempt", ex);
return null;
}
}
private String getErrorStream(HttpURLConnection connection) {
String errorStreamString = getErrorStreamString(connection);
if (errorStreamString != null) {
try {
JSONObject responseData = new JSONObject(errorStreamString);
JSONObject responseError = responseData.getJSONObject(ERROR_RESPONSE_ERROR);
if (responseError != null && responseError.has(ERROR_RESPONSE_MESSAGE)) {
errorStreamString = responseError.getString(ERROR_RESPONSE_MESSAGE);
return String.format(
Locale.ENGLISH,
"HTTP response from Firebase Download Service: [%d - %s: %s]",
connection.getResponseCode(),
connection.getResponseMessage(),
errorStreamString);
}
} catch (Exception ex) {
Log.d(TAG, "Error extracting errorStream from failed connection attempt", ex);
}
}
return errorStreamString;
}
/** Gets the Android package's SHA-1 fingerprint. */
@Nullable
private static String getFingerprintHashForPackage(Context context) {
byte[] hash;
try {
hash = AndroidUtilsLight.getPackageCertificateHashBytes(context, context.getPackageName());
if (hash == null) {
Log.e(TAG, "Could not get fingerprint hash for package: " + context.getPackageName());
return null;
} else {
return Hex.bytesToStringUppercase(hash, /* zeroTerminated= */ false);
}
} catch (PackageManager.NameNotFoundException e) {
Log.e(TAG, "No such package: " + context.getPackageName(), e);
return null;
}
}
}
| return exception if modelname is empty (#4226)
| firebase-ml-modeldownloader/src/main/java/com/google/firebase/ml/modeldownloader/internal/CustomModelDownloadService.java | return exception if modelname is empty (#4226) | <ide><path>irebase-ml-modeldownloader/src/main/java/com/google/firebase/ml/modeldownloader/internal/CustomModelDownloadService.java
<ide>
<ide> import android.content.Context;
<ide> import android.content.pm.PackageManager;
<add>import android.text.TextUtils;
<ide> import android.util.JsonReader;
<ide> import android.util.Log;
<ide> import androidx.annotation.NonNull;
<ide> public Task<CustomModel> getCustomModelDetails(
<ide> String projectNumber, String modelName, String modelHash) {
<ide> try {
<add>
<add> if (TextUtils.isEmpty(modelName))
<add> throw new FirebaseMlException(
<add> "Error cannot retrieve model from reading an empty modelName",
<add> FirebaseMlException.INVALID_ARGUMENT);
<add>
<ide> URL url =
<ide> new URL(String.format(DOWNLOAD_MODEL_REGEX, downloadHost, projectNumber, modelName));
<ide> HttpURLConnection connection = (HttpURLConnection) url.openConnection();
<ide> new FirebaseMlException(
<ide> "Error reading custom model from download service: " + e.getMessage(),
<ide> FirebaseMlException.INVALID_ARGUMENT));
<add> } catch (FirebaseMlException e) {
<add> return Tasks.forException(e);
<ide> }
<ide> }
<ide> |
|
Java | mit | e67a33640d96489a05e184ae50ac108a3d0ef7ff | 0 | s-aska/Justaway-for-Android-Original,nirvash/Justaway-for-Android-Original,teshi04/Justaway-for-Android-Original | package info.justaway.adapter;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TableLayout;
import android.widget.TextView;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.greenrobot.event.EventBus;
import info.justaway.BuildConfig;
import info.justaway.JustawayApplication;
import info.justaway.ProfileActivity;
import info.justaway.R;
import info.justaway.ScaleImageActivity;
import info.justaway.event.AlertDialogEvent;
import info.justaway.event.action.SeenTopEvent;
import info.justaway.model.Row;
import twitter4j.DirectMessage;
import twitter4j.MediaEntity;
import twitter4j.Status;
import twitter4j.URLEntity;
import twitter4j.User;
public class TwitterAdapter extends ArrayAdapter<Row> {
static class ViewHolder {
LinearLayout action;
TextView action_icon;
TextView action_by_display_name;
TextView action_by_screen_name;
ImageView icon;
TextView display_name;
TextView screen_name;
TextView fontello_lock;
TextView datetime_relative;
TextView status;
LinearLayout images;
TableLayout menu_and_via;
TextView do_reply;
TextView do_retweet;
TextView retweet_count;
TextView do_fav;
TextView fav_count;
TextView via;
TextView datetime;
LinearLayout retweet;
ImageView retweet_icon;
TextView retweet_by;
}
private JustawayApplication mApplication;
private Context mContext;
private ArrayList<Row> mStatuses = new ArrayList<Row>();
private LayoutInflater mInflater;
private int mLayout;
private int mColorBlue = 0;
private static final int LIMIT = 100;
private int mLimit = LIMIT;
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy/MM'/'dd' 'HH':'mm':'ss",
Locale.ENGLISH);
private static final Pattern TWITPIC_PATTERN = Pattern.compile("^http://twitpic\\.com/(\\w+)$");
private static final Pattern TWIPPLE_PATTERN = Pattern.compile("^http://p\\.twipple\\.jp/(\\w+)$");
private static final Pattern INSTAGRAM_PATTERN = Pattern.compile("^http://instagram\\.com/p/([^/]+)/$");
private static final Pattern IMAGES_PATTERN = Pattern.compile("^https?://.*\\.(png|gif|jpeg|jpg)$");
private static final Pattern YOUTUBE_PATTERN = Pattern.compile("^https?://(?:www\\.youtube\\.com/watch\\?.*v=|youtu\\.be/)([\\w-]+)");
private static final Pattern NICONICO_PATTERN = Pattern.compile("^http://(?:www\\.nicovideo\\.jp/watch|nico\\.ms)/sm(\\d+)$");
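// Thumbnail rewrites derived from the patterns above, e.g.:
// http://twitpic.com/abc123 -> http://twitpic.com/show/full/abc123
// http://p.twipple.jp/abc123 -> http://p.twpl.jp/show/orig/abc123
// http://youtu.be/XXXXXXXXXXX -> http://i.ytimg.com/vi/XXXXXXXXXXX/hqdefault.jpg
// http://nico.ms/sm9 -> http://tn-skr2.smilevideo.jp/smile?i=9.L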
public TwitterAdapter(Context context, int textViewResourceId) {
super(context, textViewResourceId);
this.mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
this.mContext = context;
this.mLayout = textViewResourceId;
this.mApplication = JustawayApplication.getApplication();
}
public void extensionAdd(Row row) {
if (JustawayApplication.isMute(row)) {
return;
}
super.add(row);
mStatuses.add(row);
filter(row);
mLimit++;
}
@Override
public void add(Row row) {
if (JustawayApplication.isMute(row)) {
return;
}
if (exists(row)) {
return;
}
super.add(row);
mStatuses.add(row);
filter(row);
limitation();
}
@Override
public void insert(Row row, int index) {
if (JustawayApplication.isMute(row)) {
return;
}
if (exists(row)) {
return;
}
super.insert(row, index);
mStatuses.add(index, row);
filter(row);
limitation();
}
public boolean exists(Row row) {
if (row.isStatus()) {
for (Row status : mStatuses) {
if (status.isStatus() && status.getStatus().getId() == row.getStatus().getId()) {
return true;
}
}
} else if (row.isDirectMessage()) {
for (Row status : mStatuses) {
if (status.isDirectMessage() && status.getMessage().getId() == row.getMessage().getId()) {
return true;
}
}
} else if (row.isFavorite()) {
for (Row status : mStatuses) {
if (status.isFavorite() && status.getStatus().getId() == row.getStatus().getId() &&
status.getSource().getId() == row.getSource().getId()) {
return true;
}
}
}
return false;
}
@Override
public void remove(Row row) {
super.remove(row);
mStatuses.remove(row);
}
private void filter(Row row) {
Status status = row.getStatus();
if (status != null && status.isRetweeted()) {
Status retweet = status.getRetweetedStatus();
if (retweet != null && status.getUser().getId() == mApplication.getUserId()) {
mApplication.setRtId(retweet.getId(), status.getId());
}
}
}
@SuppressWarnings("unused")
public void replaceStatus(Status status) {
for (Row row : mStatuses) {
if (!row.isDirectMessage() && row.getStatus().getId() == status.getId()) {
row.setStatus(status);
notifyDataSetChanged();
break;
}
}
}
public void removeStatus(long statusId) {
for (Row row : mStatuses) {
if (!row.isDirectMessage() && row.getStatus().getId() == statusId) {
remove(row);
break;
}
}
}
public void removeDirectMessage(long directMessageId) {
for (Row row : mStatuses) {
if (row.isDirectMessage() && row.getMessage().getId() == directMessageId) {
remove(row);
break;
}
}
}
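// Trims the backing list down to mLimit by removing rows from the tail, keeping memory
// bounded as new rows stream into the timeline.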
public void limitation() {
int size = mStatuses.size();
if (size > mLimit) {
int count = size - mLimit;
for (int i = 0; i < count; i++) {
super.remove(mStatuses.remove(size - i - 1));
}
}
}
@Override
public void clear() {
super.clear();
mStatuses.clear();
mLimit = LIMIT;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder;
// Take the recycled view handed in by the ListView
View view = convertView;
if (view == null) {
// If no recycled view was handed in, inflate a new one
view = mInflater.inflate(this.mLayout, null);
if (view == null) {
return null;
}
holder = new ViewHolder();
holder.action = (LinearLayout) view.findViewById(R.id.action);
holder.action_icon = (TextView) view.findViewById(R.id.action_icon);
holder.action_by_display_name = (TextView) view.findViewById(R.id.action_by_display_name);
holder.action_by_screen_name = (TextView) view.findViewById(R.id.action_by_screen_name);
holder.icon = (ImageView) view.findViewById(R.id.icon);
holder.display_name = (TextView) view.findViewById(R.id.display_name);
holder.screen_name = (TextView) view.findViewById(R.id.screen_name);
holder.fontello_lock = (TextView) view.findViewById(R.id.fontello_lock);
holder.datetime_relative = (TextView) view.findViewById(R.id.datetime_relative);
holder.status = (TextView) view.findViewById(R.id.status);
holder.status.setTag(12); // initial font size in sp; getView re-applies text sizes when the setting changes
holder.images = (LinearLayout) view.findViewById(R.id.images);
holder.menu_and_via = (TableLayout) view.findViewById(R.id.menu_and_via);
holder.do_reply = (TextView) view.findViewById(R.id.do_reply);
holder.do_retweet = (TextView) view.findViewById(R.id.do_retweet);
holder.retweet_count = (TextView) view.findViewById(R.id.retweet_count);
holder.do_fav = (TextView) view.findViewById(R.id.do_fav);
holder.fav_count = (TextView) view.findViewById(R.id.fav_count);
holder.via = (TextView) view.findViewById(R.id.via);
holder.datetime = (TextView) view.findViewById(R.id.datetime);
holder.retweet = (LinearLayout) view.findViewById(R.id.retweet);
holder.retweet_icon = (ImageView) view.findViewById(R.id.retweet_icon);
holder.retweet_by = (TextView) view.findViewById(R.id.retweet_by);
view.setTag(holder);
} else {
holder = (ViewHolder) view.getTag();
}
if (mApplication.getFontSize() != (Integer) holder.status.getTag()) {
holder.status.setTag(mApplication.getFontSize());
holder.status.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize());
holder.display_name.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize());
holder.screen_name.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize() - 2);
holder.datetime_relative.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize() - 2);
}
// Fetch the row data to display
Row row = mStatuses.get(position);
if (row.isDirectMessage()) {
DirectMessage message = row.getMessage();
if (message == null) {
return view;
}
renderMessage(holder, message);
} else {
Status status = row.getStatus();
if (status == null) {
return view;
}
Status retweet = status.getRetweetedStatus();
if (row.isFavorite()) {
renderStatus(holder, status, null, row.getSource());
} else if (retweet == null) {
renderStatus(holder, status, null, null);
} else {
renderStatus(holder, retweet, status, null);
}
}
if (position == 0) {
EventBus.getDefault().post(new SeenTopEvent());
}
return view;
}
private void renderMessage(ViewHolder holder, final DirectMessage message) {
Typeface fontello = JustawayApplication.getFontello();
long userId = JustawayApplication.getApplication().getUserId();
holder.do_retweet.setVisibility(View.GONE);
holder.do_fav.setVisibility(View.GONE);
holder.retweet_count.setVisibility(View.GONE);
holder.fav_count.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
if (message.getSender().getId() == userId) {
holder.do_reply.setVisibility(View.GONE);
} else {
holder.do_reply.setVisibility(View.VISIBLE);
holder.do_reply.setTypeface(fontello);
holder.do_reply.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mApplication.doReplyDirectMessage(message, mContext);
}
});
}
holder.display_name.setText(message.getSender().getName());
holder.screen_name.setText("@"
+ message.getSender().getScreenName());
holder.status.setText("D " + message.getRecipientScreenName()
+ " " + message.getText());
holder.datetime
.setText(getAbsoluteTime(message.getCreatedAt()));
holder.datetime_relative.setText(getRelativeTime(message.getCreatedAt()));
holder.via.setVisibility(View.GONE);
holder.retweet.setVisibility(View.GONE);
holder.images.setVisibility(View.GONE);
mApplication.displayUserIcon(message.getSender(), holder.icon);
holder.icon.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(v.getContext(), ProfileActivity.class);
intent.putExtra("screenName", message.getSender().getScreenName());
mContext.startActivity(intent);
}
});
holder.action.setVisibility(View.GONE);
holder.fontello_lock.setVisibility(View.INVISIBLE);
}
private void renderStatus(final ViewHolder holder, final Status status, Status retweet,
User favorite) {
long userId = JustawayApplication.getApplication().getUserId();
Typeface fontello = JustawayApplication.getFontello();
if (status.getFavoriteCount() > 0) {
holder.fav_count.setText(String.valueOf(status.getFavoriteCount()));
holder.fav_count.setVisibility(View.VISIBLE);
} else {
holder.fav_count.setText("0");
holder.fav_count.setVisibility(View.INVISIBLE);
}
if (status.getRetweetCount() > 0) {
holder.retweet_count.setText(String.valueOf(status.getRetweetCount()));
holder.retweet_count.setVisibility(View.VISIBLE);
} else {
holder.retweet_count.setText("0");
holder.retweet_count.setVisibility(View.INVISIBLE);
}
holder.do_reply.setTypeface(fontello);
holder.do_reply.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mApplication.doReplyAll(status, mContext);
}
});
holder.do_retweet.setTypeface(fontello);
holder.do_retweet.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (status.getUser().isProtected()) {
JustawayApplication.showToast(R.string.toast_protected_tweet_can_not_share);
return;
}
Long id = mApplication.getRtId(status);
if (id != null) {
if (id == 0) {
JustawayApplication.showToast(R.string.toast_destroy_retweet_progress);
} else {
DialogFragment dialog = new DestroyRetweetDialogFragment();
Bundle args = new Bundle(1);
args.putSerializable("status", status);
dialog.setArguments(args);
EventBus.getDefault().post(new AlertDialogEvent(dialog));
}
} else {
DialogFragment dialog = new RetweetDialogFragment();
Bundle args = new Bundle(1);
args.putSerializable("status", status);
dialog.setArguments(args);
EventBus.getDefault().post(new AlertDialogEvent(dialog));
}
}
});
holder.do_fav.setTypeface(fontello);
holder.do_fav.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (holder.do_fav.getTag().equals("is_fav")) {
holder.do_fav.setTag("no_fav");
holder.do_fav.setTextColor(Color.parseColor("#666666"));
mApplication.doDestroyFavorite(status.getId());
} else {
holder.do_fav.setTag("is_fav");
holder.do_fav.setTextColor(mContext.getResources().getColor(R.color.holo_orange_light));
mApplication.doFavorite(status.getId());
}
}
});
if (mApplication.getRtId(status) != null) {
holder.do_retweet.setTextColor(mContext.getResources().getColor(R.color.holo_green_light));
} else {
holder.do_retweet.setTextColor(Color.parseColor("#666666"));
}
if (mApplication.isFav(status)) {
holder.do_fav.setTag("is_fav");
holder.do_fav.setTextColor(mContext.getResources().getColor(R.color.holo_orange_light));
} else {
holder.do_fav.setTag("no_fav");
holder.do_fav.setTextColor(Color.parseColor("#666666"));
}
holder.display_name.setText(status.getUser().getName());
holder.screen_name.setText("@" + status.getUser().getScreenName());
holder.datetime_relative.setText(getRelativeTime(status.getCreatedAt()));
holder.datetime.setText(getAbsoluteTime(status.getCreatedAt()));
String via = mApplication.getClientName(status.getSource());
holder.via.setText("via " + via);
holder.via.setVisibility(View.VISIBLE);
/**
 * Highlight "Justaway for Android" in the via line, but only in debug builds
 */
if (BuildConfig.DEBUG) {
if (via.equals("Justaway for Android")) {
if (mColorBlue == 0) {
mColorBlue = mApplication.getThemeTextColor((Activity) mContext, R.attr.holo_blue);
}
holder.via.setTextColor(mColorBlue);
} else {
holder.via.setTextColor(Color.parseColor("#666666"));
}
}
holder.action_icon.setTypeface(fontello);
// Favorite case
if (favorite != null) {
holder.action_icon.setText(R.string.fontello_star);
holder.action_icon.setTextColor(mContext.getResources().getColor(R.color.holo_orange_light));
holder.action_by_display_name.setText(favorite.getName());
holder.action_by_screen_name.setText("@" + favorite.getScreenName());
holder.retweet.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
holder.action.setVisibility(View.VISIBLE);
}
// Retweet case
else if (retweet != null) {
// My own tweet (retweeted by someone else)
if (userId == status.getUser().getId()) {
holder.action_icon.setText(R.string.fontello_retweet);
holder.action_icon.setTextColor(mContext.getResources().getColor(R.color.holo_green_light));
holder.action_by_display_name.setText(retweet.getUser().getName());
holder.action_by_screen_name.setText("@" + retweet.getUser().getScreenName());
holder.retweet.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
holder.action.setVisibility(View.VISIBLE);
} else {
if (mApplication.getUserIconSize().equals("none")) {
holder.retweet_icon.setVisibility(View.GONE);
} else {
holder.retweet_icon.setVisibility(View.VISIBLE);
mApplication.displayRoundedImage(retweet.getUser().getProfileImageURL(), holder.retweet_icon);
}
holder.retweet_by.setText("RT by " + retweet.getUser().getName() + " @" + retweet.getUser().getScreenName());
holder.action.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
holder.retweet.setVisibility(View.VISIBLE);
}
} else {
// Mention / reply to me
if (mApplication.isMentionForMe(status)) {
holder.action_icon.setText(R.string.fontello_at);
holder.action_icon.setTextColor(mContext.getResources().getColor(R.color.holo_red_light));
holder.action_by_display_name.setText(status.getUser().getName());
holder.action_by_screen_name.setText("@" + status.getUser().getScreenName());
holder.action.setVisibility(View.VISIBLE);
holder.retweet.setVisibility(View.GONE);
} else {
holder.action.setVisibility(View.GONE);
holder.retweet.setVisibility(View.GONE);
}
holder.menu_and_via.setVisibility(View.VISIBLE);
}
if (status.getUser().isProtected()) {
holder.fontello_lock.setTypeface(fontello);
holder.fontello_lock.setVisibility(View.VISIBLE);
} else {
holder.fontello_lock.setVisibility(View.INVISIBLE);
}
mApplication.displayUserIcon(status.getUser(), holder.icon);
holder.icon.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(v.getContext(), ProfileActivity.class);
intent.putExtra("screenName", status.getUser().getScreenName());
mContext.startActivity(intent);
}
});
boolean displayThumbnail = mApplication.getDisplayThumbnailOn();
URLEntity[] urls = retweet != null ? retweet.getURLEntities() : status.getURLEntities();
ArrayList<String> imageUrls = new ArrayList<String>();
String statusString = status.getText();
for (URLEntity url : urls) {
Pattern p = Pattern.compile(url.getURL());
Matcher m = p.matcher(statusString);
statusString = m.replaceAll(url.getExpandedURL());
if (!displayThumbnail) {
continue;
}
Matcher twitpic_matcher = TWITPIC_PATTERN.matcher(url.getExpandedURL());
if (twitpic_matcher.find()) {
imageUrls.add("http://twitpic.com/show/full/" + twitpic_matcher.group(1));
continue;
}
Matcher twipple_matcher = TWIPPLE_PATTERN.matcher(url.getExpandedURL());
if (twipple_matcher.find()) {
imageUrls.add("http://p.twpl.jp/show/orig/" + twipple_matcher.group(1));
continue;
}
Matcher instagram_matcher = INSTAGRAM_PATTERN.matcher(url.getExpandedURL());
if (instagram_matcher.find()) {
imageUrls.add(url.getExpandedURL() + "media?size=l");
continue;
}
Matcher youtube_matcher = YOUTUBE_PATTERN.matcher(url.getExpandedURL());
if (youtube_matcher.find()) {
imageUrls.add("http://i.ytimg.com/vi/" + youtube_matcher.group(1) + "/hqdefault.jpg");
continue;
}
Matcher niconico_matcher = NICONICO_PATTERN.matcher(url.getExpandedURL());
if (niconico_matcher.find()) {
int id = Integer.valueOf(niconico_matcher.group(1));
int host = id % 4 + 1;
imageUrls.add("http://tn-skr" + host + ".smilevideo.jp/smile?i=" + id + ".L");
continue;
}
Matcher images_matcher = IMAGES_PATTERN.matcher(url.getExpandedURL());
if (images_matcher.find()) {
imageUrls.add(url.getExpandedURL());
}
}
holder.status.setText(statusString);
if (!displayThumbnail) {
holder.images.setVisibility(View.GONE);
return;
}
MediaEntity[] medias = retweet != null ? retweet.getMediaEntities() : status.getMediaEntities();
for (MediaEntity media : medias) {
imageUrls.add(media.getMediaURL());
}
holder.images.removeAllViews();
if (imageUrls.size() > 0) {
for (final String url : imageUrls) {
ImageView image = new ImageView(mContext);
image.setScaleType(ImageView.ScaleType.CENTER_CROP);
holder.images.addView(image, new LinearLayout.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT, 120));
mApplication.displayRoundedImage(url, image);
// Tap the image to show it enlarged (pinch-in/pinch-out to be done properly someday)
image.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(v.getContext(), ScaleImageActivity.class);
intent.putExtra("url", url);
mContext.startActivity(intent);
}
});
}
holder.images.setVisibility(View.VISIBLE);
} else {
holder.images.setVisibility(View.GONE);
}
}
private String getRelativeTime(Date date) {
int diff = (int) (((new Date()).getTime() - date.getTime()) / 1000);
if (diff < 1) {
return "now";
} else if (diff < 60) {
return diff + "s";
} else if (diff < 3600) {
return (diff / 60) + "m";
} else if (diff < 86400) {
return (diff / 3600) + "h";
} else {
return (diff / 86400) + "d";
}
}
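// Illustrative outputs of getRelativeTime for a few inputs (diff in seconds):
//   diff = 45     -> "45s"
//   diff = 600    -> "10m"
//   diff = 7200   -> "2h"
//   diff = 172800 -> "2d"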
private String getAbsoluteTime(Date date) {
return DATE_FORMAT.format(date);
}
public static final class RetweetDialogFragment extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Status status = (Status) getArguments().getSerializable("status");
if (status == null) {
return null;
}
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setTitle(R.string.confirm_retweet);
builder.setMessage(status.getText());
builder.setNeutralButton(getString(R.string.button_quote),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
JustawayApplication.getApplication().doQuote(status, getActivity());
dismiss();
}
}
);
builder.setPositiveButton(getString(R.string.button_retweet),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
JustawayApplication.getApplication().doRetweet(status.getId());
dismiss();
}
}
);
builder.setNegativeButton(getString(R.string.button_cancel),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismiss();
}
}
);
return builder.create();
}
}
public static final class DestroyRetweetDialogFragment extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Status status = (Status) getArguments().getSerializable("status");
if (status == null) {
return null;
}
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setTitle(R.string.confirm_destroy_retweet);
builder.setMessage(status.getText());
builder.setPositiveButton(getString(R.string.button_destroy_retweet),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
JustawayApplication.getApplication().doDestroyRetweet(status);
dismiss();
}
}
);
builder.setNegativeButton(getString(R.string.button_cancel),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismiss();
}
}
);
return builder.create();
}
}
}
| Justaway/src/main/java/info/justaway/adapter/TwitterAdapter.java | package info.justaway.adapter;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TableLayout;
import android.widget.TextView;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.greenrobot.event.EventBus;
import info.justaway.BuildConfig;
import info.justaway.JustawayApplication;
import info.justaway.ProfileActivity;
import info.justaway.R;
import info.justaway.ScaleImageActivity;
import info.justaway.event.AlertDialogEvent;
import info.justaway.event.action.SeenTopEvent;
import info.justaway.model.Row;
import twitter4j.DirectMessage;
import twitter4j.MediaEntity;
import twitter4j.Status;
import twitter4j.URLEntity;
import twitter4j.User;
public class TwitterAdapter extends ArrayAdapter<Row> {
static class ViewHolder {
LinearLayout action;
TextView action_icon;
TextView action_by_display_name;
TextView action_by_screen_name;
ImageView icon;
TextView display_name;
TextView screen_name;
TextView fontello_lock;
TextView datetime_relative;
TextView status;
LinearLayout images;
TableLayout menu_and_via;
TextView do_reply;
TextView do_retweet;
TextView retweet_count;
TextView do_fav;
TextView fav_count;
TextView via;
TextView datetime;
LinearLayout retweet;
ImageView retweet_icon;
TextView retweet_by;
}
private JustawayApplication mApplication;
private Context mContext;
private ArrayList<Row> mStatuses = new ArrayList<Row>();
private LayoutInflater mInflater;
private int mLayout;
private int mColorBlue = 0;
private static final int LIMIT = 100;
private int mLimit = LIMIT;
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy/MM'/'dd' 'HH':'mm':'ss",
Locale.ENGLISH);
private static final Pattern TWITPIC_PATTERN = Pattern.compile("^http://twitpic\\.com/(\\w+)$");
private static final Pattern TWIPPLE_PATTERN = Pattern.compile("^http://p\\.twipple\\.jp/(\\w+)$");
private static final Pattern INSTAGRAM_PATTERN = Pattern.compile("^http://instagram\\.com/p/([^/]+)/$");
private static final Pattern IMAGES_PATTERN = Pattern.compile("^https?://.*\\.(png|gif|jpeg|jpg)$");
private static final Pattern YOUTUBE_PATTERN = Pattern.compile("^https?://(?:www\\.youtube\\.com/watch\\?.*v=|youtu\\.be/)([\\w-]+)");
private static final Pattern NICONICO_PATTERN = Pattern.compile("^http://(?:www\\.nicovideo\\.jp/watch|nico\\.ms)/sm(\\d+)$");
public TwitterAdapter(Context context, int textViewResourceId) {
super(context, textViewResourceId);
this.mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
this.mContext = context;
this.mLayout = textViewResourceId;
this.mApplication = JustawayApplication.getApplication();
}
public void extensionAdd(Row row) {
if (JustawayApplication.isMute(row)) {
return;
}
super.add(row);
this.filter(row);
this.mStatuses.add(row);
mLimit++;
}
@Override
public void add(Row row) {
if (JustawayApplication.isMute(row)) {
return;
}
super.add(row);
this.filter(row);
this.mStatuses.add(row);
this.limitation();
}
@Override
public void insert(Row row, int index) {
if (JustawayApplication.isMute(row)) {
return;
}
super.insert(row, index);
this.filter(row);
this.mStatuses.add(index, row);
this.limitation();
}
@Override
public void remove(Row row) {
super.remove(row);
this.mStatuses.remove(row);
}
private void filter(Row row) {
Status status = row.getStatus();
if (status != null && status.isRetweeted()) {
Status retweet = status.getRetweetedStatus();
if (retweet != null && status.getUser().getId() == mApplication.getUserId()) {
mApplication.setRtId(retweet.getId(), status.getId());
}
}
}
@SuppressWarnings("unused")
public void replaceStatus(Status status) {
for (Row row : mStatuses) {
if (!row.isDirectMessage() && row.getStatus().getId() == status.getId()) {
row.setStatus(status);
notifyDataSetChanged();
break;
}
}
}
public void removeStatus(long statusId) {
for (Row row : mStatuses) {
if (!row.isDirectMessage() && row.getStatus().getId() == statusId) {
remove(row);
break;
}
}
}
public void removeDirectMessage(long directMessageId) {
for (Row row : mStatuses) {
if (row.isDirectMessage() && row.getMessage().getId() == directMessageId) {
remove(row);
break;
}
}
}
public void limitation() {
int size = this.mStatuses.size();
if (size > mLimit) {
int count = size - mLimit;
for (int i = 0; i < count; i++) {
super.remove(this.mStatuses.remove(size - i - 1));
}
}
}
@Override
public void clear() {
super.clear();
this.mStatuses.clear();
mLimit = LIMIT;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder;
// Receive the recycled view
View view = convertView;
if (view == null) {
// If the received view is null, inflate a new one
view = mInflater.inflate(this.mLayout, null);
if (view == null) {
return null;
}
holder = new ViewHolder();
holder.action = (LinearLayout) view.findViewById(R.id.action);
holder.action_icon = (TextView) view.findViewById(R.id.action_icon);
holder.action_by_display_name = (TextView) view.findViewById(R.id.action_by_display_name);
holder.action_by_screen_name = (TextView) view.findViewById(R.id.action_by_screen_name);
holder.icon = (ImageView) view.findViewById(R.id.icon);
holder.display_name = (TextView) view.findViewById(R.id.display_name);
holder.screen_name = (TextView) view.findViewById(R.id.screen_name);
holder.fontello_lock = (TextView) view.findViewById(R.id.fontello_lock);
holder.datetime_relative = (TextView) view.findViewById(R.id.datetime_relative);
holder.status = (TextView) view.findViewById(R.id.status);
holder.status.setTag(12);
holder.images = (LinearLayout) view.findViewById(R.id.images);
holder.menu_and_via = (TableLayout) view.findViewById(R.id.menu_and_via);
holder.do_reply = (TextView) view.findViewById(R.id.do_reply);
holder.do_retweet = (TextView) view.findViewById(R.id.do_retweet);
holder.retweet_count = (TextView) view.findViewById(R.id.retweet_count);
holder.do_fav = (TextView) view.findViewById(R.id.do_fav);
holder.fav_count = (TextView) view.findViewById(R.id.fav_count);
holder.via = (TextView) view.findViewById(R.id.via);
holder.datetime = (TextView) view.findViewById(R.id.datetime);
holder.retweet = (LinearLayout) view.findViewById(R.id.retweet);
holder.retweet_icon = (ImageView) view.findViewById(R.id.retweet_icon);
holder.retweet_by = (TextView) view.findViewById(R.id.retweet_by);
view.setTag(holder);
} else {
holder = (ViewHolder) view.getTag();
}
if (mApplication.getFontSize() != (Integer) holder.status.getTag()) {
holder.status.setTag(mApplication.getFontSize());
holder.status.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize());
holder.display_name.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize());
holder.screen_name.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize() - 2);
holder.datetime_relative.setTextSize(TypedValue.COMPLEX_UNIT_SP, mApplication.getFontSize() - 2);
}
// Get the data to display
Row row = mStatuses.get(position);
if (row.isDirectMessage()) {
DirectMessage message = row.getMessage();
if (message == null) {
return view;
}
renderMessage(holder, message);
} else {
Status status = row.getStatus();
if (status == null) {
return view;
}
Status retweet = status.getRetweetedStatus();
if (row.isFavorite()) {
renderStatus(holder, status, null, row.getSource());
} else if (retweet == null) {
renderStatus(holder, status, null, null);
} else {
renderStatus(holder, retweet, status, null);
}
}
if (position == 0) {
EventBus.getDefault().post(new SeenTopEvent());
}
return view;
}
private void renderMessage(ViewHolder holder, final DirectMessage message) {
Typeface fontello = JustawayApplication.getFontello();
long userId = JustawayApplication.getApplication().getUserId();
holder.do_retweet.setVisibility(View.GONE);
holder.do_fav.setVisibility(View.GONE);
holder.retweet_count.setVisibility(View.GONE);
holder.fav_count.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
if (message.getSender().getId() == userId) {
holder.do_reply.setVisibility(View.GONE);
} else {
holder.do_reply.setVisibility(View.VISIBLE);
holder.do_reply.setTypeface(fontello);
holder.do_reply.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mApplication.doReplyDirectMessage(message, mContext);
}
});
}
holder.display_name.setText(message.getSender().getName());
holder.screen_name.setText("@"
+ message.getSender().getScreenName());
holder.status.setText("D " + message.getRecipientScreenName()
+ " " + message.getText());
holder.datetime
.setText(getAbsoluteTime(message.getCreatedAt()));
holder.datetime_relative.setText(getRelativeTime(message.getCreatedAt()));
holder.via.setVisibility(View.GONE);
holder.retweet.setVisibility(View.GONE);
holder.images.setVisibility(View.GONE);
mApplication.displayUserIcon(message.getSender(), holder.icon);
holder.icon.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(v.getContext(), ProfileActivity.class);
intent.putExtra("screenName", message.getSender().getScreenName());
mContext.startActivity(intent);
}
});
holder.action.setVisibility(View.GONE);
holder.fontello_lock.setVisibility(View.INVISIBLE);
}
private void renderStatus(final ViewHolder holder, final Status status, Status retweet,
User favorite) {
long userId = JustawayApplication.getApplication().getUserId();
Typeface fontello = JustawayApplication.getFontello();
if (status.getFavoriteCount() > 0) {
holder.fav_count.setText(String.valueOf(status.getFavoriteCount()));
holder.fav_count.setVisibility(View.VISIBLE);
} else {
holder.fav_count.setText("0");
holder.fav_count.setVisibility(View.INVISIBLE);
}
if (status.getRetweetCount() > 0) {
holder.retweet_count.setText(String.valueOf(status.getRetweetCount()));
holder.retweet_count.setVisibility(View.VISIBLE);
} else {
holder.retweet_count.setText("0");
holder.retweet_count.setVisibility(View.INVISIBLE);
}
holder.do_reply.setTypeface(fontello);
holder.do_reply.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mApplication.doReplyAll(status, mContext);
}
});
holder.do_retweet.setTypeface(fontello);
holder.do_retweet.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (status.getUser().isProtected()) {
JustawayApplication.showToast(R.string.toast_protected_tweet_can_not_share);
return;
}
Long id = mApplication.getRtId(status);
if (id != null) {
if (id == 0) {
JustawayApplication.showToast(R.string.toast_destroy_retweet_progress);
} else {
DialogFragment dialog = new DestroyRetweetDialogFragment();
Bundle args = new Bundle(1);
args.putSerializable("status", status);
dialog.setArguments(args);
EventBus.getDefault().post(new AlertDialogEvent(dialog));
}
} else {
DialogFragment dialog = new RetweetDialogFragment();
Bundle args = new Bundle(1);
args.putSerializable("status", status);
dialog.setArguments(args);
EventBus.getDefault().post(new AlertDialogEvent(dialog));
}
}
});
holder.do_fav.setTypeface(fontello);
holder.do_fav.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (holder.do_fav.getTag().equals("is_fav")) {
holder.do_fav.setTag("no_fav");
holder.do_fav.setTextColor(Color.parseColor("#666666"));
mApplication.doDestroyFavorite(status.getId());
} else {
holder.do_fav.setTag("is_fav");
holder.do_fav.setTextColor(mContext.getResources().getColor(R.color.holo_orange_light));
mApplication.doFavorite(status.getId());
}
}
});
if (mApplication.getRtId(status) != null) {
holder.do_retweet.setTextColor(mContext.getResources().getColor(R.color.holo_green_light));
} else {
holder.do_retweet.setTextColor(Color.parseColor("#666666"));
}
if (mApplication.isFav(status)) {
holder.do_fav.setTag("is_fav");
holder.do_fav.setTextColor(mContext.getResources().getColor(R.color.holo_orange_light));
} else {
holder.do_fav.setTag("no_fav");
holder.do_fav.setTextColor(Color.parseColor("#666666"));
}
holder.display_name.setText(status.getUser().getName());
holder.screen_name.setText("@" + status.getUser().getScreenName());
holder.datetime_relative.setText(getRelativeTime(status.getCreatedAt()));
holder.datetime.setText(getAbsoluteTime(status.getCreatedAt()));
String via = mApplication.getClientName(status.getSource());
holder.via.setText("via " + via);
holder.via.setVisibility(View.VISIBLE);
/**
 * Highlight Justaway for Android only in debug mode
 */
if (BuildConfig.DEBUG) {
if (via.equals("Justaway for Android")) {
if (mColorBlue == 0) {
mColorBlue = mApplication.getThemeTextColor((Activity) mContext, R.attr.holo_blue);
}
holder.via.setTextColor(mColorBlue);
} else {
holder.via.setTextColor(Color.parseColor("#666666"));
}
}
holder.action_icon.setTypeface(fontello);
// Favorite case
if (favorite != null) {
holder.action_icon.setText(R.string.fontello_star);
holder.action_icon.setTextColor(mContext.getResources().getColor(R.color.holo_orange_light));
holder.action_by_display_name.setText(favorite.getName());
holder.action_by_screen_name.setText("@" + favorite.getScreenName());
holder.retweet.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
holder.action.setVisibility(View.VISIBLE);
}
// Retweet case
else if (retweet != null) {
// My own tweet
if (userId == status.getUser().getId()) {
holder.action_icon.setText(R.string.fontello_retweet);
holder.action_icon.setTextColor(mContext.getResources().getColor(R.color.holo_green_light));
holder.action_by_display_name.setText(retweet.getUser().getName());
holder.action_by_screen_name.setText("@" + retweet.getUser().getScreenName());
holder.retweet.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
holder.action.setVisibility(View.VISIBLE);
} else {
if (mApplication.getUserIconSize().equals("none")) {
holder.retweet_icon.setVisibility(View.GONE);
} else {
holder.retweet_icon.setVisibility(View.VISIBLE);
mApplication.displayRoundedImage(retweet.getUser().getProfileImageURL(), holder.retweet_icon);
}
holder.retweet_by.setText("RT by " + retweet.getUser().getName() + " @" + retweet.getUser().getScreenName());
holder.action.setVisibility(View.GONE);
holder.menu_and_via.setVisibility(View.VISIBLE);
holder.retweet.setVisibility(View.VISIBLE);
}
} else {
// Reply to me
if (mApplication.isMentionForMe(status)) {
holder.action_icon.setText(R.string.fontello_at);
holder.action_icon.setTextColor(mContext.getResources().getColor(R.color.holo_red_light));
holder.action_by_display_name.setText(status.getUser().getName());
holder.action_by_screen_name.setText("@" + status.getUser().getScreenName());
holder.action.setVisibility(View.VISIBLE);
holder.retweet.setVisibility(View.GONE);
} else {
holder.action.setVisibility(View.GONE);
holder.retweet.setVisibility(View.GONE);
}
holder.menu_and_via.setVisibility(View.VISIBLE);
}
if (status.getUser().isProtected()) {
holder.fontello_lock.setTypeface(fontello);
holder.fontello_lock.setVisibility(View.VISIBLE);
} else {
holder.fontello_lock.setVisibility(View.INVISIBLE);
}
mApplication.displayUserIcon(status.getUser(), holder.icon);
holder.icon.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(v.getContext(), ProfileActivity.class);
intent.putExtra("screenName", status.getUser().getScreenName());
mContext.startActivity(intent);
}
});
boolean displayThumbnail = mApplication.getDisplayThumbnailOn();
URLEntity[] urls = retweet != null ? retweet.getURLEntities() : status.getURLEntities();
ArrayList<String> imageUrls = new ArrayList<String>();
String statusString = status.getText();
for (URLEntity url : urls) {
Pattern p = Pattern.compile(url.getURL());
Matcher m = p.matcher(statusString);
statusString = m.replaceAll(url.getExpandedURL());
if (!displayThumbnail) {
continue;
}
Matcher twitpic_matcher = TWITPIC_PATTERN.matcher(url.getExpandedURL());
if (twitpic_matcher.find()) {
imageUrls.add("http://twitpic.com/show/full/" + twitpic_matcher.group(1));
continue;
}
Matcher twipple_matcher = TWIPPLE_PATTERN.matcher(url.getExpandedURL());
if (twipple_matcher.find()) {
imageUrls.add("http://p.twpl.jp/show/orig/" + twipple_matcher.group(1));
continue;
}
Matcher instagram_matcher = INSTAGRAM_PATTERN.matcher(url.getExpandedURL());
if (instagram_matcher.find()) {
imageUrls.add(url.getExpandedURL() + "media?size=l");
continue;
}
Matcher youtube_matcher = YOUTUBE_PATTERN.matcher(url.getExpandedURL());
if (youtube_matcher.find()) {
imageUrls.add("http://i.ytimg.com/vi/" + youtube_matcher.group(1) + "/hqdefault.jpg");
continue;
}
Matcher niconico_matcher = NICONICO_PATTERN.matcher(url.getExpandedURL());
if (niconico_matcher.find()) {
int id = Integer.valueOf(niconico_matcher.group(1));
int host = id % 4 + 1;
imageUrls.add("http://tn-skr" + host + ".smilevideo.jp/smile?i=" + id + ".L");
continue;
}
Matcher images_matcher = IMAGES_PATTERN.matcher(url.getExpandedURL());
if (images_matcher.find()) {
imageUrls.add(url.getExpandedURL());
}
}
holder.status.setText(statusString);
if (!displayThumbnail) {
holder.images.setVisibility(View.GONE);
return;
}
MediaEntity[] medias = retweet != null ? retweet.getMediaEntities() : status.getMediaEntities();
for (MediaEntity media : medias) {
imageUrls.add(media.getMediaURL());
}
holder.images.removeAllViews();
if (imageUrls.size() > 0) {
for (final String url : imageUrls) {
ImageView image = new ImageView(mContext);
image.setScaleType(ImageView.ScaleType.CENTER_CROP);
holder.images.addView(image, new LinearLayout.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT, 120));
mApplication.displayRoundedImage(url, image);
// Tap the image to show it enlarged (pinch-in/pinch-out to be done properly someday)
image.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(v.getContext(), ScaleImageActivity.class);
intent.putExtra("url", url);
mContext.startActivity(intent);
}
});
}
holder.images.setVisibility(View.VISIBLE);
} else {
holder.images.setVisibility(View.GONE);
}
}
private String getRelativeTime(Date date) {
int diff = (int) (((new Date()).getTime() - date.getTime()) / 1000);
if (diff < 1) {
return "now";
} else if (diff < 60) {
return diff + "s";
} else if (diff < 3600) {
return (diff / 60) + "m";
} else if (diff < 86400) {
return (diff / 3600) + "h";
} else {
return (diff / 86400) + "d";
}
}
private String getAbsoluteTime(Date date) {
return DATE_FORMAT.format(date);
}
public static final class RetweetDialogFragment extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Status status = (Status) getArguments().getSerializable("status");
if (status == null) {
return null;
}
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setTitle(R.string.confirm_retweet);
builder.setMessage(status.getText());
builder.setNeutralButton(getString(R.string.button_quote),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
JustawayApplication.getApplication().doQuote(status, getActivity());
dismiss();
}
}
);
builder.setPositiveButton(getString(R.string.button_retweet),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
JustawayApplication.getApplication().doRetweet(status.getId());
dismiss();
}
}
);
builder.setNegativeButton(getString(R.string.button_cancel),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismiss();
}
}
);
return builder.create();
}
}
public static final class DestroyRetweetDialogFragment extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Status status = (Status) getArguments().getSerializable("status");
if (status == null) {
return null;
}
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setTitle(R.string.confirm_destroy_retweet);
builder.setMessage(status.getText());
builder.setPositiveButton(getString(R.string.button_destroy_retweet),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
JustawayApplication.getApplication().doDestroyRetweet(status);
dismiss();
}
}
);
builder.setNegativeButton(getString(R.string.button_cancel),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismiss();
}
}
);
return builder.create();
}
}
}
| Guy to fixed #138
| Justaway/src/main/java/info/justaway/adapter/TwitterAdapter.java | Guy to fixed #138 | <ide><path>ustaway/src/main/java/info/justaway/adapter/TwitterAdapter.java
<ide> return;
<ide> }
<ide> super.add(row);
<del> this.filter(row);
<del> this.mStatuses.add(row);
<add> mStatuses.add(row);
<add> filter(row);
<ide> mLimit++;
<ide> }
<ide>
<ide> if (JustawayApplication.isMute(row)) {
<ide> return;
<ide> }
<add> if (exists(row)) {
<add> return;
<add> }
<ide> super.add(row);
<del> this.filter(row);
<del> this.mStatuses.add(row);
<del> this.limitation();
<add> mStatuses.add(row);
<add> filter(row);
<add> limitation();
<ide> }
<ide>
<ide> @Override
<ide> if (JustawayApplication.isMute(row)) {
<ide> return;
<ide> }
<add> if (exists(row)) {
<add> return;
<add> }
<ide> super.insert(row, index);
<del> this.filter(row);
<del> this.mStatuses.add(index, row);
<del> this.limitation();
<add> mStatuses.add(index, row);
<add> filter(row);
<add> limitation();
<add> }
<add>
<add> public boolean exists(Row row) {
<add> if (row.isStatus()) {
<add> for (Row status : mStatuses) {
<add> if (status.isStatus() && status.getStatus().getId() == row.getStatus().getId()) {
<add> return true;
<add> }
<add> }
<add> } else if (row.isDirectMessage()) {
<add> for (Row status : mStatuses) {
<add> if (status.isDirectMessage() && status.getMessage().getId() == row.getMessage().getId()) {
<add> return true;
<add> }
<add> }
<add> } else if (row.isFavorite()) {
<add> for (Row status : mStatuses) {
<add> if (status.isFavorite() && status.getStatus().getId() == row.getStatus().getId() &&
<add> status.getSource().getId() == row.getSource().getId()) {
<add> return true;
<add> }
<add> }
<add> }
<add> return false;
<ide> }
<ide>
<ide> @Override
<ide> public void remove(Row row) {
<ide> super.remove(row);
<del> this.mStatuses.remove(row);
<add> mStatuses.remove(row);
<ide> }
<ide>
<ide> private void filter(Row row) {
<ide> }
<ide>
<ide> public void limitation() {
<del> int size = this.mStatuses.size();
<add> int size = mStatuses.size();
<ide> if (size > mLimit) {
<ide> int count = size - mLimit;
<ide> for (int i = 0; i < count; i++) {
<del> super.remove(this.mStatuses.remove(size - i - 1));
<add> super.remove(mStatuses.remove(size - i - 1));
<ide> }
<ide> }
<ide> }
<ide> @Override
<ide> public void clear() {
<ide> super.clear();
<del> this.mStatuses.clear();
<add> mStatuses.clear();
<ide> mLimit = LIMIT;
<ide> }
<ide> |
|
Java | apache-2.0 | 2064530ad0ff8e26ee1f8a6248ed5511d9ae276b | 0 | if045/tender,if045/tender | package com.softserveinc.tender.facade.impl;
import com.softserveinc.tender.dto.AddressDto;
import com.softserveinc.tender.dto.CompanyDto;
import com.softserveinc.tender.dto.ProfileDto;
import com.softserveinc.tender.dto.RoleDto;
import com.softserveinc.tender.dto.TradeSphereDto;
import com.softserveinc.tender.dto.UserDto;
import com.softserveinc.tender.dto.UserRegistrationDataDto;
import com.softserveinc.tender.entity.Address;
import com.softserveinc.tender.entity.Category;
import com.softserveinc.tender.entity.Company;
import com.softserveinc.tender.entity.Location;
import com.softserveinc.tender.entity.Profile;
import com.softserveinc.tender.entity.Role;
import com.softserveinc.tender.entity.User;
import com.softserveinc.tender.facade.RegistrationServiceFacade;
import com.softserveinc.tender.service.AddressService;
import com.softserveinc.tender.service.CategoryService;
import com.softserveinc.tender.service.CompanyService;
import com.softserveinc.tender.service.LocationService;
import com.softserveinc.tender.service.ProfileService;
import com.softserveinc.tender.service.RoleService;
import com.softserveinc.tender.service.UserService;
import org.modelmapper.ModelMapper;
import org.modelmapper.TypeToken;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.lang.reflect.Type;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@Service("registrationServiceFacade")
@Transactional
public class RegistrationServiceFacadeImpl implements RegistrationServiceFacade{
@Autowired
private ModelMapper modelMapper;
@Autowired
private RoleService roleService;
@Autowired
private UserService userService;
@Autowired
private ProfileService profileService;
@Autowired
private CompanyService companyService;
@Autowired
private AddressService addressService;
@Autowired
private LocationService locationService;
@Autowired
private CategoryService categoryService;
@Override
public List<RoleDto> findUsersRoles() {
Type targetListType = new TypeToken<List<RoleDto>>(){}.getType();
return modelMapper.map(roleService.findUsersRoles(), targetListType);
}
public User saveUser(UserRegistrationDataDto userData) {
return mapUserRegistrationData(userData);
}
private User mapUserRegistrationData(UserRegistrationDataDto userData) {
User savedUser = saveUser(userData.getUserDto(), userData.getTradeSphereDto());
Address savedAddress = saveAddress(userData.getCompanyDto().getAddressDto());
Company savedCompany = saveCompany(userData.getCompanyDto(), savedAddress);
saveProfile(userData.getProfileDto(), savedCompany, savedUser);
return userService.findUserById(savedUser.getId());
}
private Company saveCompany(CompanyDto companyDto, Address address) {
return companyService.save(mapCompany(companyDto, address));
}
private Company mapCompany(CompanyDto companyDto, Address address) {
Company company = new Company();
company.setName(companyDto.getName());
company.setEmail(companyDto.getEmail());
company.setSrn(companyDto.getSrnNumber());
company.setAddress(addressService.findById(address.getId()));
return company;
}
private Profile saveProfile(ProfileDto profileDto, Company company, User user) {
return profileService.saveProfile(mapProfile(profileDto, company, user));
}
private Profile mapProfile(ProfileDto profileDto, Company company, User user) {
Profile profile = new Profile();
profile.setFirstName(profileDto.getFirstName());
profile.setLastName(profileDto.getLastName());
profile.setPerson(profileDto.getPerson());
profile.setTelephone(profileDto.getTelephone());
profile.setBirthday(formatDate(profileDto.getBirthday()));
profile.setCompany(companyService.findById(company.getId()));
profile.setUser(userService.findUserById(user.getId()));
profile.setChecked(false);
return profile;
}
private Address saveAddress(AddressDto addressDto) {
return addressService.save(mapAddress(addressDto));
}
private Address mapAddress(AddressDto addressDto) {
Address address = new Address();
address.setCity(addressDto.getCity());
address.setStreet(addressDto.getStreet());
address.setBuildingNumber(addressDto.getBuildingNumber());
address.setPostcode(addressDto.getPostcode());
return address;
}
private User saveUser(UserDto userDto, TradeSphereDto tradeSphereDto) {
return userService.saveUser(mapUser(userDto, tradeSphereDto));
}
private User mapUser(UserDto userDto, TradeSphereDto tradeSphereDto) {
List<Role> roles = new ArrayList<>();
List<Category> categories = new ArrayList<>();
List<Location> locations = new ArrayList<>();
User user = new User();
for(Integer id : userDto.getRoles()) {
roles.add(roleService.findRoleById(id));
}
for(Integer id : tradeSphereDto.getCategories()) {
categories.add(categoryService.findCategoryById(id));
}
for(Integer id : tradeSphereDto.getLocations()) {
locations.add(locationService.findById(id));
}
user.setLogin(userDto.getLogin());
user.setPassword(userDto.getPassword());
user.setRoles(roles);
user.setSellerCategories(categories);
user.setSellerLocations(locations);
user.setCreateDate(new Date());
return user;
}
private Date formatDate(String dateValue) {
String datePattern = "yyyy/mm/dd";
SimpleDateFormat formatter = new SimpleDateFormat(datePattern);
Date date = null;
try {
date = formatter.parse(dateValue);
} catch (ParseException e) {
// parsing failed; leave date null so the caller can decide how to handle it
}
return date;
}
}
| src/main/java/com/softserveinc/tender/facade/impl/RegistrationServiceFacadeImpl.java | package com.softserveinc.tender.facade.impl;
import com.softserveinc.tender.dto.AddressDto;
import com.softserveinc.tender.dto.CompanyDto;
import com.softserveinc.tender.dto.ProfileDto;
import com.softserveinc.tender.dto.RoleDto;
import com.softserveinc.tender.dto.TradeSphereDto;
import com.softserveinc.tender.dto.UserDto;
import com.softserveinc.tender.dto.UserRegistrationDataDto;
import com.softserveinc.tender.entity.Address;
import com.softserveinc.tender.entity.Category;
import com.softserveinc.tender.entity.Company;
import com.softserveinc.tender.entity.Location;
import com.softserveinc.tender.entity.Profile;
import com.softserveinc.tender.entity.Role;
import com.softserveinc.tender.entity.User;
import com.softserveinc.tender.facade.RegistrationServiceFacade;
import com.softserveinc.tender.service.AddressService;
import com.softserveinc.tender.service.CategoryService;
import com.softserveinc.tender.service.CompanyService;
import com.softserveinc.tender.service.LocationService;
import com.softserveinc.tender.service.ProfileService;
import com.softserveinc.tender.service.RoleService;
import com.softserveinc.tender.service.UserService;
import org.modelmapper.ModelMapper;
import org.modelmapper.TypeToken;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@Service("registrationServiceFacade")
@Transactional
public class RegistrationServiceFacadeImpl implements RegistrationServiceFacade{
@Autowired
private ModelMapper modelMapper;
@Autowired
private RoleService roleService;
@Autowired
private UserService userService;
@Autowired
private ProfileService profileService;
@Autowired
private CompanyService companyService;
@Autowired
private AddressService addressService;
@Autowired
private LocationService locationService;
@Autowired
private CategoryService categoryService;
@Override
public List<RoleDto> findUsersRoles() {
Type targetListType = new TypeToken<List<RoleDto>>(){}.getType();
return modelMapper.map(roleService.findUsersRoles(), targetListType);
}
public User saveUser(UserRegistrationDataDto userData) {
return mapUserRegistrationData(userData);
}
private User mapUserRegistrationData(UserRegistrationDataDto userData) {
User savedUser = saveUser(userData.getUserDto(), userData.getTradeSphereDto());
Address savedAddress = saveAddress(userData.getCompanyDto().getAddressDto());
Company savedCompany = saveCompany(userData.getCompanyDto(), savedAddress);
saveProfile(userData.getProfileDto(), savedCompany, savedUser);
return userService.findUserById(savedUser.getId());
}
private Company saveCompany(CompanyDto companyDto, Address address) {
return companyService.save(mapCompany(companyDto, address));
}
private Company mapCompany(CompanyDto companyDto, Address address) {
Company company = new Company();
company.setName(companyDto.getName());
company.setEmail(companyDto.getEmail());
company.setSrn(companyDto.getSrnNumber());
company.setAddress(addressService.findById(address.getId()));
return company;
}
private Profile saveProfile(ProfileDto profileDto, Company company, User user) {
return profileService.saveProfile(mapProfile(profileDto, company, user));
}
private Profile mapProfile(ProfileDto profileDto, Company company, User user) {
Profile profile = new Profile();
profile.setFirstName(profileDto.getFirstName());
profile.setLastName(profileDto.getLastName());
profile.setPerson(profileDto.getPerson());
profile.setTelephone(profileDto.getTelephone());
profile.setCompany(companyService.findById(company.getId()));
profile.setUser(userService.findUserById(user.getId()));
profile.setChecked(false);
return profile;
}
private Address saveAddress(AddressDto addressDto) {
return addressService.save(mapAddress(addressDto));
}
private Address mapAddress(AddressDto addressDto) {
Address address = new Address();
address.setCity(addressDto.getCity());
address.setStreet(addressDto.getStreet());
address.setBuildingNumber(addressDto.getBuildingNumber());
address.setPostcode(addressDto.getPostcode());
return address;
}
private User saveUser(UserDto userDto, TradeSphereDto tradeSphereDto) {
return userService.saveUser(mapUser(userDto, tradeSphereDto));
}
private User mapUser(UserDto userDto, TradeSphereDto tradeSphereDto) {
List<Role> roles = new ArrayList<>();
List<Category> categories = new ArrayList<>();
List<Location> locations = new ArrayList<>();
User user = new User();
for(Integer id : userDto.getRoles()) {
roles.add(roleService.findRoleById(id));
}
for(Integer id : tradeSphereDto.getCategories()) {
categories.add(categoryService.findCategoryById(id));
}
for(Integer id : tradeSphereDto.getLocations()) {
locations.add(locationService.findById(id));
}
user.setLogin(userDto.getLogin());
user.setPassword(userDto.getPassword());
user.setRoles(roles);
user.setSellerCategories(categories);
user.setSellerLocations(locations);
user.setCreateDate(new Date());
return user;
}
}
| fixed birthday date saving
| src/main/java/com/softserveinc/tender/facade/impl/RegistrationServiceFacadeImpl.java | fixed birthday date saving | <ide><path>rc/main/java/com/softserveinc/tender/facade/impl/RegistrationServiceFacadeImpl.java
<ide> import org.springframework.transaction.annotation.Transactional;
<ide>
<ide> import java.lang.reflect.Type;
<add>import java.text.ParseException;
<add>import java.text.SimpleDateFormat;
<ide> import java.util.ArrayList;
<ide> import java.util.Date;
<ide> import java.util.List;
<ide> profile.setLastName(profileDto.getLastName());
<ide> profile.setPerson(profileDto.getPerson());
<ide> profile.setTelephone(profileDto.getTelephone());
<add> profile.setBirthday(formatDate(profileDto.getBirthday()));
<ide> profile.setCompany(companyService.findById(company.getId()));
<ide> profile.setUser(userService.findUserById(user.getId()));
<ide> profile.setChecked(false);
<ide>
<ide> return user;
<ide> }
<add>
<add> private Date formatDate(String dateValue) {
<add> String datePattern = "yyyy/mm/dd";
<add> SimpleDateFormat formatter = new SimpleDateFormat(datePattern);
<add> Date date = null;
<add>
<add> try {
<add> date = formatter.parse(dateValue);
<add> } catch (ParseException e) {
<add> e.getMessage();
<add> }
<add> return date;
<add> }
<ide> } |
|
Java | mit | 9039bf93dd64fab91bf4ba161c926a8e7af05057 | 0 | ONSdigital/babbage,ONSdigital/babbage,ONSdigital/babbage,ONSdigital/babbage | package com.github.onsdigital.babbage.search.model;
import org.apache.commons.lang3.ArrayUtils;
/**
* Created by bren on 08/09/15.
* <p/>
* Content types.
*/
public enum ContentType {
home_page,
home_page_census,
taxonomy_landing_page,
product_page,
bulletin(1.55f),
article(1.30f),
article_download(1.30f),
timeseries(1.2f),
data_slice,
compendium_landing_page(1.30f),
compendium_chapter,
compendium_data,
static_landing_page,
static_article,
static_methodology,
static_methodology_download,
static_page,
static_qmi,
static_foi,
static_adhoc(1.25f),
dataset,
dataset_landing_page(1.35f),
timeseries_dataset,
release,
reference_tables,
chart,
table,
equation,
departments; // the departments type is indexed under the departments index, not part of the ons index which holds the usual content
//Content type boost in search results
private Float weight;
ContentType(float weight) {
this.weight = weight;
}
ContentType() {
}
public Float getWeight() {
return weight;
}
public static String[] typeNames(ContentType... contentTypes) {
String[] types = new String[0];
for (ContentType type : contentTypes) {
types = ArrayUtils.addAll(types, type.name());
}
return types;
}
}
| src/main/java/com/github/onsdigital/babbage/search/model/ContentType.java | package com.github.onsdigital.babbage.search.model;
import org.apache.commons.lang3.ArrayUtils;
/**
* Created by bren on 08/09/15.
* <p/>
* Content types.
*/
public enum ContentType {
home_page,
home_page_census,
taxonomy_landing_page,
product_page,
bulletin(1.55f),
article(1.30f),
article_download(1.30f),
timeseries(1.2f),
data_slice,
compendium_landing_page(1.30f),
compendium_chapter,
compendium_data,
static_landing_page,
static_article,
static_methodology,
static_methodology_download,
static_page,
static_qmi,
static_foi,
static_adhoc(1.25f),
dataset,
dataset_landing_page(1.35f),
timeseries_dataset,
release,
reference_tables,
chart,
table,
departments; // the departments type is indexed under the departments index, not part of the ons index which holds the usual content
//Content type boost in search results
private Float weight;
ContentType(float weight) {
this.weight = weight;
}
ContentType() {
}
public Float getWeight() {
return weight;
}
public static String[] typeNames(ContentType... contentTypes) {
String[] types = new String[0];
for (ContentType type : contentTypes) {
types = ArrayUtils.addAll(types, type.name());
}
return types;
}
}
| Add equation content type.
| src/main/java/com/github/onsdigital/babbage/search/model/ContentType.java | Add equation content type. | <ide><path>rc/main/java/com/github/onsdigital/babbage/search/model/ContentType.java
<ide> reference_tables,
<ide> chart,
<ide> table,
<add> equation,
<ide> departments; //departments type is index under departments index, not a part of ons index which has usual content
<ide>
<ide> |
|
Java | apache-2.0 | ee8e34aa9833b1e7a93f186df61b1fdcc1eb991d | 0 | brabenetz/xmlunit,xmlunit/xmlunit.net,brabenetz/xmlunit,xmlunit/xmlunit,xmlunit/xmlunit.net,xmlunit/xmlunit | /*
This file is licensed to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.xmlunit.validation;
import javax.xml.transform.Source;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.xmlunit.exceptions.XMLUnitException;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
/**
* Validator using the javax.xml.validation namespace.
*
* <p>An implementation detail of {@code
* javax.xml.validation.Validator} leaks into this class: any {@code
* xsi:schemaLocation} or {@code xsi:noSchemaLocation} attribute of
* the instance document will be ignored if any schema source has been
* set. This means you must either specify all sources or none of
* them to successfully validate instances.</p>
*/
public class JAXPValidator extends Validator {
private final String language;
private final SchemaFactory factory;
public JAXPValidator(String language) {
this(language, null);
}
public JAXPValidator(String language, SchemaFactory factory) {
this.language = language;
this.factory = factory;
}
private SchemaFactory getFactory() {
return factory == null ? SchemaFactory.newInstance(language) : factory;
}
@Override public ValidationResult validateSchema() {
ValidationHandler v = new ValidationHandler();
SchemaFactory f = getFactory();
f.setErrorHandler(v);
try {
f.newSchema(getSchemaSources());
} catch (SAXException e) {
if (e instanceof SAXParseException) {
v.error((SAXParseException) e);
} else {
throw new XMLUnitException(e);
}
} finally {
f.setErrorHandler(null);
}
return v.getResult();
}
@Override public ValidationResult validateInstance(Source s) {
Schema schema;
try {
schema = createSchema();
} catch (SAXException e) {
throw new XMLUnitException("The schema is invalid", e);
}
ValidationHandler v = new ValidationHandler();
javax.xml.validation.Validator val = schema.newValidator();
val.setErrorHandler(v);
try {
val.validate(s);
} catch (SAXException e) {
if (e instanceof SAXParseException) {
v.error((SAXParseException) e);
} else {
throw new XMLUnitException(e);
}
} catch (java.io.IOException e) {
throw new XMLUnitException(e);
}
return v.getResult();
}
private Schema createSchema() throws SAXException {
Source[] sources = getSchemaSources();
return sources.length > 0 ? getFactory().newSchema(getSchemaSources())
: getFactory().newSchema();
}
}
| src/main/java-core/org/xmlunit/validation/JAXPValidator.java | /*
This file is licensed to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.xmlunit.validation;
import javax.xml.transform.Source;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.xmlunit.exceptions.XMLUnitException;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
/**
* Validator using the javax.xml.validation namespace.
*/
public class JAXPValidator extends Validator {
private final String language;
private final SchemaFactory factory;
public JAXPValidator(String language) {
this(language, null);
}
public JAXPValidator(String language, SchemaFactory factory) {
this.language = language;
this.factory = factory;
}
private SchemaFactory getFactory() {
return factory == null ? SchemaFactory.newInstance(language) : factory;
}
@Override public ValidationResult validateSchema() {
ValidationHandler v = new ValidationHandler();
SchemaFactory f = getFactory();
f.setErrorHandler(v);
try {
f.newSchema(getSchemaSources());
} catch (SAXException e) {
if (e instanceof SAXParseException) {
v.error((SAXParseException) e);
} else {
throw new XMLUnitException(e);
}
} finally {
f.setErrorHandler(null);
}
return v.getResult();
}
@Override public ValidationResult validateInstance(Source s) {
Schema schema;
try {
schema = createSchema();
} catch (SAXException e) {
throw new XMLUnitException("The schema is invalid", e);
}
ValidationHandler v = new ValidationHandler();
javax.xml.validation.Validator val = schema.newValidator();
val.setErrorHandler(v);
try {
val.validate(s);
} catch (SAXException e) {
if (e instanceof SAXParseException) {
v.error((SAXParseException) e);
} else {
throw new XMLUnitException(e);
}
} catch (java.io.IOException e) {
throw new XMLUnitException(e);
}
return v.getResult();
}
private Schema createSchema() throws SAXException {
Source[] sources = getSchemaSources();
return sources.length > 0 ? getFactory().newSchema(getSchemaSources())
: getFactory().newSchema();
}
}
| Documentation for the root cause of https://sourceforge.net/p/xmlunit/bugs/64/
| src/main/java-core/org/xmlunit/validation/JAXPValidator.java | Documentation for the root cause of https://sourceforge.net/p/xmlunit/bugs/64/ | <ide><path>rc/main/java-core/org/xmlunit/validation/JAXPValidator.java
<ide>
<ide> /**
<ide> * Validator using the javax.xml.validation namespace.
<add> *
<add> * <p>An implementation detail of {@code
<add> * javax.xml.validation.Validator} leaks into this class: any {@code
<add> * xsi:schemaLocation} or {@code xsi:noSchemaLocation} attribute of
<add> * the instance document will be ignored if any schema source has been
<add> * set. This means you must either specify all sources or none of
<add> * them to successfully validate instances.</p>
<ide> */
<ide> public class JAXPValidator extends Validator {
<ide> private final String language; |
|
Java | apache-2.0 | 76fa32b8f149eae8d11dee093195d5c88f7d019e | 0 | gatling/gatling,gatling/gatling,gatling/gatling,gatling/gatling,gatling/gatling | /*
* Copyright 2011-2020 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.client.body.form;
import io.gatling.http.client.Param;
import io.gatling.http.client.body.RequestBody;
import io.gatling.http.client.body.RequestBodyBuilder;
import io.gatling.http.client.body.WritableContent;
import io.gatling.netty.util.StringBuilderPool;
import io.gatling.netty.util.Utf8UrlEncoder;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufUtil;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.util.List;
import static java.nio.charset.StandardCharsets.UTF_8;
public class FormUrlEncodedRequestBody extends RequestBody<List<Param>> {
public FormUrlEncodedRequestBody(List<Param> content, String contentType, Charset charset) {
super(content, contentType, charset);
}
@Override
public WritableContent build(ByteBufAllocator alloc) {
StringBuilder sb = encode();
ByteBuf bb = ByteBufUtil.writeAscii(alloc, sb);
return new WritableContent(bb, bb.readableBytes());
}
private StringBuilder encode() {
StringBuilder sb = StringBuilderPool.DEFAULT.get();
for (Param param : content) {
encodeAndAppendFormParam(sb, param.getName(), param.getValue(), charset);
}
sb.setLength(sb.length() - 1);
return sb;
}
private static void encodeAndAppendFormParam(StringBuilder sb, String name, String value, Charset charset) {
encodeAndAppendFormField(sb, name, charset);
if (value != null) {
sb.append('=');
encodeAndAppendFormField(sb, value, charset);
}
sb.append('&');
}
private static void encodeAndAppendFormField(StringBuilder sb, String field, Charset charset) {
if (charset.equals(UTF_8)) {
Utf8UrlEncoder.encodeAndAppendFormElement(sb, field);
} else {
try {
// TODO there's probably room for perf improvements
sb.append(URLEncoder.encode(field, charset.name()));
} catch (UnsupportedEncodingException e) {
// can't happen, as Charset was already resolved
}
}
}
@Override
public RequestBodyBuilder<List<Param>> newBuilder() {
return new FormUrlEncodedRequestBodyBuilder(content);
}
@Override
public byte[] getBytes() {
return encode().toString().getBytes(charset);
}
@Override
public String toString() {
return "FormUrlEncodedRequestBody{" +
"content=" + content +
", contentType=" + contentType +
", charset=" + charset +
'}';
}
}
| gatling-http-client/src/main/java/io/gatling/http/client/body/form/FormUrlEncodedRequestBody.java | /*
* Copyright 2011-2020 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.client.body.form;
import io.gatling.http.client.Param;
import io.gatling.http.client.body.RequestBody;
import io.gatling.http.client.body.RequestBodyBuilder;
import io.gatling.http.client.body.WritableContent;
import io.gatling.netty.util.StringBuilderPool;
import io.gatling.netty.util.Utf8UrlEncoder;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufUtil;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.util.List;
import static java.nio.charset.StandardCharsets.UTF_8;
public class FormUrlEncodedRequestBody extends RequestBody<List<Param>> {
public FormUrlEncodedRequestBody(List<Param> content, String contentType, Charset charset) {
super(content, contentType, charset);
}
@Override
public WritableContent build(ByteBufAllocator alloc) {
StringBuilder sb = encode();
ByteBuf bb = ByteBufUtil.writeAscii(alloc, sb);
return new WritableContent(bb, bb.readableBytes());
}
private StringBuilder encode() {
StringBuilder sb = StringBuilderPool.DEFAULT.get();
for (Param param : content) {
encodeAndAppendFormParam(sb, param.getName(), param.getValue(), charset);
}
sb.setLength(sb.length() - 1);
return sb;
}
private static void encodeAndAppendFormParam(StringBuilder sb, String name, String value, Charset charset) {
encodeAndAppendFormField(sb, name, charset);
if (value != null) {
sb.append('=');
encodeAndAppendFormField(sb, value, charset);
}
sb.append('&');
}
private static void encodeAndAppendFormField(StringBuilder sb, String field, Charset charset) {
if (charset.equals(UTF_8)) {
Utf8UrlEncoder.encodeAndAppendFormElement(sb, field);
} else {
try {
// TODO there's probably room for perf improvements
sb.append(URLEncoder.encode(field, charset.name()));
} catch (UnsupportedEncodingException e) {
// can't happen, as Charset was already resolved
}
}
}
@Override
public RequestBodyBuilder<List<Param>> newBuilder() {
return new FormUrlEncodedRequestBodyBuilder(content);
}
@Override
public byte[] getBytes() {
return encode().toString().getBytes();
}
@Override
public String toString() {
return "FormUrlEncodedRequestBody{" +
"content=" + content +
", contentType=" + contentType +
", charset=" + charset +
'}';
}
}
| nit: FormUrlEncodedRequestBody#getBytes should honor charset
| gatling-http-client/src/main/java/io/gatling/http/client/body/form/FormUrlEncodedRequestBody.java | nit: FormUrlEncodedRequestBody#getBytes should honor charset | <ide><path>atling-http-client/src/main/java/io/gatling/http/client/body/form/FormUrlEncodedRequestBody.java
<ide>
<ide> @Override
<ide> public byte[] getBytes() {
<del> return encode().toString().getBytes();
<add> return encode().toString().getBytes(charset);
<ide> }
<ide>
<ide> @Override |
|
Java | apache-2.0 | 76a25d0110306333c7c96fa0f8c6464778172b74 | 0 | giastfader/orientdb,giastfader/orientdb,giastfader/orientdb,joansmith/orientdb,joansmith/orientdb,mmacfadden/orientdb,mmacfadden/orientdb,joansmith/orientdb,mmacfadden/orientdb,mmacfadden/orientdb,joansmith/orientdb,giastfader/orientdb | /*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.graph.sql;
import com.orientechnologies.common.concur.ONeedRetryException;
import com.orientechnologies.common.types.OModifiableBoolean;
import com.orientechnologies.orient.core.command.OCommandDistributedReplicateRequest;
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.command.OCommandRequestText;
import com.orientechnologies.orient.core.command.OCommandResultListener;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.exception.OConcurrentModificationException;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.security.ORole;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandExecutorSQLRetryAbstract;
import com.orientechnologies.orient.core.sql.OCommandSQLParsingException;
import com.orientechnologies.orient.core.sql.OSQLEngine;
import com.orientechnologies.orient.core.sql.filter.OSQLFilter;
import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.impls.orient.*;
import java.util.*;
/**
* SQL DELETE EDGE command.
*
* @author Luca Garulli
*/
public class OCommandExecutorSQLDeleteEdge extends OCommandExecutorSQLRetryAbstract implements OCommandDistributedReplicateRequest,
OCommandResultListener {
public static final String NAME = "DELETE EDGE";
private static final String KEYWORD_BATCH = "BATCH";
private List<ORecordId> rids;
private String fromExpr;
private String toExpr;
private int removed = 0;
private OCommandRequest query;
private OSQLFilter compiledFilter;
private OrientGraph graph;
private String label;
private OModifiableBoolean shutdownFlag = new OModifiableBoolean();
private boolean txAlreadyBegun;
private int batch = 100;
@SuppressWarnings("unchecked")
public OCommandExecutorSQLDeleteEdge parse(final OCommandRequest iRequest) {
final OCommandRequestText textRequest = (OCommandRequestText) iRequest;
String queryText = textRequest.getText();
String originalQuery = queryText;
try {
// System.out.println("NEW PARSER FROM: " + queryText);
queryText = preParse(queryText, iRequest);
// System.out.println("NEW PARSER TO: " + queryText);
textRequest.setText(queryText);
init((OCommandRequestText) iRequest);
parserRequiredKeyword("DELETE");
parserRequiredKeyword("EDGE");
OClass clazz = null;
String where = null;
String temp = parseOptionalWord(true);
String originalTemp = null;
if (temp != null && !parserIsEnded())
originalTemp = parserText.substring(parserGetPreviousPosition(), parserGetCurrentPosition()).trim();
final OModifiableBoolean shutdownFlag = new OModifiableBoolean();
ODatabaseDocumentInternal curDb = ODatabaseRecordThreadLocal.INSTANCE.get();
final OrientGraph graph = OGraphCommandExecutorSQLFactory.getGraph(false, shutdownFlag);
try {
while (temp != null) {
if (temp.equals("FROM")) {
fromExpr = parserRequiredWord(false, "Syntax error", " =><,\r\n");
if (rids != null)
throwSyntaxErrorException("FROM '" + fromExpr + "' is not allowed when specify a RIDs (" + rids + ")");
} else if (temp.equals("TO")) {
toExpr = parserRequiredWord(false, "Syntax error", " =><,\r\n");
if (rids != null)
throwSyntaxErrorException("TO '" + toExpr + "' is not allowed when specify a RID (" + rids + ")");
} else if (temp.startsWith("#")) {
rids = new ArrayList<ORecordId>();
rids.add(new ORecordId(temp));
if (fromExpr != null || toExpr != null)
throwSyntaxErrorException("Specifying the RID " + rids + " is not allowed with FROM/TO");
} else if (temp.startsWith("[") && temp.endsWith("]")) {
temp = temp.substring(1, temp.length() - 1);
rids = new ArrayList<ORecordId>();
for (String rid : temp.split(",")) {
rid = rid.trim();
if (!rid.startsWith("#")) {
throwSyntaxErrorException("Not a valid RID: " + rid);
}
rids.add(new ORecordId(rid));
}
} else if (temp.equals(KEYWORD_WHERE)) {
if (clazz == null)
// ASSIGN DEFAULT CLASS
clazz = graph.getEdgeType(OrientEdgeType.CLASS_NAME);
where = parserGetCurrentPosition() > -1 ? " " + parserText.substring(parserGetCurrentPosition()) : "";
compiledFilter = OSQLEngine.getInstance().parseCondition(where, getContext(), KEYWORD_WHERE);
break;
} else if (temp.equals(KEYWORD_RETRY)) {
parseRetry();
} else if (temp.equals(KEYWORD_BATCH)) {
temp = parserNextWord(true);
if (temp != null)
batch = Integer.parseInt(temp);
} else if (temp.equals(KEYWORD_LIMIT)) {
temp = parserNextWord(true);
if (temp != null)
limit = Integer.parseInt(temp);
} else if (temp.length() > 0) {
// GET/CHECK CLASS NAME
label = originalTemp;
clazz = graph.getEdgeType(temp);
if (clazz == null)
throw new OCommandSQLParsingException("Class '" + temp + "' was not found");
}
temp = parseOptionalWord(true);
if (parserIsEnded())
break;
}
if (where == null)
if (limit > -1) {
where = " LIMIT " + limit;
} else {
where = "";
}
else
where = " WHERE " + where;
if (fromExpr == null && toExpr == null && rids == null)
if (clazz == null)
// DELETE ALL THE EDGES
query = graph.getRawGraph().command(new OSQLAsynchQuery<ODocument>("select from E" + where, this));
else
// DELETE EDGES OF CLASS X
query = graph.getRawGraph().command(new OSQLAsynchQuery<ODocument>("select from " + clazz.getName() + where, this));
return this;
} finally {
if (shutdownFlag.getValue())
graph.shutdown(false);
ODatabaseRecordThreadLocal.INSTANCE.set(curDb);
}
} finally {
textRequest.setText(originalQuery);
}
}
/**
* Execute the command and return the ODocument object created.
*/
public Object execute(final Map<Object, Object> iArgs) {
if (fromExpr == null && toExpr == null && rids == null && query == null && compiledFilter == null)
throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
for (int r = 0; r < retry; ++r) {
try {
txAlreadyBegun = getDatabase().getTransaction().isActive();
graph = OGraphCommandExecutorSQLFactory.getGraph(true, shutdownFlag);
if (rids != null) {
// REMOVE PUNCTUAL RID
OGraphCommandExecutorSQLFactory.runInTx(graph, new OGraphCommandExecutorSQLFactory.GraphCallBack<Object>() {
@Override
public Object call(OrientBaseGraph graph) {
for (ORecordId rid : rids) {
final OrientEdge e = graph.getEdge(rid);
if (e != null) {
e.remove();
removed++;
}
}
return null;
}
});
// CLOSE PENDING TX
end();
} else {
// MULTIPLE EDGES
final Set<OrientEdge> edges = new HashSet<OrientEdge>();
if (query == null) {
OGraphCommandExecutorSQLFactory.runInTx(graph, new OGraphCommandExecutorSQLFactory.GraphCallBack<Object>() {
@Override
public Object call(OrientBaseGraph graph) {
Set<OIdentifiable> fromIds = null;
if (fromExpr != null)
fromIds = OSQLEngine.getInstance().parseRIDTarget(graph.getRawGraph(), fromExpr, context, iArgs);
Set<OIdentifiable> toIds = null;
if (toExpr != null)
toIds = OSQLEngine.getInstance().parseRIDTarget(graph.getRawGraph(), toExpr, context, iArgs);
if (fromIds != null && toIds != null) {
// REMOVE ALL THE EDGES BETWEEN VERTICES
for (OIdentifiable fromId : fromIds) {
final OrientVertex v = graph.getVertex(fromId);
if (v != null)
for (Edge e : v.getEdges(Direction.OUT)) {
if (label != null && !label.equals(e.getLabel()))
continue;
final OIdentifiable inV = ((OrientEdge) e).getInVertex();
if (inV != null && toIds.contains(inV.getIdentity()))
edges.add((OrientEdge) e);
}
}
} else if (fromIds != null) {
// REMOVE ALL THE EDGES THAT START FROM A VERTEXES
for (OIdentifiable fromId : fromIds) {
final OrientVertex v = graph.getVertex(fromId);
if (v != null) {
for (Edge e : v.getEdges(Direction.OUT)) {
if (label != null && !label.equals(e.getLabel()))
continue;
edges.add((OrientEdge) e);
}
}
}
} else if (toIds != null) {
// REMOVE ALL THE EDGES THAT ARRIVE TO A VERTEXES
for (OIdentifiable toId : toIds) {
final OrientVertex v = graph.getVertex(toId);
if (v != null) {
for (Edge e : v.getEdges(Direction.IN)) {
if (label != null && !label.equals(e.getLabel()))
continue;
edges.add((OrientEdge) e);
}
}
}
} else
throw new OCommandExecutionException("Invalid target: " + toIds);
if (compiledFilter != null) {
// ADDITIONAL FILTERING
for (Iterator<OrientEdge> it = edges.iterator(); it.hasNext();) {
final OrientEdge edge = it.next();
if (!(Boolean) compiledFilter.evaluate(edge.getRecord(), null, context))
it.remove();
}
}
// DELETE THE FOUND EDGES
removed = edges.size();
for (OrientEdge edge : edges)
edge.remove();
return null;
}
});
// CLOSE PENDING TX
end();
} else {
query.setContext(getContext());
query.execute(iArgs);
}
}
break;
} catch (OConcurrentModificationException e) {
if (r + 1 >= retry)
// NO RETRY; PROPAGATE THE EXCEPTION
throw e;
// RETRY?
if (wait > 0)
try {
Thread.sleep(wait);
} catch (InterruptedException e1) {
}
}
}
return removed;
}
/**
* Delete the current edge.
*/
public boolean result(final Object iRecord) {
final OIdentifiable id = (OIdentifiable) iRecord;
if (compiledFilter != null) {
// ADDITIONAL FILTERING
if (!(Boolean) compiledFilter.evaluate(id.getRecord(), null, context))
return true;
}
if (id.getIdentity().isValid()) {
final OrientEdge e = graph.getEdge(id);
for (int retry = 0; retry < 20; ++retry) {
try {
if (e != null) {
e.remove();
if (!txAlreadyBegun && batch > 0 && (removed + 1) % batch == 0) {
graph.commit();
graph.begin();
}
removed++;
}
// OK
break;
} catch (ONeedRetryException ex) {
getDatabase().getLocalCache().invalidate();
e.reload();
}
}
}
return true;
}
@Override
public String getSyntax() {
return "DELETE EDGE <rid>|FROM <rid>|TO <rid>|<[<class>] [WHERE <conditions>]> [BATCH <batch-size>]";
}
@Override
public void end() {
if (graph != null) {
if (!txAlreadyBegun) {
graph.commit();
if (shutdownFlag.getValue())
graph.shutdown(false);
}
}
}
@Override
public int getSecurityOperationType() {
return ORole.PERMISSION_DELETE;
}
@Override
public QUORUM_TYPE getQuorumType() {
return QUORUM_TYPE.WRITE;
}
public DISTRIBUTED_RESULT_MGMT getDistributedResultManagement() {
return getDistributedExecutionMode() == DISTRIBUTED_EXECUTION_MODE.LOCAL ? DISTRIBUTED_RESULT_MGMT.CHECK_FOR_EQUALS
: DISTRIBUTED_RESULT_MGMT.MERGE;
}
@Override
public DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
return query != null && !getDatabase().getTransaction().isActive() ? DISTRIBUTED_EXECUTION_MODE.REPLICATE
: DISTRIBUTED_EXECUTION_MODE.LOCAL;
}
}
| graphdb/src/main/java/com/orientechnologies/orient/graph/sql/OCommandExecutorSQLDeleteEdge.java | /*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.graph.sql;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.ONeedRetryException;
import com.orientechnologies.common.types.OModifiableBoolean;
import com.orientechnologies.orient.core.command.OCommandDistributedReplicateRequest;
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.command.OCommandRequestText;
import com.orientechnologies.orient.core.command.OCommandResultListener;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.exception.OConcurrentModificationException;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.security.ORole;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandExecutorSQLRetryAbstract;
import com.orientechnologies.orient.core.sql.OCommandSQLParsingException;
import com.orientechnologies.orient.core.sql.OSQLEngine;
import com.orientechnologies.orient.core.sql.filter.OSQLFilter;
import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.impls.orient.OrientBaseGraph;
import com.tinkerpop.blueprints.impls.orient.OrientEdge;
import com.tinkerpop.blueprints.impls.orient.OrientEdgeType;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import com.tinkerpop.blueprints.impls.orient.OrientVertex;
/**
* SQL DELETE EDGE command.
*
* @author Luca Garulli
*/
public class OCommandExecutorSQLDeleteEdge extends OCommandExecutorSQLRetryAbstract implements OCommandDistributedReplicateRequest,
OCommandResultListener {
public static final String NAME = "DELETE EDGE";
private static final String KEYWORD_BATCH = "BATCH";
private List<ORecordId> rids;
private String fromExpr;
private String toExpr;
private int removed = 0;
private OCommandRequest query;
private OSQLFilter compiledFilter;
private OrientGraph graph;
private String label;
private OModifiableBoolean shutdownFlag = new OModifiableBoolean();
private boolean txAlreadyBegun;
private int batch = 100;
@SuppressWarnings("unchecked")
public OCommandExecutorSQLDeleteEdge parse(final OCommandRequest iRequest) {
final OCommandRequestText textRequest = (OCommandRequestText) iRequest;
String queryText = textRequest.getText();
String originalQuery = queryText;
try {
// System.out.println("NEW PARSER FROM: " + queryText);
queryText = preParse(queryText, iRequest);
// System.out.println("NEW PARSER TO: " + queryText);
textRequest.setText(queryText);
init((OCommandRequestText) iRequest);
parserRequiredKeyword("DELETE");
parserRequiredKeyword("EDGE");
OClass clazz = null;
String where = null;
String temp = parseOptionalWord(true);
String originalTemp = null;
if (temp != null && !parserIsEnded())
originalTemp = parserText.substring(parserGetPreviousPosition(), parserGetCurrentPosition()).trim();
final OModifiableBoolean shutdownFlag = new OModifiableBoolean();
ODatabaseDocumentInternal curDb = ODatabaseRecordThreadLocal.INSTANCE.get();
final OrientGraph graph = OGraphCommandExecutorSQLFactory.getGraph(false, shutdownFlag);
try {
while (temp != null) {
if (temp.equals("FROM")) {
fromExpr = parserRequiredWord(false, "Syntax error", " =><,\r\n");
if (rids != null)
throwSyntaxErrorException("FROM '" + fromExpr + "' is not allowed when specify a RIDs (" + rids + ")");
} else if (temp.equals("TO")) {
toExpr = parserRequiredWord(false, "Syntax error", " =><,\r\n");
if (rids != null)
throwSyntaxErrorException("TO '" + toExpr + "' is not allowed when specify a RID (" + rids + ")");
} else if (temp.startsWith("#")) {
rids = new ArrayList<ORecordId>();
rids.add(new ORecordId(temp));
if (fromExpr != null || toExpr != null)
throwSyntaxErrorException("Specifying the RID " + rids + " is not allowed with FROM/TO");
} else if (temp.startsWith("[") && temp.endsWith("]")) {
temp = temp.substring(1, temp.length() - 1);
rids = new ArrayList<ORecordId>();
for (String rid : temp.split(",")) {
rid = rid.trim();
if (!rid.startsWith("#")) {
throwSyntaxErrorException("Not a valid RID: " + rid);
}
rids.add(new ORecordId(rid));
}
} else if (temp.equals(KEYWORD_WHERE)) {
if (clazz == null)
// ASSIGN DEFAULT CLASS
clazz = graph.getEdgeType(OrientEdgeType.CLASS_NAME);
where = parserGetCurrentPosition() > -1 ? " " + parserText.substring(parserGetCurrentPosition()) : "";
compiledFilter = OSQLEngine.getInstance().parseCondition(where, getContext(), KEYWORD_WHERE);
break;
} else if (temp.equals(KEYWORD_RETRY)) {
parseRetry();
} else if (temp.equals(KEYWORD_BATCH)) {
temp = parserNextWord(true);
if (temp != null)
batch = Integer.parseInt(temp);
} else if (temp.length() > 0) {
// GET/CHECK CLASS NAME
label = originalTemp;
clazz = graph.getEdgeType(temp);
if (clazz == null)
throw new OCommandSQLParsingException("Class '" + temp + "' was not found");
}
temp = parseOptionalWord(true);
if (parserIsEnded())
break;
}
if (where == null)
where = "";
else
where = " WHERE " + where;
if (fromExpr == null && toExpr == null && rids == null)
if (clazz == null)
// DELETE ALL THE EDGES
query = graph.getRawGraph().command(new OSQLAsynchQuery<ODocument>("select from E" + where, this));
else
// DELETE EDGES OF CLASS X
query = graph.getRawGraph().command(new OSQLAsynchQuery<ODocument>("select from " + clazz.getName() + where, this));
return this;
} finally {
if (shutdownFlag.getValue())
graph.shutdown(false);
ODatabaseRecordThreadLocal.INSTANCE.set(curDb);
}
} finally {
textRequest.setText(originalQuery);
}
}
/**
* Execute the command and return the ODocument object created.
*/
public Object execute(final Map<Object, Object> iArgs) {
if (fromExpr == null && toExpr == null && rids == null && query == null && compiledFilter == null)
throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
for (int r = 0; r < retry; ++r) {
try {
txAlreadyBegun = getDatabase().getTransaction().isActive();
graph = OGraphCommandExecutorSQLFactory.getGraph(true, shutdownFlag);
if (rids != null) {
// REMOVE PUNCTUAL RID
OGraphCommandExecutorSQLFactory.runInTx(graph, new OGraphCommandExecutorSQLFactory.GraphCallBack<Object>() {
@Override
public Object call(OrientBaseGraph graph) {
for (ORecordId rid : rids) {
final OrientEdge e = graph.getEdge(rid);
if (e != null) {
e.remove();
removed++;
}
}
return null;
}
});
// CLOSE PENDING TX
end();
} else {
// MULTIPLE EDGES
final Set<OrientEdge> edges = new HashSet<OrientEdge>();
if (query == null) {
OGraphCommandExecutorSQLFactory.runInTx(graph, new OGraphCommandExecutorSQLFactory.GraphCallBack<Object>() {
@Override
public Object call(OrientBaseGraph graph) {
Set<OIdentifiable> fromIds = null;
if (fromExpr != null)
fromIds = OSQLEngine.getInstance().parseRIDTarget(graph.getRawGraph(), fromExpr, context, iArgs);
Set<OIdentifiable> toIds = null;
if (toExpr != null)
toIds = OSQLEngine.getInstance().parseRIDTarget(graph.getRawGraph(), toExpr, context, iArgs);
if (fromIds != null && toIds != null) {
// REMOVE ALL THE EDGES BETWEEN VERTICES
for (OIdentifiable fromId : fromIds) {
final OrientVertex v = graph.getVertex(fromId);
if (v != null)
for (Edge e : v.getEdges(Direction.OUT)) {
if (label != null && !label.equals(e.getLabel()))
continue;
final OIdentifiable inV = ((OrientEdge) e).getInVertex();
if (inV != null && toIds.contains(inV.getIdentity()))
edges.add((OrientEdge) e);
}
}
} else if (fromIds != null) {
// REMOVE ALL THE EDGES THAT START FROM A VERTEXES
for (OIdentifiable fromId : fromIds) {
final OrientVertex v = graph.getVertex(fromId);
if (v != null) {
for (Edge e : v.getEdges(Direction.OUT)) {
if (label != null && !label.equals(e.getLabel()))
continue;
edges.add((OrientEdge) e);
}
}
}
} else if (toIds != null) {
// REMOVE ALL THE EDGES THAT ARRIVE TO A VERTEXES
for (OIdentifiable toId : toIds) {
final OrientVertex v = graph.getVertex(toId);
if (v != null) {
for (Edge e : v.getEdges(Direction.IN)) {
if (label != null && !label.equals(e.getLabel()))
continue;
edges.add((OrientEdge) e);
}
}
}
} else
throw new OCommandExecutionException("Invalid target: " + toIds);
if (compiledFilter != null) {
// ADDITIONAL FILTERING
for (Iterator<OrientEdge> it = edges.iterator(); it.hasNext();) {
final OrientEdge edge = it.next();
if (!(Boolean) compiledFilter.evaluate(edge.getRecord(), null, context))
it.remove();
}
}
// DELETE THE FOUND EDGES
removed = edges.size();
for (OrientEdge edge : edges)
edge.remove();
return null;
}
});
// CLOSE PENDING TX
end();
} else {
query.setContext(getContext());
query.execute(iArgs);
}
}
break;
} catch (OConcurrentModificationException e) {
if (r + 1 >= retry)
// NO RETRY; PROPAGATE THE EXCEPTION
throw e;
// RETRY?
if (wait > 0)
try {
Thread.sleep(wait);
} catch (InterruptedException e1) {
}
}
}
return removed;
}
/**
* Delete the current edge.
*/
public boolean result(final Object iRecord) {
final OIdentifiable id = (OIdentifiable) iRecord;
if (compiledFilter != null) {
// ADDITIONAL FILTERING
if (!(Boolean) compiledFilter.evaluate(id.getRecord(), null, context))
return true;
}
if (id.getIdentity().isValid()) {
final OrientEdge e = graph.getEdge(id);
for (int retry = 0; retry < 20; ++retry) {
try {
if (e != null) {
e.remove();
if (!txAlreadyBegun && batch > 0 && (removed + 1) % batch == 0) {
graph.commit();
graph.begin();
}
removed++;
}
// OK
break;
} catch (ONeedRetryException ex) {
getDatabase().getLocalCache().invalidate();
e.reload();
}
}
}
return true;
}
@Override
public String getSyntax() {
return "DELETE EDGE <rid>|FROM <rid>|TO <rid>|<[<class>] [WHERE <conditions>]> [BATCH <batch-size>]";
}
@Override
public void end() {
if (graph != null) {
if (!txAlreadyBegun) {
graph.commit();
if (shutdownFlag.getValue())
graph.shutdown(false);
}
}
}
@Override
public int getSecurityOperationType() {
return ORole.PERMISSION_DELETE;
}
@Override
public QUORUM_TYPE getQuorumType() {
return QUORUM_TYPE.WRITE;
}
public DISTRIBUTED_RESULT_MGMT getDistributedResultManagement() {
return getDistributedExecutionMode() == DISTRIBUTED_EXECUTION_MODE.LOCAL ? DISTRIBUTED_RESULT_MGMT.CHECK_FOR_EQUALS
: DISTRIBUTED_RESULT_MGMT.MERGE;
}
@Override
public DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
return query != null && !getDatabase().getTransaction().isActive() ? DISTRIBUTED_EXECUTION_MODE.REPLICATE
: DISTRIBUTED_EXECUTION_MODE.LOCAL;
}
}
| fixed DELETE EDGE with LIMIT - issue #4523
| graphdb/src/main/java/com/orientechnologies/orient/graph/sql/OCommandExecutorSQLDeleteEdge.java | fixed DELETE EDGE with LIMIT - issue #4523 | <ide><path>raphdb/src/main/java/com/orientechnologies/orient/graph/sql/OCommandExecutorSQLDeleteEdge.java
<ide> *
<ide> */
<ide> package com.orientechnologies.orient.graph.sql;
<del>
<del>import java.util.ArrayList;
<del>import java.util.HashSet;
<del>import java.util.Iterator;
<del>import java.util.List;
<del>import java.util.Map;
<del>import java.util.Set;
<ide>
<ide> import com.orientechnologies.common.concur.ONeedRetryException;
<ide> import com.orientechnologies.common.types.OModifiableBoolean;
<ide> import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery;
<ide> import com.tinkerpop.blueprints.Direction;
<ide> import com.tinkerpop.blueprints.Edge;
<del>import com.tinkerpop.blueprints.impls.orient.OrientBaseGraph;
<del>import com.tinkerpop.blueprints.impls.orient.OrientEdge;
<del>import com.tinkerpop.blueprints.impls.orient.OrientEdgeType;
<del>import com.tinkerpop.blueprints.impls.orient.OrientGraph;
<del>import com.tinkerpop.blueprints.impls.orient.OrientVertex;
<add>import com.tinkerpop.blueprints.impls.orient.*;
<add>
<add>import java.util.*;
<ide>
<ide> /**
<ide> * SQL DELETE EDGE command.
<ide> if (temp != null)
<ide> batch = Integer.parseInt(temp);
<ide>
<add> } else if (temp.equals(KEYWORD_LIMIT)) {
<add> temp = parserNextWord(true);
<add> if (temp != null)
<add> limit = Integer.parseInt(temp);
<add>
<ide> } else if (temp.length() > 0) {
<ide> // GET/CHECK CLASS NAME
<ide> label = originalTemp;
<ide> }
<ide>
<ide> if (where == null)
<del> where = "";
<add> if (limit > -1) {
<add> where = " LIMIT " + limit;
<add> } else {
<add> where = "";
<add> }
<ide> else
<ide> where = " WHERE " + where;
<ide> |
|
Java | mit | 3bfaa37b04810c88a17f6883b0b68239bec1f6fe | 0 | Mellich/JukePi,Mellich/JukePi | package windows;
import util.TextFieldListener;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.MouseEvent;
import javax.swing.ButtonGroup;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.JViewport;
import javax.swing.table.JTableHeader;
import client.serverconnection.ServerConnection;
import client.serverconnection.Song;
import connection.Collector;
/**
* The Main {@link Window}, that contains information transmitted by the Server, this Client
* is connected to.
* @author Haeldeus
* @version 1.3
*/
public class MainWindow extends Window {
/**
* The {@link Collector}, that will perform Actions with extern needed information.
*/
private final Collector collector;
/**
* The TextField that contains the Link.
* @see JTextField
*/
private JTextField txtLink;
/**
* The Label that displays possible Messages.
* @see JLabel
*/
private JLabel lblFail;
/**
* The Frame, this Screen displays.
* @see JFrame
*/
private JFrame frame;
/**
* The {@link ServerConnection}, that will send the Messages.
*/
private final ServerConnection wrapper;
/**
* The Gaplist, that contains all {@link Song}s in the Gaplist.
*/
private Song[] gaplist;
/**
* The Wishlist, that contains all {@link Song}s in the Wishlist.
*/
private Song[] wishlist;
/**
* The Button, that can be pushed to pause/resume a Track.
* @see JButton
*/
private JButton btnPlayPause;
/**
* The Label, that will display the Name of the current Gaplist.
* @see JLabel
*/
private JLabel lblGaplistName;
/**
* The Label, that will display the Name of the current Track.
* @see JLabel
*/
private JLabel lblPlayingTrack;
/**
* The Label, that will display the Name of the next Track.
* @see JLabel
*/
private JLabel lblTrackNext;
/**
* The Label, that will display the number of Tracks in the Gaplist.
* @see JLabel
*/
private JLabel lblNoGaplist;
/**
* The Label, that will display the number of Tracks in the Wishlist.
* @see JLabel
*/
private JLabel lblNoWishlist;
/**
* The ScrollPane, that contains the old Wishlist-Table. Has to be stored to be able to
* keep the table updated.
* @see JScrollPane
*/
private JScrollPane oldPane;
/**
* The ScrollPane, that contains the old Gaplist-Table. Has to be stored to be able to
* keep the table updated.
* @see JScrollPane
*/
private JScrollPane oldGaplistPane;
/**
* The ScrollPane, that contains the old Saved-Gaplists-Table. Has to be stored to be able
* to keep the table updated.
* @see JScrollPane
*
*/
private JScrollPane oldSavedGaplistPane;
/**
* The Gaplists saved on the Server.
*/
private String[] gaplists;
/**
* The ScrollPane, that contains the old Content-Table. Has to be stored to be able to
* keep the table updated.
* @see JScrollPane
*/
private JScrollPane oldContentPane;
/**
* The Icon, that will be displayed instead of "Pause" as a String.
*/
private ImageIcon playIcon = new ImageIcon("pause.png");
/**
* The Icon, that will be displayed instead of "Play" as a String.
*/
private ImageIcon pauseIcon = new ImageIcon("pause.png");
/**
* The Constructor for the Main-Screen. Will set the parameters to their belonging
* variables.
* @param collector The {@link Collector}, that will perform Actions with extern needed
* information.
* @param frame The Frame, this Screen will display.
* @param wrapper The {@link ServerConnection}, that will send the Messages.
* @param gaplist The Gaplist as an Array of {@link Song}s.
* @param wishlist The Wishlist as an Array of {@link Song}s.
* @since 1.0
*/
public MainWindow(Collector collector, JFrame frame, ServerConnection wrapper, Song[] gaplist, Song[] wishlist) {
this.collector = collector;
this.frame = frame;
frame.getContentPane().removeAll();
this.wrapper = wrapper;
this.gaplist = gaplist;
this.wishlist = wishlist;
}
@Override
public void show() {
constructFrame();
frame.setVisible(true);
}
@Override
public void close() {
frame.setVisible(false);
}
/**
* Sets the IP and Port of the Server, the Client is connected to, so the Title of the
* Frame can display it.
* @param ip The IP of the Server, the Client is connected to.
* @param port The Port of the Server, the Client is connected to.
* @see JFrame#setTitle(String)
* @since 1.0
*/
public void setIpAndPort(String ip, int port) {
frame.setTitle("JukePi - "+ip+":"+port);
}
@Override
public void showFail(String text) {
new util.ShowLabelThread(lblFail, frame, text).start();
}
/**
* Skips the current Song.
* @see ServerConnection#skip(ResponseListener)
* @since 1.0
*/
private void skip() {
wrapper.skip((String[] s) -> { if (s[0].equals("true"))
showFail("Skipped Track successfully!");
else
showFail("Couldn't skip the Track!");
});
}
/**
* Messages the Server, that the Play/Pause-Button was pressed.
* @see ServerConnection#pauseResume(ResponseListener)
* @since 1.0
*/
private void pressPause() {
wrapper.pauseResume((String[] s) -> { if (s[0].equals("true"))
wrapper.getCurrentPlaybackStatus((String[] st) -> { if (st[0].equals("false"))
showFail("Paused the Track successfully!");
else
showFail("Resumed the Track successfully!");
});
else
wrapper.getCurrentPlaybackStatus((String[] str) -> { if (str[0].equals("false"))
showFail("Couldn't resume the Track!");
else
showFail("Couldn't pause the Track!");
});
});
}
/**
* Seeks 30 seconds either forward or backward.
* @param forward Determines, whether the Server should seek forward({@code true}) or
* backward({@code false}).
* @see ServerConnection#seekForward(ResponseListener)
* @see ServerConnection#seekBackward(ResponseListener)
* @since 1.0
*/
private void seek(boolean forward) {
if (forward)
wrapper.seekForward((String[] s) -> { if (s[0].equals("true"))
showFail("Successfully sought forward!");
else
showFail("Couldn't seek forward!");
});
else
wrapper.seekBackward((String[] s) -> { if (s[0].equals("true"))
showFail("Successfully sought backwards!");
else
showFail("Couldn't seek backwards!");
});
}
/**
* Adds the given Link to a List, either the Gap- or the Wishlist.
* @param link The Link to the Song.
* @param toWishlist Determines, whether the Song should be added to the Wishlist
* ({@code true}) or to the Gaplist ({@code false}).
* @param inFront Determines, whether the Track should be added in Front of the List
* ({@code true}) or at the the End of the List ({@code false}).
* @param textfield The TextField, that contains the Link.
* @see ServerConnection#addToList(ResponseListener, String, boolean, boolean)
* @since 1.0
*/
private void add(String link, boolean toWishlist , boolean inFront, JTextField textfield) {
if (!link.isEmpty()) {
showFail("Pending Server...");
wrapper.addToList((String[] s) -> { if (s[0].equals("true"))
showFail("Track added!");
else
showFail("Couldn't add the Track.");
textfield.setText("Insert a Link here");
},
link, toWishlist, !inFront);
}
else {
showFail("No valid link!");
textfield.setText("Insert a Link here");
}
}
/**
* Sets the Gaplist to the given List and updates the Gaplist-Model
* @param gaplist The new Gaplist.
* @since 1.0
*/
public void setGaplist(Song[] gaplist) {
this.gaplist = gaplist;
lblNoGaplist.setText(""+gaplist.length);
createGaplistTable();
}
/**
* Sets the Wishlist to the given List and updates the Wishlist-Table.
* @param wishlist The new Wishlist.
* @since 1.0
*/
public void setWishlist(Song[] wishlist) {
this.wishlist = wishlist;
lblNoWishlist.setText(""+wishlist.length);
createWishlistTable();
setNextTrack();
}
/**
* Sets the Gaplists to the given List and updates the Saved-Gaplists-Table.
* @param gaplists The Gaplists on the Server.
* @since 1.2
*/
public void setGaplists(String[] gaplists) {
this.gaplists = gaplists;
createSavedGaplistsTable();
setNextTrack();
}
/**
* Moves the Song at the given index upwards in the Gaplist.
* @param index The index of the Track to be moved.
* @param list The List, that contains the Gaplist-Model.
* @see ServerConnection#setGapListTrackUp(ResponseListener, long)
* @since 1.0
*/
private void moveTrackUp(int index) {
if (index >=0)
wrapper.setGapListTrackUp((String[] s)-> { if (s[0].equals("true")) {
showFail("Moved Track up.");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index-1);
}
else {
showFail("Couldn't move Track up.");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index);
}
}, gaplist[index].getTrackID());
}
/**
* Moves the Song at the given index downwards in the Gaplist.
* @param index The index of the Track to be moved.
* @param list The List, that contains the Gaplist-Model.
* @see ServerConnection#setGapListTrackDown(ResponseListener, long)
* @since 1.0
*/
private void moveTrackDown(int index) {
if (index >= 0)
wrapper.setGapListTrackDown((String[] s) -> { if (s[0].equals("true")) {
showFail("Moved Track down.");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index+1);
}
else {
showFail("Couldn't move Track down");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index);
}
}, gaplist[index].getTrackID());
}
/**
* Deletes the Song at the given index from the Gaplist.
* @param index The index of the Song to be deleted.
* @param list The List, that contains the Gaplist-Model.
* @see ServerConnection#deleteFromList(Song)
* @since 1.0
*/
private void deleteTrack(int index, JScrollPane list) {
if (index >= 0) {
if (wrapper.deleteFromList(gaplist[index]))
showFail("Deleted the Track from the Gaplist");
else
showFail("Couldn't delete the Track from the Gaplist");
try{Thread.sleep(100);} catch (Exception e) {}
setSelectedGaplistIndex(index);
}
}
/**
* Saves the current Gaplist on the Server.
* @see ServerConnection#saveGapList(ResponseListener)
* @since 1.0
*/
private void saveGaplist() {
wrapper.saveGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Saved Gaplist.");
else
showFail("Couldn't save the Gaplist.");
});
}
/**
* Loads the Gaplist with the given Name
* @param name The Name of the Gaplist to be loaded.
* @since 1.2
*/
private void loadGaplist(String name) {
wrapper.switchToGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Loaded Gaplist.");
else
showFail("Couldn't load the Gaplist.");
}, name);
}
/**
* Shows the Content of the Gaplist with the given Name.
* @param name The Name of the Gaplist, which Content will be shown.
* @since 1.2
*/
private void showGaplist(String name) {
wrapper.getTitleFromGapList((String[] s) -> {createContentTable(s);}, name);
}
/**
* Will be executed, when a Song was paused or resumed on the Server.
* @param isPlaying Determines, if the Song is now playing ({@code true}) or paused
* ({@code false}).
* @since 1.0
*/
public void pauseResume(boolean isPlaying) {
if (isPlaying) {
btnPlayPause.setIcon(pauseIcon);
btnPlayPause.setToolTipText("Click here to pause the Track.");
}
else {
btnPlayPause.setIcon(playIcon);
btnPlayPause.setToolTipText("Click here to resume the Track.");
}
}
/**
* Will be executed, when an other Gaplist was loaded on the Server.
* @param gapListName The Name of the new Gaplist.
* @since 1.0
*/
public void gaplistChanged(String gapListName) {
lblGaplistName.setText("Gaplist - " + gapListName);
}
/**
* Sets the Text of the PlayingTrackLabel to the given title.
* @param title The title of the song, that is now playing.
* @since 1.2
*/
public void setNowPlaying(String title) {
lblPlayingTrack.setText(title);
}
/**
* Sets the Text of the NextTrackLabel to the given title.
* @param title The title of the next Song.
* @since 1.0
*/
public void setNextTrack() {
if (wishlist.length == 0)
if (gaplist.length == 0)
lblTrackNext.setText("NOTHING");
else
lblTrackNext.setText(gaplist[0].getName());
else
lblTrackNext.setText(wishlist[0].getName());
}
/**
* Creates the Table, that displays the Wishlist and the Votes for each Song in it.
* @since 1.0
*/
private void createWishlistTable() {
if(oldPane != null)
frame.getContentPane().remove(oldPane);
String[] columns = {"Song:", "Votes:"};
String[][] data = new String[wishlist.length][2];
for (int i = 0; i < wishlist.length; i++) {
data[i][0] = wishlist[i].getName();
data[i][1] = ""+wishlist[i].getVotes();
}
JTable table = new JTable(data, columns) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The ToolTips for the TableHeaders.
*/
private String [] columnToolTips = {"The Name of the Song", "The Votes for this Song"};
/**
* Returns the ToolTip for the Cell at the given Position of the Cursor.
* @param e The MouseEvent.
* @return The ToolTip for the Cell at the Cursor's Position.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the given Position is editable.
* @param row The row-index of the Cell.
* @param column The column-index of the Cell.
* @return false by default, as these Cells shouldn't be editable.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return the new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the given Cursor's Position.
* @param e The MouseEvent.
* @return the ToolTip for the column at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
table.getColumnModel().getColumn(0).setMinWidth(210);
JScrollPane wishlistPane = new JScrollPane(table);
wishlistPane.setBounds(320,328,250,102);
frame.getContentPane().add(wishlistPane);
oldPane = wishlistPane;
}
/**
* Creates the Table, that contains the Gaplist.
* @since 1.1
*/
private synchronized void createGaplistTable() {
if (oldGaplistPane != null)
frame.getContentPane().remove(oldGaplistPane);
String[] columns = {"Gaplist:"};
String[][] data = new String[gaplist.length][1];
for (int i = 0; i < gaplist.length; i++)
data[i][0] = gaplist[i].getName();
JTable table = new JTable(data, columns) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The ToolTip for the column.
*/
private String [] columnToolTips = {"The Name of the Song in the Gaplist"};
/**
* Returns the ToolTip for the Cell at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip for the Cell at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the given index is editable.
* @param row The row-index of the Cell.
* @param column The column-index of the Cell.
* @return false by default, as these Cells shouldn't be editable.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return The new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip for the given column.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
JScrollPane gaplistPane = new JScrollPane(table);
gaplistPane.setBounds(10, 328, 250, 102);
frame.getContentPane().add(gaplistPane);
oldGaplistPane = gaplistPane;
}
/**
* Creates a Table with all saved Gaplists in it.
* @since 1.2
*/
private synchronized void createSavedGaplistsTable() {
if (oldSavedGaplistPane != null)
frame.getContentPane().remove(oldSavedGaplistPane);
String[] columns = {"Gaplists:"};
String[][] data = new String[gaplists.length][1];
for (int i = 0; i < gaplists.length; i++)
data[i][0] = gaplists[i];
JTable table = new JTable(data, columns) {
/**
* The serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The Tooltip of the column.
*/
private String [] columnToolTips = {"The Name of the Gaplist"};
/**
* Returns the ToolTip for the Cell at the Position of the Cursor.
* @param e The MouseEvent.
* @return The ToolTip for the Cell at the Cursor's Position.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the Row and column is editable.
* @param row The row index of the Cell.
* @param column The column index of the Cell.
* @return false as default value, since these Cells shouldn't be edited.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return The new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the Cursor's Position
* @param e The MouseEvent.
* @return The ToolTip for the column at the given Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
JScrollPane gaplistsPane = new JScrollPane(table);
gaplistsPane.setBounds(10, 528, 248, 102);
frame.getContentPane().add(gaplistsPane);
oldSavedGaplistPane = gaplistsPane;
}
/**
* Creates the Table with the Content of the to be shown Gaplist. If the Content is null,
* an empty table will be build with nothing but the header in it.
* @param content The Content of the Gaplist, that should be shown.
* @since 1.2
*/
private synchronized void createContentTable(String[] content) {
if (oldContentPane != null)
frame.getContentPane().remove(oldContentPane);
String[] columns = {"Content:"};
String[][] data = new String[0][1];
if (content != null) {
data = new String[content.length][1];
for (int i = 0; i < content.length; i++)
data[i][0] = content[i];
}
JTable table = new JTable(data, columns) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The ToolTip for the column.
*/
private String [] columnToolTips = {"The Name of the Song in the selected Gaplist."};
/**
* Returns the ToolTip of the Cell at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip of the Cell at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the given index is editable.
* @param row The row-Index.
* @param column The column-Index.
* @return false by default, as these Cells shouldn't be editable.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return The new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip for the column at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
JScrollPane contentPane = new JScrollPane(table);
contentPane.setBounds(320, 528, 248, 102);
frame.getContentPane().add(contentPane);
oldContentPane = contentPane;
}
/**
* Sets the SelectedIndex of gaplistList to the given index.
* @param index The index of the new Selection.
* @since 1.1
*/
private void setSelectedGaplistIndex(int index) {
if (index >= 0) {
try {
((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).setRowSelectionInterval(index, index);
}
catch (IllegalArgumentException iae) {
((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).setRowSelectionInterval(index-1, index-1);
}
}
}
/**
* Removes the Gaplist with the given Name from the Server.
* @param name The Name of the Gaplist to be removed.
* @since 1.2
*/
private void removeGaplist(String name) {
wrapper.deleteGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Removed the Gaplist.");
else
showFail("Couldn't remove the Gaplist");
}, name);
}
/**
* Creates a Gaplist with the given Name.
* @param name The Name of the new Gaplist.
* @since 1.2
*/
private void createGaplist(String name) {
if (name != null) {
String newName = replaceSpecials(name);
wrapper.switchToGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Created a new Gaplist.");
else
showFail("Failed to create a new Gaplist.");
}, newName);
}
else {
showFail("Please enter a name first");
}
}
/**
* Replaces all special Characters from the given String.
* @param regex The String to have all specials replaced.
* @return The given String without special Characters.
* @since 1.2
*/
private String replaceSpecials(String regex) {
regex = regex.replaceAll("", "ae");
regex = regex.replaceAll("", "ae");
regex = regex.replaceAll("", "ue");
regex = regex.replaceAll("", "ue");
regex = regex.replaceAll("", "oe");
regex = regex.replaceAll("", "oe");
regex = regex.replaceAll("", "ss");
return regex;
}
/**
* Votes for the Song at the given index.
* @param index The index of the Song, that will be voted for.
* @since 1.3
*/
private void vote(int index) {
wrapper.removeVote((String[] s)-> {});
wrapper.voteSong((String[] s) -> { if (s[0].equals("true"))
showFail("Voted for the Song");
else
showFail("Couldn't vote for the Song");
createWishlistTable();
}, wishlist[index]);
}
/**
* Removes the Vote.
* @since 1.3
*/
private void remove() {
wrapper.removeVote((String[] s) -> { if (s[0].equals("true"))
showFail("Removed your vote.");
else
showFail("Couldn't remove your vote.");
createWishlistTable();
});
}
/**
* Creates a new Frame.
* @return The created Frame.
* @since 1.0
* @wbp.parser.entryPoint
*/
private void constructFrame() {
gaplist = wrapper.getGapList();
wishlist = wrapper.getWishList();
gaplists = wrapper.getAvailableGapLists();
frame = new JFrame();
frame.setSize(new Dimension(600, 700));
frame.setTitle("JukePi");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.getContentPane().setLayout(null);
frame.setResizable(false);
/*Delete till here*/
lblFail = new JLabel("");
lblFail.setBounds(143, 278, 189, 14);
frame.getContentPane().add(lblFail);
JLabel lblGaplist = new JLabel("Tracks in the Gaplist:");
lblGaplist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblGaplist.setBounds(10, 11, 123, 14);
frame.getContentPane().add(lblGaplist);
JLabel lblWishlist = new JLabel("Tracks in the Wishlist:");
lblWishlist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblWishlist.setBounds(10, 36, 123, 14);
frame.getContentPane().add(lblWishlist);
lblNoGaplist = new JLabel(""+ gaplist.length);
lblNoGaplist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblNoGaplist.setBounds(143, 11, 68, 14);
frame.getContentPane().add(lblNoGaplist);
lblNoWishlist = new JLabel("" + wishlist.length);
lblNoWishlist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblNoWishlist.setBounds(143, 36, 46, 14);
frame.getContentPane().add(lblNoWishlist);
txtLink = new JTextField();
txtLink.setBounds(10, 60, 362, 20);
txtLink.setText("Insert a Link here.");
frame.getContentPane().add(txtLink);
JButton btnAdd = new JButton("Add");
btnAdd.setBounds(10, 91, 62, 20);
btnAdd.setToolTipText("Adds the YouTube-Link in the upper Textfield either to the Gaplist or the Wishlist, whatever is selected on the right.");
frame.getContentPane().add(btnAdd);
JRadioButton rdbtnWishlist = new JRadioButton("Wishlist");
rdbtnWishlist.setBounds(78, 90, 75, 23);
frame.getContentPane().add(rdbtnWishlist);
rdbtnWishlist.setSelected(true);
JRadioButton rdbtnGaplist = new JRadioButton("Gaplist");
rdbtnGaplist.setBounds(155, 90, 75, 23);
frame.getContentPane().add(rdbtnGaplist);
JLabel lblNowPlaying = new JLabel("Now Playing:");
lblNowPlaying.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblNowPlaying.setBounds(10, 144, 68, 14);
frame.getContentPane().add(lblNowPlaying);
JLabel lblNextTrack = new JLabel("Next Track:");
lblNextTrack.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblNextTrack.setBounds(10, 169, 68, 14);
frame.getContentPane().add(lblNextTrack);
lblPlayingTrack = new JLabel("");
lblPlayingTrack.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblPlayingTrack.setBounds(88, 144, 244, 14);
frame.getContentPane().add(lblPlayingTrack);
wrapper.getCurrentTrackTitle((String[] s) -> {lblPlayingTrack.setText(s[0]);});
lblTrackNext = new JLabel("");
lblTrackNext.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblTrackNext.setBounds(88, 169, 244, 14);
frame.getContentPane().add(lblTrackNext);
Song[] wishlist = wrapper.getWishList();
Song[] gaplist = wrapper.getGapList();
collector.setLists(wishlist, gaplist);
if (wishlist.length == 0)
if (gaplist.length == 0)
lblTrackNext.setText("NOTHING");
else
lblTrackNext.setText(gaplist[0].getName());
else
lblTrackNext.setText(wishlist[0].getName());
ImageIcon icon = new ImageIcon("play.png");
btnPlayPause = new JButton(icon);
btnPlayPause.setBounds(140, 194, 120, 45);
frame.getContentPane().add(btnPlayPause);
JButton btnSeekBackwards = new JButton("<html><body>Seek<br>Backward</body></html>");
btnSeekBackwards.setBounds(10, 194, 120, 45);
btnSeekBackwards.setToolTipText("Click here to seek 30 seconds backward.");
frame.getContentPane().add(btnSeekBackwards);
JButton btnSkip = new JButton("Skip");
btnSkip.setBounds(450, 194, 120, 45);
btnSkip.setToolTipText("Click here to skip the current track.");
frame.getContentPane().add(btnSkip);
JButton btnSeekForward = new JButton("<html><body>Seek<br>Forward</body></html>");
btnSeekForward.setBounds(320, 194, 120, 45);
btnSeekForward.setToolTipText("Click here to seek 30 seconds forward.");
frame.getContentPane().add(btnSeekForward);
JButton btnDisconnect = new JButton("Disconnect");
btnDisconnect.setBounds(450, 7, 120, 23);
btnDisconnect.setToolTipText("Click here to disconnect from the Server.");
frame.getContentPane().add(btnDisconnect);
JCheckBox chckbxInfront = new JCheckBox("Add in Front");
chckbxInfront.setBounds(232, 90, 97, 23);
chckbxInfront.setToolTipText("When selected, the track will be added in Front of the list.");
frame.getContentPane().add(chckbxInfront);
createWishlistTable();
createGaplistTable();
lblGaplistName = new JLabel("");
lblGaplistName.setFont(new Font("Tahoma", Font.BOLD, 11));
lblGaplistName.setBounds(10, 303, 250, 14);
lblGaplistName.setVerticalAlignment(JLabel.CENTER);
lblGaplistName.setHorizontalAlignment(JLabel.CENTER);
frame.getContentPane().add(lblGaplistName);
wrapper.getCurrentGapListName((String[] s) -> {lblGaplistName.setText("Gaplist - "+ s[0]);});
JLabel lblWishlist2 = new JLabel("Wishlist");
lblWishlist2.setHorizontalAlignment(JLabel.CENTER);
lblWishlist2.setVerticalAlignment(JLabel.CENTER);
lblWishlist2.setBounds(320, 303, 250, 14);
frame.getContentPane().add(lblWishlist2);
JButton btnDelete = new JButton("Delete");
btnDelete.setBounds(10, 437, 120, 23);
btnDelete.setToolTipText("Click here to delete the selected track from the Gaplist.");
frame.getContentPane().add(btnDelete);
JButton btnSave = new JButton("Save");
btnSave.setBounds(140, 437, 120, 23);
btnSave.setToolTipText("Click here to save the current Gaplist on the Server.");
frame.getContentPane().add(btnSave);
JButton btnUp = new JButton("/\\");
btnUp.setToolTipText("Click here to move the selected track upwards.");
btnUp.setBounds(260, 341, 40, 25);
frame.getContentPane().add(btnUp);
JButton btnDown = new JButton("\\/");
btnDown.setToolTipText("Click here to move the selected track downwards.");
btnDown.setBounds(260, 392, 40, 25);
frame.getContentPane().add(btnDown);
createSavedGaplistsTable();
createContentTable(null);
ButtonGroup bg = new ButtonGroup();
bg.add(rdbtnGaplist);
bg.add(rdbtnWishlist);
JLabel lblSavedGaplists = new JLabel("Saved Gaplists");
lblSavedGaplists.setBounds(10, 501, 250, 20);
lblSavedGaplists.setVerticalAlignment(JLabel.CENTER);
lblSavedGaplists.setHorizontalAlignment(JLabel.CENTER);
frame.getContentPane().add(lblSavedGaplists);
JButton btnLoad = new JButton("Load");
btnLoad.setBounds(10, 637, 75, 23);
btnLoad.setToolTipText("Loads the selected Gaplist.");
frame.getContentPane().add(btnLoad);
JButton btnShow = new JButton("Show");
btnShow.setBounds(95, 637, 75, 23);
btnShow.setToolTipText("Shows the Content of the selected Gaplist.");
frame.getContentPane().add(btnShow);
JButton btnRemove = new JButton("Remove");
btnRemove.setBounds(180, 637, 80, 23);
btnRemove.setToolTipText("Removes the selected Gaplist.");
frame.getContentPane().add(btnRemove);
JButton btnCreate = new JButton("Create");
btnCreate.setBounds(320, 637, 80, 23);
btnCreate.setToolTipText("Click here to create a Gaplist with the Name in the Textfield on the right.");
frame.getContentPane().add(btnCreate);
JTextField textField = new JTextField();
textField.setBounds(410, 637, 158, 23);
frame.getContentPane().add(textField);
textField.setColumns(10);
JButton btnVote = new JButton("Vote");
btnVote.setBounds(320, 437, 120, 23);
btnVote.setToolTipText("Click here to vote for the selected Song.");
frame.getContentPane().add(btnVote);
JButton btnRemoveVote = new JButton("Remove Vote");
btnRemoveVote.setBounds(450, 437, 120, 23);
btnRemoveVote.setToolTipText("Click here to remove your Vote.");
frame.getContentPane().add(btnRemoveVote);
txtLink.addMouseListener(new TextFieldListener(new String[] {"Insert a Link here", "Couldn't add", "Track added", "No valid"}, txtLink));
txtLink.setColumns(10);
wrapper.getCurrentPlaybackStatus((String[] s) -> { if (s[0].equals("true")) {
btnPlayPause.setToolTipText("Click here to Pause the Track.");
// btnPlayPause.setIcon(pauseIcon);
btnPlayPause.setText("Pause");
}
else {
btnPlayPause.setToolTipText("Click here to resume the Track");
// btnPlayPause.setIcon(playIcon);
btnPlayPause.setText("Play");
}
});
btnDisconnect.addActionListener((ActionEvent ae)->{collector.disconnect();});
btnSkip.addActionListener((ActionEvent ae) -> {skip();});
btnPlayPause.addActionListener((ActionEvent ae) -> {pressPause();});
btnSeekForward.addActionListener((ActionEvent ae) -> {seek(true);});
btnSeekBackwards.addActionListener((ActionEvent ae) -> {seek(false);});
btnAdd.addActionListener((ActionEvent ae) -> {add(txtLink.getText(), rdbtnWishlist.isSelected(), chckbxInfront.isSelected(), txtLink);});
btnSave.addActionListener((ActionEvent ae) -> {saveGaplist();});
btnDelete.addActionListener((ActionEvent ae) -> {deleteTrack(((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), oldGaplistPane);});
btnUp.addActionListener((ActionEvent ae) -> {moveTrackUp(((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow());});
btnDown.addActionListener((ActionEvent ae) -> {moveTrackDown(((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow());});
btnLoad.addActionListener((ActionEvent ae) -> {loadGaplist((String)(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getValueAt(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), 0)));});
btnShow.addActionListener((ActionEvent ae) -> {showGaplist((String)(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getValueAt(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), 0)));});
btnRemove.addActionListener((ActionEvent ae) -> {removeGaplist((String)(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getValueAt(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), 0)));});
btnCreate.addActionListener((ActionEvent ae) -> {createGaplist(textField.getText());});
btnVote.addActionListener((ActionEvent ae) -> {vote(((JTable) ((JViewport) oldPane.getComponent(0)).getComponent(0)).getSelectedRow());});
btnRemove.addActionListener((ActionEvent ae) -> {remove();});
}
} | NewClient/src/windows/MainWindow.java | package windows;
import util.TextFieldListener;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.MouseEvent;
import javax.swing.ButtonGroup;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.JViewport;
import javax.swing.table.JTableHeader;
import client.serverconnection.ServerConnection;
import client.serverconnection.Song;
import connection.Collector;
/**
* The Main {@link Window}, that contains information transmitted by the Server, this Client
* is connected to.
* @author Haeldeus
* @version 1.3
*/
public class MainWindow extends Window {
/**
* The {@link Collector}, that will perform Actions with extern needed information.
*/
private final Collector collector;
/**
* The TextField that contains the Link.
* @see JTextField
*/
private JTextField txtLink;
/**
* The Label that displays possible Messages.
* @see JLabel
*/
private JLabel lblFail;
/**
* The Frame, this Screen displays.
* @see JFrame
*/
private JFrame frame;
/**
* The {@link ServerConnection}, that will send the Messages.
*/
private final ServerConnection wrapper;
/**
* The Gaplist, that contains all {@link Song}s in the Gaplist.
*/
private Song[] gaplist;
/**
* The Wishlist, that contains all {@link Song}s in the Wishlist.
*/
private Song[] wishlist;
/**
* The Button, that can be pushed to pause/resume a Track.
* @see JButton
*/
private JButton btnPlayPause;
/**
* The Label, that will display the Name of the current Gaplist.
* @see JLabel
*/
private JLabel lblGaplistName;
/**
* The Label, that will display the Name of the current Track.
* @see JLabel
*/
private JLabel lblPlayingTrack;
/**
* The Label, that will display the Name of the next Track.
* @see JLabel
*/
private JLabel lblTrackNext;
/**
* The Label, that will display the number of Tracks in the Gaplist.
* @see JLabel
*/
private JLabel lblNoGaplist;
/**
* The Label, that will display the number of Tracks in the Wishlist.
* @see JLabel
*/
private JLabel lblNoWishlist;
/**
* The ScrollPane, that contains the old Wishlist-Table. Has to be stored to be able to
* keep the table updated.
* @see JScrollPane
*/
private JScrollPane oldPane;
/**
* The ScrollPane, that contains the old Gaplist-Table. Has to be stored to be able to
* keep the table updated.
* @see JScrollPane
*/
private JScrollPane oldGaplistPane;
/**
* The ScrollPane, that contains the old Saved-Gaplists-Table. Has to be stored to be able
* to keep the table updated.
* @see JScrollPane
*
*/
private JScrollPane oldSavedGaplistPane;
/**
* The Gaplists saved on the Server.
*/
private String[] gaplists;
/**
* The ScrollPane, that contains the old Content-Table. Has to be stored to be able to
* keep the table updated.
* @see JScrollPane
*/
private JScrollPane oldContentPane;
/**
* The Icon, that will be displayed instead of "Pause" as a String.
*/
private ImageIcon playIcon = new ImageIcon("pause.png");
/**
* The Icon, that will be displayed instead of "Play" as a String.
*/
private ImageIcon pauseIcon = new ImageIcon("pause.png");
/**
* The Constructor for the Main-Screen. Will set the parameters to their belonging
* variables.
* @param collector The {@link Collector}, that will perform Actions with extern needed
* information.
* @param frame The Frame, this Screen will display.
* @param wrapper The {@link ServerConnection}, that will send the Messages.
* @param gaplist The Gaplist as an Array of {@link Song}s.
* @param wishlist The Wishlist as an Array of {@link Song}s.
* @since 1.0
*/
public MainWindow(Collector collector, JFrame frame, ServerConnection wrapper, Song[] gaplist, Song[] wishlist) {
this.collector = collector;
this.frame = frame;
frame.getContentPane().removeAll();
this.wrapper = wrapper;
this.gaplist = gaplist;
this.wishlist = wishlist;
}
@Override
public void show() {
constructFrame();
frame.setVisible(true);
}
@Override
public void close() {
frame.setVisible(false);
}
/**
* Sets the IP and Port of the Server, the Client is connected to, so the Title of the
* Frame can display it.
* @param ip The IP of the Server, the Client is connected to.
* @param port The Port of the Server, the Client is connected to.
* @see JFrame#setTitle(String)
* @since 1.0
*/
public void setIpAndPort(String ip, int port) {
frame.setTitle("JukePi - "+ip+":"+port);
}
@Override
public void showFail(String text) {
new util.ShowLabelThread(lblFail, frame, text).start();
}
/**
* Skips the current Song.
* @see ServerConnection#skip(ResponseListener)
* @since 1.0
*/
private void skip() {
wrapper.skip((String[] s) -> { if (s[0].equals("true"))
showFail("Skipped Track successfully!");
else
showFail("Couldn't skip the Track!");
});
}
/**
* Messages the Server, that the Play/Pause-Button was pressed.
* @see ServerConnection#pauseResume(ResponseListener)
* @since 1.0
*/
private void pressPause() {
wrapper.pauseResume((String[] s) -> { if (s[0].equals("true"))
wrapper.getCurrentPlaybackStatus((String[] st) -> { if (st[0].equals("false"))
showFail("Paused the Track successfully!");
else
showFail("Resumed the Track successfully!");
});
else
wrapper.getCurrentPlaybackStatus((String[] str) -> { if (str[0].equals("false"))
showFail("Couldn't resume the Track!");
else
showFail("Couldn't pause the Track!");
});
});
}
/**
* Seeks 30 seconds either forward or backward.
* @param forward Determines, whether the Server should seek forward({@code true}) or
* backward({@code false}).
* @see ServerConnection#seekForward(ResponseListener)
* @see ServerConnection#seekBackward(ResponseListener)
* @since 1.0
*/
private void seek(boolean forward) {
if (forward)
wrapper.seekForward((String[] s) -> { if (s[0].equals("true"))
showFail("Successfully seeked forward!");
else
showFail("Couldn't seek forward!");
});
else
wrapper.seekBackward((String[] s) -> { if (s[0].equals("true"))
showFail("Successfully seeked backwards!");
else
showFail("Couldn't seek backwards!");
});
}
/**
* Adds the given Link to a List, either the Gap- or the Wishlist.
* @param link The Link to the Song.
* @param toWishlist Determines, whether the Song should be added to the Wishlist
* ({@code true}) or to the Gaplist ({@code false}).
* @param inFront Determines, whether the Track should be added in Front of the List
	 * ({@code true}) or at the End of the List ({@code false}).
* @param textfield The TextField, that contains the Link.
* @see ServerConnection#addToList(ResponseListener, String, boolean, boolean)
* @since 1.0
*/
private void add(String link, boolean toWishlist , boolean inFront, JTextField textfield) {
if (!link.isEmpty()) {
showFail("Pending Server...");
wrapper.addToList((String[] s) -> { if (s[0].equals("true"))
showFail("Track added!");
else
showFail("Couldn't add the Track.");
textfield.setText("Insert a Link here");
},
link, toWishlist, !inFront);
}
else {
showFail("No valid link!");
textfield.setText("Insert a Link here");
}
}
/**
* Sets the Gaplist to the given List and updates the Gaplist-Model
* @param gaplist The new Gaplist.
* @since 1.0
*/
public void setGaplist(Song[] gaplist) {
this.gaplist = gaplist;
lblNoGaplist.setText(""+gaplist.length);
createGaplistTable();
}
/**
* Sets the Wishlist to the given List and updates the Wishlist-Table.
* @param wishlist The new Wishlist.
* @since 1.0
*/
public void setWishlist(Song[] wishlist) {
this.wishlist = wishlist;
lblNoWishlist.setText(""+wishlist.length);
createWishlistTable();
setNextTrack();
}
/**
* Sets the Gaplists to the given List and updates the Saved-Gaplists-Table.
* @param gaplists The Gaplists on the Server.
* @since 1.2
*/
public void setGaplists(String[] gaplists) {
this.gaplists = gaplists;
createSavedGaplistsTable();
setNextTrack();
}
/**
* Moves the Song at the given index upwards in the Gaplist.
* @param index The index of the Track to be moved.
* @see ServerConnection#setGapListTrackUp(ResponseListener, long)
* @since 1.0
*/
private void moveTrackUp(int index) {
if (index >=0)
wrapper.setGapListTrackUp((String[] s)-> { if (s[0].equals("true")) {
showFail("Moved Track up.");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index-1);
}
else {
showFail("Couldn't move Track up.");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index);
}
}, gaplist[index].getTrackID());
}
/**
* Moves the Song at the given index downwards in the Gaplist.
* @param index The index of the Track to be moved.
* @see ServerConnection#setGapListTrackDown(ResponseListener, long)
* @since 1.0
*/
private void moveTrackDown(int index) {
if (index >= 0)
wrapper.setGapListTrackDown((String[] s) -> { if (s[0].equals("true")) {
showFail("Moved Track down.");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index+1);
}
else {
showFail("Couldn't move Track down");
try{Thread.sleep(100);}catch(Exception e) {}
setSelectedGaplistIndex(index);
}
}, gaplist[index].getTrackID());
}
/**
* Deletes the Song at the given index from the Gaplist.
* @param index The index of the Song to be deleted.
* @param list The List, that contains the Gaplist-Model.
* @see ServerConnection#deleteFromList(Song)
* @since 1.0
*/
private void deleteTrack(int index, JScrollPane list) {
if (index >= 0) {
if (wrapper.deleteFromList(gaplist[index]))
showFail("Deleted the Track from the Gaplist");
else
showFail("Couldn't delete the Track from the Gaplist");
try{Thread.sleep(100);} catch (Exception e) {}
setSelectedGaplistIndex(index);
}
}
/**
* Saves the current Gaplist on the Server.
* @see ServerConnection#saveGapList(ResponseListener)
* @since 1.0
*/
private void saveGaplist() {
wrapper.saveGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Saved Gaplist.");
else
showFail("Couldn't save the Gaplist.");
});
}
/**
* Loads the Gaplist with the given Name
* @param name The Name of the Gaplist to be loaded.
* @since 1.2
*/
private void loadGaplist(String name) {
wrapper.switchToGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Loaded Gaplist.");
else
showFail("Couldn't load the Gaplist.");
}, name);
}
/**
* Shows the Content of the Gaplist with the given Name.
* @param name The Name of the Gaplist, which Content will be shown.
* @since 1.2
*/
private void showGaplist(String name) {
wrapper.getTitleFromGapList((String[] s) -> {createContentTable(s);}, name);
}
/**
* Will be executed, when a Song was paused or resumed on the Server.
* @param isPlaying Determines, if the Song is now playing ({@code true}) or paused
* ({@code false}).
* @since 1.0
*/
public void pauseResume(boolean isPlaying) {
if (isPlaying) {
btnPlayPause.setIcon(pauseIcon);
btnPlayPause.setToolTipText("Click here to pause the Track.");
}
else {
btnPlayPause.setIcon(playIcon);
btnPlayPause.setToolTipText("Click here to resume the Track.");
}
}
/**
	 * Will be executed, when another Gaplist was loaded on the Server.
* @param gapListName The Name of the new Gaplist.
* @since 1.0
*/
public void gaplistChanged(String gapListName) {
lblGaplistName.setText("Gaplist - " + gapListName);
}
/**
* Sets the Text of the PlayingTrackLabel to the given title.
* @param title The title of the song, that is now playing.
* @since 1.2
*/
public void setNowPlaying(String title) {
lblPlayingTrack.setText(title);
}
/**
	 * Sets the Text of the NextTrackLabel to the title of the next Track in the Wishlist or Gaplist.
* @since 1.0
*/
public void setNextTrack() {
if (wishlist.length == 0)
if (gaplist.length == 0)
lblTrackNext.setText("NOTHING");
else
lblTrackNext.setText(gaplist[0].getName());
else
lblTrackNext.setText(wishlist[0].getName());
}
/**
* Creates the Table, that displays the Wishlist and the Votes for each Song in it.
* @since 1.0
*/
private void createWishlistTable() {
if(oldPane != null)
frame.getContentPane().remove(oldPane);
String[] columns = {"Song:", "Votes:"};
String[][] data = new String[wishlist.length][2];
for (int i = 0; i < wishlist.length; i++) {
data[i][0] = wishlist[i].getName();
data[i][1] = ""+wishlist[i].getVotes();
}
JTable table = new JTable(data, columns) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The ToolTips for the TableHeaders.
*/
private String [] columnToolTips = {"The Name of the Song", "The Votes for this Song"};
/**
* Returns the ToolTip for the Cell at the given Position of the Cursor.
* @param e The MouseEvent.
* @return The ToolTip for the Cell at the Cursor's Position.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the given Position is editable.
* @param row The row-index of the Cell.
* @param column The column-index of the Cell.
* @return false by default, as these Cells shouldn't be editable.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return the new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the given Cursor's Position.
* @param e The MouseEvent.
* @return the ToolTip for the column at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
table.getColumnModel().getColumn(0).setMinWidth(210);
JScrollPane wishlistPane = new JScrollPane(table);
wishlistPane.setBounds(320,328,250,102);
frame.getContentPane().add(wishlistPane);
oldPane = wishlistPane;
}
/**
* Creates the Table, that contains the Gaplist.
* @since 1.1
*/
private synchronized void createGaplistTable() {
if (oldGaplistPane != null)
frame.getContentPane().remove(oldGaplistPane);
String[] columns = {"Gaplist:"};
String[][] data = new String[gaplist.length][1];
for (int i = 0; i < gaplist.length; i++)
data[i][0] = gaplist[i].getName();
JTable table = new JTable(data, columns) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The ToolTip for the column.
*/
private String [] columnToolTips = {"The Name of the Song in the Gaplist"};
/**
* Returns the ToolTip for the Cell at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip for the Cell at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the given index is editable.
* @param row The row-index of the Cell.
* @param column The column-index of the Cell.
* @return false by default, as these Cells shouldn't be editable.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return The new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip for the given column.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
JScrollPane gaplistPane = new JScrollPane(table);
gaplistPane.setBounds(10, 328, 250, 102);
frame.getContentPane().add(gaplistPane);
oldGaplistPane = gaplistPane;
}
/**
* Creates a Table with all saved Gaplists in it.
* @since 1.2
*/
private synchronized void createSavedGaplistsTable() {
if (oldSavedGaplistPane != null)
frame.getContentPane().remove(oldSavedGaplistPane);
String[] columns = {"Gaplists:"};
String[][] data = new String[gaplists.length][1];
for (int i = 0; i < gaplists.length; i++)
data[i][0] = gaplists[i];
JTable table = new JTable(data, columns) {
/**
* The serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The Tooltip of the column.
*/
private String [] columnToolTips = {"The Name of the Gaplist"};
/**
* Returns the ToolTip for the Cell at the Position of the Cursor.
* @param e The MouseEvent.
* @return The ToolTip for the Cell at the Cursor's Position.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the Row and column is editable.
* @param row The row index of the Cell.
* @param column The column index of the Cell.
* @return false as default value, since these Cells shouldn't be edited.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return The new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the Cursor's Position
* @param e The MouseEvent.
* @return The ToolTip for the column at the given Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
JScrollPane gaplistsPane = new JScrollPane(table);
gaplistsPane.setBounds(10, 528, 248, 102);
frame.getContentPane().add(gaplistsPane);
oldSavedGaplistPane = gaplistsPane;
}
/**
* Creates the Table with the Content of the to be shown Gaplist. If the Content is null,
	 * an empty table will be built with nothing but the header in it.
* @param content The Content of the Gaplist, that should be shown.
* @since 1.2
*/
private synchronized void createContentTable(String[] content) {
if (oldContentPane != null)
frame.getContentPane().remove(oldContentPane);
String[] columns = {"Content:"};
String[][] data = new String[0][1];
if (content != null) {
data = new String[content.length][1];
for (int i = 0; i < content.length; i++)
data[i][0] = content[i];
}
JTable table = new JTable(data, columns) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* The ToolTip for the column.
*/
private String [] columnToolTips = {"The Name of the Song in the selected Gaplist."};
/**
* Returns the ToolTip of the Cell at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip of the Cell at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
int colIndex = columnAtPoint(p);
if (colIndex == 0)
tip = ""+ getValueAt(rowIndex, colIndex);
return tip;
}
/**
* Returns, if the Cell at the given index is editable.
* @param row The row-Index.
* @param column The column-Index.
* @return false by default, as these Cells shouldn't be editable.
*/
public boolean isCellEditable(int row, int column){
return false;
}
/**
* Creates a new TableHeader.
* @return The new TableHeader.
*/
protected JTableHeader createDefaultTableHeader() {
return new JTableHeader(columnModel) {
/**
* The Serial Version ID.
*/
private static final long serialVersionUID = 1L;
/**
* Returns the ToolTip for the column at the Cursor's Position.
* @param e The MouseEvent.
* @return The ToolTip for the column at the Position of the Cursor.
*/
public String getToolTipText(MouseEvent e) {
java.awt.Point p = e.getPoint();
int index = columnModel.getColumnIndexAtX(p.x);
int realIndex = columnModel.getColumn(index).getModelIndex();
return columnToolTips[realIndex];
}
};
}
};
JScrollPane contentPane = new JScrollPane(table);
contentPane.setBounds(320, 528, 248, 102);
frame.getContentPane().add(contentPane);
oldContentPane = contentPane;
}
/**
* Sets the SelectedIndex of gaplistList to the given index.
* @param index The index of the new Selection.
* @since 1.1
*/
private void setSelectedGaplistIndex(int index) {
if (index >= 0) {
try {
((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).setRowSelectionInterval(index, index);
}
catch (IllegalArgumentException iae) {
((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).setRowSelectionInterval(index-1, index-1);
}
}
}
/**
* Removes the Gaplist with the given Name from the Server.
* @param name The Name of the Gaplist to be removed.
* @since 1.2
*/
private void removeGaplist(String name) {
wrapper.deleteGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Removed the Gaplist.");
else
showFail("Coudln't remove the Gaplist");
}, name);
}
/**
* Creates a Gaplist with the given Name.
* @param name The Name of the new Gaplist.
* @since 1.2
*/
private void createGaplist(String name) {
if (name != null) {
String newName = replaceSpecials(name);
wrapper.switchToGapList((String[] s) -> { if (s[0].equals("true"))
showFail("Created a new Gaplist.");
else
showFail("Failed to create a new Gaplist.");
}, newName);
}
else {
showFail("Please enter a name first");
}
}
/**
* Replaces all special Characters from the given String.
* @param regex The String to have all specials replaced.
* @return The given String without special Characters.
* @since 1.2
*/
private String replaceSpecials(String regex) {
regex = regex.replaceAll("", "ae");
regex = regex.replaceAll("", "ae");
regex = regex.replaceAll("", "ue");
regex = regex.replaceAll("", "ue");
regex = regex.replaceAll("", "oe");
regex = regex.replaceAll("", "oe");
regex = regex.replaceAll("", "ss");
return regex;
}
/**
* Votes for the Song at the given index.
* @param index The index of the Song, that will be voted for.
* @since 1.3
*/
private void vote(int index) {
wrapper.removeVote((String[] s)-> {});
wrapper.voteSong((String[] s) -> { if (s[0].equals("true"))
showFail("Voted for the Song");
else
showFail("Couldn't vote for the Song");
createWishlistTable();
}, wishlist[index]);
}
/**
* Removes the Vote.
* @since 1.3
*/
private void remove() {
wrapper.removeVote((String[] s) -> { if (s[0].equals("true"))
showFail("Removed your vote.");
else
showFail("Couldn't remove your vote.");
createWishlistTable();
});
}
/**
	 * Builds the Frame and fills it with all Components.
* @since 1.0
* @wbp.parser.entryPoint
*/
private void constructFrame() {
gaplist = wrapper.getGapList();
wishlist = wrapper.getWishList();
gaplists = wrapper.getAvailableGapLists();
frame = new JFrame();
frame.setSize(new Dimension(600, 700));
frame.setTitle("JukePi");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.getContentPane().setLayout(null);
frame.setResizable(false);
/*Delete till here*/
lblFail = new JLabel("");
lblFail.setBounds(143, 278, 189, 14);
frame.getContentPane().add(lblFail);
JLabel lblGaplist = new JLabel("Tracks in the Gaplist:");
lblGaplist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblGaplist.setBounds(10, 11, 123, 14);
frame.getContentPane().add(lblGaplist);
JLabel lblWishlist = new JLabel("Tracks in the Wishlist:");
lblWishlist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblWishlist.setBounds(10, 36, 123, 14);
frame.getContentPane().add(lblWishlist);
lblNoGaplist = new JLabel(""+ gaplist.length);
lblNoGaplist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblNoGaplist.setBounds(143, 11, 68, 14);
frame.getContentPane().add(lblNoGaplist);
lblNoWishlist = new JLabel("" + wishlist.length);
lblNoWishlist.setFont(new Font("Tahoma", Font.BOLD, 11));
lblNoWishlist.setBounds(143, 36, 46, 14);
frame.getContentPane().add(lblNoWishlist);
txtLink = new JTextField();
txtLink.setBounds(10, 60, 362, 20);
txtLink.setText("Insert a Link here.");
frame.getContentPane().add(txtLink);
JButton btnAdd = new JButton("Add");
btnAdd.setBounds(10, 91, 62, 20);
btnAdd.setToolTipText("Adds the YouTube-Link in the upper Textfield either to the Gaplist or the Wishlist, whatever is selected on the right.");
frame.getContentPane().add(btnAdd);
JRadioButton rdbtnWishlist = new JRadioButton("Wishlist");
rdbtnWishlist.setBounds(78, 90, 75, 23);
frame.getContentPane().add(rdbtnWishlist);
rdbtnWishlist.setSelected(true);
JRadioButton rdbtnGaplist = new JRadioButton("Gaplist");
rdbtnGaplist.setBounds(155, 90, 75, 23);
frame.getContentPane().add(rdbtnGaplist);
JLabel lblNowPlaying = new JLabel("Now Playing:");
lblNowPlaying.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblNowPlaying.setBounds(10, 144, 68, 14);
frame.getContentPane().add(lblNowPlaying);
JLabel lblNextTrack = new JLabel("Next Track:");
lblNextTrack.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblNextTrack.setBounds(10, 169, 68, 14);
frame.getContentPane().add(lblNextTrack);
lblPlayingTrack = new JLabel("");
lblPlayingTrack.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblPlayingTrack.setBounds(88, 144, 244, 14);
frame.getContentPane().add(lblPlayingTrack);
wrapper.getCurrentTrackTitle((String[] s) -> {lblPlayingTrack.setText(s[0]);});
lblTrackNext = new JLabel("");
lblTrackNext.setFont(new Font("Tahoma", Font.PLAIN, 11));
lblTrackNext.setBounds(88, 169, 244, 14);
frame.getContentPane().add(lblTrackNext);
Song[] wishlist = wrapper.getWishList();
Song[] gaplist = wrapper.getGapList();
collector.setLists(wishlist, gaplist);
if (wishlist.length == 0)
if (gaplist.length == 0)
lblTrackNext.setText("NOTHING");
else
lblTrackNext.setText(gaplist[0].getName());
else
lblTrackNext.setText(wishlist[0].getName());
ImageIcon icon = new ImageIcon("play.png");
btnPlayPause = new JButton(icon);
btnPlayPause.setBounds(140, 194, 120, 45);
frame.getContentPane().add(btnPlayPause);
JButton btnSeekBackwards = new JButton("<html><body>Wind<br>Backward</body></html>");
btnSeekBackwards.setBounds(10, 194, 120, 45);
btnSeekBackwards.setToolTipText("Click here to wind 30 seconds backward.");
frame.getContentPane().add(btnSeekBackwards);
JButton btnSkip = new JButton("Skip");
btnSkip.setBounds(450, 194, 120, 45);
btnSkip.setToolTipText("Click here to skip the current track.");
frame.getContentPane().add(btnSkip);
JButton btnSeekForward = new JButton("<html><body>Wind<br>Forward</body></html>");
btnSeekForward.setBounds(320, 194, 120, 45);
btnSeekForward.setToolTipText("Click here to wind 30 seconds forward.");
frame.getContentPane().add(btnSeekForward);
JButton btnDisconnect = new JButton("Disconnect");
btnDisconnect.setBounds(450, 7, 120, 23);
btnDisconnect.setToolTipText("Click here to disconnect from the Server.");
frame.getContentPane().add(btnDisconnect);
JCheckBox chckbxInfront = new JCheckBox("Add in Front");
chckbxInfront.setBounds(232, 90, 97, 23);
chckbxInfront.setToolTipText("When selected, the track will be added in Front of the list.");
frame.getContentPane().add(chckbxInfront);
createWishlistTable();
createGaplistTable();
lblGaplistName = new JLabel("");
lblGaplistName.setFont(new Font("Tahoma", Font.BOLD, 11));
lblGaplistName.setBounds(10, 303, 250, 14);
lblGaplistName.setVerticalAlignment(JLabel.CENTER);
lblGaplistName.setHorizontalAlignment(JLabel.CENTER);
frame.getContentPane().add(lblGaplistName);
wrapper.getCurrentGapListName((String[] s) -> {lblGaplistName.setText("Gaplist - "+ s[0]);});
JLabel lblWishlist2 = new JLabel("Wishlist");
lblWishlist2.setHorizontalAlignment(JLabel.CENTER);
lblWishlist2.setVerticalAlignment(JLabel.CENTER);
lblWishlist2.setBounds(320, 303, 250, 14);
frame.getContentPane().add(lblWishlist2);
JButton btnDelete = new JButton("Delete");
btnDelete.setBounds(10, 437, 120, 23);
btnDelete.setToolTipText("Click here to delete the selected track from the Gaplist.");
frame.getContentPane().add(btnDelete);
JButton btnSave = new JButton("Save");
btnSave.setBounds(140, 437, 120, 23);
btnSave.setToolTipText("Click here to save the current Gaplist on the Server.");
frame.getContentPane().add(btnSave);
JButton btnUp = new JButton("/\\");
btnUp.setToolTipText("Click here to move the selected track upwards.");
btnUp.setBounds(260, 341, 40, 25);
frame.getContentPane().add(btnUp);
JButton btnDown = new JButton("\\/");
btnDown.setToolTipText("Click here to move the selected track downwards.");
btnDown.setBounds(260, 392, 40, 25);
frame.getContentPane().add(btnDown);
createSavedGaplistsTable();
createContentTable(null);
ButtonGroup bg = new ButtonGroup();
bg.add(rdbtnGaplist);
bg.add(rdbtnWishlist);
JLabel lblSavedGaplists = new JLabel("Saved Gaplists");
lblSavedGaplists.setBounds(10, 501, 250, 20);
lblSavedGaplists.setVerticalAlignment(JLabel.CENTER);
lblSavedGaplists.setHorizontalAlignment(JLabel.CENTER);
frame.getContentPane().add(lblSavedGaplists);
JButton btnLoad = new JButton("Load");
btnLoad.setBounds(10, 637, 75, 23);
btnLoad.setToolTipText("Loads the selected Gaplist.");
frame.getContentPane().add(btnLoad);
JButton btnShow = new JButton("Show");
btnShow.setBounds(95, 637, 75, 23);
btnShow.setToolTipText("Shows the Content of the selected Gaplist.");
frame.getContentPane().add(btnShow);
JButton btnRemove = new JButton("Remove");
btnRemove.setBounds(180, 637, 80, 23);
btnRemove.setToolTipText("Removes the selected Gaplist.");
frame.getContentPane().add(btnRemove);
JButton btnCreate = new JButton("Create");
btnCreate.setBounds(320, 637, 80, 23);
btnCreate.setToolTipText("Click here to create a Gaplist with the Name in the Textfield on the right.");
frame.getContentPane().add(btnCreate);
JTextField textField = new JTextField();
textField.setBounds(410, 637, 158, 23);
frame.getContentPane().add(textField);
textField.setColumns(10);
JButton btnVote = new JButton("Vote");
btnVote.setBounds(320, 437, 120, 23);
btnVote.setToolTipText("Click here to vote for the selected Song.");
frame.getContentPane().add(btnVote);
JButton btnRemoveVote = new JButton("Remove Vote");
btnRemoveVote.setBounds(450, 437, 120, 23);
btnRemoveVote.setToolTipText("Click here to remove your Vote.");
frame.getContentPane().add(btnRemoveVote);
txtLink.addMouseListener(new TextFieldListener(new String[] {"Insert a Link here", "Couldn't add", "Track added", "No valid"}, txtLink));
txtLink.setColumns(10);
wrapper.getCurrentPlaybackStatus((String[] s) -> { if (s[0].equals("true")) {
btnPlayPause.setToolTipText("Click here to Pause the Track.");
btnPlayPause.setIcon(pauseIcon);
}
else {
btnPlayPause.setToolTipText("Click here to resume the Track");
btnPlayPause.setIcon(playIcon);
}
});
btnDisconnect.addActionListener((ActionEvent ae)->{collector.disconnect();});
btnSkip.addActionListener((ActionEvent ae) -> {skip();});
btnPlayPause.addActionListener((ActionEvent ae) -> {pressPause();});
btnSeekForward.addActionListener((ActionEvent ae) -> {seek(true);});
btnSeekBackwards.addActionListener((ActionEvent ae) -> {seek(false);});
btnAdd.addActionListener((ActionEvent ae) -> {add(txtLink.getText(), rdbtnWishlist.isSelected(), chckbxInfront.isSelected(), txtLink);});
btnSave.addActionListener((ActionEvent ae) -> {saveGaplist();});
btnDelete.addActionListener((ActionEvent ae) -> {deleteTrack(((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), oldGaplistPane);});
btnUp.addActionListener((ActionEvent ae) -> {moveTrackUp(((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow());});
btnDown.addActionListener((ActionEvent ae) -> {moveTrackDown(((JTable) ((JViewport) oldGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow());});
btnLoad.addActionListener((ActionEvent ae) -> {loadGaplist((String)(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getValueAt(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), 0)));});
btnShow.addActionListener((ActionEvent ae) -> {showGaplist((String)(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getValueAt(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), 0)));});
btnRemove.addActionListener((ActionEvent ae) -> {removeGaplist((String)(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getValueAt(((JTable) ((JViewport) oldSavedGaplistPane.getComponent(0)).getComponent(0)).getSelectedRow(), 0)));});
btnCreate.addActionListener((ActionEvent ae) -> {createGaplist(textField.getText());});
btnVote.addActionListener((ActionEvent ae) -> {vote(((JTable) ((JViewport) oldPane.getComponent(0)).getComponent(0)).getSelectedRow());});
		btnRemoveVote.addActionListener((ActionEvent ae) -> {remove();});
}
} | Fixes
few fixes
| NewClient/src/windows/MainWindow.java | Fixes | <ide><path>ewClient/src/windows/MainWindow.java
<ide> private void seek(boolean forward) {
<ide> if (forward)
<ide> wrapper.seekForward((String[] s) -> { if (s[0].equals("true"))
<del> showFail("Successfully seeked forward!");
<add> showFail("Successfully sought forward!");
<ide> else
<ide> showFail("Couldn't seek forward!");
<ide> });
<ide> else
<ide> wrapper.seekBackward((String[] s) -> { if (s[0].equals("true"))
<del> showFail("Successfully seeked backwards!");
<add> showFail("Successfully sought backwards!");
<ide> else
<ide> showFail("Couldn't seek backwards!");
<ide> });
<ide> wrapper.deleteGapList((String[] s) -> { if (s[0].equals("true"))
<ide> showFail("Removed the Gaplist.");
<ide> else
<del> showFail("Coudln't remove the Gaplist");
<add> showFail("Couldn't remove the Gaplist");
<ide> }, name);
<ide> }
<ide>
<ide> btnPlayPause.setBounds(140, 194, 120, 45);
<ide> frame.getContentPane().add(btnPlayPause);
<ide>
<del> JButton btnSeekBackwards = new JButton("<html><body>Wind<br>Backward</body></html>");
<add> JButton btnSeekBackwards = new JButton("<html><body>Seek<br>Backward</body></html>");
<ide> btnSeekBackwards.setBounds(10, 194, 120, 45);
<del> btnSeekBackwards.setToolTipText("Click here to wind 30 seconds backward.");
<add> btnSeekBackwards.setToolTipText("Click here to seek 30 seconds backward.");
<ide> frame.getContentPane().add(btnSeekBackwards);
<ide>
<ide> JButton btnSkip = new JButton("Skip");
<ide> btnSkip.setToolTipText("Click here to skip the current track.");
<ide> frame.getContentPane().add(btnSkip);
<ide>
<del> JButton btnSeekForward = new JButton("<html><body>Wind<br>Forward</body></html>");
<add> JButton btnSeekForward = new JButton("<html><body>Seek<br>Forward</body></html>");
<ide> btnSeekForward.setBounds(320, 194, 120, 45);
<del> btnSeekForward.setToolTipText("Click here to wind 30 seconds forward.");
<add> btnSeekForward.setToolTipText("Click here to seek 30 seconds forward.");
<ide> frame.getContentPane().add(btnSeekForward);
<ide>
<ide> JButton btnDisconnect = new JButton("Disconnect");
<ide>
<ide> wrapper.getCurrentPlaybackStatus((String[] s) -> { if (s[0].equals("true")) {
<ide> btnPlayPause.setToolTipText("Click here to Pause the Track.");
<del> btnPlayPause.setIcon(pauseIcon);
<add> // btnPlayPause.setIcon(pauseIcon);
<add> btnPlayPause.setText("Pause");
<ide> }
<ide> else {
<ide> btnPlayPause.setToolTipText("Click here to resume the Track");
<del> btnPlayPause.setIcon(playIcon);
<add> // btnPlayPause.setIcon(playIcon);
<add> btnPlayPause.setText("Play");
<ide> }
<ide> });
<ide> |
|
Java | unlicense | ef9e970bb33e2bb7a0b181c84692379aa6161a0c | 0 | skeeto/october-chess-engine | package com.nullprogram.chess.pieces;
import com.nullprogram.chess.Piece;
import com.nullprogram.chess.Board;
import com.nullprogram.chess.Position;
import com.nullprogram.chess.Move;
import com.nullprogram.chess.MoveList;
/**
* The Chess pawn.
*
* This class describes the movement and capture behavior of the pawn
* chess piece.
*/
public class Pawn extends Piece {
/**
* Create a new pawn on the given side.
*
* @param side piece owner
*/
public Pawn(final Side side) {
super(side);
}
/** {@inheritDoc} */
public final MoveList getMoves(final boolean check) {
MoveList list = new MoveList(getBoard(), check);
Position pos = getPosition();
Board board = getBoard();
int dir = direction();
Position dest = new Position(pos, 0, 1 * dir);
Move first = new Move(pos, dest);
if (dest.getY() == upgradeRow()) {
first.setNext(new Move(dest, null)); // remove the pawn
Move upgrade = new Move(null, dest);
upgrade.setCaptured(new Queen(getSide()));
first.getNext().setNext(upgrade); // add a queen
}
if (list.addMove(first)) {
if (!moved()) {
list.addMove(new Move(pos, new Position(pos, 0, 2 * dir)));
}
}
list.addCaptureOnly(new Move(pos, new Position(pos, -1, 1 * dir)));
list.addCaptureOnly(new Move(pos, new Position(pos, 1, 1 * dir)));
/* check for en passant */
Move last = board.last();
if (last != null) {
Position left = new Position(pos, -1, 0);
Position right = new Position(pos, 1, 0);
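            /* En passant is only legal when the neighbouring pawn's last move
             * was a straight advance (a capture would have changed its file),
             * hence the origin/destination X comparison below. */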
if (left.equals(last.getDest())
&& (last.getOrigin().getX() == last.getDest().getX())
&& (board.getPiece(left) instanceof Pawn)) {
/* en passant to the left */
Move passant = new Move(pos, new Position(pos, -1, dir));
passant.setNext(new Move(left, null));
list.addMove(passant);
} else if (right.equals(last.getDest())
&& (last.getOrigin().getX() == last.getDest().getX())
&& (board.getPiece(right) instanceof Pawn)) {
/* en passant to the right */
Move passant = new Move(pos, new Position(pos, 1, dir));
passant.setNext(new Move(right, null));
list.addMove(passant);
}
}
return list;
}
/**
* Determine direction of this pawn's movement.
*
* @return direction for this pawn
*/
private int direction() {
if (getSide() == Side.WHITE) {
return 1;
} else {
return -1;
}
}
/**
* Determine upgrade row.
*
* @return the upgrade row index.
*/
private int upgradeRow() {
if (getSide() == Side.BLACK) {
return 0;
} else {
return getBoard().getHeight() - 1;
}
}
}
| src/com/nullprogram/chess/pieces/Pawn.java | package com.nullprogram.chess.pieces;
import com.nullprogram.chess.Piece;
import com.nullprogram.chess.Board;
import com.nullprogram.chess.Position;
import com.nullprogram.chess.Move;
import com.nullprogram.chess.MoveList;
/**
* The Chess pawn.
*
* This class describes the movement and capture behavior of the pawn
* chess piece.
*/
public class Pawn extends Piece {
/**
* Create a new pawn on the given side.
*
* @param side piece owner
*/
public Pawn(final Side side) {
super(side);
}
/** {@inheritDoc} */
public final MoveList getMoves(final boolean check) {
MoveList list = new MoveList(getBoard(), check);
Position pos = getPosition();
Board board = getBoard();
int dir = direction();
Position dest = new Position(pos, 0, 1 * dir);
Move first = new Move(pos, dest);
if (dest.getY() == upgradeRow()) {
first.setNext(new Move(dest, null)); // remove the pawn
Move upgrade = new Move(null, dest);
upgrade.setCaptured(new Queen(getSide()));
first.getNext().setNext(upgrade); // add a queen
}
if (list.addMove(first)) {
if (!moved()) {
list.addMove(new Move(pos, new Position(pos, 0, 2 * dir)));
}
}
list.addCaptureOnly(new Move(pos, new Position(pos, -1, 1 * dir)));
list.addCaptureOnly(new Move(pos, new Position(pos, 1, 1 * dir)));
/* check for en passant */
Move last = board.last();
if (last != null) {
Position left = new Position(pos, -1, 0);
Position right = new Position(pos, 1, 0);
if (left.equals(last.getDest())
&& (board.getPiece(left) instanceof Pawn)) {
/* en passant to the left */
Move passant = new Move(pos, new Position(pos, -1, dir));
passant.setNext(new Move(left, null));
list.addMove(passant);
} else if (right.equals(last.getDest())
&& (board.getPiece(right) instanceof Pawn)) {
/* en passant to the right */
Move passant = new Move(pos, new Position(pos, 1, dir));
passant.setNext(new Move(right, null));
list.addMove(passant);
}
}
return list;
}
/**
* Determine direction of this pawn's movement.
*
* @return direction for this pawn
*/
private int direction() {
if (getSide() == Side.WHITE) {
return 1;
} else {
return -1;
}
}
/**
* Determine upgrade row.
*
* @return the upgrade row index.
*/
private int upgradeRow() {
if (getSide() == Side.BLACK) {
return 0;
} else {
return getBoard().getHeight() - 1;
}
}
}
| Fix en passant on capture bug.
| src/com/nullprogram/chess/pieces/Pawn.java | Fix en passant on capture bug. | <ide><path>rc/com/nullprogram/chess/pieces/Pawn.java
<ide> Position left = new Position(pos, -1, 0);
<ide> Position right = new Position(pos, 1, 0);
<ide> if (left.equals(last.getDest())
<add> && (last.getOrigin().getX() == last.getDest().getX())
<ide> && (board.getPiece(left) instanceof Pawn)) {
<ide> /* en passant to the left */
<ide> Move passant = new Move(pos, new Position(pos, -1, dir));
<ide> passant.setNext(new Move(left, null));
<ide> list.addMove(passant);
<ide> } else if (right.equals(last.getDest())
<add> && (last.getOrigin().getX() == last.getDest().getX())
<ide> && (board.getPiece(right) instanceof Pawn)) {
<ide> /* en passant to the right */
<ide> Move passant = new Move(pos, new Position(pos, 1, dir)); |
|
Java | apache-2.0 | 7957af28731bf1e6d8776790fe7f3449054f84dd | 0 | codegauravg/DynamicTable | package org.ACMSviet.SchedulerAMa.Services;
import java.util.ArrayList;
import java.util.List;
import org.ACMSviet.SchedulerAMa.Models.Course;
import org.ACMSviet.SchedulerAMa.Models.CourseListResponse;
import org.ACMSviet.SchedulerAMa.Models.DSS;
import org.ACMSviet.SchedulerAMa.Models.DSSModificationLog;
import org.ACMSviet.SchedulerAMa.Models.Repeatition;
import org.ACMSviet.SchedulerAMa.Models.RepeatitionListResponse;
import org.ACMSviet.SchedulerAMa.Models.RepeatitionUnit;
import org.ACMSviet.SchedulerAMa.Models.ResponseReport;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Example;
import org.hibernate.criterion.Restrictions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional
public class CourseService {
/*
* TODO List:
* -> Code Update Services. *done*
* -> Code Deletion Services. *done*
	 * -> Code Repeatition adding, updating and deletion services. (also check for no duplication of repeatitions in the same DSS; different types are excluded) *done*
	 * -> Create a service for getting all repeatitions (temp type courses overlapping the main type course) of a specific DSS. Use Hibernate's Example API.*done*
* -> Views:
* -> filter by class.*done*
* -> filter by faculty.*done*
* -> add error control in addingCourse functions.*done*
*
* -> error in schedule function, giving duplicate data sets.*done*
*
* -> create a service to generate course options available for a specific lecture.
*/
@Autowired
private SessionFactory sessionFactory;
private String TAG = "[CourseService] : ",
addOK = "ADD OK",
statusOK = "STATUS OK",
deleteOK = "DELETE OK",
addFailed = "ADD FAILED",
statusFailed = "STATUS FAILED",
deleteFailed = "DELETE FAILED",
updateOK = "UPDATE OK",
updateFailed = "UPDATE FAILED";
//function: add course to the data set.
public ResponseReport addCourse(Course course) {
if(course.getName().isEmpty()||course.getFaculty().isEmpty()||course.getType().isEmpty()||course.getDept().isEmpty()||course.getSem().isEmpty()||course.getSection().isEmpty()) {
return new ResponseReport().addStatus(addFailed).addError("Required fields : Name, Faculty, Type, Dept, Sem, Section.");
}
else if(getCourseByName(course.getName()).getStatus().equals(statusOK)) {
return new ResponseReport().addStatus(addFailed).addError("Course Already exists.");
}
this.sessionFactory.getCurrentSession().save(course);
notifyModification(course);
return new ResponseReport().addStatus(addOK);
}
//function: gather full list of courses from the data set.
public CourseListResponse getAllCourses() {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusOK).addError("Empty List of Courses.");
}
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
//function: find a course by its name (Primary Key)
public CourseListResponse getCourseByName(String name) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.eq("name", name)).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
//function: find a course by its faculty
public CourseListResponse getCourseByFaculty(String faculty) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.eq("faculty", faculty)).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
	//function: gather Courses list according to common Department, Semester and Section values, to get Courses for a unique class hall.
public CourseListResponse findCoursesByDSS(String dept,String sem,String section) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.and(Restrictions.eq("dept", dept),
Restrictions.and(Restrictions.eq("sem", sem), Restrictions.eq("section",section)))).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
	//function: gather Courses list according to common Department, Semester, Section and Faculty values, to get Courses for a unique class and faculty combination.
public CourseListResponse findCoursesByDSSF(String dept,String sem,String section,String faculty) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.and(
Restrictions.and(Restrictions.eq("dept", dept), Restrictions.eq("faculty", faculty))
,
Restrictions.and(Restrictions.eq("sem", sem), Restrictions.eq("section",section))
)
).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
//function: Update the contents of course data..
public ResponseReport updateCourse(Course course) {
try {
ArrayList<Course> courses = (ArrayList<Course>) getCourseByName(course.getName()).getCourses();
if(courses.isEmpty()) {
return new ResponseReport().addStatus(updateFailed).addError("No Such Course Found.");}
courses.get(0)
.addDept(course.getDept())
.addDescription(course.getDescription())
.addFac_contact(course.getFac_contact())
.addFaculty(course.getFaculty())
.addRefBook(course.getRefBook())
.addSection(course.getSection())
.addSem(course.getSem())
.addtRefBookLink(course.getRefBookLink())
.addType(course.getType());
this.sessionFactory.getCurrentSession().update(courses.get(0));
notifyModification(course);
return new ResponseReport().addStatus(updateOK);
}catch(Exception e) {
return new ResponseReport().addStatus(updateFailed).addError("No such Course Found.");
}
}
//function: delete course details from the database.(also all child repeatitions)
public ResponseReport deleteCourseByName(String name) {
//exception handling for no course found case.
try {
List<Course> courses = getCourseByName(name).getCourses();
System.out.println(TAG+"Course found:"+courses.get(0).getName());
			//exception handling when no repeatitions were gathered
try{
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>) getCourseRepeatitions(name).getRepeatitions();
if(!repeatitions.isEmpty()) {
for(Repeatition rep : repeatitions) {
this.sessionFactory.getCurrentSession().delete(rep);
}
}
}catch(Exception e) {
//Do Nothing..
System.out.println(TAG+"No Repeatitions associated with mentioned course");
}
this.sessionFactory.getCurrentSession().delete(courses.get(0));
notifyModification(courses.get(0));
return new ResponseReport().addStatus(deleteOK);
}catch(Exception e) {
return new ResponseReport().addStatus(deleteFailed).addError("No such Course found.");
}
}
	//function: delete all repeatitions available for a course.
public ResponseReport flushRepeatitionsByCourseName(String name) {
try {
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>) getCourseRepeatitions(name).getRepeatitions();
for(Repeatition rep : repeatitions) {
this.sessionFactory.getCurrentSession().delete(rep);
notifyModification(rep.getCourse());
}
return new ResponseReport().addStatus(deleteOK);
}catch(Exception e) {
return new ResponseReport().addStatus(deleteFailed).addError("No Repeatitions found for mentioned Course.");
}
}
	//function: add a repeatition to a course.
public ResponseReport addRepeatitions(String name,int weekDay,int lectureNo) {
try {
if(weekDay>5||lectureNo>7||weekDay<1||lectureNo<1) {
return new ResponseReport().addStatus(addFailed).addError("Range for weekDay : 1 - 5 & lectureNo : 1 -7");
}
Course course = getCourseByName(name).getCourses().get(0);
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>)this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class)
.add(
//finding repeatition schedule similarities
Restrictions.and(Restrictions.eq("weekDay", weekDay),Restrictions.eq("lectureNo", lectureNo))
).list();
if(!repeatitions.isEmpty()) {
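				//a slot counts as occupied only when an existing repeatition shares the same
				//Dept/Sem/Section AND the same course type; a main and a temp course of one
				//class may overlap on purpose (temp overrides main in the schedule views).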
for(Repeatition travRep : repeatitions) {
if(travRep.getCourse().getDept().equals(course.getDept())&&travRep.getCourse().getSem().equals(course.getSem())&&
travRep.getCourse().getSection().equals(course.getSection())&&travRep.getCourse().getType().equals(course.getType())) {
return new ResponseReport().addStatus(addFailed).addError("Similar Repeatition is available in the Schedule.");
}
}
this.sessionFactory.getCurrentSession().save(new Repeatition().addWeekDay(weekDay).addLectureNo(lectureNo).addCourse(course));
notifyModification(course);
return new ResponseReport().addStatus(updateOK);
}
else {
this.sessionFactory.getCurrentSession().save(new Repeatition().addWeekDay(weekDay).addLectureNo(lectureNo).addCourse(course));
notifyModification(course);
return new ResponseReport().addStatus(updateOK);
}
}catch(Exception e) {
			return new ResponseReport().addStatus(addFailed).addError("No Such Course Found.");
}
}
//function: get Repeatitions list for a Course.
public RepeatitionListResponse getCourseRepeatitions(String name){
try {
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>)this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class)
.add(Restrictions.eq("course", getCourseByName(name).getCourses().get(0))).list();
if(repeatitions.isEmpty()) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No Repeatitions for mentioned course.");
}else {
return new RepeatitionListResponse().addStatus(statusOK).addRepeatitions(repeatitions);
}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No such Course found.");
}
}
//function: Course list filtered by type
public CourseListResponse getCourseListByType(String type) {
if(!type.equals("main")&&!type.equals("temp")) {
return new CourseListResponse().addStatus(statusFailed).addError("Wrong Course Type entered.");
}
try {
ArrayList<Course> courses = (ArrayList<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class).add(Restrictions.eq("type", type)).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}catch(Exception e) {
return new CourseListResponse().addStatus(statusFailed).addError("Fetch Error Occured.");
}
}
//function: Delete Unique Repeatition for a mentioned course
public ResponseReport deleteUniqueRepeatitionByCourseName(String name,int weekDay,int lectureNo) {
if(weekDay>5||lectureNo>7||weekDay<1||lectureNo<1) {
return new ResponseReport().addStatus(deleteFailed).addError("Range for weekDay : 1 - 5 & lectureNo : 1 -7");
}
try {
Course course = getCourseByName(name).getCourses().get(0);
Repeatition repeatition = (Repeatition) this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class)
.add(Restrictions.and(Restrictions.eq("course", course),Restrictions.and(
Restrictions.eq("weekDay", weekDay), Restrictions.eq("lectureNo", lectureNo)) )).uniqueResult();
if(repeatition==null) {
return new ResponseReport().addStatus(deleteFailed).addError("No Such Repeatition Found for mentioned Course.");
}
this.sessionFactory.getCurrentSession().delete(repeatition);
notifyModification(course);
return new ResponseReport().addStatus(deleteOK);
}catch(Exception e) {
System.out.println(e);
return new ResponseReport().addStatus(deleteFailed).addError("No such Course Found.");
}
}
//function: get Repeatitions by Course type.
public RepeatitionListResponse getRepeatitionsByCourseType(String type) {
if(!type.equals("main")&&!type.equals("temp")){return new RepeatitionListResponse().addStatus(statusFailed).addError("Unsupported Course type entered.");}
try {
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>) this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class).list();
if(!repeatitions.isEmpty()) {
ArrayList<Repeatition> repeatitionList = new ArrayList<Repeatition>();
for(Repeatition repeatition : repeatitions ) {
if(repeatition.getCourse().getType().equals(type)) {
repeatitionList.add(repeatition);
}
}
if(!repeatitionList.isEmpty()) {return new RepeatitionListResponse().addRepeatitions(repeatitionList).addStatus(statusOK);}
else {return new RepeatitionListResponse().addStatus(statusFailed).addError("No repeatitions for mentioned Type.");}
}else {
return new RepeatitionListResponse().addStatus(statusFailed).addError("Repeatitions List Empty.");
}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No repeatitions Found");
}
}
	//function: Get all unique repeatitions of a specific DSS (where temp courses override main courses).
	//TODO: optimize this function to perform fewer iterations.
	//TODO: this code bails out if either the temp or the main course list is missing. FIX THIS.
public RepeatitionListResponse getScheduleForDSS(String dept,String sem,String section) {
ArrayList<Repeatition> tempRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("temp").getRepeatitions();
ArrayList<Repeatition> mainRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("main").getRepeatitions();
ArrayList<Repeatition> scheduleList = new ArrayList<Repeatition>();
try {
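			//getRepeatitions() is null when the type query above found nothing, so these
			//accesses deliberately trigger a NullPointerException that the catch below
			//turns into a clean error response.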
if(mainRepeatitions.isEmpty()) {}
if(tempRepeatitions.isEmpty()) {}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No Temp or Main Course Found.");
}
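		//build the effective timetable slot by slot: a temp repeatition always
		//overrides a main repeatition occupying the same weekDay/lectureNo of this DSS.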
for(int weekDay=1;weekDay<=5;weekDay++) {
for(int lectureNo=1;lectureNo<=7;lectureNo++) {
//for checking if a temp course for this repeatition is found.
boolean tempGet = false;
if(!tempRepeatitions.isEmpty()) {
//iteration for adding temp courses in scheduleList.
for(Repeatition tempRep : tempRepeatitions) {
if(tempRep.getWeekDay()==weekDay&&tempRep.getLectureNo()==lectureNo&&
tempRep.getCourse().getDept().equals(dept)&&tempRep.getCourse().getSem().equals(sem)&&tempRep.getCourse().getSection().equals(section)) {
scheduleList.add(tempRep);
tempGet=true;
}
}
}
//if tempCourse is found,
if(tempGet) {continue;}
if(!mainRepeatitions.isEmpty()) {
//iteration for adding main courses in scheduleList.
for(Repeatition mainRep : mainRepeatitions) {
if(mainRep.getWeekDay()==weekDay&&mainRep.getLectureNo()==lectureNo&&
mainRep.getCourse().getDept().equals(dept)&&mainRep.getCourse().getSem().equals(sem)&&mainRep.getCourse().getSection().equals(section)) {
scheduleList.add(mainRep);
}
}
}
}
}
if(!scheduleList.isEmpty()) {return new RepeatitionListResponse().addRepeatitions(scheduleList).addStatus(statusOK); }
else{
return new RepeatitionListResponse().addStatus(statusFailed).addError("Schedule for selected category is empty.");
}
}
	//function: Get all unique repeatitions of a specific DSS and weekDay (where temp courses override main courses).
	//TODO: optimize this function to perform fewer iterations.
	//TODO: this code bails out if either the temp or the main course list is missing. FIX THIS.
public RepeatitionListResponse getScheduleForDSSWeekDay(String dept,String sem,String section,int weekDay) {
if(weekDay<1||weekDay>5) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("Range for weekDay : 1 - 5");
}
ArrayList<Repeatition> tempRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("temp").getRepeatitions();
ArrayList<Repeatition> mainRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("main").getRepeatitions();
ArrayList<Repeatition> scheduleList = new ArrayList<Repeatition>();
try {
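			//same null-detection trick and temp-over-main overriding rule as
			//getScheduleForDSS, restricted here to a single weekDay.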
if(mainRepeatitions.isEmpty()) {}
if(tempRepeatitions.isEmpty()) {}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No Temp or Main Course Found.");
}
for(int lectureNo=1;lectureNo<=7;lectureNo++) {
//for checking if a temp course for this repeatition is found.
boolean tempGet = false;
if(!tempRepeatitions.isEmpty()) {
//iteration for adding temp courses in scheduleList.
for(Repeatition tempRep : tempRepeatitions) {
if(tempRep.getWeekDay()==weekDay&&tempRep.getLectureNo()==lectureNo&&
tempRep.getCourse().getDept().equals(dept)&&tempRep.getCourse().getSem().equals(sem)&&tempRep.getCourse().getSection().equals(section)) {
scheduleList.add(tempRep);
tempGet=true;
}
}
}
//if tempCourse is found,
if(tempGet) {continue;}
if(!mainRepeatitions.isEmpty()) {
//iteration for adding main courses in scheduleList.
for(Repeatition mainRep : mainRepeatitions) {
if(mainRep.getWeekDay()==weekDay&&mainRep.getLectureNo()==lectureNo&&
mainRep.getCourse().getDept().equals(dept)&&mainRep.getCourse().getSem().equals(sem)&&mainRep.getCourse().getSection().equals(section)) {
scheduleList.add(mainRep);
}
}
}
}
if(!scheduleList.isEmpty()) {return new RepeatitionListResponse().addRepeatitions(scheduleList).addStatus(statusOK); }
else{
return new RepeatitionListResponse().addStatus(statusFailed).addError("Schedule for selected category is empty.");
}
}
//function: generate available course options for a specific lecture of a class.
public CourseListResponse getCourseOptions(String dept,String sem,String section,int weekDay,int lectureNo) {
try {
ArrayList<Course> allCourses = (ArrayList<Course>) getAllCourses().getCourses();
for(Course course : allCourses){
//TODO Add functionality.
}
}catch(Exception e) {}
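// Stub: exceptions are swallowed and the method always returns null
// until the TODO above is implemented.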
return null;
}
//function: Add a list of repeatitions to a specific Course as a bundle.
public ResponseReport addRepeatitionListToCourseByName(String name,ArrayList<RepeatitionUnit> repeatitions) {
for(RepeatitionUnit repeat : repeatitions) {
if(addRepeatitions(name, repeat.getWeekDay(), repeat.getLectureNo()).getStatus().equals(addFailed)) {
return new ResponseReport().addStatus(addFailed).addError("Repeatition Add error occured.");
}
}
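// Note: this bundle is not atomic - repeatitions saved before a failing
// entry are not rolled back when the loop returns early above.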
return new ResponseReport().addStatus(addOK);
}
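/*
 * Usage sketch (illustrative only; assumes RepeatitionUnit offers the same
 * builder-style setters as Repeatition, e.g. addWeekDay()/addLectureNo()):
 *
 *   ArrayList<RepeatitionUnit> slots = new ArrayList<RepeatitionUnit>();
 *   slots.add(new RepeatitionUnit().addWeekDay(1).addLectureNo(2));
 *   slots.add(new RepeatitionUnit().addWeekDay(3).addLectureNo(5));
 *   ResponseReport report = courseService.addRepeatitionListToCourseByName("CS101", slots);
 *   // report.getStatus() is "ADD OK" on success.
 */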
//DSS services
//function: create a new Dept Sem Section modification log unit for every new DSS value.
public void createDSSModLog(DSS dss) {
this.sessionFactory.getCurrentSession().save(new DSSModificationLog().addModifiedCount(0)
.addDss(dss));
}
//function: get the DSS modification Log for a specific DSS value. (Primary Key)
public DSSModificationLog getDSSModLog(DSS dss) {
return (DSSModificationLog) this.sessionFactory.getCurrentSession().get(DSSModificationLog.class, dss);
}
//function: add new log modification increment for any new change in the specific DSS.
public void DSSModLogInc(DSS dss) {
DSSModificationLog dssmodlog = (DSSModificationLog) this.sessionFactory.getCurrentSession().get(DSSModificationLog.class, dss);
dssmodlog.setModifiedCount(dssmodlog.getModifiedCount()+1);
this.sessionFactory.getCurrentSession().update(dssmodlog);
System.out.println(TAG+"DSSModification Increment Implemented.");
}
//function: Modification notification to DSS service for logging the modification counter.
public void notifyModification(Course course) {
try {
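// If no modification log exists yet for this DSS, the lookup returns
// null and the increment below throws; the catch then creates a
// fresh log entry instead.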
getDSSModLog(
new DSS().addDept(course.getDept()).addSection(course.getSection()).addSem(course.getSem())
);
DSSModLogInc(
new DSS().addDept(course.getDept()).addSection(course.getSection()).addSem(course.getSem())
);
}catch(Exception e) {
createDSSModLog(
new DSS().addDept(course.getDept()).addSection(course.getSection()).addSem(course.getSem())
);
}
}
}
| SchedulerAMa/src/main/java/org/ACMSviet/SchedulerAMa/Services/CourseService.java | package org.ACMSviet.SchedulerAMa.Services;
import java.util.ArrayList;
import java.util.List;
import org.ACMSviet.SchedulerAMa.Models.Course;
import org.ACMSviet.SchedulerAMa.Models.CourseListResponse;
import org.ACMSviet.SchedulerAMa.Models.DSS;
import org.ACMSviet.SchedulerAMa.Models.DSSModificationLog;
import org.ACMSviet.SchedulerAMa.Models.Repeatition;
import org.ACMSviet.SchedulerAMa.Models.RepeatitionListResponse;
import org.ACMSviet.SchedulerAMa.Models.RepeatitionUnit;
import org.ACMSviet.SchedulerAMa.Models.ResponseReport;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Example;
import org.hibernate.criterion.Restrictions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional
public class CourseService {
/*
* TODO List:
* -> Code Update Services. *done*
* -> Code Deletion Services. *done*
* -> Code Repeatition adding, updating and deletion services. (also check for no duplication of repeatitions in same DSS, different types are excluded) *done*
* -> Create a service for getting all repeatitions(temp type course overlapping the main type course) of a specific DSS. use hibernate EXAMPLE API.*done*
* -> Views:
* -> filter by class.*done*
* -> filter by faculty.*done*
* -> add error control in addingCourse functions.*done*
*
* -> error in schedule function, giving duplicate data sets.*done*
*
* -> create a service to generate course options available for a specific lecture.
*/
@Autowired
private SessionFactory sessionFactory;
private String TAG = "[CourseService] : ",
addOK = "ADD OK",
statusOK = "STATUS OK",
deleteOK = "DELETE OK",
addFailed = "ADD FAILED",
statusFailed = "STATUS FAILED",
deleteFailed = "DELETE FAILED",
updateOK = "UPDATE OK",
updateFailed = "UPDATE FAILED";
//function: add course to the data set.
public ResponseReport addCourse(Course course) {
if(course.getName().isEmpty()||course.getFaculty().isEmpty()||course.getType().isEmpty()||course.getDept().isEmpty()||course.getSem().isEmpty()||course.getSection().isEmpty()) {
return new ResponseReport().addStatus(addFailed).addError("Required fields : Name, Faculty, Type, Dept, Sem, Section.");
}
else if(getCourseByName(course.getName()).getStatus().equals(statusOK)) {
return new ResponseReport().addStatus(addFailed).addError("Course Already exists.");
}
this.sessionFactory.getCurrentSession().save(course);
notifyModification(course);
return new ResponseReport().addStatus(addOK);
}
//function: gather full list of courses from the data set.
public CourseListResponse getAllCourses() {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusOK).addError("Empty List of Courses.");
}
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
//function: find a course by its name (Primary Key)
public CourseListResponse getCourseByName(String name) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.eq("name", name)).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
//function: find a course by its faculty
public CourseListResponse getCourseByFaculty(String faculty) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.eq("faculty", faculty)).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
//function: gather Courses list according to common Department, Semester and Section values. to get Courses for a unique class hall.
public CourseListResponse findCoursesByDSS(String dept,String sem,String section) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.and(Restrictions.eq("dept", dept),
Restrictions.and(Restrictions.eq("sem", sem), Restrictions.eq("section",section)))).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
//function: gather Courses list according to common Department, Semester, Section and Faculty values. to get Courses for a unique class and faculty hall.
public CourseListResponse findCoursesByDSSF(String dept,String sem,String section,String faculty) {
List<Course> courses = (List<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class)
.add(Restrictions.and(
Restrictions.and(Restrictions.eq("dept", dept), Restrictions.eq("faculty", faculty))
,
Restrictions.and(Restrictions.eq("sem", sem), Restrictions.eq("section",section))
)
).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
else {
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}
}
//function: Update the contents of course data..
public ResponseReport updateCourse(Course course) {
try {
ArrayList<Course> courses = (ArrayList<Course>) getCourseByName(course.getName()).getCourses();
if(courses.isEmpty()) {
return new ResponseReport().addStatus(updateFailed).addError("No Such Course Found.");}
courses.get(0)
.addDept(course.getDept())
.addDescription(course.getDescription())
.addFac_contact(course.getFac_contact())
.addFaculty(course.getFaculty())
.addRefBook(course.getRefBook())
.addSection(course.getSection())
.addSem(course.getSem())
.addtRefBookLink(course.getRefBookLink())
.addType(course.getType());
this.sessionFactory.getCurrentSession().update(courses.get(0));
notifyModification(course);
return new ResponseReport().addStatus(updateOK);
}catch(Exception e) {
return new ResponseReport().addStatus(updateFailed).addError("No such Course Found.");
}
}
//function: delete course details from the database.(also all child repeatitions)
public ResponseReport deleteCourseByName(String name) {
//exception handling for no course found case.
try {
List<Course> courses = getCourseByName(name).getCourses();
System.out.println(TAG+"Course found:"+courses.get(0).getName());
//exception handling for no repeatition gathered
try{
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>) getCourseRepeatitions(name).getRepeatitions();
if(!repeatitions.isEmpty()) {
for(Repeatition rep : repeatitions) {
this.sessionFactory.getCurrentSession().delete(rep);
}
}
}catch(Exception e) {
//Do Nothing..
System.out.println(TAG+"No Repeatitions associated with mentioned course");
}
this.sessionFactory.getCurrentSession().delete(courses.get(0));
notifyModification(courses.get(0));
return new ResponseReport().addStatus(deleteOK);
}catch(Exception e) {
return new ResponseReport().addStatus(deleteFailed).addError("No such Course found.");
}
}
//function: delete all repeatitions for course available.
public ResponseReport flushRepeatitionsByCourseName(String name) {
try {
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>) getCourseRepeatitions(name).getRepeatitions();
for(Repeatition rep : repeatitions) {
this.sessionFactory.getCurrentSession().delete(rep);
notifyModification(rep.getCourse());
}
return new ResponseReport().addStatus(deleteOK);
}catch(Exception e) {
return new ResponseReport().addStatus(deleteFailed).addError("No Repeatitions found for mentioned Course.");
}
}
//function: add repeatition from a course.
public ResponseReport addRepeatitions(String name,int weekDay,int lectureNo) {
try {
if(weekDay>5||lectureNo>7||weekDay<1||lectureNo<1) {
return new ResponseReport().addStatus(addFailed).addError("Range for weekDay : 1 - 5 & lectureNo : 1 -7");
}
Course course = getCourseByName(name).getCourses().get(0);
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>)this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class)
.add(
//finding repeatition schedule similarities
Restrictions.and(Restrictions.eq("weekDay", weekDay),Restrictions.eq("lectureNo", lectureNo))
).list();
if(!repeatitions.isEmpty()) {
for(Repeatition travRep : repeatitions) {
if(travRep.getCourse().getDept().equals(course.getDept())&&travRep.getCourse().getSem().equals(course.getSem())&&
travRep.getCourse().getSection().equals(course.getSection())&&travRep.getCourse().getType().equals(course.getType())) {
return new ResponseReport().addStatus(addFailed).addError("Similar Repeatition is available in the Schedule.");
}
}
this.sessionFactory.getCurrentSession().save(new Repeatition().addWeekDay(weekDay).addLectureNo(lectureNo).addCourse(course));
notifyModification(course);
return new ResponseReport().addStatus(updateOK);
}
else {
this.sessionFactory.getCurrentSession().save(new Repeatition().addWeekDay(weekDay).addLectureNo(lectureNo).addCourse(course));
notifyModification(course);
return new ResponseReport().addStatus(updateOK);
}
}catch(Exception e) {
return new ResponseReport().addError(addFailed).addError("No Such Course Found.");
}
}
//function: get Repeatitions list for a Course.
public RepeatitionListResponse getCourseRepeatitions(String name){
try {
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>)this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class)
.add(Restrictions.eq("course", getCourseByName(name).getCourses().get(0))).list();
if(repeatitions.isEmpty()) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No Repeatitions for mentioned course.");
}else {
return new RepeatitionListResponse().addStatus(statusOK).addRepeatitions(repeatitions);
}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No such Course found.");
}
}
//function: Course list filtered by type
public CourseListResponse getCourseListByType(String type) {
if(!type.equals("main")&&!type.equals("temp")) {
return new CourseListResponse().addStatus(statusFailed).addError("Wrong Course Type entered.");
}
try {
ArrayList<Course> courses = (ArrayList<Course>) this.sessionFactory.getCurrentSession().createCriteria(Course.class).add(Restrictions.eq("type", type)).list();
if(courses.isEmpty()) {
return new CourseListResponse().addStatus(statusFailed).addError("No such Course found.");
}
return new CourseListResponse().addCourses(courses).addStatus(statusOK);
}catch(Exception e) {
return new CourseListResponse().addStatus(statusFailed).addError("Fetch Error Occured.");
}
}
//function: Delete Unique Repeatition for a mentioned course
public ResponseReport deleteUniqueRepeatitionByCourseName(String name,int weekDay,int lectureNo) {
if(weekDay>5||lectureNo>7||weekDay<1||lectureNo<1) {
return new ResponseReport().addStatus(deleteFailed).addError("Range for weekDay : 1 - 5 & lectureNo : 1 -7");
}
try {
Course course = getCourseByName(name).getCourses().get(0);
Repeatition repeatition = (Repeatition) this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class)
.add(Restrictions.and(Restrictions.eq("course", course),Restrictions.and(
Restrictions.eq("weekDay", weekDay), Restrictions.eq("lectureNo", lectureNo)) )).uniqueResult();
if(repeatition==null) {
return new ResponseReport().addStatus(deleteFailed).addError("No Such Repeatition Found for mentioned Course.");
}
this.sessionFactory.getCurrentSession().delete(repeatition);
notifyModification(course);
return new ResponseReport().addStatus(deleteOK);
}catch(Exception e) {
System.out.println(e);
return new ResponseReport().addStatus(deleteFailed).addError("No such Course Found.");
}
}
//function: get Repeatitions by Course type.
public RepeatitionListResponse getRepeatitionsByCourseType(String type) {
if(!type.equals("main")&&!type.equals("temp")){return new RepeatitionListResponse().addStatus(statusFailed).addError("Unsupported Course type entered.");}
try {
ArrayList<Repeatition> repeatitions = (ArrayList<Repeatition>) this.sessionFactory.getCurrentSession().createCriteria(Repeatition.class).list();
if(!repeatitions.isEmpty()) {
ArrayList<Repeatition> repeatitionList = new ArrayList<Repeatition>();
for(Repeatition repeatition : repeatitions ) {
if(repeatition.getCourse().getType().equals(type)) {
repeatitionList.add(repeatition);
}
}
if(!repeatitionList.isEmpty()) {return new RepeatitionListResponse().addRepeatitions(repeatitionList).addStatus(statusOK);}
else {return new RepeatitionListResponse().addStatus(statusFailed).addError("No repeatitions for mentioned Type.");}
}else {
return new RepeatitionListResponse().addStatus(statusFailed).addError("Repeatitions List Empty.");
}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No repeatitions Found");
}
}
//function: Get all unique repeatitions of a specific DSS (where a temp course overlaps the main course).
//TODO: optimize this function to perform fewer iterations.
//TODO: this code crashes if there is no temp or main course individually. FIX THIS.
public RepeatitionListResponse getScheduleForDSS(String dept,String sem,String section) {
ArrayList<Repeatition> tempRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("temp").getRepeatitions();
ArrayList<Repeatition> mainRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("main").getRepeatitions();
ArrayList<Repeatition> scheduleList = new ArrayList<Repeatition>();
try {
if(mainRepeatitions.isEmpty()) {}
if(tempRepeatitions.isEmpty()) {}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No Temp or Main Course Found.");
}
for(int weekDay=1;weekDay<=5;weekDay++) {
for(int lectureNo=1;lectureNo<=7;lectureNo++) {
//for checking if a temp course for this repeatition is found.
boolean tempGet = false;
if(!tempRepeatitions.isEmpty()) {
//iteration for adding temp courses in scheduleList.
for(Repeatition tempRep : tempRepeatitions) {
if(tempRep.getWeekDay()==weekDay&&tempRep.getLectureNo()==lectureNo&&
tempRep.getCourse().getDept().equals(dept)&&tempRep.getCourse().getSem().equals(sem)&&tempRep.getCourse().getSection().equals(section)) {
scheduleList.add(tempRep);
tempGet=true;
}
}
}
//if tempCourse is found,
if(tempGet) {continue;}
if(!mainRepeatitions.isEmpty()) {
//iteration for adding main courses in scheduleList.
for(Repeatition mainRep : mainRepeatitions) {
if(mainRep.getWeekDay()==weekDay&&mainRep.getLectureNo()==lectureNo&&
mainRep.getCourse().getDept().equals(dept)&&mainRep.getCourse().getSem().equals(sem)&&mainRep.getCourse().getSection().equals(section)) {
scheduleList.add(mainRep);
}
}
}
}
}
if(!scheduleList.isEmpty()) {return new RepeatitionListResponse().addRepeatitions(scheduleList).addStatus(statusOK); }
else{
return new RepeatitionListResponse().addStatus(statusFailed).addError("Schedule for selected category is empty.");
}
}
//function: Get all unique repeatitions of a specific DSS and weekDay (where a temp course overlaps the main course).
//TODO: optimize this function to perform fewer iterations.
//TODO: this code crashes if there is no temp or main course individually. FIX THIS.
public RepeatitionListResponse getScheduleForDSSWeekDay(String dept,String sem,String section,int weekDay) {
if(weekDay<1||weekDay>5) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("Range for weekDay : 1 - 5");
}
ArrayList<Repeatition> tempRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("temp").getRepeatitions();
ArrayList<Repeatition> mainRepeatitions = (ArrayList<Repeatition>) getRepeatitionsByCourseType("main").getRepeatitions();
ArrayList<Repeatition> scheduleList = new ArrayList<Repeatition>();
try {
if(mainRepeatitions.isEmpty()) {}
if(tempRepeatitions.isEmpty()) {}
}catch(Exception e) {
return new RepeatitionListResponse().addStatus(statusFailed).addError("No Temp or Main Course Found.");
}
for(int lectureNo=1;lectureNo<=7;lectureNo++) {
//for checking if a temp course for this repeatition is found.
boolean tempGet = false;
if(!tempRepeatitions.isEmpty()) {
//iteration for adding temp courses in scheduleList.
for(Repeatition tempRep : tempRepeatitions) {
if(tempRep.getWeekDay()==weekDay&&tempRep.getLectureNo()==lectureNo&&
tempRep.getCourse().getDept().equals(dept)&&tempRep.getCourse().getSem().equals(sem)&&tempRep.getCourse().getSection().equals(section)) {
scheduleList.add(tempRep);
tempGet=true;
}
}
}
//if tempCourse is found,
if(tempGet) {continue;}
if(!mainRepeatitions.isEmpty()) {
//iteration for adding main courses in scheduleList.
for(Repeatition mainRep : mainRepeatitions) {
if(mainRep.getWeekDay()==weekDay&&mainRep.getLectureNo()==lectureNo&&
mainRep.getCourse().getDept().equals(dept)&&mainRep.getCourse().getSem().equals(sem)&&mainRep.getCourse().getSection().equals(section)) {
scheduleList.add(mainRep);
}
}
}
}
if(!scheduleList.isEmpty()) {return new RepeatitionListResponse().addRepeatitions(scheduleList).addStatus(statusOK); }
else{
return new RepeatitionListResponse().addStatus(statusFailed).addError("Schedule for selected category is empty.");
}
}
//function: generate available course options for a specific lecture of a class.
public CourseListResponse getCourseOptions(String dept,String sem,String section,int weekDay,int lectureNo) {
try {
ArrayList<Course> allCourses = (ArrayList<Course>) getAllCourses().getCourses();
for(Course course : allCourses){
//TODO Add functionality.
}
}catch(Exception e) {}
return null;
}
public ResponseReport addRepeatitionListToCourseByName(String name,ArrayList<RepeatitionUnit> repeatitions) {
for(RepeatitionUnit repeat : repeatitions) {
if(addRepeatitions(name, repeat.getWeekDay(), repeat.getLectureNo()).getStatus().equals(addFailed)) {
return new ResponseReport().addStatus(addFailed).addError("Repeatition Add error occured.");
}
}
return new ResponseReport().addStatus(addOK);
}
//DSS services
//function: create a new Dept Sem Section modification log unit for every new DSS value.
public void createDSSModLog(DSS dss) {
this.sessionFactory.getCurrentSession().save(new DSSModificationLog().addModifiedCount(0)
.addDss(dss));
}
//function: get the DSS modification Log for a specific DSS value. (Primary Key)
public DSSModificationLog getDSSModLog(DSS dss) {
return (DSSModificationLog) this.sessionFactory.getCurrentSession().get(DSSModificationLog.class, dss);
}
//function: add new log modification increment for any new change in the specific DSS.
public void DSSModLogInc(DSS dss) {
DSSModificationLog dssmodlog = (DSSModificationLog) this.sessionFactory.getCurrentSession().get(DSSModificationLog.class, dss);
dssmodlog.setModifiedCount(dssmodlog.getModifiedCount()+1);
this.sessionFactory.getCurrentSession().update(dssmodlog);
System.out.println(TAG+"DSSModification Increment Implemented.");
}
//function: Modification notification to DSS service for logging the modification counter.
public void notifyModification(Course course) {
try {
getDSSModLog(
new DSS().addDept(course.getDept()).addSection(course.getSection()).addSem(course.getSem())
);
DSSModLogInc(
new DSS().addDept(course.getDept()).addSection(course.getSection()).addSem(course.getSem())
);
}catch(Exception e) {
createDSSModLog(
new DSS().addDept(course.getDept()).addSection(course.getSection()).addSem(course.getSem())
);
}
}
}
| added function intro comment
| SchedulerAMa/src/main/java/org/ACMSviet/SchedulerAMa/Services/CourseService.java | added function intro comment | <ide><path>chedulerAMa/src/main/java/org/ACMSviet/SchedulerAMa/Services/CourseService.java
<ide> return null;
<ide> }
<ide>
<add> //function: Add a list of repeatitions to a specific Course as a bundle.
<ide> public ResponseReport addRepeatitionListToCourseByName(String name,ArrayList<RepeatitionUnit> repeatitions) {
<ide> for(RepeatitionUnit repeat : repeatitions) {
<ide>
<ide> if(addRepeatitions(name, repeat.getWeekDay(), repeat.getLectureNo()).getStatus().equals(addFailed)) {
<ide> return new ResponseReport().addStatus(addFailed).addError("Repeatition Add error occured.");
<ide> }
<del>
<ide>
<ide> }
<ide> return new ResponseReport().addStatus(addOK); |
|
Java | apache-2.0 | f9ede93b326d97ba5e06d0f2b499c21cc7928499 | 0 | awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples | // snippet-sourcedescription:[StreamingRetryApp.java is an application that demonstrates using the Amazon Transcribe retry client.]
// snippet-service:[transcribe]
// snippet-keyword:[Java]
// snippet-sourcesyntax:[java]
// snippet-keyword:[Amazon Transcribe]
// snippet-keyword:[Code Sample]
// snippet-keyword:[TranscribeStreamingAsyncClient]
// snippet-keyword:[StartStreamTranscriptionResponse]
// snippet-sourcetype:[snippet]
// snippet-sourcedate:[2019-01-10]
// snippet-sourceauthor:[AWS]
// snippet-start:[transcribe.java-streaming-retry-app]
/**
* COPYRIGHT:
* <p>
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.transcribestreaming.retryclient;
import com.amazonaws.transcribestreaming.TranscribeStreamingDemoApp;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.transcribestreaming.model.AudioStream;
import software.amazon.awssdk.services.transcribestreaming.model.LanguageCode;
import software.amazon.awssdk.services.transcribestreaming.model.StartStreamTranscriptionRequest;
import javax.sound.sampled.LineUnavailableException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import static com.amazonaws.transcribestreaming.TranscribeStreamingDemoApp.getCredentials;
public class StreamingRetryApp {
private static final String endpoint = "endpoint";
private static final Region region = Region.US_EAST_1;
private static final int sample_rate = 28800;
private static final String encoding = " ";
private static final String language = LanguageCode.EN_US.toString();
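    // Note: endpoint and encoding above are placeholders - supply a real
    // Transcribe streaming endpoint and a supported media encoding (and a
    // sample rate that matches your audio source) before running this example.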
public static void main(String args[]) throws URISyntaxException, ExecutionException, InterruptedException, LineUnavailableException, FileNotFoundException {
/**
* Create Transcribe streaming retry client using AWS credentials.
*/
TranscribeStreamingRetryClient client = new TranscribeStreamingRetryClient(EnvironmentVariableCredentialsProvider.create() ,endpoint, region);
StartStreamTranscriptionRequest request = StartStreamTranscriptionRequest.builder()
.languageCode(language)
.mediaEncoding(encoding)
.mediaSampleRateHertz(sample_rate)
.build();
/**
* Start real-time speech recognition. The Transcribe streaming java client uses the Reactive-streams
* interface. For reference on Reactive-streams:
* https://github.com/reactive-streams/reactive-streams-jvm
*/
CompletableFuture<Void> result = client.startStreamTranscription(
/**
* Request parameters. Refer to API documentation for details.
*/
request,
/**
* Provide an input audio stream.
* For input from a microphone, use getStreamFromMic().
* For input from a file, use getStreamFromFile().
*/
new AudioStreamPublisher(
new FileInputStream(new File("FileName"))),
/**
* Object that defines the behavior on how to handle the stream
*/
new StreamTranscriptionBehaviorImpl());
/**
* Synchronous wait for stream to close, and close client connection
*/
result.get();
client.close();
}
private static class AudioStreamPublisher implements Publisher<AudioStream> {
        private final InputStream inputStream;
        // The active subscription is tracked on the publisher itself;
        // Subscriber exposes no currentSubscription field.
        private TranscribeStreamingDemoApp.SubscriptionImpl currentSubscription;
private AudioStreamPublisher(InputStream inputStream) {
this.inputStream = inputStream;
}
@Override
public void subscribe(Subscriber<? super AudioStream> s) {
            if (this.currentSubscription == null) {
this.currentSubscription = new TranscribeStreamingDemoApp.SubscriptionImpl(s, inputStream);
} else {
this.currentSubscription.cancel();
this.currentSubscription = new TranscribeStreamingDemoApp.SubscriptionImpl(s, inputStream);
}
s.onSubscribe(currentSubscription);
}
}
}
// snippet-end:[transcribe.java-streaming-retry-app]
| javav2/example_code/transcribe/src/main/java/com/amazonaws/transcribestreaming/retryclient/StreamingRetryApp.java | // snippet-sourcedescription:[StreamingRetryApp.java is an application that demonstrates using the Amazon Transcribe retry client.]
// snippet-service:[transcribe]
// snippet-keyword:[Java]
// snippet-sourcesyntax:[java]
// snippet-keyword:[Amazon Transcribe]
// snippet-keyword:[Code Sample]
// snippet-keyword:[TranscribeStreamingAsyncClient]
// snippet-keyword:[StartStreamTranscriptionResponse]
// snippet-sourcetype:[snippet]
// snippet-sourcedate:[2019-01-10]
// snippet-sourceauthor:[AWS]
// snippet-start:[transcribe.java-streaming-retry-app]
/**
* COPYRIGHT:
* <p>
* Copyright 2018-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.transcribestreaming.retryclient;
import com.amazonaws.transcribestreaming.TranscribeStreamingDemoApp;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.transcribestreaming.model.AudioStream;
import software.amazon.awssdk.services.transcribestreaming.model.LanguageCode;
import software.amazon.awssdk.services.transcribestreaming.model.StartStreamTranscriptionRequest;
import javax.sound.sampled.LineUnavailableException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import static com.amazonaws.transcribestreaming.TranscribeStreamingDemoApp.getCredentials;
public class StreamingRetryApp {
private static final String endpoint = "endpoint";
private static final Region region = Region.US_EAST_1;
private static final int sample_rate = 28800;
private static final String encoding = " ";
private static final String language = LanguageCode.EN_US.toString();
public static void main(String args[]) throws URISyntaxException, ExecutionException, InterruptedException, LineUnavailableException, FileNotFoundException {
/**
* Create Transcribe streaming retry client using AWS credentials.
*/
TranscribeStreamingRetryClient client = new TranscribeStreamingRetryClient(EnvironmentVariableCredentialsProvider.create() ,endpoint, region);
StartStreamTranscriptionRequest request = StartStreamTranscriptionRequest.builder()
.languageCode(language)
.mediaEncoding(encoding)
.mediaSampleRateHertz(sample_rate)
.build();
/**
* Start real-time speech recognition. The Transcribe streaming java client uses the Reactive-streams
* interface. For reference on Reactive-streams:
* https://github.com/reactive-streams/reactive-streams-jvm
*/
CompletableFuture<Void> result = client.startStreamTranscription(
/**
* Request parameters. Refer to API documentation for details.
*/
request,
/**
* Provide an input audio stream.
* For input from a microphone, use getStreamFromMic().
* For input from a file, use getStreamFromFile().
*/
new AudioStreamPublisher(
new FileInputStream(new File("FileName"))),
/**
* Object that defines the behavior on how to handle the stream
*/
new StreamTranscriptionBehaviorImpl());
/**
* Synchronous wait for stream to close, and close client connection
*/
result.get();
client.close();
}
private static class AudioStreamPublisher implements Publisher<AudioStream> {
        private final InputStream inputStream;
        // The active subscription is tracked on the publisher itself;
        // Subscriber exposes no currentSubscription field.
        private TranscribeStreamingDemoApp.SubscriptionImpl currentSubscription;
private AudioStreamPublisher(InputStream inputStream) {
this.inputStream = inputStream;
}
@Override
public void subscribe(Subscriber<? super AudioStream> s) {
            if (this.currentSubscription == null) {
this.currentSubscription = new TranscribeStreamingDemoApp.SubscriptionImpl(s, inputStream);
} else {
this.currentSubscription.cancel();
this.currentSubscription = new TranscribeStreamingDemoApp.SubscriptionImpl(s, inputStream);
}
s.onSubscribe(currentSubscription);
}
}
}
// snippet-end:[transcribe.java-streaming-retry-app] | Update StreamingRetryApp.java | javav2/example_code/transcribe/src/main/java/com/amazonaws/transcribestreaming/retryclient/StreamingRetryApp.java | Update StreamingRetryApp.java | <ide><path>avav2/example_code/transcribe/src/main/java/com/amazonaws/transcribestreaming/retryclient/StreamingRetryApp.java
<ide> /**
<ide> * COPYRIGHT:
<ide> * <p>
<del> * Copyright 2018-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
<add> * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
<ide> * <p>
<ide> * Licensed under the Apache License, Version 2.0 (the "License").
<ide> * You may not use this file except in compliance with the License. |
|
Java | lgpl-2.1 | 28facf84f3e2b92ee89d727f26075cce9b01ef5d | 0 | genomescale/starbeast2,genomescale/starbeast2 | package starbeast2;
import beast.core.Description;
import beast.core.Input;
import beast.core.Input.Validate;
import beast.core.Operator;
import beast.core.parameter.RealParameter;
import beast.evolution.tree.Node;
import beast.util.Randomizer;
/**
* @author Huw Ogilvie
*/
@Description("Tree operator which randomly changes the height of a node, " +
"then reconstructs the tree from node heights.")
public class SAMau1999 extends Operator {
public final Input<SpeciesTree> treeInput = new Input<>("tree", "the species tree", Validate.REQUIRED);
public final Input<Double> windowInput = new Input<>("window", "size of the random walk window", 10.0);
public final Input<RealParameter> originInput = new Input<RealParameter>("origin", "The time when the process started", (RealParameter) null);
private int nextIndex;
private int nodeCount;
private int trueBifurcationCount;
private Node[] canonicalOrder;
private int[] trueBifurcations;
private double[] nodeHeights;
private Node[] leftChildren;
private Node[] rightChildren;
private Node[] parents;
private boolean superimposedAncestors;
private double window;
private boolean originSpecified;
@Override
public void initAndValidate() {
final SpeciesTree speciesTree = treeInput.get();
nodeCount = speciesTree.getNodeCount();
canonicalOrder = new Node[nodeCount];
trueBifurcations = new int[nodeCount];
nodeHeights = new double[nodeCount];
leftChildren = new Node[nodeCount];
rightChildren = new Node[nodeCount];
parents = new Node[nodeCount];
window = windowInput.get();
originSpecified = originInput.get() != null;
}
/* This proposal improves TREE SLIDE, developed by Joseph Heled. See section 3.4.1 of Heled's 2011 PhD thesis
"Bayesian Computational Inference of Species Trees and Population Sizes". TREE SLIDE was developed for ultrametric
binary species trees, this proposal has been made compatible with sampled ancestors by enforcing a minimum height
and disallowing superimposed sampled ancestor nodes. Also uses a random walk window with reflection in order to
sample the heights of nodes without maximum height constraints. */
@Override
public double proposal() {
final SpeciesTree tree = treeInput.get();
final Node originalRoot = tree.getRoot();
// chooseCanonicalOrder also fills in nodeHeights and trueBifurcations
// the lastIndex will be the last and right-most node index
trueBifurcationCount = 0;
nextIndex = 0;
chooseCanonicalOrder(originalRoot);
// no nodes can be changed by this operator
if (trueBifurcationCount == 0) {
return Double.NEGATIVE_INFINITY;
}
// pick a bifurcation at random and change the height
final int chosenNode = trueBifurcations[Randomizer.nextInt(trueBifurcationCount)];
final double originalHeight = nodeHeights[chosenNode];
// as long as the height is above the tips (or sampled ancestors) immediately either side in the canonical
// order, the species tree seems buildable from the new times (except in the case of superimposed
// sampled ancestors, as discussed below)
final double minHeight = Double.max(nodeHeights[chosenNode - 1], nodeHeights[chosenNode + 1]);
double maxHeight;
if (originSpecified) {
maxHeight = originInput.get().getValue();
} else {
maxHeight = Double.POSITIVE_INFINITY;
}
// Use reflection to avoid invalid heights. Height returns to original position every 2 * (max - min) units,
// so modulus is used to avoid unnecessary looping if the difference between window size and the tree scale
// is extreme.
final double heightDelta = (window * (Randomizer.nextDouble() - 0.5)) % (2.0 * (maxHeight - minHeight));
double newHeight = originalHeight + heightDelta;
while (newHeight < minHeight || newHeight > maxHeight) {
if (newHeight < minHeight) {
newHeight = minHeight + minHeight - newHeight;
}
if (newHeight > maxHeight) {
newHeight = maxHeight + maxHeight - newHeight;
}
}
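        // Worked example (assumed values): with minHeight = 1.0, maxHeight = 5.0,
        // originalHeight = 4.5 and heightDelta = 1.2, the first candidate 5.7
        // exceeds maxHeight and reflects to 5.0 + 5.0 - 5.7 = 4.3, back inside
        // the valid interval [1.0, 5.0].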
nodeHeights[chosenNode] = newHeight;
superimposedAncestors = false;
final int rootIndex = rebuildTree(0, nodeCount - 1);
parents[rootIndex] = null;
if (superimposedAncestors) {
return Double.NEGATIVE_INFINITY;
}
// wait until after checking for superimposed ancestors before modifying tree
canonicalOrder[chosenNode].setHeight(newHeight);
for (int i = 0; i < nodeCount; i++) {
canonicalOrder[i].setParent(parents[i]);
if (i % 2 == 1) { // internal node
canonicalOrder[i].setLeft(leftChildren[i]);
canonicalOrder[i].setRight(rightChildren[i]);
}
}
final Node newRoot = canonicalOrder[rootIndex];
        // For some reason, if the root is not reset - even when the root node is
        // the same node as before! - the morphological likelihood will be
        // radically wrong (the underlying cause is not yet understood).
tree.setRoot(newRoot);
return 0.0;
}
/* Performs an in-order traversal of the species tree, randomly shuffling left and right nodes, to produce
a canonical order in the sense of Mau et al 1999. Also identify which nodes are true bifurcations
(not fake nodes used for sampled ancestors) */
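    // Example: for a three-taxon tree ((A,B),C), one possible canonical order is
    // A, (A,B), B, ((A,B),C), C - leaves always land at even indices and
    // internal nodes at odd indices, which rebuildTree below relies on.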
private double chooseCanonicalOrder(final Node node) {
Node canonicalLeft;
Node canonicalRight;
if (Randomizer.nextBoolean()) {
canonicalLeft = node.getLeft();
canonicalRight = node.getRight();
} else {
canonicalLeft = node.getRight();
canonicalRight = node.getLeft();
}
double leftChildHeight;
if (canonicalLeft.isLeaf()) {
final int leftChildIndex = nextIndex;
nextIndex++;
canonicalOrder[leftChildIndex] = canonicalLeft;
leftChildHeight = canonicalLeft.getHeight();
nodeHeights[leftChildIndex] = leftChildHeight;
} else {
leftChildHeight = chooseCanonicalOrder(canonicalLeft);
}
final int thisIndex = nextIndex;
nextIndex++;
canonicalOrder[thisIndex] = node;
final double thisHeight = node.getHeight();
nodeHeights[thisIndex] = thisHeight;
double rightChildHeight;
if (canonicalRight.isLeaf()) {
final int rightChildIndex = nextIndex;
nextIndex++;
canonicalOrder[rightChildIndex] = canonicalRight;
rightChildHeight = canonicalRight.getHeight();
nodeHeights[rightChildIndex] = rightChildHeight;
} else {
rightChildHeight = chooseCanonicalOrder(canonicalRight);
}
if (thisHeight > leftChildHeight && thisHeight > rightChildHeight) {
trueBifurcations[trueBifurcationCount] = thisIndex;
trueBifurcationCount++;
}
return thisHeight;
}
/* from and to are inclusive */
private int rebuildTree(final int from, final int to) {
double thisHeight = 0.0;
int nodeIndex = -1;
/* Only check internal nodes, which are odd numbered (leaves are even numbered). If there are multiple highest
internal nodes in the range, they are likely fake bifurcations, and connecting
them will result in multiple sampled ancestors at the same point in time along the same lineage.
In this case we repeat changing the height of the chosen node until this no longer occurs.
This is similar to the following behaviour of LeafToSampledAncestorJump (see lines 68-70):
if (getOtherChild(parent, leaf).getHeight() >= leaf.getHeight()) return Double.NEGATIVE_INFINITY; */
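        // Worked example (assumed heights): over the range [0..4], with internal
        // heights nodeHeights[1] = 2.0 and nodeHeights[3] = 3.5, index 3 is the
        // highest internal node and roots the range; [0..2] is rebuilt as its
        // left subtree and the leaf at index 4 becomes its right child.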
for (int i = from + 1; i < to; i = i + 2) {
if (nodeHeights[i] > thisHeight) {
thisHeight = nodeHeights[i];
nodeIndex = i;
} else if (nodeHeights[i] == thisHeight) {
superimposedAncestors = true;
}
}
int leftNodeIndex;
if (from == nodeIndex - 1) {
leftNodeIndex = from;
} else {
leftNodeIndex = rebuildTree(from, nodeIndex - 1);
}
parents[leftNodeIndex] = canonicalOrder[nodeIndex];
leftChildren[nodeIndex] = canonicalOrder[leftNodeIndex];
int rightNodeIndex;
if (nodeIndex + 1 == to) {
rightNodeIndex = to;
} else {
rightNodeIndex = rebuildTree(nodeIndex + 1, to);
}
parents[rightNodeIndex] = canonicalOrder[nodeIndex];
rightChildren[nodeIndex] = canonicalOrder[rightNodeIndex];
return nodeIndex;
}
}
| src/starbeast2/SAMau1999.java | package starbeast2;
import beast.core.Description;
import beast.core.Input;
import beast.core.Input.Validate;
import beast.core.Operator;
import beast.core.parameter.RealParameter;
import beast.evolution.tree.Node;
import beast.evolution.tree.Tree;
import beast.util.Randomizer;
/**
* @author Huw Ogilvie
*/
@Description("Tree operator which randomly changes the height of a node, " +
"then reconstructs the tree from node heights.")
public class SAMau1999 extends Operator {
public final Input<Tree> treeInput = new Input<>("tree", "the tree", Validate.REQUIRED);
public final Input<Double> windowInput = new Input<>("window", "size of the random walk window", 10.0);
public final Input<RealParameter> originInput = new Input<RealParameter>("origin", "The time when the process started", (RealParameter) null);
private int nextIndex;
private int nodeCount;
private int trueBifurcationCount;
private Node[] canonicalOrder;
private int[] trueBifurcations;
private double[] nodeHeights;
private boolean superimposedAncestors;
private double window;
private boolean originSpecified;
@Override
public void initAndValidate() {
final Tree tree = treeInput.get();
nodeCount = tree.getNodeCount();
canonicalOrder = new Node[nodeCount];
trueBifurcations = new int[nodeCount];
nodeHeights = new double[nodeCount];
window = windowInput.get();
originSpecified = originInput.get() != null;
}
/* This improves the proposal suggested by Mau (1999). It has been made compatible with sampled ancestors by
enforcing a minimum height and disallowing superimposed sampled ancestor nodes. */
@Override
public double proposal() {
final Tree tree = treeInput.get();
final Node originalRoot = tree.getRoot();
double maxHeight;
if (originSpecified) {
maxHeight = originInput.get().getValue();
} else {
maxHeight = Double.POSITIVE_INFINITY;
}
// chooseCanonicalOrder also fills in nodeHeights and trueBifurcations
// the lastIndex will be the last and right-most node index
trueBifurcationCount = 0;
nextIndex = 0;
chooseCanonicalOrder(originalRoot);
// no nodes can be changed by this operator
if (trueBifurcationCount == 0) {
return Double.NEGATIVE_INFINITY;
}
// pick a bifurcation at random and change the height
final int chosenNode = trueBifurcations[Randomizer.nextInt(trueBifurcationCount)];
// as long as the height is above the tips (or sampled ancestors) immediately either side in the canonical
// order, the species tree seems buildable from the new times
final double minHeight = Double.max(nodeHeights[chosenNode - 1], nodeHeights[chosenNode + 1]);
final double heightDelta = window * (Randomizer.nextDouble() - 0.5);
// use reflection to avoid invalid heights
double newHeight = nodeHeights[chosenNode] + heightDelta;
while (newHeight < minHeight || newHeight > maxHeight) {
if (newHeight < minHeight) {
newHeight = minHeight + minHeight - newHeight;
}
if (newHeight > maxHeight) {
newHeight = maxHeight + maxHeight - newHeight;
}
}
nodeHeights[chosenNode] = newHeight;
canonicalOrder[chosenNode].setHeight(newHeight);
superimposedAncestors = false;
final int rootIndex = rebuildTree(0, nodeCount - 1);
if (superimposedAncestors) {
return Double.NEGATIVE_INFINITY;
}
final Node newRoot = canonicalOrder[rootIndex];
if (newRoot != originalRoot) {
newRoot.setParent(null);
tree.setRoot(newRoot);
}
return 0.0;
}
/* Performs an in-order traversal of the species tree, randomly shuffling left and right nodes, to produce
a canonical order in the sense of Mau et al 1999. Also identify which nodes are true bifurcations
(not fake nodes used for sampled ancestors) */
private double chooseCanonicalOrder(final Node node) {
Node canonicalLeft;
Node canonicalRight;
if (Randomizer.nextBoolean()) {
canonicalLeft = node.getLeft();
canonicalRight = node.getRight();
} else {
canonicalLeft = node.getRight();
canonicalRight = node.getLeft();
}
double leftChildHeight;
if (canonicalLeft.isLeaf()) {
final int leftChildIndex = nextIndex;
nextIndex++;
canonicalOrder[leftChildIndex] = canonicalLeft;
leftChildHeight = canonicalLeft.getHeight();
nodeHeights[leftChildIndex] = leftChildHeight;
} else {
leftChildHeight = chooseCanonicalOrder(canonicalLeft);
}
final int thisIndex = nextIndex;
nextIndex++;
canonicalOrder[thisIndex] = node;
final double thisHeight = node.getHeight();
nodeHeights[thisIndex] = thisHeight;
double rightChildHeight;
if (canonicalRight.isLeaf()) {
final int rightChildIndex = nextIndex;
nextIndex++;
canonicalOrder[rightChildIndex] = canonicalRight;
rightChildHeight = canonicalRight.getHeight();
nodeHeights[rightChildIndex] = rightChildHeight;
} else {
rightChildHeight = chooseCanonicalOrder(canonicalRight);
}
if (thisHeight > leftChildHeight && thisHeight > rightChildHeight) {
trueBifurcations[trueBifurcationCount] = thisIndex;
trueBifurcationCount++;
}
return thisHeight;
}
/* from and to are inclusive */
private int rebuildTree(final int from, final int to) {
double thisHeight = 0.0;
int nodeIndex = -1;
/* Only check internal nodes, which are odd numbered (leaves are even numbered). Reject move if multiple
internal nodes in the range have the same height, as they are likely fake bifurcations, and connecting
them will result in multiple sampled ancestors at the same point in time along the same lineage. This
matches the following behaviour of LeafToSampledAncestorJump (see lines 68-70):
if (getOtherChild(parent, leaf).getHeight() >= leaf.getHeight()) return Double.NEGATIVE_INFINITY; */
for (int i = from + 1; i < to; i = i + 2) {
if (nodeHeights[i] > thisHeight) {
thisHeight = nodeHeights[i];
nodeIndex = i;
} else if (nodeHeights[i] == thisHeight) {
superimposedAncestors = true;
}
}
int leftNodeIndex;
if (from == nodeIndex - 1) {
leftNodeIndex = from;
} else {
leftNodeIndex = rebuildTree(from, nodeIndex - 1);
}
canonicalOrder[leftNodeIndex].setParent(canonicalOrder[nodeIndex]);
canonicalOrder[nodeIndex].setLeft(canonicalOrder[leftNodeIndex]);
int rightNodeIndex;
if (nodeIndex + 1 == to) {
rightNodeIndex = to;
} else {
rightNodeIndex = rebuildTree(nodeIndex + 1, to);
}
canonicalOrder[rightNodeIndex].setParent(canonicalOrder[nodeIndex]);
canonicalOrder[nodeIndex].setRight(canonicalOrder[rightNodeIndex]);
return nodeIndex;
}
}
| Sync up SAMau1999
| src/starbeast2/SAMau1999.java | Sync up SAMau1999 | <ide><path>rc/starbeast2/SAMau1999.java
<ide> import beast.core.Operator;
<ide> import beast.core.parameter.RealParameter;
<ide> import beast.evolution.tree.Node;
<del>import beast.evolution.tree.Tree;
<ide> import beast.util.Randomizer;
<add>
<ide>
<ide> /**
<ide> * @author Huw Ogilvie
<ide> @Description("Tree operator which randomly changes the height of a node, " +
<ide> "then reconstructs the tree from node heights.")
<ide> public class SAMau1999 extends Operator {
<del> public final Input<Tree> treeInput = new Input<>("tree", "the tree", Validate.REQUIRED);
<add> public final Input<SpeciesTree> treeInput = new Input<>("tree", "the species tree", Validate.REQUIRED);
<ide> public final Input<Double> windowInput = new Input<>("window", "size of the random walk window", 10.0);
<ide> public final Input<RealParameter> originInput = new Input<RealParameter>("origin", "The time when the process started", (RealParameter) null);
<ide>
<ide> private Node[] canonicalOrder;
<ide> private int[] trueBifurcations;
<ide> private double[] nodeHeights;
<add> private Node[] leftChildren;
<add> private Node[] rightChildren;
<add> private Node[] parents;
<ide> private boolean superimposedAncestors;
<ide> private double window;
<ide> private boolean originSpecified;
<ide>
<ide> @Override
<ide> public void initAndValidate() {
<del> final Tree tree = treeInput.get();
<del> nodeCount = tree.getNodeCount();
<add> final SpeciesTree speciesTree = treeInput.get();
<add> nodeCount = speciesTree.getNodeCount();
<ide> canonicalOrder = new Node[nodeCount];
<ide> trueBifurcations = new int[nodeCount];
<ide> nodeHeights = new double[nodeCount];
<add> leftChildren = new Node[nodeCount];
<add> rightChildren = new Node[nodeCount];
<add> parents = new Node[nodeCount];
<ide> window = windowInput.get();
<ide> originSpecified = originInput.get() != null;
<ide> }
<ide>
<del> /* This improves the proposal suggested by Mau (1999). It has been made compatible with sampled ancestors by
<del> enforcing a minimum height and disallowing superimposed sampled ancestor nodes. */
<add> /* This proposal improves TREE SLIDE, developed by Joseph Heled. See section 3.4.1 of Heled's 2011 PhD thesis
<add> "Bayesian Computational Inference of Species Trees and Population Sizes". TREE SLIDE was developed for ultrametric
<add> binary species trees, this proposal has been made compatible with sampled ancestors by enforcing a minimum height
<add> and disallowing superimposed sampled ancestor nodes. Also uses a random walk window with reflection in order to
<add> sample the heights of nodes without maximum height constraints. */
<ide> @Override
<ide> public double proposal() {
<del> final Tree tree = treeInput.get();
<add> final SpeciesTree tree = treeInput.get();
<ide> final Node originalRoot = tree.getRoot();
<del>
<del> double maxHeight;
<del> if (originSpecified) {
<del> maxHeight = originInput.get().getValue();
<del> } else {
<del> maxHeight = Double.POSITIVE_INFINITY;
<del> }
<ide>
<ide> // chooseCanonicalOrder also fills in nodeHeights and trueBifurcations
<ide> // the lastIndex will be the last and right-most node index
<ide>
<ide> // pick a bifurcation at random and change the height
<ide> final int chosenNode = trueBifurcations[Randomizer.nextInt(trueBifurcationCount)];
<add> final double originalHeight = nodeHeights[chosenNode];
<add>
<ide> // as long as the height is above the tips (or sampled ancestors) immediately either side in the canonical
<del> // order, the species tree seems buildable from the new times
<add> // order, the species tree seems buildable from the new times (except in the case of superimposed
<add> // sampled ancestors, as discussed below)
<ide> final double minHeight = Double.max(nodeHeights[chosenNode - 1], nodeHeights[chosenNode + 1]);
<ide>
<del> final double heightDelta = window * (Randomizer.nextDouble() - 0.5);
<del>
<del> // use reflection to avoid invalid heights
<del> double newHeight = nodeHeights[chosenNode] + heightDelta;
<add> double maxHeight;
<add> if (originSpecified) {
<add> maxHeight = originInput.get().getValue();
<add> } else {
<add> maxHeight = Double.POSITIVE_INFINITY;
<add> }
<add>
<add> // Use reflection to avoid invalid heights. Height returns to original position every 2 * (max - min) units,
<add> // so modulus is used to avoid unnecessary looping if the difference between window size and the tree scale
<add> // is extreme.
<add> final double heightDelta = (window * (Randomizer.nextDouble() - 0.5)) % (2.0 * (maxHeight - minHeight));
<add> double newHeight = originalHeight + heightDelta;
<ide> while (newHeight < minHeight || newHeight > maxHeight) {
<ide> if (newHeight < minHeight) {
<ide> newHeight = minHeight + minHeight - newHeight;
<ide> }
<ide>
<ide> nodeHeights[chosenNode] = newHeight;
<del> canonicalOrder[chosenNode].setHeight(newHeight);
<ide>
<ide> superimposedAncestors = false;
<ide> final int rootIndex = rebuildTree(0, nodeCount - 1);
<add> parents[rootIndex] = null;
<ide> if (superimposedAncestors) {
<ide> return Double.NEGATIVE_INFINITY;
<ide> }
<ide>
<add> // wait until after checking for superimposed ancestors before modifying tree
<add> canonicalOrder[chosenNode].setHeight(newHeight);
<add>
<add> for (int i = 0; i < nodeCount; i++) {
<add> canonicalOrder[i].setParent(parents[i]);
<add>
<add> if (i % 2 == 1) { // internal node
<add> canonicalOrder[i].setLeft(leftChildren[i]);
<add> canonicalOrder[i].setRight(rightChildren[i]);
<add> }
<add> }
<add>
<ide> final Node newRoot = canonicalOrder[rootIndex];
<del> if (newRoot != originalRoot) {
<del> newRoot.setParent(null);
<del> tree.setRoot(newRoot);
<del> }
<add> // for some reason if the root is not reset - even if the root node is the same node as before! - the
<add> // morphological likelihood will be radically wrong (idk why)
<add> tree.setRoot(newRoot);
<ide>
<ide> return 0.0;
<ide> }
<ide> double thisHeight = 0.0;
<ide> int nodeIndex = -1;
<ide>
<del> /* Only check internal nodes, which are odd numbered (leaves are even numbered). Reject move if multiple
<del> internal nodes in the range have the same height, as they are likely fake bifurcations, and connecting
<del> them will result in multiple sampled ancestors at the same point in time along the same lineage. This
<del> matches the following behaviour of LeafToSampledAncestorJump (see lines 68-70):
<add> /* Only check internal nodes, which are odd numbered (leaves are even numbered). If there are multiple highest
<add> internal nodes in the range, they are likely fake bifurcations, and connecting
<add> them will result in multiple sampled ancestors at the same point in time along the same lineage.
<add> In this case we repeat changing the height of the chosen node until this no longer occurs.
<add> This is similar to the following behaviour of LeafToSampledAncestorJump (see lines 68-70):
<ide> if (getOtherChild(parent, leaf).getHeight() >= leaf.getHeight()) return Double.NEGATIVE_INFINITY; */
<ide> for (int i = from + 1; i < to; i = i + 2) {
<ide> if (nodeHeights[i] > thisHeight) {
<ide> leftNodeIndex = rebuildTree(from, nodeIndex - 1);
<ide> }
<ide>
<del> canonicalOrder[leftNodeIndex].setParent(canonicalOrder[nodeIndex]);
<del> canonicalOrder[nodeIndex].setLeft(canonicalOrder[leftNodeIndex]);
<add> parents[leftNodeIndex] = canonicalOrder[nodeIndex];
<add> leftChildren[nodeIndex] = canonicalOrder[leftNodeIndex];
<ide>
<ide> int rightNodeIndex;
<ide> if (nodeIndex + 1 == to) {
<ide> rightNodeIndex = rebuildTree(nodeIndex + 1, to);
<ide> }
<ide>
<del> canonicalOrder[rightNodeIndex].setParent(canonicalOrder[nodeIndex]);
<del> canonicalOrder[nodeIndex].setRight(canonicalOrder[rightNodeIndex]);
<add> parents[rightNodeIndex] = canonicalOrder[nodeIndex];
<add> rightChildren[nodeIndex] = canonicalOrder[rightNodeIndex];
<ide>
<ide> return nodeIndex;
<ide> } |
|
Java | apache-2.0 | error: pathspec 'sli/api/src/main/java/org/slc/sli/api/resources/SessionDebugResource.java' did not match any file(s) known to git
| 16336b8040ae80c120800fc907be6d3f982cba21 | 1 | inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service | package org.slc.sli.api.resources;
import org.springframework.context.annotation.Scope;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
/**
* System resource class for security session context.
* Hosted at the URI path "/system/session"
*/
@Path("/system/session")
@Component
@Scope("request")
public class SessionDebugResource {
/**
* Method processing HTTP GET requests, producing "application/json" MIME media
* type.
*
     * @return SecurityContext that will be sent back as a response of type "application/json".
*/
@GET
@Produces("application/json")
public SecurityContext getSecurityContext() {
return SecurityContextHolder.getContext();
}
}
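/*
 * Client sketch (illustrative; not part of the commit - host, port, and context
 * path are assumptions):
 *
 *   URL url = new URL("http://localhost:8080/system/session");
 *   HttpURLConnection conn = (HttpURLConnection) url.openConnection();
 *   conn.setRequestProperty("Accept", "application/json");
 *   // The response body is the current Spring SecurityContext rendered as JSON.
 */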
| sli/api/src/main/java/org/slc/sli/api/resources/SessionDebugResource.java | added session debug resource. serves security context info in json
| sli/api/src/main/java/org/slc/sli/api/resources/SessionDebugResource.java | added session debug resource. serves security context info in json | <ide><path>li/api/src/main/java/org/slc/sli/api/resources/SessionDebugResource.java
<add>package org.slc.sli.api.resources;
<add>
<add>import org.springframework.context.annotation.Scope;
<add>import org.springframework.security.core.context.SecurityContext;
<add>import org.springframework.security.core.context.SecurityContextHolder;
<add>import org.springframework.stereotype.Component;
<add>
<add>import javax.ws.rs.GET;
<add>import javax.ws.rs.Path;
<add>import javax.ws.rs.Produces;
<add>
<add>/**
<add> * System resource class for security session context.
<add> * Hosted at the URI path "/system/session"
<add> */
<add>@Path("/system/session")
<add>@Component
<add>@Scope("request")
<add>public class SessionDebugResource {
<add>
<add> /**
<add> * Method processing HTTP GET requests, producing "application/json" MIME media
<add> * type.
<add> *
<add>     * @return SecurityContext that will be sent back as a response of type "application/json".
<add> */
<add> @GET
<add> @Produces("application/json")
<add> public SecurityContext getSecurityContext() {
<add> return SecurityContextHolder.getContext();
<add> }
<add>} |
|
Java | apache-2.0 | b0fd91370f12032b323859c1e44b6be479fc0832 | 0 | apache/pdfbox,apache/pdfbox,kalaspuffar/pdfbox,kalaspuffar/pdfbox | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.graphics.xobject;
import java.awt.AlphaComposite;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Transparency;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.IndexColorModel;
import java.awt.image.WritableRaster;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.common.PDStream;
import org.apache.pdfbox.pdmodel.common.function.PDFunction;
import org.apache.pdfbox.pdmodel.graphics.color.PDColorSpace;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceGray;
import org.apache.pdfbox.pdmodel.graphics.color.PDICCBased;
import org.apache.pdfbox.pdmodel.graphics.color.PDIndexed;
import org.apache.pdfbox.pdmodel.graphics.color.PDSeparation;
import org.apache.pdfbox.util.ImageIOUtil;
/**
* This class contains a PixelMap Image.
* @author <a href="mailto:[email protected]">Ben Litchfield</a>
* @author mathiak
* @version $Revision: 1.10 $
*/
public class PDPixelMap extends PDXObjectImage
{
/**
* Log instance.
*/
private static final Log LOG = LogFactory.getLog(PDPixelMap.class);
private BufferedImage image = null;
private static final String PNG = "png";
/**
* Standard constructor. Basically does nothing.
* @param pdStream The stream that holds the pixel map.
*/
public PDPixelMap(PDStream pdStream)
{
super(pdStream, PNG);
}
/**
* Construct a pixel map image from an AWT image.
*
* @param doc The PDF document to embed the image in.
* @param awtImage The image to read data from.
*
* @throws IOException If there is an error while embedding this image.
*/
/*
* This method is broken and needs to be implemented, any takers?
public PDPixelMap(PDDocument doc, BufferedImage awtImage) throws IOException
{
super( doc, "png");
image = awtImage;
setWidth( image.getWidth() );
setHeight( image.getHeight() );
ColorModel cm = image.getColorModel();
ColorSpace cs = cm.getColorSpace();
PDColorSpace pdColorSpace = PDColorSpaceFactory.createColorSpace( doc, cs );
setColorSpace( pdColorSpace );
//setColorSpace( )
PDStream stream = getPDStream();
OutputStream output = null;
try
{
output = stream.createOutputStream();
DataBuffer buffer = awtImage.getRaster().getDataBuffer();
if( buffer instanceof DataBufferByte )
{
DataBufferByte byteBuffer = (DataBufferByte)buffer;
byte[] data = byteBuffer.getData();
output.write( data );
}
setBitsPerComponent( cm.getPixelSize() );
}
finally
{
if( output != null )
{
output.close();
}
}
}*/
/**
* Returns a {@link java.awt.image.BufferedImage} of the COSStream
* set in the constructor or null if the COSStream could not be encoded.
*
* @return {@inheritDoc}
*
* @throws IOException {@inheritDoc}
*/
public BufferedImage getRGBImage() throws IOException
{
if( image != null )
{
return image;
}
try
{
int width = getWidth();
int height = getHeight();
int bpc = getBitsPerComponent();
byte[] array = getPDStream().getByteArray();
if (array.length == 0)
{
LOG.error("Something went wrong ... the pixelmap doesn't contain any data.");
return null;
}
// Get the ColorModel right
PDColorSpace colorspace = getColorSpace();
if (colorspace == null)
{
LOG.error("getColorSpace() returned NULL. Predictor = " + getPredictor());
return null;
}
ColorModel cm = null;
if (colorspace instanceof PDIndexed)
{
PDIndexed csIndexed = (PDIndexed)colorspace;
                // the base color space uses 8 bits per component, as the indexed color values
// of an indexed color space are always in a range from 0 to 255
ColorModel baseColorModel = csIndexed.getBaseColorSpace().createColorModel(8);
// number of possible color values in the target color space
int numberOfColorValues = 1 << bpc;
// number of indexed color values
int highValue = csIndexed.getHighValue();
// choose the correct size, sometimes there are more indexed values than needed
                // and sometimes there are fewer indexed values than possible
int size = Math.min(numberOfColorValues-1, highValue);
byte[] index = csIndexed.getLookupData();
boolean hasAlpha = baseColorModel.hasAlpha();
COSArray maskArray = getMask();
if( baseColorModel.getTransferType() != DataBuffer.TYPE_BYTE )
{
throw new IOException( "Not implemented" );
}
// the IndexColorModel uses RGB-based color values
                // which leads to 3 color components and an optional alpha channel
int numberOfComponents = 3 + (hasAlpha ? 1 : 0);
int buffersize = (size+1) * numberOfComponents;
byte[] colorValues = new byte[buffersize];
byte[] inData = new byte[baseColorModel.getNumComponents()];
int bufferIndex = 0;
for( int i = 0; i <= size; i++ )
{
System.arraycopy(index, i * inData.length, inData, 0, inData.length);
// convert the indexed color values to RGB
colorValues[bufferIndex] = (byte)baseColorModel.getRed(inData);
colorValues[bufferIndex+1] = (byte)baseColorModel.getGreen(inData);
colorValues[bufferIndex+2] = (byte)baseColorModel.getBlue(inData);
if( hasAlpha )
{
colorValues[bufferIndex+3] = (byte)baseColorModel.getAlpha(inData);
}
bufferIndex += numberOfComponents;
}
if (maskArray != null)
{
cm = new IndexColorModel(bpc, size+1, colorValues, 0, hasAlpha, maskArray.getInt(0));
}
else
{
cm = new IndexColorModel(bpc, size+1, colorValues, 0, hasAlpha);
}
}
else if (colorspace instanceof PDSeparation)
{
PDSeparation csSeparation = (PDSeparation)colorspace;
int numberOfComponents = csSeparation.getAlternateColorSpace().getNumberOfComponents();
PDFunction tintTransformFunc = csSeparation.getTintTransform();
COSArray decode = getDecode();
// we have to invert the tint-values,
// if the Decode array exists and consists of (1,0)
boolean invert = decode != null && decode.getInt(0) == 1;
                // TODO add interpolation for decode values other than 1,0
int maxValue = (int)Math.pow(2,bpc) - 1;
// destination array
byte[] mappedData = new byte[width*height*numberOfComponents];
int rowLength = width*numberOfComponents;
float[] input = new float[1];
for ( int i = 0; i < height; i++ )
{
int rowOffset = i * rowLength;
for (int j = 0; j < width; j++)
{
                        // scale tint values to a range of 0...1
                        // (float division; a plain int division would collapse to 0 or 1)
                        int value = (array[ i * width + j ] + 256) % 256;
                        if (invert)
                        {
                            input[0] = 1 - (value / (float) maxValue);
                        }
                        else
                        {
                            input[0] = value / (float) maxValue;
                        }
float[] mappedColor = tintTransformFunc.eval(input);
int columnOffset = j * numberOfComponents;
for ( int k = 0; k < numberOfComponents; k++ )
{
// redo scaling for every single color value
float mappedValue = mappedColor[k];
mappedData[ rowOffset + columnOffset + k] = (byte)(mappedValue * maxValue);
}
}
}
array = mappedData;
cm = colorspace.createColorModel( bpc );
}
else if (bpc == 1)
{
byte[] map = null;
if (colorspace instanceof PDDeviceGray)
{
COSArray decode = getDecode();
// we have to invert the b/w-values,
// if the Decode array exists and consists of (1,0)
if (decode != null && decode.getInt(0) == 1)
{
map = new byte[] {(byte)0xff};
}
else
{
map = new byte[] {(byte)0x00, (byte)0xff};
}
}
else if (colorspace instanceof PDICCBased)
{
if ( ((PDICCBased)colorspace).getNumberOfComponents() == 1)
{
map = new byte[] {(byte)0xff};
}
else
{
map = new byte[] {(byte)0x00, (byte)0xff};
}
}
else
{
map = new byte[] {(byte)0x00, (byte)0xff};
}
cm = new IndexColorModel(bpc, map.length, map, map, map, Transparency.OPAQUE);
}
else
{
if (colorspace instanceof PDICCBased)
{
if (((PDICCBased)colorspace).getNumberOfComponents() == 1)
{
byte[] map = new byte[] {(byte)0xff};
cm = new IndexColorModel(bpc, 1, map, map, map, Transparency.OPAQUE);
}
else
{
cm = colorspace.createColorModel( bpc );
}
}
else
{
cm = colorspace.createColorModel( bpc );
}
}
LOG.debug("ColorModel: " + cm.toString());
WritableRaster raster = cm.createCompatibleWritableRaster( width, height );
DataBufferByte buffer = (DataBufferByte)raster.getDataBuffer();
byte[] bufferData = buffer.getData();
System.arraycopy( array, 0,bufferData, 0,
(array.length<bufferData.length?array.length: bufferData.length) );
image = new BufferedImage(cm, raster, false, null);
// If there is a 'soft mask' image then we use that as a transparency mask.
PDXObjectImage smask = getSMaskImage();
if (smask != null)
{
BufferedImage smaskBI = smask.getRGBImage();
COSArray decodeArray = smask.getDecode();
CompositeImage compositeImage = new CompositeImage(image, smaskBI);
BufferedImage rgbImage = compositeImage.createMaskedImage(decodeArray);
return rgbImage;
}
else if (getImageMask())
{
BufferedImage stencilMask = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
Graphics2D graphics = (Graphics2D)stencilMask.getGraphics();
if (getStencilColor() != null)
{
graphics.setColor(getStencilColor().getJavaColor());
}
else
{
                // this might happen when using ExtractImages, see PDFBOX-1145
LOG.debug("no stencil color for PixelMap found, using Color.BLACK instead.");
graphics.setColor(Color.BLACK);
}
graphics.fillRect(0, 0, width, height);
// assume default values ([0,1]) for the DecodeArray
// TODO DecodeArray == [1,0]
graphics.setComposite(AlphaComposite.DstIn);
graphics.drawImage(image, null, 0, 0);
return stencilMask;
}
else
{
// if there is no mask, use the unaltered image.
return image;
}
}
catch (Exception exception)
{
LOG.error(exception, exception);
//A NULL return is caught in pagedrawer.Invoke.process() so don't re-throw.
//Returning the NULL falls through to Phillip Koch's TODO section.
return null;
}
}
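
    /* Illustrative sketch, not part of the original source: the palette expansion
       in getRGBImage always produces RGB(A) entries regardless of the base color
       space, because IndexColorModel only accepts RGB-based lookup data.
       Hypothetical example for a two-entry palette over a 4-component base space:

       byte[] lookup = csIndexed.getLookupData();   // 2 entries * 4 bytes each
       byte[] rgb = new byte[2 * 3];                // 2 entries * 3 bytes each
       byte[] in = new byte[4];
       for (int i = 0; i < 2; i++)
       {
           System.arraycopy(lookup, i * 4, in, 0, 4);
           rgb[i * 3]     = (byte) baseColorModel.getRed(in);
           rgb[i * 3 + 1] = (byte) baseColorModel.getGreen(in);
           rgb[i * 3 + 2] = (byte) baseColorModel.getBlue(in);
       }
    */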
/**
* Writes the image as .png.
*
* {@inheritDoc}
*/
public void write2OutputStream(OutputStream out) throws IOException
{
getRGBImage();
if (image != null)
{
ImageIOUtil.writeImage(image, PNG, out);
}
}
/**
* DecodeParms is an optional parameter for filters.
*
* It is provided if any of the filters has nondefault parameters. If there
* is only one filter it is a dictionary, if there are multiple filters it
* is an array with an entry for each filter. An array entry can hold a null
* value if only the default values are used or a dictionary with
* parameters.
*
* @return The decoding parameters.
*
*/
public COSDictionary getDecodeParams()
{
COSBase decodeParms = getCOSStream().getDictionaryObject(COSName.DECODE_PARMS);
if (decodeParms != null)
{
if (decodeParms instanceof COSDictionary)
{
return (COSDictionary) decodeParms;
}
else if (decodeParms instanceof COSArray)
{
// not implemented yet, which index should we use?
return null;//(COSDictionary)((COSArray)decodeParms).get(0);
}
else
{
return null;
}
}
return null;
}
/**
* A code that selects the predictor algorithm.
*
* <ul>
* <li>1 No prediction (the default value)
* <li>2 TIFF Predictor 2
* <li>10 PNG prediction (on encoding, PNG None on all rows)
* <li>11 PNG prediction (on encoding, PNG Sub on all rows)
* <li>12 PNG prediction (on encoding, PNG Up on all rows)
* <li>13 PNG prediction (on encoding, PNG Average on all rows)
     * <li>14 PNG prediction (on encoding, PNG Paeth on all rows)
* <li>15 PNG prediction (on encoding, PNG optimum)
* </ul>
*
* Default value: 1.
*
* @return predictor algorithm code
*/
public int getPredictor()
{
COSDictionary decodeParms = getDecodeParams();
if (decodeParms != null)
{
int i = decodeParms.getInt(COSName.PREDICTOR);
if (i != -1)
{
return i;
}
}
return 1;
}
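
    /* Illustrative sketch, not part of the original source: undoing predictor 12
       (PNG "Up") on decoded row data. Each stored byte is a delta against the
       byte directly above it, so decoding adds the previous row back in
       (Java's byte arithmetic wraps modulo 256 as required):

       for (int row = 1; row < height; row++)
       {
           for (int col = 0; col < bytesPerRow; col++)
           {
               data[row * bytesPerRow + col] += data[(row - 1) * bytesPerRow + col];
           }
       }
    */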
}
| pdfbox/src/main/java/org/apache/pdfbox/pdmodel/graphics/xobject/PDPixelMap.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.graphics.xobject;
import java.awt.AlphaComposite;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Transparency;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.IndexColorModel;
import java.awt.image.WritableRaster;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.common.PDStream;
import org.apache.pdfbox.pdmodel.common.function.PDFunction;
import org.apache.pdfbox.pdmodel.graphics.color.PDColorSpace;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceGray;
import org.apache.pdfbox.pdmodel.graphics.color.PDICCBased;
import org.apache.pdfbox.pdmodel.graphics.color.PDIndexed;
import org.apache.pdfbox.pdmodel.graphics.color.PDSeparation;
import org.apache.pdfbox.util.ImageIOUtil;
/**
* This class contains a PixelMap Image.
* @author <a href="mailto:[email protected]">Ben Litchfield</a>
* @author mathiak
* @version $Revision: 1.10 $
*/
public class PDPixelMap extends PDXObjectImage
{
/**
* Log instance.
*/
private static final Log LOG = LogFactory.getLog(PDPixelMap.class);
private BufferedImage image = null;
private static final String PNG = "png";
/**
* Standard constructor. Basically does nothing.
* @param pdStream The stream that holds the pixel map.
*/
public PDPixelMap(PDStream pdStream)
{
super(pdStream, PNG);
}
/**
* Construct a pixel map image from an AWT image.
*
* @param doc The PDF document to embed the image in.
* @param awtImage The image to read data from.
*
* @throws IOException If there is an error while embedding this image.
*/
/*
* This method is broken and needs to be implemented, any takers?
public PDPixelMap(PDDocument doc, BufferedImage awtImage) throws IOException
{
super( doc, "png");
image = awtImage;
setWidth( image.getWidth() );
setHeight( image.getHeight() );
ColorModel cm = image.getColorModel();
ColorSpace cs = cm.getColorSpace();
PDColorSpace pdColorSpace = PDColorSpaceFactory.createColorSpace( doc, cs );
setColorSpace( pdColorSpace );
//setColorSpace( )
PDStream stream = getPDStream();
OutputStream output = null;
try
{
output = stream.createOutputStream();
DataBuffer buffer = awtImage.getRaster().getDataBuffer();
if( buffer instanceof DataBufferByte )
{
DataBufferByte byteBuffer = (DataBufferByte)buffer;
byte[] data = byteBuffer.getData();
output.write( data );
}
setBitsPerComponent( cm.getPixelSize() );
}
finally
{
if( output != null )
{
output.close();
}
}
}*/
/**
* Returns a {@link java.awt.image.BufferedImage} of the COSStream
* set in the constructor or null if the COSStream could not be encoded.
*
* @return {@inheritDoc}
*
* @throws IOException {@inheritDoc}
*/
public BufferedImage getRGBImage() throws IOException
{
if( image != null )
{
return image;
}
try
{
int width = getWidth();
int height = getHeight();
int bpc = getBitsPerComponent();
byte[] array = getPDStream().getByteArray();
if (array.length == 0)
{
LOG.error("Something went wrong ... the pixelmap doesn't contain any data.");
return null;
}
// Get the ColorModel right
PDColorSpace colorspace = getColorSpace();
if (colorspace == null)
{
LOG.error("getColorSpace() returned NULL. Predictor = " + getPredictor());
return null;
}
ColorModel cm = null;
if (colorspace instanceof PDIndexed)
{
PDIndexed csIndexed = (PDIndexed)colorspace;
                // the base color space uses 8 bits per component, as the indexed color values
// of an indexed color space are always in a range from 0 to 255
ColorModel baseColorModel = csIndexed.getBaseColorSpace().createColorModel(8);
// number of possible color values in the target color space
int numberOfColorValues = 1 << bpc;
// number of indexed color values
int highValue = csIndexed.getHighValue();
// choose the correct size, sometimes there are more indexed values than needed
                // and sometimes there are fewer indexed values than possible
int size = Math.min(numberOfColorValues-1, highValue);
byte[] index = csIndexed.getLookupData();
boolean hasAlpha = baseColorModel.hasAlpha();
COSArray maskArray = getMask();
if( baseColorModel.getTransferType() != DataBuffer.TYPE_BYTE )
{
throw new IOException( "Not implemented" );
}
int numberOfComponents = baseColorModel.getNumComponents() + (hasAlpha ? 1 : 0);
int buffersize = (size+1) * numberOfComponents;
byte[] colorValues = new byte[buffersize];
byte[] inData = new byte[baseColorModel.getNumComponents()];
int bufferIndex = 0;
for( int i = 0; i <= size; i++ )
{
System.arraycopy(index, i * inData.length, inData, 0, inData.length);
colorValues[bufferIndex] = (byte)baseColorModel.getRed(inData);
colorValues[bufferIndex+1] = (byte)baseColorModel.getGreen(inData);
colorValues[bufferIndex+2] = (byte)baseColorModel.getBlue(inData);
if( hasAlpha )
{
colorValues[bufferIndex+3] = (byte)baseColorModel.getAlpha(inData);
}
bufferIndex += numberOfComponents;
}
if (maskArray != null)
{
cm = new IndexColorModel(bpc, size+1, colorValues, 0, hasAlpha, maskArray.getInt(0));
}
else
{
cm = new IndexColorModel(bpc, size+1, colorValues, 0, hasAlpha);
}
}
else if (colorspace instanceof PDSeparation)
{
PDSeparation csSeparation = (PDSeparation)colorspace;
int numberOfComponents = csSeparation.getAlternateColorSpace().getNumberOfComponents();
PDFunction tintTransformFunc = csSeparation.getTintTransform();
COSArray decode = getDecode();
// we have to invert the tint-values,
// if the Decode array exists and consists of (1,0)
boolean invert = decode != null && decode.getInt(0) == 1;
                // TODO add interpolation for decode values other than 1,0
int maxValue = (int)Math.pow(2,bpc) - 1;
// destination array
byte[] mappedData = new byte[width*height*numberOfComponents];
int rowLength = width*numberOfComponents;
float[] input = new float[1];
for ( int i = 0; i < height; i++ )
{
int rowOffset = i * rowLength;
for (int j = 0; j < width; j++)
{
                        // scale tint values to a range of 0...1
                        // (float division; a plain int division would collapse to 0 or 1)
                        int value = (array[ i * width + j ] + 256) % 256;
                        if (invert)
                        {
                            input[0] = 1 - (value / (float) maxValue);
                        }
                        else
                        {
                            input[0] = value / (float) maxValue;
                        }
float[] mappedColor = tintTransformFunc.eval(input);
int columnOffset = j * numberOfComponents;
for ( int k = 0; k < numberOfComponents; k++ )
{
// redo scaling for every single color value
float mappedValue = mappedColor[k];
mappedData[ rowOffset + columnOffset + k] = (byte)(mappedValue * maxValue);
}
}
}
array = mappedData;
cm = colorspace.createColorModel( bpc );
}
else if (bpc == 1)
{
byte[] map = null;
if (colorspace instanceof PDDeviceGray)
{
COSArray decode = getDecode();
// we have to invert the b/w-values,
// if the Decode array exists and consists of (1,0)
if (decode != null && decode.getInt(0) == 1)
{
map = new byte[] {(byte)0xff};
}
else
{
map = new byte[] {(byte)0x00, (byte)0xff};
}
}
else if (colorspace instanceof PDICCBased)
{
if ( ((PDICCBased)colorspace).getNumberOfComponents() == 1)
{
map = new byte[] {(byte)0xff};
}
else
{
map = new byte[] {(byte)0x00, (byte)0xff};
}
}
else
{
map = new byte[] {(byte)0x00, (byte)0xff};
}
cm = new IndexColorModel(bpc, map.length, map, map, map, Transparency.OPAQUE);
}
else
{
if (colorspace instanceof PDICCBased)
{
if (((PDICCBased)colorspace).getNumberOfComponents() == 1)
{
byte[] map = new byte[] {(byte)0xff};
cm = new IndexColorModel(bpc, 1, map, map, map, Transparency.OPAQUE);
}
else
{
cm = colorspace.createColorModel( bpc );
}
}
else
{
cm = colorspace.createColorModel( bpc );
}
}
LOG.debug("ColorModel: " + cm.toString());
WritableRaster raster = cm.createCompatibleWritableRaster( width, height );
DataBufferByte buffer = (DataBufferByte)raster.getDataBuffer();
byte[] bufferData = buffer.getData();
System.arraycopy( array, 0,bufferData, 0,
(array.length<bufferData.length?array.length: bufferData.length) );
image = new BufferedImage(cm, raster, false, null);
// If there is a 'soft mask' image then we use that as a transparency mask.
PDXObjectImage smask = getSMaskImage();
if (smask != null)
{
BufferedImage smaskBI = smask.getRGBImage();
COSArray decodeArray = smask.getDecode();
CompositeImage compositeImage = new CompositeImage(image, smaskBI);
BufferedImage rgbImage = compositeImage.createMaskedImage(decodeArray);
return rgbImage;
}
else if (getImageMask())
{
BufferedImage stencilMask = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
Graphics2D graphics = (Graphics2D)stencilMask.getGraphics();
if (getStencilColor() != null)
{
graphics.setColor(getStencilColor().getJavaColor());
}
else
{
                // this might happen when using ExtractImages, see PDFBOX-1145
LOG.debug("no stencil color for PixelMap found, using Color.BLACK instead.");
graphics.setColor(Color.BLACK);
}
graphics.fillRect(0, 0, width, height);
// assume default values ([0,1]) for the DecodeArray
// TODO DecodeArray == [1,0]
graphics.setComposite(AlphaComposite.DstIn);
graphics.drawImage(image, null, 0, 0);
return stencilMask;
}
else
{
// if there is no mask, use the unaltered image.
return image;
}
}
catch (Exception exception)
{
LOG.error(exception, exception);
//A NULL return is caught in pagedrawer.Invoke.process() so don't re-throw.
//Returning the NULL falls through to Phillip Koch's TODO section.
return null;
}
}
/**
* Writes the image as .png.
*
* {@inheritDoc}
*/
public void write2OutputStream(OutputStream out) throws IOException
{
getRGBImage();
if (image != null)
{
ImageIOUtil.writeImage(image, PNG, out);
}
}
/**
* DecodeParms is an optional parameter for filters.
*
* It is provided if any of the filters has nondefault parameters. If there
* is only one filter it is a dictionary, if there are multiple filters it
* is an array with an entry for each filter. An array entry can hold a null
* value if only the default values are used or a dictionary with
* parameters.
*
* @return The decoding parameters.
*
*/
public COSDictionary getDecodeParams()
{
COSBase decodeParms = getCOSStream().getDictionaryObject(COSName.DECODE_PARMS);
if (decodeParms != null)
{
if (decodeParms instanceof COSDictionary)
{
return (COSDictionary) decodeParms;
}
else if (decodeParms instanceof COSArray)
{
// not implemented yet, which index should we use?
return null;//(COSDictionary)((COSArray)decodeParms).get(0);
}
else
{
return null;
}
}
return null;
}
/**
* A code that selects the predictor algorithm.
*
* <ul>
* <li>1 No prediction (the default value)
* <li>2 TIFF Predictor 2
* <li>10 PNG prediction (on encoding, PNG None on all rows)
* <li>11 PNG prediction (on encoding, PNG Sub on all rows)
* <li>12 PNG prediction (on encoding, PNG Up on all rows)
* <li>13 PNG prediction (on encoding, PNG Average on all rows)
     * <li>14 PNG prediction (on encoding, PNG Paeth on all rows)
* <li>15 PNG prediction (on encoding, PNG optimum)
* </ul>
*
* Default value: 1.
*
* @return predictor algorithm code
*/
public int getPredictor()
{
COSDictionary decodeParms = getDecodeParams();
if (decodeParms != null)
{
int i = decodeParms.getInt(COSName.PREDICTOR);
if (i != -1)
{
return i;
}
}
return 1;
}
}
| PDFBOX-1237: use the correct number of components if the pixelmap uses an indexed colorspace
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1294185 13f79535-47bb-0310-9956-ffa450edef68
| pdfbox/src/main/java/org/apache/pdfbox/pdmodel/graphics/xobject/PDPixelMap.java | PDFBOX-1237: use the correct number of components if the pixelmap uses an indexed colorspace | <ide><path>dfbox/src/main/java/org/apache/pdfbox/pdmodel/graphics/xobject/PDPixelMap.java
<ide> {
<ide> throw new IOException( "Not implemented" );
<ide> }
<del> int numberOfComponents = baseColorModel.getNumComponents() + (hasAlpha ? 1 : 0);
<add> // the IndexColorModel uses RGB-based color values
<add>                // which leads to 3 color components and an optional alpha channel
<add> int numberOfComponents = 3 + (hasAlpha ? 1 : 0);
<ide> int buffersize = (size+1) * numberOfComponents;
<ide> byte[] colorValues = new byte[buffersize];
<ide> byte[] inData = new byte[baseColorModel.getNumComponents()];
<ide> for( int i = 0; i <= size; i++ )
<ide> {
<ide> System.arraycopy(index, i * inData.length, inData, 0, inData.length);
<add> // convert the indexed color values to RGB
<ide> colorValues[bufferIndex] = (byte)baseColorModel.getRed(inData);
<ide> colorValues[bufferIndex+1] = (byte)baseColorModel.getGreen(inData);
<ide> colorValues[bufferIndex+2] = (byte)baseColorModel.getBlue(inData); |
|
Java | mpl-2.0 | e21ef6fdb75587cea5f2960e6e20f3005d4a7cda | 0 | Pilarbrist/rhino,tuchida/rhino,Angelfirenze/rhino,Pilarbrist/rhino,Angelfirenze/rhino,jsdoc3/rhino,ashwinrayaprolu1984/rhino,sainaen/rhino,Pilarbrist/rhino,qhanam/rhino,tntim96/rhino-jscover-repackaged,lv7777/egit_test,swannodette/rhino,sainaen/rhino,rasmuserik/rhino,lv7777/egit_test,tntim96/rhino-jscover-repackaged,tntim96/rhino-apigee,Angelfirenze/rhino,AlexTrotsenko/rhino,AlexTrotsenko/rhino,tntim96/htmlunit-rhino-fork,tejassaoji/RhinoCoarseTainting,ashwinrayaprolu1984/rhino,tejassaoji/RhinoCoarseTainting,tejassaoji/RhinoCoarseTainting,swannodette/rhino,Pilarbrist/rhino,Angelfirenze/rhino,sam/htmlunit-rhino-fork,InstantWebP2P/rhino-android,ashwinrayaprolu1984/rhino,lv7777/egit_test,rasmuserik/rhino,AlexTrotsenko/rhino,Distrotech/rhino,AlexTrotsenko/rhino,tejassaoji/RhinoCoarseTainting,jsdoc3/rhino,tntim96/rhino-jscover,sainaen/rhino,sainaen/rhino,sam/htmlunit-rhino-fork,lv7777/egit_test,ashwinrayaprolu1984/rhino,tuchida/rhino,tntim96/htmlunit-rhino-fork,tntim96/rhino-jscover,tuchida/rhino,swannodette/rhino,sainaen/rhino,sainaen/rhino,sainaen/rhino,qhanam/rhino,lv7777/egit_test,tejassaoji/RhinoCoarseTainting,AlexTrotsenko/rhino,AlexTrotsenko/rhino,lv7777/egit_test,tuchida/rhino,Pilarbrist/rhino,Pilarbrist/rhino,swannodette/rhino,tejassaoji/RhinoCoarseTainting,InstantWebP2P/rhino-android,tuchida/rhino,sam/htmlunit-rhino-fork,lv7777/egit_test,sam/htmlunit-rhino-fork,swannodette/rhino,qhanam/rhino,tuchida/rhino,qhanam/rhino,Angelfirenze/rhino,tejassaoji/RhinoCoarseTainting,tuchida/rhino,swannodette/rhino,sam/htmlunit-rhino-fork,sam/htmlunit-rhino-fork,tntim96/rhino-apigee,Angelfirenze/rhino,swannodette/rhino,Pilarbrist/rhino,sam/htmlunit-rhino-fork,ashwinrayaprolu1984/rhino,Distrotech/rhino,tntim96/rhino-apigee,ashwinrayaprolu1984/rhino,AlexTrotsenko/rhino,ashwinrayaprolu1984/rhino,jsdoc3/rhino,Angelfirenze/rhino | /* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* The contents of this file are subject to the Netscape Public
* License Version 1.1 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.mozilla.org/NPL/
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is Netscape
* Communications Corporation. Portions created by Netscape are
* Copyright (C) 1997-2000 Netscape Communications Corporation. All
* Rights Reserved.
*
* Contributor(s):
* Igor Bukanov
*
* Alternatively, the contents of this file may be used under the
* terms of the GNU Public License (the "GPL"), in which case the
* provisions of the GPL are applicable instead of those above.
* If you wish to allow use of your version of this file only
* under the terms of the GPL and not to allow others to use your
* version of this file under the NPL, indicate your decision by
* deleting the provisions above and replace them with the notice
* and other provisions required by the GPL. If you do not delete
* the provisions above, a recipient may use your version of this
* file under either the NPL or the GPL.
*/
package org.mozilla.javascript;
import java.io.Serializable;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
/**
* Map to associate non-negative integers to objects or integers.
 * The map does not synchronize any of its operations, so either use
 * it from a single thread, do your own synchronization, or perform all
 * mutation operations on one thread before passing the map to others.
*
* @author Igor Bukanov
*
*/
class UintMap implements Serializable {
// Map implementation via hashtable,
// follows "The Art of Computer Programming" by Donald E. Knuth
public UintMap() {
this(4);
}
public UintMap(int initialCapacity) {
if (initialCapacity < 0) Context.codeBug();
        // Table grows when number of stored keys >= 3/4 of max capacity
int minimalCapacity = initialCapacity * 4 / 3;
int i;
for (i = 2; (1 << i) < minimalCapacity; ++i) { }
power = i;
if (check && power < 2) Context.codeBug();
}
public boolean isEmpty() {
return keyCount == 0;
}
public int size() {
return keyCount;
}
public boolean has(int key) {
if (key < 0) Context.codeBug();
return 0 <= findIndex(key);
}
/**
* Get object value assigned with key.
* @return key object value or null if key is absent
*/
public Object getObject(int key) {
if (key < 0) Context.codeBug();
if (values != null) {
int index = findIndex(key);
if (0 <= index) {
return values[index];
}
}
return null;
}
/**
* Get integer value assigned with key.
* @return key integer value or defaultValue if key is absent
*/
public int getInt(int key, int defaultValue) {
if (key < 0) Context.codeBug();
int index = findIndex(key);
if (0 <= index) {
if (ivaluesShift != 0) {
return keys[ivaluesShift + index];
}
return 0;
}
return defaultValue;
}
/**
* Get integer value assigned with key.
     * @return key integer value or 0 if key does not have an int value
* @throws RuntimeException if key does not exist
*/
public int getExistingInt(int key) {
if (key < 0) Context.codeBug();
int index = findIndex(key);
if (0 <= index) {
if (ivaluesShift != 0) {
return keys[ivaluesShift + index];
}
return 0;
}
// Key must exist
Context.codeBug();
return 0;
}
/**
* Set object value of the key.
* If key does not exist, also set its int value to 0.
*/
public void put(int key, Object value) {
if (key < 0) Context.codeBug();
int index = ensureIndex(key, false);
if (values == null) {
values = new Object[1 << power];
}
values[index] = value;
}
/**
* Set int value of the key.
* If key does not exist, also set its object value to null.
*/
public void put(int key, int value) {
if (key < 0) Context.codeBug();
int index = ensureIndex(key, true);
if (ivaluesShift == 0) {
int N = 1 << power;
            // keys.length can be N * 2 after clear which sets ivaluesShift to 0
if (keys.length != N * 2) {
int[] tmp = new int[N * 2];
System.arraycopy(keys, 0, tmp, 0, N);
keys = tmp;
}
ivaluesShift = N;
}
keys[ivaluesShift + index] = value;
}
public void remove(int key) {
if (key < 0) Context.codeBug();
int index = findIndex(key);
if (0 <= index) {
keys[index] = DELETED;
--keyCount;
            // Allow the value to be GC'ed and make sure that a new key reusing
            // the deleted slot gets proper default values
if (values != null) { values[index] = null; }
if (ivaluesShift != 0) { keys[ivaluesShift + index] = 0; }
}
}
public void clear() {
int N = 1 << power;
if (keys != null) {
for (int i = 0; i != N; ++i) {
keys[i] = EMPTY;
}
if (values != null) {
for (int i = 0; i != N; ++i) {
values[i] = null;
}
}
}
ivaluesShift = 0;
keyCount = 0;
occupiedCount = 0;
}
/** Return array of present keys */
public int[] getKeys() {
int[] keys = this.keys;
int n = keyCount;
int[] result = new int[n];
for (int i = 0; n != 0; ++i) {
int entry = keys[i];
if (entry != EMPTY && entry != DELETED) {
result[--n] = entry;
}
}
return result;
}
private static int tableLookupStep(int fraction, int mask, int power) {
int shift = 32 - 2 * power;
if (shift >= 0) {
return ((fraction >>> shift) & mask) | 1;
}
else {
return (fraction & (mask >>> -shift)) | 1;
}
}
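
    /* Illustrative note, not part of the original source: findIndex and
       ensureIndex use double hashing. The first probe takes the top "power" bits
       of fraction = key * A (Knuth's multiplicative hashing); on a collision,
       tableLookupStep derives an odd step from the remaining bits. Because the
       table size is a power of two, an odd step is coprime to it, so the probe
       sequence (index + k * step) & mask eventually visits every slot. */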
private int findIndex(int key) {
int[] keys = this.keys;
if (keys != null) {
int fraction = key * A;
int index = fraction >>> (32 - power);
int entry = keys[index];
if (entry == key) { return index; }
if (entry != EMPTY) {
// Search in table after first failed attempt
int mask = (1 << power) - 1;
int step = tableLookupStep(fraction, mask, power);
int n = 0;
do {
if (check) {
if (n >= occupiedCount) Context.codeBug();
++n;
}
index = (index + step) & mask;
entry = keys[index];
if (entry == key) { return index; }
} while (entry != EMPTY);
}
}
return -1;
}
    // Insert a key that is not already present into a table that has
    // no deleted entries and enough free space
private int insertNewKey(int key) {
if (check && occupiedCount != keyCount) Context.codeBug();
if (check && keyCount == 1 << power) Context.codeBug();
int[] keys = this.keys;
int fraction = key * A;
int index = fraction >>> (32 - power);
if (keys[index] != EMPTY) {
int mask = (1 << power) - 1;
int step = tableLookupStep(fraction, mask, power);
int firstIndex = index;
do {
if (check && keys[index] == DELETED) Context.codeBug();
index = (index + step) & mask;
if (check && firstIndex == index) Context.codeBug();
} while (keys[index] != EMPTY);
}
keys[index] = key;
++occupiedCount;
++keyCount;
return index;
}
private void rehashTable(boolean ensureIntSpace) {
if (keys != null) {
// Check if removing deleted entries would free enough space
if (keyCount * 2 >= occupiedCount) {
                // Need to grow: deleted entries are less than half of the occupied slots
++power;
}
}
int N = 1 << power;
int[] old = keys;
int oldShift = ivaluesShift;
if (oldShift == 0 && !ensureIntSpace) {
keys = new int[N];
}
else {
ivaluesShift = N; keys = new int[N * 2];
}
for (int i = 0; i != N; ++i) { keys[i] = EMPTY; }
Object[] oldValues = values;
if (oldValues != null) { values = new Object[N]; }
int oldCount = keyCount;
occupiedCount = 0;
if (oldCount != 0) {
keyCount = 0;
for (int i = 0, remaining = oldCount; remaining != 0; ++i) {
int key = old[i];
if (key != EMPTY && key != DELETED) {
int index = insertNewKey(key);
if (oldValues != null) {
values[index] = oldValues[i];
}
if (oldShift != 0) {
keys[ivaluesShift + index] = old[oldShift + i];
}
--remaining;
}
}
}
}
    // Ensure an index for key, creating one if necessary
private int ensureIndex(int key, boolean intType) {
int index = -1;
int firstDeleted = -1;
int[] keys = this.keys;
if (keys != null) {
int fraction = key * A;
index = fraction >>> (32 - power);
int entry = keys[index];
if (entry == key) { return index; }
if (entry != EMPTY) {
if (entry == DELETED) { firstDeleted = index; }
// Search in table after first failed attempt
int mask = (1 << power) - 1;
int step = tableLookupStep(fraction, mask, power);
int n = 0;
do {
if (check) {
if (n >= occupiedCount) Context.codeBug();
++n;
}
index = (index + step) & mask;
entry = keys[index];
if (entry == key) { return index; }
if (entry == DELETED && firstDeleted < 0) {
firstDeleted = index;
}
} while (entry != EMPTY);
}
}
// Inserting of new key
if (check && keys != null && keys[index] != EMPTY)
Context.codeBug();
if (firstDeleted >= 0) {
index = firstDeleted;
}
else {
// Need to consume empty entry: check occupation level
if (keys == null || occupiedCount * 4 >= (1 << power) * 3) {
                // Too few unused entries: rehash
rehashTable(intType);
keys = this.keys;
return insertNewKey(key);
}
++occupiedCount;
}
keys[index] = key;
++keyCount;
return index;
}
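
    /* Illustrative note, not part of the original source: the test
       occupiedCount * 4 >= (1 << power) * 3 above keeps the load factor below
       3/4, counting DELETED marks as occupied. E.g. with power == 4 (16 slots)
       a rehash is triggered once 12 slots hold live keys or DELETED marks. */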
private void writeObject(ObjectOutputStream out)
throws IOException
{
out.defaultWriteObject();
int count = keyCount;
if (count != 0) {
boolean hasIntValues = (ivaluesShift != 0);
boolean hasObjectValues = (values != null);
out.writeBoolean(hasIntValues);
out.writeBoolean(hasObjectValues);
for (int i = 0; count != 0; ++i) {
int key = keys[i];
if (key != EMPTY && key != DELETED) {
--count;
out.writeInt(key);
if (hasIntValues) {
out.writeInt(keys[ivaluesShift + i]);
}
if (hasObjectValues) {
out.writeObject(values[i]);
}
}
}
}
}
private void readObject(ObjectInputStream in)
throws IOException, ClassNotFoundException
{
in.defaultReadObject();
int writtenKeyCount = keyCount;
if (writtenKeyCount != 0) {
keyCount = 0;
boolean hasIntValues = in.readBoolean();
boolean hasObjectValues = in.readBoolean();
int N = 1 << power;
if (hasIntValues) {
keys = new int[2 * N];
ivaluesShift = N;
}else {
keys = new int[N];
}
for (int i = 0; i != N; ++i) {
keys[i] = EMPTY;
}
if (hasObjectValues) {
values = new Object[N];
}
for (int i = 0; i != writtenKeyCount; ++i) {
int key = in.readInt();
int index = insertNewKey(key);
if (hasIntValues) {
int ivalue = in.readInt();
keys[ivaluesShift + index] = ivalue;
}
if (hasObjectValues) {
values[index] = in.readObject();
}
}
}
}
static final long serialVersionUID = -6916326879143724506L;
// A == golden_ratio * (1 << 32) = ((sqrt(5) - 1) / 2) * (1 << 32)
// See Knuth etc.
private static final int A = 0x9e3779b9;
private static final int EMPTY = -1;
private static final int DELETED = -2;
    // Structure of keys and values arrays (N == 1 << power):
// keys[0 <= i < N]: key value or EMPTY or DELETED mark
// values[0 <= i < N]: value of key at keys[i]
// keys[N <= i < 2N]: int values of keys at keys[i - N]
private transient int[] keys;
private transient Object[] values;
private int power;
private int keyCount;
private transient int occupiedCount; // == keyCount + deleted_count
// If ivaluesShift != 0, keys[ivaluesShift + index] contains integer
// values associated with keys
private transient int ivaluesShift;
    // If true, enables consistency checks
private static final boolean check = false;
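
    /* Illustrative usage sketch, not part of the original source:

       UintMap map = new UintMap();
       map.put(7, "seven");             // object value; the int value stays 0
       map.put(8, 42);                  // int value; the object value stays null
       Object o = map.getObject(7);     // -> "seven"
       int v = map.getInt(8, -1);       // -> 42
       int missing = map.getInt(9, -1); // -> -1 (the supplied default)
       map.remove(7);                   // frees the slot for reuse
    */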
/* TEST START
public static void main(String[] args) {
if (!check) {
System.err.println("Set check to true and re-run");
throw new RuntimeException("Set check to true and re-run");
}
UintMap map;
map = new UintMap();
testHash(map, 2);
map = new UintMap();
testHash(map, 10 * 1000);
map = new UintMap(30 * 1000);
testHash(map, 10 * 100);
map.clear();
testHash(map, 4);
map = new UintMap(0);
testHash(map, 10 * 100);
}
private static void testHash(UintMap map, int N) {
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i, i);
check(i == map.getInt(i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i, i);
check(i == map.getInt(i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i, new Integer(i));
check(-1 == map.getInt(i, -1));
Integer obj = (Integer)map.getObject(i);
check(obj != null && i == obj.intValue());
}
check(map.size() == N);
System.out.print("."); System.out.flush();
int[] keys = map.getKeys();
check(keys.length == N);
for (int i = 0; i != N; ++i) {
int key = keys[i];
check(map.has(key));
check(!map.isIntType(key));
check(map.isObjectType(key));
Integer obj = (Integer) map.getObject(key);
check(obj != null && key == obj.intValue());
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
check(-1 == map.getInt(i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i * i, i);
check(i == map.getInt(i * i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
check(i == map.getInt(i * i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i * i, new Integer(i));
check(-1 == map.getInt(i * i, -1));
map.remove(i * i);
check(!map.has(i * i));
map.put(i * i, i);
check(map.isIntType(i * i));
check(null == map.getObject(i * i));
map.remove(i * i);
check(!map.isObjectType(i * i));
check(!map.isIntType(i * i));
}
int old_size = map.size();
for (int i = 0; i != N; ++i) {
map.remove(i * i);
check(map.size() == old_size);
}
System.out.print("."); System.out.flush();
map.clear();
check(map.size() == 0);
for (int i = 0; i != N; ++i) {
map.put(i * i, i);
map.put(i * i + 1, new Double(i+0.5));
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap(0);
checkSameMaps(map, (UintMap)writeAndRead(map));
map = new UintMap(1);
checkSameMaps(map, (UintMap)writeAndRead(map));
map = new UintMap(1000);
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap(N / 10);
for (int i = 0; i != N; ++i) {
map.put(2*i+1, i);
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap(N / 10);
for (int i = 0; i != N; ++i) {
map.put(2*i+1, i);
}
for (int i = 0; i != N / 2; ++i) {
map.remove(2*i+1);
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap();
for (int i = 0; i != N; ++i) {
map.put(2*i+1, new Double(i + 10));
}
for (int i = 0; i != N / 2; ++i) {
map.remove(2*i+1);
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.println(); System.out.flush();
}
private static void checkSameMaps(UintMap map1, UintMap map2) {
check(map1.size() == map2.size());
int[] keys = map1.getKeys();
check(keys.length == map1.size());
for (int i = 0; i != keys.length; ++i) {
int key = keys[i];
check(map2.has(key));
check(map1.isObjectType(key) == map2.isObjectType(key));
check(map1.isIntType(key) == map2.isIntType(key));
Object o1 = map1.getObject(key);
Object o2 = map2.getObject(key);
if (map1.isObjectType(key)) {
check(o1.equals(o2));
}else {
check(map1.getObject(key) == null);
check(map2.getObject(key) == null);
}
if (map1.isIntType(key)) {
check(map1.getExistingInt(key) == map2.getExistingInt(key));
}else {
check(map1.getInt(key, -10) == -10);
check(map1.getInt(key, -11) == -11);
check(map2.getInt(key, -10) == -10);
check(map2.getInt(key, -11) == -11);
}
}
}
private static void check(boolean condition) {
if (!condition) Context.codeBug();
}
private static Object writeAndRead(Object obj) {
try {
java.io.ByteArrayOutputStream
bos = new java.io.ByteArrayOutputStream();
java.io.ObjectOutputStream
out = new java.io.ObjectOutputStream(bos);
out.writeObject(obj);
out.close();
byte[] data = bos.toByteArray();
java.io.ByteArrayInputStream
bis = new java.io.ByteArrayInputStream(data);
java.io.ObjectInputStream
in = new java.io.ObjectInputStream(bis);
Object result = in.readObject();
in.close();
return result;
}catch (Exception ex) {
ex.printStackTrace();
throw new RuntimeException("Unexpected");
}
}
// TEST END */
}
| src/org/mozilla/javascript/UintMap.java | /* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* The contents of this file are subject to the Netscape Public
* License Version 1.1 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.mozilla.org/NPL/
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is Netscape
* Communications Corporation. Portions created by Netscape are
* Copyright (C) 1997-2000 Netscape Communications Corporation. All
* Rights Reserved.
*
* Contributor(s):
* Igor Bukanov
*
* Alternatively, the contents of this file may be used under the
* terms of the GNU Public License (the "GPL"), in which case the
* provisions of the GPL are applicable instead of those above.
* If you wish to allow use of your version of this file only
* under the terms of the GPL and not to allow others to use your
* version of this file under the NPL, indicate your decision by
* deleting the provisions above and replace them with the notice
* and other provisions required by the GPL. If you do not delete
* the provisions above, a recipient may use your version of this
* file under either the NPL or the GPL.
*/
package org.mozilla.javascript;
import java.io.Serializable;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
/**
* Map to associate non-negative integers to objects or integers.
 * The map does not synchronize any of its operations, so either use
 * it from a single thread, do your own synchronization, or perform all
 * mutation operations on one thread before passing the map to others.
*
* @author Igor Bukanov
*
*/
class UintMap implements Serializable {
// Map implementation via hashtable,
// follows "The Art of Computer Programming" by Donald E. Knuth
public UintMap() {
this(4);
}
public UintMap(int initialCapacity) {
if (initialCapacity < 0) Context.codeBug();
        // Table grows when number of stored keys >= 3/4 of max capacity
int minimalCapacity = initialCapacity * 4 / 3;
int i;
for (i = 2; (1 << i) < minimalCapacity; ++i) { }
power = i;
if (check && power < 2) Context.codeBug();
}
public boolean isEmpty() {
return keyCount == 0;
}
public int size() {
return keyCount;
}
public boolean has(int key) {
if (key < 0) Context.codeBug();
return 0 <= findIndex(key);
}
/**
* Get object value assigned with key.
* @return key object value or null if key is absent
*/
public Object getObject(int key) {
if (key < 0) Context.codeBug();
if (values != null) {
int index = findIndex(key);
if (0 <= index) {
return values[index];
}
}
return null;
}
/**
* Get integer value assigned with key.
* @return key integer value or defaultValue if key is absent
*/
public int getInt(int key, int defaultValue) {
if (key < 0) Context.codeBug();
int index = findIndex(key);
if (0 <= index) {
if (ivaluesShift != 0) {
return keys[ivaluesShift + index];
}
return 0;
}
return defaultValue;
}
/**
* Get integer value assigned with key.
     * @return key integer value or 0 if key does not have an int value
* @throws RuntimeException if key does not exist
*/
public int getExistingInt(int key) {
if (key < 0) Context.codeBug();
int index = findIndex(key);
if (0 <= index) {
if (ivaluesShift != 0) {
return keys[ivaluesShift + index];
}
return 0;
}
// Key must exist
Context.codeBug();
return 0;
}
/**
* Set object value of the key.
* If key does not exist, also set its int value to 0.
*/
public void put(int key, Object value) {
if (key < 0) Context.codeBug();
int index = ensureIndex(key, false);
if (values == null) {
values = new Object[1 << power];
}
values[index] = value;
}
/**
* Set int value of the key.
* If key does not exist, also set its object value to null.
*/
public void put(int key, int value) {
if (key < 0) Context.codeBug();
int index = ensureIndex(key, true);
if (ivaluesShift == 0) {
int N = 1 << power;
            // keys.length can be N * 2 after clear which sets ivaluesShift to 0
if (keys.length != N * 2) {
int[] tmp = new int[N * 2];
System.arraycopy(keys, 0, tmp, 0, N);
keys = tmp;
}
ivaluesShift = N;
}
keys[ivaluesShift + index] = value;
}
public void remove(int key) {
if (key < 0) Context.codeBug();
int index = findIndex(key);
if (0 <= index) {
keys[index] = DELETED;
--keyCount;
if (values != null) { values[index] = null; }
}
}
public void clear() {
int N = 1 << power;
if (keys != null) {
for (int i = 0; i != N; ++i) {
keys[i] = EMPTY;
}
if (values != null) {
for (int i = 0; i != N; ++i) {
values[i] = null;
}
}
}
ivaluesShift = 0;
keyCount = 0;
occupiedCount = 0;
}
/** Return array of present keys */
public int[] getKeys() {
int[] keys = this.keys;
int n = keyCount;
int[] result = new int[n];
for (int i = 0; n != 0; ++i) {
int entry = keys[i];
if (entry != EMPTY && entry != DELETED) {
result[--n] = entry;
}
}
return result;
}
private static int tableLookupStep(int fraction, int mask, int power) {
int shift = 32 - 2 * power;
if (shift >= 0) {
return ((fraction >>> shift) & mask) | 1;
}
else {
return (fraction & (mask >>> -shift)) | 1;
}
}
private int findIndex(int key) {
int[] keys = this.keys;
if (keys != null) {
int fraction = key * A;
int index = fraction >>> (32 - power);
int entry = keys[index];
if (entry == key) { return index; }
if (entry != EMPTY) {
// Search in table after first failed attempt
int mask = (1 << power) - 1;
int step = tableLookupStep(fraction, mask, power);
int n = 0;
do {
if (check) {
if (n >= occupiedCount) Context.codeBug();
++n;
}
index = (index + step) & mask;
entry = keys[index];
if (entry == key) { return index; }
} while (entry != EMPTY);
}
}
return -1;
}
    // Insert a key that is not already present into a table that has
    // no deleted entries and enough free space
private int insertNewKey(int key) {
if (check && occupiedCount != keyCount) Context.codeBug();
if (check && keyCount == 1 << power) Context.codeBug();
int[] keys = this.keys;
int fraction = key * A;
int index = fraction >>> (32 - power);
if (keys[index] != EMPTY) {
int mask = (1 << power) - 1;
int step = tableLookupStep(fraction, mask, power);
int firstIndex = index;
do {
if (check && keys[index] == DELETED) Context.codeBug();
index = (index + step) & mask;
if (check && firstIndex == index) Context.codeBug();
} while (keys[index] != EMPTY);
}
keys[index] = key;
++occupiedCount;
++keyCount;
return index;
}
private void rehashTable(boolean ensureIntSpace) {
if (keys != null) {
// Check if removing deleted entries would free enough space
if (keyCount * 2 >= occupiedCount) {
                // Need to grow: deleted entries are less than half of the occupied slots
++power;
}
}
int N = 1 << power;
int[] old = keys;
int oldShift = ivaluesShift;
if (oldShift == 0 && !ensureIntSpace) {
keys = new int[N];
}
else {
ivaluesShift = N; keys = new int[N * 2];
}
for (int i = 0; i != N; ++i) { keys[i] = EMPTY; }
Object[] oldValues = values;
if (oldValues != null) { values = new Object[N]; }
int oldCount = keyCount;
occupiedCount = 0;
if (oldCount != 0) {
keyCount = 0;
for (int i = 0, remaining = oldCount; remaining != 0; ++i) {
int key = old[i];
if (key != EMPTY && key != DELETED) {
int index = insertNewKey(key);
if (oldValues != null) {
values[index] = oldValues[i];
}
if (oldShift != 0) {
keys[ivaluesShift + index] = old[oldShift + i];
}
--remaining;
}
}
}
}
    // Ensure an index for key, creating one if necessary
private int ensureIndex(int key, boolean intType) {
int index = -1;
int firstDeleted = -1;
int[] keys = this.keys;
if (keys != null) {
int fraction = key * A;
index = fraction >>> (32 - power);
int entry = keys[index];
if (entry == key) { return index; }
if (entry != EMPTY) {
if (entry == DELETED) { firstDeleted = index; }
// Search in table after first failed attempt
int mask = (1 << power) - 1;
int step = tableLookupStep(fraction, mask, power);
int n = 0;
do {
if (check) {
if (n >= occupiedCount) Context.codeBug();
++n;
}
index = (index + step) & mask;
entry = keys[index];
if (entry == key) { return index; }
if (entry == DELETED && firstDeleted < 0) {
firstDeleted = index;
}
} while (entry != EMPTY);
}
}
// Inserting of new key
if (check && keys != null && keys[index] != EMPTY)
Context.codeBug();
if (firstDeleted >= 0) {
index = firstDeleted;
}
else {
// Need to consume empty entry: check occupation level
if (keys == null || occupiedCount * 4 >= (1 << power) * 3) {
                // Too few unused entries: rehash
rehashTable(intType);
keys = this.keys;
return insertNewKey(key);
}
++occupiedCount;
}
keys[index] = key;
++keyCount;
return index;
}
private void writeObject(ObjectOutputStream out)
throws IOException
{
out.defaultWriteObject();
int count = keyCount;
if (count != 0) {
boolean hasIntValues = (ivaluesShift != 0);
boolean hasObjectValues = (values != null);
out.writeBoolean(hasIntValues);
out.writeBoolean(hasObjectValues);
for (int i = 0; count != 0; ++i) {
int key = keys[i];
if (key != EMPTY && key != DELETED) {
--count;
out.writeInt(key);
if (hasIntValues) {
out.writeInt(keys[ivaluesShift + i]);
}
if (hasObjectValues) {
out.writeObject(values[i]);
}
}
}
}
}
private void readObject(ObjectInputStream in)
throws IOException, ClassNotFoundException
{
in.defaultReadObject();
int writtenKeyCount = keyCount;
if (writtenKeyCount != 0) {
keyCount = 0;
boolean hasIntValues = in.readBoolean();
boolean hasObjectValues = in.readBoolean();
int N = 1 << power;
if (hasIntValues) {
keys = new int[2 * N];
ivaluesShift = N;
}else {
keys = new int[N];
}
for (int i = 0; i != N; ++i) {
keys[i] = EMPTY;
}
if (hasObjectValues) {
values = new Object[N];
}
for (int i = 0; i != writtenKeyCount; ++i) {
int key = in.readInt();
int index = insertNewKey(key);
if (hasIntValues) {
int ivalue = in.readInt();
keys[ivaluesShift + index] = ivalue;
}
if (hasObjectValues) {
values[index] = in.readObject();
}
}
}
}
static final long serialVersionUID = -6916326879143724506L;
// A == golden_ratio * (1 << 32) = ((sqrt(5) - 1) / 2) * (1 << 32)
// See Knuth etc.
private static final int A = 0x9e3779b9;
private static final int EMPTY = -1;
private static final int DELETED = -2;
    // Structure of keys and values arrays (N == 1 << power):
// keys[0 <= i < N]: key value or EMPTY or DELETED mark
// values[0 <= i < N]: value of key at keys[i]
// keys[N <= i < 2N]: int values of keys at keys[i - N]
private transient int[] keys;
private transient Object[] values;
private int power;
private int keyCount;
private transient int occupiedCount; // == keyCount + deleted_count
// If ivaluesShift != 0, keys[ivaluesShift + index] contains integer
// values associated with keys
private transient int ivaluesShift;
    // If true, enables consistency checks
private static final boolean check = false;
/* TEST START
public static void main(String[] args) {
if (!check) {
System.err.println("Set check to true and re-run");
throw new RuntimeException("Set check to true and re-run");
}
UintMap map;
map = new UintMap();
testHash(map, 2);
map = new UintMap();
testHash(map, 10 * 1000);
map = new UintMap(30 * 1000);
testHash(map, 10 * 100);
map.clear();
testHash(map, 4);
map = new UintMap(0);
testHash(map, 10 * 100);
}
private static void testHash(UintMap map, int N) {
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i, i);
check(i == map.getInt(i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i, i);
check(i == map.getInt(i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i, new Integer(i));
check(-1 == map.getInt(i, -1));
Integer obj = (Integer)map.getObject(i);
check(obj != null && i == obj.intValue());
}
check(map.size() == N);
System.out.print("."); System.out.flush();
int[] keys = map.getKeys();
check(keys.length == N);
for (int i = 0; i != N; ++i) {
int key = keys[i];
check(map.has(key));
check(!map.isIntType(key));
check(map.isObjectType(key));
Integer obj = (Integer) map.getObject(key);
check(obj != null && key == obj.intValue());
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
check(-1 == map.getInt(i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i * i, i);
check(i == map.getInt(i * i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
check(i == map.getInt(i * i, -1));
}
System.out.print("."); System.out.flush();
for (int i = 0; i != N; ++i) {
map.put(i * i, new Integer(i));
check(-1 == map.getInt(i * i, -1));
map.remove(i * i);
check(!map.has(i * i));
map.put(i * i, i);
check(map.isIntType(i * i));
check(null == map.getObject(i * i));
map.remove(i * i);
check(!map.isObjectType(i * i));
check(!map.isIntType(i * i));
}
int old_size = map.size();
for (int i = 0; i != N; ++i) {
map.remove(i * i);
check(map.size() == old_size);
}
System.out.print("."); System.out.flush();
map.clear();
check(map.size() == 0);
for (int i = 0; i != N; ++i) {
map.put(i * i, i);
map.put(i * i + 1, new Double(i+0.5));
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap(0);
checkSameMaps(map, (UintMap)writeAndRead(map));
map = new UintMap(1);
checkSameMaps(map, (UintMap)writeAndRead(map));
map = new UintMap(1000);
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap(N / 10);
for (int i = 0; i != N; ++i) {
map.put(2*i+1, i);
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap(N / 10);
for (int i = 0; i != N; ++i) {
map.put(2*i+1, i);
}
for (int i = 0; i != N / 2; ++i) {
map.remove(2*i+1);
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.print("."); System.out.flush();
map = new UintMap();
for (int i = 0; i != N; ++i) {
map.put(2*i+1, new Double(i + 10));
}
for (int i = 0; i != N / 2; ++i) {
map.remove(2*i+1);
}
checkSameMaps(map, (UintMap)writeAndRead(map));
System.out.println(); System.out.flush();
}
private static void checkSameMaps(UintMap map1, UintMap map2) {
check(map1.size() == map2.size());
int[] keys = map1.getKeys();
check(keys.length == map1.size());
for (int i = 0; i != keys.length; ++i) {
int key = keys[i];
check(map2.has(key));
check(map1.isObjectType(key) == map2.isObjectType(key));
check(map1.isIntType(key) == map2.isIntType(key));
Object o1 = map1.getObject(key);
Object o2 = map2.getObject(key);
if (map1.isObjectType(key)) {
check(o1.equals(o2));
} else {
check(map1.getObject(key) == null);
check(map2.getObject(key) == null);
}
if (map1.isIntType(key)) {
check(map1.getExistingInt(key) == map2.getExistingInt(key));
} else {
check(map1.getInt(key, -10) == -10);
check(map1.getInt(key, -11) == -11);
check(map2.getInt(key, -10) == -10);
check(map2.getInt(key, -11) == -11);
}
}
}
private static void check(boolean condition) {
if (!condition) Context.codeBug();
}
private static Object writeAndRead(Object obj) {
try {
java.io.ByteArrayOutputStream
bos = new java.io.ByteArrayOutputStream();
java.io.ObjectOutputStream
out = new java.io.ObjectOutputStream(bos);
out.writeObject(obj);
out.close();
byte[] data = bos.toByteArray();
java.io.ByteArrayInputStream
bis = new java.io.ByteArrayInputStream(data);
java.io.ObjectInputStream
in = new java.io.ObjectInputStream(bis);
Object result = in.readObject();
in.close();
return result;
} catch (Exception ex) {
ex.printStackTrace();
throw new RuntimeException("Unexpected");
}
}
// TEST END */
}
| In remove(), set the int key value to 0 so that a new key reusing the same slot gets the initial 0 after put(int, Object)
| src/org/mozilla/javascript/UintMap.java | In remove(), set the int key value to 0 so that a new key reusing the same slot gets the initial 0 after put(int, Object) | <ide><path>rc/org/mozilla/javascript/UintMap.java
<ide> if (0 <= index) {
<ide> keys[index] = DELETED;
<ide> --keyCount;
<add> // Allow to GC value and make sure that new key with the deleted
<add> // slot shall get proper default values
<ide> if (values != null) { values[index] = null; }
<add> if (ivaluesShift != 0) { keys[ivaluesShift + index] = 0; }
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 1549f55dea2eae483e014f9d23281e1ca64bc6f6 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.lookup.impl;
import com.intellij.codeInsight.AutoPopupController;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.completion.impl.CamelHumpMatcher;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.lookup.impl.actions.ChooseItemAction;
import com.intellij.codeInsight.template.impl.actions.NextVariableAction;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.ui.UISettings;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.internal.statistic.service.fus.collectors.UIEventId;
import com.intellij.internal.statistic.service.fus.collectors.UIEventLogger;
import com.intellij.lang.LangBundle;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorActivityManager;
import com.intellij.openapi.editor.EditorModificationUtil;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.colors.FontPreferences;
import com.intellij.openapi.editor.colors.impl.FontPreferencesImpl;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.JBList;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.CollectConsumer;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EDT;
import com.intellij.util.ui.accessibility.AccessibleContextUtil;
import com.intellij.util.ui.accessibility.ScreenReader;
import com.intellij.util.ui.update.Activatable;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.*;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
public class LookupImpl extends LightweightHint implements LookupEx, Disposable, LookupElementListPresenter {
private static final Logger LOG = Logger.getInstance(LookupImpl.class);
private final LookupOffsets myOffsets;
private final Project myProject;
private final Editor myEditor;
private final Object myUiLock = new Object();
private final JBList myList = new JBList<LookupElement>(new CollectionListModel<>()) {
// 'myList' is focused when "Screen Reader" mode is enabled
@Override
protected void processKeyEvent(@NotNull final KeyEvent e) {
myEditor.getContentComponent().dispatchEvent(e); // let the editor handle actions properly for the lookup list
}
@NotNull
@Override
protected ExpandableItemsHandler<Integer> createExpandableItemsHandler() {
return new CompletionExtender(this);
}
};
final LookupCellRenderer myCellRenderer;
private final List<LookupListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
private final List<PrefixChangeListener> myPrefixChangeListeners = ContainerUtil.createLockFreeCopyOnWriteList();
private final LookupPreview myPreview = new LookupPreview(this);
// keeping our own copy of editor's font preferences, which can be used in non-EDT threads (to avoid race conditions)
private final FontPreferences myFontPreferences = new FontPreferencesImpl();
private long myStampShown = 0;
private boolean myShown = false;
private boolean myDisposed = false;
private boolean myHidden = false;
private boolean mySelectionTouched;
private LookupFocusDegree myLookupFocusDegree = LookupFocusDegree.FOCUSED;
private volatile boolean myCalculating;
private final Advertiser myAdComponent;
private int myGuardedChanges;
private volatile LookupArranger myArranger;
private LookupArranger myPresentableArranger;
private boolean myStartCompletionWhenNothingMatches;
boolean myResizePending;
private boolean myFinishing;
boolean myUpdating;
private LookupUi myUi;
private Integer myLastVisibleIndex;
private final AtomicInteger myDummyItemCount = new AtomicInteger();
public LookupImpl(Project project, Editor editor, @NotNull LookupArranger arranger) {
super(new JPanel(new BorderLayout()));
setForceShowAsPopup(true);
setCancelOnClickOutside(false);
setResizable(true);
myProject = project;
myEditor = InjectedLanguageUtil.getTopLevelEditor(editor);
myArranger = arranger;
myPresentableArranger = arranger;
myEditor.getColorsScheme().getFontPreferences().copyTo(myFontPreferences);
DaemonCodeAnalyzer.getInstance(myProject).disableUpdateByTimer(this);
myCellRenderer = new LookupCellRenderer(this);
myList.setCellRenderer(myCellRenderer);
myList.setFocusable(false);
myList.setFixedCellWidth(50);
myList.setBorder(null);
// Set the editor's content component as the accessible parent of the list so that screen readers do not think
// a new top level frame just got the focus. This is important to prevent screen readers
// from announcing the title of the top level frame when the list is shown (or hidden),
// as they usually do when a new top-level frame receives the focus.
AccessibleContextUtil.setParent((Component)myList, myEditor.getContentComponent());
myList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
myList.setBackground(LookupCellRenderer.BACKGROUND_COLOR);
myAdComponent = new Advertiser();
myAdComponent.setBackground(LookupCellRenderer.BACKGROUND_COLOR);
myOffsets = new LookupOffsets(myEditor);
final CollectionListModel<LookupElement> model = getListModel();
addEmptyItem(model);
updateListHeight(model);
addListeners();
}
private CollectionListModel<LookupElement> getListModel() {
//noinspection unchecked
return (CollectionListModel<LookupElement>)myList.getModel();
}
@SuppressWarnings("unused") // used plugins
public LookupArranger getArranger() {
return myArranger;
}
public void setArranger(LookupArranger arranger) {
myArranger = arranger;
}
@Override
public boolean isFocused() {
return getLookupFocusDegree() == LookupFocusDegree.FOCUSED;
}
/**
* @deprecated Use {@link #setLookupFocusDegree(LookupFocusDegree)}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2020.3")
public void setFocusDegree(FocusDegree focusDegree) {
if (focusDegree != null) {
setLookupFocusDegree(convertToLookupFocusDegree(focusDegree));
}
}
@NotNull
@Override
public LookupFocusDegree getLookupFocusDegree() {
return myLookupFocusDegree;
}
public void setLookupFocusDegree(@NotNull LookupFocusDegree lookupFocusDegree) {
myLookupFocusDegree = lookupFocusDegree;
for (LookupListener listener : myListeners) {
listener.focusDegreeChanged();
}
}
public boolean isCalculating() {
return myCalculating;
}
public void setCalculating(boolean calculating) {
myCalculating = calculating;
if (myUi != null) {
myUi.setCalculating(calculating);
}
}
public void markSelectionTouched() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
mySelectionTouched = true;
myList.repaint();
}
@TestOnly
public void setSelectionTouched(boolean selectionTouched) {
mySelectionTouched = selectionTouched;
}
@Override
public int getSelectedIndex() {
return myList.getSelectedIndex();
}
public void setSelectedIndex(int index) {
myList.setSelectedIndex(index);
myList.ensureIndexIsVisible(index);
}
public void setDummyItemCount(int count) {
myDummyItemCount.set(count);
}
public void repaintLookup(boolean onExplicitAction, boolean reused, boolean selectionVisible, boolean itemsChanged) {
myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
}
public void resort(boolean addAgain) {
final List<LookupElement> items = getItems();
myPresentableArranger.prefixChanged(this);
synchronized (myUiLock) {
getListModel().removeAll();
}
if (addAgain) {
for (final LookupElement item : items) {
addItem(item, itemMatcher(item));
}
}
refreshUi(true, true);
}
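// Registers the item with the arranger; returns false when the rendered presentation
// still contains the completion dummy identifier and the item must not be shown.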
public boolean addItem(LookupElement item, PrefixMatcher matcher) {
LookupElementPresentation presentation = LookupElementPresentation.renderElement(item);
if (containsDummyIdentifier(presentation.getItemText()) ||
containsDummyIdentifier(presentation.getTailText()) ||
containsDummyIdentifier(presentation.getTypeText())) {
return false;
}
myCellRenderer.itemAdded(item, presentation);
LookupArranger arranger = myArranger;
arranger.registerMatcher(item, matcher);
arranger.addElement(item, presentation);
return true;
}
private void addDummyItems(int count) {
EmptyLookupItem dummy = new EmptyLookupItem("loading...", true);
for (int i = count; i > 0; i--) {
getListModel().add(dummy);
}
}
private static boolean containsDummyIdentifier(@Nullable final String s) {
return s != null && s.contains(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED);
}
public void updateLookupWidth(LookupElement item) {
myCellRenderer.updateLookupWidth(item, LookupElementPresentation.renderElement(item));
}
public void requestResize() {
ApplicationManager.getApplication().assertIsDispatchThread();
myResizePending = true;
}
public Collection<LookupElementAction> getActionsFor(LookupElement element) {
final CollectConsumer<LookupElementAction> consumer = new CollectConsumer<>();
for (LookupActionProvider provider : LookupActionProvider.EP_NAME.getExtensions()) {
provider.fillActions(element, this, consumer);
}
if (!consumer.getResult().isEmpty()) {
consumer.consume(new ShowHideIntentionIconLookupAction());
}
return consumer.getResult();
}
public JList getList() {
return myList;
}
@Override
public List<LookupElement> getItems() {
synchronized (myUiLock) {
return ContainerUtil.findAll(getListModel().toList(), element -> !(element instanceof EmptyLookupItem));
}
}
@Override
@NotNull
public String getAdditionalPrefix() {
return myOffsets.getAdditionalPrefix();
}
void fireBeforeAppendPrefix(char c) {
myPrefixChangeListeners.forEach((listener -> listener.beforeAppend(c)));
}
void appendPrefix(char c) {
checkValid();
myOffsets.appendPrefix(c);
myPresentableArranger.prefixChanged(this);
requestResize();
refreshUi(false, true);
ensureSelectionVisible(true);
myPrefixChangeListeners.forEach((listener -> listener.afterAppend(c)));
}
public void setStartCompletionWhenNothingMatches(boolean startCompletionWhenNothingMatches) {
myStartCompletionWhenNothingMatches = startCompletionWhenNothingMatches;
}
public boolean isStartCompletionWhenNothingMatches() {
return myStartCompletionWhenNothingMatches;
}
public void ensureSelectionVisible(boolean forceTopSelection) {
if (isSelectionVisible() && !forceTopSelection) {
return;
}
if (!forceTopSelection) {
ScrollingUtil.ensureIndexIsVisible(myList, myList.getSelectedIndex(), 1);
return;
}
// selected item should be at the top of the visible list
int top = myList.getSelectedIndex();
if (top > 0) {
top--; // show one element above the selected one to give the hint that there are more available via scrolling
}
int firstVisibleIndex = myList.getFirstVisibleIndex();
if (firstVisibleIndex == top) {
return;
}
ScrollingUtil.ensureRangeIsVisible(myList, top, top + myList.getLastVisibleIndex() - firstVisibleIndex);
}
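// Handles prefix shortening (e.g. on backspace); when nothing is left to truncate,
// the arranger decides what to do via prefixTruncated().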
void truncatePrefix(boolean preserveSelection, int hideOffset) {
if (!myOffsets.truncatePrefix()) {
myArranger.prefixTruncated(this, hideOffset);
return;
}
myPrefixChangeListeners.forEach((listener -> listener.beforeTruncate()));
if (preserveSelection) {
markSelectionTouched();
}
myPresentableArranger.prefixChanged(this);
requestResize();
if (myPresentableArranger == myArranger) {
refreshUi(false, true);
ensureSelectionVisible(true);
}
myPrefixChangeListeners.forEach((listener -> listener.afterTruncate()));
}
void moveToCaretPosition() {
myOffsets.destabilizeLookupStart();
refreshUi(false, true);
}
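// Asks the arranger to re-arrange the items, refills the list model and restores the selection;
// returns true if the set of visible items changed.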
private boolean updateList(boolean onExplicitAction, boolean reused) {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
checkValid();
CollectionListModel<LookupElement> listModel = getListModel();
Pair<List<LookupElement>, Integer> pair = myPresentableArranger.arrangeItems(this, onExplicitAction || reused);
List<LookupElement> items = pair.first;
Integer toSelect = pair.second;
if (toSelect == null || toSelect < 0 || items.size() > 0 && toSelect >= items.size()) {
LOG.error("Arranger " + myPresentableArranger + " returned invalid selection index=" + toSelect + "; items=" + items);
toSelect = 0;
}
myOffsets.checkMinPrefixLengthChanges(items, this);
List<LookupElement> oldModel = listModel.toList();
synchronized (myUiLock) {
listModel.removeAll();
if (!items.isEmpty()) {
listModel.add(items);
addDummyItems(myDummyItemCount.get());
}
else {
addEmptyItem(listModel);
}
}
updateListHeight(listModel);
myList.setSelectedIndex(toSelect);
return !ContainerUtil.equalsIdentity(oldModel, items);
}
public boolean isSelectionVisible() {
return ScrollingUtil.isIndexFullyVisible(myList, myList.getSelectedIndex());
}
private boolean checkReused() {
EDT.assertIsEdt();
if (myPresentableArranger != myArranger) {
myPresentableArranger = myArranger;
clearIfLookupAndArrangerPrefixesMatch();
myPresentableArranger.prefixChanged(this);
return true;
}
return false;
}
//Some items may have been passed to myArranger from CompletionProgressIndicator for an older prefix.
//These items won't be cleared while a new prefix is being appended (mayCheckReused = false),
//so these out-of-date items, which were matched against an old prefix, should now be matched against the new, updated lookup prefix.
private void clearIfLookupAndArrangerPrefixesMatch() {
boolean isCompletionArranger = myArranger instanceof CompletionLookupArrangerImpl;
if (isCompletionArranger) {
final String lastLookupArrangersPrefix = ((CompletionLookupArrangerImpl)myArranger).getLastLookupPrefix();
if (lastLookupArrangersPrefix != null && !lastLookupArrangersPrefix.equals(getAdditionalPrefix())) {
LOG.trace("prefixes don't match, do not clear lookup additional prefix");
}
else {
myOffsets.clearAdditionalPrefix();
}
} else {
myOffsets.clearAdditionalPrefix();
}
}
private void updateListHeight(ListModel<LookupElement> model) {
myList.setFixedCellHeight(myCellRenderer.getListCellRendererComponent(myList, model.getElementAt(0), 0, false, false).getPreferredSize().height);
myList.setVisibleRowCount(Math.min(model.getSize(), UISettings.getInstance().getMaxLookupListHeight()));
}
private void addEmptyItem(CollectionListModel<? super LookupElement> model) {
LookupElement item = new EmptyLookupItem(myCalculating ? " " : LangBundle.message("completion.no.suggestions"), false);
model.add(item);
myCellRenderer.itemAdded(item, LookupElementPresentation.renderElement(item));
requestResize();
}
@NotNull
@Override
public String itemPattern(@NotNull LookupElement element) {
if (element instanceof EmptyLookupItem) return "";
return myPresentableArranger.itemPattern(element);
}
@Override
@NotNull
public PrefixMatcher itemMatcher(@NotNull LookupElement item) {
if (item instanceof EmptyLookupItem) {
return new CamelHumpMatcher("");
}
return myPresentableArranger.itemMatcher(item);
}
public void finishLookup(final char completionChar) {
finishLookup(completionChar, (LookupElement)myList.getSelectedValue());
}
public void finishLookup(char completionChar, @Nullable final LookupElement item) {
LOG.assertTrue(!ApplicationManager.getApplication().isWriteAccessAllowed(), "finishLookup should be called without a write action");
final PsiFile file = getPsiFile();
boolean writableOk = file == null || FileModificationService.getInstance().prepareFileForWrite(file);
if (myDisposed) { // ensureFilesWritable could close us by showing a dialog
return;
}
if (!writableOk) {
hideWithItemSelected(null, completionChar);
return;
}
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookupInWritableFile(completionChar, item), null, null);
}
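// Performs the actual selection: fires beforeItemSelected, inserts the lookup string
// under a write action, hides the lookup and fires itemSelected.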
void finishLookupInWritableFile(char completionChar, @Nullable LookupElement item) {
//noinspection deprecation,unchecked
if (item == null ||
!item.isValid() ||
item instanceof EmptyLookupItem ||
item.getObject() instanceof DeferredUserLookupValue &&
item.as(LookupItem.CLASS_CONDITION_KEY) != null &&
!((DeferredUserLookupValue)item.getObject()).handleUserSelection(item.as(LookupItem.CLASS_CONDITION_KEY), myProject)) {
hideWithItemSelected(null, completionChar);
return;
}
if (item.getUserData(CodeCompletionHandlerBase.DIRECT_INSERTION) != null) {
hideWithItemSelected(item, completionChar);
return;
}
if (myDisposed) { // DeferredUserLookupValue could close us in any way
return;
}
final String prefix = itemPattern(item);
boolean plainMatch = ContainerUtil.or(item.getAllLookupStrings(), s -> StringUtil.containsIgnoreCase(s, prefix));
if (!plainMatch) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_CAMEL_HUMPS);
}
myFinishing = true;
if (fireBeforeItemSelected(item, completionChar)) {
ApplicationManager.getApplication().runWriteAction(() -> {
myEditor.getDocument().startGuardedBlockChecking();
try {
insertLookupString(item, getPrefixLength(item));
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
}
});
}
if (myDisposed) { // any document listeners could close us
return;
}
doHide(false, true);
fireItemSelected(item, completionChar);
}
private void hideWithItemSelected(LookupElement lookupItem, char completionChar) {
fireBeforeItemSelected(lookupItem, completionChar);
doHide(false, true);
fireItemSelected(lookupItem, completionChar);
}
public int getPrefixLength(LookupElement item) {
return myOffsets.getPrefixLength(item, this);
}
protected void insertLookupString(LookupElement item, final int prefix) {
insertLookupString(myProject, getTopLevelEditor(), item, itemMatcher(item), itemPattern(item), prefix);
}
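// Replaces the typed prefix with the case-corrected lookup string at every caret
// and moves the carets to the end of the inserted text.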
public static void insertLookupString(final Project project,
Editor editor, LookupElement item,
PrefixMatcher matcher, String itemPattern, final int prefixLength) {
final String lookupString = LookupUtil.getCaseCorrectedLookupString(item, matcher, itemPattern);
final Editor hostEditor = editor;
hostEditor.getCaretModel().runForEachCaret(__ -> {
EditorModificationUtil.deleteSelectedText(hostEditor);
final int caretOffset = hostEditor.getCaretModel().getOffset();
int offset = LookupUtil.insertLookupInDocumentWindowIfNeeded(project, editor, caretOffset, prefixLength, lookupString);
hostEditor.getCaretModel().moveToOffset(offset);
hostEditor.getSelectionModel().removeSelection();
});
editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
}
@Override
public int getLookupStart() {
return myOffsets.getLookupStart(disposeTrace);
}
public int getLookupOriginalStart() {
return myOffsets.getLookupOriginalStart();
}
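// Runs a document change that should not hide the lookup; returns false (and hides the lookup)
// when the change invalidated the lookup offsets or disposed the lookup.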
public boolean performGuardedChange(Runnable change) {
checkValid();
myEditor.getDocument().startGuardedBlockChecking();
myGuardedChanges++;
boolean result;
try {
result = myOffsets.performGuardedChange(change);
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
myGuardedChanges--;
}
if (!result || myDisposed) {
hideLookup(false);
return false;
}
if (isVisible() && myEditor.getContentComponent().isShowing()) {
HintManagerImpl.updateLocation(this, myEditor, myUi.calculatePosition().getLocation());
}
checkValid();
return true;
}
@Override
public boolean vetoesHiding() {
return myGuardedChanges > 0;
}
public boolean isAvailableToUser() {
if (ApplicationManager.getApplication().isHeadlessEnvironment()) {
return myShown;
}
return isVisible();
}
@Override
public boolean isShown() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
return myShown;
}
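// Shows the lookup hint in the editor; returns false when the editor is not visible
// or the hint could not actually be shown.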
public boolean showLookup() {
ApplicationManager.getApplication().assertIsDispatchThread();
checkValid();
LOG.assertTrue(!myShown);
myShown = true;
myStampShown = System.currentTimeMillis();
fireLookupShown();
if (ApplicationManager.getApplication().isHeadlessEnvironment()) return true;
if (!EditorActivityManager.getInstance().isVisible(myEditor)) {
hideLookup(false);
return false;
}
myAdComponent.showRandomText();
if (Boolean.TRUE.equals(myEditor.getUserData(AutoPopupController.NO_ADS))) {
myAdComponent.clearAdvertisements();
}
myUi = new LookupUi(this, myAdComponent, myList);//, myProject);
myUi.setCalculating(myCalculating);
Point p = myUi.calculatePosition().getLocation();
if (ScreenReader.isActive()) {
myList.setFocusable(true);
setFocusRequestor(myList);
AnActionEvent actionEvent = AnActionEvent.createFromDataContext(ActionPlaces.EDITOR_POPUP, null, ((EditorImpl)myEditor).getDataContext());
delegateActionToEditor(IdeActions.ACTION_EDITOR_BACKSPACE, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_ESCAPE, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_TAB, () -> new ChooseItemAction.Replacing(), actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_ENTER,
/* e.g. rename popup comes initially unfocused */
() -> getLookupFocusDegree() == LookupFocusDegree.UNFOCUSED ? new NextVariableAction() : new ChooseItemAction.FocusedOnly(),
actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_UP, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_DOWN, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_RIGHT, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_LEFT, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_RENAME, null, actionEvent);
}
try {
HintManagerImpl.getInstanceImpl().showEditorHint(this, myEditor, p, HintManager.HIDE_BY_ESCAPE | HintManager.UPDATE_BY_SCROLLING, 0, false,
HintManagerImpl.createHintHint(myEditor, p, this, HintManager.UNDER).
setRequestFocus(ScreenReader.isActive()).
setAwtTooltip(false));
}
catch (Exception e) {
LOG.error(e);
}
if (!isVisible() || !myList.isShowing()) {
hideLookup(false);
return false;
}
return true;
}
private void fireLookupShown() {
if (!myListeners.isEmpty()) {
LookupEvent event = new LookupEvent(this, false);
for (LookupListener listener : myListeners) {
listener.lookupShown(event);
}
}
}
private void delegateActionToEditor(@NotNull String actionID, @Nullable Supplier<? extends AnAction> delegateActionSupplier, @NotNull AnActionEvent actionEvent) {
AnAction action = ActionManager.getInstance().getAction(actionID);
DumbAwareAction.create(
e -> ActionUtil.performActionDumbAware(delegateActionSupplier == null ? action : delegateActionSupplier.get(), actionEvent)
).registerCustomShortcutSet(action.getShortcutSet(), myList);
}
public Advertiser getAdvertiser() {
return myAdComponent;
}
public boolean mayBeNoticed() {
return myStampShown > 0 && System.currentTimeMillis() - myStampShown > 300;
}
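// Installs document, caret, selection, mouse and visibility listeners
// that hide the lookup on external changes.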
private void addListeners() {
myEditor.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void documentChanged(@NotNull DocumentEvent e) {
if (myGuardedChanges == 0 && !myFinishing) {
hideLookup(false);
}
}
}, this);
final EditorMouseListener mouseListener = new EditorMouseListener() {
@Override
public void mouseClicked(@NotNull EditorMouseEvent e){
e.consume();
hideLookup(false);
}
};
myEditor.getCaretModel().addCaretListener(new CaretListener() {
@Override
public void caretPositionChanged(@NotNull CaretEvent e) {
if (myGuardedChanges == 0 && !myFinishing) {
hideLookup(false);
}
}
}, this);
myEditor.getSelectionModel().addSelectionListener(new SelectionListener() {
@Override
public void selectionChanged(@NotNull final SelectionEvent e) {
if (myGuardedChanges == 0 && !myFinishing) {
hideLookup(false);
}
}
}, this);
myEditor.addEditorMouseListener(mouseListener, this);
JComponent editorComponent = myEditor.getContentComponent();
if (editorComponent.isShowing()) {
Disposer.register(this, new UiNotifyConnector(editorComponent, new Activatable() {
@Override
public void showNotify() {
}
@Override
public void hideNotify() {
hideLookup(false);
}
}));
Window window = ComponentUtil.getWindow(editorComponent);
if (window != null) {
ComponentListener windowListener = new ComponentAdapter() {
@Override
public void componentMoved(ComponentEvent event) {
hideLookup(false);
}
};
window.addComponentListener(windowListener);
Disposer.register(this, () -> window.removeComponentListener(windowListener));
}
}
myList.addListSelectionListener(new ListSelectionListener() {
private LookupElement oldItem = null;
@Override
public void valueChanged(@NotNull ListSelectionEvent e){
if (!myUpdating) {
final LookupElement item = getCurrentItem();
fireCurrentItemChanged(oldItem, item);
oldItem = item;
}
}
});
new ClickListener() {
@Override
public boolean onClick(@NotNull MouseEvent e, int clickCount) {
setLookupFocusDegree(LookupFocusDegree.FOCUSED);
markSelectionTouched();
if (clickCount == 2){
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookup(NORMAL_SELECT_CHAR), "", null, myEditor.getDocument());
}
return true;
}
}.installOn(myList);
}
@Override
@Nullable
public LookupElement getCurrentItem(){
synchronized (myUiLock) {
LookupElement item = (LookupElement)myList.getSelectedValue();
return item instanceof EmptyLookupItem ? null : item;
}
}
@Override
public LookupElement getCurrentItemOrEmpty() {
return (LookupElement)myList.getSelectedValue();
}
@Override
public void setCurrentItem(LookupElement item){
markSelectionTouched();
myList.setSelectedValue(item, false);
}
@Override
public void addLookupListener(LookupListener listener){
myListeners.add(listener);
}
@Override
public void removeLookupListener(LookupListener listener){
myListeners.remove(listener);
}
@Override
public Rectangle getCurrentItemBounds(){
int index = myList.getSelectedIndex();
if (index < 0) {
LOG.error("No selected element, size=" + getListModel().getSize() + "; items" + getItems());
}
Rectangle itemBounds = myList.getCellBounds(index, index);
if (itemBounds == null){
LOG.error("No bounds for " + index + "; size=" + getListModel().getSize());
return null;
}
return SwingUtilities.convertRectangle(myList, itemBounds, getComponent());
}
private boolean fireBeforeItemSelected(@Nullable final LookupElement item, char completionChar) {
boolean result = true;
if (!myListeners.isEmpty()){
LookupEvent event = new LookupEvent(this, item, completionChar);
for (LookupListener listener : myListeners) {
try {
if (!listener.beforeItemSelected(event)) result = false;
}
catch (Throwable e) {
LOG.error(e);
}
}
}
return result;
}
public void fireItemSelected(@Nullable final LookupElement item, char completionChar){
if (item != null && item.requiresCommittedDocuments()) {
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
}
myArranger.itemSelected(item, completionChar);
if (!myListeners.isEmpty()){
LookupEvent event = new LookupEvent(this, item, completionChar);
for (LookupListener listener : myListeners) {
try {
listener.itemSelected(event);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
}
private void fireLookupCanceled(final boolean explicitly) {
if (!myListeners.isEmpty()){
LookupEvent event = new LookupEvent(this, explicitly);
for (LookupListener listener : myListeners) {
try {
listener.lookupCanceled(event);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
}
private void fireCurrentItemChanged(@Nullable LookupElement oldItem, @Nullable LookupElement currentItem) {
if (oldItem != currentItem && !myListeners.isEmpty()) {
LookupEvent event = new LookupEvent(this, currentItem, (char)0);
for (LookupListener listener : myListeners) {
listener.currentItemChanged(event);
}
}
myPreview.updatePreview(currentItem);
}
private void fireUiRefreshed() {
for (LookupListener listener : myListeners) {
listener.uiRefreshed();
}
}
public void replacePrefix(final String presentPrefix, final String newPrefix) {
if (!performGuardedChange(() -> {
EditorModificationUtil.deleteSelectedText(myEditor);
int offset = myEditor.getCaretModel().getOffset();
final int start = offset - presentPrefix.length();
myEditor.getDocument().replaceString(start, offset, newPrefix);
myOffsets.clearAdditionalPrefix();
myEditor.getCaretModel().moveToOffset(start + newPrefix.length());
})) {
return;
}
myPresentableArranger.prefixReplaced(this, newPrefix);
refreshUi(true, true);
}
@Override
@Nullable
public PsiFile getPsiFile() {
return PsiDocumentManager.getInstance(myProject).getPsiFile(getEditor().getDocument());
}
@Override
public boolean isCompletion() {
return myArranger.isCompletion();
}
@Override
public PsiElement getPsiElement() {
PsiFile file = getPsiFile();
if (file == null) return null;
int offset = getLookupStart();
Editor editor = getEditor();
if (editor instanceof EditorWindow) {
offset = editor.logicalPositionToOffset(((EditorWindow)editor).hostToInjected(myEditor.offsetToLogicalPosition(offset)));
}
if (offset > 0) return file.findElementAt(offset - 1);
return file.findElementAt(0);
}
@Nullable
private static DocumentWindow getInjectedDocument(Project project, Editor editor, int offset) {
PsiFile hostFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
if (hostFile != null) {
// inspired by com.intellij.codeInsight.editorActions.TypedHandler.injectedEditorIfCharTypedIsSignificant()
List<DocumentWindow> injected = InjectedLanguageManager.getInstance(project).getCachedInjectedDocumentsInRange(hostFile, TextRange.create(offset, offset));
for (DocumentWindow documentWindow : injected ) {
if (documentWindow.isValid() && documentWindow.containsRange(offset, offset)) {
return documentWindow;
}
}
}
return null;
}
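// Returns the injected editor when the caret is inside a language injection, otherwise the host editor.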
@Override
@NotNull
public Editor getEditor() {
DocumentWindow documentWindow = getInjectedDocument(myProject, myEditor, myEditor.getCaretModel().getOffset());
if (documentWindow != null) {
PsiFile injectedFile = PsiDocumentManager.getInstance(myProject).getPsiFile(documentWindow);
return InjectedLanguageUtil.getInjectedEditorForInjectedFile(myEditor, injectedFile);
}
return myEditor;
}
@Override
@NotNull
public Editor getTopLevelEditor() {
return myEditor;
}
@NotNull
@Override
public Project getProject() {
return myProject;
}
@Override
public boolean isPositionedAboveCaret(){
return myUi != null && myUi.isPositionedAboveCaret();
}
@Override
public boolean isSelectionTouched() {
return mySelectionTouched;
}
@Override
public int getLastVisibleIndex() {
if (myLastVisibleIndex != null) {
return myLastVisibleIndex;
}
return myList.getLastVisibleIndex();
}
@Override
public List<String> getAdvertisements() {
return myAdComponent.getAdvertisements();
}
@Override
public void hide(){
hideLookup(true);
}
@Override
public void hideLookup(boolean explicitly) {
ApplicationManager.getApplication().assertIsDispatchThread();
if (myHidden) return;
doHide(true, explicitly);
}
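// Hides the hint and disposes the lookup; optionally notifies listeners that it was canceled.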
private void doHide(final boolean fireCanceled, final boolean explicitly) {
if (myDisposed) {
LOG.error(formatDisposeTrace());
}
else {
myHidden = true;
try {
super.hide();
Disposer.dispose(this);
ToolTipManager.sharedInstance().unregisterComponent(myList);
assert myDisposed;
}
catch (Throwable e) {
LOG.error(e);
}
}
if (fireCanceled) {
fireLookupCanceled(explicitly);
}
}
@Override
protected void onPopupCancel() {
hide();
}
private static Throwable staticDisposeTrace = null;
private Throwable disposeTrace = null;
public static String getLastLookupDisposeTrace() {
return ExceptionUtil.getThrowableText(staticDisposeTrace);
}
@Override
public void dispose() {
ApplicationManager.getApplication().assertIsDispatchThread();
assert myHidden;
if (myDisposed) {
LOG.error(formatDisposeTrace());
return;
}
myOffsets.disposeMarkers();
disposeTrace = new Throwable();
myDisposed = true;
if (LOG.isDebugEnabled()) {
LOG.debug("Disposing lookup:", disposeTrace);
}
//noinspection AssignmentToStaticFieldFromInstanceMethod
staticDisposeTrace = disposeTrace;
}
private String formatDisposeTrace() {
return ExceptionUtil.getThrowableText(disposeTrace) + "\n============";
}
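// Recomputes the list model (optionally switching to a newly created arranger) and repaints the popup;
// always fires currentItemChanged/uiRefreshed afterwards.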
public void refreshUi(boolean mayCheckReused, boolean onExplicitAction) {
assert !myUpdating;
LookupElement prevItem = getCurrentItem();
myUpdating = true;
try {
final boolean reused = mayCheckReused && checkReused();
boolean selectionVisible = isSelectionVisible();
boolean itemsChanged = updateList(onExplicitAction, reused);
if (isVisible()) {
LOG.assertTrue(!ApplicationManager.getApplication().isUnitTestMode());
myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
}
}
finally {
myUpdating = false;
fireCurrentItemChanged(prevItem, getCurrentItem());
fireUiRefreshed();
}
}
public void markReused() {
EDT.assertIsEdt();
myArranger = myArranger.createEmptyCopy();
requestResize();
}
public void addAdvertisement(@NotNull String text, @Nullable Icon icon) {
if (!containsDummyIdentifier(text)) {
myAdComponent.addAdvertisement(text, icon);
requestResize();
}
}
public boolean isLookupDisposed() {
return myDisposed;
}
public void checkValid() {
if (myDisposed) {
throw new AssertionError("Disposed at: " + formatDisposeTrace());
}
}
@Override
public void showElementActions(@Nullable InputEvent event) {
if (!isVisible()) return;
LookupElement element = getCurrentItem();
if (element == null) {
return;
}
Collection<LookupElementAction> actions = getActionsFor(element);
if (actions.isEmpty()) {
return;
}
UIEventLogger.logUIEvent(UIEventId.LookupShowElementActions);
Rectangle itemBounds = getCurrentItemBounds();
Rectangle visibleRect = SwingUtilities.convertRectangle(myList, myList.getVisibleRect(), getComponent());
ListPopup listPopup = JBPopupFactory.getInstance().createListPopup(new LookupActionsStep(actions, this, element));
Point p = (itemBounds.intersects(visibleRect) || event == null) ?
new Point(itemBounds.x + itemBounds.width, itemBounds.y):
SwingUtilities.convertPoint(event.getComponent(), new Point(0, event.getComponent().getHeight() + JBUIScale.scale(2)), getComponent());
listPopup.show(new RelativePoint(getComponent(), p));
}
@NotNull
public Map<LookupElement, List<Pair<String, Object>>> getRelevanceObjects(@NotNull Iterable<? extends LookupElement> items, boolean hideSingleValued) {
return myPresentableArranger.getRelevanceObjects(items, hideSingleValued);
}
public void setPrefixChangeListener(PrefixChangeListener listener) {
myPrefixChangeListeners.add(listener);
}
public void addPrefixChangeListener(PrefixChangeListener listener, Disposable parentDisposable) {
ContainerUtil.add(listener, myPrefixChangeListeners, parentDisposable);
}
FontPreferences getFontPreferences() {
return myFontPreferences;
}
@NotNull
private static LookupFocusDegree convertToLookupFocusDegree(@NotNull FocusDegree focusDegree) {
switch (focusDegree) {
case FOCUSED:
return LookupFocusDegree.FOCUSED;
case SEMI_FOCUSED:
return LookupFocusDegree.SEMI_FOCUSED;
case UNFOCUSED:
return LookupFocusDegree.UNFOCUSED;
default:
throw new IllegalStateException("Unknown focusDegree " + focusDegree);
}
}
/**
* @deprecated Use {@link LookupFocusDegree}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2020.3")
public enum FocusDegree { FOCUSED, SEMI_FOCUSED, UNFOCUSED }
}
| platform/lang-impl/src/com/intellij/codeInsight/lookup/impl/LookupImpl.java | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.lookup.impl;
import com.intellij.codeInsight.AutoPopupController;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.completion.impl.CamelHumpMatcher;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.lookup.impl.actions.ChooseItemAction;
import com.intellij.codeInsight.template.impl.actions.NextVariableAction;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.ui.UISettings;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.internal.statistic.service.fus.collectors.UIEventId;
import com.intellij.internal.statistic.service.fus.collectors.UIEventLogger;
import com.intellij.lang.LangBundle;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorActivityManager;
import com.intellij.openapi.editor.EditorModificationUtil;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.colors.FontPreferences;
import com.intellij.openapi.editor.colors.impl.FontPreferencesImpl;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.JBList;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.CollectConsumer;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EDT;
import com.intellij.util.ui.accessibility.AccessibleContextUtil;
import com.intellij.util.ui.accessibility.ScreenReader;
import com.intellij.util.ui.update.Activatable;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.*;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
public class LookupImpl extends LightweightHint implements LookupEx, Disposable, LookupElementListPresenter {
private static final Logger LOG = Logger.getInstance(LookupImpl.class);
private final LookupOffsets myOffsets;
private final Project myProject;
private final Editor myEditor;
private final Object myUiLock = new Object();
private final JBList myList = new JBList<LookupElement>(new CollectionListModel<>()) {
// 'myList' is focused when "Screen Reader" mode is enabled
@Override
protected void processKeyEvent(@NotNull final KeyEvent e) {
myEditor.getContentComponent().dispatchEvent(e); // let the editor handle actions properly for the lookup list
}
@NotNull
@Override
protected ExpandableItemsHandler<Integer> createExpandableItemsHandler() {
return new CompletionExtender(this);
}
};
final LookupCellRenderer myCellRenderer;
private final List<LookupListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
private final List<PrefixChangeListener> myPrefixChangeListeners = ContainerUtil.createLockFreeCopyOnWriteList();
private final LookupPreview myPreview = new LookupPreview(this);
// keeping our own copy of editor's font preferences, which can be used in non-EDT threads (to avoid race conditions)
private final FontPreferences myFontPreferences = new FontPreferencesImpl();
private long myStampShown = 0;
private boolean myShown = false;
private boolean myDisposed = false;
private boolean myHidden = false;
private boolean mySelectionTouched;
private LookupFocusDegree myLookupFocusDegree = LookupFocusDegree.FOCUSED;
private volatile boolean myCalculating;
private final Advertiser myAdComponent;
private int myGuardedChanges;
private volatile LookupArranger myArranger;
private LookupArranger myPresentableArranger;
private boolean myStartCompletionWhenNothingMatches;
boolean myResizePending;
private boolean myFinishing;
boolean myUpdating;
private LookupUi myUi;
private Integer myLastVisibleIndex;
private final AtomicInteger myDummyItemCount = new AtomicInteger();
public LookupImpl(Project project, Editor editor, @NotNull LookupArranger arranger) {
super(new JPanel(new BorderLayout()));
setForceShowAsPopup(true);
setCancelOnClickOutside(false);
setResizable(true);
myProject = project;
myEditor = InjectedLanguageUtil.getTopLevelEditor(editor);
myArranger = arranger;
myPresentableArranger = arranger;
myEditor.getColorsScheme().getFontPreferences().copyTo(myFontPreferences);
DaemonCodeAnalyzer.getInstance(myProject).disableUpdateByTimer(this);
myCellRenderer = new LookupCellRenderer(this);
myList.setCellRenderer(myCellRenderer);
myList.setFocusable(false);
myList.setFixedCellWidth(50);
myList.setBorder(null);
// Set the editor's content component as the accessible parent of the list so that screen readers do not think
// a new top level frame just got the focus. This is important to prevent screen readers
// from announcing the title of the top level frame when the list is shown (or hidden),
// as they usually do when a new top-level frame receives the focus.
AccessibleContextUtil.setParent((Component)myList, myEditor.getContentComponent());
myList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
myList.setBackground(LookupCellRenderer.BACKGROUND_COLOR);
myAdComponent = new Advertiser();
myAdComponent.setBackground(LookupCellRenderer.BACKGROUND_COLOR);
myOffsets = new LookupOffsets(myEditor);
final CollectionListModel<LookupElement> model = getListModel();
addEmptyItem(model);
updateListHeight(model);
addListeners();
}
private CollectionListModel<LookupElement> getListModel() {
//noinspection unchecked
return (CollectionListModel<LookupElement>)myList.getModel();
}
public void setArranger(LookupArranger arranger) {
myArranger = arranger;
}
@Override
public boolean isFocused() {
return getLookupFocusDegree() == LookupFocusDegree.FOCUSED;
}
/**
* @deprecated Use {@link #setLookupFocusDegree(LookupFocusDegree)}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2020.3")
public void setFocusDegree(FocusDegree focusDegree) {
if (focusDegree != null) {
setLookupFocusDegree(convertToLookupFocusDegree(focusDegree));
}
}
@NotNull
@Override
public LookupFocusDegree getLookupFocusDegree() {
return myLookupFocusDegree;
}
public void setLookupFocusDegree(@NotNull LookupFocusDegree lookupFocusDegree) {
myLookupFocusDegree = lookupFocusDegree;
for (LookupListener listener : myListeners) {
listener.focusDegreeChanged();
}
}
public boolean isCalculating() {
return myCalculating;
}
public void setCalculating(boolean calculating) {
myCalculating = calculating;
if (myUi != null) {
myUi.setCalculating(calculating);
}
}
public void markSelectionTouched() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
mySelectionTouched = true;
myList.repaint();
}
@TestOnly
public void setSelectionTouched(boolean selectionTouched) {
mySelectionTouched = selectionTouched;
}
@Override
public int getSelectedIndex() {
return myList.getSelectedIndex();
}
public void setSelectedIndex(int index) {
myList.setSelectedIndex(index);
myList.ensureIndexIsVisible(index);
}
public void setDummyItemCount(int count) {
myDummyItemCount.set(count);
}
public void repaintLookup(boolean onExplicitAction, boolean reused, boolean selectionVisible, boolean itemsChanged) {
myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
}
public void resort(boolean addAgain) {
final List<LookupElement> items = getItems();
myPresentableArranger.prefixChanged(this);
synchronized (myUiLock) {
getListModel().removeAll();
}
if (addAgain) {
for (final LookupElement item : items) {
addItem(item, itemMatcher(item));
}
}
refreshUi(true, true);
}
public boolean addItem(LookupElement item, PrefixMatcher matcher) {
LookupElementPresentation presentation = LookupElementPresentation.renderElement(item);
if (containsDummyIdentifier(presentation.getItemText()) ||
containsDummyIdentifier(presentation.getTailText()) ||
containsDummyIdentifier(presentation.getTypeText())) {
return false;
}
myCellRenderer.itemAdded(item, presentation);
LookupArranger arranger = myArranger;
arranger.registerMatcher(item, matcher);
arranger.addElement(item, presentation);
return true;
}
private void addDummyItems(int count) {
EmptyLookupItem dummy = new EmptyLookupItem("loading...", true);
for (int i = count; i > 0; i--) {
getListModel().add(dummy);
}
}
private static boolean containsDummyIdentifier(@Nullable final String s) {
return s != null && s.contains(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED);
}
public void updateLookupWidth(LookupElement item) {
myCellRenderer.updateLookupWidth(item, LookupElementPresentation.renderElement(item));
}
public void requestResize() {
ApplicationManager.getApplication().assertIsDispatchThread();
myResizePending = true;
}
public Collection<LookupElementAction> getActionsFor(LookupElement element) {
final CollectConsumer<LookupElementAction> consumer = new CollectConsumer<>();
for (LookupActionProvider provider : LookupActionProvider.EP_NAME.getExtensions()) {
provider.fillActions(element, this, consumer);
}
if (!consumer.getResult().isEmpty()) {
consumer.consume(new ShowHideIntentionIconLookupAction());
}
return consumer.getResult();
}
public JList getList() {
return myList;
}
@Override
public List<LookupElement> getItems() {
synchronized (myUiLock) {
return ContainerUtil.findAll(getListModel().toList(), element -> !(element instanceof EmptyLookupItem));
}
}
@Override
@NotNull
public String getAdditionalPrefix() {
return myOffsets.getAdditionalPrefix();
}
void fireBeforeAppendPrefix(char c) {
myPrefixChangeListeners.forEach((listener -> listener.beforeAppend(c)));
}
void appendPrefix(char c) {
checkValid();
myOffsets.appendPrefix(c);
myPresentableArranger.prefixChanged(this);
requestResize();
refreshUi(false, true);
ensureSelectionVisible(true);
myPrefixChangeListeners.forEach((listener -> listener.afterAppend(c)));
}
public void setStartCompletionWhenNothingMatches(boolean startCompletionWhenNothingMatches) {
myStartCompletionWhenNothingMatches = startCompletionWhenNothingMatches;
}
public boolean isStartCompletionWhenNothingMatches() {
return myStartCompletionWhenNothingMatches;
}
public void ensureSelectionVisible(boolean forceTopSelection) {
if (isSelectionVisible() && !forceTopSelection) {
return;
}
if (!forceTopSelection) {
ScrollingUtil.ensureIndexIsVisible(myList, myList.getSelectedIndex(), 1);
return;
}
// selected item should be at the top of the visible list
int top = myList.getSelectedIndex();
if (top > 0) {
top--; // show one element above the selected one to give the hint that there are more available via scrolling
}
int firstVisibleIndex = myList.getFirstVisibleIndex();
if (firstVisibleIndex == top) {
return;
}
ScrollingUtil.ensureRangeIsVisible(myList, top, top + myList.getLastVisibleIndex() - firstVisibleIndex);
}
void truncatePrefix(boolean preserveSelection, int hideOffset) {
if (!myOffsets.truncatePrefix()) {
myArranger.prefixTruncated(this, hideOffset);
return;
}
myPrefixChangeListeners.forEach((listener -> listener.beforeTruncate()));
if (preserveSelection) {
markSelectionTouched();
}
myPresentableArranger.prefixChanged(this);
requestResize();
if (myPresentableArranger == myArranger) {
refreshUi(false, true);
ensureSelectionVisible(true);
}
myPrefixChangeListeners.forEach((listener -> listener.afterTruncate()));
}
void moveToCaretPosition() {
myOffsets.destabilizeLookupStart();
refreshUi(false, true);
}
private boolean updateList(boolean onExplicitAction, boolean reused) {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
checkValid();
CollectionListModel<LookupElement> listModel = getListModel();
Pair<List<LookupElement>, Integer> pair = myPresentableArranger.arrangeItems(this, onExplicitAction || reused);
List<LookupElement> items = pair.first;
Integer toSelect = pair.second;
if (toSelect == null || toSelect < 0 || items.size() > 0 && toSelect >= items.size()) {
LOG.error("Arranger " + myPresentableArranger + " returned invalid selection index=" + toSelect + "; items=" + items);
toSelect = 0;
}
myOffsets.checkMinPrefixLengthChanges(items, this);
List<LookupElement> oldModel = listModel.toList();
synchronized (myUiLock) {
listModel.removeAll();
if (!items.isEmpty()) {
listModel.add(items);
addDummyItems(myDummyItemCount.get());
}
else {
addEmptyItem(listModel);
}
}
updateListHeight(listModel);
myList.setSelectedIndex(toSelect);
return !ContainerUtil.equalsIdentity(oldModel, items);
}
public boolean isSelectionVisible() {
return ScrollingUtil.isIndexFullyVisible(myList, myList.getSelectedIndex());
}
private boolean checkReused() {
EDT.assertIsEdt();
if (myPresentableArranger != myArranger) {
myPresentableArranger = myArranger;
clearIfLookupAndArrangerPrefixesMatch();
myPresentableArranger.prefixChanged(this);
return true;
}
return false;
}
//Some items may have been passed to myArranger from CompletionProgressIndicator for an older prefix.
//These items won't be cleared while a new prefix is being appended (mayCheckReused = false),
//so these out-of-date items, which were matched against an old prefix, should now be matched against the new, updated lookup prefix.
private void clearIfLookupAndArrangerPrefixesMatch() {
boolean isCompletionArranger = myArranger instanceof CompletionLookupArrangerImpl;
if (isCompletionArranger) {
final String lastLookupArrangersPrefix = ((CompletionLookupArrangerImpl)myArranger).getLastLookupPrefix();
if (lastLookupArrangersPrefix != null && !lastLookupArrangersPrefix.equals(getAdditionalPrefix())) {
LOG.trace("prefixes don't match, do not clear lookup additional prefix");
}
else {
myOffsets.clearAdditionalPrefix();
}
} else {
myOffsets.clearAdditionalPrefix();
}
}
private void updateListHeight(ListModel<LookupElement> model) {
myList.setFixedCellHeight(myCellRenderer.getListCellRendererComponent(myList, model.getElementAt(0), 0, false, false).getPreferredSize().height);
myList.setVisibleRowCount(Math.min(model.getSize(), UISettings.getInstance().getMaxLookupListHeight()));
}
private void addEmptyItem(CollectionListModel<? super LookupElement> model) {
LookupElement item = new EmptyLookupItem(myCalculating ? " " : LangBundle.message("completion.no.suggestions"), false);
model.add(item);
myCellRenderer.itemAdded(item, LookupElementPresentation.renderElement(item));
requestResize();
}
@NotNull
@Override
public String itemPattern(@NotNull LookupElement element) {
if (element instanceof EmptyLookupItem) return "";
return myPresentableArranger.itemPattern(element);
}
@Override
@NotNull
public PrefixMatcher itemMatcher(@NotNull LookupElement item) {
if (item instanceof EmptyLookupItem) {
return new CamelHumpMatcher("");
}
return myPresentableArranger.itemMatcher(item);
}
public void finishLookup(final char completionChar) {
finishLookup(completionChar, (LookupElement)myList.getSelectedValue());
}
public void finishLookup(char completionChar, @Nullable final LookupElement item) {
LOG.assertTrue(!ApplicationManager.getApplication().isWriteAccessAllowed(), "finishLookup should be called without a write action");
final PsiFile file = getPsiFile();
boolean writableOk = file == null || FileModificationService.getInstance().prepareFileForWrite(file);
if (myDisposed) { // ensureFilesWritable could close us by showing a dialog
return;
}
if (!writableOk) {
hideWithItemSelected(null, completionChar);
return;
}
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookupInWritableFile(completionChar, item), null, null);
}
void finishLookupInWritableFile(char completionChar, @Nullable LookupElement item) {
//noinspection deprecation,unchecked
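// Nothing to insert: the item is missing, invalidated, a placeholder, or a deferred user value that rejected the selection.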
if (item == null ||
!item.isValid() ||
item instanceof EmptyLookupItem ||
item.getObject() instanceof DeferredUserLookupValue &&
item.as(LookupItem.CLASS_CONDITION_KEY) != null &&
!((DeferredUserLookupValue)item.getObject()).handleUserSelection(item.as(LookupItem.CLASS_CONDITION_KEY), myProject)) {
hideWithItemSelected(null, completionChar);
return;
}
if (item.getUserData(CodeCompletionHandlerBase.DIRECT_INSERTION) != null) {
hideWithItemSelected(item, completionChar);
return;
}
if (myDisposed) { // DeferredUserLookupValue could close us in any way
return;
}
final String prefix = itemPattern(item);
boolean plainMatch = ContainerUtil.or(item.getAllLookupStrings(), s -> StringUtil.containsIgnoreCase(s, prefix));
if (!plainMatch) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_CAMEL_HUMPS);
}
myFinishing = true;
if (fireBeforeItemSelected(item, completionChar)) {
ApplicationManager.getApplication().runWriteAction(() -> {
myEditor.getDocument().startGuardedBlockChecking();
try {
insertLookupString(item, getPrefixLength(item));
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
}
});
}
if (myDisposed) { // any document listeners could close us
return;
}
doHide(false, true);
fireItemSelected(item, completionChar);
}
private void hideWithItemSelected(LookupElement lookupItem, char completionChar) {
fireBeforeItemSelected(lookupItem, completionChar);
doHide(false, true);
fireItemSelected(lookupItem, completionChar);
}
public int getPrefixLength(LookupElement item) {
return myOffsets.getPrefixLength(item, this);
}
protected void insertLookupString(LookupElement item, final int prefix) {
insertLookupString(myProject, getTopLevelEditor(), item, itemMatcher(item), itemPattern(item), prefix);
}
public static void insertLookupString(final Project project,
Editor editor, LookupElement item,
PrefixMatcher matcher, String itemPattern, final int prefixLength) {
final String lookupString = LookupUtil.getCaseCorrectedLookupString(item, matcher, itemPattern);
final Editor hostEditor = editor;
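// For each caret: drop the selection, splice the lookup string over the typed prefix, and move the caret past it.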
hostEditor.getCaretModel().runForEachCaret(__ -> {
EditorModificationUtil.deleteSelectedText(hostEditor);
final int caretOffset = hostEditor.getCaretModel().getOffset();
int offset = LookupUtil.insertLookupInDocumentWindowIfNeeded(project, editor, caretOffset, prefixLength, lookupString);
hostEditor.getCaretModel().moveToOffset(offset);
hostEditor.getSelectionModel().removeSelection();
});
editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
}
@Override
public int getLookupStart() {
return myOffsets.getLookupStart(disposeTrace);
}
public int getLookupOriginalStart() {
return myOffsets.getLookupOriginalStart();
}
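// Runs a document change under guarded-block checking; hides the lookup and returns false if the change
// invalidated the lookup offsets or disposed the lookup, otherwise keeps the hint position up to date.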
public boolean performGuardedChange(Runnable change) {
checkValid();
myEditor.getDocument().startGuardedBlockChecking();
myGuardedChanges++;
boolean result;
try {
result = myOffsets.performGuardedChange(change);
}
finally {
myEditor.getDocument().stopGuardedBlockChecking();
myGuardedChanges--;
}
if (!result || myDisposed) {
hideLookup(false);
return false;
}
if (isVisible() && myEditor.getContentComponent().isShowing()) {
HintManagerImpl.updateLocation(this, myEditor, myUi.calculatePosition().getLocation());
}
checkValid();
return true;
}
@Override
public boolean vetoesHiding() {
return myGuardedChanges > 0;
}
public boolean isAvailableToUser() {
if (ApplicationManager.getApplication().isHeadlessEnvironment()) {
return myShown;
}
return isVisible();
}
@Override
public boolean isShown() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
ApplicationManager.getApplication().assertIsDispatchThread();
}
return myShown;
}
public boolean showLookup() {
ApplicationManager.getApplication().assertIsDispatchThread();
checkValid();
LOG.assertTrue(!myShown);
myShown = true;
myStampShown = System.currentTimeMillis();
fireLookupShown();
if (ApplicationManager.getApplication().isHeadlessEnvironment()) return true;
if (!EditorActivityManager.getInstance().isVisible(myEditor)) {
hideLookup(false);
return false;
}
myAdComponent.showRandomText();
if (Boolean.TRUE.equals(myEditor.getUserData(AutoPopupController.NO_ADS))) {
myAdComponent.clearAdvertisements();
}
myUi = new LookupUi(this, myAdComponent, myList);
myUi.setCalculating(myCalculating);
Point p = myUi.calculatePosition().getLocation();
if (ScreenReader.isActive()) {
myList.setFocusable(true);
setFocusRequestor(myList);
AnActionEvent actionEvent = AnActionEvent.createFromDataContext(ActionPlaces.EDITOR_POPUP, null, ((EditorImpl)myEditor).getDataContext());
delegateActionToEditor(IdeActions.ACTION_EDITOR_BACKSPACE, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_ESCAPE, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_TAB, () -> new ChooseItemAction.Replacing(), actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_ENTER,
/* e.g. rename popup comes initially unfocused */
() -> getLookupFocusDegree() == LookupFocusDegree.UNFOCUSED ? new NextVariableAction() : new ChooseItemAction.FocusedOnly(),
actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_UP, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_DOWN, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_RIGHT, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_EDITOR_MOVE_CARET_LEFT, null, actionEvent);
delegateActionToEditor(IdeActions.ACTION_RENAME, null, actionEvent);
}
try {
HintManagerImpl.getInstanceImpl().showEditorHint(this, myEditor, p, HintManager.HIDE_BY_ESCAPE | HintManager.UPDATE_BY_SCROLLING, 0, false,
HintManagerImpl.createHintHint(myEditor, p, this, HintManager.UNDER).
setRequestFocus(ScreenReader.isActive()).
setAwtTooltip(false));
}
catch (Exception e) {
LOG.error(e);
}
if (!isVisible() || !myList.isShowing()) {
hideLookup(false);
return false;
}
return true;
}
private void fireLookupShown() {
if (!myListeners.isEmpty()) {
LookupEvent event = new LookupEvent(this, false);
for (LookupListener listener : myListeners) {
listener.lookupShown(event);
}
}
}
private void delegateActionToEditor(@NotNull String actionID, @Nullable Supplier<? extends AnAction> delegateActionSupplier, @NotNull AnActionEvent actionEvent) {
AnAction action = ActionManager.getInstance().getAction(actionID);
DumbAwareAction.create(
e -> ActionUtil.performActionDumbAware(delegateActionSupplier == null ? action : delegateActionSupplier.get(), actionEvent)
).registerCustomShortcutSet(action.getShortcutSet(), myList);
}
public Advertiser getAdvertiser() {
return myAdComponent;
}
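// Heuristic: the user can only have noticed the lookup if it has been visible for more than 300 ms.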
public boolean mayBeNoticed() {
return myStampShown > 0 && System.currentTimeMillis() - myStampShown > 300;
}
private void addListeners() {
myEditor.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void documentChanged(@NotNull DocumentEvent e) {
if (myGuardedChanges == 0 && !myFinishing) {
hideLookup(false);
}
}
}, this);
final EditorMouseListener mouseListener = new EditorMouseListener() {
@Override
public void mouseClicked(@NotNull EditorMouseEvent e){
e.consume();
hideLookup(false);
}
};
myEditor.getCaretModel().addCaretListener(new CaretListener() {
@Override
public void caretPositionChanged(@NotNull CaretEvent e) {
if (myGuardedChanges == 0 && !myFinishing) {
hideLookup(false);
}
}
}, this);
myEditor.getSelectionModel().addSelectionListener(new SelectionListener() {
@Override
public void selectionChanged(@NotNull final SelectionEvent e) {
if (myGuardedChanges == 0 && !myFinishing) {
hideLookup(false);
}
}
}, this);
myEditor.addEditorMouseListener(mouseListener, this);
JComponent editorComponent = myEditor.getContentComponent();
if (editorComponent.isShowing()) {
Disposer.register(this, new UiNotifyConnector(editorComponent, new Activatable() {
@Override
public void showNotify() {
}
@Override
public void hideNotify() {
hideLookup(false);
}
}));
Window window = ComponentUtil.getWindow(editorComponent);
if (window != null) {
ComponentListener windowListener = new ComponentAdapter() {
@Override
public void componentMoved(ComponentEvent event) {
hideLookup(false);
}
};
window.addComponentListener(windowListener);
Disposer.register(this, () -> window.removeComponentListener(windowListener));
}
}
myList.addListSelectionListener(new ListSelectionListener() {
private LookupElement oldItem = null;
@Override
public void valueChanged(@NotNull ListSelectionEvent e){
if (!myUpdating) {
final LookupElement item = getCurrentItem();
fireCurrentItemChanged(oldItem, item);
oldItem = item;
}
}
});
new ClickListener() {
@Override
public boolean onClick(@NotNull MouseEvent e, int clickCount) {
setLookupFocusDegree(LookupFocusDegree.FOCUSED);
markSelectionTouched();
if (clickCount == 2){
CommandProcessor.getInstance().executeCommand(myProject, () -> finishLookup(NORMAL_SELECT_CHAR), "", null, myEditor.getDocument());
}
return true;
}
}.installOn(myList);
}
@Override
@Nullable
public LookupElement getCurrentItem(){
synchronized (myUiLock) {
LookupElement item = (LookupElement)myList.getSelectedValue();
return item instanceof EmptyLookupItem ? null : item;
}
}
@Override
public LookupElement getCurrentItemOrEmpty() {
return (LookupElement)myList.getSelectedValue();
}
@Override
public void setCurrentItem(LookupElement item){
markSelectionTouched();
myList.setSelectedValue(item, false);
}
@Override
public void addLookupListener(LookupListener listener){
myListeners.add(listener);
}
@Override
public void removeLookupListener(LookupListener listener){
myListeners.remove(listener);
}
@Override
public Rectangle getCurrentItemBounds(){
int index = myList.getSelectedIndex();
if (index < 0) {
LOG.error("No selected element, size=" + getListModel().getSize() + "; items=" + getItems());
}
Rectangle itemBounds = myList.getCellBounds(index, index);
if (itemBounds == null){
LOG.error("No bounds for " + index + "; size=" + getListModel().getSize());
return null;
}
return SwingUtilities.convertRectangle(myList, itemBounds, getComponent());
}
private boolean fireBeforeItemSelected(@Nullable final LookupElement item, char completionChar) {
boolean result = true;
if (!myListeners.isEmpty()){
LookupEvent event = new LookupEvent(this, item, completionChar);
for (LookupListener listener : myListeners) {
try {
if (!listener.beforeItemSelected(event)) result = false;
}
catch (Throwable e) {
LOG.error(e);
}
}
}
return result;
}
public void fireItemSelected(@Nullable final LookupElement item, char completionChar){
if (item != null && item.requiresCommittedDocuments()) {
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
}
myArranger.itemSelected(item, completionChar);
if (!myListeners.isEmpty()){
LookupEvent event = new LookupEvent(this, item, completionChar);
for (LookupListener listener : myListeners) {
try {
listener.itemSelected(event);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
}
private void fireLookupCanceled(final boolean explicitly) {
if (!myListeners.isEmpty()){
LookupEvent event = new LookupEvent(this, explicitly);
for (LookupListener listener : myListeners) {
try {
listener.lookupCanceled(event);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
}
private void fireCurrentItemChanged(@Nullable LookupElement oldItem, @Nullable LookupElement currentItem) {
if (oldItem != currentItem && !myListeners.isEmpty()) {
LookupEvent event = new LookupEvent(this, currentItem, (char)0);
for (LookupListener listener : myListeners) {
listener.currentItemChanged(event);
}
}
myPreview.updatePreview(currentItem);
}
private void fireUiRefreshed() {
for (LookupListener listener : myListeners) {
listener.uiRefreshed();
}
}
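// Replaces the already typed prefix in the document with newPrefix and re-sorts/re-renders the lookup accordingly.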
public void replacePrefix(final String presentPrefix, final String newPrefix) {
if (!performGuardedChange(() -> {
EditorModificationUtil.deleteSelectedText(myEditor);
int offset = myEditor.getCaretModel().getOffset();
final int start = offset - presentPrefix.length();
myEditor.getDocument().replaceString(start, offset, newPrefix);
myOffsets.clearAdditionalPrefix();
myEditor.getCaretModel().moveToOffset(start + newPrefix.length());
})) {
return;
}
myPresentableArranger.prefixReplaced(this, newPrefix);
refreshUi(true, true);
}
@Override
@Nullable
public PsiFile getPsiFile() {
return PsiDocumentManager.getInstance(myProject).getPsiFile(getEditor().getDocument());
}
@Override
public boolean isCompletion() {
return myArranger.isCompletion();
}
@Override
public PsiElement getPsiElement() {
PsiFile file = getPsiFile();
if (file == null) return null;
int offset = getLookupStart();
Editor editor = getEditor();
if (editor instanceof EditorWindow) {
offset = editor.logicalPositionToOffset(((EditorWindow)editor).hostToInjected(myEditor.offsetToLogicalPosition(offset)));
}
if (offset > 0) return file.findElementAt(offset - 1);
return file.findElementAt(0);
}
@Nullable
private static DocumentWindow getInjectedDocument(Project project, Editor editor, int offset) {
PsiFile hostFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
if (hostFile != null) {
// inspired by com.intellij.codeInsight.editorActions.TypedHandler.injectedEditorIfCharTypedIsSignificant()
List<DocumentWindow> injected = InjectedLanguageManager.getInstance(project).getCachedInjectedDocumentsInRange(hostFile, TextRange.create(offset, offset));
for (DocumentWindow documentWindow : injected) {
if (documentWindow.isValid() && documentWindow.containsRange(offset, offset)) {
return documentWindow;
}
}
}
return null;
}
@Override
@NotNull
public Editor getEditor() {
DocumentWindow documentWindow = getInjectedDocument(myProject, myEditor, myEditor.getCaretModel().getOffset());
if (documentWindow != null) {
PsiFile injectedFile = PsiDocumentManager.getInstance(myProject).getPsiFile(documentWindow);
return InjectedLanguageUtil.getInjectedEditorForInjectedFile(myEditor, injectedFile);
}
return myEditor;
}
@Override
@NotNull
public Editor getTopLevelEditor() {
return myEditor;
}
@NotNull
@Override
public Project getProject() {
return myProject;
}
@Override
public boolean isPositionedAboveCaret(){
return myUi != null && myUi.isPositionedAboveCaret();
}
@Override
public boolean isSelectionTouched() {
return mySelectionTouched;
}
@Override
public int getLastVisibleIndex() {
if (myLastVisibleIndex != null) {
return myLastVisibleIndex;
}
return myList.getLastVisibleIndex();
}
@Override
public List<String> getAdvertisements() {
return myAdComponent.getAdvertisements();
}
@Override
public void hide(){
hideLookup(true);
}
@Override
public void hideLookup(boolean explicitly) {
ApplicationManager.getApplication().assertIsDispatchThread();
if (myHidden) return;
doHide(true, explicitly);
}
private void doHide(final boolean fireCanceled, final boolean explicitly) {
if (myDisposed) {
LOG.error(formatDisposeTrace());
}
else {
myHidden = true;
try {
super.hide();
Disposer.dispose(this);
ToolTipManager.sharedInstance().unregisterComponent(myList);
assert myDisposed;
}
catch (Throwable e) {
LOG.error(e);
}
}
if (fireCanceled) {
fireLookupCanceled(explicitly);
}
}
@Override
protected void onPopupCancel() {
hide();
}
private static Throwable staticDisposeTrace = null;
private Throwable disposeTrace = null;
public static String getLastLookupDisposeTrace() {
return ExceptionUtil.getThrowableText(staticDisposeTrace);
}
@Override
public void dispose() {
ApplicationManager.getApplication().assertIsDispatchThread();
assert myHidden;
if (myDisposed) {
LOG.error(formatDisposeTrace());
return;
}
myOffsets.disposeMarkers();
disposeTrace = new Throwable();
myDisposed = true;
if (LOG.isDebugEnabled()) {
LOG.debug("Disposing lookup:", disposeTrace);
}
//noinspection AssignmentToStaticFieldFromInstanceMethod
staticDisposeTrace = disposeTrace;
}
private String formatDisposeTrace() {
return ExceptionUtil.getThrowableText(disposeTrace) + "\n============";
}
public void refreshUi(boolean mayCheckReused, boolean onExplicitAction) {
assert !myUpdating;
LookupElement prevItem = getCurrentItem();
myUpdating = true;
try {
final boolean reused = mayCheckReused && checkReused();
boolean selectionVisible = isSelectionVisible();
boolean itemsChanged = updateList(onExplicitAction, reused);
if (isVisible()) {
LOG.assertTrue(!ApplicationManager.getApplication().isUnitTestMode());
myUi.refreshUi(selectionVisible, itemsChanged, reused, onExplicitAction);
}
}
finally {
myUpdating = false;
fireCurrentItemChanged(prevItem, getCurrentItem());
fireUiRefreshed();
}
}
public void markReused() {
EDT.assertIsEdt();
myArranger = myArranger.createEmptyCopy();
requestResize();
}
public void addAdvertisement(@NotNull String text, @Nullable Icon icon) {
if (!containsDummyIdentifier(text)) {
myAdComponent.addAdvertisement(text, icon);
requestResize();
}
}
public boolean isLookupDisposed() {
return myDisposed;
}
public void checkValid() {
if (myDisposed) {
throw new AssertionError("Disposed at: " + formatDisposeTrace());
}
}
@Override
public void showElementActions(@Nullable InputEvent event) {
if (!isVisible()) return;
LookupElement element = getCurrentItem();
if (element == null) {
return;
}
Collection<LookupElementAction> actions = getActionsFor(element);
if (actions.isEmpty()) {
return;
}
UIEventLogger.logUIEvent(UIEventId.LookupShowElementActions);
Rectangle itemBounds = getCurrentItemBounds();
Rectangle visibleRect = SwingUtilities.convertRectangle(myList, myList.getVisibleRect(), getComponent());
ListPopup listPopup = JBPopupFactory.getInstance().createListPopup(new LookupActionsStep(actions, this, element));
Point p = (itemBounds.intersects(visibleRect) || event == null) ?
new Point(itemBounds.x + itemBounds.width, itemBounds.y):
SwingUtilities.convertPoint(event.getComponent(), new Point(0, event.getComponent().getHeight() + JBUIScale.scale(2)), getComponent());
listPopup.show(new RelativePoint(getComponent(), p));
}
@NotNull
public Map<LookupElement, List<Pair<String, Object>>> getRelevanceObjects(@NotNull Iterable<? extends LookupElement> items, boolean hideSingleValued) {
return myPresentableArranger.getRelevanceObjects(items, hideSingleValued);
}
public void setPrefixChangeListener(PrefixChangeListener listener) {
myPrefixChangeListeners.add(listener);
}
public void addPrefixChangeListener(PrefixChangeListener listener, Disposable parentDisposable) {
ContainerUtil.add(listener, myPrefixChangeListeners, parentDisposable);
}
FontPreferences getFontPreferences() {
return myFontPreferences;
}
@NotNull
private static LookupFocusDegree convertToLookupFocusDegree(@NotNull FocusDegree focusDegree) {
switch (focusDegree) {
case FOCUSED:
return LookupFocusDegree.FOCUSED;
case SEMI_FOCUSED:
return LookupFocusDegree.SEMI_FOCUSED;
case UNFOCUSED:
return LookupFocusDegree.UNFOCUSED;
default:
throw new IllegalStateException("Unknown focusDegree " + focusDegree);
}
}
/**
* @deprecated Use {@link LookupFocusDegree}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2020.3")
public enum FocusDegree { FOCUSED, SEMI_FOCUSED, UNFOCUSED }
}
| restore LookupImpl.getArranger API for "Conventional Commit" plugin
GitOrigin-RevId: b17a606c2709f1c1c2be2f184b6acf018ef8c145 | platform/lang-impl/src/com/intellij/codeInsight/lookup/impl/LookupImpl.java | restore LookupImpl.getArranger API for "Conventional Commit" plugin | <ide><path>latform/lang-impl/src/com/intellij/codeInsight/lookup/impl/LookupImpl.java
<ide> return (CollectionListModel<LookupElement>)myList.getModel();
<ide> }
<ide>
<add> @SuppressWarnings("unused") // used plugins
<add> public LookupArranger getArranger() {
<add> return myArranger;
<add> }
<add>
<ide> public void setArranger(LookupArranger arranger) {
<ide> myArranger = arranger;
<ide> } |
|
Java | apache-2.0 | 9db27958a0f27d8167bb51b524814d448eacd81d | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.impl;
import com.intellij.codeInsight.daemon.GutterMark;
import com.intellij.codeInsight.daemon.NonHideableIconGutterMark;
import com.intellij.codeInsight.folding.impl.FoldingUtil;
import com.intellij.codeInsight.hint.TooltipController;
import com.intellij.codeInsight.hint.TooltipGroup;
import com.intellij.codeInsight.hint.TooltipRenderer;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.dnd.DnDDragStartBean;
import com.intellij.ide.dnd.DnDImage;
import com.intellij.ide.dnd.DnDNativeTarget;
import com.intellij.ide.dnd.DnDSupport;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.customization.CustomActionsSchema;
import com.intellij.internal.statistic.eventLog.FeatureUsageData;
import com.intellij.internal.statistic.service.fus.collectors.FUCounterUsageLogger;
import com.intellij.internal.statistic.utils.PluginInfo;
import com.intellij.internal.statistic.utils.PluginInfoDetectorKt;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.ColorKey;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorFontType;
import com.intellij.openapi.editor.event.EditorMouseEventArea;
import com.intellij.openapi.editor.ex.*;
import com.intellij.openapi.editor.ex.util.EditorUIUtil;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.impl.view.FontLayoutService;
import com.intellij.openapi.editor.impl.view.IterationState;
import com.intellij.openapi.editor.impl.view.VisualLinesIterator;
import com.intellij.openapi.editor.markup.*;
import com.intellij.openapi.fileEditor.impl.EditorComposite;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.GraphicsConfig;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.impl.IdeGlassPaneImpl;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.paint.LinePainter2D;
import com.intellij.ui.paint.LinePainter2D.StrokeType;
import com.intellij.ui.paint.PaintUtil;
import com.intellij.ui.paint.PaintUtil.RoundingMode;
import com.intellij.ui.paint.RectanglePainter2D;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.ui.scale.ScaleContext;
import com.intellij.util.BitUtil;
import com.intellij.util.IconUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.*;
import com.intellij.util.ui.JBValue.JBValueGroup;
import com.intellij.util.ui.accessibility.ScreenReader;
import it.unimi.dsi.fastutil.ints.*;
import it.unimi.dsi.fastutil.objects.ObjectIterable;
import it.unimi.dsi.fastutil.objects.ReferenceOpenHashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.accessibility.Accessible;
import javax.accessibility.AccessibleContext;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.event.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.util.List;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
/**
* Gutter content (left to right):
* <ul>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Line numbers area
* <ul>
* <li>Line numbers</li>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Additional line numbers (used in diff)</li>
* </ul>
* </li>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Annotations area
* <ul>
* <li>Annotations</li>
* <li>Annotations extra (used in distraction free mode)</li>
* </ul>
* </li>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Line markers area
* <ul>
* <li>Left free painters</li>
* <li>Icons</li>
* <li>Gap (required by debugger to set breakpoints with mouse click - IDEA-137353) </li>
* <li>Free painters</li>
* </ul>
* </li>
* <li>Folding area</li>
 * </ul>
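 * <p>
 * A rough sketch (for orientation only) of how this component queries a
 * {@link TextAnnotationGutterProvider} when painting the annotations area; the
 * provider instance is hypothetical, but the calls mirror those made in
 * {@code paintAnnotations()} and {@code paintAnnotationLine()} below:
 * <pre>{@code
 * String text = provider.getLineText(line, editor);       // annotation text for the line
 * EditorFontType style = provider.getStyle(line, editor); // font style for that text
 * Color bg = provider.getBgColor(line, editor);           // optional background fill
 * boolean margin = provider.useMargin();                  // whether to pad the column
 * }</pre>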
*/
@DirtyUI
final class EditorGutterComponentImpl extends EditorGutterComponentEx implements MouseListener, MouseMotionListener, DataProvider, Accessible {
private static final Logger LOG = Logger.getInstance(EditorGutterComponentImpl.class);
private static final JBValueGroup JBVG = new JBValueGroup();
private static final JBValue START_ICON_AREA_WIDTH = JBVG.value(17);
private static final JBValue FREE_PAINTERS_LEFT_AREA_WIDTH = JBVG.value(8);
private static final JBValue FREE_PAINTERS_RIGHT_AREA_WIDTH = JBVG.value(5);
private static final JBValue GAP_BETWEEN_ICONS = JBVG.value(3);
private static final JBValue GAP_BETWEEN_AREAS = JBVG.value(5);
private static final JBValue GAP_BETWEEN_ANNOTATIONS = JBVG.value(5);
private static final TooltipGroup GUTTER_TOOLTIP_GROUP = new TooltipGroup("GUTTER_TOOLTIP_GROUP", 0);
private ClickInfo myLastActionableClick;
@NotNull
private final EditorImpl myEditor;
private final FoldingAnchorsOverlayStrategy myAnchorsDisplayStrategy;
@Nullable private Int2ObjectMap<List<GutterMark>> myLineToGutterRenderers;
private boolean myLineToGutterRenderersCacheForLogicalLines;
private boolean myHasInlaysWithGutterIcons;
private int myStartIconAreaWidth = START_ICON_AREA_WIDTH.get();
private int myIconsAreaWidth;
private int myLineNumberAreaWidth;
private int myAdditionalLineNumberAreaWidth;
@NotNull private List<FoldRegion> myActiveFoldRegions = Collections.emptyList();
private int myTextAnnotationGuttersSize;
private int myTextAnnotationExtraSize;
final IntList myTextAnnotationGutterSizes = new IntArrayList();
final ArrayList<TextAnnotationGutterProvider> myTextAnnotationGutters = new ArrayList<>();
private boolean myGapAfterAnnotations;
private final Map<TextAnnotationGutterProvider, EditorGutterAction> myProviderToListener = new HashMap<>();
private String myLastGutterToolTip;
@NotNull private LineNumberConverter myLineNumberConverter = LineNumberConverter.DEFAULT;
@Nullable private LineNumberConverter myAdditionalLineNumberConverter;
private boolean myShowDefaultGutterPopup = true;
private boolean myCanCloseAnnotations = true;
@Nullable private ActionGroup myCustomGutterPopupGroup;
private final Int2ObjectMap<Color> myTextFgColors = new Int2ObjectOpenHashMap<>();
private boolean myPaintBackground = true;
private boolean myLeftFreePaintersAreaShown;
private boolean myRightFreePaintersAreaShown;
boolean myForceLeftFreePaintersAreaShown;
boolean myForceRightFreePaintersAreaShown;
private short myForcedLeftFreePaintersAreaWidth = -1;
private short myForcedRightFreePaintersAreaWidth = -1;
private int myLastNonDumbModeIconAreaWidth;
boolean myDnDInProgress;
@Nullable private AccessibleGutterLine myAccessibleGutterLine;
EditorGutterComponentImpl(@NotNull EditorImpl editor) {
myEditor = editor;
if (!ApplicationManager.getApplication().isHeadlessEnvironment()) {
installDnD();
}
setOpaque(true);
myAnchorsDisplayStrategy = new FoldingAnchorsOverlayStrategy(editor);
Project project = myEditor.getProject();
if (project != null) {
project.getMessageBus().connect(myEditor.getDisposable()).subscribe(DumbService.DUMB_MODE, new DumbService.DumbModeListener() {
@Override
public void exitDumbMode() {
updateSize();
}
});
}
if (ScreenReader.isActive()) {
AccessibleGutterLine.installListeners(this);
}
else {
ScreenReader.addPropertyChangeListener(ScreenReader.SCREEN_READER_ACTIVE_PROPERTY, editor.getDisposable(), e -> {
if ((boolean)e.getNewValue()) {
AccessibleGutterLine.installListeners(this);
}
});
}
UISettings.setupEditorAntialiasing(this);
}
@NotNull
EditorImpl getEditor() {
return myEditor;
}
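// Sets up drag-and-drop for the gutter: gutter icons with a GutterDraggableObject can be dragged to another
// line (e.g. moving breakpoints), and native text drops from the editor are accepted when DnD is enabled.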
private void installDnD() {
DnDSupport.createBuilder(this)
.setBeanProvider(info -> {
final GutterIconRenderer renderer = getGutterRenderer(info.getPoint());
if (renderer != null &&
renderer.getDraggableObject() != null &&
(info.isCopy() || info.isMove())) {
myDnDInProgress = true;
return new DnDDragStartBean(renderer);
}
return null;
})
.setDropHandlerWithResult(e -> {
boolean success = true;
final Object attachedObject = e.getAttachedObject();
if (attachedObject instanceof GutterIconRenderer && checkDumbAware(attachedObject)) {
final GutterDraggableObject draggableObject = ((GutterIconRenderer)attachedObject).getDraggableObject();
if (draggableObject != null) {
final int line = convertPointToLineNumber(e.getPoint());
if (line != -1) {
draggableObject.copy(line, myEditor.getVirtualFile(), e.getAction().getActionId());
}
}
}
else if (attachedObject instanceof DnDNativeTarget.EventInfo && myEditor.getSettings().isDndEnabled()) {
Transferable transferable = ((DnDNativeTarget.EventInfo)attachedObject).getTransferable();
if (transferable != null && transferable.isDataFlavorSupported(DataFlavor.stringFlavor)) {
success = EditorImpl.handleDrop(myEditor, transferable, e.getAction().getActionId());
}
}
myDnDInProgress = false;
return success;
})
.setTargetChecker(e -> {
final Object attachedObject = e.getAttachedObject();
if (attachedObject instanceof GutterIconRenderer && checkDumbAware(attachedObject)) {
final GutterDraggableObject draggableObject = ((GutterIconRenderer)attachedObject).getDraggableObject();
if (draggableObject != null) {
final int line = convertPointToLineNumber(e.getPoint());
if (line != -1) {
e.setDropPossible(true);
e.setCursor(draggableObject.getCursor(line, myEditor.getVirtualFile(), e.getAction().getActionId()));
}
}
}
else if (attachedObject instanceof DnDNativeTarget.EventInfo && myEditor.getSettings().isDndEnabled()) {
Transferable transferable = ((DnDNativeTarget.EventInfo)attachedObject).getTransferable();
if (transferable != null && transferable.isDataFlavorSupported(DataFlavor.stringFlavor)) {
final int line = convertPointToLineNumber(e.getPoint());
if (line != -1) {
e.setDropPossible(true);
myEditor.getCaretModel().moveToOffset(myEditor.getDocument().getLineStartOffset(line));
}
}
}
return true;
})
.setImageProvider(info -> {
// [tav] temp workaround for JRE-224
boolean inUserScale = !SystemInfo.isWindows || !StartupUiUtil.isJreHiDPI(myEditor.getComponent());
Image image = ImageUtil.toBufferedImage(getDragImage(getGutterRenderer(info.getPoint())), inUserScale);
return new DnDImage(image, new Point(image.getWidth(null) / 2, image.getHeight(null) / 2));
})
.enableAsNativeTarget() // required to accept dragging from editor (as editor component doesn't use DnDSupport to implement drag'n'drop)
.install();
}
Image getDragImage(GutterMark renderer) {
return IconUtil.toImage(scaleIcon(renderer.getIcon()));
}
private void fireResized() {
processComponentEvent(new ComponentEvent(this, ComponentEvent.COMPONENT_RESIZED));
}
@Override
public Dimension getPreferredSize() {
int w = getFoldingAreaOffset() + getFoldingAreaWidth();
Dimension size = new Dimension(w, myEditor.getPreferredHeight());
JBInsets.addTo(size, getInsets());
return size;
}
@Override
protected void setUI(ComponentUI newUI) {
super.setUI(newUI);
reinitSettings(true);
}
@Override
public void updateUI() {
super.updateUI();
reinitSettings(true);
}
public void reinitSettings(boolean updateGutterSize) {
updateSize(false, updateGutterSize);
repaint();
}
@Override
protected Graphics getComponentGraphics(Graphics graphics) {
return JBSwingUtilities.runGlobalCGTransform(this, super.getComponentGraphics(graphics));
}
@Override
public void paintComponent(Graphics g_) {
Rectangle clip = g_.getClipBounds();
Graphics2D g = (Graphics2D)getComponentGraphics(g_);
if (myEditor.isDisposed()) {
g.setColor(EditorImpl.getDisposedBackground());
g.fillRect(clip.x, clip.y, clip.width, clip.height);
return;
}
AffineTransform old = setMirrorTransformIfNeeded(g, 0, getWidth());
EditorUIUtil.setupAntialiasing(g);
Color backgroundColor = getBackground();
int startVisualLine;
int endVisualLine;
int firstVisibleOffset;
int lastVisibleOffset;
Segment focusModeRange = myEditor.getFocusModeRange();
if (focusModeRange == null) {
startVisualLine = myEditor.yToVisualLine(clip.y);
endVisualLine = myEditor.yToVisualLine(clip.y + clip.height - 1);
firstVisibleOffset = myEditor.visualLineStartOffset(startVisualLine);
lastVisibleOffset = myEditor.visualLineStartOffset(endVisualLine + 1);
}
else {
firstVisibleOffset = focusModeRange.getStartOffset();
lastVisibleOffset = focusModeRange.getEndOffset();
startVisualLine = myEditor.offsetToVisualLine(firstVisibleOffset);
endVisualLine = myEditor.offsetToVisualLine(lastVisibleOffset);
}
if (firstVisibleOffset > lastVisibleOffset) {
LOG.error("Unexpected painting range: (" + firstVisibleOffset + ":" + lastVisibleOffset
+ "), visual line range: (" + startVisualLine + ":" + endVisualLine
+ "), clip: " + clip + ", focus range: " + focusModeRange);
}
// paint all backgrounds
int gutterSeparatorX = getWhitespaceSeparatorOffset();
paintBackground(g, clip, 0, gutterSeparatorX, backgroundColor);
paintBackground(g, clip, gutterSeparatorX, getFoldingAreaWidth(), myEditor.getBackgroundColor());
paintEditorBackgrounds(g, firstVisibleOffset, lastVisibleOffset);
Object hint = g.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
if (!JreHiDpiUtil.isJreHiDPI(g)) g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
try {
paintAnnotations(g, startVisualLine, endVisualLine);
if (focusModeRange != null) {
int startY = Math.max(myEditor.visualLineToY(startVisualLine), clip.y);
int endY = Math.min(myEditor.visualLineToY(endVisualLine), clip.y + clip.height);
g.setClip(clip.x, startY, clip.width, endY - startY);
}
paintLineMarkers(g, firstVisibleOffset, lastVisibleOffset, startVisualLine, endVisualLine);
g.setClip(clip);
paintFoldingLines(g, clip);
paintFoldingTree(g, clip, firstVisibleOffset, lastVisibleOffset);
paintLineNumbers(g, startVisualLine, endVisualLine);
paintCurrentAccessibleLine(g);
}
finally {
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, hint);
}
if (old != null) g.setTransform(old);
}
private void paintEditorBackgrounds(Graphics g, int firstVisibleOffset, int lastVisibleOffset) {
myTextFgColors.clear();
Color defaultBackgroundColor = myEditor.getBackgroundColor();
Color defaultForegroundColor = myEditor.getColorsScheme().getDefaultForeground();
int startX = myEditor.isInDistractionFreeMode() ? 0 : getWhitespaceSeparatorOffset();
IterationState state = new IterationState(myEditor, firstVisibleOffset, lastVisibleOffset, null, true, false, true, false);
while (!state.atEnd()) {
drawEditorBackgroundForRange(g, state.getStartOffset(), state.getEndOffset(), state.getMergedAttributes(),
defaultBackgroundColor, defaultForegroundColor, startX);
state.advance();
}
}
private void drawEditorBackgroundForRange(Graphics g, int startOffset, int endOffset, TextAttributes attributes,
Color defaultBackgroundColor, Color defaultForegroundColor, int startX) {
Color bgColor = myEditor.getBackgroundColor(attributes);
if (Comparing.equal(bgColor, defaultBackgroundColor)) return;
VisualPosition visualStart = myEditor.offsetToVisualPosition(startOffset, true, false);
VisualPosition visualEnd = myEditor.offsetToVisualPosition(endOffset, false, false);
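// Round to whole visual lines: a line gets the background only if the range covers it entirely.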
int startVisualLine = visualStart.getLine() + (visualStart.getColumn() == 0 ? 0 : 1);
int endVisualLine = visualEnd.getLine() - (visualEnd.getColumn() == 0 ? 1 : 0);
if (startVisualLine <= endVisualLine) {
int startY = myEditor.visualLineToY(startVisualLine);
int endY = myEditor.visualLineToY(endVisualLine) + myEditor.getLineHeight();
g.setColor(bgColor);
g.fillRect(startX, startY, getWidth() - startX, endY - startY);
Color fgColor = attributes.getForegroundColor();
if (!Comparing.equal(fgColor, defaultForegroundColor)) {
for (int line = startVisualLine; line <= endVisualLine; line++) {
myTextFgColors.put(line, fgColor);
}
}
}
}
private void processClose(final MouseEvent e) {
final IdeEventQueue queue = IdeEventQueue.getInstance();
// See IDEA-59553 for rationale on why this feature is disabled
//if (isLineNumbersShown()) {
// if (e.getX() >= getLineNumberAreaOffset() && getLineNumberAreaOffset() + getLineNumberAreaWidth() >= e.getX()) {
// queue.blockNextEvents(e);
// myEditor.getSettings().setLineNumbersShown(false);
// e.consume();
// return;
// }
//}
if (getGutterRenderer(e) != null) return;
if (myEditor.getMouseEventArea(e) == EditorMouseEventArea.ANNOTATIONS_AREA) {
queue.blockNextEvents(e);
closeAllAnnotations();
e.consume();
}
}
private void paintAnnotations(Graphics2D g, int startVisualLine, int endVisualLine) {
int x = getAnnotationsAreaOffset();
int w = getAnnotationsAreaWidthEx();
if (w == 0) return;
int viewportStartY = myEditor.getScrollingModel().getVisibleArea().y;
AffineTransform old = setMirrorTransformIfNeeded(g, x, w);
try {
Color color = myEditor.getColorsScheme().getColor(EditorColors.ANNOTATIONS_COLOR);
g.setColor(color != null ? color : JBColor.blue);
g.setFont(myEditor.getColorsScheme().getFont(EditorFontType.PLAIN));
for (int i = 0; i < myTextAnnotationGutters.size(); i++) {
TextAnnotationGutterProvider gutterProvider = myTextAnnotationGutters.get(i);
int lineHeight = myEditor.getLineHeight();
int lastLine = myEditor.logicalToVisualPosition(new LogicalPosition(endLineNumber(), 0)).line;
endVisualLine = Math.min(endVisualLine, lastLine);
if (startVisualLine > endVisualLine) {
break;
}
int annotationSize = myTextAnnotationGutterSizes.getInt(i);
int logicalLine = -1;
Color bg = null;
VisualLinesIterator visLinesIterator = new VisualLinesIterator(myEditor, startVisualLine);
while (!visLinesIterator.atEnd() && visLinesIterator.getVisualLine() <= endVisualLine) {
int y = visLinesIterator.getY();
int bgLineHeight = lineHeight;
boolean paintText = !visLinesIterator.startsWithSoftWrap() || y <= viewportStartY;
if (y < viewportStartY && visLinesIterator.endsWithSoftWrap()) { // "sticky" line annotation
y = viewportStartY;
}
else if (viewportStartY < y && y < viewportStartY + lineHeight && visLinesIterator.startsWithSoftWrap()) {
// avoid drawing bg over the "sticky" line above, or over a possible gap in the gutter below (e.g. code vision)
bgLineHeight = y - viewportStartY;
y = viewportStartY + lineHeight;
}
if (paintText || logicalLine == -1) {
logicalLine = visLinesIterator.getDisplayedLogicalLine();
bg = gutterProvider.getBgColor(logicalLine, myEditor);
}
if (bg != null) {
g.setColor(bg);
g.fillRect(x, y, annotationSize, bgLineHeight);
}
if (paintText) {
paintAnnotationLine(g, gutterProvider, logicalLine, x, y);
}
visLinesIterator.advance();
}
x += annotationSize;
}
}
finally {
if (old != null) g.setTransform(old);
}
}
private void paintAnnotationLine(Graphics g, TextAnnotationGutterProvider gutterProvider, int line, int x, int y) {
String s = gutterProvider.getLineText(line, myEditor);
if (!StringUtil.isEmpty(s)) {
g.setColor(myEditor.getColorsScheme().getColor(gutterProvider.getColor(line, myEditor)));
EditorFontType style = gutterProvider.getStyle(line, myEditor);
Font font = getFontForText(s, style);
g.setFont(font);
g.drawString(s, (gutterProvider.useMargin() ? getGapBetweenAnnotations() / 2 : 0) + x, y + myEditor.getAscent());
}
}
private Font getFontForText(String text, EditorFontType style) {
Font font = myEditor.getColorsScheme().getFont(style);
return UIUtil.getFontWithFallbackIfNeeded(font, text);
}
private void paintFoldingTree(@NotNull Graphics g, @NotNull Rectangle clip, int firstVisibleOffset, int lastVisibleOffset) {
if (isFoldingOutlineShown()) {
doPaintFoldingTree((Graphics2D)g, clip, firstVisibleOffset, lastVisibleOffset);
}
}
private void paintLineMarkers(Graphics2D g, int firstVisibleOffset, int lastVisibleOffset, int firstVisibleLine, int lastVisibleLine) {
if (isLineMarkersShown()) {
paintGutterRenderers(g, firstVisibleOffset, lastVisibleOffset, firstVisibleLine, lastVisibleLine);
}
}
private void paintBackground(final Graphics g,
final Rectangle clip,
final int x,
final int width,
Color background) {
g.setColor(background);
g.fillRect(x, clip.y, width, clip.height);
paintCaretRowBackground(g, x, width);
}
private void paintCaretRowBackground(final Graphics g, final int x, final int width) {
if (!myEditor.getSettings().isCaretRowShown()) return;
final VisualPosition visCaret = myEditor.getCaretModel().getVisualPosition();
Color caretRowColor = myEditor.getColorsScheme().getColor(EditorColors.CARET_ROW_COLOR);
if (caretRowColor != null) {
g.setColor(caretRowColor);
final Point caretPoint = myEditor.visualPositionToXY(visCaret);
g.fillRect(x, caretPoint.y, width, myEditor.getLineHeight());
}
}
private void paintLineNumbers(Graphics2D g, int startVisualLine, int endVisualLine) {
if (isLineNumbersShown()) {
int offset = getLineNumberAreaOffset() + myLineNumberAreaWidth;
doPaintLineNumbers(g, startVisualLine, endVisualLine, offset, myLineNumberConverter);
if (myAdditionalLineNumberConverter != null) {
doPaintLineNumbers(g, startVisualLine, endVisualLine, offset + getAreaWidthWithGap(myAdditionalLineNumberAreaWidth),
myAdditionalLineNumberConverter);
}
}
}
private void paintCurrentAccessibleLine(Graphics2D g) {
if (myAccessibleGutterLine != null) {
myAccessibleGutterLine.paint(g);
}
}
@Override
public Color getBackground() {
if (myEditor.isInDistractionFreeMode() || !myPaintBackground) {
return myEditor.getBackgroundColor();
}
Color color = myEditor.getColorsScheme().getColor(EditorColors.GUTTER_BACKGROUND);
return color != null ? color : EditorColors.GUTTER_BACKGROUND.getDefaultColor();
}
private Font getFontForLineNumbers() {
Font editorFont = myEditor.getColorsScheme().getFont(EditorFontType.PLAIN);
float editorFontSize = editorFont.getSize2D();
return editorFont.deriveFont(Math.max(1f, editorFontSize - 1f));
}
private int calcLineNumbersAreaWidth(int maxLineNumber) {
return FontLayoutService.getInstance().stringWidth(getFontMetrics(getFontForLineNumbers()), Integer.toString(maxLineNumber));
}
private void doPaintLineNumbers(Graphics2D g, int startVisualLine, int endVisualLine, int offset,
@NotNull LineNumberConverter converter) {
int lastLine = myEditor.logicalToVisualPosition(
new LogicalPosition(endLineNumber(), 0))
.line;
endVisualLine = Math.min(endVisualLine, lastLine);
if (startVisualLine > endVisualLine) {
return;
}
Color color = myEditor.getColorsScheme().getColor(EditorColors.LINE_NUMBERS_COLOR);
Color colorUnderCaretRow = myEditor.getColorsScheme().getColor(EditorColors.LINE_NUMBER_ON_CARET_ROW_COLOR);
Font font = getFontForLineNumbers();
g.setFont(font);
int viewportStartY = myEditor.getScrollingModel().getVisibleArea().y;
AffineTransform old = setMirrorTransformIfNeeded(g, getLineNumberAreaOffset(), getLineNumberAreaWidth());
try {
int caretLogicalLine = myEditor.getCaretModel().getLogicalPosition().line;
VisualLinesIterator visLinesIterator = new VisualLinesIterator(myEditor, startVisualLine);
while (!visLinesIterator.atEnd() && visLinesIterator.getVisualLine() <= endVisualLine) {
if (!visLinesIterator.startsWithSoftWrap() || visLinesIterator.getY() <= viewportStartY) {
int logicalLine = visLinesIterator.getDisplayedLogicalLine();
Integer lineToDisplay = converter.convert(myEditor, logicalLine + 1);
if (lineToDisplay != null) {
int y = visLinesIterator.getY();
if (y < viewportStartY && visLinesIterator.endsWithSoftWrap()) { // "sticky" line number
y = viewportStartY;
}
if (myEditor.isInDistractionFreeMode()) {
Color fgColor = myTextFgColors.get(visLinesIterator.getVisualLine());
g.setColor(fgColor != null ? fgColor : color != null ? color : JBColor.blue);
} else {
g.setColor(color);
}
if (colorUnderCaretRow != null && caretLogicalLine == logicalLine) {
g.setColor(colorUnderCaretRow);
}
String s = String.valueOf(lineToDisplay);
int textOffset = isMirrored() ?
offset - getLineNumberAreaWidth() - 1 :
offset - FontLayoutService.getInstance().stringWidth(g.getFontMetrics(), s);
g.drawString(s,
textOffset,
y + myEditor.getAscent());
}
}
visLinesIterator.advance();
}
}
finally {
if (old != null) g.setTransform(old);
}
}
private int endLineNumber() {
return Math.max(0, myEditor.getDocument().getLineCount() - 1);
}
@Nullable
@Override
public Object getData(@NotNull @NonNls String dataId) {
if (myEditor.isDisposed()) return null;
if (EditorGutter.KEY.is(dataId)) {
return this;
}
if (CommonDataKeys.EDITOR.is(dataId)) {
return myEditor;
}
if (EditorGutterComponentEx.LOGICAL_LINE_AT_CURSOR.is(dataId)) {
if (myLastActionableClick == null) return null;
return myLastActionableClick.myLogicalLineAtCursor;
}
if (EditorGutterComponentEx.ICON_CENTER_POSITION.is(dataId)) {
if (myLastActionableClick == null) return null;
return myLastActionableClick.myIconCenterPosition;
}
return null;
}
@FunctionalInterface
interface RangeHighlighterProcessor {
void process(@NotNull RangeHighlighter highlighter);
}
void processRangeHighlighters(int startOffset, int endOffset, @NotNull RangeHighlighterProcessor processor) {
// we only process highlighters between the line starting at startOffset and the line ending at endOffset
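// merge the document and editor markup iterators, always processing the highlighter with the smaller start offset first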
MarkupIterator<RangeHighlighterEx> docHighlighters =
myEditor.getFilteredDocumentMarkupModel().overlappingIterator(startOffset, endOffset, true);
MarkupIterator<RangeHighlighterEx> editorHighlighters =
myEditor.getMarkupModel().overlappingIterator(startOffset, endOffset, true);
try {
RangeHighlighterEx lastDocHighlighter = null;
RangeHighlighterEx lastEditorHighlighter = null;
while (true) {
if (lastDocHighlighter == null && docHighlighters.hasNext()) {
lastDocHighlighter = docHighlighters.next();
if (lastDocHighlighter.getAffectedAreaStartOffset() > endOffset) {
lastDocHighlighter = null;
continue;
}
if (lastDocHighlighter.getAffectedAreaEndOffset() < startOffset) {
lastDocHighlighter = null;
continue;
}
}
if (lastEditorHighlighter == null && editorHighlighters.hasNext()) {
lastEditorHighlighter = editorHighlighters.next();
if (lastEditorHighlighter.getAffectedAreaStartOffset() > endOffset) {
lastEditorHighlighter = null;
continue;
}
if (lastEditorHighlighter.getAffectedAreaEndOffset() < startOffset) {
lastEditorHighlighter = null;
continue;
}
}
if (lastDocHighlighter == null && lastEditorHighlighter == null) return;
final RangeHighlighterEx lowerHighlighter;
if (less(lastDocHighlighter, lastEditorHighlighter)) {
lowerHighlighter = lastDocHighlighter;
lastDocHighlighter = null;
}
else {
lowerHighlighter = lastEditorHighlighter;
lastEditorHighlighter = null;
}
processor.process(lowerHighlighter);
}
}
finally {
docHighlighters.dispose();
editorHighlighters.dispose();
}
}
private static boolean isValidLine(@NotNull Document document, int line) {
if (line < 0) return false;
int lineCount = document.getLineCount();
return lineCount == 0 ? line == 0 : line < lineCount;
}
private static boolean less(RangeHighlighter h1, RangeHighlighter h2) {
return h1 != null && (h2 == null || h1.getStartOffset() < h2.getStartOffset());
}
@Override
public void revalidateMarkup() {
updateSize();
}
void updateSizeOnShowNotify() {
updateSize(false, true);
}
public void updateSize() {
updateSize(false, false);
}
void updateSize(boolean onLayout, boolean canShrink) {
int prevHash = sizeHash();
if (!onLayout) {
clearLineToGutterRenderersCache();
calcLineNumberAreaWidth();
calcLineMarkerAreaWidth(canShrink);
calcAnnotationsSize();
}
calcAnnotationExtraSize();
if (prevHash != sizeHash()) {
fireResized();
}
repaint();
}
private int sizeHash() {
int result = getLineMarkerAreaWidth();
result = 31 * result + myTextAnnotationGuttersSize;
result = 31 * result + myTextAnnotationExtraSize;
result = 31 * result + getLineNumberAreaWidth();
return result;
}
private void calcAnnotationsSize() {
myTextAnnotationGuttersSize = 0;
myGapAfterAnnotations = false;
final int lineCount = Math.max(myEditor.getDocument().getLineCount(), 1);
final int guttersCount = myTextAnnotationGutters.size();
for (int j = 0; j < guttersCount; j++) {
TextAnnotationGutterProvider gutterProvider = myTextAnnotationGutters.get(j);
int gutterSize = 0;
for (int i = 0; i < lineCount; i++) {
String lineText = gutterProvider.getLineText(i, myEditor);
if (!StringUtil.isEmpty(lineText)) {
EditorFontType style = gutterProvider.getStyle(i, myEditor);
Font font = getFontForText(lineText, style);
FontMetrics fontMetrics = getFontMetrics(font);
gutterSize = Math.max(gutterSize, fontMetrics.stringWidth(lineText));
}
}
if (gutterSize > 0) {
boolean margin = gutterProvider.useMargin();
myGapAfterAnnotations = margin;
if (margin) {
gutterSize += getGapBetweenAnnotations();
}
}
myTextAnnotationGutterSizes.set(j, gutterSize);
myTextAnnotationGuttersSize += gutterSize;
}
}
private void calcAnnotationExtraSize() {
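// In distraction-free mode, grow the annotation area so that the text column up to the right margin
// ends up horizontally centered within the surrounding editor composite.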
myTextAnnotationExtraSize = 0;
if (!myEditor.isInDistractionFreeMode() || isMirrored()) return;
Component outerContainer = ComponentUtil.findParentByCondition(myEditor.getComponent(), c -> EditorComposite.isEditorComposite(c));
if (outerContainer == null) return;
EditorSettings settings = myEditor.getSettings();
Project project = myEditor.getProject();
if (project != null && project.isDisposed()) return;
int rightMargin = settings.getRightMargin(project);
if (rightMargin <= 0) return;
JComponent editorComponent = myEditor.getComponent();
RelativePoint point = new RelativePoint(editorComponent, new Point(0, 0));
Point editorLocationInWindow = point.getPoint(outerContainer);
int editorLocationX = (int)editorLocationInWindow.getX();
int rightMarginX = rightMargin * EditorUtil.getSpaceWidth(Font.PLAIN, myEditor) + editorLocationX;
int width = editorLocationX + editorComponent.getWidth();
if (rightMarginX < width && editorLocationX < width - rightMarginX) {
int centeredSize = (width - rightMarginX - editorLocationX) / 2 - (getLineMarkerAreaWidth() + getLineNumberAreaWidth() +
getFoldingAreaWidth() + 2 * getGapBetweenAreas());
myTextAnnotationExtraSize = Math.max(0, centeredSize - myTextAnnotationGuttersSize);
}
}
private boolean logicalLinesMatchVisualOnes() {
return myEditor.getSoftWrapModel().getSoftWrapsIntroducedLinesNumber() == 0 &&
myEditor.getFoldingModel().getTotalNumberOfFoldedLines() == 0;
}
void clearLineToGutterRenderersCache() {
myLineToGutterRenderers = null;
}
private void buildGutterRenderersCache() {
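// Groups visible gutter icon renderers by visual line and lets GutterMarkPreprocessor extensions rewrite each line's marks.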
myLineToGutterRenderersCacheForLogicalLines = logicalLinesMatchVisualOnes();
myLineToGutterRenderers = new Int2ObjectOpenHashMap<>();
processRangeHighlighters(0, myEditor.getDocument().getTextLength(), highlighter -> {
GutterMark renderer = highlighter.getGutterIconRenderer();
if (!shouldBeShown(renderer)) {
return;
}
if (!isHighlighterVisible(highlighter)) {
return;
}
int line = myEditor.offsetToVisualLine(highlighter.getStartOffset());
List<GutterMark> renderers = myLineToGutterRenderers.get(line);
if (renderers == null) {
renderers = new SmartList<>();
myLineToGutterRenderers.put(line, renderers);
}
renderers.add(renderer);
});
List<GutterMarkPreprocessor> gutterMarkPreprocessors = GutterMarkPreprocessor.EP_NAME.getExtensionList();
for (Int2ObjectMap.Entry<List<GutterMark>> entry : Int2ObjectMaps.fastIterable(myLineToGutterRenderers)) {
List<GutterMark> newValue = entry.getValue();
for (GutterMarkPreprocessor preprocessor : gutterMarkPreprocessors) {
newValue = preprocessor.processMarkers(newValue);
}
// don't allow more than 4 icons per line
entry.setValue(ContainerUtil.getFirstItems(newValue, 4));
}
}
private boolean shouldBeShown(@Nullable GutterMark gutterIconRenderer) {
return gutterIconRenderer != null && (areIconsShown() || gutterIconRenderer instanceof NonHideableIconGutterMark);
}
private void calcLineMarkerAreaWidth(boolean canShrink) {
myLeftFreePaintersAreaShown = myForceLeftFreePaintersAreaShown;
myRightFreePaintersAreaShown = myForceRightFreePaintersAreaShown;
processRangeHighlighters(0, myEditor.getDocument().getTextLength(), highlighter -> {
LineMarkerRenderer lineMarkerRenderer = highlighter.getLineMarkerRenderer();
if (lineMarkerRenderer != null) {
LineMarkerRendererEx.Position position = getLineMarkerPosition(lineMarkerRenderer);
if (position == LineMarkerRendererEx.Position.LEFT && isLineMarkerVisible(highlighter)) myLeftFreePaintersAreaShown = true;
if (position == LineMarkerRendererEx.Position.RIGHT && isLineMarkerVisible(highlighter)) myRightFreePaintersAreaShown = true;
}
});
int minWidth = areIconsShown() ? scaleWidth(myStartIconAreaWidth) : 0;
myIconsAreaWidth = canShrink ? minWidth : Math.max(myIconsAreaWidth, minWidth);
for (Int2ObjectMap.Entry<List<GutterMark>> entry : processGutterRenderers()) {
int width = 1;
List<GutterMark> renderers = entry.getValue();
for (int i = 0; i < renderers.size(); i++) {
GutterMark renderer = renderers.get(i);
if (!checkDumbAware(renderer)) continue;
width += scaleIcon(renderer.getIcon()).getIconWidth();
if (i > 0) width += getGapBetweenIcons();
}
if (myIconsAreaWidth < width) {
myIconsAreaWidth = width + 1;
}
}
myHasInlaysWithGutterIcons = false;
myEditor.getInlayModel().getBlockElementsInRange(0, myEditor.getDocument().getTextLength()).forEach(inlay -> {
GutterIconRenderer iconRenderer = inlay.getGutterIconRenderer();
if (shouldBeShown(iconRenderer) && checkDumbAware(iconRenderer) && !EditorUtil.isInlayFolded(inlay)) {
Icon icon = scaleIcon(iconRenderer.getIcon());
if (icon.getIconHeight() <= inlay.getHeightInPixels()) {
myHasInlaysWithGutterIcons = true;
myIconsAreaWidth = Math.max(myIconsAreaWidth, icon.getIconWidth());
}
}
});
if (isDumbMode()) {
myIconsAreaWidth = Math.max(myIconsAreaWidth, myLastNonDumbModeIconAreaWidth);
}
else {
myLastNonDumbModeIconAreaWidth = myIconsAreaWidth;
}
}
@Override
@NotNull
public List<GutterMark> getGutterRenderers(int line) {
if (myLineToGutterRenderers == null || myLineToGutterRenderersCacheForLogicalLines != logicalLinesMatchVisualOnes()) {
buildGutterRenderersCache();
}
Segment focusModeRange = myEditor.getFocusModeRange();
if (focusModeRange != null) {
int start = myEditor.offsetToVisualLine(focusModeRange.getStartOffset());
int end = myEditor.offsetToVisualLine(focusModeRange.getEndOffset());
if (line < start || line > end) return Collections.emptyList();
}
List<GutterMark> marks = myLineToGutterRenderers.get(line);
return marks != null ? marks : Collections.emptyList();
}
private @NotNull ObjectIterable<Int2ObjectMap.Entry<List<GutterMark>>> processGutterRenderers() {
if (myLineToGutterRenderers == null || myLineToGutterRenderersCacheForLogicalLines != logicalLinesMatchVisualOnes()) {
buildGutterRenderersCache();
}
return Int2ObjectMaps.fastIterable(myLineToGutterRenderers);
}
private boolean isHighlighterVisible(RangeHighlighter highlighter) {
return !FoldingUtil.isHighlighterFolded(myEditor, highlighter);
}
private void paintGutterRenderers(final Graphics2D g,
int firstVisibleOffset, int lastVisibleOffset, int firstVisibleLine, int lastVisibleLine) {
Object hint = g.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
try {
List<RangeHighlighter> highlighters = new ArrayList<>();
processRangeHighlighters(firstVisibleOffset, lastVisibleOffset, highlighter -> {
LineMarkerRenderer renderer = highlighter.getLineMarkerRenderer();
if (renderer != null) highlighters.add(highlighter);
});
ContainerUtil.sort(highlighters, Comparator.comparingInt(RangeHighlighter::getLayer));
for (RangeHighlighter highlighter : highlighters) {
paintLineMarkerRenderer(highlighter, g);
}
}
finally {
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, hint);
}
paintIcons(firstVisibleLine, lastVisibleLine, g);
}
private void paintIcons(final int firstVisibleLine, final int lastVisibleLine, final Graphics2D g) {
VisualLinesIterator visLinesIterator = new VisualLinesIterator(myEditor, firstVisibleLine);
while (!visLinesIterator.atEnd()) {
int visualLine = visLinesIterator.getVisualLine();
if (visualLine > lastVisibleLine) break;
int y = visLinesIterator.getY();
List<GutterMark> renderers = getGutterRenderers(visualLine);
paintIconRow(y, renderers, g);
if (myHasInlaysWithGutterIcons) {
Rectangle clip = g.getClipBounds();
int curY = y;
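// walk upwards through the block inlays above this line, painting their gutter icons until we leave the clip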
for (Inlay<?> inlay : visLinesIterator.getBlockInlaysAbove()) {
if (curY <= clip.y) break;
int height = inlay.getHeightInPixels();
if (height > 0) {
int newY = curY - height;
paintInlayIcon(inlay, g, newY);
curY = newY;
}
}
curY = y + myEditor.getLineHeight();
for (Inlay<?> inlay : visLinesIterator.getBlockInlaysBelow()) {
if (curY >= clip.y + clip.height) break;
int height = inlay.getHeightInPixels();
if (height > 0) {
paintInlayIcon(inlay, g, curY);
curY += height;
}
}
}
visLinesIterator.advance();
}
}
private void paintInlayIcon(Inlay<?> inlay, Graphics2D g, int y) {
GutterIconRenderer iconRenderer = inlay.getGutterIconRenderer();
if (shouldBeShown(iconRenderer) && checkDumbAware(iconRenderer)) {
Icon icon = scaleIcon(iconRenderer.getIcon());
if (icon.getIconHeight() <= inlay.getHeightInPixels()) {
int iconWidth = icon.getIconWidth();
int x = getIconAreaOffset() + myIconsAreaWidth - iconWidth;
y += getTextAlignmentShiftForInlayIcon(icon, inlay);
AffineTransform old = setMirrorTransformIfNeeded(g, x, iconWidth);
icon.paintIcon(this, g, x, y);
if (old != null) g.setTransform(old);
}
}
}
private void paintIconRow(int lineY, List<? extends GutterMark> row, final Graphics2D g) {
processIconsRowForY(lineY, row, (x, y, renderer) -> {
Icon icon = scaleIcon(renderer.getIcon());
AffineTransform old = setMirrorTransformIfNeeded(g, x, icon.getIconWidth());
try {
icon.paintIcon(this, g, x, y);
}
finally {
if (old != null) g.setTransform(old);
}
});
}
private void paintLineMarkerRenderer(@NotNull RangeHighlighter highlighter, @NotNull Graphics g) {
LineMarkerRenderer lineMarkerRenderer = highlighter.getLineMarkerRenderer();
if (lineMarkerRenderer != null) {
Rectangle rectangle = getLineRendererRectangle(highlighter);
if (rectangle != null) {
lineMarkerRenderer.paint(myEditor, g, rectangle);
}
}
}
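// A line marker is hidden only when its start and end offsets fall into the
// same collapsed fold region; a marker starting outside any collapsed region,
// or spanning a fold boundary, is still visible.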
private boolean isLineMarkerVisible(RangeHighlighter highlighter) {
int startOffset = highlighter.getStartOffset();
int endOffset = highlighter.getEndOffset();
FoldRegion startFoldRegion = myEditor.getFoldingModel().getCollapsedRegionAtOffset(startOffset);
FoldRegion endFoldRegion = myEditor.getFoldingModel().getCollapsedRegionAtOffset(endOffset);
return startFoldRegion == null || !startFoldRegion.equals(endFoldRegion);
}
@Nullable
Rectangle getLineRendererRectangle(RangeHighlighter highlighter) {
if (!isLineMarkerVisible(highlighter)) return null;
int startOffset = highlighter.getStartOffset();
int endOffset = highlighter.getEndOffset();
int startY = myEditor.visualLineToY(myEditor.offsetToVisualLine(startOffset));
// top edge of the last line of the highlighted area
int endY = myEditor.visualLineToY(myEditor.offsetToVisualLine(endOffset));
// => add one line height to make height correct (bottom edge of the highlighted area)
endY += myEditor.getLineHeight();
LineMarkerRenderer renderer = Objects.requireNonNull(highlighter.getLineMarkerRenderer());
LineMarkerRendererEx.Position position = getLineMarkerPosition(renderer);
int w;
int x;
switch (position) {
case LEFT:
w = getLeftFreePaintersAreaWidth();
x = getLeftFreePaintersAreaOffset();
break;
case RIGHT:
w = getRightFreePaintersAreaWidth();
x = getLineMarkerFreePaintersAreaOffset();
break;
case CUSTOM:
w = getWidth();
x = 0;
break;
default:
throw new IllegalArgumentException(position.name());
}
int height = endY - startY;
return new Rectangle(x, startY, w, height);
}
@FunctionalInterface
interface LineGutterIconRendererProcessor {
void process(int x, int y, @NotNull GutterMark renderer);
}
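// Gutter icons are scaled along with the editor only when the
// "editor.scale.gutter.icons" registry flag is on; scale factors within 10%
// of 1.0 are treated as 1.0, presumably to avoid resampling icons for a
// negligible zoom.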
private float getEditorScaleFactor() {
if (Registry.is("editor.scale.gutter.icons")) {
float scale = myEditor.getScale();
if (Math.abs(1f - scale) > 0.10f) {
return scale;
}
}
return 1f;
}
Icon scaleIcon(Icon icon) {
float scale = getEditorScaleFactor();
return scale == 1 ? icon : IconUtil.scale(icon, this, scale);
}
private int scaleWidth(int width) {
return (int) (getEditorScaleFactor() * width);
}
void processIconsRow(int line, @NotNull List<? extends GutterMark> row, @NotNull LineGutterIconRendererProcessor processor) {
processIconsRowForY(myEditor.visualLineToY(line), row, processor);
}
// y should be equal to visualLineToY(visualLine)
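// Layout happens in three passes over the row: LEFT-aligned icons are placed
// left-to-right from the icon area offset, RIGHT-aligned icons right-to-left
// from the area's right edge, and CENTER-aligned icons are then centered in
// the space remaining between the two groups.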
private void processIconsRowForY(int y, @NotNull List<? extends GutterMark> row, @NotNull LineGutterIconRendererProcessor processor) {
if (row.isEmpty()) return;
int middleCount = 0;
int middleSize = 0;
int x = getIconAreaOffset() + 2;
for (GutterMark r : row) {
if (!checkDumbAware(r)) continue;
final GutterIconRenderer.Alignment alignment = ((GutterIconRenderer)r).getAlignment();
final Icon icon = scaleIcon(r.getIcon());
if (alignment == GutterIconRenderer.Alignment.LEFT) {
processor.process(x, y + getTextAlignmentShift(icon), r);
x += icon.getIconWidth() + getGapBetweenIcons();
}
else if (alignment == GutterIconRenderer.Alignment.CENTER) {
middleCount++;
middleSize += icon.getIconWidth() + getGapBetweenIcons();
}
}
final int leftSize = x - getIconAreaOffset();
x = getIconAreaOffset() + myIconsAreaWidth;
for (GutterMark r : row) {
if (!checkDumbAware(r)) continue;
if (((GutterIconRenderer)r).getAlignment() == GutterIconRenderer.Alignment.RIGHT) {
Icon icon = scaleIcon(r.getIcon());
x -= icon.getIconWidth();
processor.process(x, y + getTextAlignmentShift(icon), r);
x -= getGapBetweenIcons();
}
}
int rightSize = myIconsAreaWidth + getIconAreaOffset() - x + 1;
if (middleCount > 0) {
middleSize -= getGapBetweenIcons();
x = getIconAreaOffset() + leftSize + (myIconsAreaWidth - leftSize - rightSize - middleSize) / 2;
for (GutterMark r : row) {
if (!checkDumbAware(r)) continue;
if (((GutterIconRenderer)r).getAlignment() == GutterIconRenderer.Alignment.CENTER) {
Icon icon = scaleIcon(r.getIcon());
processor.process(x, y + getTextAlignmentShift(icon), r);
x += icon.getIconWidth() + getGapBetweenIcons();
}
}
}
}
private int getTextAlignmentShiftForInlayIcon(Icon icon, Inlay<?> inlay) {
return Math.min(getTextAlignmentShift(icon), inlay.getHeightInPixels() - icon.getIconHeight());
}
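// Vertical shift for an icon within a line: the larger of (a) centering the
// icon in the line height and (b) aligning its bottom edge with the text
// baseline. In effect, small icons sit on the baseline while tall icons end
// up vertically centered.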
private int getTextAlignmentShift(Icon icon) {
int centerRelative = (myEditor.getLineHeight() - icon.getIconHeight()) / 2;
int baselineRelative = myEditor.getAscent() - icon.getIconHeight();
return Math.max(centerRelative, baselineRelative);
}
private Color getOutlineColor(boolean isActive) {
ColorKey key = isActive ? EditorColors.SELECTED_TEARLINE_COLOR : EditorColors.TEARLINE_COLOR;
Color color = myEditor.getColorsScheme().getColor(key);
return color != null ? color : JBColor.black;
}
@Override
public void registerTextAnnotation(@NotNull TextAnnotationGutterProvider provider) {
myTextAnnotationGutters.add(provider);
myTextAnnotationGutterSizes.add(0);
updateSize();
}
@Override
public void registerTextAnnotation(@NotNull TextAnnotationGutterProvider provider, @NotNull EditorGutterAction action) {
myTextAnnotationGutters.add(provider);
myProviderToListener.put(provider, action);
myTextAnnotationGutterSizes.add(0);
updateSize();
}
@NotNull
@Override
public List<TextAnnotationGutterProvider> getTextAnnotations() {
return new ArrayList<>(myTextAnnotationGutters);
}
private void doPaintFoldingTree(@NotNull Graphics2D g, @NotNull Rectangle clip, int firstVisibleOffset, int lastVisibleOffset) {
final double width = getFoldingAnchorWidth2D();
Collection<DisplayedFoldingAnchor> anchorsToDisplay =
myAnchorsDisplayStrategy.getAnchorsToDisplay(firstVisibleOffset, lastVisibleOffset, myActiveFoldRegions);
for (DisplayedFoldingAnchor anchor : anchorsToDisplay) {
drawFoldingAnchor(width, clip, g, anchor.visualLine, anchor.type, myActiveFoldRegions.contains(anchor.foldRegion));
}
}
private void paintFoldingLines(final Graphics2D g, final Rectangle clip) {
boolean shown = isFoldingOutlineShown();
double x = getWhitespaceSeparatorOffset2D();
if ((shown || myEditor.isInDistractionFreeMode() && Registry.is("editor.distraction.gutter.separator")) && myPaintBackground) {
g.setColor(getOutlineColor(false));
LinePainter2D.paint(g, x, clip.y, x, clip.y + clip.height, StrokeType.CENTERED, getStrokeWidth());
}
if (!shown) return;
myActiveFoldRegions.forEach(region -> {
if (region.isValid() && region.isExpanded()) {
int foldStart = myEditor.offsetToVisualLine(region.getStartOffset());
int foldEnd = myEditor.offsetToVisualLine(region.getEndOffset());
if (foldStart < foldEnd) {
int startY = getLineCenterY(foldStart);
int endY = getLineCenterY(foldEnd);
if (startY <= clip.y + clip.height && endY + 1 + myEditor.getDescent() >= clip.y) {
g.setColor(getOutlineColor(true));
LinePainter2D.paint(g, x, startY, x, endY, StrokeType.CENTERED, getStrokeWidth());
}
}
}
});
}
@Override
public int getWhitespaceSeparatorOffset() {
return (int)Math.round(getWhitespaceSeparatorOffset2D());
}
private double getWhitespaceSeparatorOffset2D() {
return PaintUtil.alignToInt(getFoldingAreaOffset() + getFoldingAnchorWidth() / 2.,
ScaleContext.create(myEditor.getComponent()), RoundingMode.ROUND, null);
}
void setActiveFoldRegions(@NotNull List<FoldRegion> activeFoldRegions) {
if (!myActiveFoldRegions.equals(activeFoldRegions)) {
myActiveFoldRegions = activeFoldRegions;
repaint();
}
}
private int getLineCenterY(int line) {
return myEditor.visualLineToY(line) + myEditor.getLineHeight() / 2;
}
private double getFoldAnchorY(int line, double width) {
return myEditor.visualLineToY(line) + myEditor.getAscent() - width;
}
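// Folding anchors come in four shapes: collapsed regions get a square with a
// plus, single-line expanded regions a square with a minus, and multi-line
// expanded regions a directed box at the top (tip pointing down) and bottom
// (tip pointing up) of the region; painting is skipped when the anchor lies
// outside the clip.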
private void drawFoldingAnchor(double width, @NotNull Rectangle clip, @NotNull Graphics2D g, int visualLine,
@NotNull DisplayedFoldingAnchor.Type type, boolean active) {
double off = width / 4;
double height = width + off;
double baseHeight = height - width / 2;
double y = getFoldAnchorY(visualLine, width);
double centerX = LinePainter2D.getStrokeCenter(g, getWhitespaceSeparatorOffset2D(), StrokeType.CENTERED, getStrokeWidth());
double strokeOff = centerX - getWhitespaceSeparatorOffset2D();
// centerY must carry the same sub-device-pixel offset as centerX so that the square-with-plus rect gets equal width and height in device pixels
double centerY = PaintUtil.alignToInt(y + width / 2, g) + strokeOff;
switch (type) {
case COLLAPSED:
case COLLAPSED_SINGLE_LINE:
if (y <= clip.y + clip.height && y + height >= clip.y) {
drawSquareWithPlusOrMinus(g, centerX, centerY, width, true, active);
}
break;
case EXPANDED_SINGLE_LINE:
if (y <= clip.y + clip.height && y + height >= clip.y) {
drawSquareWithPlusOrMinus(g, centerX, centerY, width, false, active);
}
break;
case EXPANDED_TOP:
if (y <= clip.y + clip.height && y + height >= clip.y) {
drawDirectedBox(g, centerX, centerY, width, height, baseHeight, active);
}
break;
case EXPANDED_BOTTOM:
y += width;
if (y - height <= clip.y + clip.height && y >= clip.y) {
drawDirectedBox(g, centerX, centerY, width, -height, -baseHeight, active);
}
break;
}
}
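/**
 * Draws an expanded-fold anchor: a square box with a directed "tail" pointing
 * towards the folded text. The five polygon points form the box sides plus
 * the tip; a negative height/baseHeight (used for EXPANDED_BOTTOM) mirrors
 * the shape so the tip points upwards instead of downwards.
 */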
private void drawDirectedBox(Graphics2D g,
double centerX,
double centerY,
double width,
double height,
double baseHeight,
boolean active)
{
double sw = getStrokeWidth();
Rectangle2D rect = RectanglePainter2D.align(g,
EnumSet.of(LinePainter2D.Align.CENTER_X, LinePainter2D.Align.CENTER_Y),
centerX, centerY, width, width, StrokeType.CENTERED, sw);
double x1 = rect.getX();
double x2 = x1 + rect.getWidth() - 1;
double y = height > 0 ? rect.getY() : rect.getY() + rect.getHeight() - 1;
double[] dxPoints = {x1, x1, x2, x2, centerX};
double[] dyPoints = {y + baseHeight, y, y, y + baseHeight, y + height + (height < 0 ? 1 : 0)};
if (!SystemInfo.isMac && Registry.is("ide.editor.alternative.folding.icons.painting")) {
GraphicsConfig config = GraphicsUtil.setupAAPainting(g);
g.setStroke(new BasicStroke((float)getStrokeWidth(), BasicStroke.CAP_SQUARE, BasicStroke.JOIN_ROUND));
int ix1 = (int)Math.round(x1);
int ix2 = (int)Math.round(x2);
int[] xPoints = {ix1, ix1, ix2, ix2, (int)Math.round(centerX)};
int iy1 = (int)Math.round(y + baseHeight);
int iy2 = (int)Math.round(y);
int[] yPoints = {iy1, iy2, iy2, iy1, (int)Math.round(y + height + (height < 0 ? 1 : 0))};
//xPoints[4] -= 1;
if (xPoints[4] - xPoints[0] != xPoints[3] - xPoints[4]) {
xPoints[0] += (xPoints[4] - xPoints[0]) - (xPoints[3] - xPoints[4]);
xPoints[1] = xPoints[0];
}
g.setColor(myEditor.getBackgroundColor());
g.fillPolygon(xPoints, yPoints, 5);
g.setColor(getOutlineColor(active));
g.drawPolygon(xPoints, yPoints, 5);
int w = xPoints[3] - xPoints[0];
int off = (int)Math.round(getSquareInnerOffset(w));
int minusY = (int)Math.round(centerY);
g.drawLine(xPoints[0] + off, minusY, xPoints[3] - off, minusY);
config.restore();
} else {
g.setColor(myEditor.getBackgroundColor());
LinePainter2D.fillPolygon(g, dxPoints, dyPoints, 5, StrokeType.CENTERED_CAPS_SQUARE, sw, RenderingHints.VALUE_ANTIALIAS_ON);
g.setColor(getOutlineColor(active));
LinePainter2D.paintPolygon(g, dxPoints, dyPoints, 5, StrokeType.CENTERED_CAPS_SQUARE, sw, RenderingHints.VALUE_ANTIALIAS_ON);
drawLine(g, false, centerX, centerY, width, sw);
}
}
private void drawLine(Graphics2D g, boolean vertical, double centerX, double centerY, double width, double strokeWidth) {
double length = width - getSquareInnerOffset(width) * 2;
Line2D line = LinePainter2D.align(g,
EnumSet.of(LinePainter2D.Align.CENTER_X, LinePainter2D.Align.CENTER_Y),
centerX, centerY, length, vertical, StrokeType.CENTERED, strokeWidth);
LinePainter2D.paint(g, line, StrokeType.CENTERED, strokeWidth, RenderingHints.VALUE_ANTIALIAS_OFF);
}
private void drawSquareWithPlusOrMinus(@NotNull Graphics2D g,
double centerX,
double centerY,
double width,
boolean plus,
boolean active) {
double sw = getStrokeWidth();
Rectangle2D rect = RectanglePainter2D.align(g,
EnumSet.of(LinePainter2D.Align.CENTER_X, LinePainter2D.Align.CENTER_Y),
centerX, centerY, width, width, StrokeType.CENTERED, sw);
g.setColor(myEditor.getBackgroundColor());
RectanglePainter2D.FILL.paint(g, rect, null, StrokeType.CENTERED, sw, RenderingHints.VALUE_ANTIALIAS_OFF);
g.setColor(getOutlineColor(active));
RectanglePainter2D.DRAW.paint(g, rect, null, StrokeType.CENTERED, sw, RenderingHints.VALUE_ANTIALIAS_OFF);
if (!SystemInfo.isMac && Registry.is("ide.editor.alternative.folding.icons.painting")) {
double dx1 = rect.getX();
double dx2 = dx1 + rect.getWidth() - 1;
int x1 = (int)Math.round(dx1);
int x2 = (int)Math.round(dx2);
int cX = (int)Math.round(centerX);
int cY = (int)Math.round(centerY);
if (cX - x1 != x2 - cX) {
x1 += (x2 - cX) - (cX - x1);
}
GraphicsConfig config = GraphicsUtil.setupAAPainting(g);
g.setStroke(new BasicStroke((float)getStrokeWidth(), BasicStroke.CAP_SQUARE, BasicStroke.JOIN_MITER));
int off = (int)Math.round(getSquareInnerOffset(x2 - x1));
g.drawLine(x1 + off, cY, x2 - off, cY);
if (plus) {
g.drawLine((x1 + x2) / 2, cY - (x2 - x1 - 2*off) / 2, (x1 + x2) / 2, cY + (x2 - x1 - 2*off) / 2);
}
config.restore();
} else {
drawLine(g, false, centerX, centerY, width, sw);
if (plus) {
drawLine(g, true, centerX, centerY, width, sw);
}
}
}
/**
* Returns the gap between the sign and the square itself
*/
private double getSquareInnerOffset(double width) {
return Math.max(width / 5, scale(2));
}
private double scale(double v) {
return JBUIScale.scale((float)v) * myEditor.getScale();
}
private int getFoldingAnchorWidth() {
return (int)Math.round(getFoldingAnchorWidth2D());
}
private double getFoldingAnchorWidth2D() {
return Math.min(scale(4f), myEditor.getLineHeight() / 2f - JBUIScale.scale(2f)) * 2;
}
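// Stroke width for folding outlines: 1px on HiDPI-aware JREs (or at small
// scales), 2px otherwise; the value is then aligned to an integer number of
// device pixels so lines do not land on fractional pixel boundaries.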
private double getStrokeWidth() {
double sw = JreHiDpiUtil.isJreHiDPIEnabled() || scale(1f) < 2 ? 1 : 2;
ScaleContext ctx = ScaleContext.create(myEditor.getComponent());
return PaintUtil.alignToInt(sw, ctx, PaintUtil.devValue(1, ctx) > 2 ? RoundingMode.FLOOR : RoundingMode.ROUND, null);
}
private int getFoldingAreaOffset() {
return getLineMarkerAreaOffset() + getLineMarkerAreaWidth();
}
private int getFoldingAreaWidth() {
return isFoldingOutlineShown() ? getFoldingAnchorWidth() + JBUIScale.scale(2) :
isRealEditor() ? getFoldingAnchorWidth() : 0;
}
private boolean isRealEditor() {
return EditorUtil.isRealFileEditor(myEditor);
}
boolean isLineMarkersShown() {
return myEditor.getSettings().isLineMarkerAreaShown();
}
boolean areIconsShown() {
return myEditor.getSettings().areGutterIconsShown();
}
boolean isLineNumbersShown() {
return myEditor.getSettings().isLineNumbersShown();
}
@Override
public boolean isAnnotationsShown() {
return !myTextAnnotationGutters.isEmpty();
}
private boolean isFoldingOutlineShown() {
return myEditor.getSettings().isFoldingOutlineShown() &&
myEditor.getFoldingModel().isFoldingEnabled() &&
!myEditor.isInPresentationMode();
}
private static int getGapBetweenAreas() {
return GAP_BETWEEN_AREAS.get();
}
private static int getAreaWidthWithGap(int width) {
if (width > 0) {
return width + getGapBetweenAreas();
}
return 0;
}
private static int getGapBetweenIcons() {
return GAP_BETWEEN_ICONS.get();
}
private static int getGapBetweenAnnotations() {
return GAP_BETWEEN_ANNOTATIONS.get();
}
int getLineNumberAreaWidth() {
return isLineNumbersShown() ? myLineNumberAreaWidth + getAreaWidthWithGap(myAdditionalLineNumberAreaWidth) : 0;
}
private int getLineMarkerAreaWidth() {
return isLineMarkersShown() ? getLeftFreePaintersAreaWidth() + myIconsAreaWidth +
getGapAfterIconsArea() + getRightFreePaintersAreaWidth() : 0;
}
private void calcLineNumberAreaWidth() {
if (!isLineNumbersShown()) return;
Integer maxLineNumber = myLineNumberConverter.getMaxLineNumber(myEditor);
myLineNumberAreaWidth = maxLineNumber == null ? 0 : calcLineNumbersAreaWidth(maxLineNumber);
myAdditionalLineNumberAreaWidth = 0;
if (myAdditionalLineNumberConverter != null) {
Integer maxAdditionalLineNumber = myAdditionalLineNumberConverter.getMaxLineNumber(myEditor);
myAdditionalLineNumberAreaWidth = maxAdditionalLineNumber == null ? 0 : calcLineNumbersAreaWidth(maxAdditionalLineNumber);
}
}
@Nullable
EditorMouseEventArea getEditorMouseAreaByOffset(int offset) {
if (isLineNumbersShown() && offset < getLineNumberAreaOffset() + getLineNumberAreaWidth()) {
return EditorMouseEventArea.LINE_NUMBERS_AREA;
}
if (isAnnotationsShown() && offset < getAnnotationsAreaOffset() + getAnnotationsAreaWidth()) {
return EditorMouseEventArea.ANNOTATIONS_AREA;
}
if (isLineMarkersShown() && offset < getFoldingAreaOffset()) {
return EditorMouseEventArea.LINE_MARKERS_AREA;
}
if (isFoldingOutlineShown() && offset < getFoldingAreaOffset() + getFoldingAreaWidth()) {
return EditorMouseEventArea.FOLDING_OUTLINE_AREA;
}
return null;
}
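// Offsets of the individual gutter areas are chained left to right:
// [gap] line numbers [gap] annotations [gap] line markers (left painters,
// icons, gap, right painters), then the folding area. The methods below
// compute each area's offset from the widths of everything to its left,
// collapsing the gaps around empty areas.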
int getLineNumberAreaOffset() {
if (getLineNumberAreaWidth() == 0 && getAnnotationsAreaWidthEx() == 0 && getLineMarkerAreaWidth() == 0) {
return getFoldingAreaWidth() == 0 ? 0 : 1;
}
if (getLineNumberAreaWidth() == 0 && getAnnotationsAreaWidthEx() > 0) {
return 0; // no gap if annotations area is the first visible
}
return getGapBetweenAreas();
}
@Override
public int getAnnotationsAreaOffset() {
return getLineNumberAreaOffset() + getAreaWidthWithGap(getLineNumberAreaWidth());
}
@Override
public int getAnnotationsAreaWidth() {
return myTextAnnotationGuttersSize;
}
private int getAnnotationsAreaWidthEx() {
return myTextAnnotationGuttersSize + myTextAnnotationExtraSize;
}
@Override
public int getLineMarkerAreaOffset() {
return getAnnotationsAreaOffset() +
(myGapAfterAnnotations || myTextAnnotationExtraSize > 0
? getAreaWidthWithGap(getAnnotationsAreaWidthEx())
: getAnnotationsAreaWidthEx());
}
@Override
public int getIconAreaOffset() {
return getLineMarkerAreaOffset() + getLeftFreePaintersAreaWidth();
}
private int getLeftFreePaintersAreaOffset() {
return getLineMarkerAreaOffset();
}
@Override
public int getLineMarkerFreePaintersAreaOffset() {
return getIconAreaOffset() + myIconsAreaWidth + getGapAfterIconsArea();
}
private int getLeftFreePaintersAreaWidth() {
return myLeftFreePaintersAreaShown ? myForcedLeftFreePaintersAreaWidth < 0 ? FREE_PAINTERS_LEFT_AREA_WIDTH.get()
: myForcedLeftFreePaintersAreaWidth
: 0;
}
private int getRightFreePaintersAreaWidth() {
return myRightFreePaintersAreaShown ? myForcedRightFreePaintersAreaWidth < 0 ? FREE_PAINTERS_RIGHT_AREA_WIDTH.get()
: myForcedRightFreePaintersAreaWidth
: 0;
}
@Override
public int getIconsAreaWidth() {
return myIconsAreaWidth;
}
private int getGapAfterIconsArea() {
return isRealEditor() && areIconsShown() ? getGapBetweenAreas() : 0;
}
private boolean isMirrored() {
return myEditor.getVerticalScrollbarOrientation() != EditorEx.VERTICAL_SCROLLBAR_RIGHT;
}
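/**
 * When the gutter is mirrored (vertical scrollbar on the left), painting is
 * flipped horizontally around the given area: scaling x by -1 and translating
 * by -(2 * offset + width) maps [offset, offset + width] onto itself,
 * mirrored. Returns the previous transform so callers can restore it, or
 * null if no mirroring was applied.
 */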
@Nullable
private AffineTransform setMirrorTransformIfNeeded(Graphics2D g, int offset, int width) {
if (isMirrored()) {
AffineTransform old = g.getTransform();
AffineTransform transform = new AffineTransform(old);
transform.scale(-1, 1);
transform.translate(-offset * 2 - width, 0);
g.setTransform(transform);
return old;
}
else {
return null;
}
}
@Nullable
@Override
public FoldRegion findFoldingAnchorAt(int x, int y) {
if (!myEditor.getSettings().isFoldingOutlineShown()) return null;
int anchorX = getFoldingAreaOffset();
int anchorWidth = getFoldingAnchorWidth();
int visualLine = myEditor.yToVisualLine(y);
int neighbourhoodStartOffset = myEditor.visualPositionToOffset(new VisualPosition(visualLine, 0));
int neighbourhoodEndOffset = myEditor.visualPositionToOffset(new VisualPosition(visualLine, Integer.MAX_VALUE));
Collection<DisplayedFoldingAnchor> displayedAnchors = myAnchorsDisplayStrategy.getAnchorsToDisplay(neighbourhoodStartOffset,
neighbourhoodEndOffset,
Collections.emptyList());
x = convertX(x);
for (DisplayedFoldingAnchor anchor : displayedAnchors) {
Rectangle r = rectangleByFoldOffset(anchor.visualLine, anchorWidth, anchorX);
if (r.x < x && x <= r.x + r.width && r.y < y && y <= r.y + r.height) return anchor.foldRegion;
}
return null;
}
@SuppressWarnings("SuspiciousNameCombination")
private Rectangle rectangleByFoldOffset(int foldStart, int anchorWidth, int anchorX) {
return new Rectangle(anchorX, (int)getFoldAnchorY(foldStart, anchorWidth), anchorWidth, anchorWidth);
}
@Override
public void mouseDragged(MouseEvent e) {
TooltipController.getInstance().cancelTooltips();
}
@Override
public void mouseMoved(final MouseEvent e) {
Point point = e.getPoint();
PointInfo pointInfo = getPointInfo(point);
if (pointInfo == null) {
TextAnnotationGutterProvider provider = getProviderAtPoint(point);
String toolTip = null;
if (provider == null) {
ActiveGutterRenderer lineRenderer = getActiveRendererByMouseEvent(e);
if (lineRenderer != null) {
toolTip = lineRenderer.getTooltipText();
}
}
else {
final int line = getLineNumAtPoint(point);
if (line >= 0) {
toolTip = provider.getToolTip(line, myEditor);
if (!Objects.equals(toolTip, myLastGutterToolTip)) {
TooltipController.getInstance().cancelTooltip(GUTTER_TOOLTIP_GROUP, e, true);
myLastGutterToolTip = toolTip;
}
}
}
showToolTip(toolTip, point, Balloon.Position.atRight);
}
else {
computeTooltipInBackground(pointInfo);
}
}
private GutterIconRenderer myCalculatingInBackground;
private ProgressIndicator myBackgroundIndicator = new EmptyProgressIndicator();
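// Tooltip text can be expensive to compute (it may require a read action),
// so it is calculated in a background task: the previous indicator is
// cancelled, a fresh one is installed, and the tooltip is shown from
// onSuccess() once the text is ready. Re-entry for the same renderer while a
// computation is still running is a no-op.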
private void computeTooltipInBackground(@NotNull PointInfo pointInfo) {
GutterIconRenderer renderer = pointInfo.renderer;
if (myCalculatingInBackground == renderer && !myBackgroundIndicator.isCanceled()) return; // calculation for this renderer is already in progress
myCalculatingInBackground = renderer;
myBackgroundIndicator.cancel();
myBackgroundIndicator = new ProgressIndicatorBase();
myBackgroundIndicator.setModalityProgress(null);
Point point = pointInfo.iconCenterPosition;
Balloon.Position relativePosition = pointInfo.renderersInLine > 1 && pointInfo.rendererPosition == 0 ? Balloon.Position.below
: Balloon.Position.atRight;
AtomicReference<@NlsContexts.Tooltip String> tooltip = new AtomicReference<>();
ProgressManager.getInstance().runProcessWithProgressAsynchronously(new Task.Backgroundable(myEditor.getProject(), IdeBundle.message("progress.title.constructing.tooltip")) {
@Override
public void run(@NotNull ProgressIndicator indicator) {
tooltip.set(ReadAction.compute(() -> renderer.getTooltipText()));
}
@Override
public void onSuccess() {
showToolTip(tooltip.get(), point, relativePosition);
}
}, myBackgroundIndicator);
}
void showToolTip(@Nullable @NlsContexts.Tooltip String toolTip, @NotNull Point location, @NotNull Balloon.Position relativePosition) {
myCalculatingInBackground = null;
TooltipController controller = TooltipController.getInstance();
if (toolTip == null || toolTip.isEmpty() || myEditor.isDisposed()) {
controller.cancelTooltip(GUTTER_TOOLTIP_GROUP, null, false);
}
else {
RelativePoint showPoint = new RelativePoint(this, location);
TooltipRenderer tr =
((EditorMarkupModel)myEditor.getMarkupModel()).getErrorStripTooltipRendererProvider().calcTooltipRenderer(toolTip);
HintHint hint =
new HintHint(this, location).setAwtTooltip(true).setPreferredPosition(relativePosition).setRequestFocus(ScreenReader.isActive());
if (myEditor.getComponent().getRootPane() != null) {
controller.showTooltipByMouseMove(myEditor, showPoint, tr, false, GUTTER_TOOLTIP_GROUP, hint);
}
}
}
void validateMousePointer(@NotNull MouseEvent e) {
if (IdeGlassPaneImpl.hasPreProcessedCursor(this)) return;
FoldRegion foldingAtCursor = findFoldingAnchorAt(e.getX(), e.getY());
setActiveFoldRegions(getGroupRegions(foldingAtCursor));
Cursor cursor = Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR);
if (foldingAtCursor != null) {
cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);
}
GutterIconRenderer renderer = getGutterRenderer(e);
if (renderer != null) {
if (renderer.isNavigateAction()) {
cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);
}
}
else {
ActiveGutterRenderer lineRenderer = getActiveRendererByMouseEvent(e);
if (lineRenderer != null) {
cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);
}
else {
TextAnnotationGutterProvider provider = getProviderAtPoint(e.getPoint());
if (provider != null) {
EditorGutterAction action = myProviderToListener.get(provider);
if (action != null) {
int line = getLineNumAtPoint(e.getPoint());
if (line >= 0) {
cursor = action.getCursor(line);
}
}
}
}
}
UIUtil.setCursor(this, cursor);
}
@NotNull
private List<FoldRegion> getGroupRegions(@Nullable FoldRegion foldingAtCursor) {
if (foldingAtCursor == null) {
return Collections.emptyList();
}
else {
FoldingGroup group = foldingAtCursor.getGroup();
if (group == null) {
return Collections.singletonList(foldingAtCursor);
}
return myEditor.getFoldingModel().getGroupedRegions(group);
}
}
@Override
public void mouseClicked(MouseEvent e) {
if (e.isPopupTrigger()) {
invokePopup(e);
}
}
private void fireEventToTextAnnotationListeners(final MouseEvent e) {
if (myEditor.getMouseEventArea(e) == EditorMouseEventArea.ANNOTATIONS_AREA) {
final Point clickPoint = e.getPoint();
final TextAnnotationGutterProvider provider = getProviderAtPoint(clickPoint);
if (provider == null) {
return;
}
EditorGutterAction action = myProviderToListener.get(provider);
if (action != null) {
int line = getLineNumAtPoint(clickPoint);
if (line >= 0 && line < myEditor.getDocument().getLineCount() && UIUtil.isActionClick(e, MouseEvent.MOUSE_RELEASED)) {
action.doAction(line);
}
}
}
}
private int getLineNumAtPoint(final Point clickPoint) {
return EditorUtil.yToLogicalLineNoBlockInlays(myEditor, clickPoint.y);
}
@Nullable
private TextAnnotationGutterProvider getProviderAtPoint(final Point clickPoint) {
int current = getAnnotationsAreaOffset();
if (clickPoint.x < current) return null;
for (int i = 0; i < myTextAnnotationGutterSizes.size(); i++) {
current += myTextAnnotationGutterSizes.getInt(i);
if (clickPoint.x <= current) return myTextAnnotationGutters.get(i);
}
return null;
}
@Override
public void mousePressed(MouseEvent e) {
if (e.isPopupTrigger() || isPopupAction(e)) {
invokePopup(e);
}
else if (UIUtil.isCloseClick(e)) {
processClose(e);
}
}
private boolean isPopupAction(MouseEvent e) {
GutterIconRenderer renderer = getGutterRenderer(e);
return renderer != null && renderer.getClickAction() == null && renderer.getPopupMenuActions() != null;
}
@Override
public void mouseReleased(final MouseEvent e) {
if (e.isPopupTrigger()) {
invokePopup(e);
return;
}
GutterIconRenderer renderer = getGutterRenderer(e);
AnAction clickAction = null;
if (renderer != null && e.getButton() < 4) {
clickAction = BitUtil.isSet(e.getModifiers(), InputEvent.BUTTON2_MASK)
? renderer.getMiddleButtonClickAction()
: renderer.getClickAction();
}
if (clickAction != null) {
PluginInfo pluginInfo = PluginInfoDetectorKt.getPluginInfo(renderer.getClass());
FeatureUsageData usageData = new FeatureUsageData();
usageData.addPluginInfo(pluginInfo);
Project project = myEditor.getProject();
if (project != null) {
usageData.addProject(project);
PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(myEditor.getDocument());
if (file != null) {
usageData.addCurrentFile(file.getLanguage());
}
}
usageData.addData("icon_id", renderer.getFeatureId());
FUCounterUsageLogger.getInstance().logEvent("gutter.icon.click", "clicked", usageData);
performAction(clickAction, e, ActionPlaces.EDITOR_GUTTER, myEditor.getDataContext());
repaint();
e.consume();
}
else {
ActiveGutterRenderer lineRenderer = getActiveRendererByMouseEvent(e);
if (lineRenderer != null) {
lineRenderer.doAction(myEditor, e);
}
else {
fireEventToTextAnnotationListeners(e);
}
}
}
private boolean isDumbMode() {
Project project = myEditor.getProject();
return project != null && !project.isDisposed() && DumbService.isDumb(project);
}
private boolean checkDumbAware(@NotNull Object possiblyDumbAware) {
return !isDumbMode() || DumbService.isDumbAware(possiblyDumbAware);
}
private void notifyNotDumbAware() {
Project project = myEditor.getProject();
if (project != null) {
DumbService.getInstance(project).showDumbModeNotification(
IdeBundle.message("message.this.functionality.is.not.available.during.indexing"));
}
}
private void performAction(@NotNull AnAction action, @NotNull InputEvent e, @NotNull String place, @NotNull DataContext context) {
if (!checkDumbAware(action)) {
notifyNotDumbAware();
return;
}
AnActionEvent actionEvent = AnActionEvent.createFromAnAction(action, e, place, context);
action.update(actionEvent);
if (actionEvent.getPresentation().isEnabledAndVisible()) {
ActionUtil.performActionDumbAwareWithCallbacks(action, actionEvent, context);
}
}
@Nullable
private ActiveGutterRenderer getActiveRendererByMouseEvent(final MouseEvent e) {
if (findFoldingAnchorAt(e.getX(), e.getY()) != null) {
return null;
}
if (e.isConsumed() || e.getX() > getWhitespaceSeparatorOffset()) {
return null;
}
final ActiveGutterRenderer[] gutterRenderer = {null};
final int[] layer = {-1};
Rectangle clip = myEditor.getScrollingModel().getVisibleArea();
int firstVisibleOffset = myEditor.logicalPositionToOffset(
myEditor.xyToLogicalPosition(new Point(0, clip.y - myEditor.getLineHeight())));
int lastVisibleOffset = myEditor.logicalPositionToOffset(
myEditor.xyToLogicalPosition(new Point(0, clip.y + clip.height + myEditor.getLineHeight())));
processRangeHighlighters(firstVisibleOffset, lastVisibleOffset, highlighter -> {
LineMarkerRenderer renderer = highlighter.getLineMarkerRenderer();
if (renderer == null) return;
if (gutterRenderer[0] != null && layer[0] >= highlighter.getLayer()) return;
Rectangle rectangle = getLineRendererRectangle(highlighter);
if (rectangle == null) return;
int startY = rectangle.y;
int endY = startY + rectangle.height;
if (startY == endY) {
endY += myEditor.getLineHeight();
}
if (startY < e.getY() &&
e.getY() <= endY &&
renderer instanceof ActiveGutterRenderer &&
((ActiveGutterRenderer)renderer).canDoAction(myEditor, e)) {
gutterRenderer[0] = (ActiveGutterRenderer)renderer;
layer[0] = highlighter.getLayer();
}
});
return gutterRenderer[0];
}
@Override
public void closeAllAnnotations() {
closeTextAnnotations(myTextAnnotationGutters);
}
@Override
public void closeTextAnnotations(@NotNull Collection<? extends TextAnnotationGutterProvider> annotations) {
if (!myCanCloseAnnotations) return;
ReferenceOpenHashSet<TextAnnotationGutterProvider> toClose = new ReferenceOpenHashSet<>(annotations);
for (int i = myTextAnnotationGutters.size() - 1; i >= 0; i--) {
TextAnnotationGutterProvider provider = myTextAnnotationGutters.get(i);
if (toClose.contains(provider)) {
provider.gutterClosed();
myTextAnnotationGutters.remove(i);
myTextAnnotationGutterSizes.removeInt(i);
myProviderToListener.remove(provider);
}
}
updateSize();
}
private class CloseAnnotationsAction extends DumbAwareAction {
CloseAnnotationsAction() {
super(EditorBundle.messagePointer("close.editor.annotations.action.name"));
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
closeAllAnnotations();
}
}
@Override
@Nullable
public Point getCenterPoint(final GutterIconRenderer renderer) {
if (!areIconsShown()) {
for (Int2ObjectMap.Entry<List<GutterMark>> entry : processGutterRenderers()) {
if (ContainerUtil.find(entry.getValue(), renderer) != null) {
return new Point(getIconAreaOffset(), getLineCenterY(entry.getIntKey()));
}
}
}
else {
Ref<Point> result = Ref.create();
for (Int2ObjectMap.Entry<List<GutterMark>> entry : processGutterRenderers()) {
processIconsRow(entry.getIntKey(), entry.getValue(), (x, y, r) -> {
if (result.isNull() && r.equals(renderer)) {
Icon icon = scaleIcon(r.getIcon());
result.set(new Point(x + icon.getIconWidth() / 2, y + icon.getIconHeight() / 2));
}
});
if (!result.isNull()) {
return result.get();
}
}
}
return null;
}
@Override
public void setLineNumberConverter(@NotNull LineNumberConverter primaryConverter, @Nullable LineNumberConverter additionalConverter) {
myLineNumberConverter = primaryConverter;
myAdditionalLineNumberConverter = additionalConverter;
repaint();
}
@Override
public void setShowDefaultGutterPopup(boolean show) {
myShowDefaultGutterPopup = show;
}
@Override
public void setCanCloseAnnotations(boolean canCloseAnnotations) {
myCanCloseAnnotations = canCloseAnnotations;
}
@Override
public void setGutterPopupGroup(@Nullable ActionGroup group) {
myCustomGutterPopupGroup = group;
}
@Override
public void setPaintBackground(boolean value) {
myPaintBackground = value;
}
@Override
public void setForceShowLeftFreePaintersArea(boolean value) {
myForceLeftFreePaintersAreaShown = value;
}
@Override
public void setForceShowRightFreePaintersArea(boolean value) {
myForceRightFreePaintersAreaShown = value;
}
@Override
public void setLeftFreePaintersAreaWidth(int widthInPixels) {
if (widthInPixels < 0 || widthInPixels > Short.MAX_VALUE) throw new IllegalArgumentException();
myForcedLeftFreePaintersAreaWidth = (short)widthInPixels;
}
@Override
public void setRightFreePaintersAreaWidth(int widthInPixels) {
if (widthInPixels < 0 || widthInPixels > Short.MAX_VALUE) throw new IllegalArgumentException();
myForcedRightFreePaintersAreaWidth = (short)widthInPixels;
}
@Override
public void setInitialIconAreaWidth(int width) {
myStartIconAreaWidth = width;
}
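// Right-click handling: in the annotations area a popup is assembled from
// CloseAnnotationsAction plus every provider's own actions; over an icon the
// renderer's right-button action or popup menu group wins; elsewhere the
// custom gutter popup group (or the default IdeActions.GROUP_EDITOR_GUTTER
// group) is shown.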
private void invokePopup(MouseEvent e) {
int logicalLineAtCursor = EditorUtil.yPositionToLogicalLine(myEditor, e);
Point point = e.getPoint();
PointInfo info = getPointInfo(point);
myLastActionableClick = new ClickInfo(logicalLineAtCursor, info == null ? point : info.iconCenterPosition);
final ActionManager actionManager = ActionManager.getInstance();
if (myEditor.getMouseEventArea(e) == EditorMouseEventArea.ANNOTATIONS_AREA) {
final List<AnAction> addActions = new ArrayList<>();
if (myCanCloseAnnotations) addActions.add(new CloseAnnotationsAction());
//if (line >= myEditor.getDocument().getLineCount()) return;
for (TextAnnotationGutterProvider gutterProvider : myTextAnnotationGutters) {
final List<AnAction> list = gutterProvider.getPopupActions(logicalLineAtCursor, myEditor);
if (list != null) {
for (AnAction action : list) {
if (! addActions.contains(action)) {
addActions.add(action);
}
}
}
}
if (!addActions.isEmpty()) {
DefaultActionGroup actionGroup = DefaultActionGroup.createPopupGroup(EditorBundle.messagePointer("editor.annotations.action.group.name"));
for (AnAction addAction : addActions) {
actionGroup.add(addAction);
}
JPopupMenu menu = actionManager.createActionPopupMenu("", actionGroup).getComponent();
menu.show(this, e.getX(), e.getY());
e.consume();
}
}
else {
if (info != null) {
AnAction rightButtonAction = info.renderer.getRightButtonClickAction();
if (rightButtonAction != null) {
performAction(rightButtonAction, e, ActionPlaces.EDITOR_GUTTER_POPUP, myEditor.getDataContext());
e.consume();
}
else {
ActionGroup actionGroup = info.renderer.getPopupMenuActions();
if (actionGroup != null) {
if (checkDumbAware(actionGroup)) {
actionManager.createActionPopupMenu(ActionPlaces.EDITOR_GUTTER_POPUP, actionGroup).getComponent().show(this, e.getX(), e.getY());
}
else {
notifyNotDumbAware();
}
e.consume();
}
}
}
else {
ActionGroup group = myCustomGutterPopupGroup;
if (group == null && myShowDefaultGutterPopup) {
group = (ActionGroup)CustomActionsSchema.getInstance().getCorrectedAction(IdeActions.GROUP_EDITOR_GUTTER);
}
if (group != null) {
ActionPopupMenu popupMenu = actionManager.createActionPopupMenu(ActionPlaces.EDITOR_GUTTER_POPUP, group);
popupMenu.getComponent().show(this, e.getX(), e.getY());
}
e.consume();
}
}
}
@Override
public void mouseEntered(MouseEvent e) {
}
@Override
public void mouseExited(MouseEvent e) {
TooltipController.getInstance().cancelTooltip(GUTTER_TOOLTIP_GROUP, e, false);
}
private int convertPointToLineNumber(final Point p) {
DocumentEx document = myEditor.getDocument();
int line = EditorUtil.yPositionToLogicalLine(myEditor, p);
if (!isValidLine(document, line)) return -1;
int startOffset = document.getLineStartOffset(line);
final FoldRegion region = myEditor.getFoldingModel().getCollapsedRegionAtOffset(startOffset);
if (region != null) {
return document.getLineNumber(region.getEndOffset());
}
return line;
}
@Override
@Nullable
public GutterIconRenderer getGutterRenderer(final Point p) {
PointInfo info = getPointInfo(p);
return info == null ? null : info.renderer;
}
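// Hit-testing for gutter icons: first the icon row of the visual line under
// the point is scanned (recording each icon's x position so the tooltip can
// be placed relative to its neighbors); if the point lies above or below the
// line's text, block inlays carrying gutter icons are probed instead.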
@Nullable
private PointInfo getPointInfo(@NotNull Point p) {
int cX = convertX((int)p.getX());
int line = myEditor.yToVisualLine(p.y);
int startY = myEditor.visualLineToY(line);
int endY = startY + myEditor.getLineHeight();
if (p.y >= startY && p.y < endY) {
List<GutterMark> renderers = getGutterRenderers(line);
final PointInfo[] result = {null};
Int2IntRBTreeMap xPos = new Int2IntRBTreeMap();
processIconsRowForY(startY, renderers, (x, y, renderer) -> {
Icon icon = scaleIcon(renderer.getIcon());
int iconWidth = icon.getIconWidth();
int centerX = x + iconWidth / 2;
xPos.put(x, centerX);
if (x <= cX && cX <= x + iconWidth) {
int iconHeight = icon.getIconHeight();
result[0] = new PointInfo((GutterIconRenderer)renderer, new Point(centerX, y + iconHeight / 2));
}
});
if (result[0] != null) {
result[0].renderersInLine = xPos.size();
result[0].rendererPosition = new ArrayList<>(xPos.values()).indexOf(result[0].iconCenterPosition.x);
}
return result[0];
}
if (myHasInlaysWithGutterIcons) {
if (p.y < startY) {
List<Inlay<?>> inlays = myEditor.getInlayModel().getBlockElementsForVisualLine(line, true);
int yDiff = startY - p.y;
for (int i = inlays.size() - 1; i >= 0; i--) {
Inlay<?> inlay = inlays.get(i);
int height = inlay.getHeightInPixels();
if (yDiff <= height) {
return getPointInfo(inlay, p.y + yDiff - height, cX, p.y);
}
yDiff -= height;
}
}
else {
List<Inlay<?>> inlays = myEditor.getInlayModel().getBlockElementsForVisualLine(line, false);
int yDiff = p.y - endY;
for (Inlay<?> inlay : inlays) {
int height = inlay.getHeightInPixels();
if (yDiff < height) {
return getPointInfo(inlay, p.y - yDiff, cX, p.y);
}
yDiff -= height;
}
}
}
return null;
}
@Nullable
private PointInfo getPointInfo(@NotNull Inlay<?> inlay, int inlayY, int x, int y) {
GutterIconRenderer renderer = inlay.getGutterIconRenderer();
if (!shouldBeShown(renderer) || !checkDumbAware(renderer)) return null;
Icon icon = scaleIcon(renderer.getIcon());
int iconHeight = icon.getIconHeight();
if ((y - inlayY) >= Math.max(iconHeight, myEditor.getLineHeight()) || iconHeight > inlay.getHeightInPixels()) return null;
int iconWidth = icon.getIconWidth();
int rightX = getIconAreaOffset() + getIconsAreaWidth();
if (x < rightX - iconWidth || x > rightX) return null;
PointInfo pointInfo = new PointInfo(renderer, new Point(rightX - iconWidth / 2,
inlayY + getTextAlignmentShiftForInlayIcon(icon, inlay) + iconHeight / 2));
pointInfo.renderersInLine = 1;
return pointInfo;
}
@Nullable
private GutterIconRenderer getGutterRenderer(final MouseEvent e) {
return getGutterRenderer(e.getPoint());
}
@NotNull
static LineMarkerRendererEx.Position getLineMarkerPosition(@NotNull LineMarkerRenderer renderer) {
if (renderer instanceof LineMarkerRendererEx) {
return ((LineMarkerRendererEx)renderer).getPosition();
}
return LineMarkerRendererEx.Position.RIGHT;
}
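// Converts a mouse x coordinate into painting coordinates when the gutter is
// mirrored; with the scrollbar on the left the gutter content is flipped, so
// hit tests must flip the incoming x as well.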
int convertX(int x) {
if (!isMirrored()) return x;
return getWidth() - x;
}
public void dispose() {
for (TextAnnotationGutterProvider gutterProvider : myTextAnnotationGutters) {
gutterProvider.gutterClosed();
}
myProviderToListener.clear();
}
@Override
public boolean isFocusable() {
return ScreenReader.isActive();
}
@Override
public AccessibleContext getAccessibleContext() {
if (accessibleContext == null) {
accessibleContext = new AccessibleJComponent() {};
}
return accessibleContext;
}
void setCurrentAccessibleLine(@Nullable AccessibleGutterLine line) {
myAccessibleGutterLine = line;
}
@Nullable
AccessibleGutterLine getCurrentAccessibleLine() {
return myAccessibleGutterLine;
}
void escapeCurrentAccessibleLine() {
if (myAccessibleGutterLine != null) {
myAccessibleGutterLine.escape(true);
}
}
private static final class ClickInfo {
final int myLogicalLineAtCursor;
final Point myIconCenterPosition;
private ClickInfo(int logicalLineAtCursor, Point iconCenterPosition) {
myLogicalLineAtCursor = logicalLineAtCursor;
myIconCenterPosition = iconCenterPosition;
}
}
private static final class PointInfo {
private final @NotNull GutterIconRenderer renderer;
private final @NotNull Point iconCenterPosition;
private int renderersInLine;
private int rendererPosition;
private PointInfo(@NotNull GutterIconRenderer renderer, @NotNull Point iconCenterPosition) {
this.renderer = renderer;
this.iconCenterPosition = iconCenterPosition;
}
}
}
| platform/platform-impl/src/com/intellij/openapi/editor/impl/EditorGutterComponentImpl.java | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.impl;
import com.intellij.codeInsight.daemon.GutterMark;
import com.intellij.codeInsight.daemon.NonHideableIconGutterMark;
import com.intellij.codeInsight.folding.impl.FoldingUtil;
import com.intellij.codeInsight.hint.TooltipController;
import com.intellij.codeInsight.hint.TooltipGroup;
import com.intellij.codeInsight.hint.TooltipRenderer;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.dnd.DnDDragStartBean;
import com.intellij.ide.dnd.DnDImage;
import com.intellij.ide.dnd.DnDNativeTarget;
import com.intellij.ide.dnd.DnDSupport;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.customization.CustomActionsSchema;
import com.intellij.internal.statistic.eventLog.FeatureUsageData;
import com.intellij.internal.statistic.service.fus.collectors.FUCounterUsageLogger;
import com.intellij.internal.statistic.utils.PluginInfo;
import com.intellij.internal.statistic.utils.PluginInfoDetectorKt;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.ColorKey;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorFontType;
import com.intellij.openapi.editor.event.EditorMouseEventArea;
import com.intellij.openapi.editor.ex.*;
import com.intellij.openapi.editor.ex.util.EditorUIUtil;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.impl.view.FontLayoutService;
import com.intellij.openapi.editor.impl.view.IterationState;
import com.intellij.openapi.editor.impl.view.VisualLinesIterator;
import com.intellij.openapi.editor.markup.*;
import com.intellij.openapi.fileEditor.impl.EditorComposite;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.GraphicsConfig;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.impl.IdeGlassPaneImpl;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.paint.LinePainter2D;
import com.intellij.ui.paint.LinePainter2D.StrokeType;
import com.intellij.ui.paint.PaintUtil;
import com.intellij.ui.paint.PaintUtil.RoundingMode;
import com.intellij.ui.paint.RectanglePainter2D;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.ui.scale.ScaleContext;
import com.intellij.util.BitUtil;
import com.intellij.util.IconUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.*;
import com.intellij.util.ui.JBValue.JBValueGroup;
import com.intellij.util.ui.accessibility.ScreenReader;
import it.unimi.dsi.fastutil.ints.*;
import it.unimi.dsi.fastutil.objects.ObjectIterable;
import it.unimi.dsi.fastutil.objects.ReferenceOpenHashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.accessibility.Accessible;
import javax.accessibility.AccessibleContext;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.event.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.util.List;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
/**
* Gutter content (left to right):
* <ul>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Line numbers area
* <ul>
* <li>Line numbers</li>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Additional line numbers (used in diff)</li>
* </ul>
* </li>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Annotations area
* <ul>
* <li>Annotations</li>
* <li>Annotations extra (used in distraction free mode)</li>
* </ul>
* </li>
* <li>GAP_BETWEEN_AREAS</li>
* <li>Line markers area
* <ul>
* <li>Left free painters</li>
* <li>Icons</li>
* <li>Gap (required by debugger to set breakpoints with mouse click - IDEA-137353) </li>
* <li>Free painters</li>
* </ul>
* </li>
* <li>Folding area</li>
* </ul>
*/
@DirtyUI
final class EditorGutterComponentImpl extends EditorGutterComponentEx implements MouseListener, MouseMotionListener, DataProvider, Accessible {
private static final Logger LOG = Logger.getInstance(EditorGutterComponentImpl.class);
private static final JBValueGroup JBVG = new JBValueGroup();
private static final JBValue START_ICON_AREA_WIDTH = JBVG.value(17);
private static final JBValue FREE_PAINTERS_LEFT_AREA_WIDTH = JBVG.value(8);
private static final JBValue FREE_PAINTERS_RIGHT_AREA_WIDTH = JBVG.value(5);
private static final JBValue GAP_BETWEEN_ICONS = JBVG.value(3);
private static final JBValue GAP_BETWEEN_AREAS = JBVG.value(5);
private static final JBValue GAP_BETWEEN_ANNOTATIONS = JBVG.value(5);
private static final TooltipGroup GUTTER_TOOLTIP_GROUP = new TooltipGroup("GUTTER_TOOLTIP_GROUP", 0);
private ClickInfo myLastActionableClick;
@NotNull
private final EditorImpl myEditor;
private final FoldingAnchorsOverlayStrategy myAnchorsDisplayStrategy;
@Nullable private Int2ObjectMap<List<GutterMark>> myLineToGutterRenderers;
private boolean myLineToGutterRenderersCacheForLogicalLines;
private boolean myHasInlaysWithGutterIcons;
private int myStartIconAreaWidth = START_ICON_AREA_WIDTH.get();
private int myIconsAreaWidth;
private int myLineNumberAreaWidth;
private int myAdditionalLineNumberAreaWidth;
@NotNull private List<FoldRegion> myActiveFoldRegions = Collections.emptyList();
private int myTextAnnotationGuttersSize;
private int myTextAnnotationExtraSize;
final IntList myTextAnnotationGutterSizes = new IntArrayList();
final ArrayList<TextAnnotationGutterProvider> myTextAnnotationGutters = new ArrayList<>();
private boolean myGapAfterAnnotations;
private final Map<TextAnnotationGutterProvider, EditorGutterAction> myProviderToListener = new HashMap<>();
private String myLastGutterToolTip;
@NotNull private LineNumberConverter myLineNumberConverter = LineNumberConverter.DEFAULT;
@Nullable private LineNumberConverter myAdditionalLineNumberConverter;
private boolean myShowDefaultGutterPopup = true;
private boolean myCanCloseAnnotations = true;
@Nullable private ActionGroup myCustomGutterPopupGroup;
private final Int2ObjectMap<Color> myTextFgColors = new Int2ObjectOpenHashMap<>();
private boolean myPaintBackground = true;
private boolean myLeftFreePaintersAreaShown;
private boolean myRightFreePaintersAreaShown;
boolean myForceLeftFreePaintersAreaShown;
boolean myForceRightFreePaintersAreaShown;
private short myForcedLeftFreePaintersAreaWidth = -1;
private short myForcedRightFreePaintersAreaWidth = -1;
private int myLastNonDumbModeIconAreaWidth;
boolean myDnDInProgress;
@Nullable private AccessibleGutterLine myAccessibleGutterLine;
EditorGutterComponentImpl(@NotNull EditorImpl editor) {
myEditor = editor;
if (!ApplicationManager.getApplication().isHeadlessEnvironment()) {
installDnD();
}
setOpaque(true);
myAnchorsDisplayStrategy = new FoldingAnchorsOverlayStrategy(editor);
Project project = myEditor.getProject();
if (project != null) {
project.getMessageBus().connect(myEditor.getDisposable()).subscribe(DumbService.DUMB_MODE, new DumbService.DumbModeListener() {
@Override
public void exitDumbMode() {
updateSize();
}
});
}
if (ScreenReader.isActive()) {
AccessibleGutterLine.installListeners(this);
}
else {
ScreenReader.addPropertyChangeListener(ScreenReader.SCREEN_READER_ACTIVE_PROPERTY, editor.getDisposable(), e -> {
if ((boolean)e.getNewValue()) {
AccessibleGutterLine.installListeners(this);
}
});
}
UISettings.setupEditorAntialiasing(this);
}
@NotNull
EditorImpl getEditor() {
return myEditor;
}
private void installDnD() {
DnDSupport.createBuilder(this)
.setBeanProvider(info -> {
final GutterIconRenderer renderer = getGutterRenderer(info.getPoint());
if (renderer != null &&
renderer.getDraggableObject() != null &&
(info.isCopy() || info.isMove())) {
myDnDInProgress = true;
return new DnDDragStartBean(renderer);
}
return null;
})
.setDropHandlerWithResult(e -> {
boolean success = true;
final Object attachedObject = e.getAttachedObject();
if (attachedObject instanceof GutterIconRenderer && checkDumbAware(attachedObject)) {
final GutterDraggableObject draggableObject = ((GutterIconRenderer)attachedObject).getDraggableObject();
if (draggableObject != null) {
final int line = convertPointToLineNumber(e.getPoint());
if (line != -1) {
draggableObject.copy(line, myEditor.getVirtualFile(), e.getAction().getActionId());
}
}
}
else if (attachedObject instanceof DnDNativeTarget.EventInfo && myEditor.getSettings().isDndEnabled()) {
Transferable transferable = ((DnDNativeTarget.EventInfo)attachedObject).getTransferable();
if (transferable != null && transferable.isDataFlavorSupported(DataFlavor.stringFlavor)) {
success = EditorImpl.handleDrop(myEditor, transferable, e.getAction().getActionId());
}
}
myDnDInProgress = false;
return success;
})
.setTargetChecker(e -> {
final Object attachedObject = e.getAttachedObject();
if (attachedObject instanceof GutterIconRenderer && checkDumbAware(attachedObject)) {
final GutterDraggableObject draggableObject = ((GutterIconRenderer)attachedObject).getDraggableObject();
if (draggableObject != null) {
final int line = convertPointToLineNumber(e.getPoint());
if (line != -1) {
e.setDropPossible(true);
e.setCursor(draggableObject.getCursor(line, myEditor.getVirtualFile(), e.getAction().getActionId()));
}
}
}
else if (attachedObject instanceof DnDNativeTarget.EventInfo && myEditor.getSettings().isDndEnabled()) {
Transferable transferable = ((DnDNativeTarget.EventInfo)attachedObject).getTransferable();
if (transferable != null && transferable.isDataFlavorSupported(DataFlavor.stringFlavor)) {
final int line = convertPointToLineNumber(e.getPoint());
if (line != -1) {
e.setDropPossible(true);
myEditor.getCaretModel().moveToOffset(myEditor.getDocument().getLineStartOffset(line));
}
}
}
return true;
})
.setImageProvider(info -> {
// [tav] temp workaround for JRE-224
boolean inUserScale = !SystemInfo.isWindows || !StartupUiUtil.isJreHiDPI(myEditor.getComponent());
Image image = ImageUtil.toBufferedImage(getDragImage(getGutterRenderer(info.getPoint())), inUserScale);
return new DnDImage(image, new Point(image.getWidth(null) / 2, image.getHeight(null) / 2));
})
.enableAsNativeTarget() // required to accept dragging from editor (as editor component doesn't use DnDSupport to implement drag'n'drop)
.install();
}
Image getDragImage(GutterMark renderer) {
return IconUtil.toImage(scaleIcon(renderer.getIcon()));
}
private void fireResized() {
processComponentEvent(new ComponentEvent(this, ComponentEvent.COMPONENT_RESIZED));
}
@Override
public Dimension getPreferredSize() {
int w = getFoldingAreaOffset() + getFoldingAreaWidth();
Dimension size = new Dimension(w, myEditor.getPreferredHeight());
JBInsets.addTo(size, getInsets());
return size;
}
@Override
protected void setUI(ComponentUI newUI) {
super.setUI(newUI);
reinitSettings(true);
}
@Override
public void updateUI() {
super.updateUI();
reinitSettings(true);
}
public void reinitSettings(boolean updateGutterSize) {
updateSize(false, updateGutterSize);
repaint();
}
@Override
protected Graphics getComponentGraphics(Graphics graphics) {
return JBSwingUtilities.runGlobalCGTransform(this, super.getComponentGraphics(graphics));
}
@Override
public void paintComponent(Graphics g_) {
Rectangle clip = g_.getClipBounds();
Graphics2D g = (Graphics2D)getComponentGraphics(g_);
if (myEditor.isDisposed()) {
g.setColor(EditorImpl.getDisposedBackground());
g.fillRect(clip.x, clip.y, clip.width, clip.height);
return;
}
AffineTransform old = setMirrorTransformIfNeeded(g, 0, getWidth());
EditorUIUtil.setupAntialiasing(g);
Color backgroundColor = getBackground();
int startVisualLine;
int endVisualLine;
int firstVisibleOffset;
int lastVisibleOffset;
Segment focusModeRange = myEditor.getFocusModeRange();
if (focusModeRange == null) {
startVisualLine = myEditor.yToVisualLine(clip.y);
endVisualLine = myEditor.yToVisualLine(clip.y + clip.height - 1);
firstVisibleOffset = myEditor.visualLineStartOffset(startVisualLine);
lastVisibleOffset = myEditor.visualLineStartOffset(endVisualLine + 1);
}
else {
firstVisibleOffset = focusModeRange.getStartOffset();
lastVisibleOffset = focusModeRange.getEndOffset();
startVisualLine = myEditor.offsetToVisualLine(firstVisibleOffset);
endVisualLine = myEditor.offsetToVisualLine(lastVisibleOffset);
}
if (firstVisibleOffset > lastVisibleOffset) {
LOG.error("Unexpected painting range: (" + firstVisibleOffset + ":" + lastVisibleOffset
+ "), visual line range: (" + startVisualLine + ":" + endVisualLine
+ "), clip: " + clip + ", focus range: " + focusModeRange);
}
// paint all backgrounds
int gutterSeparatorX = getWhitespaceSeparatorOffset();
paintBackground(g, clip, 0, gutterSeparatorX, backgroundColor);
paintBackground(g, clip, gutterSeparatorX, getFoldingAreaWidth(), myEditor.getBackgroundColor());
paintEditorBackgrounds(g, firstVisibleOffset, lastVisibleOffset);
Object hint = g.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
if (!JreHiDpiUtil.isJreHiDPI(g)) g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
try {
paintAnnotations(g, startVisualLine, endVisualLine);
if (focusModeRange != null) {
int startY = Math.max(myEditor.visualLineToY(startVisualLine), clip.y);
int endY = Math.min(myEditor.visualLineToY(endVisualLine), clip.y + clip.height);
g.setClip(clip.x, startY, clip.width, endY - startY);
}
paintLineMarkers(g, firstVisibleOffset, lastVisibleOffset, startVisualLine, endVisualLine);
g.setClip(clip);
paintFoldingLines(g, clip);
paintFoldingTree(g, clip, firstVisibleOffset, lastVisibleOffset);
paintLineNumbers(g, startVisualLine, endVisualLine);
paintCurrentAccessibleLine(g);
}
finally {
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, hint);
}
if (old != null) g.setTransform(old);
}
private void paintEditorBackgrounds(Graphics g, int firstVisibleOffset, int lastVisibleOffset) {
myTextFgColors.clear();
Color defaultBackgroundColor = myEditor.getBackgroundColor();
Color defaultForegroundColor = myEditor.getColorsScheme().getDefaultForeground();
int startX = myEditor.isInDistractionFreeMode() ? 0 : getWhitespaceSeparatorOffset();
IterationState state = new IterationState(myEditor, firstVisibleOffset, lastVisibleOffset, null, true, false, true, false);
while (!state.atEnd()) {
drawEditorBackgroundForRange(g, state.getStartOffset(), state.getEndOffset(), state.getMergedAttributes(),
defaultBackgroundColor, defaultForegroundColor, startX);
state.advance();
}
}
private void drawEditorBackgroundForRange(Graphics g, int startOffset, int endOffset, TextAttributes attributes,
Color defaultBackgroundColor, Color defaultForegroundColor, int startX) {
Color bgColor = myEditor.getBackgroundColor(attributes);
if (Comparing.equal(bgColor, defaultBackgroundColor)) return;
VisualPosition visualStart = myEditor.offsetToVisualPosition(startOffset, true, false);
VisualPosition visualEnd = myEditor.offsetToVisualPosition(endOffset, false, false);
int startVisualLine = visualStart.getLine() + (visualStart.getColumn() == 0 ? 0 : 1);
int endVisualLine = visualEnd.getLine() - (visualEnd.getColumn() == 0 ? 1 : 0);
if (startVisualLine <= endVisualLine) {
int startY = myEditor.visualLineToY(startVisualLine);
int endY = myEditor.visualLineToY(endVisualLine) + myEditor.getLineHeight();
g.setColor(bgColor);
g.fillRect(startX, startY, getWidth() - startX, endY - startY);
Color fgColor = attributes.getForegroundColor();
if (!Comparing.equal(fgColor, defaultForegroundColor)) {
for (int line = startVisualLine; line <= endVisualLine; line++) {
myTextFgColors.put(line, fgColor);
}
}
}
}
private void processClose(final MouseEvent e) {
final IdeEventQueue queue = IdeEventQueue.getInstance();
// See IDEA-59553 for rationale on why this feature is disabled
//if (isLineNumbersShown()) {
// if (e.getX() >= getLineNumberAreaOffset() && getLineNumberAreaOffset() + getLineNumberAreaWidth() >= e.getX()) {
// queue.blockNextEvents(e);
// myEditor.getSettings().setLineNumbersShown(false);
// e.consume();
// return;
// }
//}
if (getGutterRenderer(e) != null) return;
if (myEditor.getMouseEventArea(e) == EditorMouseEventArea.ANNOTATIONS_AREA) {
queue.blockNextEvents(e);
closeAllAnnotations();
e.consume();
}
}
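// Paints all registered text annotation columns. An annotation of a soft-wrapped
// line is kept visible ("sticky") while the line's first visual row is scrolled
// off the top of the viewport.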
private void paintAnnotations(Graphics2D g, int startVisualLine, int endVisualLine) {
int x = getAnnotationsAreaOffset();
int w = getAnnotationsAreaWidthEx();
if (w == 0) return;
int viewportStartY = myEditor.getScrollingModel().getVisibleArea().y;
AffineTransform old = setMirrorTransformIfNeeded(g, x, w);
try {
Color color = myEditor.getColorsScheme().getColor(EditorColors.ANNOTATIONS_COLOR);
g.setColor(color != null ? color : JBColor.blue);
g.setFont(myEditor.getColorsScheme().getFont(EditorFontType.PLAIN));
for (int i = 0; i < myTextAnnotationGutters.size(); i++) {
TextAnnotationGutterProvider gutterProvider = myTextAnnotationGutters.get(i);
int lineHeight = myEditor.getLineHeight();
int lastLine = myEditor.logicalToVisualPosition(new LogicalPosition(endLineNumber(), 0)).line;
endVisualLine = Math.min(endVisualLine, lastLine);
if (startVisualLine > endVisualLine) {
break;
}
int annotationSize = myTextAnnotationGutterSizes.getInt(i);
int logicalLine = -1;
Color bg = null;
VisualLinesIterator visLinesIterator = new VisualLinesIterator(myEditor, startVisualLine);
while (!visLinesIterator.atEnd() && visLinesIterator.getVisualLine() <= endVisualLine) {
int y = visLinesIterator.getY();
int bgLineHeight = lineHeight;
boolean paintText = !visLinesIterator.startsWithSoftWrap() || y <= viewportStartY;
if (y < viewportStartY && visLinesIterator.endsWithSoftWrap()) { // "sticky" line annotation
y = viewportStartY;
}
else if (viewportStartY < y && y < viewportStartY + lineHeight && visLinesIterator.startsWithSoftWrap()) {
// avoid drawing bg over the "sticky" line above, or over a possible gap in the gutter below (e.g. code vision)
bgLineHeight = y - viewportStartY;
y = viewportStartY + lineHeight;
}
if (paintText || logicalLine == -1) {
logicalLine = visLinesIterator.getDisplayedLogicalLine();
bg = gutterProvider.getBgColor(logicalLine, myEditor);
}
if (bg != null) {
g.setColor(bg);
g.fillRect(x, y, annotationSize, bgLineHeight);
}
if (paintText) {
paintAnnotationLine(g, gutterProvider, logicalLine, x, y);
}
visLinesIterator.advance();
}
x += annotationSize;
}
}
finally {
if (old != null) g.setTransform(old);
}
}
private void paintAnnotationLine(Graphics g, TextAnnotationGutterProvider gutterProvider, int line, int x, int y) {
String s = gutterProvider.getLineText(line, myEditor);
if (!StringUtil.isEmpty(s)) {
g.setColor(myEditor.getColorsScheme().getColor(gutterProvider.getColor(line, myEditor)));
EditorFontType style = gutterProvider.getStyle(line, myEditor);
Font font = getFontForText(s, style);
g.setFont(font);
g.drawString(s, (gutterProvider.useMargin() ? getGapBetweenAnnotations() / 2 : 0) + x, y + myEditor.getAscent());
}
}
private Font getFontForText(String text, EditorFontType style) {
Font font = myEditor.getColorsScheme().getFont(style);
return UIUtil.getFontWithFallbackIfNeeded(font, text);
}
private void paintFoldingTree(@NotNull Graphics g, @NotNull Rectangle clip, int firstVisibleOffset, int lastVisibleOffset) {
if (isFoldingOutlineShown()) {
doPaintFoldingTree((Graphics2D)g, clip, firstVisibleOffset, lastVisibleOffset);
}
}
private void paintLineMarkers(Graphics2D g, int firstVisibleOffset, int lastVisibleOffset, int firstVisibleLine, int lastVisibleLine) {
if (isLineMarkersShown()) {
paintGutterRenderers(g, firstVisibleOffset, lastVisibleOffset, firstVisibleLine, lastVisibleLine);
}
}
private void paintBackground(final Graphics g,
final Rectangle clip,
final int x,
final int width,
Color background) {
g.setColor(background);
g.fillRect(x, clip.y, width, clip.height);
paintCaretRowBackground(g, x, width);
}
private void paintCaretRowBackground(final Graphics g, final int x, final int width) {
if (!myEditor.getSettings().isCaretRowShown()) return;
final VisualPosition visCaret = myEditor.getCaretModel().getVisualPosition();
Color caretRowColor = myEditor.getColorsScheme().getColor(EditorColors.CARET_ROW_COLOR);
if (caretRowColor != null) {
g.setColor(caretRowColor);
final Point caretPoint = myEditor.visualPositionToXY(visCaret);
g.fillRect(x, caretPoint.y, width, myEditor.getLineHeight());
}
}
private void paintLineNumbers(Graphics2D g, int startVisualLine, int endVisualLine) {
if (isLineNumbersShown()) {
int offset = getLineNumberAreaOffset() + myLineNumberAreaWidth;
doPaintLineNumbers(g, startVisualLine, endVisualLine, offset, myLineNumberConverter);
if (myAdditionalLineNumberConverter != null) {
doPaintLineNumbers(g, startVisualLine, endVisualLine, offset + getAreaWidthWithGap(myAdditionalLineNumberAreaWidth),
myAdditionalLineNumberConverter);
}
}
}
private void paintCurrentAccessibleLine(Graphics2D g) {
if (myAccessibleGutterLine != null) {
myAccessibleGutterLine.paint(g);
}
}
@Override
public Color getBackground() {
if (myEditor.isInDistractionFreeMode() || !myPaintBackground) {
return myEditor.getBackgroundColor();
}
Color color = myEditor.getColorsScheme().getColor(EditorColors.GUTTER_BACKGROUND);
return color != null ? color : EditorColors.GUTTER_BACKGROUND.getDefaultColor();
}
private Font getFontForLineNumbers() {
Font editorFont = myEditor.getColorsScheme().getFont(EditorFontType.PLAIN);
float editorFontSize = editorFont.getSize2D();
return editorFont.deriveFont(Math.max(1f, editorFontSize - 1f));
}
private int calcLineNumbersAreaWidth(int maxLineNumber) {
return FontLayoutService.getInstance().stringWidth(getFontMetrics(getFontForLineNumbers()), Integer.toString(maxLineNumber));
}
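// Draws one column of line numbers using the given converter. Soft-wrap
// continuation rows are skipped, and the number of a wrapped line sticks to the
// viewport top while its first visual row is scrolled out of view.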
private void doPaintLineNumbers(Graphics2D g, int startVisualLine, int endVisualLine, int offset,
@NotNull LineNumberConverter converter) {
int lastLine = myEditor.logicalToVisualPosition(new LogicalPosition(endLineNumber(), 0)).line;
endVisualLine = Math.min(endVisualLine, lastLine);
if (startVisualLine > endVisualLine) {
return;
}
Color color = myEditor.getColorsScheme().getColor(EditorColors.LINE_NUMBERS_COLOR);
Color colorUnderCaretRow = myEditor.getColorsScheme().getColor(EditorColors.LINE_NUMBER_ON_CARET_ROW_COLOR);
Font font = getFontForLineNumbers();
g.setFont(font);
int viewportStartY = myEditor.getScrollingModel().getVisibleArea().y;
AffineTransform old = setMirrorTransformIfNeeded(g, getLineNumberAreaOffset(), getLineNumberAreaWidth());
try {
int caretLogicalLine = myEditor.getCaretModel().getLogicalPosition().line;
VisualLinesIterator visLinesIterator = new VisualLinesIterator(myEditor, startVisualLine);
while (!visLinesIterator.atEnd() && visLinesIterator.getVisualLine() <= endVisualLine) {
if (!visLinesIterator.startsWithSoftWrap() || visLinesIterator.getY() <= viewportStartY) {
int logicalLine = visLinesIterator.getDisplayedLogicalLine();
Integer lineToDisplay = converter.convert(myEditor, logicalLine + 1);
if (lineToDisplay != null) {
int y = visLinesIterator.getY();
if (y < viewportStartY && visLinesIterator.endsWithSoftWrap()) { // "sticky" line number
y = viewportStartY;
}
if (myEditor.isInDistractionFreeMode()) {
Color fgColor = myTextFgColors.get(visLinesIterator.getVisualLine());
g.setColor(fgColor != null ? fgColor : color != null ? color : JBColor.blue);
} else {
g.setColor(color);
}
if (colorUnderCaretRow != null && caretLogicalLine == logicalLine) {
g.setColor(colorUnderCaretRow);
}
String s = String.valueOf(lineToDisplay);
int textOffset = isMirrored() ?
offset - getLineNumberAreaWidth() - 1 :
offset - FontLayoutService.getInstance().stringWidth(g.getFontMetrics(), s);
g.drawString(s,
textOffset,
y + myEditor.getAscent());
}
}
visLinesIterator.advance();
}
}
finally {
if (old != null) g.setTransform(old);
}
}
private int endLineNumber() {
return Math.max(0, myEditor.getDocument().getLineCount() - 1);
}
@Nullable
@Override
public Object getData(@NotNull @NonNls String dataId) {
if (myEditor.isDisposed()) return null;
if (EditorGutter.KEY.is(dataId)) {
return this;
}
if (CommonDataKeys.EDITOR.is(dataId)) {
return myEditor;
}
if (EditorGutterComponentEx.LOGICAL_LINE_AT_CURSOR.is(dataId)) {
if (myLastActionableClick == null) return null;
return myLastActionableClick.myLogicalLineAtCursor;
}
if (EditorGutterComponentEx.ICON_CENTER_POSITION.is(dataId)) {
if (myLastActionableClick == null) return null;
return myLastActionableClick.myIconCenterPosition;
}
return null;
}
@FunctionalInterface
interface RangeHighlighterProcessor {
void process(@NotNull RangeHighlighter highlighter);
}
void processRangeHighlighters(int startOffset, int endOffset, @NotNull RangeHighlighterProcessor processor) {
// limit the highlighters we process to those between the line starting at startOffset and the line ending at endOffset
MarkupIterator<RangeHighlighterEx> docHighlighters =
myEditor.getFilteredDocumentMarkupModel().overlappingIterator(startOffset, endOffset, true);
MarkupIterator<RangeHighlighterEx> editorHighlighters =
myEditor.getMarkupModel().overlappingIterator(startOffset, endOffset, true);
try {
RangeHighlighterEx lastDocHighlighter = null;
RangeHighlighterEx lastEditorHighlighter = null;
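// Merge the two iterators (document markup and editor markup) by ascending
// start offset, feeding the processor one highlighter at a time.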
while (true) {
if (lastDocHighlighter == null && docHighlighters.hasNext()) {
lastDocHighlighter = docHighlighters.next();
if (lastDocHighlighter.getAffectedAreaStartOffset() > endOffset) {
lastDocHighlighter = null;
continue;
}
if (lastDocHighlighter.getAffectedAreaEndOffset() < startOffset) {
lastDocHighlighter = null;
continue;
}
}
if (lastEditorHighlighter == null && editorHighlighters.hasNext()) {
lastEditorHighlighter = editorHighlighters.next();
if (lastEditorHighlighter.getAffectedAreaStartOffset() > endOffset) {
lastEditorHighlighter = null;
continue;
}
if (lastEditorHighlighter.getAffectedAreaEndOffset() < startOffset) {
lastEditorHighlighter = null;
continue;
}
}
if (lastDocHighlighter == null && lastEditorHighlighter == null) return;
final RangeHighlighterEx lowerHighlighter;
if (less(lastDocHighlighter, lastEditorHighlighter)) {
lowerHighlighter = lastDocHighlighter;
lastDocHighlighter = null;
}
else {
lowerHighlighter = lastEditorHighlighter;
lastEditorHighlighter = null;
}
processor.process(lowerHighlighter);
}
}
finally {
docHighlighters.dispose();
editorHighlighters.dispose();
}
}
private static boolean isValidLine(@NotNull Document document, int line) {
if (line < 0) return false;
int lineCount = document.getLineCount();
return lineCount == 0 ? line == 0 : line < lineCount;
}
private static boolean less(RangeHighlighter h1, RangeHighlighter h2) {
return h1 != null && (h2 == null || h1.getStartOffset() < h2.getStartOffset());
}
@Override
public void revalidateMarkup() {
updateSize();
}
void updateSizeOnShowNotify() {
updateSize(false, true);
}
public void updateSize() {
updateSize(false, false);
}
void updateSize(boolean onLayout, boolean canShrink) {
int prevHash = sizeHash();
if (!onLayout) {
clearLineToGutterRenderersCache();
calcLineNumberAreaWidth();
calcLineMarkerAreaWidth(canShrink);
calcAnnotationsSize();
}
calcAnnotationExtraSize();
if (prevHash != sizeHash()) {
fireResized();
}
repaint();
}
private int sizeHash() {
int result = getLineMarkerAreaWidth();
result = 31 * result + myTextAnnotationGuttersSize;
result = 31 * result + myTextAnnotationExtraSize;
result = 31 * result + getLineNumberAreaWidth();
return result;
}
private void calcAnnotationsSize() {
myTextAnnotationGuttersSize = 0;
myGapAfterAnnotations = false;
final int lineCount = Math.max(myEditor.getDocument().getLineCount(), 1);
final int guttersCount = myTextAnnotationGutters.size();
for (int j = 0; j < guttersCount; j++) {
TextAnnotationGutterProvider gutterProvider = myTextAnnotationGutters.get(j);
int gutterSize = 0;
for (int i = 0; i < lineCount; i++) {
String lineText = gutterProvider.getLineText(i, myEditor);
if (!StringUtil.isEmpty(lineText)) {
EditorFontType style = gutterProvider.getStyle(i, myEditor);
Font font = getFontForText(lineText, style);
FontMetrics fontMetrics = getFontMetrics(font);
gutterSize = Math.max(gutterSize, fontMetrics.stringWidth(lineText));
}
}
if (gutterSize > 0) {
boolean margin = gutterProvider.useMargin();
myGapAfterAnnotations = margin;
if (margin) {
gutterSize += getGapBetweenAnnotations();
}
}
myTextAnnotationGutterSizes.set(j, gutterSize);
myTextAnnotationGuttersSize += gutterSize;
}
}
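// In distraction-free mode, grows the annotation area so that the text up to
// the right margin appears horizontally centered within the editor component.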
private void calcAnnotationExtraSize() {
myTextAnnotationExtraSize = 0;
if (!myEditor.isInDistractionFreeMode() || isMirrored()) return;
Component outerContainer = ComponentUtil.findParentByCondition(myEditor.getComponent(), c -> EditorComposite.isEditorComposite(c));
if (outerContainer == null) return;
EditorSettings settings = myEditor.getSettings();
int rightMargin = settings.getRightMargin(myEditor.getProject());
if (rightMargin <= 0) return;
JComponent editorComponent = myEditor.getComponent();
RelativePoint point = new RelativePoint(editorComponent, new Point(0, 0));
Point editorLocationInWindow = point.getPoint(outerContainer);
int editorLocationX = (int)editorLocationInWindow.getX();
int rightMarginX = rightMargin * EditorUtil.getSpaceWidth(Font.PLAIN, myEditor) + editorLocationX;
int width = editorLocationX + editorComponent.getWidth();
if (rightMarginX < width && editorLocationX < width - rightMarginX) {
int centeredSize = (width - rightMarginX - editorLocationX) / 2 - (getLineMarkerAreaWidth() + getLineNumberAreaWidth() +
getFoldingAreaWidth() + 2 * getGapBetweenAreas());
myTextAnnotationExtraSize = Math.max(0, centeredSize - myTextAnnotationGuttersSize);
}
}
private boolean logicalLinesMatchVisualOnes() {
return myEditor.getSoftWrapModel().getSoftWrapsIntroducedLinesNumber() == 0 &&
myEditor.getFoldingModel().getTotalNumberOfFoldedLines() == 0;
}
void clearLineToGutterRenderersCache() {
myLineToGutterRenderers = null;
}
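// Rebuilds the visual-line -> gutter mark cache from all range highlighters,
// then lets GutterMarkPreprocessor extensions adjust each line's marks.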
private void buildGutterRenderersCache() {
myLineToGutterRenderersCacheForLogicalLines = logicalLinesMatchVisualOnes();
myLineToGutterRenderers = new Int2ObjectOpenHashMap<>();
processRangeHighlighters(0, myEditor.getDocument().getTextLength(), highlighter -> {
GutterMark renderer = highlighter.getGutterIconRenderer();
if (!shouldBeShown(renderer)) {
return;
}
if (!isHighlighterVisible(highlighter)) {
return;
}
int line = myEditor.offsetToVisualLine(highlighter.getStartOffset());
List<GutterMark> renderers = myLineToGutterRenderers.get(line);
if (renderers == null) {
renderers = new SmartList<>();
myLineToGutterRenderers.put(line, renderers);
}
renderers.add(renderer);
});
List<GutterMarkPreprocessor> gutterMarkPreprocessors = GutterMarkPreprocessor.EP_NAME.getExtensionList();
for (Int2ObjectMap.Entry<List<GutterMark>> entry : Int2ObjectMaps.fastIterable(myLineToGutterRenderers)) {
List<GutterMark> newValue = entry.getValue();
for (GutterMarkPreprocessor preprocessor : gutterMarkPreprocessors) {
// chain the preprocessors: each one receives the output of the previous one
newValue = preprocessor.processMarkers(newValue);
}
// don't allow more than 4 icons per line
entry.setValue(ContainerUtil.getFirstItems(newValue, 4));
}
}
private boolean shouldBeShown(@Nullable GutterMark gutterIconRenderer) {
return gutterIconRenderer != null && (areIconsShown() || gutterIconRenderer instanceof NonHideableIconGutterMark);
}
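// Recomputes the icon area width from the widest row of gutter icons, block
// inlay icons and the free painters areas; during dumb mode the last
// non-dumb-mode width is used as a lower bound so the gutter does not shrink.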
private void calcLineMarkerAreaWidth(boolean canShrink) {
myLeftFreePaintersAreaShown = myForceLeftFreePaintersAreaShown;
myRightFreePaintersAreaShown = myForceRightFreePaintersAreaShown;
processRangeHighlighters(0, myEditor.getDocument().getTextLength(), highlighter -> {
LineMarkerRenderer lineMarkerRenderer = highlighter.getLineMarkerRenderer();
if (lineMarkerRenderer != null) {
LineMarkerRendererEx.Position position = getLineMarkerPosition(lineMarkerRenderer);
if (position == LineMarkerRendererEx.Position.LEFT && isLineMarkerVisible(highlighter)) myLeftFreePaintersAreaShown = true;
if (position == LineMarkerRendererEx.Position.RIGHT && isLineMarkerVisible(highlighter)) myRightFreePaintersAreaShown = true;
}
});
int minWidth = areIconsShown() ? scaleWidth(myStartIconAreaWidth) : 0;
myIconsAreaWidth = canShrink ? minWidth : Math.max(myIconsAreaWidth, minWidth);
for (Int2ObjectMap.Entry<List<GutterMark>> entry : processGutterRenderers()) {
int width = 1;
List<GutterMark> renderers = entry.getValue();
for (int i = 0; i < renderers.size(); i++) {
GutterMark renderer = renderers.get(i);
if (!checkDumbAware(renderer)) continue;
width += scaleIcon(renderer.getIcon()).getIconWidth();
if (i > 0) width += getGapBetweenIcons();
}
if (myIconsAreaWidth < width) {
myIconsAreaWidth = width + 1;
}
}
myHasInlaysWithGutterIcons = false;
myEditor.getInlayModel().getBlockElementsInRange(0, myEditor.getDocument().getTextLength()).forEach(inlay -> {
GutterIconRenderer iconRenderer = inlay.getGutterIconRenderer();
if (shouldBeShown(iconRenderer) && checkDumbAware(iconRenderer) && !EditorUtil.isInlayFolded(inlay)) {
Icon icon = scaleIcon(iconRenderer.getIcon());
if (icon.getIconHeight() <= inlay.getHeightInPixels()) {
myHasInlaysWithGutterIcons = true;
myIconsAreaWidth = Math.max(myIconsAreaWidth, icon.getIconWidth());
}
}
});
if (isDumbMode()) {
myIconsAreaWidth = Math.max(myIconsAreaWidth, myLastNonDumbModeIconAreaWidth);
}
else {
myLastNonDumbModeIconAreaWidth = myIconsAreaWidth;
}
}
@Override
@NotNull
public List<GutterMark> getGutterRenderers(int line) {
if (myLineToGutterRenderers == null || myLineToGutterRenderersCacheForLogicalLines != logicalLinesMatchVisualOnes()) {
buildGutterRenderersCache();
}
Segment focusModeRange = myEditor.getFocusModeRange();
if (focusModeRange != null) {
int start = myEditor.offsetToVisualLine(focusModeRange.getStartOffset());
int end = myEditor.offsetToVisualLine(focusModeRange.getEndOffset());
if (line < start || line > end) return Collections.emptyList();
}
List<GutterMark> marks = myLineToGutterRenderers.get(line);
return marks != null ? marks : Collections.emptyList();
}
private @NotNull ObjectIterable<Int2ObjectMap.Entry<List<GutterMark>>> processGutterRenderers() {
if (myLineToGutterRenderers == null || myLineToGutterRenderersCacheForLogicalLines != logicalLinesMatchVisualOnes()) {
buildGutterRenderersCache();
}
return Int2ObjectMaps.fastIterable(myLineToGutterRenderers);
}
private boolean isHighlighterVisible(RangeHighlighter highlighter) {
return !FoldingUtil.isHighlighterFolded(myEditor, highlighter);
}
private void paintGutterRenderers(final Graphics2D g,
int firstVisibleOffset, int lastVisibleOffset, int firstVisibleLine, int lastVisibleLine) {
Object hint = g.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
try {
List<RangeHighlighter> highlighters = new ArrayList<>();
processRangeHighlighters(firstVisibleOffset, lastVisibleOffset, highlighter -> {
LineMarkerRenderer renderer = highlighter.getLineMarkerRenderer();
if (renderer != null) highlighters.add(highlighter);
});
ContainerUtil.sort(highlighters, Comparator.comparingInt(RangeHighlighter::getLayer));
for (RangeHighlighter highlighter : highlighters) {
paintLineMarkerRenderer(highlighter, g);
}
}
finally {
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, hint);
}
paintIcons(firstVisibleLine, lastVisibleLine, g);
}
private void paintIcons(final int firstVisibleLine, final int lastVisibleLine, final Graphics2D g) {
VisualLinesIterator visLinesIterator = new VisualLinesIterator(myEditor, firstVisibleLine);
while (!visLinesIterator.atEnd()) {
int visualLine = visLinesIterator.getVisualLine();
if (visualLine > lastVisibleLine) break;
int y = visLinesIterator.getY();
List<GutterMark> renderers = getGutterRenderers(visualLine);
paintIconRow(y, renderers, g);
if (myHasInlaysWithGutterIcons) {
Rectangle clip = g.getClipBounds();
int curY = y;
for (Inlay<?> inlay : visLinesIterator.getBlockInlaysAbove()) {
if (curY <= clip.y) break;
int height = inlay.getHeightInPixels();
if (height > 0) {
int newY = curY - height;
paintInlayIcon(inlay, g, newY);
curY = newY;
}
}
curY = y + myEditor.getLineHeight();
for (Inlay<?> inlay : visLinesIterator.getBlockInlaysBelow()) {
if (curY >= clip.y + clip.height) break;
int height = inlay.getHeightInPixels();
if (height > 0) {
paintInlayIcon(inlay, g, curY);
curY += height;
}
}
}
visLinesIterator.advance();
}
}
private void paintInlayIcon(Inlay<?> inlay, Graphics2D g, int y) {
GutterIconRenderer iconRenderer = inlay.getGutterIconRenderer();
if (shouldBeShown(iconRenderer) && checkDumbAware(iconRenderer)) {
Icon icon = scaleIcon(iconRenderer.getIcon());
if (icon.getIconHeight() <= inlay.getHeightInPixels()) {
int iconWidth = icon.getIconWidth();
int x = getIconAreaOffset() + myIconsAreaWidth - iconWidth;
y += getTextAlignmentShiftForInlayIcon(icon, inlay);
AffineTransform old = setMirrorTransformIfNeeded(g, x, iconWidth);
icon.paintIcon(this, g, x, y);
if (old != null) g.setTransform(old);
}
}
}
private void paintIconRow(int lineY, List<? extends GutterMark> row, final Graphics2D g) {
processIconsRowForY(lineY, row, (x, y, renderer) -> {
Icon icon = scaleIcon(renderer.getIcon());
AffineTransform old = setMirrorTransformIfNeeded(g, x, icon.getIconWidth());
try {
icon.paintIcon(this, g, x, y);
}
finally {
if (old != null) g.setTransform(old);
}
});
}
private void paintLineMarkerRenderer(@NotNull RangeHighlighter highlighter, @NotNull Graphics g) {
LineMarkerRenderer lineMarkerRenderer = highlighter.getLineMarkerRenderer();
if (lineMarkerRenderer != null) {
Rectangle rectangle = getLineRendererRectangle(highlighter);
if (rectangle != null) {
lineMarkerRenderer.paint(myEditor, g, rectangle);
}
}
}
private boolean isLineMarkerVisible(RangeHighlighter highlighter) {
int startOffset = highlighter.getStartOffset();
int endOffset = highlighter.getEndOffset();
FoldRegion startFoldRegion = myEditor.getFoldingModel().getCollapsedRegionAtOffset(startOffset);
FoldRegion endFoldRegion = myEditor.getFoldingModel().getCollapsedRegionAtOffset(endOffset);
return startFoldRegion == null || !startFoldRegion.equals(endFoldRegion);
}
@Nullable
Rectangle getLineRendererRectangle(RangeHighlighter highlighter) {
if (!isLineMarkerVisible(highlighter)) return null;
int startOffset = highlighter.getStartOffset();
int endOffset = highlighter.getEndOffset();
int startY = myEditor.visualLineToY(myEditor.offsetToVisualLine(startOffset));
// top edge of the last line of the highlighted area
int endY = myEditor.visualLineToY(myEditor.offsetToVisualLine(endOffset));
// => add one line height so endY becomes the bottom edge of the highlighted area
endY += myEditor.getLineHeight();
LineMarkerRenderer renderer = Objects.requireNonNull(highlighter.getLineMarkerRenderer());
LineMarkerRendererEx.Position position = getLineMarkerPosition(renderer);
int w;
int x;
switch (position) {
case LEFT:
w = getLeftFreePaintersAreaWidth();
x = getLeftFreePaintersAreaOffset();
break;
case RIGHT:
w = getRightFreePaintersAreaWidth();
x = getLineMarkerFreePaintersAreaOffset();
break;
case CUSTOM:
w = getWidth();
x = 0;
break;
default:
throw new IllegalArgumentException(position.name());
}
int height = endY - startY;
return new Rectangle(x, startY, w, height);
}
@FunctionalInterface
interface LineGutterIconRendererProcessor {
void process(int x, int y, @NotNull GutterMark renderer);
}
private float getEditorScaleFactor() {
if (Registry.is("editor.scale.gutter.icons")) {
float scale = myEditor.getScale();
if (Math.abs(1f - scale) > 0.10f) {
return scale;
}
}
return 1f;
}
Icon scaleIcon(Icon icon) {
float scale = getEditorScaleFactor();
return scale == 1 ? icon : IconUtil.scale(icon, this, scale);
}
private int scaleWidth(int width) {
return (int) (getEditorScaleFactor() * width);
}
void processIconsRow(int line, @NotNull List<? extends GutterMark> row, @NotNull LineGutterIconRendererProcessor processor) {
processIconsRowForY(myEditor.visualLineToY(line), row, processor);
}
// y should be equal to visualLineToY(visualLine)
private void processIconsRowForY(int y, @NotNull List<? extends GutterMark> row, @NotNull LineGutterIconRendererProcessor processor) {
if (row.isEmpty()) return;
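// Lay the icons out in three passes: LEFT-aligned ones from the left edge,
// RIGHT-aligned ones from the right edge, then CENTER-aligned ones in the
// remaining middle space.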
int middleCount = 0;
int middleSize = 0;
int x = getIconAreaOffset() + 2;
for (GutterMark r : row) {
if (!checkDumbAware(r)) continue;
final GutterIconRenderer.Alignment alignment = ((GutterIconRenderer)r).getAlignment();
final Icon icon = scaleIcon(r.getIcon());
if (alignment == GutterIconRenderer.Alignment.LEFT) {
processor.process(x, y + getTextAlignmentShift(icon), r);
x += icon.getIconWidth() + getGapBetweenIcons();
}
else if (alignment == GutterIconRenderer.Alignment.CENTER) {
middleCount++;
middleSize += icon.getIconWidth() + getGapBetweenIcons();
}
}
final int leftSize = x - getIconAreaOffset();
x = getIconAreaOffset() + myIconsAreaWidth;
for (GutterMark r : row) {
if (!checkDumbAware(r)) continue;
if (((GutterIconRenderer)r).getAlignment() == GutterIconRenderer.Alignment.RIGHT) {
Icon icon = scaleIcon(r.getIcon());
x -= icon.getIconWidth();
processor.process(x, y + getTextAlignmentShift(icon), r);
x -= getGapBetweenIcons();
}
}
int rightSize = myIconsAreaWidth + getIconAreaOffset() - x + 1;
if (middleCount > 0) {
middleSize -= getGapBetweenIcons();
x = getIconAreaOffset() + leftSize + (myIconsAreaWidth - leftSize - rightSize - middleSize) / 2;
for (GutterMark r : row) {
if (!checkDumbAware(r)) continue;
if (((GutterIconRenderer)r).getAlignment() == GutterIconRenderer.Alignment.CENTER) {
Icon icon = scaleIcon(r.getIcon());
processor.process(x, y + getTextAlignmentShift(icon), r);
x += icon.getIconWidth() + getGapBetweenIcons();
}
}
}
}
private int getTextAlignmentShiftForInlayIcon(Icon icon, Inlay<?> inlay) {
return Math.min(getTextAlignmentShift(icon), inlay.getHeightInPixels() - icon.getIconHeight());
}
private int getTextAlignmentShift(Icon icon) {
int centerRelative = (myEditor.getLineHeight() - icon.getIconHeight()) / 2;
int baselineRelative = myEditor.getAscent() - icon.getIconHeight();
return Math.max(centerRelative, baselineRelative);
}
private Color getOutlineColor(boolean isActive) {
ColorKey key = isActive ? EditorColors.SELECTED_TEARLINE_COLOR : EditorColors.TEARLINE_COLOR;
Color color = myEditor.getColorsScheme().getColor(key);
return color != null ? color : JBColor.black;
}
@Override
public void registerTextAnnotation(@NotNull TextAnnotationGutterProvider provider) {
myTextAnnotationGutters.add(provider);
myTextAnnotationGutterSizes.add(0);
updateSize();
}
@Override
public void registerTextAnnotation(@NotNull TextAnnotationGutterProvider provider, @NotNull EditorGutterAction action) {
myTextAnnotationGutters.add(provider);
myProviderToListener.put(provider, action);
myTextAnnotationGutterSizes.add(0);
updateSize();
}
@NotNull
@Override
public List<TextAnnotationGutterProvider> getTextAnnotations() {
return new ArrayList<>(myTextAnnotationGutters);
}
private void doPaintFoldingTree(@NotNull Graphics2D g, @NotNull Rectangle clip, int firstVisibleOffset, int lastVisibleOffset) {
final double width = getFoldingAnchorWidth2D();
Collection<DisplayedFoldingAnchor> anchorsToDisplay =
myAnchorsDisplayStrategy.getAnchorsToDisplay(firstVisibleOffset, lastVisibleOffset, myActiveFoldRegions);
for (DisplayedFoldingAnchor anchor : anchorsToDisplay) {
drawFoldingAnchor(width, clip, g, anchor.visualLine, anchor.type, myActiveFoldRegions.contains(anchor.foldRegion));
}
}
private void paintFoldingLines(final Graphics2D g, final Rectangle clip) {
boolean shown = isFoldingOutlineShown();
double x = getWhitespaceSeparatorOffset2D();
if ((shown || myEditor.isInDistractionFreeMode() && Registry.is("editor.distraction.gutter.separator")) && myPaintBackground) {
g.setColor(getOutlineColor(false));
LinePainter2D.paint(g, x, clip.y, x, clip.y + clip.height, StrokeType.CENTERED, getStrokeWidth());
}
if (!shown) return;
myActiveFoldRegions.forEach(region -> {
if (region.isValid() && region.isExpanded()) {
int foldStart = myEditor.offsetToVisualLine(region.getStartOffset());
int foldEnd = myEditor.offsetToVisualLine(region.getEndOffset());
if (foldStart < foldEnd) {
int startY = getLineCenterY(foldStart);
int endY = getLineCenterY(foldEnd);
if (startY <= clip.y + clip.height && endY + 1 + myEditor.getDescent() >= clip.y) {
g.setColor(getOutlineColor(true));
LinePainter2D.paint(g, x, startY, x, endY, StrokeType.CENTERED, getStrokeWidth());
}
}
}
});
}
@Override
public int getWhitespaceSeparatorOffset() {
return (int)Math.round(getWhitespaceSeparatorOffset2D());
}
private double getWhitespaceSeparatorOffset2D() {
return PaintUtil.alignToInt(getFoldingAreaOffset() + getFoldingAnchorWidth() / 2.,
ScaleContext.create(myEditor.getComponent()), RoundingMode.ROUND, null);
}
void setActiveFoldRegions(@NotNull List<FoldRegion> activeFoldRegions) {
if (!myActiveFoldRegions.equals(activeFoldRegions)) {
myActiveFoldRegions = activeFoldRegions;
repaint();
}
}
private int getLineCenterY(int line) {
return myEditor.visualLineToY(line) + myEditor.getLineHeight() / 2;
}
private double getFoldAnchorY(int line, double width) {
return myEditor.visualLineToY(line) + myEditor.getAscent() - width;
}
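// Draws a single folding anchor: a square with +/- for collapsed or expanded
// single-line regions, or a directed box (pointing down/up) for the top/bottom
// of an expanded multi-line region.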
private void drawFoldingAnchor(double width, @NotNull Rectangle clip, @NotNull Graphics2D g, int visualLine,
@NotNull DisplayedFoldingAnchor.Type type, boolean active) {
double off = width / 4;
double height = width + off;
double baseHeight = height - width / 2;
double y = getFoldAnchorY(visualLine, width);
double centerX = LinePainter2D.getStrokeCenter(g, getWhitespaceSeparatorOffset2D(), StrokeType.CENTERED, getStrokeWidth());
double strokeOff = centerX - getWhitespaceSeparatorOffset2D();
// need to have the same sub-device-pixel offset as centerX for the square_with_plus rect to have equal dev width/height
double centerY = PaintUtil.alignToInt(y + width / 2, g) + strokeOff;
switch (type) {
case COLLAPSED:
case COLLAPSED_SINGLE_LINE:
if (y <= clip.y + clip.height && y + height >= clip.y) {
drawSquareWithPlusOrMinus(g, centerX, centerY, width, true, active);
}
break;
case EXPANDED_SINGLE_LINE:
if (y <= clip.y + clip.height && y + height >= clip.y) {
drawSquareWithPlusOrMinus(g, centerX, centerY, width, false, active);
}
break;
case EXPANDED_TOP:
if (y <= clip.y + clip.height && y + height >= clip.y) {
drawDirectedBox(g, centerX, centerY, width, height, baseHeight, active);
}
break;
case EXPANDED_BOTTOM:
y += width;
if (y - height <= clip.y + clip.height && y >= clip.y) {
drawDirectedBox(g, centerX, centerY, width, -height, -baseHeight, active);
}
break;
}
}
private void drawDirectedBox(Graphics2D g,
double centerX,
double centerY,
double width,
double height,
double baseHeight,
boolean active)
{
double sw = getStrokeWidth();
Rectangle2D rect = RectanglePainter2D.align(g,
EnumSet.of(LinePainter2D.Align.CENTER_X, LinePainter2D.Align.CENTER_Y),
centerX, centerY, width, width, StrokeType.CENTERED, sw);
double x1 = rect.getX();
double x2 = x1 + rect.getWidth() - 1;
double y = height > 0 ? rect.getY() : rect.getY() + rect.getHeight() - 1;
double[] dxPoints = {x1, x1, x2, x2, centerX};
double[] dyPoints = {y + baseHeight, y, y, y + baseHeight, y + height + (height < 0 ? 1 : 0)};
if (!SystemInfo.isMac && Registry.is("ide.editor.alternative.folding.icons.painting")) {
GraphicsConfig config = GraphicsUtil.setupAAPainting(g);
g.setStroke(new BasicStroke((float)getStrokeWidth(), BasicStroke.CAP_SQUARE, BasicStroke.JOIN_ROUND));
int ix1 = (int)Math.round(x1);
int ix2 = (int)Math.round(x2);
int[] xPoints = {ix1, ix1, ix2, ix2, (int)Math.round(centerX)};
int iy1 = (int)Math.round(y + baseHeight);
int iy2 = (int)Math.round(y);
int[] yPoints = {iy1, iy2, iy2, iy1, (int)Math.round(y + height + (height < 0 ? 1 : 0))};
if (xPoints[4] - xPoints[0] != xPoints[3] - xPoints[4]) {
xPoints[0] += (xPoints[4] - xPoints[0]) - (xPoints[3] - xPoints[4]);
xPoints[1] = xPoints[0];
}
g.setColor(myEditor.getBackgroundColor());
g.fillPolygon(xPoints, yPoints, 5);
g.setColor(getOutlineColor(active));
g.drawPolygon(xPoints, yPoints, 5);
int w = xPoints[3] - xPoints[0];
int off = (int)Math.round(getSquareInnerOffset(w));
int minusY = (int)Math.round(centerY);
g.drawLine(xPoints[0] + off, minusY, xPoints[3] - off, minusY);
config.restore();
} else {
g.setColor(myEditor.getBackgroundColor());
LinePainter2D.fillPolygon(g, dxPoints, dyPoints, 5, StrokeType.CENTERED_CAPS_SQUARE, sw, RenderingHints.VALUE_ANTIALIAS_ON);
g.setColor(getOutlineColor(active));
LinePainter2D.paintPolygon(g, dxPoints, dyPoints, 5, StrokeType.CENTERED_CAPS_SQUARE, sw, RenderingHints.VALUE_ANTIALIAS_ON);
drawLine(g, false, centerX, centerY, width, sw);
}
}
private void drawLine(Graphics2D g, boolean vertical, double centerX, double centerY, double width, double strokeWidth) {
double length = width - getSquareInnerOffset(width) * 2;
Line2D line = LinePainter2D.align(g,
EnumSet.of(LinePainter2D.Align.CENTER_X, LinePainter2D.Align.CENTER_Y),
centerX, centerY, length, vertical, StrokeType.CENTERED, strokeWidth);
LinePainter2D.paint(g, line, StrokeType.CENTERED, strokeWidth, RenderingHints.VALUE_ANTIALIAS_OFF);
}
private void drawSquareWithPlusOrMinus(@NotNull Graphics2D g,
double centerX,
double centerY,
double width,
boolean plus,
boolean active) {
double sw = getStrokeWidth();
Rectangle2D rect = RectanglePainter2D.align(g,
EnumSet.of(LinePainter2D.Align.CENTER_X, LinePainter2D.Align.CENTER_Y),
centerX, centerY, width, width, StrokeType.CENTERED, sw);
g.setColor(myEditor.getBackgroundColor());
RectanglePainter2D.FILL.paint(g, rect, null, StrokeType.CENTERED, sw, RenderingHints.VALUE_ANTIALIAS_OFF);
g.setColor(getOutlineColor(active));
RectanglePainter2D.DRAW.paint(g, rect, null, StrokeType.CENTERED, sw, RenderingHints.VALUE_ANTIALIAS_OFF);
if (!SystemInfo.isMac && Registry.is("ide.editor.alternative.folding.icons.painting")) {
double dx1 = rect.getX();
double dx2 = dx1 + rect.getWidth() - 1;
int x1 = (int)Math.round(dx1);
int x2 = (int)Math.round(dx2);
int cX = (int)Math.round(centerX);
int cY = (int)Math.round(centerY);
if (cX - x1 != x2 - cX) {
x1 += (x2 - cX) - (cX - x1);
}
GraphicsConfig config = GraphicsUtil.setupAAPainting(g);
g.setStroke(new BasicStroke((float)getStrokeWidth(), BasicStroke.CAP_SQUARE, BasicStroke.JOIN_MITER));
int off = (int)Math.round(getSquareInnerOffset(x2 - x1));
g.drawLine(x1 + off, cY, x2 - off, cY);
if (plus) {
g.drawLine((x1 + x2) / 2, cY - (x2 - x1 - 2*off) / 2, (x1 + x2) / 2, cY + (x2 - x1 - 2*off) / 2);
}
config.restore();
} else {
drawLine(g, false, centerX, centerY, width, sw);
if (plus) {
drawLine(g, true, centerX, centerY, width, sw);
}
}
}
/**
* Returns the gap between the sign and the square itself
*/
private double getSquareInnerOffset(double width) {
return Math.max(width / 5, scale(2));
}
private double scale(double v) {
return JBUIScale.scale((float)v) * myEditor.getScale();
}
private int getFoldingAnchorWidth() {
return (int)Math.round(getFoldingAnchorWidth2D());
}
private double getFoldingAnchorWidth2D() {
return Math.min(scale(4f), myEditor.getLineHeight() / 2f - JBUIScale.scale(2f)) * 2;
}
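// Stroke width for the folding outline, aligned to device pixels so the lines
// stay crisp on fractional HiDPI scales.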
private double getStrokeWidth() {
double sw = JreHiDpiUtil.isJreHiDPIEnabled() || scale(1f) < 2 ? 1 : 2;
ScaleContext ctx = ScaleContext.create(myEditor.getComponent());
return PaintUtil.alignToInt(sw, ctx, PaintUtil.devValue(1, ctx) > 2 ? RoundingMode.FLOOR : RoundingMode.ROUND, null);
}
private int getFoldingAreaOffset() {
return getLineMarkerAreaOffset() + getLineMarkerAreaWidth();
}
private int getFoldingAreaWidth() {
return isFoldingOutlineShown() ? getFoldingAnchorWidth() + JBUIScale.scale(2) :
isRealEditor() ? getFoldingAnchorWidth() : 0;
}
private boolean isRealEditor() {
return EditorUtil.isRealFileEditor(myEditor);
}
boolean isLineMarkersShown() {
return myEditor.getSettings().isLineMarkerAreaShown();
}
boolean areIconsShown() {
return myEditor.getSettings().areGutterIconsShown();
}
boolean isLineNumbersShown() {
return myEditor.getSettings().isLineNumbersShown();
}
@Override
public boolean isAnnotationsShown() {
return !myTextAnnotationGutters.isEmpty();
}
private boolean isFoldingOutlineShown() {
return myEditor.getSettings().isFoldingOutlineShown() &&
myEditor.getFoldingModel().isFoldingEnabled() &&
!myEditor.isInPresentationMode();
}
private static int getGapBetweenAreas() {
return GAP_BETWEEN_AREAS.get();
}
private static int getAreaWidthWithGap(int width) {
if (width > 0) {
return width + getGapBetweenAreas();
}
return 0;
}
private static int getGapBetweenIcons() {
return GAP_BETWEEN_ICONS.get();
}
private static int getGapBetweenAnnotations() {
return GAP_BETWEEN_ANNOTATIONS.get();
}
int getLineNumberAreaWidth() {
return isLineNumbersShown() ? myLineNumberAreaWidth + getAreaWidthWithGap(myAdditionalLineNumberAreaWidth) : 0;
}
private int getLineMarkerAreaWidth() {
return isLineMarkersShown() ? getLeftFreePaintersAreaWidth() + myIconsAreaWidth +
getGapAfterIconsArea() + getRightFreePaintersAreaWidth() : 0;
}
private void calcLineNumberAreaWidth() {
if (!isLineNumbersShown()) return;
Integer maxLineNumber = myLineNumberConverter.getMaxLineNumber(myEditor);
myLineNumberAreaWidth = maxLineNumber == null ? 0 : calcLineNumbersAreaWidth(maxLineNumber);
myAdditionalLineNumberAreaWidth = 0;
if (myAdditionalLineNumberConverter != null) {
Integer maxAdditionalLineNumber = myAdditionalLineNumberConverter.getMaxLineNumber(myEditor);
myAdditionalLineNumberAreaWidth = maxAdditionalLineNumber == null ? 0 : calcLineNumbersAreaWidth(maxAdditionalLineNumber);
}
}
@Nullable
EditorMouseEventArea getEditorMouseAreaByOffset(int offset) {
if (isLineNumbersShown() && offset < getLineNumberAreaOffset() + getLineNumberAreaWidth()) {
return EditorMouseEventArea.LINE_NUMBERS_AREA;
}
if (isAnnotationsShown() && offset < getAnnotationsAreaOffset() + getAnnotationsAreaWidth()) {
return EditorMouseEventArea.ANNOTATIONS_AREA;
}
if (isLineMarkersShown() && offset < getFoldingAreaOffset()) {
return EditorMouseEventArea.LINE_MARKERS_AREA;
}
if (isFoldingOutlineShown() && offset < getFoldingAreaOffset() + getFoldingAreaWidth()) {
return EditorMouseEventArea.FOLDING_OUTLINE_AREA;
}
return null;
}
int getLineNumberAreaOffset() {
if (getLineNumberAreaWidth() == 0 && getAnnotationsAreaWidthEx() == 0 && getLineMarkerAreaWidth() == 0) {
return getFoldingAreaWidth() == 0 ? 0 : 1;
}
if (getLineNumberAreaWidth() == 0 && getAnnotationsAreaWidthEx() > 0) {
return 0; // no gap if the annotations area is the first visible one
}
return getGapBetweenAreas();
}
@Override
public int getAnnotationsAreaOffset() {
return getLineNumberAreaOffset() + getAreaWidthWithGap(getLineNumberAreaWidth());
}
@Override
public int getAnnotationsAreaWidth() {
return myTextAnnotationGuttersSize;
}
private int getAnnotationsAreaWidthEx() {
return myTextAnnotationGuttersSize + myTextAnnotationExtraSize;
}
@Override
public int getLineMarkerAreaOffset() {
return getAnnotationsAreaOffset() +
(myGapAfterAnnotations || myTextAnnotationExtraSize > 0
? getAreaWidthWithGap(getAnnotationsAreaWidthEx())
: getAnnotationsAreaWidthEx());
}
@Override
public int getIconAreaOffset() {
return getLineMarkerAreaOffset() + getLeftFreePaintersAreaWidth();
}
private int getLeftFreePaintersAreaOffset() {
return getLineMarkerAreaOffset();
}
@Override
public int getLineMarkerFreePaintersAreaOffset() {
return getIconAreaOffset() + myIconsAreaWidth + getGapAfterIconsArea();
}
private int getLeftFreePaintersAreaWidth() {
return myLeftFreePaintersAreaShown ? myForcedLeftFreePaintersAreaWidth < 0 ? FREE_PAINTERS_LEFT_AREA_WIDTH.get()
: myForcedLeftFreePaintersAreaWidth
: 0;
}
private int getRightFreePaintersAreaWidth() {
return myRightFreePaintersAreaShown ? myForcedRightFreePaintersAreaWidth < 0 ? FREE_PAINTERS_RIGHT_AREA_WIDTH.get()
: myForcedRightFreePaintersAreaWidth
: 0;
}
@Override
public int getIconsAreaWidth() {
return myIconsAreaWidth;
}
private int getGapAfterIconsArea() {
return isRealEditor() && areIconsShown() ? getGapBetweenAreas() : 0;
}
private boolean isMirrored() {
return myEditor.getVerticalScrollbarOrientation() != EditorEx.VERTICAL_SCROLLBAR_RIGHT;
}
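// When the gutter is mirrored (vertical scrollbar on the left), flips the given
// horizontal span so the painting code can keep working in left-to-right
// coordinates. Returns the previous transform to restore, or null if unchanged.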
@Nullable
private AffineTransform setMirrorTransformIfNeeded(Graphics2D g, int offset, int width) {
if (isMirrored()) {
AffineTransform old = g.getTransform();
AffineTransform transform = new AffineTransform(old);
transform.scale(-1, 1);
transform.translate(-offset * 2 - width, 0);
g.setTransform(transform);
return old;
}
else {
return null;
}
}
@Nullable
@Override
public FoldRegion findFoldingAnchorAt(int x, int y) {
if (!myEditor.getSettings().isFoldingOutlineShown()) return null;
int anchorX = getFoldingAreaOffset();
int anchorWidth = getFoldingAnchorWidth();
int visualLine = myEditor.yToVisualLine(y);
int neighbourhoodStartOffset = myEditor.visualPositionToOffset(new VisualPosition(visualLine, 0));
int neighbourhoodEndOffset = myEditor.visualPositionToOffset(new VisualPosition(visualLine, Integer.MAX_VALUE));
Collection<DisplayedFoldingAnchor> displayedAnchors = myAnchorsDisplayStrategy.getAnchorsToDisplay(neighbourhoodStartOffset,
neighbourhoodEndOffset,
Collections.emptyList());
x = convertX(x);
for (DisplayedFoldingAnchor anchor : displayedAnchors) {
Rectangle r = rectangleByFoldOffset(anchor.visualLine, anchorWidth, anchorX);
if (r.x < x && x <= r.x + r.width && r.y < y && y <= r.y + r.height) return anchor.foldRegion;
}
return null;
}
@SuppressWarnings("SuspiciousNameCombination")
private Rectangle rectangleByFoldOffset(int foldStart, int anchorWidth, int anchorX) {
return new Rectangle(anchorX, (int)getFoldAnchorY(foldStart, anchorWidth), anchorWidth, anchorWidth);
}
@Override
public void mouseDragged(MouseEvent e) {
TooltipController.getInstance().cancelTooltips();
}
@Override
public void mouseMoved(final MouseEvent e) {
Point point = e.getPoint();
PointInfo pointInfo = getPointInfo(point);
if (pointInfo == null) {
TextAnnotationGutterProvider provider = getProviderAtPoint(point);
String toolTip = null;
if (provider == null) {
ActiveGutterRenderer lineRenderer = getActiveRendererByMouseEvent(e);
if (lineRenderer != null) {
toolTip = lineRenderer.getTooltipText();
}
}
else {
final int line = getLineNumAtPoint(point);
if (line >= 0) {
toolTip = provider.getToolTip(line, myEditor);
if (!Objects.equals(toolTip, myLastGutterToolTip)) {
TooltipController.getInstance().cancelTooltip(GUTTER_TOOLTIP_GROUP, e, true);
myLastGutterToolTip = toolTip;
}
}
}
showToolTip(toolTip, point, Balloon.Position.atRight);
}
else {
computeTooltipInBackground(pointInfo);
}
}
private GutterIconRenderer myCalculatingInBackground;
private ProgressIndicator myBackgroundIndicator = new EmptyProgressIndicator();
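// Gutter icon tooltips may require index access, so the text is computed in a
// background read action and shown from onSuccess(); a repeated request for the
// same renderer while the previous computation is still running is ignored.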
private void computeTooltipInBackground(@NotNull PointInfo pointInfo) {
GutterIconRenderer renderer = pointInfo.renderer;
if (myCalculatingInBackground == renderer && !myBackgroundIndicator.isCanceled()) return; // calculation for this renderer is already in progress
myCalculatingInBackground = renderer;
myBackgroundIndicator.cancel();
myBackgroundIndicator = new ProgressIndicatorBase();
myBackgroundIndicator.setModalityProgress(null);
Point point = pointInfo.iconCenterPosition;
Balloon.Position relativePosition = pointInfo.renderersInLine > 1 && pointInfo.rendererPosition == 0 ? Balloon.Position.below
: Balloon.Position.atRight;
AtomicReference<@NlsContexts.Tooltip String> tooltip = new AtomicReference<>();
ProgressManager.getInstance().runProcessWithProgressAsynchronously(new Task.Backgroundable(myEditor.getProject(), IdeBundle.message("progress.title.constructing.tooltip")) {
@Override
public void run(@NotNull ProgressIndicator indicator) {
tooltip.set(ReadAction.compute(() -> renderer.getTooltipText()));
}
@Override
public void onSuccess() {
showToolTip(tooltip.get(), point, relativePosition);
}
}, myBackgroundIndicator);
}
void showToolTip(@Nullable @NlsContexts.Tooltip String toolTip, @NotNull Point location, @NotNull Balloon.Position relativePosition) {
myCalculatingInBackground = null;
TooltipController controller = TooltipController.getInstance();
if (toolTip == null || toolTip.isEmpty() || myEditor.isDisposed()) {
controller.cancelTooltip(GUTTER_TOOLTIP_GROUP, null, false);
}
else {
RelativePoint showPoint = new RelativePoint(this, location);
TooltipRenderer tr =
((EditorMarkupModel)myEditor.getMarkupModel()).getErrorStripTooltipRendererProvider().calcTooltipRenderer(toolTip);
HintHint hint =
new HintHint(this, location).setAwtTooltip(true).setPreferredPosition(relativePosition).setRequestFocus(ScreenReader.isActive());
if (myEditor.getComponent().getRootPane() != null) {
controller.showTooltipByMouseMove(myEditor, showPoint, tr, false, GUTTER_TOOLTIP_GROUP, hint);
}
}
}
void validateMousePointer(@NotNull MouseEvent e) {
if (IdeGlassPaneImpl.hasPreProcessedCursor(this)) return;
FoldRegion foldingAtCursor = findFoldingAnchorAt(e.getX(), e.getY());
setActiveFoldRegions(getGroupRegions(foldingAtCursor));
Cursor cursor = Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR);
if (foldingAtCursor != null) {
cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);
}
GutterIconRenderer renderer = getGutterRenderer(e);
if (renderer != null) {
if (renderer.isNavigateAction()) {
cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);
}
}
else {
ActiveGutterRenderer lineRenderer = getActiveRendererByMouseEvent(e);
if (lineRenderer != null) {
cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);
}
else {
TextAnnotationGutterProvider provider = getProviderAtPoint(e.getPoint());
if (provider != null) {
EditorGutterAction action = myProviderToListener.get(provider);
if (action != null) {
int line = getLineNumAtPoint(e.getPoint());
if (line >= 0) {
cursor = action.getCursor(line);
}
}
}
}
}
UIUtil.setCursor(this, cursor);
}
@NotNull
private List<FoldRegion> getGroupRegions(@Nullable FoldRegion foldingAtCursor) {
if (foldingAtCursor == null) {
return Collections.emptyList();
}
else {
FoldingGroup group = foldingAtCursor.getGroup();
if (group == null) {
return Collections.singletonList(foldingAtCursor);
}
return myEditor.getFoldingModel().getGroupedRegions(group);
}
}
@Override
public void mouseClicked(MouseEvent e) {
if (e.isPopupTrigger()) {
invokePopup(e);
}
}
private void fireEventToTextAnnotationListeners(final MouseEvent e) {
if (myEditor.getMouseEventArea(e) == EditorMouseEventArea.ANNOTATIONS_AREA) {
final Point clickPoint = e.getPoint();
final TextAnnotationGutterProvider provider = getProviderAtPoint(clickPoint);
if (provider == null) {
return;
}
EditorGutterAction action = myProviderToListener.get(provider);
if (action != null) {
int line = getLineNumAtPoint(clickPoint);
if (line >= 0 && line < myEditor.getDocument().getLineCount() && UIUtil.isActionClick(e, MouseEvent.MOUSE_RELEASED)) {
action.doAction(line);
}
}
}
}
private int getLineNumAtPoint(final Point clickPoint) {
return EditorUtil.yToLogicalLineNoBlockInlays(myEditor, clickPoint.y);
}
@Nullable
private TextAnnotationGutterProvider getProviderAtPoint(final Point clickPoint) {
int current = getAnnotationsAreaOffset();
if (clickPoint.x < current) return null;
for (int i = 0; i < myTextAnnotationGutterSizes.size(); i++) {
current += myTextAnnotationGutterSizes.getInt(i);
if (clickPoint.x <= current) return myTextAnnotationGutters.get(i);
}
return null;
}
@Override
public void mousePressed(MouseEvent e) {
if (e.isPopupTrigger() || isPopupAction(e)) {
invokePopup(e);
}
else if (UIUtil.isCloseClick(e)) {
processClose(e);
}
}
private boolean isPopupAction(MouseEvent e) {
GutterIconRenderer renderer = getGutterRenderer(e);
return renderer != null && renderer.getClickAction() == null && renderer.getPopupMenuActions() != null;
}
@Override
public void mouseReleased(final MouseEvent e) {
if (e.isPopupTrigger()) {
invokePopup(e);
return;
}
GutterIconRenderer renderer = getGutterRenderer(e);
AnAction clickAction = null;
if (renderer != null && e.getButton() < 4) {
clickAction = BitUtil.isSet(e.getModifiers(), InputEvent.BUTTON2_MASK)
? renderer.getMiddleButtonClickAction()
: renderer.getClickAction();
}
if (clickAction != null) {
PluginInfo pluginInfo = PluginInfoDetectorKt.getPluginInfo(renderer.getClass());
FeatureUsageData usageData = new FeatureUsageData();
usageData.addPluginInfo(pluginInfo);
Project project = myEditor.getProject();
if (project != null) {
usageData.addProject(project);
PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(myEditor.getDocument());
if (file != null) {
usageData.addCurrentFile(file.getLanguage());
}
}
usageData.addData("icon_id", renderer.getFeatureId());
FUCounterUsageLogger.getInstance().logEvent("gutter.icon.click", "clicked", usageData);
performAction(clickAction, e, ActionPlaces.EDITOR_GUTTER, myEditor.getDataContext());
repaint();
e.consume();
}
else {
ActiveGutterRenderer lineRenderer = getActiveRendererByMouseEvent(e);
if (lineRenderer != null) {
lineRenderer.doAction(myEditor, e);
}
else {
fireEventToTextAnnotationListeners(e);
}
}
}
private boolean isDumbMode() {
Project project = myEditor.getProject();
return project != null && DumbService.isDumb(project);
}
private boolean checkDumbAware(@NotNull Object possiblyDumbAware) {
return !isDumbMode() || DumbService.isDumbAware(possiblyDumbAware);
}
private void notifyNotDumbAware() {
Project project = myEditor.getProject();
if (project != null) {
DumbService.getInstance(project).showDumbModeNotification(
IdeBundle.message("message.this.functionality.is.not.available.during.indexing"));
}
}
private void performAction(@NotNull AnAction action, @NotNull InputEvent e, @NotNull String place, @NotNull DataContext context) {
if (!checkDumbAware(action)) {
notifyNotDumbAware();
return;
}
AnActionEvent actionEvent = AnActionEvent.createFromAnAction(action, e, place, context);
action.update(actionEvent);
if (actionEvent.getPresentation().isEnabledAndVisible()) {
ActionUtil.performActionDumbAwareWithCallbacks(action, actionEvent, context);
}
}
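// Finds the ActiveGutterRenderer under the mouse, preferring the highlighter
// with the highest layer among those whose line marker rectangle contains the
// event's y coordinate and whose renderer can handle the action.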
@Nullable
private ActiveGutterRenderer getActiveRendererByMouseEvent(final MouseEvent e) {
if (findFoldingAnchorAt(e.getX(), e.getY()) != null) {
return null;
}
if (e.isConsumed() || e.getX() > getWhitespaceSeparatorOffset()) {
return null;
}
final ActiveGutterRenderer[] gutterRenderer = {null};
final int[] layer = {-1};
Rectangle clip = myEditor.getScrollingModel().getVisibleArea();
int firstVisibleOffset = myEditor.logicalPositionToOffset(
myEditor.xyToLogicalPosition(new Point(0, clip.y - myEditor.getLineHeight())));
int lastVisibleOffset = myEditor.logicalPositionToOffset(
myEditor.xyToLogicalPosition(new Point(0, clip.y + clip.height + myEditor.getLineHeight())));
processRangeHighlighters(firstVisibleOffset, lastVisibleOffset, highlighter -> {
LineMarkerRenderer renderer = highlighter.getLineMarkerRenderer();
if (renderer == null) return;
if (gutterRenderer[0] != null && layer[0] >= highlighter.getLayer()) return;
Rectangle rectangle = getLineRendererRectangle(highlighter);
if (rectangle == null) return;
int startY = rectangle.y;
int endY = startY + rectangle.height;
if (startY == endY) {
endY += myEditor.getLineHeight();
}
if (startY < e.getY() &&
e.getY() <= endY &&
renderer instanceof ActiveGutterRenderer &&
((ActiveGutterRenderer)renderer).canDoAction(myEditor, e)) {
gutterRenderer[0] = (ActiveGutterRenderer)renderer;
layer[0] = highlighter.getLayer();
}
});
return gutterRenderer[0];
}
@Override
public void closeAllAnnotations() {
closeTextAnnotations(myTextAnnotationGutters);
}
@Override
public void closeTextAnnotations(@NotNull Collection<? extends TextAnnotationGutterProvider> annotations) {
if (!myCanCloseAnnotations) return;
ReferenceOpenHashSet<TextAnnotationGutterProvider> toClose = new ReferenceOpenHashSet<>(annotations);
for (int i = myTextAnnotationGutters.size() - 1; i >= 0; i--) {
TextAnnotationGutterProvider provider = myTextAnnotationGutters.get(i);
if (toClose.contains(provider)) {
provider.gutterClosed();
myTextAnnotationGutters.remove(i);
myTextAnnotationGutterSizes.removeInt(i);
myProviderToListener.remove(provider);
}
}
updateSize();
}
private class CloseAnnotationsAction extends DumbAwareAction {
CloseAnnotationsAction() {
super(EditorBundle.messagePointer("close.editor.annotations.action.name"));
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
closeAllAnnotations();
}
}
@Override
@Nullable
public Point getCenterPoint(final GutterIconRenderer renderer) {
if (!areIconsShown()) {
for (Int2ObjectMap.Entry<List<GutterMark>> entry : processGutterRenderers()) {
if (ContainerUtil.find(entry.getValue(), renderer) != null) {
return new Point(getIconAreaOffset(), getLineCenterY(entry.getIntKey()));
}
}
}
else {
Ref<Point> result = Ref.create();
for (Int2ObjectMap.Entry<List<GutterMark>> entry : processGutterRenderers()) {
processIconsRow(entry.getIntKey(), entry.getValue(), (x, y, r) -> {
if (result.isNull() && r.equals(renderer)) {
Icon icon = scaleIcon(r.getIcon());
result.set(new Point(x + icon.getIconWidth() / 2, y + icon.getIconHeight() / 2));
}
});
if (!result.isNull()) {
return result.get();
}
}
}
return null;
}
@Override
public void setLineNumberConverter(@NotNull LineNumberConverter primaryConverter, @Nullable LineNumberConverter additionalConverter) {
myLineNumberConverter = primaryConverter;
myAdditionalLineNumberConverter = additionalConverter;
repaint();
}
@Override
public void setShowDefaultGutterPopup(boolean show) {
myShowDefaultGutterPopup = show;
}
@Override
public void setCanCloseAnnotations(boolean canCloseAnnotations) {
myCanCloseAnnotations = canCloseAnnotations;
}
@Override
public void setGutterPopupGroup(@Nullable ActionGroup group) {
myCustomGutterPopupGroup = group;
}
@Override
public void setPaintBackground(boolean value) {
myPaintBackground = value;
}
@Override
public void setForceShowLeftFreePaintersArea(boolean value) {
myForceLeftFreePaintersAreaShown = value;
}
@Override
public void setForceShowRightFreePaintersArea(boolean value) {
myForceRightFreePaintersAreaShown = value;
}
@Override
public void setLeftFreePaintersAreaWidth(int widthInPixels) {
if (widthInPixels < 0 || widthInPixels > Short.MAX_VALUE) throw new IllegalArgumentException();
myForcedLeftFreePaintersAreaWidth = (short)widthInPixels;
}
@Override
public void setRightFreePaintersAreaWidth(int widthInPixels) {
if (widthInPixels < 0 || widthInPixels > Short.MAX_VALUE) throw new IllegalArgumentException();
myForcedRightFreePaintersAreaWidth = (short)widthInPixels;
}
@Override
public void setInitialIconAreaWidth(int width) {
myStartIconAreaWidth = width;
}
private void invokePopup(MouseEvent e) {
int logicalLineAtCursor = EditorUtil.yPositionToLogicalLine(myEditor, e);
Point point = e.getPoint();
PointInfo info = getPointInfo(point);
myLastActionableClick = new ClickInfo(logicalLineAtCursor, info == null ? point : info.iconCenterPosition);
final ActionManager actionManager = ActionManager.getInstance();
if (myEditor.getMouseEventArea(e) == EditorMouseEventArea.ANNOTATIONS_AREA) {
final List<AnAction> addActions = new ArrayList<>();
if (myCanCloseAnnotations) addActions.add(new CloseAnnotationsAction());
//if (line >= myEditor.getDocument().getLineCount()) return;
for (TextAnnotationGutterProvider gutterProvider : myTextAnnotationGutters) {
final List<AnAction> list = gutterProvider.getPopupActions(logicalLineAtCursor, myEditor);
if (list != null) {
for (AnAction action : list) {
if (! addActions.contains(action)) {
addActions.add(action);
}
}
}
}
if (!addActions.isEmpty()) {
DefaultActionGroup actionGroup = DefaultActionGroup.createPopupGroup(EditorBundle.messagePointer("editor.annotations.action.group.name"));
for (AnAction addAction : addActions) {
actionGroup.add(addAction);
}
JPopupMenu menu = actionManager.createActionPopupMenu("", actionGroup).getComponent();
menu.show(this, e.getX(), e.getY());
e.consume();
}
}
else {
if (info != null) {
AnAction rightButtonAction = info.renderer.getRightButtonClickAction();
if (rightButtonAction != null) {
performAction(rightButtonAction, e, ActionPlaces.EDITOR_GUTTER_POPUP, myEditor.getDataContext());
e.consume();
}
else {
ActionGroup actionGroup = info.renderer.getPopupMenuActions();
if (actionGroup != null) {
if (checkDumbAware(actionGroup)) {
actionManager.createActionPopupMenu(ActionPlaces.EDITOR_GUTTER_POPUP, actionGroup).getComponent().show(this, e.getX(), e.getY());
}
else {
notifyNotDumbAware();
}
e.consume();
}
}
}
else {
ActionGroup group = myCustomGutterPopupGroup;
if (group == null && myShowDefaultGutterPopup) {
group = (ActionGroup)CustomActionsSchema.getInstance().getCorrectedAction(IdeActions.GROUP_EDITOR_GUTTER);
}
if (group != null) {
ActionPopupMenu popupMenu = actionManager.createActionPopupMenu(ActionPlaces.EDITOR_GUTTER_POPUP, group);
popupMenu.getComponent().show(this, e.getX(), e.getY());
}
e.consume();
}
}
}
@Override
public void mouseEntered(MouseEvent e) {
}
@Override
public void mouseExited(MouseEvent e) {
TooltipController.getInstance().cancelTooltip(GUTTER_TOOLTIP_GROUP, e, false);
}
private int convertPointToLineNumber(final Point p) {
DocumentEx document = myEditor.getDocument();
int line = EditorUtil.yPositionToLogicalLine(myEditor, p);
if (!isValidLine(document, line)) return -1;
int startOffset = document.getLineStartOffset(line);
final FoldRegion region = myEditor.getFoldingModel().getCollapsedRegionAtOffset(startOffset);
if (region != null) {
return document.getLineNumber(region.getEndOffset());
}
return line;
}
@Override
@Nullable
public GutterIconRenderer getGutterRenderer(final Point p) {
PointInfo info = getPointInfo(p);
return info == null ? null : info.renderer;
}
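// Hit-tests the icon strip at the given point: first the icons laid out on
// the visual line itself, then any gutter icons carried by block inlays
// above or below that line.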
@Nullable
private PointInfo getPointInfo(@NotNull Point p) {
int cX = convertX((int)p.getX());
int line = myEditor.yToVisualLine(p.y);
int startY = myEditor.visualLineToY(line);
int endY = startY + myEditor.getLineHeight();
if (p.y >= startY && p.y < endY) {
List<GutterMark> renderers = getGutterRenderers(line);
final PointInfo[] result = {null};
Int2IntRBTreeMap xPos = new Int2IntRBTreeMap();
processIconsRowForY(startY, renderers, (x, y, renderer) -> {
Icon icon = scaleIcon(renderer.getIcon());
int iconWidth = icon.getIconWidth();
int centerX = x + iconWidth / 2;
xPos.put(x, centerX);
if (x <= cX && cX <= x + iconWidth) {
int iconHeight = icon.getIconHeight();
result[0] = new PointInfo((GutterIconRenderer)renderer, new Point(centerX, y + iconHeight / 2));
}
});
if (result[0] != null) {
result[0].renderersInLine = xPos.size();
result[0].rendererPosition = new ArrayList<>(xPos.values()).indexOf(result[0].iconCenterPosition.x);
}
return result[0];
}
if (myHasInlaysWithGutterIcons) {
if (p.y < startY) {
List<Inlay<?>> inlays = myEditor.getInlayModel().getBlockElementsForVisualLine(line, true);
int yDiff = startY - p.y;
for (int i = inlays.size() - 1; i >= 0; i--) {
Inlay<?> inlay = inlays.get(i);
int height = inlay.getHeightInPixels();
if (yDiff <= height) {
return getPointInfo(inlay, p.y + yDiff - height, cX, p.y);
}
yDiff -= height;
}
}
else {
List<Inlay<?>> inlays = myEditor.getInlayModel().getBlockElementsForVisualLine(line, false);
int yDiff = p.y - endY;
for (Inlay<?> inlay : inlays) {
int height = inlay.getHeightInPixels();
if (yDiff < height) {
return getPointInfo(inlay, p.y - yDiff, cX, p.y);
}
yDiff -= height;
}
}
}
return null;
}
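// Inlay variant: an inlay's gutter icon is right-aligned in the icon area,
// so only x within [rightX - iconWidth, rightX] counts as a hit.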
@Nullable
private PointInfo getPointInfo(@NotNull Inlay<?> inlay, int inlayY, int x, int y) {
GutterIconRenderer renderer = inlay.getGutterIconRenderer();
if (!shouldBeShown(renderer) || !checkDumbAware(renderer)) return null;
Icon icon = scaleIcon(renderer.getIcon());
int iconHeight = icon.getIconHeight();
if ((y - inlayY) >= Math.max(iconHeight, myEditor.getLineHeight()) || iconHeight > inlay.getHeightInPixels()) return null;
int iconWidth = icon.getIconWidth();
int rightX = getIconAreaOffset() + getIconsAreaWidth();
if (x < rightX - iconWidth || x > rightX) return null;
PointInfo pointInfo = new PointInfo(renderer, new Point(rightX - iconWidth / 2,
inlayY + getTextAlignmentShiftForInlayIcon(icon, inlay) + iconHeight / 2));
pointInfo.renderersInLine = 1;
return pointInfo;
}
@Nullable
private GutterIconRenderer getGutterRenderer(final MouseEvent e) {
return getGutterRenderer(e.getPoint());
}
@NotNull
static LineMarkerRendererEx.Position getLineMarkerPosition(@NotNull LineMarkerRenderer renderer) {
if (renderer instanceof LineMarkerRendererEx) {
return ((LineMarkerRendererEx)renderer).getPosition();
}
return LineMarkerRendererEx.Position.RIGHT;
}
int convertX(int x) {
if (!isMirrored()) return x;
return getWidth() - x;
}
public void dispose() {
for (TextAnnotationGutterProvider gutterProvider : myTextAnnotationGutters) {
gutterProvider.gutterClosed();
}
myProviderToListener.clear();
}
@Override
public boolean isFocusable() {
return ScreenReader.isActive();
}
@Override
public AccessibleContext getAccessibleContext() {
if (accessibleContext == null) {
accessibleContext = new AccessibleJComponent() {};
}
return accessibleContext;
}
void setCurrentAccessibleLine(@Nullable AccessibleGutterLine line) {
myAccessibleGutterLine = line;
}
@Nullable
AccessibleGutterLine getCurrentAccessibleLine() {
return myAccessibleGutterLine;
}
void escapeCurrentAccessibleLine() {
if (myAccessibleGutterLine != null) {
myAccessibleGutterLine.escape(true);
}
}
private static final class ClickInfo {
final int myLogicalLineAtCursor;
final Point myIconCenterPosition;
private ClickInfo(int logicalLineAtCursor, Point iconCenterPosition) {
myLogicalLineAtCursor = logicalLineAtCursor;
myIconCenterPosition = iconCenterPosition;
}
}
private static final class PointInfo {
private final @NotNull GutterIconRenderer renderer;
private final @NotNull Point iconCenterPosition;
private int renderersInLine;
private int rendererPosition;
private PointInfo(@NotNull GutterIconRenderer renderer, @NotNull Point iconCenterPosition) {
this.renderer = renderer;
this.iconCenterPosition = iconCenterPosition;
}
}
}
| IDEA-262426 com.intellij.serviceContainer.AlreadyDisposedException in EditorGutterComponentImpl
GitOrigin-RevId: 8806ff48d06d39d332028f95fe5ba827c424ae1b | platform/platform-impl/src/com/intellij/openapi/editor/impl/EditorGutterComponentImpl.java | IDEA-262426 com.intellij.serviceContainer.AlreadyDisposedException in EditorGutterComponentImpl | <ide><path>latform/platform-impl/src/com/intellij/openapi/editor/impl/EditorGutterComponentImpl.java
<ide> if (outerContainer == null) return;
<ide>
<ide> EditorSettings settings = myEditor.getSettings();
<del> int rightMargin = settings.getRightMargin(myEditor.getProject());
<add> Project project = myEditor.getProject();
<add> if (project != null && project.isDisposed()) return;
<add> int rightMargin = settings.getRightMargin(project);
<ide> if (rightMargin <= 0) return;
<ide>
<ide> JComponent editorComponent = myEditor.getComponent();
<ide>
<ide> private boolean isDumbMode() {
<ide> Project project = myEditor.getProject();
<del> return project != null && DumbService.isDumb(project);
<add> return project != null && !project.isDisposed() && DumbService.isDumb(project);
<ide> }
<ide>
<ide> private boolean checkDumbAware(@NotNull Object possiblyDumbAware) { |
|
Java | mit | bef22a184ca3e1c10537ca4fd4a19aba6935cb05 | 0 | eaglesakura/android-text-kvs | package com.eaglesakura.android.db;
import com.eaglesakura.util.StringUtil;
import android.content.Context;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* A class that holds properties for simple configuration settings
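* <p>
* A minimal usage sketch (the subclass and database name below are
* hypothetical, shown only for illustration):
* <pre>
* public class AppSettings extends BasePropertiesDatabase {
*     public AppSettings(Context context) {
*         super(context, "app-settings.db"); // hypothetical DB name
*     }
* }
*
* AppSettings settings = new AppSettings(context);
* settings.load();   // read the current values from the DB
* // ... modify some properties ...
* settings.commit(); // write back only the modified values
* </pre>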
*/
public abstract class BasePropertiesDatabase extends BaseProperties {
/**
* The database file used for persistence
*/
protected File databaseFile;
protected BasePropertiesDatabase(Context context, String dbName) {
super(context);
if (context != null) {
this.context = context.getApplicationContext();
if (!StringUtil.isEmpty(dbName)) {
// A target DB has been specified
this.databaseFile = context.getDatabasePath(dbName);
}
}
}
public void setDatabaseFile(File databaseFile) {
this.databaseFile = databaseFile;
}
/**
* Saves the cached values to the database
*/
public synchronized void commit() {
final Map<String, String> commitValues = new HashMap<>();
// Extract the values to be committed
{
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property property = iterator.next().getValue();
if (property.modified) {
commitValues.put(property.key, property.value);
}
}
}
// Do nothing if there is nothing to commit
if (commitValues.isEmpty()) {
return;
}
// Persist the values
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Write);
kvs.putInTx(commitValues);
// Once the commit succeeds, clear the modified flags
{
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property property = iterator.next().getValue();
property.modified = false;
}
}
} finally {
kvs.close();
}
}
/**
* Loads only the specified key from the DB
*/
public void load(String key) {
load(new String[]{key});
}
/**
* Loads only the specified keys from the DB
*/
public void load(String[] keys) {
// Cannot read because no Context is available
if (context == null || databaseFile == null || keys.length == 0) {
return;
}
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Read);
for (String key : keys) {
Property property = propMap.get(key);
if (property != null) {
property.value = kvs.get(property.key, property.defaultValue);
property.modified = false;
}
}
} finally {
kvs.close();
}
}
/**
* Loads the data from the DB
* <br>
* Existing cached values are discarded
*/
public void load() {
// Cannot read because no Context is available
if (context == null || databaseFile == null) {
return;
}
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Read);
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property value = iterator.next().getValue();
// Reload; fall back to the default value if nothing could be read
value.value = kvs.get(value.key, value.defaultValue);
// Just synced, so not a commit target
value.modified = false;
}
} finally {
kvs.close();
}
}
/**
* Keeps all properties up to date
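* Modified values are written back to the DB and unmodified ones are
* refreshed from it, all in one pass.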
*/
public void commitAndLoad() {
// Cannot read because no Context is available
if (context == null || databaseFile == null) {
return;
}
Map<String, String> commitValues = new HashMap<>();
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Read);
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property value = iterator.next().getValue();
// Reload; fall back to the default value if nothing could be read
if (value.modified) {
// Modified values are added to the list to write back to the DB
commitValues.put(value.key, value.value);
} else {
// 変更が無いならばDBから読み出す
value.value = kvs.get(value.key, value.defaultValue);
}
// Just synced, so not a commit target
value.modified = false;
}
// Apply all changes in one batch
kvs.putInTx(commitValues);
} finally {
kvs.close();
}
}
}
| src/main/java/com/eaglesakura/android/db/BasePropertiesDatabase.java | package com.eaglesakura.android.db;
import com.eaglesakura.android.thread.async.AsyncTaskController;
import com.eaglesakura.android.thread.async.AsyncTaskResult;
import com.eaglesakura.util.StringUtil;
import android.content.Context;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* A class that holds properties for simple configuration settings
*/
public abstract class BasePropertiesDatabase extends BaseProperties {
/**
* The database file used for persistence
*/
protected File databaseFile;
protected BasePropertiesDatabase(Context context, String dbName) {
super(context);
if (context != null) {
this.context = context.getApplicationContext();
if (!StringUtil.isEmpty(dbName)) {
// A target DB has been specified
this.databaseFile = context.getDatabasePath(dbName);
}
}
}
public void setDatabaseFile(File databaseFile) {
this.databaseFile = databaseFile;
}
/**
* Saves the cached values to the database
*/
public synchronized void commit() {
final Map<String, String> commitValues = new HashMap<>();
// Extract the values to be committed
{
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property property = iterator.next().getValue();
if (property.modified) {
commitValues.put(property.key, property.value);
}
}
}
// Do nothing if there is nothing to commit
if (commitValues.isEmpty()) {
return;
}
// Persist the values
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Write);
kvs.putInTx(commitValues);
// Once the commit succeeds, clear the modified flags
{
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property property = iterator.next().getValue();
property.modified = false;
}
}
} finally {
kvs.close();
}
}
private static AsyncTaskController gTaskController = new AsyncTaskController(1);
/**
* Saves the values asynchronously.
* Values modified while the save is in flight are not guaranteed.
*/
public AsyncTaskResult<AsyncTaskController> commitAsync() {
return gTaskController.pushBack(new Runnable() {
@Override
public void run() {
commit();
}
});
}
/**
* Loads only the specified key from the DB
*/
public void load(String key) {
load(new String[]{key});
}
/**
* Loads only the specified keys from the DB
*/
public void load(String[] keys) {
// Cannot read because no Context is available
if (context == null || databaseFile == null || keys.length == 0) {
return;
}
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Read);
for (String key : keys) {
Property property = propMap.get(key);
if (property != null) {
property.value = kvs.get(property.key, property.defaultValue);
property.modified = false;
}
}
} finally {
kvs.close();
}
}
/**
* Loads the data from the DB
* <br>
* Existing cached values are discarded
*/
public void load() {
// Cannot read because no Context is available
if (context == null || databaseFile == null) {
return;
}
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Read);
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property value = iterator.next().getValue();
// Reload; fall back to the default value if nothing could be read
value.value = kvs.get(value.key, value.defaultValue);
// Just synced, so not a commit target
value.modified = false;
}
} finally {
kvs.close();
}
}
/**
* Loads the data asynchronously
*/
public AsyncTaskResult<AsyncTaskController> loadAsync() {
return gTaskController.pushBack(new Runnable() {
@Override
public void run() {
load();
}
});
}
/**
* Keeps all properties up to date
*/
public void commitAndLoad() {
// Cannot read because no Context is available
if (context == null || databaseFile == null) {
return;
}
Map<String, String> commitValues = new HashMap<>();
TextKeyValueStore kvs = new TextKeyValueStore(context, databaseFile, TextKeyValueStore.TABLE_NAME_DEFAULT);
try {
kvs.open(DBOpenType.Read);
Iterator<Map.Entry<String, Property>> iterator = propMap.entrySet().iterator();
while (iterator.hasNext()) {
Property value = iterator.next().getValue();
// Reload; fall back to the default value if nothing could be read
if (value.modified) {
// Modified values are added to the list to write back to the DB
commitValues.put(value.key, value.value);
} else {
// 変更が無いならばDBから読み出す
value.value = kvs.get(value.key, value.defaultValue);
}
// Just synced, so not a commit target
value.modified = false;
}
// Apply all changes in one batch
kvs.putInTx(commitValues);
} finally {
kvs.close();
}
}
/**
* Performs commit & load asynchronously to keep the settings up to date
*/
public AsyncTaskResult<AsyncTaskController> commitAndLoadAsync() {
return gTaskController.pushBack(new Runnable() {
@Override
public void run() {
commitAndLoad();
}
});
}
/**
* @param runnable
*/
public static AsyncTaskResult<AsyncTaskController> runInTaskQueue(Runnable runnable) {
return gTaskController.pushBack(runnable);
}
/**
* Gets the task management class
*/
public static AsyncTaskController getTaskController() {
return gTaskController;
}
}
| Removed the code that performed thread operations
| src/main/java/com/eaglesakura/android/db/BasePropertiesDatabase.java | Removed the code that performed thread operations | <ide><path>rc/main/java/com/eaglesakura/android/db/BasePropertiesDatabase.java
<ide> package com.eaglesakura.android.db;
<ide>
<del>import com.eaglesakura.android.thread.async.AsyncTaskController;
<del>import com.eaglesakura.android.thread.async.AsyncTaskResult;
<ide> import com.eaglesakura.util.StringUtil;
<ide>
<ide> import android.content.Context;
<ide> }
<ide> }
<ide>
<del> private static AsyncTaskController gTaskController = new AsyncTaskController(1);
<del>
<del> /**
<del> * Saves the values asynchronously.
<del> * Values modified while the save is in flight are not guaranteed.
<del> */
<del> public AsyncTaskResult<AsyncTaskController> commitAsync() {
<del> return gTaskController.pushBack(new Runnable() {
<del> @Override
<del> public void run() {
<del> commit();
<del> }
<del> });
<del> }
<del>
<ide> /**
<ide> * Loads only the specified key from the DB
<ide> */
<ide> }
<ide>
<ide> /**
<del> * Loads the data asynchronously
<del> */
<del> public AsyncTaskResult<AsyncTaskController> loadAsync() {
<del> return gTaskController.pushBack(new Runnable() {
<del> @Override
<del> public void run() {
<del> load();
<del> }
<del> });
<del> }
<del>
<del> /**
<ide> * Keeps all properties up to date
<ide> */
<ide> public void commitAndLoad() {
<ide> }
<ide> }
<ide>
<del> /**
<del> * Performs commit & load asynchronously to keep the settings up to date
<del> */
<del> public AsyncTaskResult<AsyncTaskController> commitAndLoadAsync() {
<del> return gTaskController.pushBack(new Runnable() {
<del> @Override
<del> public void run() {
<del> commitAndLoad();
<del> }
<del> });
<del> }
<del>
<del> /**
<del> * @param runnable
<del> */
<del> public static AsyncTaskResult<AsyncTaskController> runInTaskQueue(Runnable runnable) {
<del> return gTaskController.pushBack(runnable);
<del> }
<del>
<del> /**
<del> * Gets the task management class
<del> */
<del> public static AsyncTaskController getTaskController() {
<del> return gTaskController;
<del> }
<ide> } |
|
Java | apache-2.0 | db05878f35a5fcb09ac0f3a222bd2dcb586149b5 | 0 | bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,oriontribunal/CoffeeMud,MaxRau/CoffeeMud,Tycheo/coffeemud,sfunk1x/CoffeeMud,Tycheo/coffeemud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,Tycheo/coffeemud,MaxRau/CoffeeMud,MaxRau/CoffeeMud,oriontribunal/CoffeeMud,Tycheo/coffeemud | package com.planet_ink.coffee_mud.Items.interfaces;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
/*
Copyright 2000-2006 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public interface RawMaterial extends Item
{
public int domainSource();
public void setDomainSource(int src);
// item materials
public final static int MATERIAL_UNKNOWN=0;
public final static int MATERIAL_CLOTH=1<<8;
public final static int MATERIAL_LEATHER=2<<8;
public final static int MATERIAL_METAL=3<<8;
public final static int MATERIAL_MITHRIL=4<<8;
public final static int MATERIAL_WOODEN=5<<8;
public final static int MATERIAL_GLASS=6<<8;
public final static int MATERIAL_VEGETATION=7<<8;
public final static int MATERIAL_FLESH=8<<8;
public final static int MATERIAL_PAPER=9<<8;
public final static int MATERIAL_ROCK=10<<8;
public final static int MATERIAL_LIQUID=11<<8;
public final static int MATERIAL_PRECIOUS=12<<8;
public final static int MATERIAL_ENERGY=13<<8;
public final static int MATERIAL_PLASTIC=14<<8;
public final static int MATERIAL_MASK=255<<8;
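// A resource code packs its material type into the high byte and a unique
// ordinal into the low byte; for example (RESOURCE_MASK is defined below):
// int material = code & MATERIAL_MASK; // e.g. MATERIAL_VEGETATION
// int ordinal = code & RESOURCE_MASK; // indexes RESOURCE_DESCS/RESOURCE_DATA/RESOURCE_SMELLS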
public final static String[] MATERIAL_DESCS={
"UNKNOWN",
"CLOTH",
"LEATHER",
"METAL",
"MITHRIL",
"WOODEN",
"GLASS",
"VEGETATION",
"FLESH",
"PAPER",
"ROCK",
"LIQUID",
"PRECIOUS",
"ENERGY",
"PLASTIC"};
public final static String[] MATERIAL_NOUNDESCS={
"Unknown material",
"Cloth",
"Leather",
"Metal",
"Metal",
"Wood",
"Glass",
"Vegetation",
"Flesh",
"Paper",
"Rock",
"Liquid",
"Stone",
"Energy",
"Plastic"};
public final static int RESOURCE_NOTHING=MATERIAL_UNKNOWN|0;
public final static int RESOURCE_MEAT=MATERIAL_FLESH|1;
public final static int RESOURCE_BEEF=MATERIAL_FLESH|2;
public final static int RESOURCE_PORK=MATERIAL_FLESH|3;
public final static int RESOURCE_POULTRY=MATERIAL_FLESH|4;
public final static int RESOURCE_MUTTON=MATERIAL_FLESH|5;
public final static int RESOURCE_FISH=MATERIAL_FLESH|6;
public final static int RESOURCE_WHEAT=MATERIAL_VEGETATION|7;
public final static int RESOURCE_CORN=MATERIAL_VEGETATION|8;
public final static int RESOURCE_RICE=MATERIAL_VEGETATION|9;
public final static int RESOURCE_CARROTS=MATERIAL_VEGETATION|10;
public final static int RESOURCE_TOMATOES=MATERIAL_VEGETATION|11;
public final static int RESOURCE_PEPPERS=MATERIAL_VEGETATION|12;
public final static int RESOURCE_GREENS=MATERIAL_VEGETATION|13;
public final static int RESOURCE_FRUIT=MATERIAL_VEGETATION|14;
public final static int RESOURCE_APPLES=MATERIAL_VEGETATION|15;
public final static int RESOURCE_BERRIES=MATERIAL_VEGETATION|16;
public final static int RESOURCE_ORANGES=MATERIAL_VEGETATION|17;
public final static int RESOURCE_LEMONS=MATERIAL_VEGETATION|18;
public final static int RESOURCE_GRAPES=MATERIAL_VEGETATION|19;
public final static int RESOURCE_OLIVES=MATERIAL_VEGETATION|20;
public final static int RESOURCE_POTATOES=MATERIAL_VEGETATION|21;
public final static int RESOURCE_CACTUS=MATERIAL_VEGETATION|22;
public final static int RESOURCE_DATES=MATERIAL_VEGETATION|23;
public final static int RESOURCE_SEAWEED=MATERIAL_VEGETATION|24;
public final static int RESOURCE_STONE=MATERIAL_ROCK|25;
public final static int RESOURCE_LIMESTONE=MATERIAL_ROCK|26;
public final static int RESOURCE_FLINT=MATERIAL_ROCK|27;
public final static int RESOURCE_GRANITE=MATERIAL_ROCK|28;
public final static int RESOURCE_OBSIDIAN=MATERIAL_ROCK|29;
public final static int RESOURCE_MARBLE=MATERIAL_ROCK|30;
public final static int RESOURCE_SAND=MATERIAL_ROCK|31;
public final static int RESOURCE_JADE=MATERIAL_PRECIOUS|32;
public final static int RESOURCE_IRON=MATERIAL_METAL|33;
public final static int RESOURCE_LEAD=MATERIAL_METAL|34;
public final static int RESOURCE_BRONZE=MATERIAL_METAL|35;
public final static int RESOURCE_SILVER=MATERIAL_METAL|36;
public final static int RESOURCE_GOLD=MATERIAL_METAL|37;
public final static int RESOURCE_ZINC=MATERIAL_METAL|38;
public final static int RESOURCE_COPPER=MATERIAL_METAL|39;
public final static int RESOURCE_TIN=MATERIAL_METAL|40;
public final static int RESOURCE_MITHRIL=MATERIAL_MITHRIL|41;
public final static int RESOURCE_ADAMANTITE=MATERIAL_MITHRIL|42;
public final static int RESOURCE_STEEL=MATERIAL_METAL|43;
public final static int RESOURCE_BRASS=MATERIAL_METAL|44;
public final static int RESOURCE_WOOD=MATERIAL_WOODEN|45;
public final static int RESOURCE_PINE=MATERIAL_WOODEN|46;
public final static int RESOURCE_BALSA=MATERIAL_WOODEN|47;
public final static int RESOURCE_OAK=MATERIAL_WOODEN|48;
public final static int RESOURCE_MAPLE=MATERIAL_WOODEN|49;
public final static int RESOURCE_REDWOOD=MATERIAL_WOODEN|50;
public final static int RESOURCE_HICKORY=MATERIAL_WOODEN|51;
public final static int RESOURCE_SCALES=MATERIAL_LEATHER|52;
public final static int RESOURCE_FUR=MATERIAL_CLOTH|53;
public final static int RESOURCE_LEATHER=MATERIAL_LEATHER|54;
public final static int RESOURCE_HIDE=MATERIAL_CLOTH|55;
public final static int RESOURCE_WOOL=MATERIAL_CLOTH|56;
public final static int RESOURCE_FEATHERS=MATERIAL_CLOTH|57;
public final static int RESOURCE_COTTON=MATERIAL_CLOTH|58;
public final static int RESOURCE_HEMP=MATERIAL_CLOTH|59;
public final static int RESOURCE_FRESHWATER=MATERIAL_LIQUID|60;
public final static int RESOURCE_SALTWATER=MATERIAL_LIQUID|61;
public final static int RESOURCE_DRINKABLE=MATERIAL_LIQUID|62;
public final static int RESOURCE_GLASS=MATERIAL_GLASS|63;
public final static int RESOURCE_PAPER=MATERIAL_PAPER|64;
public final static int RESOURCE_CLAY=MATERIAL_GLASS|65;
public final static int RESOURCE_CHINA=MATERIAL_GLASS|66;
public final static int RESOURCE_DIAMOND=MATERIAL_PRECIOUS|67;
public final static int RESOURCE_CRYSTAL=MATERIAL_GLASS|68;
public final static int RESOURCE_GEM=MATERIAL_PRECIOUS|69;
public final static int RESOURCE_PEARL=MATERIAL_PRECIOUS|70;
public final static int RESOURCE_PLATINUM=MATERIAL_METAL|71;
public final static int RESOURCE_MILK=MATERIAL_LIQUID|72;
public final static int RESOURCE_EGGS=MATERIAL_FLESH|73;
public final static int RESOURCE_HOPS=MATERIAL_VEGETATION|74;
public final static int RESOURCE_COFFEEBEANS=MATERIAL_VEGETATION|75;
public final static int RESOURCE_COFFEE=MATERIAL_LIQUID|76;
public final static int RESOURCE_OPAL=MATERIAL_PRECIOUS|77;
public final static int RESOURCE_TOPAZ=MATERIAL_PRECIOUS|78;
public final static int RESOURCE_AMETHYST=MATERIAL_PRECIOUS|79;
public final static int RESOURCE_GARNET=MATERIAL_PRECIOUS|80;
public final static int RESOURCE_AMBER=MATERIAL_PRECIOUS|81;
public final static int RESOURCE_AQUAMARINE=MATERIAL_PRECIOUS|82;
public final static int RESOURCE_CRYSOBERYL=MATERIAL_PRECIOUS|83;
public final static int RESOURCE_IRONWOOD=MATERIAL_WOODEN|84;
public final static int RESOURCE_SILK=MATERIAL_CLOTH|85;
public final static int RESOURCE_COCOA=MATERIAL_VEGETATION|86;
public final static int RESOURCE_BLOOD=MATERIAL_LIQUID|87;
public final static int RESOURCE_BONE=MATERIAL_ROCK|88;
public final static int RESOURCE_COAL=MATERIAL_ROCK|89;
public final static int RESOURCE_LAMPOIL=MATERIAL_LIQUID|90;
public final static int RESOURCE_POISON=MATERIAL_LIQUID|91;
public final static int RESOURCE_LIQUOR=MATERIAL_LIQUID|92;
public final static int RESOURCE_SUGAR=MATERIAL_VEGETATION|93;
public final static int RESOURCE_HONEY=MATERIAL_LIQUID|94;
public final static int RESOURCE_BARLEY=MATERIAL_VEGETATION|95;
public final static int RESOURCE_MUSHROOMS=MATERIAL_VEGETATION|96;
public final static int RESOURCE_HERBS=MATERIAL_VEGETATION|97;
public final static int RESOURCE_VINE=MATERIAL_VEGETATION|98;
public final static int RESOURCE_FLOWERS=MATERIAL_VEGETATION|99;
public final static int RESOURCE_PLASTIC=MATERIAL_PLASTIC|100;
public final static int RESOURCE_RUBBER=MATERIAL_PLASTIC|101;
public final static int RESOURCE_EBONY=MATERIAL_ROCK|102;
public final static int RESOURCE_IVORY=MATERIAL_ROCK|103;
public final static int RESOURCE_WAX=MATERIAL_FLESH|104;
public final static int RESOURCE_NUTS=MATERIAL_VEGETATION|105;
public final static int RESOURCE_BREAD=MATERIAL_VEGETATION|106;
public final static int RESOURCE_CRACKER=MATERIAL_VEGETATION|107;
public final static int RESOURCE_YEW=MATERIAL_WOODEN|108;
public final static int RESOURCE_DUST=MATERIAL_ROCK|109;
public final static int RESOURCE_PIPEWEED=MATERIAL_VEGETATION|110;
public final static int RESOURCE_ENERGY=MATERIAL_ENERGY|111;
public final static int RESOURCE_STRAWBERRIES=MATERIAL_VEGETATION|112;
public final static int RESOURCE_BLUEBERRIES=MATERIAL_VEGETATION|113;
public final static int RESOURCE_RASPBERRIES=MATERIAL_VEGETATION|114;
public final static int RESOURCE_BOYSENBERRIES=MATERIAL_VEGETATION|115;
public final static int RESOURCE_BLACKBERRIES=MATERIAL_VEGETATION|116;
public final static int RESOURCE_SMURFBERRIES=MATERIAL_VEGETATION|117;
public final static int RESOURCE_PEACHES=MATERIAL_VEGETATION|118;
public final static int RESOURCE_PLUMS=MATERIAL_VEGETATION|119;
public final static int RESOURCE_ONIONS=MATERIAL_VEGETATION|120;
public final static int RESOURCE_CHERRIES=MATERIAL_VEGETATION|121;
public final static int RESOURCE_GARLIC=MATERIAL_VEGETATION|122;
public final static int RESOURCE_PINEAPPLES=MATERIAL_VEGETATION|123;
public final static int RESOURCE_COCONUTS=MATERIAL_VEGETATION|124;
public final static int RESOURCE_BANANAS=MATERIAL_VEGETATION|125;
public final static int RESOURCE_LIMES=MATERIAL_VEGETATION|126;
public final static int RESOURCE_SAP=MATERIAL_LIQUID|127;
public final static int RESOURCE_ONYX=MATERIAL_PRECIOUS|128;
public final static int RESOURCE_TURQUIOSE=MATERIAL_PRECIOUS|129;
public final static int RESOURCE_PERIDOT=MATERIAL_PRECIOUS|130;
public final static int RESOURCE_QUARTZ=MATERIAL_PRECIOUS|131;
public final static int RESOURCE_LAPIS=MATERIAL_PRECIOUS|132;
public final static int RESOURCE_BLOODSTONE=MATERIAL_PRECIOUS|133;
public final static int RESOURCE_MOONSTONE=MATERIAL_PRECIOUS|134;
public final static int RESOURCE_ALEXANDRITE=MATERIAL_PRECIOUS|135;
public final static int RESOURCE_TEAK=MATERIAL_WOODEN|136;
public final static int RESOURCE_CEDAR=MATERIAL_WOODEN|137;
public final static int RESOURCE_ELM=MATERIAL_WOODEN|138;
public final static int RESOURCE_CHERRYWOOD=MATERIAL_WOODEN|139;
public final static int RESOURCE_BEECHWOOD=MATERIAL_WOODEN|140;
public final static int RESOURCE_WILLOW=MATERIAL_WOODEN|141;
public final static int RESOURCE_SYCAMORE=MATERIAL_WOODEN|142;
public final static int RESOURCE_SPRUCE=MATERIAL_WOODEN|143;
public final static int RESOURCE_MESQUITE=MATERIAL_WOODEN|144;
public final static int RESOURCE_BASALT=MATERIAL_ROCK|145;
public final static int RESOURCE_SHALE=MATERIAL_ROCK|146;
public final static int RESOURCE_PUMICE=MATERIAL_ROCK|147;
public final static int RESOURCE_SANDSTONE=MATERIAL_ROCK|148;
public final static int RESOURCE_SOAPSTONE=MATERIAL_ROCK|149;
public final static int RESOURCE_SALMON=MATERIAL_FLESH|150;
public final static int RESOURCE_CARP=MATERIAL_FLESH|151;
public final static int RESOURCE_TROUT=MATERIAL_FLESH|152;
public final static int RESOURCE_SHRIMP=MATERIAL_FLESH|153;
public final static int RESOURCE_TUNA=MATERIAL_FLESH|154;
public final static int RESOURCE_CATFISH=MATERIAL_FLESH|155;
public final static int RESOURCE_BAMBOO=MATERIAL_WOODEN|156;
public final static int RESOURCE_SOAP=MATERIAL_VEGETATION|157;
public final static int RESOURCE_SPIDERSTEEL=MATERIAL_CLOTH|158;
public final static int RESOURCE_ASH=MATERIAL_VEGETATION|159;
public final static int RESOURCE_PERFUME=MATERIAL_LIQUID|160;
public final static int RESOURCE_ATLANTEANSTEEL=MATERIAL_MITHRIL|161;
public final static int RESOURCE_CHEESE=MATERIAL_VEGETATION|162;
public final static int RESOURCE_MASK=255;
public final static String[] RESOURCE_DESCS={
"NOTHING", //0
"MEAT", //1
"BEEF", //2
"PORK", //3
"POULTRY", //4
"MUTTON", //5
"FISH",//6
"WHEAT", //7
"CORN", //8
"RICE", //9
"CARROTS", //10
"TOMATOES", //11
"PEPPERS", //12
"GREENS",//13
"FRUIT", //14
"APPLES", //15
"BERRIES", //16
"ORANGES", //17
"LEMONS", //18
"GRAPES", //19
"OLIVES",//20
"POTATOES", //21
"CACTUS", //22
"DATES", //23
"SEAWEED", //24
"STONE", //25
"LIMESTONE",//26
"FLINT", //27
"GRANITE", //28
"OBSIDIAN", //29
"MARBLE", //30
"SAND", //31
"JADE", //32
"IRON",//33
"LEAD", //34
"BRONZE", //35
"SILVER", //36
"GOLD", //37
"ZINC", //38
"COPPER", //39
"TIN", //40
"MITHRIL",//41
"ADAMANTITE", //42
"STEEL", //43
"BRASS", //44
"WOOD", //45
"PINE", //46
"BALSA", //47
"OAK", //48
"MAPLE",//49
"REDWOOD", //50
"HICKORY", //51
"SCALES", //52
"FUR", //53
"LEATHER", //54
"HIDE", //55
"WOOL",//56
"FEATHERS",//57
"COTTON", //58
"HEMP",//59
"WATER",//60
"SALT WATER",//61
"LIQUID",//62
"GLASS",//63
"PAPER",//64
"CLAY",//65
"CHINA",//66
"DIAMOND",//67
"CRYSTAL",//68
"GEM", //69
"PEARL", //70
"PLATINUM",//71
"MILK",//72
"EGGS",//73
"HOPS",//74
"COFFEEBEANS",//75
"COFFEE",//76
"OPAL",//77
"TOPAZ",//78
"AMETHYST",//79
"GARNET",//80
"AMBER", //81
"AQUAMARINE", //82
"CRYSOBERYL", //83
"IRONWOOD", //84
"SILK", //85
"COCOA", //86
"BLOOD", //87
"BONE", //88
"COAL", //89
"LAMP OIL", //90
"POISON", // 91
"LIQUOR", // 92
"SUGAR", // 93
"HONEY", // 94
"BARLEY", // 95
"MUSHROOMS", // 96
"HERBS", // 97
"VINE", // 98
"FLOWERS", // 99
"PLASTIC", // 100
"RUBBER", // 101
"EBONY", // 102
"IVORY", // 103
"WAX", // 104
"NUTS", // 105
"BREAD", // 106
"CRACKER", // 107
"YEW", // 108
"DUST", // 109
"PIPEWEED", // 110
"ENERGY", // 111
"STRAWBERRIES", // 112
"BLUEBERRIES", // 113
"RASPBERRIES", // 114
"BOYSENBERRIES", // 115
"BLACKBERRIES", // 116
"SMURFBERRIES", // 117
"PEACHES", // 118
"PLUMS", // 119
"ONIONS", // 120
"CHERRIES", // 121
"GARLIC", // 122
"PINEAPPLES", // 123
"COCONUTS", // 124
"BANANAS", // 125
"LIMES", // 126
"SAP", // 127
"ONYX", // 128
"TURQUOISE", // 129
"PERIDOT", // 130
"QUARTZ", // 131
"LAPIS", // 133
"BLOODSTONE", // 133
"MOONSTONE", // 134
"ALEXANDRITE", // 135
"TEAK", // 136
"CEDAR", // 137
"ELM", // 138
"CHERRYWOOD", // 139
"BEECHWOOD", // 140
"WILLOW", // 141
"SYCAMORE", // 142
"SPRUCE", // 143
"MESQUITE", // 144
"BASALT", // 145
"SHALE", // 146
"PUMICE", // 147
"SANDSTONE", // 148
"SOAPSTONE", // 149
"SALMON", // 150
"CARP", // 151
"TROUT", // 152
"SHRIMP", // 153
"TUNA", // 154
"CATFISH", // 155
"BAMBOO", // 156
"SOAP", // 157
"SPIDERSTEEL", // 158
"ASH", // 159
"PERFUME", // 160
"ATLANTITE",//161
"CHEESE",//162
};
public final static int DATA_CODE=0;
public final static int DATA_VALUE=1;
public final static int DATA_FREQ=2;
public final static int DATA_STRENGTH=3;
public final static int DATA_BOUANCY=4;
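// Rows are ordered by resource ordinal, so RESOURCE_DATA[code & RESOURCE_MASK]
// is the row for a given resource code; columns follow the DATA_* constants above.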
public final static int[][] RESOURCE_DATA={
// full code, base value, frequency, strength (1-10), buoyancy
{RESOURCE_NOTHING, 0, 0, 0, 0},
{RESOURCE_MEAT, 4, 20, 1, 3000},
{RESOURCE_BEEF, 6, 20, 1, 3000},
{RESOURCE_PORK, 8, 20, 1, 2500},
{RESOURCE_POULTRY, 3, 20, 1, 2000},
{RESOURCE_MUTTON, 4, 20, 1, 2800},
{RESOURCE_FISH, 5, 100,1, 590},
{RESOURCE_WHEAT, 1, 20, 1, 770},
{RESOURCE_CORN, 1, 20, 1, 720},
{RESOURCE_RICE, 1, 20, 1, 750},
{RESOURCE_CARROTS, 1, 5, 1, 720},
{RESOURCE_TOMATOES, 1, 5, 1, 640},
{RESOURCE_PEPPERS, 1, 5, 1, 640},
{RESOURCE_GREENS, 1, 5, 1, 540},
{RESOURCE_FRUIT, 2, 10, 1, 720},
{RESOURCE_APPLES, 2, 10, 1, 640},
{RESOURCE_BERRIES, 2, 15, 1, 720},
{RESOURCE_ORANGES, 2, 10, 1, 640},
{RESOURCE_LEMONS, 2, 10, 1, 480},
{RESOURCE_GRAPES, 3, 5, 1, 680},
{RESOURCE_OLIVES, 2, 5, 1, 640},
{RESOURCE_POTATOES, 1, 5, 1, 770},
{RESOURCE_CACTUS, 2, 5, 1, 680},
{RESOURCE_DATES, 2, 2, 1, 720},
{RESOURCE_SEAWEED, 1, 50, 1, 540},
{RESOURCE_STONE, 1, 80, 5, 2500},
{RESOURCE_LIMESTONE, 1, 20, 4, 1550},
{RESOURCE_FLINT, 1, 10, 4, 2600},
{RESOURCE_GRANITE, 2, 10, 6, 2690},
{RESOURCE_OBSIDIAN, 10, 5, 6, 2650},
{RESOURCE_MARBLE, 20, 5, 5, 2560},
{RESOURCE_SAND, 1, 50, 1, 1600},
{RESOURCE_JADE, 50, 2, 5, 3800},
{RESOURCE_IRON, 20, 10, 6, 7900},
{RESOURCE_LEAD, 10, 10, 5, 11300},
{RESOURCE_BRONZE, 10, 10, 5, 8100},
{RESOURCE_SILVER, 30, 2, 5, 10500},
{RESOURCE_GOLD, 50, 1, 5, 19320},
{RESOURCE_ZINC, 10, 5, 5, 7100},
{RESOURCE_COPPER, 10, 10, 5, 8900},
{RESOURCE_TIN, 10, 10, 4, 7300},
{RESOURCE_MITHRIL, 200,1, 9, 3990},
{RESOURCE_ADAMANTITE, 500,1, 10, 4500},
{RESOURCE_STEEL, 150,0, 8, 7840},
{RESOURCE_BRASS, 120,0, 6, 8500},
{RESOURCE_WOOD, 2, 10, 3, 920},
{RESOURCE_PINE, 4, 10, 3, 650},
{RESOURCE_BALSA, 1, 5, 2, 130},
{RESOURCE_OAK, 5, 10, 3, 720},
{RESOURCE_MAPLE, 10, 5, 3, 689},
{RESOURCE_REDWOOD, 20, 2, 3, 450},
{RESOURCE_HICKORY, 5, 5, 3, 830},
{RESOURCE_SCALES, 10, 20, 4, 1800},
{RESOURCE_FUR, 20, 20, 2, 890},
{RESOURCE_LEATHER, 10, 20, 2, 945},
{RESOURCE_HIDE, 4, 20, 1, 920},
{RESOURCE_WOOL, 10, 20, 1, 1310},
{RESOURCE_FEATHERS, 10, 20, 1, 20},
{RESOURCE_COTTON, 5, 20, 1, 590},
{RESOURCE_HEMP, 4, 10, 1, 720},
{RESOURCE_FRESHWATER, 0, 100,0, 1000},
{RESOURCE_SALTWATER, 0, 100,0, 1030},
{RESOURCE_DRINKABLE, 0, 1, 0, 1000},
{RESOURCE_GLASS, 10, 0, 3, 2800},
{RESOURCE_PAPER, 10, 0, 0, 920},
{RESOURCE_CLAY, 1, 50, 1, 1750},
{RESOURCE_CHINA, 30, 0, 3, 2400},
{RESOURCE_DIAMOND, 5000, 1, 9, 3510},
{RESOURCE_CRYSTAL, 10, 5, 3, 2200},
{RESOURCE_GEM, 100,1, 3, 3500},
{RESOURCE_PEARL, 1000, 1, 4, 2000},
{RESOURCE_PLATINUM, 80, 1, 6, 21450},
{RESOURCE_MILK, 2, 10, 0, 1020},
{RESOURCE_EGGS, 2, 10, 0, 1120},
{RESOURCE_HOPS, 2, 20, 1, 340},
{RESOURCE_COFFEEBEANS, 2, 10, 1, 560},
{RESOURCE_COFFEE, 0, 10, 0, 430},
{RESOURCE_OPAL, 80, 2, 5, 2250},
{RESOURCE_TOPAZ, 200,2, 5, 3570},
{RESOURCE_AMETHYST, 300,2, 5, 2651},
{RESOURCE_GARNET, 70, 2, 5, 3870},
{RESOURCE_AMBER, 80, 5, 5, 2500},
{RESOURCE_AQUAMARINE, 50, 2, 5, 2800},
{RESOURCE_CRYSOBERYL, 50, 2, 5, 2800},
{RESOURCE_IRONWOOD, 25, 5, 4, 990},
{RESOURCE_SILK, 200,5, 1, 1600},
{RESOURCE_COCOA, 4, 5, 0, 590},
{RESOURCE_BLOOD, 1, 100,0, 1025},
{RESOURCE_BONE, 1, 100,5, 1600},
{RESOURCE_COAL, 1, 50, 1, 1800},
{RESOURCE_LAMPOIL, 1, 10, 1, 880},
{RESOURCE_POISON, 1, 1, 1, 1000},
{RESOURCE_LIQUOR, 10, 1, 1, 790},
{RESOURCE_SUGAR, 1, 50, 1, 1600},
{RESOURCE_HONEY, 1, 50, 1, 1600},
{RESOURCE_BARLEY, 1, 20, 1, 610},
{RESOURCE_MUSHROOMS, 1, 20, 1, 500},
{RESOURCE_HERBS, 1, 10, 1, 770},
{RESOURCE_VINE, 1, 10, 1, 880},
{RESOURCE_FLOWERS, 1, 10, 1, 720},
{RESOURCE_PLASTIC, 25, 0, 4, 950},
{RESOURCE_RUBBER, 25, 0, 1, 1506},
{RESOURCE_EBONY, 5, 5, 5, 2910},
{RESOURCE_IVORY, 5, 5, 3, 1840},
{RESOURCE_WAX, 1, 0, 0, 900},
{RESOURCE_NUTS, 0, 20, 0, 640},
{RESOURCE_BREAD, 3, 0, 0, 660},
{RESOURCE_CRACKER, 2, 0, 0, 200},
{RESOURCE_YEW, 15, 2, 5, 850},
{RESOURCE_DUST, 0, 20, 0, 1120},
{RESOURCE_PIPEWEED, 3, 10, 1, 320},
{RESOURCE_ENERGY, 30, 0, 4, 0},
{RESOURCE_STRAWBERRIES, 10, 1, 1, 750},
{RESOURCE_BLUEBERRIES, 10, 1, 1, 750},
{RESOURCE_RASPBERRIES, 10, 1, 1, 750},
{RESOURCE_BOYSENBERRIES,10, 1, 1, 750},
{RESOURCE_BLACKBERRIES, 10, 1, 1, 750},
{RESOURCE_SMURFBERRIES, 10, 1, 1, 750},
{RESOURCE_PEACHES, 10, 1, 1, 700},
{RESOURCE_PLUMS, 10, 1, 1, 710},
{RESOURCE_ONIONS, 10, 1, 1, 760},
{RESOURCE_CHERRIES, 10, 1, 1, 810},
{RESOURCE_GARLIC, 10, 1, 1, 815},
{RESOURCE_PINEAPPLES, 10, 1, 1, 500},
{RESOURCE_COCONUTS, 10, 1, 2, 250},
{RESOURCE_BANANAS, 10, 1, 1, 790},
{RESOURCE_LIMES, 10, 1, 1, 690},
{RESOURCE_SAP, 10, 1, 1, 1600},
{RESOURCE_ONYX, 70, 1, 8, 3300},
{RESOURCE_TURQUIOSE, 70, 1, 8, 3300},
{RESOURCE_PERIDOT, 65, 1, 6, 3300},
{RESOURCE_QUARTZ, 25, 1, 5, 3300},
{RESOURCE_LAPIS, 70, 1, 6, 3300},
{RESOURCE_BLOODSTONE, 85, 1, 8, 3300},
{RESOURCE_MOONSTONE, 90, 1, 8, 3300},
{RESOURCE_ALEXANDRITE, 95, 1, 9, 3300},
{RESOURCE_TEAK, 20, 2, 3, 1000},
{RESOURCE_CEDAR, 15, 2, 3, 900},
{RESOURCE_ELM, 15, 2, 3, 1100},
{RESOURCE_CHERRYWOOD, 17, 2, 3, 900},
{RESOURCE_BEECHWOOD, 12, 2, 3, 975},
{RESOURCE_WILLOW, 12, 2, 1, 1000},
{RESOURCE_SYCAMORE, 11, 2, 2, 1000},
{RESOURCE_SPRUCE, 12, 2, 3, 990},
{RESOURCE_MESQUITE, 9, 2, 3, 1150},
{RESOURCE_BASALT, 10, 2, 4, 3300},
{RESOURCE_SHALE, 5, 2, 2, 1200},
{RESOURCE_PUMICE, 5, 2, 4, 600},
{RESOURCE_SANDSTONE, 10, 2, 2, 3500},
{RESOURCE_SOAPSTONE, 60, 2, 5, 3600},
{RESOURCE_SALMON, 6, 1, 1, 1000},
{RESOURCE_CARP, 6, 1, 1, 1000},
{RESOURCE_TROUT, 6, 1, 1, 1000},
{RESOURCE_SHRIMP, 6, 1, 1, 1000},
{RESOURCE_TUNA, 6, 1, 1, 1000},
{RESOURCE_CATFISH, 6, 1, 1, 1000},
{RESOURCE_BAMBOO, 15, 10, 4, 120},
{RESOURCE_SOAP, 1, 0, 1, 430},
{RESOURCE_SPIDERSTEEL, 250,0, 2, 630},
{RESOURCE_ASH, 1, 0, 0, 50},
{RESOURCE_PERFUME, 1, 1, 1, 1000},
{RESOURCE_ATLANTEANSTEEL,1500,1,6, 850},
{RESOURCE_CHEESE, 25, 0, 1, 640},
};
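// Groupings of a generic resource with its specific variants.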
public static final int[] FISHES={
RESOURCE_FISH,
RESOURCE_SALMON,
RESOURCE_CARP,
RESOURCE_TROUT,
RESOURCE_SHRIMP,
RESOURCE_TUNA,
RESOURCE_CATFISH
};
public static final int[] BERRIES={
RESOURCE_BERRIES,
RESOURCE_STRAWBERRIES,
RESOURCE_BLUEBERRIES,
RESOURCE_RASPBERRIES,
RESOURCE_BOYSENBERRIES,
RESOURCE_BLACKBERRIES,
RESOURCE_SMURFBERRIES
};
public final static String[] RESOURCE_SMELLS={
// smell adjective for each resource, indexed by (resource code & RESOURCE_MASK); empty string = no distinctive smell
"",//RESOURCE_NOTHING
"",//RESOURCE_MEAT
"",//RESOURCE_BEEF
"",//RESOURCE_PORK
"",//RESOURCE_POULTRY
"",//RESOURCE_MUTTON
"strong fishy",//RESOURCE_FISH
"",//RESOURCE_WHEAT
"",//RESOURCE_CORN
"",//RESOURCE_RICE
"",//RESOURCE_CARROTS
"",//RESOURCE_TOMATOES
"spicy",//RESOURCE_PEPPERS
"very mild",//RESOURCE_GREENS
"sweet and fruity",//RESOURCE_FRUIT
"sweet apply",//RESOURCE_APPLES
"sweet berry",//RESOURCE_BERRIES
"citrusy",//RESOURCE_ORANGES
"strong citrusy",//RESOURCE_LEMONS
"mild sweet",//RESOURCE_GRAPES
"pickly olive",//RESOURCE_OLIVES
"",//RESOURCE_POTATOES
"",//RESOURCE_CACTUS
"sweet plumy",//RESOURCE_DATES
"",//RESOURCE_SEAWEED
"mild musty",//RESOURCE_STONE
"",//RESOURCE_LIMESTONE
"",//RESOURCE_FLINT
"",//RESOURCE_GRANITE
"",//RESOURCE_OBSIDIAN
"",//RESOURCE_MARBLE
"",//RESOURCE_SAND
"",//RESOURCE_JADE
"",//RESOURCE_IRON
"",//RESOURCE_LEAD
"",//RESOURCE_BRONZE
"",//RESOURCE_SILVER
"",//RESOURCE_GOLD
"",//RESOURCE_ZINC
"",//RESOURCE_COPPER
"",//RESOURCE_TIN
"",//RESOURCE_MITHRIL
"",//RESOURCE_ADAMANTITE
"",//RESOURCE_STEEL
"",//RESOURCE_BRASS
"",//RESOURCE_WOOD
"fresh, clean piney",//RESOURCE_PINE
"",//RESOURCE_BALSA
"rich oaky",//RESOURCE_OAK
"mild maply",//RESOURCE_MAPLE
"",//RESOURCE_REDWOOD
"",//RESOURCE_HICKORY
"",//RESOURCE_SCALES
"musky",//RESOURCE_FUR
"strong leathery",//RESOURCE_LEATHER
"mild stinky",//RESOURCE_HIDE
"",//RESOURCE_WOOL
"",//RESOURCE_FEATHERS
"",//RESOURCE_COTTON
"grassy",//RESOURCE_HEMP
"",//RESOURCE_FRESHWATER
"",//RESOURCE_SALTWATER
"",//RESOURCE_DRINKABLE
"",//RESOURCE_GLASS
"",//RESOURCE_PAPER
"mild dusty",//RESOURCE_CLAY
"",//RESOURCE_CHINA
"",//RESOURCE_DIAMOND
"",//RESOURCE_CRYSTAL
"",//RESOURCE_GEM
"",//RESOURCE_PEARL
"",//RESOURCE_PLATINUM
"mild milky",//RESOURCE_MILK
"",//RESOURCE_EGGS
"mild grainy",//RESOURCE_HOPS
"mild coffee",//RESOURCE_COFFEEBEANS
"rich coffee",//RESOURCE_COFFEE
"",//RESOURCE_OPAL
"",//RESOURCE_TOPAZ
"",//RESOURCE_AMETHYST
"",//RESOURCE_GARNET
"",//RESOURCE_AMBER
"",//RESOURCE_AQUAMARINE
"",//RESOURCE_CRYSOBERYL
"",//RESOURCE_IRONWOOD
"",//RESOURCE_SILK
"",//RESOURCE_COCOA
"strong salty",//RESOURCE_BLOOD
"",//RESOURCE_BONE
"chalky",//RESOURCE_COAL
"light oily",//RESOURCE_LAMPOIL
"",//RESOURCE_POISON
"alcohol",//RESOURCE_LIQUOR
"",//RESOURCE_SUGAR
"",//RESOURCE_HONEY
"",//RESOURCE_BARLEY
"",//RESOURCE_MUSHROOMS
"fresh herbal",//RESOURCE_HERBS
"rich green",//RESOURCE_VINE
"nice floral",//RESOURCE_FLOWERS
"",//RESOURCE_PLASTIC
"sour rubbery",//RESOURCE_RUBBER
"",//RESOURCE_EBONY
"",//RESOURCE_IVORY
"",//RESOURCE_WAX
"mild nutty",//RESOURCE_NUTS
"",//RESOURCE_BREAD
"",//RESOURCE_CRACKER
"",//RESOURCE_YEW
"dusty",//RESOURCE_DUST
"strong grassy",//RESOURCE_PIPEWEED
"",//RESOURCE_ENERGY
"sweet berry",//RESOURCE_STRAWBERRIES
"sweet berry",//RESOURCE_BLUEBERRIES
"sweet berry",//RESOURCE_RASPBERRIES
"sweet berry",//RESOURCE_BOYSENBERRIES
"sweet berry",//RESOURCE_BLACKBERRIES
"sweet berry",//RESOURCE_SMURFBERRIES
"peachy",//RESOURCE_PEACHES
"sweey plumy",//RESOURCE_PLUMS
"stinging oniony",//RESOURCE_ONIONS
"cherry",//RESOURCE_CHERRIES
"",//RESOURCE_GARLIC
"fruity",//RESOURCE_PINEAPPLES
"",//RESOURCE_COCONUTS
"pungent banana",//RESOURCE_BANANAS
"citrusy",//RESOURCE_LIMES
"strong maply",//RESOURCE_SAP
"",//RESOURCE_ONYX
"",//RESOURCE_TURQUIOSE
"",//RESOURCE_PERIDOT
"",//RESOURCE_QUARTZ
"",//RESOURCE_LAPIS
"",//RESOURCE_BLOODSTONE
"",//RESOURCE_MOONSTONE
"",//RESOURCE_ALEXANDRITE
"",//RESOURCE_TEAK
"strong cedar",//RESOURCE_CEDAR
"",//RESOURCE_ELM
"",//RESOURCE_CHERRYWOOD
"",//RESOURCE_BEECHWOOD
"",//RESOURCE_WILLOW
"",//RESOURCE_SYCAMORE
"",//RESOURCE_SPRUCE
"rich mesquite",//RESOURCE_MESQUITE
"",//RESOURCE_BASALT
"",//RESOURCE_SHALE
"",//RESOURCE_PUMICE
"",//RESOURCE_SANDSTONE
"",//RESOURCE_SOAPSTONE
"strong fishy",//RESOURCE_SALMON
"strong fishy",//RESOURCE_CARP
"strong fishy",//RESOURCE_TROUT
"mild fishy",//RESOURCE_SHRIMP
"strong fishy",//RESOURCE_TUNA
"strong fishy",//RESOURCE_CATFISH
"",//RESOURCE_BAMBOO
"light fragrant",//RESOURCE_SOAP
"",//RESOURCE_SPIDERSTEEL
"dusty",//RESOURCE_ASH
"strong fragrant",//RESOURCE_PERFUME
"",//RESOURCE_ATLANTEANSTEEL
"mild cheesy"
};
} | com/planet_ink/coffee_mud/Items/interfaces/RawMaterial.java | package com.planet_ink.coffee_mud.Items.interfaces;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
/*
Copyright 2000-2006 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public interface RawMaterial extends Item
{
public int domainSource();
public void setDomainSource(int src);
// item materials
public final static int MATERIAL_UNKNOWN=0;
public final static int MATERIAL_CLOTH=1<<8;
public final static int MATERIAL_LEATHER=2<<8;
public final static int MATERIAL_METAL=3<<8;
public final static int MATERIAL_MITHRIL=4<<8;
public final static int MATERIAL_WOODEN=5<<8;
public final static int MATERIAL_GLASS=6<<8;
public final static int MATERIAL_VEGETATION=7<<8;
public final static int MATERIAL_FLESH=8<<8;
public final static int MATERIAL_PAPER=9<<8;
public final static int MATERIAL_ROCK=10<<8;
public final static int MATERIAL_LIQUID=11<<8;
public final static int MATERIAL_PRECIOUS=12<<8;
public final static int MATERIAL_ENERGY=13<<8;
public final static int MATERIAL_PLASTIC=14<<8;
public final static int MATERIAL_MASK=255<<8;
public final static String[] MATERIAL_DESCS={
"UNKNOWN",
"CLOTH",
"LEATHER",
"METAL",
"MITHRIL",
"WOODEN",
"GLASS",
"VEGETATION",
"FLESH",
"PAPER",
"ROCK",
"LIQUID",
"PRECIOUS",
"ENERGY",
"PLASTIC"};
public final static String[] MATERIAL_NOUNDESCS={
"Unknown material",
"Cloth",
"Leather",
"Metal",
"Metal",
"Wood",
"Glass",
"Vegetation",
"Flesh",
"Paper",
"Rock",
"Liquid",
"Stone",
"Energy",
"Plastic"};
public final static int RESOURCE_NOTHING=MATERIAL_UNKNOWN|0;
public final static int RESOURCE_MEAT=MATERIAL_FLESH|1;
public final static int RESOURCE_BEEF=MATERIAL_FLESH|2;
public final static int RESOURCE_PORK=MATERIAL_FLESH|3;
public final static int RESOURCE_POULTRY=MATERIAL_FLESH|4;
public final static int RESOURCE_MUTTON=MATERIAL_FLESH|5;
public final static int RESOURCE_FISH=MATERIAL_FLESH|6;
public final static int RESOURCE_WHEAT=MATERIAL_VEGETATION|7;
public final static int RESOURCE_CORN=MATERIAL_VEGETATION|8;
public final static int RESOURCE_RICE=MATERIAL_VEGETATION|9;
public final static int RESOURCE_CARROTS=MATERIAL_VEGETATION|10;
public final static int RESOURCE_TOMATOES=MATERIAL_VEGETATION|11;
public final static int RESOURCE_PEPPERS=MATERIAL_VEGETATION|12;
public final static int RESOURCE_GREENS=MATERIAL_VEGETATION|13;
public final static int RESOURCE_FRUIT=MATERIAL_VEGETATION|14;
public final static int RESOURCE_APPLES=MATERIAL_VEGETATION|15;
public final static int RESOURCE_BERRIES=MATERIAL_VEGETATION|16;
public final static int RESOURCE_ORANGES=MATERIAL_VEGETATION|17;
public final static int RESOURCE_LEMONS=MATERIAL_VEGETATION|18;
public final static int RESOURCE_GRAPES=MATERIAL_VEGETATION|19;
public final static int RESOURCE_OLIVES=MATERIAL_VEGETATION|20;
public final static int RESOURCE_POTATOES=MATERIAL_VEGETATION|21;
public final static int RESOURCE_CACTUS=MATERIAL_VEGETATION|22;
public final static int RESOURCE_DATES=MATERIAL_VEGETATION|23;
public final static int RESOURCE_SEAWEED=MATERIAL_VEGETATION|24;
public final static int RESOURCE_STONE=MATERIAL_ROCK|25;
public final static int RESOURCE_LIMESTONE=MATERIAL_ROCK|26;
public final static int RESOURCE_FLINT=MATERIAL_ROCK|27;
public final static int RESOURCE_GRANITE=MATERIAL_ROCK|28;
public final static int RESOURCE_OBSIDIAN=MATERIAL_ROCK|29;
public final static int RESOURCE_MARBLE=MATERIAL_ROCK|30;
public final static int RESOURCE_SAND=MATERIAL_ROCK|31;
public final static int RESOURCE_JADE=MATERIAL_PRECIOUS|32;
public final static int RESOURCE_IRON=MATERIAL_METAL|33;
public final static int RESOURCE_LEAD=MATERIAL_METAL|34;
public final static int RESOURCE_BRONZE=MATERIAL_METAL|35;
public final static int RESOURCE_SILVER=MATERIAL_METAL|36;
public final static int RESOURCE_GOLD=MATERIAL_METAL|37;
public final static int RESOURCE_ZINC=MATERIAL_METAL|38;
public final static int RESOURCE_COPPER=MATERIAL_METAL|39;
public final static int RESOURCE_TIN=MATERIAL_METAL|40;
public final static int RESOURCE_MITHRIL=MATERIAL_MITHRIL|41;
public final static int RESOURCE_ADAMANTITE=MATERIAL_MITHRIL|42;
public final static int RESOURCE_STEEL=MATERIAL_METAL|43;
public final static int RESOURCE_BRASS=MATERIAL_METAL|44;
public final static int RESOURCE_WOOD=MATERIAL_WOODEN|45;
public final static int RESOURCE_PINE=MATERIAL_WOODEN|46;
public final static int RESOURCE_BALSA=MATERIAL_WOODEN|47;
public final static int RESOURCE_OAK=MATERIAL_WOODEN|48;
public final static int RESOURCE_MAPLE=MATERIAL_WOODEN|49;
public final static int RESOURCE_REDWOOD=MATERIAL_WOODEN|50;
public final static int RESOURCE_HICKORY=MATERIAL_WOODEN|51;
public final static int RESOURCE_SCALES=MATERIAL_LEATHER|52;
public final static int RESOURCE_FUR=MATERIAL_CLOTH|53;
public final static int RESOURCE_LEATHER=MATERIAL_LEATHER|54;
public final static int RESOURCE_HIDE=MATERIAL_CLOTH|55;
public final static int RESOURCE_WOOL=MATERIAL_CLOTH|56;
public final static int RESOURCE_FEATHERS=MATERIAL_CLOTH|57;
public final static int RESOURCE_COTTON=MATERIAL_CLOTH|58;
public final static int RESOURCE_HEMP=MATERIAL_CLOTH|59;
public final static int RESOURCE_FRESHWATER=MATERIAL_LIQUID|60;
public final static int RESOURCE_SALTWATER=MATERIAL_LIQUID|61;
public final static int RESOURCE_DRINKABLE=MATERIAL_LIQUID|62;
public final static int RESOURCE_GLASS=MATERIAL_GLASS|63;
public final static int RESOURCE_PAPER=MATERIAL_PAPER|64;
public final static int RESOURCE_CLAY=MATERIAL_GLASS|65;
public final static int RESOURCE_CHINA=MATERIAL_GLASS|66;
public final static int RESOURCE_DIAMOND=MATERIAL_PRECIOUS|67;
public final static int RESOURCE_CRYSTAL=MATERIAL_GLASS|68;
public final static int RESOURCE_GEM=MATERIAL_PRECIOUS|69;
public final static int RESOURCE_PEARL=MATERIAL_PRECIOUS|70;
public final static int RESOURCE_PLATINUM=MATERIAL_METAL|71;
public final static int RESOURCE_MILK=MATERIAL_LIQUID|72;
public final static int RESOURCE_EGGS=MATERIAL_FLESH|73;
public final static int RESOURCE_HOPS=MATERIAL_VEGETATION|74;
public final static int RESOURCE_COFFEEBEANS=MATERIAL_VEGETATION|75;
public final static int RESOURCE_COFFEE=MATERIAL_LIQUID|76;
public final static int RESOURCE_OPAL=MATERIAL_PRECIOUS|77;
public final static int RESOURCE_TOPAZ=MATERIAL_PRECIOUS|78;
public final static int RESOURCE_AMETHYST=MATERIAL_PRECIOUS|79;
public final static int RESOURCE_GARNET=MATERIAL_PRECIOUS|80;
public final static int RESOURCE_AMBER=MATERIAL_PRECIOUS|81;
public final static int RESOURCE_AQUAMARINE=MATERIAL_PRECIOUS|82;
public final static int RESOURCE_CRYSOBERYL=MATERIAL_PRECIOUS|83;
public final static int RESOURCE_IRONWOOD=MATERIAL_WOODEN|84;
public final static int RESOURCE_SILK=MATERIAL_CLOTH|85;
public final static int RESOURCE_COCOA=MATERIAL_VEGETATION|86;
public final static int RESOURCE_BLOOD=MATERIAL_LIQUID|87;
public final static int RESOURCE_BONE=MATERIAL_ROCK|88;
public final static int RESOURCE_COAL=MATERIAL_ROCK|89;
public final static int RESOURCE_LAMPOIL=MATERIAL_LIQUID|90;
public final static int RESOURCE_POISON=MATERIAL_LIQUID|91;
public final static int RESOURCE_LIQUOR=MATERIAL_LIQUID|92;
public final static int RESOURCE_SUGAR=MATERIAL_VEGETATION|93;
public final static int RESOURCE_HONEY=MATERIAL_LIQUID|94;
public final static int RESOURCE_BARLEY=MATERIAL_VEGETATION|95;
public final static int RESOURCE_MUSHROOMS=MATERIAL_VEGETATION|96;
public final static int RESOURCE_HERBS=MATERIAL_VEGETATION|97;
public final static int RESOURCE_VINE=MATERIAL_VEGETATION|98;
public final static int RESOURCE_FLOWERS=MATERIAL_VEGETATION|99;
public final static int RESOURCE_PLASTIC=MATERIAL_PLASTIC|100;
public final static int RESOURCE_RUBBER=MATERIAL_PLASTIC|101;
public final static int RESOURCE_EBONY=MATERIAL_ROCK|102;
public final static int RESOURCE_IVORY=MATERIAL_ROCK|103;
public final static int RESOURCE_WAX=MATERIAL_FLESH|104;
public final static int RESOURCE_NUTS=MATERIAL_VEGETATION|105;
public final static int RESOURCE_BREAD=MATERIAL_VEGETATION|106;
public final static int RESOURCE_CRACKER=MATERIAL_VEGETATION|107;
public final static int RESOURCE_YEW=MATERIAL_WOODEN|108;
public final static int RESOURCE_DUST=MATERIAL_ROCK|109;
public final static int RESOURCE_PIPEWEED=MATERIAL_VEGETATION|110;
public final static int RESOURCE_ENERGY=MATERIAL_ENERGY|111;
public final static int RESOURCE_STRAWBERRIES=MATERIAL_VEGETATION|112;
public final static int RESOURCE_BLUEBERRIES=MATERIAL_VEGETATION|113;
public final static int RESOURCE_RASPBERRIES=MATERIAL_VEGETATION|114;
public final static int RESOURCE_BOYSENBERRIES=MATERIAL_VEGETATION|115;
public final static int RESOURCE_BLACKBERRIES=MATERIAL_VEGETATION|116;
public final static int RESOURCE_SMURFBERRIES=MATERIAL_VEGETATION|117;
public final static int RESOURCE_PEACHES=MATERIAL_VEGETATION|118;
public final static int RESOURCE_PLUMS=MATERIAL_VEGETATION|119;
public final static int RESOURCE_ONIONS=MATERIAL_VEGETATION|120;
public final static int RESOURCE_CHERRIES=MATERIAL_VEGETATION|121;
public final static int RESOURCE_GARLIC=MATERIAL_VEGETATION|122;
public final static int RESOURCE_PINEAPPLES=MATERIAL_VEGETATION|123;
public final static int RESOURCE_COCONUTS=MATERIAL_VEGETATION|124;
public final static int RESOURCE_BANANAS=MATERIAL_VEGETATION|125;
public final static int RESOURCE_LIMES=MATERIAL_VEGETATION|126;
public final static int RESOURCE_SAP=MATERIAL_LIQUID|127;
public final static int RESOURCE_ONYX=MATERIAL_PRECIOUS|128;
public final static int RESOURCE_TURQUIOSE=MATERIAL_PRECIOUS|129;
public final static int RESOURCE_PERIDOT=MATERIAL_PRECIOUS|130;
public final static int RESOURCE_QUARTZ=MATERIAL_PRECIOUS|131;
public final static int RESOURCE_LAPIS=MATERIAL_PRECIOUS|132;
public final static int RESOURCE_BLOODSTONE=MATERIAL_PRECIOUS|133;
public final static int RESOURCE_MOONSTONE=MATERIAL_PRECIOUS|134;
public final static int RESOURCE_ALEXANDRITE=MATERIAL_PRECIOUS|135;
public final static int RESOURCE_TEAK=MATERIAL_WOODEN|136;
public final static int RESOURCE_CEDAR=MATERIAL_WOODEN|137;
public final static int RESOURCE_ELM=MATERIAL_WOODEN|138;
public final static int RESOURCE_CHERRYWOOD=MATERIAL_WOODEN|139;
public final static int RESOURCE_BEECHWOOD=MATERIAL_WOODEN|140;
public final static int RESOURCE_WILLOW=MATERIAL_WOODEN|141;
public final static int RESOURCE_SYCAMORE=MATERIAL_WOODEN|142;
public final static int RESOURCE_SPRUCE=MATERIAL_WOODEN|143;
public final static int RESOURCE_MESQUITE=MATERIAL_WOODEN|144;
public final static int RESOURCE_BASALT=MATERIAL_ROCK|145;
public final static int RESOURCE_SHALE=MATERIAL_ROCK|146;
public final static int RESOURCE_PUMICE=MATERIAL_ROCK|147;
public final static int RESOURCE_SANDSTONE=MATERIAL_ROCK|148;
public final static int RESOURCE_SOAPSTONE=MATERIAL_ROCK|149;
public final static int RESOURCE_SALMON=MATERIAL_FLESH|150;
public final static int RESOURCE_CARP=MATERIAL_FLESH|151;
public final static int RESOURCE_TROUT=MATERIAL_FLESH|152;
public final static int RESOURCE_SHRIMP=MATERIAL_FLESH|153;
public final static int RESOURCE_TUNA=MATERIAL_FLESH|154;
public final static int RESOURCE_CATFISH=MATERIAL_FLESH|155;
public final static int RESOURCE_BAMBOO=MATERIAL_WOODEN|156;
public final static int RESOURCE_SOAP=MATERIAL_VEGETATION|157;
public final static int RESOURCE_SPIDERSTEEL=MATERIAL_CLOTH|158;
public final static int RESOURCE_ASH=MATERIAL_VEGETATION|159;
public final static int RESOURCE_PERFUME=MATERIAL_LIQUID|160;
public final static int RESOURCE_ATLANTEANSTEEL=MATERIAL_MITHRIL|161;
public final static int RESOURCE_CHEESE=MATERIAL_LIQUID|162;
public final static int RESOURCE_MASK=255;
public final static String[] RESOURCE_DESCS={
"NOTHING", //0
"MEAT", //1
"BEEF", //2
"PORK", //3
"POULTRY", //4
"MUTTON", //5
"FISH",//6
"WHEAT", //7
"CORN", //8
"RICE", //9
"CARROTS", //10
"TOMATOES", //11
"PEPPERS", //12
"GREENS",//13
"FRUIT", //14
"APPLES", //15
"BERRIES", //16
"ORANGES", //17
"LEMONS", //18
"GRAPES", //19
"OLIVES",//20
"POTATOES", //21
"CACTUS", //22
"DATES", //23
"SEAWEED", //24
"STONE", //25
"LIMESTONE",//26
"FLINT", //27
"GRANITE", //28
"OBSIDIAN", //29
"MARBLE", //30
"SAND", //31
"JADE", //32
"IRON",//33
"LEAD", //34
"BRONZE", //35
"SILVER", //36
"GOLD", //37
"ZINC", //38
"COPPER", //39
"TIN", //40
"MITHRIL",//41
"ADAMANTITE", //42
"STEEL", //43
"BRASS", //44
"WOOD", //45
"PINE", //46
"BALSA", //47
"OAK", //48
"MAPLE",//49
"REDWOOD", //50
"HICKORY", //51
"SCALES", //52
"FUR", //53
"LEATHER", //54
"HIDE", //55
"WOOL",//56
"FEATHERS",//57
"COTTON", //58
"HEMP",//59
"WATER",//60
"SALT WATER",//61
"LIQUID",//62
"GLASS",//63
"PAPER",//64
"CLAY",//65
"CHINA",//66
"DIAMOND",//67
"CRYSTAL",//68
"GEM", //69
"PEARL", //70
"PLATINUM",//71
"MILK",//72
"EGGS",//73
"HOPS",//74
"COFFEEBEANS",//75
"COFFEE",//76
"OPAL",//77
"TOPAZ",//78
"AMETHYST",//79
"GARNET",//80
"AMBER", //81
"AQUAMARINE", //82
"CRYSOBERYL", //83
"IRONWOOD", //84
"SILK", //85
"COCOA", //86
"BLOOD", //87
"BONE", //88
"COAL", //89
"LAMP OIL", //90
"POISON", // 91
"LIQUOR", // 92
"SUGAR", // 93
"HONEY", // 94
"BARLEY", // 95
"MUSHROOMS", // 96
"HERBS", // 97
"VINE", // 98
"FLOWERS", // 99
"PLASTIC", // 100
"RUBBER", // 101
"EBONY", // 102
"IVORY", // 103
"WAX", // 104
"NUTS", // 105
"BREAD", // 106
"CRACKER", // 107
"YEW", // 108
"DUST", // 109
"PIPEWEED", // 110
"ENERGY", // 111
"STRAWBERRIES", // 112
"BLUEBERRIES", // 113
"RASPBERRIES", // 114
"BOYSENBERRIES", // 115
"BLACKBERRIES", // 116
"SMURFBERRIES", // 117
"PEACHES", // 118
"PLUMS", // 119
"ONIONS", // 120
"CHERRIES", // 121
"GARLIC", // 122
"PINEAPPLES", // 123
"COCONUTS", // 124
"BANANAS", // 125
"LIMES", // 126
"SAP", // 127
"ONYX", // 128
"TURQUOISE", // 129
"PERIDOT", // 130
"QUARTZ", // 131
"LAPIS", // 133
"BLOODSTONE", // 133
"MOONSTONE", // 134
"ALEXANDRITE", // 135
"TEAK", // 136
"CEDAR", // 137
"ELM", // 138
"CHERRYWOOD", // 139
"BEECHWOOD", // 140
"WILLOW", // 141
"SYCAMORE", // 142
"SPRUCE", // 143
"MESQUITE", // 144
"BASALT", // 145
"SHALE", // 146
"PUMICE", // 147
"SANDSTONE", // 148
"SOAPSTONE", // 149
"SALMON", // 150
"CARP", // 151
"TROUT", // 152
"SHRIMP", // 153
"TUNA", // 154
"CATFISH", // 155
"BAMBOO", // 156
"SOAP", // 157
"SPIDERSTEEL", // 158
"ASH", // 159
"PERFUME", // 160
"ATLANTITE",//161
"CHEESE",//162
};
public final static int DATA_CODE=0;
public final static int DATA_VALUE=1;
public final static int DATA_FREQ=2;
public final static int DATA_STRENGTH=3;
public final static int DATA_BOUANCY=4;
public final static int[][] RESOURCE_DATA={
		// full code, base value, frequency, strength (1-10), buoyancy
{RESOURCE_NOTHING, 0, 0, 0, 0},
{RESOURCE_MEAT, 4, 20, 1, 3000},
{RESOURCE_BEEF, 6, 20, 1, 3000},
{RESOURCE_PORK, 8, 20, 1, 2500},
{RESOURCE_POULTRY, 3, 20, 1, 2000},
{RESOURCE_MUTTON, 4, 20, 1, 2800},
{RESOURCE_FISH, 5, 100,1, 590},
{RESOURCE_WHEAT, 1, 20, 1, 770},
{RESOURCE_CORN, 1, 20, 1, 720},
{RESOURCE_RICE, 1, 20, 1, 750},
{RESOURCE_CARROTS, 1, 5, 1, 720},
{RESOURCE_TOMATOES, 1, 5, 1, 640},
{RESOURCE_PEPPERS, 1, 5, 1, 640},
{RESOURCE_GREENS, 1, 5, 1, 540},
{RESOURCE_FRUIT, 2, 10, 1, 720},
{RESOURCE_APPLES, 2, 10, 1, 640},
{RESOURCE_BERRIES, 2, 15, 1, 720},
{RESOURCE_ORANGES, 2, 10, 1, 640},
{RESOURCE_LEMONS, 2, 10, 1, 480},
{RESOURCE_GRAPES, 3, 5, 1, 680},
{RESOURCE_OLIVES, 2, 5, 1, 640},
{RESOURCE_POTATOES, 1, 5, 1, 770},
{RESOURCE_CACTUS, 2, 5, 1, 680},
{RESOURCE_DATES, 2, 2, 1, 720},
{RESOURCE_SEAWEED, 1, 50, 1, 540},
{RESOURCE_STONE, 1, 80, 5, 2500},
{RESOURCE_LIMESTONE, 1, 20, 4, 1550},
{RESOURCE_FLINT, 1, 10, 4, 2600},
{RESOURCE_GRANITE, 2, 10, 6, 2690},
{RESOURCE_OBSIDIAN, 10, 5, 6, 2650},
{RESOURCE_MARBLE, 20, 5, 5, 2560},
{RESOURCE_SAND, 1, 50, 1, 1600},
{RESOURCE_JADE, 50, 2, 5, 3800},
{RESOURCE_IRON, 20, 10, 6, 7900},
{RESOURCE_LEAD, 10, 10, 5, 11300},
{RESOURCE_BRONZE, 10, 10, 5, 8100},
{RESOURCE_SILVER, 30, 2, 5, 10500},
{RESOURCE_GOLD, 50, 1, 5, 19320},
{RESOURCE_ZINC, 10, 5, 5, 7100},
{RESOURCE_COPPER, 10, 10, 5, 8900},
{RESOURCE_TIN, 10, 10, 4, 7300},
{RESOURCE_MITHRIL, 200,1, 9, 3990},
{RESOURCE_ADAMANTITE, 500,1, 10, 4500},
{RESOURCE_STEEL, 150,0, 8, 7840},
{RESOURCE_BRASS, 120,0, 6, 8500},
{RESOURCE_WOOD, 2, 10, 3, 920},
{RESOURCE_PINE, 4, 10, 3, 650},
{RESOURCE_BALSA, 1, 5, 2, 130},
{RESOURCE_OAK, 5, 10, 3, 720},
{RESOURCE_MAPLE, 10, 5, 3, 689},
{RESOURCE_REDWOOD, 20, 2, 3, 450},
{RESOURCE_HICKORY, 5, 5, 3, 830},
{RESOURCE_SCALES, 10, 20, 4, 1800},
{RESOURCE_FUR, 20, 20, 2, 890},
{RESOURCE_LEATHER, 10, 20, 2, 945},
{RESOURCE_HIDE, 4, 20, 1, 920},
{RESOURCE_WOOL, 10, 20, 1, 1310},
{RESOURCE_FEATHERS, 10, 20, 1, 20},
{RESOURCE_COTTON, 5, 20, 1, 590},
{RESOURCE_HEMP, 4, 10, 1, 720},
{RESOURCE_FRESHWATER, 0, 100,0, 1000},
{RESOURCE_SALTWATER, 0, 100,0, 1030},
{RESOURCE_DRINKABLE, 0, 1, 0, 1000},
{RESOURCE_GLASS, 10, 0, 3, 2800},
{RESOURCE_PAPER, 10, 0, 0, 920},
{RESOURCE_CLAY, 1, 50, 1, 1750},
{RESOURCE_CHINA, 30, 0, 3, 2400},
{RESOURCE_DIAMOND, 5000, 1, 9, 3510},
{RESOURCE_CRYSTAL, 10, 5, 3, 2200},
{RESOURCE_GEM, 100,1, 3, 3500},
{RESOURCE_PEARL, 1000, 1, 4, 2000},
{RESOURCE_PLATINUM, 80, 1, 6, 21450},
{RESOURCE_MILK, 2, 10, 0, 1020},
{RESOURCE_EGGS, 2, 10, 0, 1120},
{RESOURCE_HOPS, 2, 20, 1, 340},
{RESOURCE_COFFEEBEANS, 2, 10, 1, 560},
{RESOURCE_COFFEE, 0, 10, 0, 430},
{RESOURCE_OPAL, 80, 2, 5, 2250},
{RESOURCE_TOPAZ, 200,2, 5, 3570},
{RESOURCE_AMETHYST, 300,2, 5, 2651},
{RESOURCE_GARNET, 70, 2, 5, 3870},
{RESOURCE_AMBER, 80, 5, 5, 2500},
{RESOURCE_AQUAMARINE, 50, 2, 5, 2800},
{RESOURCE_CRYSOBERYL, 50, 2, 5, 2800},
{RESOURCE_IRONWOOD, 25, 5, 4, 990},
{RESOURCE_SILK, 200,5, 1, 1600},
{RESOURCE_COCOA, 4, 5, 0, 590},
{RESOURCE_BLOOD, 1, 100,0, 1025},
{RESOURCE_BONE, 1, 100,5, 1600},
{RESOURCE_COAL, 1, 50, 1, 1800},
{RESOURCE_LAMPOIL, 1, 10, 1, 880},
{RESOURCE_POISON, 1, 1, 1, 1000},
{RESOURCE_LIQUOR, 10, 1, 1, 790},
{RESOURCE_SUGAR, 1, 50, 1, 1600},
{RESOURCE_HONEY, 1, 50, 1, 1600},
{RESOURCE_BARLEY, 1, 20, 1, 610},
{RESOURCE_MUSHROOMS, 1, 20, 1, 500},
{RESOURCE_HERBS, 1, 10, 1, 770},
{RESOURCE_VINE, 1, 10, 1, 880},
{RESOURCE_FLOWERS, 1, 10, 1, 720},
{RESOURCE_PLASTIC, 25, 0, 4, 950},
{RESOURCE_RUBBER, 25, 0, 1, 1506},
{RESOURCE_EBONY, 5, 5, 5, 2910},
{RESOURCE_IVORY, 5, 5, 3, 1840},
{RESOURCE_WAX, 1, 0, 0, 900},
{RESOURCE_NUTS, 0, 20, 0, 640},
{RESOURCE_BREAD, 3, 0, 0, 660},
{RESOURCE_CRACKER, 2, 0, 0, 200},
{RESOURCE_YEW, 15, 2, 5, 850},
{RESOURCE_DUST, 0, 20, 0, 1120},
{RESOURCE_PIPEWEED, 3, 10, 1, 320},
{RESOURCE_ENERGY, 30, 0, 4, 0},
{RESOURCE_STRAWBERRIES, 10, 1, 1, 750},
{RESOURCE_BLUEBERRIES, 10, 1, 1, 750},
{RESOURCE_RASPBERRIES, 10, 1, 1, 750},
{RESOURCE_BOYSENBERRIES,10, 1, 1, 750},
{RESOURCE_BLACKBERRIES, 10, 1, 1, 750},
{RESOURCE_SMURFBERRIES, 10, 1, 1, 750},
{RESOURCE_PEACHES, 10, 1, 1, 700},
{RESOURCE_PLUMS, 10, 1, 1, 710},
{RESOURCE_ONIONS, 10, 1, 1, 760},
{RESOURCE_CHERRIES, 10, 1, 1, 810},
{RESOURCE_GARLIC, 10, 1, 1, 815},
{RESOURCE_PINEAPPLES, 10, 1, 1, 500},
{RESOURCE_COCONUTS, 10, 1, 2, 250},
{RESOURCE_BANANAS, 10, 1, 1, 790},
{RESOURCE_LIMES, 10, 1, 1, 690},
{RESOURCE_SAP, 10, 1, 1, 1600},
{RESOURCE_ONYX, 70, 1, 8, 3300},
{RESOURCE_TURQUIOSE, 70, 1, 8, 3300},
{RESOURCE_PERIDOT, 65, 1, 6, 3300},
{RESOURCE_QUARTZ, 25, 1, 5, 3300},
{RESOURCE_LAPIS, 70, 1, 6, 3300},
{RESOURCE_BLOODSTONE, 85, 1, 8, 3300},
{RESOURCE_MOONSTONE, 90, 1, 8, 3300},
{RESOURCE_ALEXANDRITE, 95, 1, 9, 3300},
{RESOURCE_TEAK, 20, 2, 3, 1000},
{RESOURCE_CEDAR, 15, 2, 3, 900},
{RESOURCE_ELM, 15, 2, 3, 1100},
{RESOURCE_CHERRYWOOD, 17, 2, 3, 900},
{RESOURCE_BEECHWOOD, 12, 2, 3, 975},
{RESOURCE_WILLOW, 12, 2, 1, 1000},
{RESOURCE_SYCAMORE, 11, 2, 2, 1000},
{RESOURCE_SPRUCE, 12, 2, 3, 990},
{RESOURCE_MESQUITE, 9, 2, 3, 1150},
{RESOURCE_BASALT, 10, 2, 4, 3300},
{RESOURCE_SHALE, 5, 2, 2, 1200},
{RESOURCE_PUMICE, 5, 2, 4, 600},
{RESOURCE_SANDSTONE, 10, 2, 2, 3500},
{RESOURCE_SOAPSTONE, 60, 2, 5, 3600},
{RESOURCE_SALMON, 6, 1, 1, 1000},
{RESOURCE_CARP, 6, 1, 1, 1000},
{RESOURCE_TROUT, 6, 1, 1, 1000},
{RESOURCE_SHRIMP, 6, 1, 1, 1000},
{RESOURCE_TUNA, 6, 1, 1, 1000},
{RESOURCE_CATFISH, 6, 1, 1, 1000},
{RESOURCE_BAMBOO, 15, 10, 4, 120},
{RESOURCE_SOAP, 1, 0, 1, 430},
{RESOURCE_SPIDERSTEEL, 250,0, 2, 630},
{RESOURCE_ASH, 1, 0, 0, 50},
{RESOURCE_PERFUME, 1, 1, 1, 1000},
{RESOURCE_ATLANTEANSTEEL,1500,1,6, 850},
{RESOURCE_CHEESE, 25, 0, 1, 640},
};
public static final int[] FISHES={
RESOURCE_FISH,
RESOURCE_SALMON,
RESOURCE_CARP,
RESOURCE_TROUT,
RESOURCE_SHRIMP,
RESOURCE_TUNA,
RESOURCE_CATFISH
};
public static final int[] BERRIES={
RESOURCE_BERRIES,
RESOURCE_STRAWBERRIES,
RESOURCE_BLUEBERRIES,
RESOURCE_RASPBERRIES,
RESOURCE_BOYSENBERRIES,
RESOURCE_BLACKBERRIES,
RESOURCE_SMURFBERRIES
};
public final static String[] RESOURCE_SMELLS={
		// smell descriptions, indexed in the same order as the resources above
"",//RESOURCE_NOTHING
"",//RESOURCE_MEAT
"",//RESOURCE_BEEF
"",//RESOURCE_PORK
"",//RESOURCE_POULTRY
"",//RESOURCE_MUTTON
"strong fishy",//RESOURCE_FISH
"",//RESOURCE_WHEAT
"",//RESOURCE_CORN
"",//RESOURCE_RICE
"",//RESOURCE_CARROTS
"",//RESOURCE_TOMATOES
"spicy",//RESOURCE_PEPPERS
"very mild",//RESOURCE_GREENS
"sweet and fruity",//RESOURCE_FRUIT
"sweet apply",//RESOURCE_APPLES
"sweet berry",//RESOURCE_BERRIES
"citrusy",//RESOURCE_ORANGES
"strong citrusy",//RESOURCE_LEMONS
"mild sweet",//RESOURCE_GRAPES
"pickly olive",//RESOURCE_OLIVES
"",//RESOURCE_POTATOES
"",//RESOURCE_CACTUS
"sweet plumy",//RESOURCE_DATES
"",//RESOURCE_SEAWEED
"mild musty",//RESOURCE_STONE
"",//RESOURCE_LIMESTONE
"",//RESOURCE_FLINT
"",//RESOURCE_GRANITE
"",//RESOURCE_OBSIDIAN
"",//RESOURCE_MARBLE
"",//RESOURCE_SAND
"",//RESOURCE_JADE
"",//RESOURCE_IRON
"",//RESOURCE_LEAD
"",//RESOURCE_BRONZE
"",//RESOURCE_SILVER
"",//RESOURCE_GOLD
"",//RESOURCE_ZINC
"",//RESOURCE_COPPER
"",//RESOURCE_TIN
"",//RESOURCE_MITHRIL
"",//RESOURCE_ADAMANTITE
"",//RESOURCE_STEEL
"",//RESOURCE_BRASS
"",//RESOURCE_WOOD
"fresh, clean piney",//RESOURCE_PINE
"",//RESOURCE_BALSA
"rich oaky",//RESOURCE_OAK
"mild maply",//RESOURCE_MAPLE
"",//RESOURCE_REDWOOD
"",//RESOURCE_HICKORY
"",//RESOURCE_SCALES
"musky",//RESOURCE_FUR
"strong leathery",//RESOURCE_LEATHER
"mild stinky",//RESOURCE_HIDE
"",//RESOURCE_WOOL
"",//RESOURCE_FEATHERS
"",//RESOURCE_COTTON
"grassy",//RESOURCE_HEMP
"",//RESOURCE_FRESHWATER
"",//RESOURCE_SALTWATER
"",//RESOURCE_DRINKABLE
"",//RESOURCE_GLASS
"",//RESOURCE_PAPER
"mild dusty",//RESOURCE_CLAY
"",//RESOURCE_CHINA
"",//RESOURCE_DIAMOND
"",//RESOURCE_CRYSTAL
"",//RESOURCE_GEM
"",//RESOURCE_PEARL
"",//RESOURCE_PLATINUM
"mild milky",//RESOURCE_MILK
"",//RESOURCE_EGGS
"mild grainy",//RESOURCE_HOPS
"mild coffee",//RESOURCE_COFFEEBEANS
"rich coffee",//RESOURCE_COFFEE
"",//RESOURCE_OPAL
"",//RESOURCE_TOPAZ
"",//RESOURCE_AMETHYST
"",//RESOURCE_GARNET
"",//RESOURCE_AMBER
"",//RESOURCE_AQUAMARINE
"",//RESOURCE_CRYSOBERYL
"",//RESOURCE_IRONWOOD
"",//RESOURCE_SILK
"",//RESOURCE_COCOA
"strong salty",//RESOURCE_BLOOD
"",//RESOURCE_BONE
"chalky",//RESOURCE_COAL
"light oily",//RESOURCE_LAMPOIL
"",//RESOURCE_POISON
"alcohol",//RESOURCE_LIQUOR
"",//RESOURCE_SUGAR
"",//RESOURCE_HONEY
"",//RESOURCE_BARLEY
"",//RESOURCE_MUSHROOMS
"fresh herbal",//RESOURCE_HERBS
"rich green",//RESOURCE_VINE
"nice floral",//RESOURCE_FLOWERS
"",//RESOURCE_PLASTIC
"sour rubbery",//RESOURCE_RUBBER
"",//RESOURCE_EBONY
"",//RESOURCE_IVORY
"",//RESOURCE_WAX
"mild nutty",//RESOURCE_NUTS
"",//RESOURCE_BREAD
"",//RESOURCE_CRACKER
"",//RESOURCE_YEW
"dusty",//RESOURCE_DUST
"strong grassy",//RESOURCE_PIPEWEED
"",//RESOURCE_ENERGY
"sweet berry",//RESOURCE_STRAWBERRIES
"sweet berry",//RESOURCE_BLUEBERRIES
"sweet berry",//RESOURCE_RASPBERRIES
"sweet berry",//RESOURCE_BOYSENBERRIES
"sweet berry",//RESOURCE_BLACKBERRIES
"sweet berry",//RESOURCE_SMURFBERRIES
"peachy",//RESOURCE_PEACHES
"sweey plumy",//RESOURCE_PLUMS
"stinging oniony",//RESOURCE_ONIONS
"cherry",//RESOURCE_CHERRIES
"",//RESOURCE_GARLIC
"fruity",//RESOURCE_PINEAPPLES
"",//RESOURCE_COCONUTS
"pungent banana",//RESOURCE_BANANAS
"citrusy",//RESOURCE_LIMES
"strong maply",//RESOURCE_SAP
"",//RESOURCE_ONYX
"",//RESOURCE_TURQUIOSE
"",//RESOURCE_PERIDOT
"",//RESOURCE_QUARTZ
"",//RESOURCE_LAPIS
"",//RESOURCE_BLOODSTONE
"",//RESOURCE_MOONSTONE
"",//RESOURCE_ALEXANDRITE
"",//RESOURCE_TEAK
"strong cedar",//RESOURCE_CEDAR
"",//RESOURCE_ELM
"",//RESOURCE_CHERRYWOOD
"",//RESOURCE_BEECHWOOD
"",//RESOURCE_WILLOW
"",//RESOURCE_SYCAMORE
"",//RESOURCE_SPRUCE
"rich mesquite",//RESOURCE_MESQUITE
"",//RESOURCE_BASALT
"",//RESOURCE_SHALE
"",//RESOURCE_PUMICE
"",//RESOURCE_SANDSTONE
"",//RESOURCE_SOAPSTONE
"strong fishy",//RESOURCE_SALMON
"strong fishy",//RESOURCE_CARP
"strong fishy",//RESOURCE_TROUT
"mild fishy",//RESOURCE_SHRIMP
"strong fishy",//RESOURCE_TUNA
"strong fishy",//RESOURCE_CATFISH
"",//RESOURCE_BAMBOO
"light fragrant",//RESOURCE_SOAP
"",//RESOURCE_SPIDERSTEEL
"dusty",//RESOURCE_ASH
"strong fragrant",//RESOURCE_PERFUME
"",//RESOURCE_ATLANTEANSTEEL
"mild cheesy"
};
} |
git-svn-id: svn://192.168.1.10/public/CoffeeMud@5481 0d6f1817-ed0e-0410-87c9-987e46238f29
| com/planet_ink/coffee_mud/Items/interfaces/RawMaterial.java | <ide><path>om/planet_ink/coffee_mud/Items/interfaces/RawMaterial.java
<ide> public final static int RESOURCE_ASH=MATERIAL_VEGETATION|159;
<ide> public final static int RESOURCE_PERFUME=MATERIAL_LIQUID|160;
<ide> public final static int RESOURCE_ATLANTEANSTEEL=MATERIAL_MITHRIL|161;
<del> public final static int RESOURCE_CHEESE=MATERIAL_LIQUID|162;
<add> public final static int RESOURCE_CHEESE=MATERIAL_VEGETATION|162;
<ide> public final static int RESOURCE_MASK=255;
<ide>
<ide> |
||
JavaScript | mit | d99ae00e1996bd67614d27aca80ce6b2ca43a9b7 | 0 | mloy/node-jet,lipp/node-jet,mloy/node-jet,lipp/node-jet,mloy/node-jet,zemirco/node-jet,zemirco/node-jet,lipp/node-jet,zemirco/node-jet | var util = require('util');
var net = require('net');
var http = require('http');
var WebSocketServer = require('ws').Server;
var assert = require('assert');
var EventEmitter = require('events').EventEmitter;
var MessageSocket = require('./message-socket.js').MessageSocket;
var jetUtils = require('./utils');
var jetSorter = require('./daemon/sorter');
var jetFetcher = require('./daemon/fetcher');
var isDefined = jetUtils.isDefined;
var noop = jetUtils.noop;
var invalidParams = jetUtils.invalidParams;
var invalidRequest = jetUtils.invalidRequest;
var responseTimeout = jetUtils.responseTimeout;
var methodNotFound = jetUtils.methodNotFound;
var parseError = jetUtils.parseError;
var createDaemon = function (options) {
options = options || {};
var log = options.log || noop;
var info = options.info || noop;
var debug = options.debug || noop;
var crit = options.crit || console.log;
// all connected peers (clients)
	// key is the peer id, value is the peer object
var peers = {};
// all elements which have been added
// key is unique path, value is element (object)
var elements = {};
// holds info about all pending requests (which are routed)
// key is (daemon generated) unique id, value is Object
// with original request id and receiver (peer) and request
// timer
var routes = {};
	// global for tracking the necessity of lower casing
// paths when publishing / notifying
// TODO: keep track of case sensitive fetchers as lua-jet does.
var hasCaseInsensitives = true;
// holds all case insensitive fetchers
// key is fetcher (Object) value is true;
// var caseInsensitives = {};
var daemon = new EventEmitter();
// routes an incoming response to the requestor (peer)
// which made the request.
// stops timeout timer eventually.
var routeResponse = function (peer, message) {
var route = routes[message.id];
if (route) {
clearTimeout(route.timer);
delete routes[message.id];
message.id = route.id;
route.receiver.sendMessage(message);
} else {
console.log('cannot route message (timeout?)', message);
}
};
	// publishes a notification to all subscribers / fetchers
var publish = function (path, event, value, element) {
daemon.emit('publish', path, event, value, element);
var lowerPath = hasCaseInsensitives && path.toLowerCase();
for (var fetcherId in element.fetchers) {
try {
element.fetchers[fetcherId](path, lowerPath, event, value);
} catch (e) {
crit('fetcher failed', e);
}
}
};
// flushes all outstanding / queued messages to the peer underlying transport
var flushPeers = function () {
		for (var peerId in peers) {
			peers[peerId].flush();
}
};
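	// retrieves tab[key] and optionally checks its type.
	// throws invalidParams if the key is missing or has the wrong type.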
var checked = function (tab, key, typename) {
var p = tab[key];
if (isDefined(p)) {
if (typename) {
if (typeof (p) === typename) {
return p;
} else {
throw invalidParams({
wrongType: key,
got: tab
});
}
} else {
return p;
}
} else {
throw invalidParams({
missingParam: key,
got: tab
});
}
};
var optional = function (tab, key, typename) {
var p = tab[key];
		if (isDefined(p)) {
			if (typename && typeof (p) !== typename) {
				throw invalidParams({
					wrongType: key,
					got: tab
				});
			}
			return p;
		}
};
// dispatches the 'change' jet call.
// updates the internal cache (element table)
// and publishes a change event.
var change = function (peer, message) {
var notification = checked(message, 'params', 'object');
var path = checked(notification, 'path', 'string');
var element = elements[path];
if (element && element.peer === peer) {
element.value = notification.value;
publish(path, 'change', element.value, element);
} else if (!isDefined(element)) {
throw invalidParams({
pathNotExists: path
});
} else {
assert(peer !== element.peer);
throw invalidParams({
foreignPath: path
});
}
};
// dispatches the 'fetch' jet call.
// creates a fetch operation and optionally a sorter.
	// all elements are fed in as "fake" add events. The
	// fetcher is only associated with the element if
// it "shows interest".
var fetch = function (peer, message) {
var params = checked(message, 'params', 'object');
var fetchId = checked(params, 'id');
var queueNotification;
var mayHaveInterest;
var fetchPeerId;
var notify = function (nparams) {
assert(queueNotification);
queueNotification(nparams);
};
var initializing = true;
var sorter = jetSorter.create(params, notify);
if (isDefined(sorter)) {
notify = function (nparams) {
sorter.sorter(nparams, initializing);
};
}
var fetcher = jetFetcher.create(params, notify);
peer.fetchers[fetchId] = fetcher;
if (isDefined(message.id) && !isDefined(sorter)) {
peer.sendMessage({
id: message.id,
result: true
});
}
queueNotification = function (nparams) {
peer.sendMessage({
method: fetchId,
params: nparams
});
};
fetchPeerId = peer.id + fetchId;
for (var path in elements) {
mayHaveInterest = fetcher(
path,
path.toLowerCase(),
'add',
elements[path].value
);
if (mayHaveInterest) {
elements[path].fetchers[fetchPeerId] = fetcher;
}
}
initializing = false;
if (isDefined(sorter) && sorter.flush) {
if (isDefined(message.id)) {
peer.sendMessage({
id: message.id,
result: true
});
}
sorter.flush();
}
};
	// dispatches the 'unfetch' jet call.
	// removes all resources associated with the fetcher.
var unfetch = function (peer, message) {
var params = message.params;
var fetchId = checked(params, 'id', 'string');
var fetcher = peer.fetchers[fetchId];
var fetchPeerId = peer.id + fetchId;
if (!isDefined(fetcher)) {
return;
}
delete peer.fetchers[fetchId];
for (var path in elements) {
delete elements[path].fetchers[fetchPeerId];
}
};
// counter to make the routed request more unique.
	// addresses the situation where a peer makes two requests with
	// the same message.id.
var rcount = 0;
// routes / forwards a peer request or notification ("call","set") to the peer
// of the corresponding element specified by "params.path".
// creates an entry in the "route" table if it is a request and sets up a timer
// which will respond a response timeout error to the requestor if
// no corresponding response is received.
var route = function (peer, message) {
var params = message.params;
var path = checked(params, 'path', 'string');
var value = params.value;
var args = params.args;
var element = elements[path];
var req = {};
var id;
var timeout;
if (element) {
if (isDefined(message.id)) {
				timeout = optional(params, 'timeout', 'number') || element.timeout || 5;
				rcount = (rcount + 1) % Math.pow(2, 31);
id = message.id.toString() + peer.id + rcount;
assert.equal(routes[id], null);
routes[id] = {
receiver: peer,
id: message.id,
timer: setTimeout(function () {
delete routes[id];
peer.sendMessage({
id: message.id,
error: responseTimeout(params)
});
}, timeout * 1000)
};
}
req.id = id;
req.method = path;
if (value !== undefined) {
req.params = {
value: value,
valueAsResult: params.valueAsResult
};
} else {
req.params = params.args;
}
element.peer.sendMessage(req);
} else {
var error = invalidParams({
pathNotExists: path
});
if (isDefined(message.id)) {
peer.sendMessage({
id: message.id,
error: error
});
}
}
};
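	// dispatches the 'add' jet call.
	// creates a new element for the given path and offers
	// the "add" event to the fetchers of all peers.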
var add = function (peer, message) {
var params = message.params;
var path = checked(params, 'path', 'string');
var value = params.value;
var lowerPath = path.toLowerCase();
var element = elements[path];
var mayHaveInterest;
var peerId;
var fetcher, fetcherId;
if (element) {
throw invalidParams({
pathAlreadyExists: path
});
}
element = {
peer: peer,
value: value,
fetchers: {}
};
for (peerId in peers) {
for (fetcherId in peers[peerId].fetchers) {
fetcher = peers[peerId].fetchers[fetcherId];
mayHaveInterest = fetcher(path, lowerPath, 'add', value);
if (mayHaveInterest) {
element.fetchers[peerId + fetcherId] = fetcher;
}
}
}
elements[path] = element;
daemon.emit('publish', path, 'add', value, element);
};
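	// dispatches the 'remove' jet call.
	// publishes a 'remove' event and deletes the element,
	// provided it is owned by the requesting peer.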
var remove = function (peer, message) {
var params = message.params;
var path = checked(params, 'path', 'string');
var value;
if (!isDefined(elements[path])) {
throw invalidParams({
pathNotExists: path
});
}
if (elements[path].peer !== peer) {
throw invalidParams({
foreignPath: path
});
}
value = elements[path].value;
publish(path, 'remove', value, elements[path]);
delete elements[path];
};
var config = function (peer, message) {
var params = message.params;
if (params.name) {
peer.name = params.name;
}
if (params.encoding) {
throw "unsupported encoding";
}
};
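	// wraps a service method so that its result is automatically
	// replied and thrown errors are converted to JSON-RPC error responses.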
var safe = function (f) {
return function (peer, message) {
try {
var result = f(peer, message) || true;
if (message.id) {
peer.sendMessage({
id: message.id,
result: result
});
}
} catch (err) {
if (message.id) {
if (typeof (err) === 'object') {
assert.ok(err.code && err.message, err);
peer.sendMessage({
id: message.id,
error: err
});
} else {
peer.sendMessage({
id: message.id,
error: {
code: -32603,
message: 'Internal error',
data: err
}
});
}
}
}
};
};
var safeForward = function (f) {
return function (peer, message) {
try {
f(peer, message);
} catch (err) {
console.log('jetd.safeForward failed', err, message);
if (message.id) {
if (typeof (err) === 'object') {
assert.ok(err.code && err.message, err);
peer.sendMessage({
id: message.id,
error: err
});
} else {
peer.sendMessage({
id: message.id,
error: {
code: -32603,
message: 'Internal error',
data: err
}
});
}
}
}
};
};
var services = {
add: safe(add),
remove: safe(remove),
call: safeForward(route),
set: safeForward(route),
fetch: safeForward(fetch),
unfetch: safe(unfetch),
change: safe(change),
config: safe(config),
echo: safe(function (peer, message) {
return message.params;
})
};
var dispatchRequest = function (peer, message) {
assert.ok(message.method);
var service = services[message.method];
if (service) {
service(peer, message);
} else {
var error = methodNotFound(message.method);
peer.sendMessage({
id: message.id,
error: error
});
}
};
var dispatchNotification = function (peer, message) {
assert.ok(message.method);
var service = services[message.method];
if (service) {
service(peer, message);
}
};
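	// releases all daemon resources associated with a (closed) peer:
	// removes its fetchers from all elements and removes
	// (and publishes the removal of) all elements it has added.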
var releasePeer = function (peer) {
var fetcher;
if (peer) {
daemon.emit('close', peer);
for (var fetchId in peer.fetchers) {
for (var path in elements) {
delete elements[path].fetchers[peer.id + fetchId];
}
}
peer.fetchers = {};
for (var path in elements) {
var element = elements[path];
			if (element.peer === peer) {
publish(path, 'remove', element.value, element);
delete elements[path];
}
}
delete peers[peer.id];
}
};
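	// dispatches an incoming message. requests and notifications
	// are handled by the service table, responses are routed back
	// to the original requestor.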
var dispatchMessage = function (peer, message) {
if (message.id) {
if (message.method) {
dispatchRequest(peer, message);
		} else if (isDefined(message.result) || isDefined(message.error)) {
routeResponse(peer, message);
} else {
var error = invalidRequest(message);
peer.sendMessage({
id: message.id,
error: error
});
console.log('invalid request', message);
}
} else if (message.method) {
dispatchNotification(peer, message);
} else {
console.log('invalid message', message);
}
};
var registerPeer = function (peerId, sock) {
var peer = {};
peer.sendMessage = function (message) {
message = JSON.stringify(message);
sock.send(message);
};
sock.on('message', function (message) {
message = JSON.parse(message);
if (util.isArray(message)) {
message.forEach(function (msg) {
dispatchMessage(peer, msg);
});
} else {
dispatchMessage(peer, message);
}
});
peer.id = peerId;
peer.fetchers = {};
peers[peerId] = peer;
sock.once('close', function (b) {
releasePeer(peer);
});
daemon.emit('connection', peer);
};
daemon.listen = function (options) {
if (options.tcpPort) {
var listener = net.createServer(function (peerSocket) {
var peerId = peerSocket.remoteAddress + peerSocket.remotePort;
var sock = new MessageSocket(peerSocket);
registerPeer(peerId, sock);
});
listener.listen(options.tcpPort);
}
if (options.wsPort) {
var wsServer = new WebSocketServer({
port: options.wsPort,
handleProtocols: function (protocols, cb) {
if (protocols.indexOf('jet') > -1) {
cb(true, 'jet');
} else {
cb(false);
}
}
});
wsServer.on('connection', function (ws) {
registerPeer(Math.random(), ws);
});
}
};
return daemon;
}
module.exports = createDaemon; | lib/jet/daemon.js | var util = require('util');
var net = require('net');
var http = require('http');
var WebSocketServer = require('ws').Server;
var assert = require('assert');
var EventEmitter = require('events').EventEmitter;
var MessageSocket = require('./message-socket.js').MessageSocket;
var jetUtils = require('./utils');
var jetSorter = require('./daemon/sorter');
var jetFetcher = require('./daemon/fetcher');
var isDefined = jetUtils.isDefined;
var noop = jetUtils.noop;
var invalidParams = jetUtils.invalidParams;
var invalidRequest = jetUtils.invalidRequest;
var responseTimeout = jetUtils.responseTimeout;
var methodNotFound = jetUtils.methodNotFound;
var parseError = jetUtils.parseError;
var createDaemon = function (options) {
options = options || {};
var log = options.log || noop;
var info = options.info || noop;
var debug = options.debug || noop;
var crit = options.crit || console.log;
// all connected peers (clients)
	// key is the peer id, value is the peer object
var peers = {};
// all elements which have been added
// key is unique path, value is element (object)
var elements = {};
// holds info about all pending requests (which are routed)
// key is (daemon generated) unique id, value is Object
// with original request id and receiver (peer) and request
// timer
var routes = {};
	// global for tracking the necessity of lower casing
// paths when publishing / notifying
// TODO: keep track of case sensitive fetchers as lua-jet does.
var hasCaseInsensitives = true;
// holds all case insensitive fetchers
// key is fetcher (Object) value is true;
// var caseInsensitives = {};
var daemon = new EventEmitter();
// routes an incoming response to the requestor (peer)
// which made the request.
// stops timeout timer eventually.
var routeResponse = function (peer, message) {
var route = routes[message.id];
if (route) {
clearTimeout(route.timer);
delete routes[message.id];
message.id = route.id;
route.receiver.sendMessage(message);
} else {
console.log('cannot route message (timeout?)', message);
}
};
	// publishes a notification to all subscribers / fetchers
var publish = function (path, event, value, element) {
daemon.emit('publish', path, event, value, element);
var lowerPath = hasCaseInsensitives && path.toLowerCase();
for (var fetcherId in element.fetchers) {
try {
element.fetchers[fetcherId](path, lowerPath, event, value);
} catch (e) {
crit('fetcher failed', e);
}
}
};
// flushes all outstanding / queued messages to the peer underlying transport
var flushPeers = function () {
		for (var peerId in peers) {
			peers[peerId].flush();
}
};
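	// retrieves tab[key] and optionally checks its type.
	// throws invalidParams if the key is missing or has the wrong type.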
var checked = function (tab, key, typename) {
var p = tab[key];
if (isDefined(p)) {
if (typename) {
if (typeof (p) === typename) {
return p;
} else {
throw invalidParams({
wrongType: key,
got: tab
});
}
} else {
return p;
}
} else {
throw invalidParams({
missingParam: key,
got: tab
});
}
};
var optional = function (tab, key, typename) {
var p = tab[key];
		if (isDefined(p)) {
			if (typename && typeof (p) !== typename) {
				throw invalidParams({
					wrongType: key,
					got: tab
				});
			}
			return p;
		}
};
// dispatches the 'change' jet call.
// updates the internal cache (element table)
// and publishes a change event.
var change = function (peer, message) {
var notification = checked(message, 'params', 'object');
var path = checked(notification, 'path', 'string');
var element = elements[path];
if (element && element.peer === peer) {
element.value = notification.value;
publish(path, 'change', element.value, element);
} else if (!isDefined(element)) {
throw invalidParams({
pathNotExists: path
});
} else {
assert(peer !== element.peer);
throw invalidParams({
foreignPath: path
});
}
};
// dispatches the 'fetch' jet call.
// creates a fetch operation and optionally a sorter.
	// all elements are fed in as "fake" add events. The
	// fetcher is only associated with the element if
// it "shows interest".
var fetch = function (peer, message) {
var params = checked(message, 'params', 'object');
var fetchId = checked(params, 'id');
var queueNotification;
var mayHaveInterest;
var fetchPeerId;
var notify = function (nparams) {
assert(queueNotification);
queueNotification(nparams);
};
var initializing = true;
var sorter = jetSorter.create(params, notify);
if (isDefined(sorter)) {
notify = function (nparams) {
sorter.sorter(nparams, initializing);
};
}
var fetcher = jetFetcher.create(params, notify);
peer.fetchers[fetchId] = fetcher;
if (isDefined(message.id) && !isDefined(sorter)) {
peer.sendMessage({
id: message.id,
result: true
});
}
queueNotification = function (nparams) {
peer.sendMessage({
method: fetchId,
params: nparams
});
};
fetchPeerId = peer.id + fetchId;
for (var path in elements) {
mayHaveInterest = fetcher(
path,
path.toLowerCase(),
'add',
elements[path].value
);
if (mayHaveInterest) {
elements[path].fetchers[fetchPeerId] = fetcher;
}
}
initializing = false;
if (isDefined(sorter) && sorter.flush) {
if (isDefined(message.id)) {
peer.sendMessage({
id: message.id,
result: true
});
}
sorter.flush();
}
};
	// dispatches the 'unfetch' jet call.
	// removes all resources associated with the fetcher.
var unfetch = function (peer, message) {
var params = message.params;
var fetchId = checked(params, 'id', 'string');
var fetcher = peer.fetchers[fetchId];
var fetchPeerId = peer.id + fetchId;
if (!isDefined(fetcher)) {
return;
}
delete peer.fetchers[fetchId];
for (var path in elements) {
delete elements[path].fetchers[fetchPeerId];
}
};
// counter to make the routed request more unique.
	// addresses the situation where a peer makes two requests with
	// the same message.id.
var rcount = 0;
// routes / forwards a peer request or notification ("call","set") to the peer
// of the corresponding element specified by "params.path".
// creates an entry in the "route" table if it is a request and sets up a timer
// which will respond a response timeout error to the requestor if
// no corresponding response is received.
var route = function (peer, message) {
var params = message.params;
var path = checked(params, 'path', 'string');
var value = params.value;
var args = params.args;
var element = elements[path];
var req = {};
var id;
var timeout;
if (element) {
if (isDefined(message.id)) {
				timeout = optional(params, 'timeout', 'number') || element.timeout || 5;
				rcount = (rcount + 1) % Math.pow(2, 31);
id = message.id.toString() + peer.id + rcount;
assert.equal(routes[id], null);
routes[id] = {
receiver: peer,
id: message.id,
timer: setTimeout(function () {
delete routes[id];
peer.sendMessage({
id: message.id,
error: responseTimeout(params)
});
}, timeout * 1000)
};
}
req.id = id;
req.method = path;
if (value !== undefined) {
req.params = {
value: value,
valueAsResult: params.valueAsResult
};
} else {
req.params = params.args;
}
element.peer.sendMessage(req);
} else {
var error = invalidParams({
pathNotExists: path
});
if (isDefined(message.id)) {
peer.sendMessage({
id: message.id,
error: error
});
}
}
};
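	// dispatches the 'add' jet call.
	// creates a new element for the given path and offers
	// the "add" event to the fetchers of all peers.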
var add = function (peer, message) {
var params = message.params;
var path = checked(params, 'path', 'string');
var value = params.value;
var lowerPath = path.toLowerCase();
var element = elements[path];
var mayHaveInterest;
var peerId;
var fetcher, fetcherId;
if (element) {
throw invalidParams({
pathAlreadyExists: path
});
}
element = {
peer: peer,
value: value,
fetchers: {}
};
for (peerId in peers) {
for (fetcherId in peers[peerId].fetchers) {
fetcher = peers[peerId].fetchers[fetcherId];
mayHaveInterest = fetcher(path, lowerPath, 'add', value);
if (mayHaveInterest) {
element.fetchers[peerId + fetcherId] = fetcher;
}
}
}
elements[path] = element;
daemon.emit('publish', path, 'add', value, element);
};
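	// dispatches the 'remove' jet call.
	// publishes a 'remove' event and deletes the element,
	// provided it is owned by the requesting peer.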
var remove = function (peer, message) {
var params = message.params;
var path = checked(params, 'path', 'string');
var value;
if (!isDefined(elements[path])) {
throw invalidParams({
pathNotExists: path
});
}
if (elements[path].peer !== peer) {
throw invalidParams({
foreignPath: path
});
}
value = elements[path].value;
publish(path, 'remove', value, elements[path]);
delete elements[path];
};
var config = function (peer, message) {
var params = message.params;
if (params.name) {
peer.name = params.name;
}
if (params.encoding) {
throw "unsupported encoding";
}
};
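	// wraps a service method so that its result is automatically
	// replied and thrown errors are converted to JSON-RPC error responses.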
var safe = function (f) {
return function (peer, message) {
try {
var result = f(peer, message) || true;
if (message.id) {
peer.sendMessage({
id: message.id,
result: result
});
}
} catch (err) {
if (message.id) {
if (typeof (err) === 'object') {
assert.ok(err.code && err.message, err);
peer.sendMessage({
id: message.id,
error: err
});
} else {
peer.sendMessage({
id: message.id,
error: {
code: -32603,
message: 'Internal error',
data: err
}
});
}
}
}
};
};
var safeForward = function (f) {
return function (peer, message) {
try {
f(peer, message);
} catch (err) {
console.log('jetd.safeForward failed', err, message);
if (message.id) {
if (typeof (err) === 'object') {
assert.ok(err.code && err.message, err);
peer.sendMessage({
id: message.id,
error: err
});
} else {
peer.sendMessage({
id: message.id,
error: {
code: -32603,
message: 'Internal error',
data: err
}
});
}
}
}
};
};
var services = {
add: safe(add),
remove: safe(remove),
call: safeForward(route),
set: safeForward(route),
fetch: safeForward(fetch),
unfetch: safe(unfetch),
change: safe(change),
config: safe(config),
echo: safe(function (peer, message) {
return message.params;
})
};
var dispatchRequest = function (peer, message) {
assert.ok(message.method);
var service = services[message.method];
if (service) {
service(peer, message);
} else {
var error = methodNotFound(message.method);
peer.sendMessage({
id: message.id,
error: error
});
}
};
var dispatchNotification = function (peer, message) {
assert.ok(message.method);
var service = services[message.method];
if (service) {
service(peer, message);
}
};
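	// releases all daemon resources associated with a (closed) peer:
	// removes its fetchers from all elements and removes
	// (and publishes the removal of) all elements it has added.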
var releasePeer = function (peer) {
var fetcher;
if (peer) {
daemon.emit('close', peer);
for (var fetchId in peer.fetchers) {
for (var path in elements) {
delete elements[path].fetchers[peer.id + fetchId];
}
}
peer.fetchers = {};
for (var path in elements) {
var element = elements[path];
			if (element.peer === peer) {
publish(path, 'remove', element.value, element);
delete elements[path];
}
}
delete peers[peer.id];
}
};
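	// dispatches an incoming message. requests and notifications
	// are handled by the service table, responses are routed back
	// to the original requestor.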
var dispatchMessage = function (peer, message) {
if (message.id) {
if (message.method) {
dispatchRequest(peer, message);
		} else if (isDefined(message.result) || isDefined(message.error)) {
routeResponse(peer, message);
} else {
var error = invalidRequest(message);
peer.sendMessage({
id: message.id,
error: error
});
console.log('invalid request', message);
}
} else if (message.method) {
dispatchNotification(peer, message);
} else {
console.log('invalid message', message);
}
};
var registerPeer = function(peerId,sock) {
var peer = {};
peer.sendMessage = function (message) {
message = JSON.stringify(message);
sock.send(message);
};
sock.on('message', function (message) {
message = JSON.parse(message);
if (util.isArray(message)) {
message.forEach(function (msg) {
dispatchMessage(peer, msg);
});
} else {
dispatchMessage(peer, message);
}
});
peer.id = peerId;
peer.fetchers = {};
peers[peerId] = peer;
sock.once('close', function (b) {
releasePeer(peer);
});
daemon.emit('connection', peer);
};
daemon.listen = function (options) {
if (options.tcpPort) {
var listener = net.createServer(function (peerSocket) {
var peerId = peerSocket.remoteAddress + peerSocket.remotePort;
var sock = new MessageSocket(peerSocket);
registerPeer(peerId,sock);
});
listener.listen(options.tcpPort);
}
if (options.wsPort) {
var wsServer = new WebSocketServer({
port: options.wsPort,
handleProtocols: function(protocols,cb) {
if (protocols.indexOf('jet') > -1) {
cb(true,'jet');
} else {
cb(false);
}
}
});
wsServer.on('connection', function(ws) {
registerPeer(Math.random(),ws);
});
}
};
return daemon;
}
module.exports = createDaemon;
| beautify
| lib/jet/daemon.js | beautify | <ide><path>ib/jet/daemon.js
<ide> }
<ide> };
<ide>
<del> var registerPeer = function(peerId,sock) {
<del> var peer = {};
<add> var registerPeer = function (peerId, sock) {
<add> var peer = {};
<ide> peer.sendMessage = function (message) {
<ide> message = JSON.stringify(message);
<ide> sock.send(message);
<ide>
<ide> daemon.listen = function (options) {
<ide> if (options.tcpPort) {
<del> var listener = net.createServer(function (peerSocket) {
<add> var listener = net.createServer(function (peerSocket) {
<ide> var peerId = peerSocket.remoteAddress + peerSocket.remotePort;
<ide> var sock = new MessageSocket(peerSocket);
<del> registerPeer(peerId,sock);
<del> });
<del> listener.listen(options.tcpPort);
<add> registerPeer(peerId, sock);
<add> });
<add> listener.listen(options.tcpPort);
<ide> }
<ide> if (options.wsPort) {
<del> var wsServer = new WebSocketServer({
<del> port: options.wsPort,
<del> handleProtocols: function(protocols,cb) {
<del> if (protocols.indexOf('jet') > -1) {
<del> cb(true,'jet');
<del> } else {
<del> cb(false);
<del> }
<del> }
<del> });
<del> wsServer.on('connection', function(ws) {
<del> registerPeer(Math.random(),ws);
<del> });
<add> var wsServer = new WebSocketServer({
<add> port: options.wsPort,
<add> handleProtocols: function (protocols, cb) {
<add> if (protocols.indexOf('jet') > -1) {
<add> cb(true, 'jet');
<add> } else {
<add> cb(false);
<add> }
<add> }
<add> });
<add> wsServer.on('connection', function (ws) {
<add> registerPeer(Math.random(), ws);
<add> });
<ide> }
<ide> };
<ide> return daemon; |
|
Java | apache-2.0 | dc346f85f2e0b33d835f694089e4fac417c52a3b | 0 | apache/commons-daemon,apache/commons-daemon,apache/commons-daemon,apache/commons-daemon | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.daemon.support;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Properties;
import java.text.ParseException;
/**
* Used by jsvc for Daemon configuration.
* <p>
 * Configuration is read from a properties file.
 * If no properties file is given, {@code daemon.properties}
 * from the current directory is used.
* </p>
* <p>
 * The properties file can have property values expanded at runtime
 * using System properties or the execution environment. The part
 * of the property value between {@code ${} and {@code }}
 * is used as a System property or environment variable key. If found,
 * the entire {@code ${foo}} is replaced by the value of the
 * System property or environment variable named {@code foo}.
* </p>
* <p>
 * If no variable is found, {@code ${foo}} is passed through as is.
 * In the case of {@code $${foo}}, the token is unescaped and the
 * resulting value is {@code ${foo}}.
* </p>
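 * <p>
 * For illustration (the property names below are made up;
 * {@code java.io.tmpdir} is a standard System property, assumed
 * here to have the value {@code /tmp}):
 * </p>
 * <pre>
 * daemon.pidfile  = ${java.io.tmpdir}/app.pid    expands to /tmp/app.pid
 * daemon.verbatim = $${java.io.tmpdir}/app.pid   unescapes to ${java.io.tmpdir}/app.pid
 * </pre>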
*
*/
public final class DaemonConfiguration
{
/**
* Default configuration file name.
*/
protected final static String DEFAULT_CONFIG = "daemon.properties";
/**
* Property prefix
*/
protected final static String PREFIX = "daemon.";
private final static String BTOKEN = "${";
private final static String ETOKEN = "}";
private final Properties configurationProperties;
private final Properties systemProperties;
/**
* Default constructor
*/
public DaemonConfiguration()
{
configurationProperties = new Properties();
systemProperties = System.getProperties();
}
/**
* Loads the configuration properties file.
*
* @param fileName The properties file to load.
* @return {@code true} if the file was loaded.
*/
public boolean load(String fileName)
{
boolean ok = false;
FileInputStream file = null;
try {
if (fileName == null) {
fileName = DEFAULT_CONFIG;
}
file = new FileInputStream(fileName);
configurationProperties.clear();
configurationProperties.load(file);
ok = true;
}
catch (final IOException ex) {
// Error reading properties file
} finally {
try {
if (file != null) {
file.close();
}
} catch (final IOException ex) {
}
}
return ok;
}
private String expandProperty(final String propValue)
throws ParseException
{
StringBuffer expanded;
int btoken;
int ctoken = 0;
if (propValue == null) {
return null;
}
expanded = new StringBuffer();
btoken = propValue.indexOf(BTOKEN);
while (btoken != -1) {
if (btoken > 0 && propValue.charAt(btoken - 1) == BTOKEN.charAt(0)) {
// Skip and unquote.
expanded.append(propValue.substring(ctoken, btoken));
ctoken = btoken + 1;
btoken = propValue.indexOf(BTOKEN, btoken + BTOKEN.length());
continue;
}
final int etoken = propValue.indexOf(ETOKEN, btoken);
if (etoken != -1) {
final String variable = propValue.substring(btoken + BTOKEN.length(), etoken);
String sysvalue = systemProperties.getProperty(variable);
if (sysvalue == null) {
// Try with the environment if there was no
// property by that name.
sysvalue = System.getenv(variable); // N.B. Deprecated in Java 1.3/1.4, but re-instated in Java 1.5+
}
if (sysvalue != null) {
final String strtoken = propValue.substring(ctoken, btoken);
expanded.append(strtoken);
expanded.append(sysvalue);
ctoken = etoken + ETOKEN.length();
}
}
else {
// We have "${" without "}"
                throw new ParseException("Error while looking for terminating '" +
ETOKEN + "'", btoken);
}
btoken = propValue.indexOf(BTOKEN, etoken + ETOKEN.length());
}
// Add what's left.
expanded.append(propValue.substring(ctoken, propValue.length()));
return expanded.toString();
}
/**
* Gets the configuration property.
*
* @param name The name of the property to get.
*
* @throws ParseException if the property is wrongly formatted.
*
* @return Configuration property including any expansion/replacement
*/
public String getProperty(final String name)
throws ParseException
{
if (name == null) {
return null;
}
return expandProperty(configurationProperties.getProperty(PREFIX + name));
}
/**
* Gets the configuration property array.
* <p>
     * The property array is constructed from the list of properties
     * whose names end with {@code [index]}
* </p>
* <pre>
* daemon.arg[0] = argument 1
* daemon.arg[1] = argument 2
* daemon.arg[2] = argument 3
* </pre>
* @param name The name of the property array to get.
*
* @throws ParseException if the property is wrongly formatted.
*
* @return Configuration property array including any expansion/replacement
*/
public String[] getPropertyArray(final String name)
throws ParseException
{
final ArrayList<String> list = new ArrayList<String>();
String args;
// Load daemon.arg[0] ... daemon.arg[n] into the String array.
//
while ((args = getProperty(name + "[" + list.size() + "]")) != null) {
list.add(args);
}
return list.toArray(new String[list.size()]);
}
}
| src/main/java/org/apache/commons/daemon/support/DaemonConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.daemon.support;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Properties;
import java.text.ParseException;
/**
* Used by jsvc for Daemon configuration.
* <p>
 * Configuration is read from a properties file.
 * If no properties file is given, {@code daemon.properties}
 * from the current directory is used.
* </p>
* <p>
 * The properties file can have property values expanded at runtime
 * using System properties or the execution environment. The part
 * of the property value between {@code ${} and {@code }}
 * is used as a System property or environment variable key. If found,
 * the entire {@code ${foo}} is replaced by the value of the
 * System property or environment variable named {@code foo}.
* </p>
* <p>
 * If no variable is found, {@code ${foo}} is passed through as is.
 * In the case of {@code $${foo}}, the token is unescaped and the
 * resulting value is {@code ${foo}}.
* </p>
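 * <p>
 * For illustration (the property names below are made up;
 * {@code java.io.tmpdir} is a standard System property, assumed
 * here to have the value {@code /tmp}):
 * </p>
 * <pre>
 * daemon.pidfile  = ${java.io.tmpdir}/app.pid    expands to /tmp/app.pid
 * daemon.verbatim = $${java.io.tmpdir}/app.pid   unescapes to ${java.io.tmpdir}/app.pid
 * </pre>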
*
*/
public final class DaemonConfiguration
{
/**
* Default configuration file name.
*/
protected final static String DEFAULT_CONFIG = "daemon.properties";
/**
* Property prefix
*/
protected final static String PREFIX = "daemon.";
private final static String BTOKEN = "${";
private final static String ETOKEN = "}";
private final Properties configurationProperties;
private final Properties systemProperties;
/**
* Default constructor
*/
public DaemonConfiguration()
{
configurationProperties = new Properties();
systemProperties = System.getProperties();
}
/**
* Loads the configuration properties file.
*
* @param fileName The properties file to load.
* @return {@code true} if the file was loaded.
*/
public boolean load(String fileName)
{
boolean ok = false;
FileInputStream file = null;
try {
if (fileName == null) {
fileName = DEFAULT_CONFIG;
}
file = new FileInputStream(fileName);
configurationProperties.clear();
configurationProperties.load(file);
ok = true;
}
catch (final FileNotFoundException ex) {
// fileName does not exist
}
catch (final IOException ex) {
// Error reading properties file
} finally {
try {
if (file != null) {
file.close();
}
} catch (final IOException ex) {
}
}
return ok;
}
private String expandProperty(final String propValue)
throws ParseException
{
StringBuffer expanded;
int btoken;
int ctoken = 0;
if (propValue == null) {
return null;
}
expanded = new StringBuffer();
btoken = propValue.indexOf(BTOKEN);
while (btoken != -1) {
if (btoken > 0 && propValue.charAt(btoken - 1) == BTOKEN.charAt(0)) {
// Skip and unquote.
expanded.append(propValue.substring(ctoken, btoken));
ctoken = btoken + 1;
btoken = propValue.indexOf(BTOKEN, btoken + BTOKEN.length());
continue;
}
final int etoken = propValue.indexOf(ETOKEN, btoken);
if (etoken != -1) {
final String variable = propValue.substring(btoken + BTOKEN.length(), etoken);
String sysvalue = systemProperties.getProperty(variable);
if (sysvalue == null) {
// Try with the environment if there was no
// property by that name.
sysvalue = System.getenv(variable); // N.B. Deprecated in Java 1.3/1.4, but re-instated in Java 1.5+
}
if (sysvalue != null) {
final String strtoken = propValue.substring(ctoken, btoken);
expanded.append(strtoken);
expanded.append(sysvalue);
ctoken = etoken + ETOKEN.length();
}
}
else {
// We have "${" without "}"
                throw new ParseException("Error while looking for terminating '" +
ETOKEN + "'", btoken);
}
btoken = propValue.indexOf(BTOKEN, etoken + ETOKEN.length());
}
// Add what's left.
expanded.append(propValue.substring(ctoken, propValue.length()));
return expanded.toString();
}
/**
* Gets the configuration property.
*
* @param name The name of the property to get.
*
* @throws ParseException if the property is wrongly formatted.
*
* @return Configuration property including any expansion/replacement
*/
public String getProperty(final String name)
throws ParseException
{
if (name == null) {
return null;
}
return expandProperty(configurationProperties.getProperty(PREFIX + name));
}
/**
* Gets the configuration property array.
* <p>
     * The property array is constructed from the list of properties
     * whose names end with {@code [index]}
* </p>
* <pre>
* daemon.arg[0] = argument 1
* daemon.arg[1] = argument 2
* daemon.arg[2] = argument 3
* </pre>
* @param name The name of the property array to get.
*
* @throws ParseException if the property is wrongly formatted.
*
* @return Configuration property array including any expansion/replacement
*/
public String[] getPropertyArray(final String name)
throws ParseException
{
final ArrayList<String> list = new ArrayList<String>();
String args;
// Load daemon.arg[0] ... daemon.arg[n] into the String array.
//
while ((args = getProperty(name + "[" + list.size() + "]")) != null) {
list.add(args);
}
return list.toArray(new String[list.size()]);
}
}
| Collapse multiple identical catch clauses into one.
| src/main/java/org/apache/commons/daemon/support/DaemonConfiguration.java | Collapse multiple identical catch clauses into one. | <ide><path>rc/main/java/org/apache/commons/daemon/support/DaemonConfiguration.java
<ide> configurationProperties.load(file);
<ide> ok = true;
<ide> }
<del> catch (final FileNotFoundException ex) {
<del> // fileName does not exist
<del> }
<ide> catch (final IOException ex) {
<ide> // Error reading properties file
<ide> } finally { |
|
Java | apache-2.0 | e6d5e0887643f341ce59d316d381889eb975dd39 | 0 | jbonofre/incubator-beam,lukecwik/incubator-beam,jbonofre/beam,jbonofre/beam,robertwb/incubator-beam,RyanSkraba/beam,staslev/incubator-beam,wangyum/beam,chamikaramj/beam,staslev/beam,jbonofre/beam,chamikaramj/beam,apache/beam,eljefe6a/incubator-beam,jbonofre/beam,tgroh/incubator-beam,charlesccychen/beam,robertwb/incubator-beam,tgroh/beam,apache/beam,lukecwik/incubator-beam,chamikaramj/beam,robertwb/incubator-beam,charlesccychen/beam,staslev/beam,robertwb/incubator-beam,eljefe6a/incubator-beam,lukecwik/incubator-beam,charlesccychen/beam,apache/beam,chamikaramj/beam,apache/beam,RyanSkraba/beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,robertwb/incubator-beam,jbonofre/incubator-beam,markflyhigh/incubator-beam,RyanSkraba/beam,wangyum/beam,lukecwik/incubator-beam,tgroh/beam,robertwb/incubator-beam,markflyhigh/incubator-beam,wangyum/beam,apache/beam,lukecwik/incubator-beam,chamikaramj/beam,charlesccychen/beam,eljefe6a/incubator-beam,rangadi/beam,staslev/incubator-beam,robertwb/incubator-beam,tgroh/beam,chamikaramj/beam,rangadi/beam,apache/beam,lukecwik/incubator-beam,rangadi/beam,chamikaramj/beam,robertwb/incubator-beam,robertwb/incubator-beam,markflyhigh/incubator-beam,charlesccychen/beam,RyanSkraba/beam,markflyhigh/incubator-beam,rangadi/beam,tgroh/incubator-beam,apache/beam,markflyhigh/incubator-beam,staslev/beam,charlesccychen/beam,RyanSkraba/beam,lukecwik/incubator-beam,apache/beam,chamikaramj/beam,RyanSkraba/beam,apache/beam,rangadi/beam,wangyum/beam,tgroh/beam,iemejia/incubator-beam,rangadi/beam,markflyhigh/incubator-beam,chamikaramj/beam,apache/beam,rangadi/incubator-beam,chamikaramj/beam,mxm/incubator-beam,iemejia/incubator-beam,lukecwik/incubator-beam,charlesccychen/incubator-beam,charlesccychen/incubator-beam,lukecwik/incubator-beam,charlesccychen/incubator-beam,rangadi/incubator-beam,rangadi/beam,RyanSkraba/beam,charlesccychen/beam,apache/beam,mxm/incubator-beam,robertwb/incubator-beam,rangadi/incubator-beam | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.dataflow;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static org.apache.beam.sdk.util.SerializableUtils.serializeToByteArray;
import static org.apache.beam.sdk.util.StringUtils.byteArrayToJsonString;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.services.clouddebugger.v2.Clouddebugger;
import com.google.api.services.clouddebugger.v2.model.Debuggee;
import com.google.api.services.clouddebugger.v2.model.RegisterDebuggeeRequest;
import com.google.api.services.clouddebugger.v2.model.RegisterDebuggeeResponse;
import com.google.api.services.dataflow.model.DataflowPackage;
import com.google.api.services.dataflow.model.Job;
import com.google.api.services.dataflow.model.ListJobsResponse;
import com.google.api.services.dataflow.model.WorkerPool;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.base.Utf8;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.beam.runners.core.construction.CoderTranslation;
import org.apache.beam.runners.core.construction.DeduplicatedFlattenFactory;
import org.apache.beam.runners.core.construction.EmptyFlattenAsCreateFactory;
import org.apache.beam.runners.core.construction.PTransformMatchers;
import org.apache.beam.runners.core.construction.PTransformReplacements;
import org.apache.beam.runners.core.construction.RehydratedComponents;
import org.apache.beam.runners.core.construction.ReplacementOutputs;
import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
import org.apache.beam.runners.core.construction.UnconsumedReads;
import org.apache.beam.runners.core.construction.WriteFilesTranslation;
import org.apache.beam.runners.dataflow.DataflowPipelineTranslator.JobSpecification;
import org.apache.beam.runners.dataflow.StreamingViewOverrides.StreamingCreatePCollectionViewFactory;
import org.apache.beam.runners.dataflow.options.DataflowPipelineDebugOptions;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.runners.dataflow.options.DataflowPipelineWorkerPoolOptions;
import org.apache.beam.runners.dataflow.util.DataflowTemplateJob;
import org.apache.beam.runners.dataflow.util.DataflowTransport;
import org.apache.beam.runners.dataflow.util.MonitoringUtil;
import org.apache.beam.runners.dataflow.util.PropertyNames;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.Pipeline.PipelineVisitor;
import org.apache.beam.sdk.PipelineResult.State;
import org.apache.beam.sdk.PipelineRunner;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.annotations.Internal;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.Coder.NonDeterministicException;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.common.runner.v1.RunnerApi;
import org.apache.beam.sdk.extensions.gcp.storage.PathValidator;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.FileBasedSink;
import org.apache.beam.sdk.io.FileSystems;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.io.WriteFiles;
import org.apache.beam.sdk.io.fs.ResourceId;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubMessage;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubMessageWithAttributesCoder;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSink;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsValidator;
import org.apache.beam.sdk.options.ValueProvider.NestedValueProvider;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.runners.PTransformOverride;
import org.apache.beam.sdk.runners.PTransformOverrideFactory;
import org.apache.beam.sdk.runners.TransformHierarchy;
import org.apache.beam.sdk.runners.TransformHierarchy.Node;
import org.apache.beam.sdk.state.MapState;
import org.apache.beam.sdk.state.SetState;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.Combine.GroupedValues;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Reshuffle;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.WithKeys;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.CoderUtils;
import org.apache.beam.sdk.util.InstanceBuilder;
import org.apache.beam.sdk.util.MimeTypes;
import org.apache.beam.sdk.util.NameUtils;
import org.apache.beam.sdk.util.ReleaseInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollection.IsBounded;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.PCollectionViews;
import org.apache.beam.sdk.values.PDone;
import org.apache.beam.sdk.values.PInput;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.ValueWithRecordId;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link PipelineRunner} that executes the operations in the pipeline by first translating them
* to the Dataflow representation using the {@link DataflowPipelineTranslator} and then submitting
* them to a Dataflow service for execution.
*
* <h3>Permissions</h3>
*
* <p>When reading from a Dataflow source or writing to a Dataflow sink using
 * {@code DataflowRunner}, the Google Cloud services account and the Google Compute Engine service
* account of the GCP project running the Dataflow Job will need access to the corresponding
* source/sink.
*
* <p>Please see <a href="https://cloud.google.com/dataflow/security-and-permissions">Google Cloud
* Dataflow Security and Permissions</a> for more details.
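 *
 * <p>A minimal usage sketch (the project ID and GCS locations below are illustrative
 * placeholders, not values supplied by this class):
 *
 * <pre>{@code
 * DataflowPipelineOptions options =
 *     PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);
 * options.setRunner(DataflowRunner.class);
 * options.setProject("my-project");
 * options.setTempLocation("gs://my-bucket/tmp");
 * Pipeline p = Pipeline.create(options);
 * // ... apply the pipeline's transforms ...
 * p.run().waitUntilFinish();
 * }</pre>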
*/
public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
private static final Logger LOG = LoggerFactory.getLogger(DataflowRunner.class);
/** Provided configuration options. */
private final DataflowPipelineOptions options;
/** Client for the Dataflow service. This is used to actually submit jobs. */
private final DataflowClient dataflowClient;
/** Translator for this DataflowRunner, based on options. */
private final DataflowPipelineTranslator translator;
/** A set of user defined functions to invoke at different points in execution. */
private DataflowRunnerHooks hooks;
  // The size limit, in bytes, of a CreateJob request.
private static final int CREATE_JOB_REQUEST_LIMIT_BYTES = 10 * 1024 * 1024;
@VisibleForTesting
static final int GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT = 1024 * 1024;
private final Set<PCollection<?>> pcollectionsRequiringIndexedFormat;
/**
   * Project IDs must contain lowercase letters, digits, or dashes (domain-scoped IDs may also
   * contain colons and periods). IDs must start with a letter and may not end with a dash.
   * This regex isn't exact - it allows some patterns that would be rejected by
   * the service, but it is sufficient for basic validation of project IDs.
*/
public static final String PROJECT_ID_REGEXP = "[a-z][-a-z0-9:.]+[a-z0-9]";
/**
* Construct a runner from the provided options.
*
* @param options Properties that configure the runner.
* @return The newly created runner.
*/
public static DataflowRunner fromOptions(PipelineOptions options) {
DataflowPipelineOptions dataflowOptions =
PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
ArrayList<String> missing = new ArrayList<>();
if (dataflowOptions.getAppName() == null) {
missing.add("appName");
}
if (missing.size() > 0) {
throw new IllegalArgumentException(
"Missing required values: " + Joiner.on(',').join(missing));
}
PathValidator validator = dataflowOptions.getPathValidator();
String gcpTempLocation;
try {
gcpTempLocation = dataflowOptions.getGcpTempLocation();
} catch (Exception e) {
throw new IllegalArgumentException("DataflowRunner requires gcpTempLocation, "
+ "but failed to retrieve a value from PipelineOptions", e);
}
validator.validateOutputFilePrefixSupported(gcpTempLocation);
String stagingLocation;
try {
stagingLocation = dataflowOptions.getStagingLocation();
} catch (Exception e) {
throw new IllegalArgumentException("DataflowRunner requires stagingLocation, "
+ "but failed to retrieve a value from PipelineOptions", e);
}
validator.validateOutputFilePrefixSupported(stagingLocation);
if (!Strings.isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
}
if (dataflowOptions.getFilesToStage() == null) {
dataflowOptions.setFilesToStage(detectClassPathResourcesToStage(
DataflowRunner.class.getClassLoader()));
if (dataflowOptions.getFilesToStage().isEmpty()) {
      throw new IllegalArgumentException("No files to stage have been found.");
} else {
LOG.info("PipelineOptions.filesToStage was not specified. "
+ "Defaulting to files from the classpath: will stage {} files. "
+ "Enable logging at DEBUG level to see which files will be staged.",
dataflowOptions.getFilesToStage().size());
LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
}
}
    // Verify jobName according to service requirements, converting it to lowercase if
    // necessary.
String jobName =
dataflowOptions
.getJobName()
.toLowerCase();
checkArgument(
jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
"JobName invalid; the name must consist of only the characters "
+ "[-a-z0-9], starting with a letter and ending with a letter "
+ "or number");
if (!jobName.equals(dataflowOptions.getJobName())) {
LOG.info(
"PipelineOptions.jobName did not match the service requirements. "
+ "Using {} instead of {}.",
jobName,
dataflowOptions.getJobName());
}
dataflowOptions.setJobName(jobName);
// Verify project
String project = dataflowOptions.getProject();
if (project.matches("[0-9]*")) {
      throw new IllegalArgumentException("Project ID '" + project
          + "' invalid. Please make sure you specified the Project ID, not the project number.");
} else if (!project.matches(PROJECT_ID_REGEXP)) {
      throw new IllegalArgumentException("Project ID '" + project
          + "' invalid. Please make sure you specified the Project ID, not the project description.");
}
DataflowPipelineDebugOptions debugOptions =
dataflowOptions.as(DataflowPipelineDebugOptions.class);
// Verify the number of worker threads is a valid value
if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
throw new IllegalArgumentException("Number of worker harness threads '"
+ debugOptions.getNumberOfWorkerHarnessThreads()
+ "' invalid. Please make sure the value is non-negative.");
}
if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
}
return new DataflowRunner(dataflowOptions);
}
@VisibleForTesting protected DataflowRunner(DataflowPipelineOptions options) {
this.options = options;
this.dataflowClient = DataflowClient.create(options);
this.translator = DataflowPipelineTranslator.fromOptions(options);
this.pcollectionsRequiringIndexedFormat = new HashSet<>();
this.ptransformViewsWithNonDeterministicKeyCoders = new HashSet<>();
}
private List<PTransformOverride> getOverrides(boolean streaming) {
ImmutableList.Builder<PTransformOverride> overridesBuilder = ImmutableList.builder();
// Create is implemented in terms of a Read, so it must precede the override to Read in
// streaming
overridesBuilder
.add(
PTransformOverride.of(
PTransformMatchers.flattenWithDuplicateInputs(),
DeduplicatedFlattenFactory.create()))
.add(
PTransformOverride.of(
PTransformMatchers.emptyFlatten(), EmptyFlattenAsCreateFactory.instance()));
if (streaming) {
if (!hasExperiment(options, "enable_custom_pubsub_source")) {
overridesBuilder.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(PubsubUnboundedSource.class),
new StreamingPubsubIOReadOverrideFactory()));
}
if (!hasExperiment(options, "enable_custom_pubsub_sink")) {
overridesBuilder.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(PubsubUnboundedSink.class),
new StreamingPubsubIOWriteOverrideFactory(this)));
}
if (hasExperiment(options, "beam_fn_api")) {
overridesBuilder.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Create.Values.class),
new StreamingFnApiCreateOverrideFactory()));
}
overridesBuilder
// Support Splittable DoFn for now only in streaming mode.
// The order of the following overrides is important because they are applied in order.
// By default Dataflow runner replaces single-output ParDo with a ParDoSingle override.
// However, we want a different expansion for single-output splittable ParDo.
.add(
PTransformOverride.of(
PTransformMatchers.splittableParDoSingle(),
new ReflectiveOneToOneOverrideFactory(
SplittableParDoOverrides.ParDoSingleViaMulti.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.splittableParDoMulti(),
new SplittableParDoOverrides.SplittableParDoOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.writeWithRunnerDeterminedSharding(),
new StreamingShardedWriteFactory(options)))
.add(
// Streaming Bounded Read is implemented in terms of Streaming Unbounded Read, and
// must precede it
PTransformOverride.of(
PTransformMatchers.classEqualTo(Read.Bounded.class),
new StreamingBoundedReadOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Read.Unbounded.class),
new StreamingUnboundedReadOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(View.CreatePCollectionView.class),
new StreamingCreatePCollectionViewFactory()));
} else {
overridesBuilder
// State and timer pardos are implemented by expansion to GBK-then-ParDo
.add(
PTransformOverride.of(
PTransformMatchers.stateOrTimerParDoMulti(),
BatchStatefulParDoOverrides.multiOutputOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.stateOrTimerParDoSingle(),
BatchStatefulParDoOverrides.singleOutputOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.MapViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsMap.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.MultimapViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsMultimap.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.SingletonViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsSingleton.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.ListViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsList.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.IterableViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsIterable.class, this)));
}
overridesBuilder
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Reshuffle.class), new ReshuffleOverrideFactory()))
// Order is important. Streaming views almost all use Combine internally.
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Combine.GroupedValues.class),
new PrimitiveCombineGroupedValuesOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(ParDo.SingleOutput.class),
new PrimitiveParDoSingleFactory()));
return overridesBuilder.build();
}
private static class ReflectiveOneToOneOverrideFactory<
InputT, OutputT, TransformT extends PTransform<PCollection<InputT>, PCollection<OutputT>>>
extends SingleInputOutputOverrideFactory<
PCollection<InputT>, PCollection<OutputT>, TransformT> {
private final Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement;
private final DataflowRunner runner;
private ReflectiveOneToOneOverrideFactory(
Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement,
DataflowRunner runner) {
this.replacement = replacement;
this.runner = runner;
}
@Override
public PTransformReplacement<PCollection<InputT>, PCollection<OutputT>> getReplacementTransform(
AppliedPTransform<PCollection<InputT>, PCollection<OutputT>, TransformT> transform) {
PTransform<PCollection<InputT>, PCollection<OutputT>> rep =
InstanceBuilder.ofType(replacement)
.withArg(DataflowRunner.class, runner)
.withArg(
(Class<TransformT>) transform.getTransform().getClass(), transform.getTransform())
.build();
return PTransformReplacement.of(PTransformReplacements.getSingletonMainInput(transform), rep);
}
}
private String debuggerMessage(String projectId, String uniquifier) {
return String.format("To debug your job, visit Google Cloud Debugger at: "
+ "https://console.developers.google.com/debug?project=%s&dbgee=%s",
projectId, uniquifier);
}
private void maybeRegisterDebuggee(DataflowPipelineOptions options, String uniquifier) {
if (!options.getEnableCloudDebugger()) {
return;
}
if (options.getDebuggee() != null) {
throw new RuntimeException("Should not specify the debuggee");
}
Clouddebugger debuggerClient = DataflowTransport.newClouddebuggerClient(options).build();
Debuggee debuggee = registerDebuggee(debuggerClient, uniquifier);
options.setDebuggee(debuggee);
System.out.println(debuggerMessage(options.getProject(), debuggee.getUniquifier()));
}
private Debuggee registerDebuggee(Clouddebugger debuggerClient, String uniquifier) {
RegisterDebuggeeRequest registerReq = new RegisterDebuggeeRequest();
registerReq.setDebuggee(new Debuggee()
.setProject(options.getProject())
.setUniquifier(uniquifier)
.setDescription(uniquifier)
.setAgentVersion("google.com/cloud-dataflow-java/v1"));
try {
RegisterDebuggeeResponse registerResponse =
debuggerClient.controller().debuggees().register(registerReq).execute();
Debuggee debuggee = registerResponse.getDebuggee();
if (debuggee.getStatus() != null && debuggee.getStatus().getIsError()) {
throw new RuntimeException("Unable to register with the debugger: "
+ debuggee.getStatus().getDescription().getFormat());
}
return debuggee;
} catch (IOException e) {
throw new RuntimeException("Unable to register with the debugger: ", e);
}
}
@Override
public DataflowPipelineJob run(Pipeline pipeline) {
logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline);
if (containsUnboundedPCollection(pipeline)) {
options.setStreaming(true);
}
replaceTransforms(pipeline);
LOG.info("Executing pipeline on the Dataflow Service, which will have billing implications "
+ "related to Google Compute Engine usage and other Google Cloud Services.");
List<DataflowPackage> packages = options.getStager().stageFiles();
// Set a unique client_request_id in the CreateJob request.
// This is used to ensure idempotence of job creation across retried
// attempts to create a job. Specifically, if the service returns a job with
// a different client_request_id, it means the returned one is a different
// job previously created with the same job name, and that the job creation
// has been effectively rejected. The SDK should return
    // Error::Already_Exists to the user in that case.
    int randomNum = new Random().nextInt(9000) + 1000;
    // Timestamp with millisecond precision (SSS) plus a random suffix.
    String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssSSS").withZone(DateTimeZone.UTC)
        .print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum;
// Try to create a debuggee ID. This must happen before the job is translated since it may
// update the options.
DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
maybeRegisterDebuggee(dataflowOptions, requestId);
JobSpecification jobSpecification =
translator.translate(pipeline, this, packages);
Job newJob = jobSpecification.getJob();
newJob.setClientRequestId(requestId);
ReleaseInfo releaseInfo = ReleaseInfo.getReleaseInfo();
String version = releaseInfo.getVersion();
checkState(
!version.equals("${pom.version}"),
"Unable to submit a job to the Dataflow service with unset version ${pom.version}");
System.out.println("Dataflow SDK version: " + version);
newJob.getEnvironment().setUserAgent((Map) releaseInfo.getProperties());
    // The Dataflow Service may write to the temporary directory directly, so
    // it must be verified.
if (!isNullOrEmpty(options.getGcpTempLocation())) {
newJob.getEnvironment().setTempStoragePrefix(
dataflowOptions.getPathValidator().verifyPath(options.getGcpTempLocation()));
}
newJob.getEnvironment().setDataset(options.getTempDatasetId());
newJob.getEnvironment().setExperiments(options.getExperiments());
    // Set the Docker container image that executes the Dataflow worker harness, residing in
    // Google Container Registry. The translator is guaranteed to have created a worker pool
    // prior to this point.
String workerHarnessContainerImage = getContainerImageForJob(options);
for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) {
workerPool.setWorkerHarnessContainerImage(workerHarnessContainerImage);
}
newJob.getEnvironment().setVersion(getEnvironmentVersion(options));
if (hooks != null) {
hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment());
}
if (!isNullOrEmpty(options.getDataflowJobFile())
|| !isNullOrEmpty(options.getTemplateLocation())) {
boolean isTemplate = !isNullOrEmpty(options.getTemplateLocation());
if (isTemplate) {
checkArgument(isNullOrEmpty(options.getDataflowJobFile()),
"--dataflowJobFile and --templateLocation are mutually exclusive.");
}
String fileLocation = firstNonNull(
options.getTemplateLocation(), options.getDataflowJobFile());
checkArgument(
fileLocation.startsWith("/") || fileLocation.startsWith("gs://"),
"Location must be local or on Cloud Storage, got %s.",
fileLocation);
ResourceId fileResource = FileSystems.matchNewResource(fileLocation, false /* isDirectory */);
String workSpecJson = DataflowPipelineTranslator.jobToString(newJob);
try (PrintWriter printWriter =
new PrintWriter(
Channels.newOutputStream(FileSystems.create(fileResource, MimeTypes.TEXT)))) {
printWriter.print(workSpecJson);
LOG.info("Printed job specification to {}", fileLocation);
} catch (IOException ex) {
String error =
String.format("Cannot create output file at %s", fileLocation);
if (isTemplate) {
throw new RuntimeException(error, ex);
} else {
LOG.warn(error, ex);
}
}
if (isTemplate) {
LOG.info("Template successfully created.");
return new DataflowTemplateJob();
}
}
String jobIdToUpdate = null;
if (options.isUpdate()) {
jobIdToUpdate = getJobIdFromName(options.getJobName());
newJob.setTransformNameMapping(options.getTransformNameMapping());
newJob.setReplaceJobId(jobIdToUpdate);
}
Job jobResult;
try {
jobResult = dataflowClient.createJob(newJob);
} catch (GoogleJsonResponseException e) {
String errorMessages = "Unexpected errors";
if (e.getDetails() != null) {
if (Utf8.encodedLength(newJob.toString()) >= CREATE_JOB_REQUEST_LIMIT_BYTES) {
errorMessages = "The size of the serialized JSON representation of the pipeline "
+ "exceeds the allowable limit. "
+ "For more information, please check the FAQ link below:\n"
+ "https://cloud.google.com/dataflow/faq";
} else {
errorMessages = e.getDetails().getMessage();
}
}
throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e);
} catch (IOException e) {
throw new RuntimeException("Failed to create a workflow job", e);
}
// Use a raw client for post-launch monitoring, as status calls may fail
// regularly and need not be retried automatically.
DataflowPipelineJob dataflowPipelineJob =
new DataflowPipelineJob(
DataflowClient.create(options),
jobResult.getId(),
options,
jobSpecification.getStepNames());
    // If the service returned a client request id, the SDK needs to compare it
    // with the original id generated in the request. If they are not the same
    // (i.e., the returned job was not created by this request), throw
    // DataflowJobAlreadyExistsException or DataflowJobAlreadyUpdatedException,
    // depending on whether this is an update or a fresh submission.
if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty()
&& !jobResult.getClientRequestId().equals(requestId)) {
// If updating a job.
if (options.isUpdate()) {
throw new DataflowJobAlreadyUpdatedException(dataflowPipelineJob,
String.format("The job named %s with id: %s has already been updated into job id: %s "
+ "and cannot be updated again.",
newJob.getName(), jobIdToUpdate, jobResult.getId()));
} else {
throw new DataflowJobAlreadyExistsException(dataflowPipelineJob,
String.format("There is already an active job named %s with id: %s. If you want "
+ "to submit a second job, try again by setting a different name using --jobName.",
newJob.getName(), jobResult.getId()));
}
}
LOG.info("To access the Dataflow monitoring console, please navigate to {}",
MonitoringUtil.getJobMonitoringPageURL(
options.getProject(), options.getRegion(), jobResult.getId()));
System.out.println("Submitted job: " + jobResult.getId());
LOG.info("To cancel the job using the 'gcloud' tool, run:\n> {}",
MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId()));
return dataflowPipelineJob;
}
/** Returns true if the specified experiment is enabled, handling null experiments. */
public static boolean hasExperiment(DataflowPipelineDebugOptions options, String experiment) {
List<String> experiments =
firstNonNull(options.getExperiments(), Collections.<String>emptyList());
return experiments.contains(experiment);
}
/** Helper to configure the Dataflow Job Environment based on the user's job options. */
private static Map<String, Object> getEnvironmentVersion(DataflowPipelineOptions options) {
DataflowRunnerInfo runnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
String majorVersion;
String jobType;
if (hasExperiment(options, "beam_fn_api")) {
majorVersion = runnerInfo.getFnApiEnvironmentMajorVersion();
jobType = options.isStreaming() ? "FNAPI_STREAMING" : "FNAPI_BATCH";
} else {
majorVersion = runnerInfo.getLegacyEnvironmentMajorVersion();
jobType = options.isStreaming() ? "STREAMING" : "JAVA_BATCH_AUTOSCALING";
}
return ImmutableMap.<String, Object>of(
PropertyNames.ENVIRONMENT_VERSION_MAJOR_KEY, majorVersion,
PropertyNames.ENVIRONMENT_VERSION_JOB_TYPE_KEY, jobType);
}
@VisibleForTesting
void replaceTransforms(Pipeline pipeline) {
boolean streaming = options.isStreaming() || containsUnboundedPCollection(pipeline);
// Ensure all outputs of all reads are consumed before potentially replacing any
// Read PTransforms
UnconsumedReads.ensureAllReadsConsumed(pipeline);
pipeline.replaceAll(getOverrides(streaming));
}
private boolean containsUnboundedPCollection(Pipeline p) {
class BoundednessVisitor extends PipelineVisitor.Defaults {
IsBounded boundedness = IsBounded.BOUNDED;
@Override
public void visitValue(PValue value, Node producer) {
if (value instanceof PCollection) {
boundedness = boundedness.and(((PCollection) value).isBounded());
}
}
}
BoundednessVisitor visitor = new BoundednessVisitor();
p.traverseTopologically(visitor);
return visitor.boundedness == IsBounded.UNBOUNDED;
  }
/**
* Returns the DataflowPipelineTranslator associated with this object.
*/
public DataflowPipelineTranslator getTranslator() {
return translator;
}
/**
   * Sets callbacks to invoke during execution; see {@code DataflowRunnerHooks}.
*/
@Experimental
public void setHooks(DataflowRunnerHooks hooks) {
this.hooks = hooks;
}
/////////////////////////////////////////////////////////////////////////////
/** Outputs a warning about PCollection views without deterministic key coders. */
private void logWarningIfPCollectionViewHasNonDeterministicKeyCoder(Pipeline pipeline) {
    // We need to wait until this point to determine the names of the transforms, since only
    // now do we know the full transform hierarchy; otherwise we could simply have recorded
    // the full names at apply time.
if (!ptransformViewsWithNonDeterministicKeyCoders.isEmpty()) {
final SortedSet<String> ptransformViewNamesWithNonDeterministicKeyCoders = new TreeSet<>();
pipeline.traverseTopologically(
new PipelineVisitor.Defaults() {
@Override
public void visitValue(PValue value, TransformHierarchy.Node producer) {}
@Override
public void visitPrimitiveTransform(TransformHierarchy.Node node) {
if (ptransformViewsWithNonDeterministicKeyCoders.contains(node.getTransform())) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
}
@Override
public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
if (node.getTransform() instanceof View.AsMap
|| node.getTransform() instanceof View.AsMultimap) {
PCollection<KV<?, ?>> input =
(PCollection<KV<?, ?>>) Iterables.getOnlyElement(node.getInputs().values());
KvCoder<?, ?> inputCoder = (KvCoder) input.getCoder();
try {
inputCoder.getKeyCoder().verifyDeterministic();
} catch (NonDeterministicException e) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
}
if (ptransformViewsWithNonDeterministicKeyCoders.contains(node.getTransform())) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
return CompositeBehavior.ENTER_TRANSFORM;
}
@Override
public void leaveCompositeTransform(TransformHierarchy.Node node) {}
});
LOG.warn("Unable to use indexed implementation for View.AsMap and View.AsMultimap for {} "
+ "because the key coder is not deterministic. Falling back to singleton implementation "
+ "which may cause memory and/or performance problems. Future major versions of "
+ "Dataflow will require deterministic key coders.",
ptransformViewNamesWithNonDeterministicKeyCoders);
}
}
/**
   * Returns true if the passed in {@link PCollection} needs to be materialized using
* an indexed format.
*/
boolean doesPCollectionRequireIndexedFormat(PCollection<?> pcol) {
return pcollectionsRequiringIndexedFormat.contains(pcol);
}
/**
   * Marks the passed in {@link PCollection} as requiring materialization in
   * an indexed format.
*/
void addPCollectionRequiringIndexedFormat(PCollection<?> pcol) {
pcollectionsRequiringIndexedFormat.add(pcol);
}
/** A set of {@link View}s with non-deterministic key coders. */
private Set<PTransform<?, ?>> ptransformViewsWithNonDeterministicKeyCoders;
/**
   * Records that the {@link PTransform} uses a non-deterministic key coder for its view.
*/
void recordViewUsesNonDeterministicKeyCoder(PTransform<?, ?> ptransform) {
ptransformViewsWithNonDeterministicKeyCoders.add(ptransform);
}
// ================================================================================
// PubsubIO translations
// ================================================================================
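  /**
   * Replaces {@link PubsubUnboundedSource} with {@link StreamingPubsubIORead} in streaming
   * pipelines, deferring the read to the Dataflow service's internal Pubsub implementation.
   */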
private static class StreamingPubsubIOReadOverrideFactory
implements PTransformOverrideFactory<
PBegin, PCollection<PubsubMessage>, PubsubUnboundedSource> {
@Override
public PTransformReplacement<PBegin, PCollection<PubsubMessage>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<PubsubMessage>, PubsubUnboundedSource> transform) {
return PTransformReplacement.of(
transform.getPipeline().begin(), new StreamingPubsubIORead(transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<PubsubMessage> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Suppress application of {@link PubsubUnboundedSource#expand} in streaming mode so that we can
* instead defer to Windmill's implementation.
*/
private static class StreamingPubsubIORead
extends PTransform<PBegin, PCollection<PubsubMessage>> {
private final PubsubUnboundedSource transform;
public StreamingPubsubIORead(PubsubUnboundedSource transform) {
this.transform = transform;
}
PubsubUnboundedSource getOverriddenTransform() {
return transform;
}
@Override
public PCollection<PubsubMessage> expand(PBegin input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(),
WindowingStrategy.globalDefault(),
IsBounded.UNBOUNDED,
new PubsubMessageWithAttributesCoder());
}
@Override
protected String getKindString() {
return "StreamingPubsubIORead";
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
StreamingPubsubIORead.class, new StreamingPubsubIOReadTranslator());
}
}
/** Rewrite {@link StreamingPubsubIORead} to the appropriate internal node. */
private static class StreamingPubsubIOReadTranslator
implements TransformTranslator<StreamingPubsubIORead> {
@Override
public void translate(StreamingPubsubIORead transform, TranslationContext context) {
checkArgument(
context.getPipelineOptions().isStreaming(),
"StreamingPubsubIORead is only for streaming pipelines.");
PubsubUnboundedSource overriddenTransform = transform.getOverriddenTransform();
StepTranslationContext stepContext = context.addStep(transform, "ParallelRead");
stepContext.addInput(PropertyNames.FORMAT, "pubsub");
if (overriddenTransform.getTopicProvider() != null) {
if (overriddenTransform.getTopicProvider().isAccessible()) {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC, overriddenTransform.getTopic().getV1Beta1Path());
} else {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC_OVERRIDE,
((NestedValueProvider) overriddenTransform.getTopicProvider()).propertyName());
}
}
if (overriddenTransform.getSubscriptionProvider() != null) {
if (overriddenTransform.getSubscriptionProvider().isAccessible()) {
stepContext.addInput(
PropertyNames.PUBSUB_SUBSCRIPTION,
overriddenTransform.getSubscription().getV1Beta1Path());
} else {
stepContext.addInput(
PropertyNames.PUBSUB_SUBSCRIPTION_OVERRIDE,
((NestedValueProvider) overriddenTransform.getSubscriptionProvider()).propertyName());
}
}
if (overriddenTransform.getTimestampAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_TIMESTAMP_ATTRIBUTE, overriddenTransform.getTimestampAttribute());
}
if (overriddenTransform.getIdAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_ID_ATTRIBUTE, overriddenTransform.getIdAttribute());
}
      // In both cases, the transform needs to read PubsubMessage. However, when it needs
      // the attributes, we supply an identity "parse fn" so the worker will read PubsubMessages
      // from Windmill and simply pass them around; when it doesn't need attributes, we're
      // already implicitly using a "Coder" that interprets the data as a PubsubMessage's
      // payload.
if (overriddenTransform.getNeedsAttributes()) {
stepContext.addInput(
PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN,
byteArrayToJsonString(
serializeToByteArray(new IdentityMessageFn())));
}
stepContext.addOutput(context.getOutput(transform));
}
}
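  /**
   * An identity "parse fn" supplied to the service (as {@code PUBSUB_SERIALIZED_ATTRIBUTES_FN})
   * so that workers read {@link PubsubMessage}s from Windmill and pass them through unchanged.
   */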
private static class IdentityMessageFn
extends SimpleFunction<PubsubMessage, PubsubMessage> {
@Override
public PubsubMessage apply(PubsubMessage input) {
return input;
}
}
/**
* Suppress application of {@link PubsubUnboundedSink#expand} in streaming mode so that we can
* instead defer to Windmill's implementation.
*/
private static class StreamingPubsubIOWrite
extends PTransform<PCollection<PubsubMessage>, PDone> {
private final PubsubUnboundedSink transform;
/**
* Builds an instance of this class from the overridden transform.
*/
public StreamingPubsubIOWrite(
DataflowRunner runner, PubsubUnboundedSink transform) {
this.transform = transform;
}
PubsubUnboundedSink getOverriddenTransform() {
return transform;
}
@Override
public PDone expand(PCollection<PubsubMessage> input) {
return PDone.in(input.getPipeline());
}
@Override
protected String getKindString() {
return "StreamingPubsubIOWrite";
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
StreamingPubsubIOWrite.class, new StreamingPubsubIOWriteTranslator());
}
}
/**
* Rewrite {@link StreamingPubsubIOWrite} to the appropriate internal node.
*/
private static class StreamingPubsubIOWriteTranslator implements
TransformTranslator<StreamingPubsubIOWrite> {
@Override
public void translate(
StreamingPubsubIOWrite transform,
TranslationContext context) {
checkArgument(context.getPipelineOptions().isStreaming(),
"StreamingPubsubIOWrite is only for streaming pipelines.");
PubsubUnboundedSink overriddenTransform = transform.getOverriddenTransform();
StepTranslationContext stepContext = context.addStep(transform, "ParallelWrite");
stepContext.addInput(PropertyNames.FORMAT, "pubsub");
if (overriddenTransform.getTopicProvider().isAccessible()) {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC, overriddenTransform.getTopic().getV1Beta1Path());
} else {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC_OVERRIDE,
((NestedValueProvider) overriddenTransform.getTopicProvider()).propertyName());
}
if (overriddenTransform.getTimestampAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_TIMESTAMP_ATTRIBUTE, overriddenTransform.getTimestampAttribute());
}
if (overriddenTransform.getIdAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_ID_ATTRIBUTE, overriddenTransform.getIdAttribute());
}
stepContext.addInput(
PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN,
byteArrayToJsonString(serializeToByteArray(new IdentityMessageFn())));
      // No coder is needed in this case since the collection being written already consists of
      // PubsubMessages; however, the Dataflow backend requires a coder to be set.
stepContext.addEncodingInput(WindowedValue.getValueOnlyCoder(VoidCoder.of()));
stepContext.addInput(PropertyNames.PARALLEL_INPUT, context.getInput(transform));
}
}
// ================================================================================
/**
   * A PTransform override factory which maps {@link Create.Values} PTransforms for streaming
   * pipelines into a Dataflow-specific variant.
*/
private static class StreamingFnApiCreateOverrideFactory<T>
implements PTransformOverrideFactory<PBegin, PCollection<T>, Create.Values<T>> {
@Override
public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<T>, Create.Values<T>> transform) {
Create.Values<T> original = transform.getTransform();
PCollection<T> output =
(PCollection) Iterables.getOnlyElement(transform.getOutputs().values());
return PTransformReplacement.of(
transform.getPipeline().begin(),
new StreamingFnApiCreate<>(original, output));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<T> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Specialized implementation for
* {@link org.apache.beam.sdk.transforms.Create.Values Create.Values} for the Dataflow runner in
* streaming mode over the Fn API.
*/
private static class StreamingFnApiCreate<T> extends PTransform<PBegin, PCollection<T>> {
private final Create.Values<T> transform;
private final PCollection<T> originalOutput;
private StreamingFnApiCreate(
Create.Values<T> transform,
PCollection<T> originalOutput) {
this.transform = transform;
this.originalOutput = originalOutput;
}
@Override
public final PCollection<T> expand(PBegin input) {
try {
PCollection<T> pc = Pipeline
.applyTransform(input, new Impulse(IsBounded.BOUNDED))
.apply(ParDo.of(DecodeAndEmitDoFn
.fromIterable(transform.getElements(), originalOutput.getCoder())));
pc.setCoder(originalOutput.getCoder());
return pc;
} catch (IOException e) {
throw new IllegalStateException("Unable to encode elements.", e);
}
}
/**
* A DoFn which stores encoded versions of elements and a representation of a Coder
* capable of decoding those elements.
*
* <p>TODO: BEAM-2422 - Make this a SplittableDoFn.
*/
private static class DecodeAndEmitDoFn<T> extends DoFn<byte[], T> {
public static <T> DecodeAndEmitDoFn<T> fromIterable(Iterable<T> elements, Coder<T> elemCoder)
throws IOException {
ImmutableList.Builder<byte[]> allElementsBytes = ImmutableList.builder();
for (T element : elements) {
byte[] bytes = CoderUtils.encodeToByteArray(elemCoder, element);
allElementsBytes.add(bytes);
}
return new DecodeAndEmitDoFn<>(allElementsBytes.build(), elemCoder);
}
private final Collection<byte[]> elements;
private final RunnerApi.MessageWithComponents coderSpec;
// lazily initialized by parsing coderSpec
private transient Coder<T> coder;
private Coder<T> getCoder() throws IOException {
if (coder == null) {
coder =
(Coder)
CoderTranslation.fromProto(
coderSpec.getCoder(),
RehydratedComponents.forComponents(coderSpec.getComponents()));
}
return coder;
}
private DecodeAndEmitDoFn(Collection<byte[]> elements, Coder<T> coder) throws IOException {
this.elements = elements;
this.coderSpec = CoderTranslation.toProto(coder);
}
@ProcessElement
public void processElement(ProcessContext context) throws IOException {
for (byte[] element : elements) {
context.output(CoderUtils.decodeFromByteArray(getCoder(), element));
}
}
}
}
/** The Dataflow specific override for the impulse primitive. */
private static class Impulse extends PTransform<PBegin, PCollection<byte[]>> {
private final IsBounded isBounded;
private Impulse(IsBounded isBounded) {
this.isBounded = isBounded;
}
@Override
public PCollection<byte[]> expand(PBegin input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(), WindowingStrategy.globalDefault(), isBounded, ByteArrayCoder.of());
}
private static class Translator implements TransformTranslator<Impulse> {
@Override
public void translate(Impulse transform, TranslationContext context) {
if (context.getPipelineOptions().isStreaming()) {
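        // Streaming Impulse is translated as a read of the special "_starting_signal/"
        // Pubsub subscription understood by the Dataflow service.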
StepTranslationContext stepContext = context.addStep(transform, "ParallelRead");
stepContext.addInput(PropertyNames.FORMAT, "pubsub");
stepContext.addInput(PropertyNames.PUBSUB_SUBSCRIPTION, "_starting_signal/");
stepContext.addOutput(context.getOutput(transform));
} else {
throw new UnsupportedOperationException(
"Impulse source for batch pipelines has not been defined.");
}
}
}
static {
DataflowPipelineTranslator.registerTransformTranslator(Impulse.class, new Translator());
}
}
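  /**
   * Replaces {@link Read.Unbounded} with {@link StreamingUnboundedRead} in streaming pipelines.
   */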
private static class StreamingUnboundedReadOverrideFactory<T>
implements PTransformOverrideFactory<PBegin, PCollection<T>, Read.Unbounded<T>> {
@Override
public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<T>, Read.Unbounded<T>> transform) {
return PTransformReplacement.of(
transform.getPipeline().begin(), new StreamingUnboundedRead<>(transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<T> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Specialized implementation for
* {@link org.apache.beam.sdk.io.Read.Unbounded Read.Unbounded} for the
* Dataflow runner in streaming mode.
*
* <p>In particular, if an UnboundedSource requires deduplication, then features of WindmillSink
* are leveraged to do the deduplication.
*/
private static class StreamingUnboundedRead<T> extends PTransform<PBegin, PCollection<T>> {
private final UnboundedSource<T, ?> source;
public StreamingUnboundedRead(Read.Unbounded<T> transform) {
this.source = transform.getSource();
}
@Override
public final PCollection<T> expand(PBegin input) {
source.validate();
if (source.requiresDeduping()) {
return Pipeline.applyTransform(input, new ReadWithIds<>(source))
.apply(new Deduplicate<T>());
} else {
return Pipeline.applyTransform(input, new ReadWithIds<>(source))
.apply("StripIds", ParDo.of(new ValueWithRecordId.StripIdsDoFn<T>()));
}
}
/**
* {@link PTransform} that reads {@code (record,recordId)} pairs from an
* {@link UnboundedSource}.
*/
private static class ReadWithIds<T>
extends PTransform<PInput, PCollection<ValueWithRecordId<T>>> {
private final UnboundedSource<T, ?> source;
private ReadWithIds(UnboundedSource<T, ?> source) {
this.source = source;
}
@Override
public final PCollection<ValueWithRecordId<T>> expand(PInput input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(), WindowingStrategy.globalDefault(), IsBounded.UNBOUNDED,
ValueWithRecordId.ValueWithRecordIdCoder.of(source.getOutputCoder()));
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
builder.delegate(source);
}
public UnboundedSource<T, ?> getSource() {
return source;
}
}
@Override
public String getKindString() {
return String.format("Read(%s)", NameUtils.approximateSimpleName(source));
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
ReadWithIds.class, new ReadWithIdsTranslator());
}
private static class ReadWithIdsTranslator
implements TransformTranslator<ReadWithIds<?>> {
@Override
public void translate(ReadWithIds<?> transform,
TranslationContext context) {
ReadTranslator.translateReadHelper(transform.getSource(), transform, context);
}
}
}
/**
* Remove values with duplicate ids.
*/
private static class Deduplicate<T>
extends PTransform<PCollection<ValueWithRecordId<T>>, PCollection<T>> {
// Use a finite set of keys to improve bundling. Without this, the key space
// will be the space of ids which is potentially very large, which results in much
// more per-key overhead.
private static final int NUM_RESHARD_KEYS = 10000;
@Override
public PCollection<T> expand(PCollection<ValueWithRecordId<T>> input) {
return input
.apply(WithKeys.of(new SerializableFunction<ValueWithRecordId<T>, Integer>() {
@Override
public Integer apply(ValueWithRecordId<T> value) {
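                // Java's % may yield negative values here; that is acceptable, since the keys
                // only need to form a small finite set, not the exact range [0, NUM_RESHARD_KEYS).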
return Arrays.hashCode(value.getId()) % NUM_RESHARD_KEYS;
}
}))
// Reshuffle will dedup based on ids in ValueWithRecordId by passing the data through
// WindmillSink.
.apply(Reshuffle.<Integer, ValueWithRecordId<T>>of())
.apply("StripIds", ParDo.of(
new DoFn<KV<Integer, ValueWithRecordId<T>>, T>() {
@ProcessElement
public void processElement(ProcessContext c) {
c.output(c.element().getValue().getValue());
}
}));
}
}
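  /**
   * Replaces {@link Read.Bounded} with {@link StreamingBoundedRead} in streaming pipelines.
   */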
private static class StreamingBoundedReadOverrideFactory<T>
implements PTransformOverrideFactory<PBegin, PCollection<T>, Read.Bounded<T>> {
@Override
public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<T>, Read.Bounded<T>> transform) {
return PTransformReplacement.of(
transform.getPipeline().begin(), new StreamingBoundedRead<>(transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<T> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Specialized implementation for {@link org.apache.beam.sdk.io.Read.Bounded Read.Bounded} for the
* Dataflow runner in streaming mode.
*/
private static class StreamingBoundedRead<T> extends PTransform<PBegin, PCollection<T>> {
private final BoundedSource<T> source;
public StreamingBoundedRead(Read.Bounded<T> transform) {
this.source = transform.getSource();
}
@Override
public final PCollection<T> expand(PBegin input) {
source.validate();
return Pipeline.applyTransform(input, new UnboundedReadFromBoundedSource<>(source))
.setIsBoundedInternal(IsBounded.BOUNDED);
}
}
/**
* A marker {@link DoFn} for writing the contents of a {@link PCollection} to a streaming
* {@link PCollectionView} backend implementation.
*/
@Internal
public static class StreamingPCollectionViewWriterFn<T> extends DoFn<Iterable<T>, T> {
private final PCollectionView<?> view;
private final Coder<T> dataCoder;
public static <T> StreamingPCollectionViewWriterFn<T> create(
PCollectionView<?> view, Coder<T> dataCoder) {
return new StreamingPCollectionViewWriterFn<>(view, dataCoder);
}
private StreamingPCollectionViewWriterFn(PCollectionView<?> view, Coder<T> dataCoder) {
this.view = view;
this.dataCoder = dataCoder;
}
public PCollectionView<?> getView() {
return view;
}
public Coder<T> getDataCoder() {
return dataCoder;
}
@ProcessElement
public void processElement(ProcessContext c, BoundedWindow w) throws Exception {
throw new UnsupportedOperationException(
String.format(
"%s is a marker class only and should never be executed.", getClass().getName()));
}
}
@Override
public String toString() {
return "DataflowRunner#" + options.getJobName();
}
/**
   * Attempts to detect all the resources the class loader has access to. This does not recurse
   * into class loader parents, which stops it from pulling in resources from the system class
   * loader.
*
* @param classLoader The URLClassLoader to use to detect resources to stage.
* @throws IllegalArgumentException If either the class loader is not a URLClassLoader or one
* of the resources the class loader exposes is not a file resource.
* @return A list of absolute paths to the resources the class loader uses.
*/
protected static List<String> detectClassPathResourcesToStage(ClassLoader classLoader) {
if (!(classLoader instanceof URLClassLoader)) {
String message = String.format("Unable to use ClassLoader to detect classpath elements. "
+ "Current ClassLoader is %s, only URLClassLoaders are supported.", classLoader);
LOG.error(message);
throw new IllegalArgumentException(message);
}
List<String> files = new ArrayList<>();
for (URL url : ((URLClassLoader) classLoader).getURLs()) {
try {
files.add(new File(url.toURI()).getAbsolutePath());
} catch (IllegalArgumentException | URISyntaxException e) {
String message = String.format("Unable to convert url (%s) to file.", url);
LOG.error(message);
throw new IllegalArgumentException(message, e);
}
}
return files;
}
/**
* Finds the id for the running job of the given name.
*/
private String getJobIdFromName(String jobName) {
try {
ListJobsResponse listResult;
String token = null;
do {
listResult = dataflowClient.listJobs(token);
token = listResult.getNextPageToken();
for (Job job : listResult.getJobs()) {
if (job.getName().equals(jobName)
&& MonitoringUtil.toState(job.getCurrentState()).equals(State.RUNNING)) {
return job.getId();
}
}
} while (token != null);
} catch (GoogleJsonResponseException e) {
throw new RuntimeException(
"Got error while looking up jobs: "
+ (e.getDetails() != null ? e.getDetails().getMessage() : e), e);
} catch (IOException e) {
throw new RuntimeException("Got error while looking up jobs: ", e);
}
throw new IllegalArgumentException("Could not find running job named " + jobName);
}
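  /**
   * A primitive carrier for a {@link Combine.GroupedValues} transform: it records the original
   * transform and the output coder so the Dataflow service can translate the combine directly.
   */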
static class CombineGroupedValues<K, InputT, OutputT>
extends PTransform<PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>> {
private final Combine.GroupedValues<K, InputT, OutputT> original;
private final Coder<KV<K, OutputT>> outputCoder;
CombineGroupedValues(
GroupedValues<K, InputT, OutputT> original, Coder<KV<K, OutputT>> outputCoder) {
this.original = original;
this.outputCoder = outputCoder;
}
@Override
public PCollection<KV<K, OutputT>> expand(PCollection<KV<K, Iterable<InputT>>> input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(), input.getWindowingStrategy(), input.isBounded(),
outputCoder);
}
public Combine.GroupedValues<K, InputT, OutputT> getOriginalCombine() {
return original;
}
}
private static class PrimitiveCombineGroupedValuesOverrideFactory<K, InputT, OutputT>
implements PTransformOverrideFactory<
PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>,
Combine.GroupedValues<K, InputT, OutputT>> {
@Override
public PTransformReplacement<PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>>
getReplacementTransform(
AppliedPTransform<
PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>,
GroupedValues<K, InputT, OutputT>>
transform) {
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
new CombineGroupedValues<>(
transform.getTransform(),
PTransformReplacements.getSingletonMainOutput(transform).getCoder()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<KV<K, OutputT>> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
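  /**
   * Replaces {@link PubsubUnboundedSink} with {@link StreamingPubsubIOWrite} in streaming
   * pipelines, deferring the write to the Dataflow service's internal Pubsub implementation.
   */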
private class StreamingPubsubIOWriteOverrideFactory
implements PTransformOverrideFactory<
PCollection<PubsubMessage>, PDone, PubsubUnboundedSink> {
private final DataflowRunner runner;
private StreamingPubsubIOWriteOverrideFactory(DataflowRunner runner) {
this.runner = runner;
}
@Override
public PTransformReplacement<PCollection<PubsubMessage>, PDone>
getReplacementTransform(
AppliedPTransform<PCollection<PubsubMessage>, PDone, PubsubUnboundedSink>
transform) {
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
new StreamingPubsubIOWrite(runner, transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PDone newOutput) {
return Collections.emptyMap();
}
}
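  /**
   * In streaming mode, replaces {@link WriteFiles} transforms that use runner-determined
   * sharding with an equivalent write using an explicit shard count derived from the worker
   * configuration (see the numShards computation below).
   */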
@VisibleForTesting
static class StreamingShardedWriteFactory<UserT, DestinationT, OutputT>
implements PTransformOverrideFactory<
PCollection<UserT>, PDone, WriteFiles<UserT, DestinationT, OutputT>> {
    // We pick 10 as a default, as it works well with the default number of workers started
    // by Dataflow.
static final int DEFAULT_NUM_SHARDS = 10;
DataflowPipelineWorkerPoolOptions options;
StreamingShardedWriteFactory(PipelineOptions options) {
this.options = options.as(DataflowPipelineWorkerPoolOptions.class);
}
@Override
public PTransformReplacement<PCollection<UserT>, PDone> getReplacementTransform(
AppliedPTransform<PCollection<UserT>, PDone, WriteFiles<UserT, DestinationT, OutputT>>
transform) {
// By default, if numShards is not set WriteFiles will produce one file per bundle. In
// streaming, there are large numbers of small bundles, resulting in many tiny files.
      // Instead we pick max workers * 2 to ensure full parallelism, but prevent too many files.
// (current_num_workers * 2 might be a better choice, but that value is not easily available
// today).
// If the user does not set either numWorkers or maxNumWorkers, default to 10 shards.
int numShards;
if (options.getMaxNumWorkers() > 0) {
numShards = options.getMaxNumWorkers() * 2;
} else if (options.getNumWorkers() > 0) {
numShards = options.getNumWorkers() * 2;
} else {
numShards = DEFAULT_NUM_SHARDS;
}
try {
List<PCollectionView<?>> sideInputs =
WriteFilesTranslation.getDynamicDestinationSideInputs(transform);
FileBasedSink sink = WriteFilesTranslation.getSink(transform);
WriteFiles<UserT, DestinationT, OutputT> replacement =
WriteFiles.to(sink).withSideInputs(sideInputs);
if (WriteFilesTranslation.isWindowedWrites(transform)) {
replacement = replacement.withWindowedWrites();
}
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
replacement.withNumShards(numShards));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(Map<TupleTag<?>, PValue> outputs,
PDone newOutput) {
return Collections.emptyMap();
}
}
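  /**
   * Resolves the worker harness container image for the job: if the configured image spec
   * contains the literal token "IMAGE", the token is replaced according to the job type.
   */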
@VisibleForTesting
static String getContainerImageForJob(DataflowPipelineOptions options) {
String workerHarnessContainerImage = options.getWorkerHarnessContainerImage();
if (!workerHarnessContainerImage.contains("IMAGE")) {
return workerHarnessContainerImage;
} else if (hasExperiment(options, "beam_fn_api")) {
return workerHarnessContainerImage.replace("IMAGE", "java");
} else if (options.isStreaming()) {
return workerHarnessContainerImage.replace("IMAGE", "beam-java-streaming");
} else {
return workerHarnessContainerImage.replace("IMAGE", "beam-java-batch");
}
}
static void verifyStateSupported(DoFn<?, ?> fn) {
DoFnSignature signature = DoFnSignatures.getSignature(fn.getClass());
for (DoFnSignature.StateDeclaration stateDecl : signature.stateDeclarations().values()) {
// https://issues.apache.org/jira/browse/BEAM-1474
if (stateDecl.stateType().isSubtypeOf(TypeDescriptor.of(MapState.class))) {
throw new UnsupportedOperationException(String.format(
"%s does not currently support %s",
DataflowRunner.class.getSimpleName(),
MapState.class.getSimpleName()
));
}
// https://issues.apache.org/jira/browse/BEAM-1479
if (stateDecl.stateType().isSubtypeOf(TypeDescriptor.of(SetState.class))) {
throw new UnsupportedOperationException(String.format(
"%s does not currently support %s",
DataflowRunner.class.getSimpleName(),
SetState.class.getSimpleName()
));
}
}
}
static void verifyStateSupportForWindowingStrategy(WindowingStrategy strategy) {
// https://issues.apache.org/jira/browse/BEAM-2507
if (!strategy.getWindowFn().isNonMerging()) {
throw new UnsupportedOperationException(
String.format(
"%s does not currently support state or timers with merging windows",
DataflowRunner.class.getSimpleName()));
}
}
}
| runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.dataflow;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static org.apache.beam.sdk.util.SerializableUtils.serializeToByteArray;
import static org.apache.beam.sdk.util.StringUtils.byteArrayToJsonString;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.services.clouddebugger.v2.Clouddebugger;
import com.google.api.services.clouddebugger.v2.model.Debuggee;
import com.google.api.services.clouddebugger.v2.model.RegisterDebuggeeRequest;
import com.google.api.services.clouddebugger.v2.model.RegisterDebuggeeResponse;
import com.google.api.services.dataflow.model.DataflowPackage;
import com.google.api.services.dataflow.model.Job;
import com.google.api.services.dataflow.model.ListJobsResponse;
import com.google.api.services.dataflow.model.WorkerPool;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.base.Utf8;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.beam.runners.core.construction.CoderTranslation;
import org.apache.beam.runners.core.construction.DeduplicatedFlattenFactory;
import org.apache.beam.runners.core.construction.EmptyFlattenAsCreateFactory;
import org.apache.beam.runners.core.construction.PTransformMatchers;
import org.apache.beam.runners.core.construction.PTransformReplacements;
import org.apache.beam.runners.core.construction.RehydratedComponents;
import org.apache.beam.runners.core.construction.ReplacementOutputs;
import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
import org.apache.beam.runners.core.construction.UnconsumedReads;
import org.apache.beam.runners.core.construction.WriteFilesTranslation;
import org.apache.beam.runners.dataflow.DataflowPipelineTranslator.JobSpecification;
import org.apache.beam.runners.dataflow.StreamingViewOverrides.StreamingCreatePCollectionViewFactory;
import org.apache.beam.runners.dataflow.options.DataflowPipelineDebugOptions;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.runners.dataflow.options.DataflowPipelineWorkerPoolOptions;
import org.apache.beam.runners.dataflow.util.DataflowTemplateJob;
import org.apache.beam.runners.dataflow.util.DataflowTransport;
import org.apache.beam.runners.dataflow.util.MonitoringUtil;
import org.apache.beam.runners.dataflow.util.PropertyNames;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.Pipeline.PipelineVisitor;
import org.apache.beam.sdk.PipelineResult.State;
import org.apache.beam.sdk.PipelineRunner;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.annotations.Internal;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.Coder.NonDeterministicException;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.common.runner.v1.RunnerApi;
import org.apache.beam.sdk.extensions.gcp.storage.PathValidator;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.FileBasedSink;
import org.apache.beam.sdk.io.FileSystems;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.io.WriteFiles;
import org.apache.beam.sdk.io.fs.ResourceId;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubMessage;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubMessageWithAttributesCoder;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSink;
import org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsValidator;
import org.apache.beam.sdk.options.ValueProvider.NestedValueProvider;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.runners.PTransformOverride;
import org.apache.beam.sdk.runners.PTransformOverrideFactory;
import org.apache.beam.sdk.runners.TransformHierarchy;
import org.apache.beam.sdk.runners.TransformHierarchy.Node;
import org.apache.beam.sdk.state.MapState;
import org.apache.beam.sdk.state.SetState;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.Combine.GroupedValues;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Reshuffle;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.WithKeys;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.CoderUtils;
import org.apache.beam.sdk.util.InstanceBuilder;
import org.apache.beam.sdk.util.MimeTypes;
import org.apache.beam.sdk.util.NameUtils;
import org.apache.beam.sdk.util.ReleaseInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollection.IsBounded;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.PCollectionViews;
import org.apache.beam.sdk.values.PDone;
import org.apache.beam.sdk.values.PInput;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.ValueWithRecordId;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link PipelineRunner} that executes the operations in the pipeline by first translating them
* to the Dataflow representation using the {@link DataflowPipelineTranslator} and then submitting
* them to a Dataflow service for execution.
*
* <h3>Permissions</h3>
*
* <p>When reading from a Dataflow source or writing to a Dataflow sink using
* {@code DataflowRunner}, the Google Cloud services account and the Google Compute Engine service
* account of the GCP project running the Dataflow Job will need access to the corresponding
* source/sink.
*
* <p>Please see <a href="https://cloud.google.com/dataflow/security-and-permissions">Google Cloud
* Dataflow Security and Permissions</a> for more details.
*/
public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
private static final Logger LOG = LoggerFactory.getLogger(DataflowRunner.class);
/** Provided configuration options. */
private final DataflowPipelineOptions options;
/** Client for the Dataflow service. This is used to actually submit jobs. */
private final DataflowClient dataflowClient;
/** Translator for this DataflowRunner, based on options. */
private final DataflowPipelineTranslator translator;
/** A set of user defined functions to invoke at different points in execution. */
private DataflowRunnerHooks hooks;
// The limit of CreateJob request size.
private static final int CREATE_JOB_REQUEST_LIMIT_BYTES = 10 * 1024 * 1024;
@VisibleForTesting
static final int GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT = 1024 * 1024;
private final Set<PCollection<?>> pcollectionsRequiringIndexedFormat;
/**
* Project IDs must contain lowercase letters, digits, or dashes.
* IDs must start with a letter and may not end with a dash.
* This regex isn't exact - it allows some patterns that would be rejected by
* the service, but it is sufficient for basic validation of project IDs.
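* For example, {@code my-project-123} matches this pattern while {@code 123project} does not.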
*/
public static final String PROJECT_ID_REGEXP = "[a-z][-a-z0-9:.]+[a-z0-9]";
/**
* Construct a runner from the provided options.
*
* @param options Properties that configure the runner.
* @return The newly created runner.
*/
public static DataflowRunner fromOptions(PipelineOptions options) {
DataflowPipelineOptions dataflowOptions =
PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
ArrayList<String> missing = new ArrayList<>();
if (dataflowOptions.getAppName() == null) {
missing.add("appName");
}
if (missing.size() > 0) {
throw new IllegalArgumentException(
"Missing required values: " + Joiner.on(',').join(missing));
}
PathValidator validator = dataflowOptions.getPathValidator();
String gcpTempLocation;
try {
gcpTempLocation = dataflowOptions.getGcpTempLocation();
} catch (Exception e) {
throw new IllegalArgumentException("DataflowRunner requires gcpTempLocation, "
+ "but failed to retrieve a value from PipelineOptions", e);
}
validator.validateOutputFilePrefixSupported(gcpTempLocation);
String stagingLocation;
try {
stagingLocation = dataflowOptions.getStagingLocation();
} catch (Exception e) {
throw new IllegalArgumentException("DataflowRunner requires stagingLocation, "
+ "but failed to retrieve a value from PipelineOptions", e);
}
validator.validateOutputFilePrefixSupported(stagingLocation);
if (!Strings.isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
}
if (dataflowOptions.getFilesToStage() == null) {
dataflowOptions.setFilesToStage(detectClassPathResourcesToStage(
DataflowRunner.class.getClassLoader()));
if (dataflowOptions.getFilesToStage().isEmpty()) {
throw new IllegalArgumentException("No files to stage has been found.");
} else {
LOG.info("PipelineOptions.filesToStage was not specified. "
+ "Defaulting to files from the classpath: will stage {} files. "
+ "Enable logging at DEBUG level to see which files will be staged.",
dataflowOptions.getFilesToStage().size());
LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
}
}
// Verify jobName according to service requirements, converting it to lowercase if
// necessary.
String jobName =
dataflowOptions
.getJobName()
.toLowerCase();
checkArgument(
jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
"JobName invalid; the name must consist of only the characters "
+ "[-a-z0-9], starting with a letter and ending with a letter "
+ "or number");
if (!jobName.equals(dataflowOptions.getJobName())) {
LOG.info(
"PipelineOptions.jobName did not match the service requirements. "
+ "Using {} instead of {}.",
jobName,
dataflowOptions.getJobName());
}
dataflowOptions.setJobName(jobName);
// Verify project
String project = dataflowOptions.getProject();
if (project.matches("[0-9]*")) {
throw new IllegalArgumentException("Project ID '" + project
+ "' invalid. Please make sure you specified the Project ID, not project number.");
} else if (!project.matches(PROJECT_ID_REGEXP)) {
throw new IllegalArgumentException("Project ID '" + project
+ "' invalid. Please make sure you specified the Project ID, not project description.");
}
DataflowPipelineDebugOptions debugOptions =
dataflowOptions.as(DataflowPipelineDebugOptions.class);
// Verify the number of worker threads is a valid value
if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
throw new IllegalArgumentException("Number of worker harness threads '"
+ debugOptions.getNumberOfWorkerHarnessThreads()
+ "' invalid. Please make sure the value is non-negative.");
}
if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
}
return new DataflowRunner(dataflowOptions);
}
@VisibleForTesting protected DataflowRunner(DataflowPipelineOptions options) {
this.options = options;
this.dataflowClient = DataflowClient.create(options);
this.translator = DataflowPipelineTranslator.fromOptions(options);
this.pcollectionsRequiringIndexedFormat = new HashSet<>();
this.ptransformViewsWithNonDeterministicKeyCoders = new HashSet<>();
}
private List<PTransformOverride> getOverrides(boolean streaming) {
ImmutableList.Builder<PTransformOverride> overridesBuilder = ImmutableList.builder();
// Create is implemented in terms of a Read, so it must precede the override to Read in
// streaming
overridesBuilder
.add(
PTransformOverride.of(
PTransformMatchers.flattenWithDuplicateInputs(),
DeduplicatedFlattenFactory.create()))
.add(
PTransformOverride.of(
PTransformMatchers.emptyFlatten(), EmptyFlattenAsCreateFactory.instance()));
if (streaming) {
if (!hasExperiment(options, "enable_custom_pubsub_source")) {
overridesBuilder.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(PubsubUnboundedSource.class),
new StreamingPubsubIOReadOverrideFactory()));
}
if (!hasExperiment(options, "enable_custom_pubsub_sink")) {
overridesBuilder.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(PubsubUnboundedSink.class),
new StreamingPubsubIOWriteOverrideFactory(this)));
}
if (hasExperiment(options, "beam_fn_api")) {
overridesBuilder.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Create.Values.class),
new StreamingFnApiCreateOverrideFactory()));
}
overridesBuilder
// Support Splittable DoFn for now only in streaming mode.
// The order of the following overrides is important because they are applied in order.
// By default Dataflow runner replaces single-output ParDo with a ParDoSingle override.
// However, we want a different expansion for single-output splittable ParDo.
.add(
PTransformOverride.of(
PTransformMatchers.splittableParDoSingle(),
new ReflectiveOneToOneOverrideFactory(
SplittableParDoOverrides.ParDoSingleViaMulti.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.splittableParDoMulti(),
new SplittableParDoOverrides.SplittableParDoOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.writeWithRunnerDeterminedSharding(),
new StreamingShardedWriteFactory(options)))
.add(
// Streaming Bounded Read is implemented in terms of Streaming Unbounded Read, and
// must precede it
PTransformOverride.of(
PTransformMatchers.classEqualTo(Read.Bounded.class),
new StreamingBoundedReadOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Read.Unbounded.class),
new StreamingUnboundedReadOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(View.CreatePCollectionView.class),
new StreamingCreatePCollectionViewFactory()));
} else {
overridesBuilder
// State and timer pardos are implemented by expansion to GBK-then-ParDo
.add(
PTransformOverride.of(
PTransformMatchers.stateOrTimerParDoMulti(),
BatchStatefulParDoOverrides.multiOutputOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.stateOrTimerParDoSingle(),
BatchStatefulParDoOverrides.singleOutputOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.MapViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsMap.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.MultimapViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsMultimap.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.SingletonViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsSingleton.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.ListViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsList.class, this)))
.add(
PTransformOverride.of(
PTransformMatchers.createViewWithViewFn(PCollectionViews.IterableViewFn.class),
new ReflectiveOneToOneOverrideFactory(
BatchViewOverrides.BatchViewAsIterable.class, this)));
}
overridesBuilder
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Reshuffle.class), new ReshuffleOverrideFactory()))
// Order is important. Streaming views almost all use Combine internally.
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(Combine.GroupedValues.class),
new PrimitiveCombineGroupedValuesOverrideFactory()))
.add(
PTransformOverride.of(
PTransformMatchers.classEqualTo(ParDo.SingleOutput.class),
new PrimitiveParDoSingleFactory()));
return overridesBuilder.build();
}
private static class ReflectiveOneToOneOverrideFactory<
InputT, OutputT, TransformT extends PTransform<PCollection<InputT>, PCollection<OutputT>>>
extends SingleInputOutputOverrideFactory<
PCollection<InputT>, PCollection<OutputT>, TransformT> {
private final Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement;
private final DataflowRunner runner;
private ReflectiveOneToOneOverrideFactory(
Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement,
DataflowRunner runner) {
this.replacement = replacement;
this.runner = runner;
}
@Override
public PTransformReplacement<PCollection<InputT>, PCollection<OutputT>> getReplacementTransform(
AppliedPTransform<PCollection<InputT>, PCollection<OutputT>, TransformT> transform) {
PTransform<PCollection<InputT>, PCollection<OutputT>> rep =
InstanceBuilder.ofType(replacement)
.withArg(DataflowRunner.class, runner)
.withArg(
(Class<TransformT>) transform.getTransform().getClass(), transform.getTransform())
.build();
return PTransformReplacement.of(PTransformReplacements.getSingletonMainInput(transform), rep);
}
}
private String debuggerMessage(String projectId, String uniquifier) {
return String.format("To debug your job, visit Google Cloud Debugger at: "
+ "https://console.developers.google.com/debug?project=%s&dbgee=%s",
projectId, uniquifier);
}
private void maybeRegisterDebuggee(DataflowPipelineOptions options, String uniquifier) {
if (!options.getEnableCloudDebugger()) {
return;
}
if (options.getDebuggee() != null) {
throw new RuntimeException("Should not specify the debuggee");
}
Clouddebugger debuggerClient = DataflowTransport.newClouddebuggerClient(options).build();
Debuggee debuggee = registerDebuggee(debuggerClient, uniquifier);
options.setDebuggee(debuggee);
System.out.println(debuggerMessage(options.getProject(), debuggee.getUniquifier()));
}
private Debuggee registerDebuggee(Clouddebugger debuggerClient, String uniquifier) {
RegisterDebuggeeRequest registerReq = new RegisterDebuggeeRequest();
registerReq.setDebuggee(new Debuggee()
.setProject(options.getProject())
.setUniquifier(uniquifier)
.setDescription(uniquifier)
.setAgentVersion("google.com/cloud-dataflow-java/v1"));
try {
RegisterDebuggeeResponse registerResponse =
debuggerClient.controller().debuggees().register(registerReq).execute();
Debuggee debuggee = registerResponse.getDebuggee();
if (debuggee.getStatus() != null && debuggee.getStatus().getIsError()) {
throw new RuntimeException("Unable to register with the debugger: "
+ debuggee.getStatus().getDescription().getFormat());
}
return debuggee;
} catch (IOException e) {
throw new RuntimeException("Unable to register with the debugger: ", e);
}
}
@Override
public DataflowPipelineJob run(Pipeline pipeline) {
logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline);
if (containsUnboundedPCollection(pipeline)) {
options.setStreaming(true);
}
replaceTransforms(pipeline);
LOG.info("Executing pipeline on the Dataflow Service, which will have billing implications "
+ "related to Google Compute Engine usage and other Google Cloud Services.");
List<DataflowPackage> packages = options.getStager().stageFiles();
// Set a unique client_request_id in the CreateJob request.
// This is used to ensure idempotence of job creation across retried
// attempts to create a job. Specifically, if the service returns a job with
// a different client_request_id, it means the returned one is a different
// job previously created with the same job name, and that the job creation
// has been effectively rejected. The SDK should return
// Error::Already_Exists to the user in that case.
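// The request id below is a UTC timestamp with a random four-digit suffix appended for uniqueness.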
int randomNum = new Random().nextInt(9000) + 1000;
String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm").withZone(DateTimeZone.UTC)
.print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum;
// Try to create a debuggee ID. This must happen before the job is translated since it may
// update the options.
DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
maybeRegisterDebuggee(dataflowOptions, requestId);
JobSpecification jobSpecification =
translator.translate(pipeline, this, packages);
Job newJob = jobSpecification.getJob();
newJob.setClientRequestId(requestId);
ReleaseInfo releaseInfo = ReleaseInfo.getReleaseInfo();
String version = releaseInfo.getVersion();
checkState(
!version.equals("${pom.version}"),
"Unable to submit a job to the Dataflow service with unset version ${pom.version}");
System.out.println("Dataflow SDK version: " + version);
newJob.getEnvironment().setUserAgent((Map) releaseInfo.getProperties());
// The Dataflow Service may write to the temporary directory directly, so
// it must be verified.
if (!isNullOrEmpty(options.getGcpTempLocation())) {
newJob.getEnvironment().setTempStoragePrefix(
dataflowOptions.getPathValidator().verifyPath(options.getGcpTempLocation()));
}
newJob.getEnvironment().setDataset(options.getTempDatasetId());
newJob.getEnvironment().setExperiments(options.getExperiments());
// Set the Docker container image that executes Dataflow worker harness, residing in Google
// Container Registry. Translator is guaranteed to create a worker pool prior to this point.
String workerHarnessContainerImage = getContainerImageForJob(options);
for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) {
workerPool.setWorkerHarnessContainerImage(workerHarnessContainerImage);
}
newJob.getEnvironment().setVersion(getEnvironmentVersion(options));
if (hooks != null) {
hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment());
}
if (!isNullOrEmpty(options.getDataflowJobFile())
|| !isNullOrEmpty(options.getTemplateLocation())) {
boolean isTemplate = !isNullOrEmpty(options.getTemplateLocation());
if (isTemplate) {
checkArgument(isNullOrEmpty(options.getDataflowJobFile()),
"--dataflowJobFile and --templateLocation are mutually exclusive.");
}
String fileLocation = firstNonNull(
options.getTemplateLocation(), options.getDataflowJobFile());
checkArgument(
fileLocation.startsWith("/") || fileLocation.startsWith("gs://"),
"Location must be local or on Cloud Storage, got %s.",
fileLocation);
ResourceId fileResource = FileSystems.matchNewResource(fileLocation, false /* isDirectory */);
String workSpecJson = DataflowPipelineTranslator.jobToString(newJob);
try (PrintWriter printWriter =
new PrintWriter(
Channels.newOutputStream(FileSystems.create(fileResource, MimeTypes.TEXT)))) {
printWriter.print(workSpecJson);
LOG.info("Printed job specification to {}", fileLocation);
} catch (IOException ex) {
String error =
String.format("Cannot create output file at %s", fileLocation);
if (isTemplate) {
throw new RuntimeException(error, ex);
} else {
LOG.warn(error, ex);
}
}
if (isTemplate) {
LOG.info("Template successfully created.");
return new DataflowTemplateJob();
}
}
String jobIdToUpdate = null;
if (options.isUpdate()) {
jobIdToUpdate = getJobIdFromName(options.getJobName());
newJob.setTransformNameMapping(options.getTransformNameMapping());
newJob.setReplaceJobId(jobIdToUpdate);
}
Job jobResult;
try {
jobResult = dataflowClient.createJob(newJob);
} catch (GoogleJsonResponseException e) {
String errorMessages = "Unexpected errors";
if (e.getDetails() != null) {
if (Utf8.encodedLength(newJob.toString()) >= CREATE_JOB_REQUEST_LIMIT_BYTES) {
errorMessages = "The size of the serialized JSON representation of the pipeline "
+ "exceeds the allowable limit. "
+ "For more information, please check the FAQ link below:\n"
+ "https://cloud.google.com/dataflow/faq";
} else {
errorMessages = e.getDetails().getMessage();
}
}
throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e);
} catch (IOException e) {
throw new RuntimeException("Failed to create a workflow job", e);
}
// Use a raw client for post-launch monitoring, as status calls may fail
// regularly and need not be retried automatically.
DataflowPipelineJob dataflowPipelineJob =
new DataflowPipelineJob(
DataflowClient.create(options),
jobResult.getId(),
options,
jobSpecification.getStepNames());
// If the service returned client request id, the SDK needs to compare it
// with the original id generated in the request, if they are not the same
// (i.e., the returned job is not created by this request), throw
// DataflowJobAlreadyExistsException or DataflowJobAlreadyUpdatedException
// depending on whether this is a reload or not.
if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty()
&& !jobResult.getClientRequestId().equals(requestId)) {
// If updating a job.
if (options.isUpdate()) {
throw new DataflowJobAlreadyUpdatedException(dataflowPipelineJob,
String.format("The job named %s with id: %s has already been updated into job id: %s "
+ "and cannot be updated again.",
newJob.getName(), jobIdToUpdate, jobResult.getId()));
} else {
throw new DataflowJobAlreadyExistsException(dataflowPipelineJob,
String.format("There is already an active job named %s with id: %s. If you want "
+ "to submit a second job, try again by setting a different name using --jobName.",
newJob.getName(), jobResult.getId()));
}
}
LOG.info("To access the Dataflow monitoring console, please navigate to {}",
MonitoringUtil.getJobMonitoringPageURL(
options.getProject(), options.getRegion(), jobResult.getId()));
System.out.println("Submitted job: " + jobResult.getId());
LOG.info("To cancel the job using the 'gcloud' tool, run:\n> {}",
MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId()));
return dataflowPipelineJob;
}
/** Returns true if the specified experiment is enabled, handling null experiments. */
public static boolean hasExperiment(DataflowPipelineDebugOptions options, String experiment) {
List<String> experiments =
firstNonNull(options.getExperiments(), Collections.<String>emptyList());
return experiments.contains(experiment);
}
/** Helper to configure the Dataflow Job Environment based on the user's job options. */
private static Map<String, Object> getEnvironmentVersion(DataflowPipelineOptions options) {
DataflowRunnerInfo runnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
String majorVersion;
String jobType;
if (hasExperiment(options, "beam_fn_api")) {
majorVersion = runnerInfo.getFnApiEnvironmentMajorVersion();
jobType = options.isStreaming() ? "FNAPI_STREAMING" : "FNAPI_BATCH";
} else {
majorVersion = runnerInfo.getLegacyEnvironmentMajorVersion();
jobType = options.isStreaming() ? "STREAMING" : "JAVA_BATCH_AUTOSCALING";
}
return ImmutableMap.<String, Object>of(
PropertyNames.ENVIRONMENT_VERSION_MAJOR_KEY, majorVersion,
PropertyNames.ENVIRONMENT_VERSION_JOB_TYPE_KEY, jobType);
}
@VisibleForTesting
void replaceTransforms(Pipeline pipeline) {
boolean streaming = options.isStreaming() || containsUnboundedPCollection(pipeline);
// Ensure all outputs of all reads are consumed before potentially replacing any
// Read PTransforms
UnconsumedReads.ensureAllReadsConsumed(pipeline);
pipeline.replaceAll(getOverrides(streaming));
}
private boolean containsUnboundedPCollection(Pipeline p) {
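// The pipeline is considered unbounded if any PCollection in it is unbounded, since
// IsBounded.and(...) yields UNBOUNDED when either operand is UNBOUNDED.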
class BoundednessVisitor extends PipelineVisitor.Defaults {
IsBounded boundedness = IsBounded.BOUNDED;
@Override
public void visitValue(PValue value, Node producer) {
if (value instanceof PCollection) {
boundedness = boundedness.and(((PCollection) value).isBounded());
}
}
}
BoundednessVisitor visitor = new BoundednessVisitor();
p.traverseTopologically(visitor);
return visitor.boundedness == IsBounded.UNBOUNDED;
}
/**
* Returns the DataflowPipelineTranslator associated with this object.
*/
public DataflowPipelineTranslator getTranslator() {
return translator;
}
/**
* Sets callbacks to invoke during execution; see {@code DataflowRunnerHooks}.
*/
@Experimental
public void setHooks(DataflowRunnerHooks hooks) {
this.hooks = hooks;
}
/////////////////////////////////////////////////////////////////////////////
/** Outputs a warning about PCollection views without deterministic key coders. */
private void logWarningIfPCollectionViewHasNonDeterministicKeyCoder(Pipeline pipeline) {
// We need to wait until this point to determine the names of the transforms, since only
// at this time do we know the hierarchy of the transforms; otherwise we could
// have just recorded the full names during apply time.
if (!ptransformViewsWithNonDeterministicKeyCoders.isEmpty()) {
final SortedSet<String> ptransformViewNamesWithNonDeterministicKeyCoders = new TreeSet<>();
pipeline.traverseTopologically(
new PipelineVisitor.Defaults() {
@Override
public void visitValue(PValue value, TransformHierarchy.Node producer) {}
@Override
public void visitPrimitiveTransform(TransformHierarchy.Node node) {
if (ptransformViewsWithNonDeterministicKeyCoders.contains(node.getTransform())) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
}
@Override
public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
if (node.getTransform() instanceof View.AsMap
|| node.getTransform() instanceof View.AsMultimap) {
PCollection<KV<?, ?>> input =
(PCollection<KV<?, ?>>) Iterables.getOnlyElement(node.getInputs().values());
KvCoder<?, ?> inputCoder = (KvCoder) input.getCoder();
try {
inputCoder.getKeyCoder().verifyDeterministic();
} catch (NonDeterministicException e) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
}
if (ptransformViewsWithNonDeterministicKeyCoders.contains(node.getTransform())) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
return CompositeBehavior.ENTER_TRANSFORM;
}
@Override
public void leaveCompositeTransform(TransformHierarchy.Node node) {}
});
LOG.warn("Unable to use indexed implementation for View.AsMap and View.AsMultimap for {} "
+ "because the key coder is not deterministic. Falling back to singleton implementation "
+ "which may cause memory and/or performance problems. Future major versions of "
+ "Dataflow will require deterministic key coders.",
ptransformViewNamesWithNonDeterministicKeyCoders);
}
}
/**
* Returns true if the passed in {@link PCollection} needs to be materialized using
* an indexed format.
*/
boolean doesPCollectionRequireIndexedFormat(PCollection<?> pcol) {
return pcollectionsRequiringIndexedFormat.contains(pcol);
}
/**
* Marks the passed in {@link PCollection} as requiring materialization using
* an indexed format.
*/
void addPCollectionRequiringIndexedFormat(PCollection<?> pcol) {
pcollectionsRequiringIndexedFormat.add(pcol);
}
/** A set of {@link View}s with non-deterministic key coders. */
private Set<PTransform<?, ?>> ptransformViewsWithNonDeterministicKeyCoders;
/**
* Records that the {@link PTransform} requires a deterministic key coder.
*/
void recordViewUsesNonDeterministicKeyCoder(PTransform<?, ?> ptransform) {
ptransformViewsWithNonDeterministicKeyCoders.add(ptransform);
}
// ================================================================================
// PubsubIO translations
// ================================================================================
private static class StreamingPubsubIOReadOverrideFactory
implements PTransformOverrideFactory<
PBegin, PCollection<PubsubMessage>, PubsubUnboundedSource> {
@Override
public PTransformReplacement<PBegin, PCollection<PubsubMessage>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<PubsubMessage>, PubsubUnboundedSource> transform) {
return PTransformReplacement.of(
transform.getPipeline().begin(), new StreamingPubsubIORead(transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<PubsubMessage> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Suppress application of {@link PubsubUnboundedSource#expand} in streaming mode so that we can
* instead defer to Windmill's implementation.
*/
private static class StreamingPubsubIORead
extends PTransform<PBegin, PCollection<PubsubMessage>> {
private final PubsubUnboundedSource transform;
public StreamingPubsubIORead(PubsubUnboundedSource transform) {
this.transform = transform;
}
PubsubUnboundedSource getOverriddenTransform() {
return transform;
}
@Override
public PCollection<PubsubMessage> expand(PBegin input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(),
WindowingStrategy.globalDefault(),
IsBounded.UNBOUNDED,
new PubsubMessageWithAttributesCoder());
}
@Override
protected String getKindString() {
return "StreamingPubsubIORead";
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
StreamingPubsubIORead.class, new StreamingPubsubIOReadTranslator());
}
}
/** Rewrite {@link StreamingPubsubIORead} to the appropriate internal node. */
private static class StreamingPubsubIOReadTranslator
implements TransformTranslator<StreamingPubsubIORead> {
@Override
public void translate(StreamingPubsubIORead transform, TranslationContext context) {
checkArgument(
context.getPipelineOptions().isStreaming(),
"StreamingPubsubIORead is only for streaming pipelines.");
PubsubUnboundedSource overriddenTransform = transform.getOverriddenTransform();
StepTranslationContext stepContext = context.addStep(transform, "ParallelRead");
stepContext.addInput(PropertyNames.FORMAT, "pubsub");
if (overriddenTransform.getTopicProvider() != null) {
if (overriddenTransform.getTopicProvider().isAccessible()) {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC, overriddenTransform.getTopic().getV1Beta1Path());
} else {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC_OVERRIDE,
((NestedValueProvider) overriddenTransform.getTopicProvider()).propertyName());
}
}
if (overriddenTransform.getSubscriptionProvider() != null) {
if (overriddenTransform.getSubscriptionProvider().isAccessible()) {
stepContext.addInput(
PropertyNames.PUBSUB_SUBSCRIPTION,
overriddenTransform.getSubscription().getV1Beta1Path());
} else {
stepContext.addInput(
PropertyNames.PUBSUB_SUBSCRIPTION_OVERRIDE,
((NestedValueProvider) overriddenTransform.getSubscriptionProvider()).propertyName());
}
}
if (overriddenTransform.getTimestampAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_TIMESTAMP_ATTRIBUTE, overriddenTransform.getTimestampAttribute());
}
if (overriddenTransform.getIdAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_ID_ATTRIBUTE, overriddenTransform.getIdAttribute());
}
// In both cases, the transform needs to read PubsubMessage. However, in case it needs
// the attributes, we supply an identity "parse fn" so the worker will read PubsubMessage's
// from Windmill and simply pass them around; and in case it doesn't need attributes,
// we're already implicitly using a "Coder" that interprets the data as a PubsubMessage's
// payload.
if (overriddenTransform.getNeedsAttributes()) {
stepContext.addInput(
PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN,
byteArrayToJsonString(
serializeToByteArray(new IdentityMessageFn())));
}
stepContext.addOutput(context.getOutput(transform));
}
}
private static class IdentityMessageFn
extends SimpleFunction<PubsubMessage, PubsubMessage> {
@Override
public PubsubMessage apply(PubsubMessage input) {
return input;
}
}
/**
* Suppress application of {@link PubsubUnboundedSink#expand} in streaming mode so that we can
* instead defer to Windmill's implementation.
*/
private static class StreamingPubsubIOWrite
extends PTransform<PCollection<PubsubMessage>, PDone> {
private final PubsubUnboundedSink transform;
/**
* Builds an instance of this class from the overridden transform.
*/
public StreamingPubsubIOWrite(
DataflowRunner runner, PubsubUnboundedSink transform) {
this.transform = transform;
}
PubsubUnboundedSink getOverriddenTransform() {
return transform;
}
@Override
public PDone expand(PCollection<PubsubMessage> input) {
return PDone.in(input.getPipeline());
}
@Override
protected String getKindString() {
return "StreamingPubsubIOWrite";
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
StreamingPubsubIOWrite.class, new StreamingPubsubIOWriteTranslator());
}
}
/**
* Rewrite {@link StreamingPubsubIOWrite} to the appropriate internal node.
*/
private static class StreamingPubsubIOWriteTranslator implements
TransformTranslator<StreamingPubsubIOWrite> {
@Override
public void translate(
StreamingPubsubIOWrite transform,
TranslationContext context) {
checkArgument(context.getPipelineOptions().isStreaming(),
"StreamingPubsubIOWrite is only for streaming pipelines.");
PubsubUnboundedSink overriddenTransform = transform.getOverriddenTransform();
StepTranslationContext stepContext = context.addStep(transform, "ParallelWrite");
stepContext.addInput(PropertyNames.FORMAT, "pubsub");
if (overriddenTransform.getTopicProvider().isAccessible()) {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC, overriddenTransform.getTopic().getV1Beta1Path());
} else {
stepContext.addInput(
PropertyNames.PUBSUB_TOPIC_OVERRIDE,
((NestedValueProvider) overriddenTransform.getTopicProvider()).propertyName());
}
if (overriddenTransform.getTimestampAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_TIMESTAMP_ATTRIBUTE, overriddenTransform.getTimestampAttribute());
}
if (overriddenTransform.getIdAttribute() != null) {
stepContext.addInput(
PropertyNames.PUBSUB_ID_ATTRIBUTE, overriddenTransform.getIdAttribute());
}
stepContext.addInput(
PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN,
byteArrayToJsonString(serializeToByteArray(new IdentityMessageFn())));
// No coder is needed in this case since the collection being written already consists of
// PubsubMessages; however, the Dataflow backend requires a coder to be set.
stepContext.addEncodingInput(WindowedValue.getValueOnlyCoder(VoidCoder.of()));
stepContext.addInput(PropertyNames.PARALLEL_INPUT, context.getInput(transform));
}
}
// ================================================================================
/**
* A PTransform override factory which maps Create.Values PTransforms for streaming pipelines
* into a Dataflow-specific variant.
*/
private static class StreamingFnApiCreateOverrideFactory<T>
implements PTransformOverrideFactory<PBegin, PCollection<T>, Create.Values<T>> {
@Override
public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<T>, Create.Values<T>> transform) {
Create.Values<T> original = transform.getTransform();
PCollection<T> output =
(PCollection) Iterables.getOnlyElement(transform.getOutputs().values());
return PTransformReplacement.of(
transform.getPipeline().begin(),
new StreamingFnApiCreate<>(original, output));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<T> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Specialized implementation for
* {@link org.apache.beam.sdk.transforms.Create.Values Create.Values} for the Dataflow runner in
* streaming mode over the Fn API.
*/
private static class StreamingFnApiCreate<T> extends PTransform<PBegin, PCollection<T>> {
private final Create.Values<T> transform;
private final PCollection<T> originalOutput;
private StreamingFnApiCreate(
Create.Values<T> transform,
PCollection<T> originalOutput) {
this.transform = transform;
this.originalOutput = originalOutput;
}
@Override
public final PCollection<T> expand(PBegin input) {
try {
PCollection<T> pc = Pipeline
.applyTransform(input, new Impulse(IsBounded.BOUNDED))
.apply(ParDo.of(DecodeAndEmitDoFn
.fromIterable(transform.getElements(), originalOutput.getCoder())));
pc.setCoder(originalOutput.getCoder());
return pc;
} catch (IOException e) {
throw new IllegalStateException("Unable to encode elements.", e);
}
}
/**
* A DoFn which stores encoded versions of elements and a representation of a Coder
* capable of decoding those elements.
*
* <p>TODO: BEAM-2422 - Make this a SplittableDoFn.
*/
private static class DecodeAndEmitDoFn<T> extends DoFn<byte[], T> {
public static <T> DecodeAndEmitDoFn<T> fromIterable(Iterable<T> elements, Coder<T> elemCoder)
throws IOException {
ImmutableList.Builder<byte[]> allElementsBytes = ImmutableList.builder();
for (T element : elements) {
byte[] bytes = CoderUtils.encodeToByteArray(elemCoder, element);
allElementsBytes.add(bytes);
}
return new DecodeAndEmitDoFn<>(allElementsBytes.build(), elemCoder);
}
private final Collection<byte[]> elements;
private final RunnerApi.MessageWithComponents coderSpec;
// lazily initialized by parsing coderSpec
private transient Coder<T> coder;
private Coder<T> getCoder() throws IOException {
if (coder == null) {
coder =
(Coder)
CoderTranslation.fromProto(
coderSpec.getCoder(),
RehydratedComponents.forComponents(coderSpec.getComponents()));
}
return coder;
}
private DecodeAndEmitDoFn(Collection<byte[]> elements, Coder<T> coder) throws IOException {
this.elements = elements;
this.coderSpec = CoderTranslation.toProto(coder);
}
@ProcessElement
public void processElement(ProcessContext context) throws IOException {
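// Use getCoder() rather than the transient coder field directly; the field is null after
// deserialization.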
for (byte[] element : elements) {
context.output(CoderUtils.decodeFromByteArray(getCoder(), element));
}
}
}
}
/** The Dataflow specific override for the impulse primitive. */
private static class Impulse extends PTransform<PBegin, PCollection<byte[]>> {
private final IsBounded isBounded;
private Impulse(IsBounded isBounded) {
this.isBounded = isBounded;
}
@Override
public PCollection<byte[]> expand(PBegin input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(), WindowingStrategy.globalDefault(), isBounded, ByteArrayCoder.of());
}
private static class Translator implements TransformTranslator<Impulse> {
@Override
public void translate(Impulse transform, TranslationContext context) {
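// In streaming mode the impulse is represented as a Pubsub read of a special
// "starting signal" subscription recognized by the Dataflow service.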
if (context.getPipelineOptions().isStreaming()) {
StepTranslationContext stepContext = context.addStep(transform, "ParallelRead");
stepContext.addInput(PropertyNames.FORMAT, "pubsub");
stepContext.addInput(PropertyNames.PUBSUB_SUBSCRIPTION, "_starting_signal/");
stepContext.addOutput(context.getOutput(transform));
} else {
throw new UnsupportedOperationException(
"Impulse source for batch pipelines has not been defined.");
}
}
}
static {
DataflowPipelineTranslator.registerTransformTranslator(Impulse.class, new Translator());
}
}
private static class StreamingUnboundedReadOverrideFactory<T>
implements PTransformOverrideFactory<PBegin, PCollection<T>, Read.Unbounded<T>> {
@Override
public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<T>, Read.Unbounded<T>> transform) {
return PTransformReplacement.of(
transform.getPipeline().begin(), new StreamingUnboundedRead<>(transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<T> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Specialized implementation for
* {@link org.apache.beam.sdk.io.Read.Unbounded Read.Unbounded} for the
* Dataflow runner in streaming mode.
*
* <p>In particular, if an UnboundedSource requires deduplication, then features of WindmillSink
* are leveraged to do the deduplication.
*/
private static class StreamingUnboundedRead<T> extends PTransform<PBegin, PCollection<T>> {
private final UnboundedSource<T, ?> source;
public StreamingUnboundedRead(Read.Unbounded<T> transform) {
this.source = transform.getSource();
}
@Override
public final PCollection<T> expand(PBegin input) {
source.validate();
if (source.requiresDeduping()) {
return Pipeline.applyTransform(input, new ReadWithIds<>(source))
.apply(new Deduplicate<T>());
} else {
return Pipeline.applyTransform(input, new ReadWithIds<>(source))
.apply("StripIds", ParDo.of(new ValueWithRecordId.StripIdsDoFn<T>()));
}
}
/**
* {@link PTransform} that reads {@code (record,recordId)} pairs from an
* {@link UnboundedSource}.
*/
private static class ReadWithIds<T>
extends PTransform<PInput, PCollection<ValueWithRecordId<T>>> {
private final UnboundedSource<T, ?> source;
private ReadWithIds(UnboundedSource<T, ?> source) {
this.source = source;
}
@Override
public final PCollection<ValueWithRecordId<T>> expand(PInput input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(), WindowingStrategy.globalDefault(), IsBounded.UNBOUNDED,
ValueWithRecordId.ValueWithRecordIdCoder.of(source.getOutputCoder()));
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
builder.delegate(source);
}
public UnboundedSource<T, ?> getSource() {
return source;
}
}
@Override
public String getKindString() {
return String.format("Read(%s)", NameUtils.approximateSimpleName(source));
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
ReadWithIds.class, new ReadWithIdsTranslator());
}
private static class ReadWithIdsTranslator
implements TransformTranslator<ReadWithIds<?>> {
@Override
public void translate(ReadWithIds<?> transform,
TranslationContext context) {
ReadTranslator.translateReadHelper(transform.getSource(), transform, context);
}
}
}
/**
* Remove values with duplicate ids.
*/
private static class Deduplicate<T>
extends PTransform<PCollection<ValueWithRecordId<T>>, PCollection<T>> {
// Use a finite set of keys to improve bundling. Without this, the key space
// would be the space of ids, which is potentially very large and results in much
// more per-key overhead.
private static final int NUM_RESHARD_KEYS = 10000;
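// Note: Arrays.hashCode may be negative and Java's % preserves the sign, so keys fall in
// (-NUM_RESHARD_KEYS, NUM_RESHARD_KEYS); any stable int key works for resharding.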
@Override
public PCollection<T> expand(PCollection<ValueWithRecordId<T>> input) {
return input
.apply(WithKeys.of(new SerializableFunction<ValueWithRecordId<T>, Integer>() {
@Override
public Integer apply(ValueWithRecordId<T> value) {
return Arrays.hashCode(value.getId()) % NUM_RESHARD_KEYS;
}
}))
// Reshuffle will dedup based on ids in ValueWithRecordId by passing the data through
// WindmillSink.
.apply(Reshuffle.<Integer, ValueWithRecordId<T>>of())
.apply("StripIds", ParDo.of(
new DoFn<KV<Integer, ValueWithRecordId<T>>, T>() {
@ProcessElement
public void processElement(ProcessContext c) {
c.output(c.element().getValue().getValue());
}
}));
}
}
private static class StreamingBoundedReadOverrideFactory<T>
implements PTransformOverrideFactory<PBegin, PCollection<T>, Read.Bounded<T>> {
@Override
public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
AppliedPTransform<PBegin, PCollection<T>, Read.Bounded<T>> transform) {
return PTransformReplacement.of(
transform.getPipeline().begin(), new StreamingBoundedRead<>(transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<T> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
/**
* Specialized implementation for {@link org.apache.beam.sdk.io.Read.Bounded Read.Bounded} for the
* Dataflow runner in streaming mode.
*/
private static class StreamingBoundedRead<T> extends PTransform<PBegin, PCollection<T>> {
private final BoundedSource<T> source;
public StreamingBoundedRead(Read.Bounded<T> transform) {
this.source = transform.getSource();
}
@Override
public final PCollection<T> expand(PBegin input) {
source.validate();
return Pipeline.applyTransform(input, new UnboundedReadFromBoundedSource<>(source))
.setIsBoundedInternal(IsBounded.BOUNDED);
}
}
/**
* A marker {@link DoFn} for writing the contents of a {@link PCollection} to a streaming
* {@link PCollectionView} backend implementation.
*/
@Internal
public static class StreamingPCollectionViewWriterFn<T> extends DoFn<Iterable<T>, T> {
private final PCollectionView<?> view;
private final Coder<T> dataCoder;
public static <T> StreamingPCollectionViewWriterFn<T> create(
PCollectionView<?> view, Coder<T> dataCoder) {
return new StreamingPCollectionViewWriterFn<>(view, dataCoder);
}
private StreamingPCollectionViewWriterFn(PCollectionView<?> view, Coder<T> dataCoder) {
this.view = view;
this.dataCoder = dataCoder;
}
public PCollectionView<?> getView() {
return view;
}
public Coder<T> getDataCoder() {
return dataCoder;
}
@ProcessElement
public void processElement(ProcessContext c, BoundedWindow w) throws Exception {
throw new UnsupportedOperationException(
String.format(
"%s is a marker class only and should never be executed.", getClass().getName()));
}
}
@Override
public String toString() {
return "DataflowRunner#" + options.getJobName();
}
/**
* Attempts to detect all the resources the class loader has access to. This does not recurse
* to class loader parents, which stops it from pulling in resources from the system class loader.
*
* @param classLoader The URLClassLoader to use to detect resources to stage.
* @throws IllegalArgumentException If either the class loader is not a URLClassLoader or one
* of the resources the class loader exposes is not a file resource.
* @return A list of absolute paths to the resources the class loader uses.
*/
protected static List<String> detectClassPathResourcesToStage(ClassLoader classLoader) {
if (!(classLoader instanceof URLClassLoader)) {
String message = String.format("Unable to use ClassLoader to detect classpath elements. "
+ "Current ClassLoader is %s, only URLClassLoaders are supported.", classLoader);
LOG.error(message);
throw new IllegalArgumentException(message);
}
List<String> files = new ArrayList<>();
for (URL url : ((URLClassLoader) classLoader).getURLs()) {
try {
files.add(new File(url.toURI()).getAbsolutePath());
} catch (IllegalArgumentException | URISyntaxException e) {
String message = String.format("Unable to convert url (%s) to file.", url);
LOG.error(message);
throw new IllegalArgumentException(message, e);
}
}
return files;
}
/**
* Finds the id for the running job of the given name.
*/
private String getJobIdFromName(String jobName) {
try {
ListJobsResponse listResult;
String token = null;
do {
listResult = dataflowClient.listJobs(token);
token = listResult.getNextPageToken();
for (Job job : listResult.getJobs()) {
if (job.getName().equals(jobName)
&& MonitoringUtil.toState(job.getCurrentState()).equals(State.RUNNING)) {
return job.getId();
}
}
} while (token != null);
} catch (GoogleJsonResponseException e) {
throw new RuntimeException(
"Got error while looking up jobs: "
+ (e.getDetails() != null ? e.getDetails().getMessage() : e), e);
} catch (IOException e) {
throw new RuntimeException("Got error while looking up jobs: ", e);
}
throw new IllegalArgumentException("Could not find running job named " + jobName);
}
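/**
 * A primitive stand-in for {@link Combine.GroupedValues}: its expansion only creates the
 * output {@link PCollection}, and the original combine stays available to the translator
 * via {@link #getOriginalCombine()}.
 */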
static class CombineGroupedValues<K, InputT, OutputT>
extends PTransform<PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>> {
private final Combine.GroupedValues<K, InputT, OutputT> original;
private final Coder<KV<K, OutputT>> outputCoder;
CombineGroupedValues(
GroupedValues<K, InputT, OutputT> original, Coder<KV<K, OutputT>> outputCoder) {
this.original = original;
this.outputCoder = outputCoder;
}
@Override
public PCollection<KV<K, OutputT>> expand(PCollection<KV<K, Iterable<InputT>>> input) {
return PCollection.createPrimitiveOutputInternal(
input.getPipeline(), input.getWindowingStrategy(), input.isBounded(),
outputCoder);
}
public Combine.GroupedValues<K, InputT, OutputT> getOriginalCombine() {
return original;
}
}
private static class PrimitiveCombineGroupedValuesOverrideFactory<K, InputT, OutputT>
implements PTransformOverrideFactory<
PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>,
Combine.GroupedValues<K, InputT, OutputT>> {
@Override
public PTransformReplacement<PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>>
getReplacementTransform(
AppliedPTransform<
PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>,
GroupedValues<K, InputT, OutputT>>
transform) {
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
new CombineGroupedValues<>(
transform.getTransform(),
PTransformReplacements.getSingletonMainOutput(transform).getCoder()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PCollection<KV<K, OutputT>> newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
}
private class StreamingPubsubIOWriteOverrideFactory
implements PTransformOverrideFactory<
PCollection<PubsubMessage>, PDone, PubsubUnboundedSink> {
private final DataflowRunner runner;
private StreamingPubsubIOWriteOverrideFactory(DataflowRunner runner) {
this.runner = runner;
}
@Override
public PTransformReplacement<PCollection<PubsubMessage>, PDone>
getReplacementTransform(
AppliedPTransform<PCollection<PubsubMessage>, PDone, PubsubUnboundedSink>
transform) {
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
new StreamingPubsubIOWrite(runner, transform.getTransform()));
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PValue> outputs, PDone newOutput) {
return Collections.emptyMap();
}
}
@VisibleForTesting
static class StreamingShardedWriteFactory<UserT, DestinationT, OutputT>
implements PTransformOverrideFactory<
PCollection<UserT>, PDone, WriteFiles<UserT, DestinationT, OutputT>> {
// We pick 10 as a default, as it works well with the default number of workers started
// by Dataflow.
static final int DEFAULT_NUM_SHARDS = 10;
DataflowPipelineWorkerPoolOptions options;
StreamingShardedWriteFactory(PipelineOptions options) {
this.options = options.as(DataflowPipelineWorkerPoolOptions.class);
}
@Override
public PTransformReplacement<PCollection<UserT>, PDone> getReplacementTransform(
AppliedPTransform<PCollection<UserT>, PDone, WriteFiles<UserT, DestinationT, OutputT>>
transform) {
// By default, if numShards is not set WriteFiles will produce one file per bundle. In
// streaming, there are large numbers of small bundles, resulting in many tiny files.
// Instead we pick max workers * 2 to ensure full parallelism, but prevent too many files.
// (current_num_workers * 2 might be a better choice, but that value is not easily available
// today).
// If the user does not set either numWorkers or maxNumWorkers, default to 10 shards.
int numShards;
if (options.getMaxNumWorkers() > 0) {
numShards = options.getMaxNumWorkers() * 2;
} else if (options.getNumWorkers() > 0) {
numShards = options.getNumWorkers() * 2;
} else {
numShards = DEFAULT_NUM_SHARDS;
}
try {
List<PCollectionView<?>> sideInputs =
WriteFilesTranslation.getDynamicDestinationSideInputs(transform);
FileBasedSink sink = WriteFilesTranslation.getSink(transform);
WriteFiles<UserT, DestinationT, OutputT> replacement =
WriteFiles.to(sink).withSideInputs(sideInputs);
if (WriteFilesTranslation.isWindowedWrites(transform)) {
replacement = replacement.withWindowedWrites();
}
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
replacement.withNumShards(numShards));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public Map<PValue, ReplacementOutput> mapOutputs(Map<TupleTag<?>, PValue> outputs,
PDone newOutput) {
return Collections.emptyMap();
}
}
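/**
 * Resolves the worker harness container image for the job. If the configured image spec
 * contains the "IMAGE" placeholder, it is replaced with a variant matching the job type;
 * e.g. a hypothetical "gcr.io/dataflow/IMAGE:tag" becomes "gcr.io/dataflow/beam-java-batch:tag"
 * for a non-FnAPI batch job.
 */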
@VisibleForTesting
static String getContainerImageForJob(DataflowPipelineOptions options) {
String workerHarnessContainerImage = options.getWorkerHarnessContainerImage();
if (!workerHarnessContainerImage.contains("IMAGE")) {
return workerHarnessContainerImage;
} else if (hasExperiment(options, "beam_fn_api")) {
return workerHarnessContainerImage.replace("IMAGE", "java");
} else if (options.isStreaming()) {
return workerHarnessContainerImage.replace("IMAGE", "beam-java-streaming");
} else {
return workerHarnessContainerImage.replace("IMAGE", "beam-java-batch");
}
}
static void verifyStateSupported(DoFn<?, ?> fn) {
DoFnSignature signature = DoFnSignatures.getSignature(fn.getClass());
for (DoFnSignature.StateDeclaration stateDecl : signature.stateDeclarations().values()) {
// https://issues.apache.org/jira/browse/BEAM-1474
if (stateDecl.stateType().isSubtypeOf(TypeDescriptor.of(MapState.class))) {
throw new UnsupportedOperationException(String.format(
"%s does not currently support %s",
DataflowRunner.class.getSimpleName(),
MapState.class.getSimpleName()
));
}
// https://issues.apache.org/jira/browse/BEAM-1479
if (stateDecl.stateType().isSubtypeOf(TypeDescriptor.of(SetState.class))) {
throw new UnsupportedOperationException(String.format(
"%s does not currently support %s",
DataflowRunner.class.getSimpleName(),
SetState.class.getSimpleName()
));
}
}
}
static void verifyStateSupportForWindowingStrategy(WindowingStrategy strategy) {
// https://issues.apache.org/jira/browse/BEAM-2507
if (!strategy.getWindowFn().isNonMerging()) {
throw new UnsupportedOperationException(
String.format(
"%s does not currently support state or timers with merging windows",
DataflowRunner.class.getSimpleName()));
}
}
}
| Initialize the Coder in DecodeAndEmitDoFn
This closes #3769
| runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java | Initialize the Coder in DecodeAndEmitDoFn | <ide><path>unners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
<ide> @ProcessElement
<ide> public void processElement(ProcessContext context) throws IOException {
<ide> for (byte[] element : elements) {
<del> context.output(CoderUtils.decodeFromByteArray(coder, element));
<add> context.output(CoderUtils.decodeFromByteArray(getCoder(), element));
<ide> }
<ide> }
<ide> } |
|
Java | apache-2.0 | 47efd9dc561c99b76b86fd7cdce42922a7c760d3 | 0 | xfournet/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,signed/intellij-community,ibinti/intellij-community,ibinti/intellij-community,da1z/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,da1z/intellij-community,ibinti/intellij-community,xfournet/intellij-community,semonte/intellij-community,ibinti/intellij-community,ibinti/intellij-community,xfournet/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ibinti/intellij-community,apixandru/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,semonte/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,signed/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,da1z/intellij-community,da1z/intellij-community,asedunov/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,asedunov/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,da1z/intellij-community,FHannes/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,allotria/intellij-community,FHannes/intellij-community,semonte/intellij-community,allotria/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,signed/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,semonte/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,signed/intellij-community,allotria/intellij-community,suncycheng/intellij-community,signed/intellij-community,da1z/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,signed/intellij-community,semonte/intellij-community,asedunov/intellij-community,semonte/intellij-community,allotria/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,ibinti/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,vvv1559/intellij-community,allotria/intellij-community,apixandru/intellij-community,allotria/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,allotria/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ibinti/intellij-community,semonte/intellij-community,FHannes/intellij-community,xfournet/intellij-community,signed/intellij-community,allotria/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,apixandru/intellij-community,xfournet/intellij-community,FH
annes/intellij-community,FHannes/intellij-community,apixandru/intellij-community,allotria/intellij-community,signed/intellij-community,da1z/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,suncycheng/intellij-community,da1z/intellij-community,FHannes/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,signed/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,signed/intellij-community,FHannes/intellij-community,apixandru/intellij-community,xfournet/intellij-community,FHannes/intellij-community,semonte/intellij-community,asedunov/intellij-community,asedunov/intellij-community,xfournet/intellij-community,asedunov/intellij-community,allotria/intellij-community,da1z/intellij-community,mglukhikh/intellij-community | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
 * Encapsulates the logic of filtering test classes (classes that contain test cases).
 * <p/>
 * We want to have such a facility in order to be able to execute different sets of tests like <code>'fast tests'</code>,
 * <code>'problem tests'</code> etc.
 * <p/>
 * I.e. the assumed usage scenario is to create an object of this class with the necessary filtering criteria and use its
 * {@link TestClassesFilter#matches(String, String)} method to determine whether a particular test should be executed.
* <p/>
* The filtering is performed by fully-qualified test class name. There are two ways to define the criteria at the moment:
* <ul>
* <li>
 *     Define target class name filters (in regexp format) explicitly using
* {@link PatternListTestClassFilter#PatternListTestClassFilter(List) PatternListTestClassFilter};
* </li>
* <li>
 *     Read class name filters (in regexp format) from the given stream - see {@link #createOn(Reader, List)};
* </li>
* </ul>
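 * <p/>
 * A minimal usage sketch (the group name, test class name and <code>reader</code> below are hypothetical
 * examples rather than parts of a real configuration):
 * <pre>
 * TestClassesFilter filter = GroupBasedTestClassFilter.createOn(reader, Arrays.asList("CVS"));
 * boolean shouldRun = filter.matches("com.intellij.cvsSupport2.FooTest", null);
 * </pre>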
*/
public class GroupBasedTestClassFilter extends TestClassesFilter {
/**
   * Holds the reserved test group name that serves as a negation of the matching result.
*
* @see TestClassesFilter#matches(String, String)
*/
public static final String ALL_EXCLUDE_DEFINED = "ALL_EXCLUDE_DEFINED";
private final List<Group> myGroups = ContainerUtil.newSmartList();
private final Set<String> myTestGroupNames;
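  /**
   * @param filters        mapping from a test group name to the class name patterns of that group; patterns
   *                       prefixed with <code>'-'</code> are treated as exclusions within the group
   * @param testGroupNames names of the test groups whose filters should be applied by
   *                       {@link #matches(String, String)}
   */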
public GroupBasedTestClassFilter(MultiMap<String, String> filters, List<String> testGroupNames) {
    //an empty group name list means all patterns from each defined group should be excluded
myTestGroupNames = ContainerUtil.newTroveSet(testGroupNames);
for (String groupName : filters.keySet()) {
Collection<String> groupFilters = filters.get(groupName);
List<Pattern> includePatterns = compilePatterns(ContainerUtil.filter(groupFilters, s -> !s.startsWith("-")));
List<Pattern> excludedPatterns = compilePatterns(groupFilters.stream()
.filter(s -> s.startsWith("-") && s.length() > 1)
.map(s -> s.substring(1))
.collect(Collectors.toList()));
myGroups.add(new Group(groupName, includePatterns, excludedPatterns));
}
}
/**
   * Creates a <code>TestClassesFilter</code> object, assuming that the given stream contains grouped test class filters
   * in the following format:
* <p/>
* <ul>
   *   <li>
   *     every line that starts with the <code>'['</code> symbol and ends with the <code>']'</code> symbol defines the
   *     start of a new test group. That means that all test class filters that follow this line belong to the same
   *     test group, whose name is defined by the text contained between <code>'['</code> and <code>']'</code>;
   *   </li>
   *   <li>every line that starts with <code>'#'</code> is treated as a comment and ignored;</li>
   *   <li>every other line is considered to be a test class filter in regexp format;</li>
* </ul>
* <p/>
* <b>Example</b>
   * Consider a stream that points to the following data:
* <pre>
* [CVS]
* com.intellij.cvsSupport2.*
* [STRESS_TESTS]
* com.intellij.application.InspectionPerformanceTest
* com.intellij.application.TraverseUITest
* </pre>
* <p/>
* It defines two test groups:
* <ul>
* <li><b>CVS</b> group with the single test class name pattern <code>'com.intellij.cvsSupport2.*'</code>;</li>
* <li>
* <b>STRESS_TESTS</b> group with the following test class name patterns:
* <ul>
* <li>com.intellij.application.InspectionPerformanceTest</li>
* <li>com.intellij.application.TraverseUITest</li>
* </ul>
* </li>
* </ul>
* <p/>
   * This method does not assume ownership of the given stream reader, i.e. it expects the stream to be closed
   * on the caller's side.
*
*
   * @param reader         reader that points to the target test groups config
   * @param testGroupNames names of the test groups whose filters should be applied
   * @return newly created {@link GroupBasedTestClassFilter} object with the data read from the given reader
* @see TestClassesFilter#matches(String, String)
*/
@NotNull
public static TestClassesFilter createOn(@NotNull Reader reader, @NotNull List<String> testGroupNames) throws IOException {
return new GroupBasedTestClassFilter(readGroups(reader), testGroupNames);
}
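  /**
   * Parses the test group configuration described at {@link #createOn(Reader, List)} into a mapping from a
   * group name to the raw (not yet compiled) pattern lines of that group.
   */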
public static MultiMap<String, String> readGroups(Reader reader) throws IOException {
MultiMap<String, String> groupNameToPatternsMap = MultiMap.createLinked();
String currentGroupName = "";
@SuppressWarnings({"IOResourceOpenedButNotSafelyClosed"}) BufferedReader bufferedReader = new BufferedReader(reader);
String line;
while ((line = bufferedReader.readLine()) != null) {
      if (line.startsWith("#")) continue; // skip comment lines
      if (line.startsWith("[") && line.endsWith("]")) {
        // a "[groupName]" line opens a new test group; the following pattern lines belong to it
        currentGroupName = line.substring(1, line.length() - 1);
      }
else {
groupNameToPatternsMap.putValue(currentGroupName, line);
}
}
return groupNameToPatternsMap;
}
/**
   * Checks whether the given class name belongs to one of the enabled test groups based on the filtering rules
   * encapsulated in the current {@link GroupBasedTestClassFilter} object. I.e. this method returns <code>true</code>
   * if the given test class name is matched by any test class name filter configured for an enabled test group.
   * <p/>
   * <b>Note:</b> there is special-case processing when the enabled groups contain {@link #ALL_EXCLUDE_DEFINED}. This
   * method then returns <code>true</code> only if none of the registered patterns (for all test groups) match the
   * given test class name.
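   * <p/>
   * For example, given the sample configuration from {@link #createOn(Reader, List)} with only the <b>CVS</b> group
   * enabled, <code>matches("com.intellij.cvsSupport2.FooTest", null)</code> would return <code>true</code>, while
   * <code>matches("com.intellij.application.TraverseUITest", null)</code> would return <code>false</code> (the class
   * names here are illustrative).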
*
   * @param className  target test class name to check
   * @param moduleName name of the module that contains the class; ignored by this filter
   * @return <code>true</code> if the test class with the given name belongs to one of the enabled test groups;
   * <code>true</code> if the enabled groups are undefined (empty) or contain {@link #ALL_EXCLUDE_DEFINED} and the
   * given test class name is not matched by any registered pattern;
   * <code>false</code> otherwise
*/
@Override
public boolean matches(String className, String moduleName) {
if (myGroups.stream().filter(g -> myTestGroupNames.contains(g.name)).anyMatch(g -> g.matches(className))) return true;
return containsAllExcludeDefinedGroup(myTestGroupNames) && myGroups.stream().noneMatch(g -> g.matches(className));
}
private static boolean containsAllExcludeDefinedGroup(Set<String> groupNames) {
return groupNames.isEmpty() || groupNames.contains(ALL_EXCLUDE_DEFINED);
}
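  /**
   * Value object describing a single test group: its name plus the include/exclude patterns parsed from the
   * configuration.
   */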
private static class Group {
private final String name;
private final List<Pattern> included;
private final List<Pattern> excluded;
private Group(String name, List<Pattern> included, List<Pattern> excluded) {
this.name = name;
this.excluded = excluded;
this.included = included;
}
private boolean matches(String className) {
return !matchesAnyPattern(excluded, className) && matchesAnyPattern(included, className);
}
}
}
| platform/testFramework/src/com/intellij/GroupBasedTestClassFilter.java | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Collection;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
 * Encapsulates the logic of filtering test classes (classes that contain test cases).
 * <p/>
 * We want to have such a facility in order to be able to execute different sets of tests like <code>'fast tests'</code>,
 * <code>'problem tests'</code> etc.
 * <p/>
 * I.e. the assumed usage scenario is to create an object of this class with the necessary filtering criteria and use its
 * {@link TestClassesFilter#matches(String, String)} method to determine whether a particular test should be executed.
* <p/>
* The filtering is performed by fully-qualified test class name. There are two ways to define the criteria at the moment:
* <ul>
* <li>
 *     Define target class name filters (in regexp format) explicitly using
* {@link PatternListTestClassFilter#PatternListTestClassFilter(List) PatternListTestClassFilter};
* </li>
* <li>
 *     Read class name filters (in regexp format) from the given stream - see {@link #createOn(Reader, List)};
* </li>
* </ul>
*/
public class GroupBasedTestClassFilter extends TestClassesFilter {
/**
   * Holds the reserved test group name that serves as a negation of the matching result.
*
* @see TestClassesFilter#matches(String, String)
*/
public static final String ALL_EXCLUDE_DEFINED = "ALL_EXCLUDE_DEFINED";
private final List<Pattern> myIncludedTestGroupPatterns = ContainerUtil.newSmartList();
private final List<Pattern> myExcludedTestGroupPatterns = ContainerUtil.newSmartList();
private final List<Group> myGroups = ContainerUtil.newSmartList();
private boolean myContainsAllExcludeDefinedGroup;
public GroupBasedTestClassFilter(MultiMap<String, String> filters, List<String> testGroupNames) {
    //an empty group name list means all patterns from each defined group should be excluded
myContainsAllExcludeDefinedGroup = containsAllExcludeDefinedGroup(testGroupNames);
for (String groupName : filters.keySet()) {
Collection<String> groupFilters = filters.get(groupName);
List<Pattern> includePatterns = compilePatterns(ContainerUtil.filter(groupFilters, s -> !s.startsWith("-")));
List<Pattern> excludedPatterns = compilePatterns(groupFilters.stream()
.filter(s -> s.startsWith("-") && s.length() > 1)
.map(s -> s.substring(1))
.collect(Collectors.toList()));
myGroups.add(new Group(includePatterns, excludedPatterns));
if (testGroupNames.contains(groupName)) {
myIncludedTestGroupPatterns.addAll(includePatterns);
myExcludedTestGroupPatterns.addAll(excludedPatterns);
}
}
}
/**
   * Creates a <code>TestClassesFilter</code> object, assuming that the given stream contains grouped test class filters
   * in the following format:
* <p/>
* <ul>
   *   <li>
   *     every line that starts with the <code>'['</code> symbol and ends with the <code>']'</code> symbol defines the
   *     start of a new test group. That means that all test class filters that follow this line belong to the same
   *     test group, whose name is defined by the text contained between <code>'['</code> and <code>']'</code>;
   *   </li>
   *   <li>every line that starts with <code>'#'</code> is treated as a comment and ignored;</li>
   *   <li>every other line is considered to be a test class filter in regexp format;</li>
* </ul>
* <p/>
* <b>Example</b>
   * Consider a stream that points to the following data:
* <pre>
* [CVS]
* com.intellij.cvsSupport2.*
* [STRESS_TESTS]
* com.intellij.application.InspectionPerformanceTest
* com.intellij.application.TraverseUITest
* </pre>
* <p/>
* It defines two test groups:
* <ul>
* <li><b>CVS</b> group with the single test class name pattern <code>'com.intellij.cvsSupport2.*'</code>;</li>
* <li>
* <b>STRESS_TESTS</b> group with the following test class name patterns:
* <ul>
* <li>com.intellij.application.InspectionPerformanceTest</li>
* <li>com.intellij.application.TraverseUITest</li>
* </ul>
* </li>
* </ul>
* <p/>
   * This method does not assume ownership of the given stream reader, i.e. it expects the stream to be closed
   * on the caller's side.
*
*
   * @param reader         reader that points to the target test groups config
   * @param testGroupNames names of the test groups whose filters should be applied
   * @return newly created {@link GroupBasedTestClassFilter} object with the data read from the given reader
* @see TestClassesFilter#matches(String, String)
*/
@NotNull
public static TestClassesFilter createOn(@NotNull Reader reader, @NotNull List<String> testGroupNames) throws IOException {
return new GroupBasedTestClassFilter(readGroups(reader), testGroupNames);
}
public static MultiMap<String, String> readGroups(Reader reader) throws IOException {
MultiMap<String, String> groupNameToPatternsMap = MultiMap.createLinked();
String currentGroupName = "";
@SuppressWarnings({"IOResourceOpenedButNotSafelyClosed"}) BufferedReader bufferedReader = new BufferedReader(reader);
String line;
while ((line = bufferedReader.readLine()) != null) {
      if (line.startsWith("#")) continue; // skip comment lines
      if (line.startsWith("[") && line.endsWith("]")) {
        // a "[groupName]" line opens a new test group; the following pattern lines belong to it
        currentGroupName = line.substring(1, line.length() - 1);
      }
else {
groupNameToPatternsMap.putValue(currentGroupName, line);
}
}
return groupNameToPatternsMap;
}
/**
   * Checks whether the given class name belongs to one of the enabled test groups based on the filtering rules
   * encapsulated in the current {@link GroupBasedTestClassFilter} object. I.e. this method returns <code>true</code>
   * if the given test class name is matched by any test class name filter configured for an enabled test group.
   * <p/>
   * <b>Note:</b> there is special-case processing when the enabled groups contain {@link #ALL_EXCLUDE_DEFINED}. This
   * method then returns <code>true</code> only if none of the registered patterns (for all test groups) match the
   * given test class name.
*
   * @param className  target test class name to check
   * @param moduleName name of the module that contains the class; ignored by this filter
   * @return <code>true</code> if the test class with the given name belongs to one of the enabled test groups;
   * <code>true</code> if the enabled groups are undefined (empty) or contain {@link #ALL_EXCLUDE_DEFINED} and the
   * given test class name is not matched by any registered pattern;
   * <code>false</code> otherwise
*/
@Override
public boolean matches(String className, String moduleName) {
if (matchesAnyPattern(myExcludedTestGroupPatterns, className)) return false;
if (matchesAnyPattern(myIncludedTestGroupPatterns, className)) return true;
return myContainsAllExcludeDefinedGroup && myGroups.stream().noneMatch(g -> g.matches(className));
}
private static boolean containsAllExcludeDefinedGroup(List<String> groupNames) {
return groupNames.isEmpty() || groupNames.contains(ALL_EXCLUDE_DEFINED);
}
private static class Group {
public final List<Pattern> included;
public final List<Pattern> excluded;
public Group(List<Pattern> included, List<Pattern> excluded) {
this.excluded = excluded;
this.included = included;
}
private boolean matches(String className) {
return !matchesAnyPattern(excluded, className) && matchesAnyPattern(included, className);
}
}
}
| on IDEA-CR-19158: introduce a group name and remove fields, use filtration
| platform/testFramework/src/com/intellij/GroupBasedTestClassFilter.java | on IDEA-CR-19158: introduce a group name and remove fields, use filtration | <ide><path>latform/testFramework/src/com/intellij/GroupBasedTestClassFilter.java
<ide> import java.io.Reader;
<ide> import java.util.Collection;
<ide> import java.util.List;
<add>import java.util.Set;
<ide> import java.util.regex.Pattern;
<ide> import java.util.stream.Collectors;
<ide>
<ide> */
<ide> public static final String ALL_EXCLUDE_DEFINED = "ALL_EXCLUDE_DEFINED";
<ide>
<del> private final List<Pattern> myIncludedTestGroupPatterns = ContainerUtil.newSmartList();
<del> private final List<Pattern> myExcludedTestGroupPatterns = ContainerUtil.newSmartList();
<ide> private final List<Group> myGroups = ContainerUtil.newSmartList();
<del> private boolean myContainsAllExcludeDefinedGroup;
<add> private final Set<String> myTestGroupNames;
<ide>
<ide> public GroupBasedTestClassFilter(MultiMap<String, String> filters, List<String> testGroupNames) {
<ide>    //an empty group name list means all patterns from each defined group should be excluded
<del> myContainsAllExcludeDefinedGroup = containsAllExcludeDefinedGroup(testGroupNames);
<add> myTestGroupNames = ContainerUtil.newTroveSet(testGroupNames);
<ide>
<ide> for (String groupName : filters.keySet()) {
<ide> Collection<String> groupFilters = filters.get(groupName);
<ide> .filter(s -> s.startsWith("-") && s.length() > 1)
<ide> .map(s -> s.substring(1))
<ide> .collect(Collectors.toList()));
<del> myGroups.add(new Group(includePatterns, excludedPatterns));
<del>
<del> if (testGroupNames.contains(groupName)) {
<del> myIncludedTestGroupPatterns.addAll(includePatterns);
<del> myExcludedTestGroupPatterns.addAll(excludedPatterns);
<del> }
<add> myGroups.add(new Group(groupName, includePatterns, excludedPatterns));
<ide> }
<ide> }
<ide>
<ide> */
<ide> @Override
<ide> public boolean matches(String className, String moduleName) {
<del> if (matchesAnyPattern(myExcludedTestGroupPatterns, className)) return false;
<del> if (matchesAnyPattern(myIncludedTestGroupPatterns, className)) return true;
<del> return myContainsAllExcludeDefinedGroup && myGroups.stream().noneMatch(g -> g.matches(className));
<add> if (myGroups.stream().filter(g -> myTestGroupNames.contains(g.name)).anyMatch(g -> g.matches(className))) return true;
<add> return containsAllExcludeDefinedGroup(myTestGroupNames) && myGroups.stream().noneMatch(g -> g.matches(className));
<ide> }
<ide>
<del>
<del> private static boolean containsAllExcludeDefinedGroup(List<String> groupNames) {
<add> private static boolean containsAllExcludeDefinedGroup(Set<String> groupNames) {
<ide> return groupNames.isEmpty() || groupNames.contains(ALL_EXCLUDE_DEFINED);
<ide> }
<ide>
<ide> private static class Group {
<del> public final List<Pattern> included;
<del> public final List<Pattern> excluded;
<add> private final String name;
<add> private final List<Pattern> included;
<add> private final List<Pattern> excluded;
<ide>
<del> public Group(List<Pattern> included, List<Pattern> excluded) {
<add> private Group(String name, List<Pattern> included, List<Pattern> excluded) {
<add> this.name = name;
<ide> this.excluded = excluded;
<ide> this.included = included;
<ide> } |
|
Java | bsd-2-clause | d292fbcaa41d8707412f7afc47f0c25f8b8c16a2 | 0 | gab1one/imagej-ops,imagej/imagej-ops,stelfrich/imagej-ops,kephale/imagej-ops | /*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2014 - 2015 Board of Regents of the University of
* Wisconsin-Madison, University of Konstanz and Brian Northan.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.math;
import java.util.Random;
import net.imagej.ops.AbstractNamespace;
import net.imagej.ops.MathOps;
import net.imagej.ops.OpMethod;
import net.imglib2.IterableInterval;
import net.imglib2.IterableRealInterval;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.array.ArrayImg;
import net.imglib2.img.basictypeaccess.array.ByteArray;
import net.imglib2.img.basictypeaccess.array.DoubleArray;
import net.imglib2.img.planar.PlanarImg;
import net.imglib2.type.numeric.NumericType;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.integer.ByteType;
import net.imglib2.type.numeric.real.DoubleType;
/**
* The math namespace contains arithmetic operations.
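 * <p>
 * A minimal usage sketch (how the namespace instance is obtained is elided; the numeric values are
 * illustrative):
 * </p>
 * <pre>
 * MathNamespace math = ...; // e.g. looked up via the OpService
 * double sum = math.add(2.0, 3.0); // 5.0
 * </pre>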
*
* @author Curtis Rueden
*/
public class MathNamespace extends AbstractNamespace {
// -- Math namespace ops --
@OpMethod(op = net.imagej.ops.MathOps.Abs.class)
public Object abs(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Abs.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerAbs.class)
public int abs(final int a) {
final int result =
(Integer) ops()
.run(net.imagej.ops.math.PrimitiveMath.IntegerAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongAbs.class)
public long abs(final long a) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatAbs.class)
public float abs(final float a) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleAbs.class)
public double abs(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAbs.class)
public <I extends RealType<I>, O extends RealType<O>> O abs(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealAbs.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Add.class)
public Object add(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Add.class, args);
}
@OpMethod(ops = {
net.imagej.ops.arithmetic.add.parallel.AddConstantToArrayByteImageP.class,
net.imagej.ops.arithmetic.add.AddConstantToArrayByteImage.class })
public ArrayImg<ByteType, ByteArray> add(
final ArrayImg<ByteType, ByteArray> image, final byte value)
{
@SuppressWarnings("unchecked")
final ArrayImg<ByteType, ByteArray> result =
(ArrayImg<ByteType, ByteArray>) ops().run(MathOps.Add.NAME, image, value);
return result;
}
@OpMethod(
ops = {
net.imagej.ops.arithmetic.add.parallel.AddConstantToArrayDoubleImageP.class,
net.imagej.ops.arithmetic.add.AddConstantToArrayDoubleImage.class })
public
ArrayImg<DoubleType, DoubleArray> add(
final ArrayImg<DoubleType, DoubleArray> image, final double value)
{
@SuppressWarnings("unchecked")
final ArrayImg<DoubleType, DoubleArray> result =
(ArrayImg<DoubleType, DoubleArray>) ops().run(MathOps.Add.NAME, image,
value);
return result;
}
@OpMethod(op = net.imagej.ops.onthefly.ArithmeticOp.AddOp.class)
public Object add(final Object result, final Object a, final Object b) {
		final Object resultOp =
			ops().run(net.imagej.ops.onthefly.ArithmeticOp.AddOp.class, result, a, b);
		return resultOp;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerAdd.class)
public int add(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerAdd.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongAdd.class)
public long add(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongAdd.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatAdd.class)
public float add(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatAdd.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleAdd.class)
public double add(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleAdd.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAdd.class)
public <I extends RealType<I>, O extends RealType<O>> RealType<O> add(
final RealType<O> out, final RealType<I> in, final double constant)
{
@SuppressWarnings("unchecked")
final RealType<O> result =
(RealType<O>) ops().run(net.imagej.ops.arithmetic.real.RealAdd.class,
out, in, constant);
return result;
}
@OpMethod(
op = net.imagej.ops.arithmetic.add.AddRandomAccessibleIntervalToIterableInterval.class)
public
<T extends NumericType<T>> IterableInterval<T> add(
final IterableInterval<T> a, final RandomAccessibleInterval<T> b)
{
@SuppressWarnings("unchecked")
final IterableInterval<T> result =
(IterableInterval<T>) ops()
.run(
net.imagej.ops.arithmetic.add.AddRandomAccessibleIntervalToIterableInterval.class,
a, b);
return result;
}
@OpMethod(
op = net.imagej.ops.arithmetic.add.AddConstantToPlanarDoubleImage.class)
public PlanarImg<DoubleType, DoubleArray> add(
final PlanarImg<DoubleType, DoubleArray> image, final double value)
{
@SuppressWarnings("unchecked")
final PlanarImg<DoubleType, DoubleArray> result =
(PlanarImg<DoubleType, DoubleArray>) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToPlanarDoubleImage.class,
image, value);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.add.AddConstantToImageInPlace.class)
public <T extends NumericType<T>> IterableRealInterval<T> add(
final IterableRealInterval<T> image, final T value)
{
@SuppressWarnings("unchecked")
final IterableRealInterval<T> result =
(IterableRealInterval<T>) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToImageInPlace.class, image,
value);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.add.AddConstantToNumericType.class)
public <T extends NumericType<T>> T add(final T in, final T value) {
@SuppressWarnings("unchecked")
final T result =
(T) ops()
.run(net.imagej.ops.arithmetic.add.AddConstantToNumericType.class, in,
value);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.add.AddConstantToNumericType.class)
public <T extends NumericType<T>> T
add(final T out, final T in, final T value)
{
@SuppressWarnings("unchecked")
final T result =
(T) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToNumericType.class, out, in,
value);
return result;
}
@OpMethod(
op = net.imagej.ops.arithmetic.add.AddConstantToImageFunctional.class)
public <T extends NumericType<T>> RandomAccessibleInterval<T> add(
final RandomAccessibleInterval<T> out, final IterableInterval<T> in,
final T value)
{
@SuppressWarnings("unchecked")
final RandomAccessibleInterval<T> result =
(RandomAccessibleInterval<T>) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToImageFunctional.class, out,
in, value);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.AddNoise.class)
public Object addnoise(final Object... args) {
return ops().run(net.imagej.ops.MathOps.AddNoise.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAddNoise.class)
public <I extends RealType<I>, O extends RealType<O>> O addnoise(final O out,
final I in, final double rangeMin, final double rangeMax,
final double rangeStdDev, final Random rng)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealAddNoise.class, out, in,
rangeMin, rangeMax, rangeStdDev, rng);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.And.class)
public Object and(final Object... args) {
return ops().run(net.imagej.ops.MathOps.And.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerAnd.class)
public int and(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerAnd.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongAnd.class)
public long and(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongAnd.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAndConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O and(final O out,
final I in, final long constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealAndConstant.class, out,
in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccos.class)
public Object arccos(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccos.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleArccos.class)
public double arccos(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleArccos.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccos.class)
public <I extends RealType<I>, O extends RealType<O>> O arccos(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccos.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccosh.class)
public Object arccosh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccosh.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccosh.class)
public <I extends RealType<I>, O extends RealType<O>> O arccosh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccosh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccot.class)
public Object arccot(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccot.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccot.class)
public <I extends RealType<I>, O extends RealType<O>> O arccot(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccot.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccoth.class)
public Object arccoth(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccoth.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccoth.class)
public <I extends RealType<I>, O extends RealType<O>> O arccoth(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccoth.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccsc.class)
public Object arccsc(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccsc.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccsc.class)
public <I extends RealType<I>, O extends RealType<O>> O arccsc(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccsc.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccsch.class)
public Object arccsch(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccsch.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccsch.class)
public <I extends RealType<I>, O extends RealType<O>> O arccsch(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccsch.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsec.class)
public Object arcsec(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsec.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsec.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsec(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsec.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsech.class)
public Object arcsech(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsech.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsech.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsech(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsech.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsin.class)
public Object arcsin(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsin.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleArcsin.class)
public double arcsin(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleArcsin.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsin.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsin(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsin.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsinh.class)
public Object arcsinh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsinh.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsinh.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsinh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsinh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arctan.class)
public Object arctan(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arctan.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleArctan.class)
public double arctan(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleArctan.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArctan.class)
public <I extends RealType<I>, O extends RealType<O>> O arctan(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArctan.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arctanh.class)
public Object arctanh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arctanh.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArctanh.class)
public <I extends RealType<I>, O extends RealType<O>> O arctanh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArctanh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Ceil.class)
public Object ceil(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Ceil.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCeil.class)
public double ceil(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleCeil.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCeil.class)
public <I extends RealType<I>, O extends RealType<O>> O ceil(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCeil.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Complement.class)
public Object complement(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Complement.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerComplement.class)
public int complement(final int a) {
final int result =
(Integer) ops().run(
net.imagej.ops.math.PrimitiveMath.IntegerComplement.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongComplement.class)
public long complement(final long a) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongComplement.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Copy.class)
public Object copy(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Copy.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCopy.class)
public <I extends RealType<I>, O extends RealType<O>> O copy(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCopy.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Cos.class)
public Object cos(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Cos.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCos.class)
public double cos(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleCos.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCos.class)
public <I extends RealType<I>, O extends RealType<O>> O cos(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCos.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Cosh.class)
public Object cosh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Cosh.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCosh.class)
public double cosh(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleCosh.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCosh.class)
public <I extends RealType<I>, O extends RealType<O>> O cosh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCosh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Cot.class)
public Object cot(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Cot.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCot.class)
public <I extends RealType<I>, O extends RealType<O>> O cot(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCot.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Coth.class)
public Object coth(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Coth.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCoth.class)
public <I extends RealType<I>, O extends RealType<O>> O coth(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCoth.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Csc.class)
public Object csc(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Csc.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCsc.class)
public <I extends RealType<I>, O extends RealType<O>> O csc(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCsc.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Csch.class)
public Object csch(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Csch.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCsch.class)
public <I extends RealType<I>, O extends RealType<O>> O csch(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCsch.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.CubeRoot.class)
public Object cuberoot(final Object... args) {
return ops().run(net.imagej.ops.MathOps.CubeRoot.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCubeRoot.class)
public double cuberoot(final double a) {
final double result =
(Double) ops().run(
net.imagej.ops.math.PrimitiveMath.DoubleCubeRoot.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCubeRoot.class)
public <I extends RealType<I>, O extends RealType<O>> O cuberoot(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCubeRoot.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Divide.class)
public Object divide(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Divide.class, args);
}
@OpMethod(op = net.imagej.ops.onthefly.ArithmeticOp.DivideOp.class)
public Object divide(final Object result, final Object a, final Object b) {
		final Object resultOp =
			ops().run(net.imagej.ops.onthefly.ArithmeticOp.DivideOp.class, result, a,
				b);
		return resultOp;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerDivide.class)
public int divide(final int a, final int b) {
final int result =
(Integer) ops().run(
net.imagej.ops.math.PrimitiveMath.IntegerDivide.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongDivide.class)
public long divide(final long a, final long b) {
final long result =
(Long) ops()
.run(net.imagej.ops.math.PrimitiveMath.LongDivide.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatDivide.class)
public float divide(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatDivide.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleDivide.class)
public double divide(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleDivide.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealDivide.class)
public <I extends RealType<I>, O extends RealType<O>> O divide(final O out,
final I in, final double constant, final double dbzVal)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealDivide.class, out, in,
constant, dbzVal);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Exp.class)
public Object exp(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Exp.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleExp.class)
public double exp(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleExp.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealExp.class)
public <I extends RealType<I>, O extends RealType<O>> O exp(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealExp.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.ExpMinusOne.class)
public Object expminusone(final Object... args) {
return ops().run(net.imagej.ops.MathOps.ExpMinusOne.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealExpMinusOne.class)
public <I extends RealType<I>, O extends RealType<O>> O expminusone(
final O out, final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealExpMinusOne.class, out,
in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Floor.class)
public Object floor(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Floor.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleFloor.class)
public double floor(final double a) {
final double result =
(Double) ops()
.run(net.imagej.ops.math.PrimitiveMath.DoubleFloor.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealFloor.class)
public <I extends RealType<I>, O extends RealType<O>> O floor(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealFloor.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Gamma.class)
public Object gamma(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Gamma.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealGammaConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O gamma(final O out,
final I in, final double constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealGammaConstant.class,
out, in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.GaussianRandom.class)
public Object gaussianrandom(final Object... args) {
return ops().run(net.imagej.ops.MathOps.GaussianRandom.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealGaussianRandom.class)
public <I extends RealType<I>, O extends RealType<O>> O gaussianrandom(
final O out, final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealGaussianRandom.class,
out, in);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealGaussianRandom.class)
public <I extends RealType<I>, O extends RealType<O>> O gaussianrandom(
final O out, final I in, final long seed)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealGaussianRandom.class,
out, in, seed);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Invert.class)
public Object invert(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Invert.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealInvert.class)
public <I extends RealType<I>, O extends RealType<O>> O invert(final O out,
final I in, final double specifiedMin, final double specifiedMax)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealInvert.class, out, in,
specifiedMin, specifiedMax);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.LeftShift.class)
public Object leftshift(final Object... args) {
return ops().run(net.imagej.ops.MathOps.LeftShift.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerLeftShift.class)
public int leftshift(final int a, final int b) {
final int result =
(Integer) ops().run(
net.imagej.ops.math.PrimitiveMath.IntegerLeftShift.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongLeftShift.class)
public long leftshift(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongLeftShift.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Log.class)
public Object log(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Log.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleLog.class)
public double log(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleLog.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLog.class)
public <I extends RealType<I>, O extends RealType<O>> O log(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLog.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Log2.class)
public Object log2(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Log2.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLog2.class)
public <I extends RealType<I>, O extends RealType<O>> O log2(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLog2.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Log10.class)
public Object log10(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Log10.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleLog10.class)
public double log10(final double a) {
final double result =
(Double) ops()
.run(net.imagej.ops.math.PrimitiveMath.DoubleLog10.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLog10.class)
public <I extends RealType<I>, O extends RealType<O>> O log10(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLog10.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.LogOnePlusX.class)
public Object logoneplusx(final Object... args) {
return ops().run(net.imagej.ops.MathOps.LogOnePlusX.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleLogOnePlusX.class)
public double logoneplusx(final double a) {
final double result =
(Double) ops().run(
net.imagej.ops.math.PrimitiveMath.DoubleLogOnePlusX.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLogOnePlusX.class)
public <I extends RealType<I>, O extends RealType<O>> O logoneplusx(
final O out, final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLogOnePlusX.class, out,
in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Max.class)
public Object max(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Max.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerMax.class)
public int max(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerMax.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongMax.class)
public long max(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongMax.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatMax.class)
public float max(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatMax.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleMax.class)
public double max(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleMax.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealMaxConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O max(final O out,
final I in, final double constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealMaxConstant.class, out,
in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Min.class)
public Object min(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Min.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerMin.class)
public int min(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerMin.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongMin.class)
public long min(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongMin.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatMin.class)
public float min(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatMin.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleMin.class)
public double min(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleMin.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealMinConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O min(final O out,
final I in, final double constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealMinConstant.class, out,
in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Multiply.class)
public Object multiply(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Multiply.class, args);
}
@OpMethod(op = net.imagej.ops.onthefly.ArithmeticOp.MultiplyOp.class)
public Object multiply(final Object result, final Object a, final Object b) {
		final Object resultOp =
			ops().run(net.imagej.ops.onthefly.ArithmeticOp.MultiplyOp.class, result,
				a, b);
		return resultOp;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerMultiply.class)
public int multiply(final int a, final int b) {
final int result =
(Integer) ops().run(
net.imagej.ops.math.PrimitiveMath.IntegerMultiply.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongMultiply.class)
public long multiply(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongMultiply.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatMultiply.class)
public float multiply(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatMultiply.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleMultiply.class)
public double multiply(final double a, final double b) {
final double result =
(Double) ops().run(
net.imagej.ops.math.PrimitiveMath.DoubleMultiply.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealMultiply.class)
public <I extends RealType<I>, O extends RealType<O>> O multiply(final O out,
final I in, final double constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealMultiply.class, out, in,
constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.NearestInt.class)
public Object nearestint(final Object... args) {
return ops().run(net.imagej.ops.MathOps.NearestInt.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Negate.class)
public Object negate(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Negate.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Or.class)
public Object or(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Or.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Power.class)
public Object power(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Power.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Reciprocal.class)
public Object reciprocal(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Reciprocal.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Remainder.class)
public Object remainder(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Remainder.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.RightShift.class)
public Object rightshift(final Object... args) {
return ops().run(net.imagej.ops.MathOps.RightShift.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Round.class)
public Object round(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Round.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sec.class)
public Object sec(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sec.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sech.class)
public Object sech(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sech.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Signum.class)
public Object signum(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Signum.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sin.class)
public Object sin(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sin.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sinc.class)
public Object sinc(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sinc.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.SincPi.class)
public Object sincpi(final Object... args) {
return ops().run(net.imagej.ops.MathOps.SincPi.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sinh.class)
public Object sinh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sinh.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sqr.class)
public Object sqr(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sqr.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sqrt.class)
public Object sqrt(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sqrt.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Step.class)
public Object step(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Step.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Subtract.class)
public Object subtract(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Subtract.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Tan.class)
public Object tan(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Tan.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Tanh.class)
public Object tanh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Tanh.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Ulp.class)
public Object ulp(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Ulp.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.UniformRandom.class)
public Object uniformrandom(final Object... args) {
return ops().run(net.imagej.ops.MathOps.UniformRandom.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.UnsignedRightShift.class)
public Object unsignedrightshift(final Object... args) {
return ops().run(net.imagej.ops.MathOps.UnsignedRightShift.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Xor.class)
public Object xor(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Xor.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Zero.class)
public Object zero(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Zero.class, args);
}
// -- Named methods --
@Override
public String getName() {
return "math";
}
}
| src/main/java/net/imagej/ops/math/MathNamespace.java | /*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2014 - 2015 Board of Regents of the University of
* Wisconsin-Madison, University of Konstanz and Brian Northan.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.math;
import java.util.Random;
import net.imagej.ops.AbstractNamespace;
import net.imagej.ops.MathOps;
import net.imagej.ops.OpMethod;
import net.imglib2.IterableInterval;
import net.imglib2.IterableRealInterval;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.array.ArrayImg;
import net.imglib2.img.basictypeaccess.array.ByteArray;
import net.imglib2.img.basictypeaccess.array.DoubleArray;
import net.imglib2.img.planar.PlanarImg;
import net.imglib2.type.numeric.NumericType;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.integer.ByteType;
import net.imglib2.type.numeric.real.DoubleType;
/**
* The math namespace contains arithmetic operations.
*
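 * A minimal usage sketch (illustrative only; it assumes the enclosing
 * {@code OpService}, called {@code ops} here, exposes this namespace through
 * a {@code math()} accessor):
 *
 * <pre>{@code
 * double sum = ops.math().add(2.0, 3.0); // delegates to PrimitiveMath.DoubleAdd -> 5.0
 * double mag = ops.math().abs(-4.2);     // delegates to PrimitiveMath.DoubleAbs -> 4.2
 * }</pre>
 *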
* @author Curtis Rueden
*/
public class MathNamespace extends AbstractNamespace {
// -- Math namespace ops --
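	// Each method below is a type-safe wrapper around a single op: the
	// @OpMethod annotation records which op class the method delegates to, and
	// the body invokes that op via ops().run(...) before casting the result
	// back to the declared return type.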
@OpMethod(op = net.imagej.ops.MathOps.Abs.class)
public Object abs(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Abs.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerAbs.class)
public int abs(final int a) {
final int result =
(Integer) ops()
.run(net.imagej.ops.math.PrimitiveMath.IntegerAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongAbs.class)
public long abs(final long a) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatAbs.class)
public float abs(final float a) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleAbs.class)
public double abs(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleAbs.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAbs.class)
public <I extends RealType<I>, O extends RealType<O>> O abs(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealAbs.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Add.class)
public Object add(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Add.class, args);
}
@OpMethod(ops = {
net.imagej.ops.arithmetic.add.parallel.AddConstantToArrayByteImageP.class,
net.imagej.ops.arithmetic.add.AddConstantToArrayByteImage.class })
public ArrayImg<ByteType, ByteArray> add(
final ArrayImg<ByteType, ByteArray> image, final byte value)
{
@SuppressWarnings("unchecked")
final ArrayImg<ByteType, ByteArray> result =
(ArrayImg<ByteType, ByteArray>) ops().run(MathOps.Add.NAME, image, value);
return result;
}
@OpMethod(
ops = {
net.imagej.ops.arithmetic.add.parallel.AddConstantToArrayDoubleImageP.class,
net.imagej.ops.arithmetic.add.AddConstantToArrayDoubleImage.class })
public
ArrayImg<DoubleType, DoubleArray> add(
final ArrayImg<DoubleType, DoubleArray> image, final double value)
{
@SuppressWarnings("unchecked")
final ArrayImg<DoubleType, DoubleArray> result =
(ArrayImg<DoubleType, DoubleArray>) ops().run(MathOps.Add.NAME, image,
value);
return result;
}
@OpMethod(op = net.imagej.ops.onthefly.ArithmeticOp.AddOp.class)
public Object add(final Object result, final Object a, final Object b) {
final Object result_op =
ops().run(net.imagej.ops.onthefly.ArithmeticOp.AddOp.class, result, a, b);
return result_op;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerAdd.class)
public int add(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerAdd.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongAdd.class)
public long add(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongAdd.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatAdd.class)
public float add(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatAdd.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleAdd.class)
public double add(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleAdd.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAdd.class)
public <I extends RealType<I>, O extends RealType<O>> RealType<O> add(
final RealType<O> out, final RealType<I> in, final double constant)
{
@SuppressWarnings("unchecked")
final RealType<O> result =
(RealType<O>) ops().run(net.imagej.ops.arithmetic.real.RealAdd.class,
out, in, constant);
return result;
}
@OpMethod(
op = net.imagej.ops.arithmetic.add.AddRandomAccessibleIntervalToIterableInterval.class)
public
<T extends NumericType<T>> IterableInterval<T> add(
final IterableInterval<T> a, final RandomAccessibleInterval<T> b)
{
@SuppressWarnings("unchecked")
final IterableInterval<T> result =
(IterableInterval<T>) ops()
.run(
net.imagej.ops.arithmetic.add.AddRandomAccessibleIntervalToIterableInterval.class,
a, b);
return result;
}
@OpMethod(
op = net.imagej.ops.arithmetic.add.AddConstantToPlanarDoubleImage.class)
public PlanarImg<DoubleType, DoubleArray> add(
final PlanarImg<DoubleType, DoubleArray> image, final double value)
{
@SuppressWarnings("unchecked")
final PlanarImg<DoubleType, DoubleArray> result =
(PlanarImg<DoubleType, DoubleArray>) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToPlanarDoubleImage.class,
image, value);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.add.AddConstantToImageInPlace.class)
public <T extends NumericType<T>> IterableRealInterval<T> add(
final IterableRealInterval<T> image, final T value)
{
@SuppressWarnings("unchecked")
final IterableRealInterval<T> result =
(IterableRealInterval<T>) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToImageInPlace.class, image,
value);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.add.AddConstantToNumericType.class)
public <T extends NumericType<T>> T add(final T in, final T value) {
@SuppressWarnings("unchecked")
final T result =
(T) ops()
.run(net.imagej.ops.arithmetic.add.AddConstantToNumericType.class, in,
value);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.add.AddConstantToNumericType.class)
public <T extends NumericType<T>> T
add(final T out, final T in, final T value)
{
@SuppressWarnings("unchecked")
final T result =
(T) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToNumericType.class, out, in,
value);
return result;
}
@OpMethod(
op = net.imagej.ops.arithmetic.add.AddConstantToImageFunctional.class)
public <T extends NumericType<T>> RandomAccessibleInterval<T> add(
final RandomAccessibleInterval<T> out, final IterableInterval<T> in,
final T value)
{
@SuppressWarnings("unchecked")
final RandomAccessibleInterval<T> result =
(RandomAccessibleInterval<T>) ops().run(
net.imagej.ops.arithmetic.add.AddConstantToImageFunctional.class, out,
in, value);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.AddNoise.class)
public Object addnoise(final Object... args) {
return ops().run(net.imagej.ops.MathOps.AddNoise.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAddNoise.class)
public <I extends RealType<I>, O extends RealType<O>> O addnoise(final O out,
final I in, final double rangeMin, final double rangeMax,
final double rangeStdDev, final Random rng)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealAddNoise.class, out, in,
rangeMin, rangeMax, rangeStdDev, rng);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.And.class)
public Object and(final Object... args) {
return ops().run(net.imagej.ops.MathOps.And.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerAnd.class)
public int and(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerAnd.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongAnd.class)
public long and(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongAnd.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealAndConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O and(final O out,
final I in, final long constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealAndConstant.class, out,
in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccos.class)
public Object arccos(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccos.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleArccos.class)
public double arccos(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleArccos.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccos.class)
public <I extends RealType<I>, O extends RealType<O>> O arccos(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccos.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccosh.class)
public Object arccosh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccosh.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccosh.class)
public <I extends RealType<I>, O extends RealType<O>> O arccosh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccosh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccot.class)
public Object arccot(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccot.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccot.class)
public <I extends RealType<I>, O extends RealType<O>> O arccot(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccot.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccoth.class)
public Object arccoth(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccoth.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccoth.class)
public <I extends RealType<I>, O extends RealType<O>> O arccoth(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccoth.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccsc.class)
public Object arccsc(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccsc.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccsc.class)
public <I extends RealType<I>, O extends RealType<O>> O arccsc(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccsc.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arccsch.class)
public Object arccsch(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arccsch.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArccsch.class)
public <I extends RealType<I>, O extends RealType<O>> O arccsch(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArccsch.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsec.class)
public Object arcsec(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsec.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsec.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsec(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsec.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsech.class)
public Object arcsech(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsech.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsech.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsech(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsech.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsin.class)
public Object arcsin(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsin.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleArcsin.class)
public double arcsin(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleArcsin.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsin.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsin(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsin.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arcsinh.class)
public Object arcsinh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arcsinh.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArcsinh.class)
public <I extends RealType<I>, O extends RealType<O>> O arcsinh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArcsinh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arctan.class)
public Object arctan(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arctan.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleArctan.class)
public double arctan(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleArctan.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArctan.class)
public <I extends RealType<I>, O extends RealType<O>> O arctan(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArctan.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Arctanh.class)
public Object arctanh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Arctanh.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealArctanh.class)
public <I extends RealType<I>, O extends RealType<O>> O arctanh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealArctanh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Ceil.class)
public Object ceil(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Ceil.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCeil.class)
public double ceil(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleCeil.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCeil.class)
public <I extends RealType<I>, O extends RealType<O>> O ceil(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCeil.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Complement.class)
public Object complement(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Complement.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerComplement.class)
public int complement(final int a) {
final int result =
(Integer) ops().run(
net.imagej.ops.math.PrimitiveMath.IntegerComplement.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongComplement.class)
public long complement(final long a) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongComplement.class,
a);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Copy.class)
public Object copy(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Copy.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCopy.class)
public <I extends RealType<I>, O extends RealType<O>> O copy(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCopy.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Cos.class)
public Object cos(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Cos.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCos.class)
public double cos(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleCos.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCos.class)
public <I extends RealType<I>, O extends RealType<O>> O cos(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCos.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Cosh.class)
public Object cosh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Cosh.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCosh.class)
public double cosh(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleCosh.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCosh.class)
public <I extends RealType<I>, O extends RealType<O>> O cosh(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCosh.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Cot.class)
public Object cot(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Cot.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCot.class)
public <I extends RealType<I>, O extends RealType<O>> O cot(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCot.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Coth.class)
public Object coth(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Coth.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCoth.class)
public <I extends RealType<I>, O extends RealType<O>> O coth(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCoth.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Csc.class)
public Object csc(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Csc.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCsc.class)
public <I extends RealType<I>, O extends RealType<O>> O csc(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCsc.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Csch.class)
public Object csch(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Csch.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCsch.class)
public <I extends RealType<I>, O extends RealType<O>> O csch(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCsch.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.CubeRoot.class)
public Object cuberoot(final Object... args) {
return ops().run(net.imagej.ops.MathOps.CubeRoot.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleCubeRoot.class)
public double cuberoot(final double a) {
final double result =
(Double) ops().run(
net.imagej.ops.math.PrimitiveMath.DoubleCubeRoot.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealCubeRoot.class)
public <I extends RealType<I>, O extends RealType<O>> O cuberoot(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealCubeRoot.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Divide.class)
public Object divide(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Divide.class, args);
}
@OpMethod(op = net.imagej.ops.onthefly.ArithmeticOp.DivideOp.class)
public Object divide(final Object result, final Object a, final Object b) {
final Object result_op =
ops().run(net.imagej.ops.onthefly.ArithmeticOp.DivideOp.class, result, a,
b);
return result_op;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerDivide.class)
public int divide(final int a, final int b) {
final int result =
(Integer) ops().run(
net.imagej.ops.math.PrimitiveMath.IntegerDivide.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongDivide.class)
public long divide(final long a, final long b) {
final long result =
(Long) ops()
.run(net.imagej.ops.math.PrimitiveMath.LongDivide.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatDivide.class)
public float divide(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatDivide.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleDivide.class)
public double divide(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleDivide.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealDivide.class)
public <I extends RealType<I>, O extends RealType<O>> O divide(final O out,
final I in, final double constant, final double dbzVal)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealDivide.class, out, in,
constant, dbzVal);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Exp.class)
public Object exp(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Exp.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleExp.class)
public double exp(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleExp.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealExp.class)
public <I extends RealType<I>, O extends RealType<O>> O exp(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealExp.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.ExpMinusOne.class)
public Object expminusone(final Object... args) {
return ops().run(net.imagej.ops.MathOps.ExpMinusOne.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealExpMinusOne.class)
public <I extends RealType<I>, O extends RealType<O>> O expminusone(
final O out, final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealExpMinusOne.class, out,
in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Floor.class)
public Object floor(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Floor.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleFloor.class)
public double floor(final double a) {
final double result =
(Double) ops()
.run(net.imagej.ops.math.PrimitiveMath.DoubleFloor.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealFloor.class)
public <I extends RealType<I>, O extends RealType<O>> O floor(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealFloor.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Gamma.class)
public Object gamma(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Gamma.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealGammaConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O gamma(final O out,
final I in, final double constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealGammaConstant.class,
out, in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.GaussianRandom.class)
public Object gaussianrandom(final Object... args) {
return ops().run(net.imagej.ops.MathOps.GaussianRandom.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealGaussianRandom.class)
public <I extends RealType<I>, O extends RealType<O>> O gaussianrandom(
final O out, final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealGaussianRandom.class,
out, in);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealGaussianRandom.class)
public <I extends RealType<I>, O extends RealType<O>> O gaussianrandom(
final O out, final I in, final long seed)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealGaussianRandom.class,
out, in, seed);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Invert.class)
public Object invert(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Invert.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealInvert.class)
public <I extends RealType<I>, O extends RealType<O>> O invert(final O out,
final I in, final double specifiedMin, final double specifiedMax)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealInvert.class, out, in,
specifiedMin, specifiedMax);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.LeftShift.class)
public Object leftshift(final Object... args) {
return ops().run(net.imagej.ops.MathOps.LeftShift.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerLeftShift.class)
public int leftshift(final int a, final int b) {
final int result =
(Integer) ops().run(
net.imagej.ops.math.PrimitiveMath.IntegerLeftShift.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongLeftShift.class)
public long leftshift(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongLeftShift.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Log.class)
public Object log(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Log.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleLog.class)
public double log(final double a) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleLog.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLog.class)
public <I extends RealType<I>, O extends RealType<O>> O log(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLog.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Log2.class)
public Object log2(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Log2.class, args);
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLog2.class)
public <I extends RealType<I>, O extends RealType<O>> O log2(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLog2.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Log10.class)
public Object log10(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Log10.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleLog10.class)
public double log10(final double a) {
final double result =
(Double) ops()
.run(net.imagej.ops.math.PrimitiveMath.DoubleLog10.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLog10.class)
public <I extends RealType<I>, O extends RealType<O>> O log10(final O out,
final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLog10.class, out, in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.LogOnePlusX.class)
public Object logoneplusx(final Object... args) {
return ops().run(net.imagej.ops.MathOps.LogOnePlusX.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleLogOnePlusX.class)
public double logoneplusx(final double a) {
final double result =
(Double) ops().run(
net.imagej.ops.math.PrimitiveMath.DoubleLogOnePlusX.class, a);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealLogOnePlusX.class)
public <I extends RealType<I>, O extends RealType<O>> O logoneplusx(
final O out, final I in)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealLogOnePlusX.class, out,
in);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Max.class)
public Object max(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Max.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerMax.class)
public int max(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerMax.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongMax.class)
public long max(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongMax.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatMax.class)
public float max(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatMax.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleMax.class)
public double max(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleMax.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealMaxConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O max(final O out,
final I in, final double constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealMaxConstant.class, out,
in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Min.class)
public Object min(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Min.class, args);
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerMin.class)
public int min(final int a, final int b) {
final int result =
(Integer) ops().run(net.imagej.ops.math.PrimitiveMath.IntegerMin.class,
a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongMin.class)
public long min(final long a, final long b) {
final long result =
(Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongMin.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatMin.class)
public float min(final float a, final float b) {
final float result =
(Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatMin.class, a, b);
return result;
}
@OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleMin.class)
public double min(final double a, final double b) {
final double result =
(Double) ops().run(net.imagej.ops.math.PrimitiveMath.DoubleMin.class, a,
b);
return result;
}
@OpMethod(op = net.imagej.ops.arithmetic.real.RealMinConstant.class)
public <I extends RealType<I>, O extends RealType<O>> O min(final O out,
final I in, final double constant)
{
@SuppressWarnings("unchecked")
final O result =
(O) ops().run(net.imagej.ops.arithmetic.real.RealMinConstant.class, out,
in, constant);
return result;
}
@OpMethod(op = net.imagej.ops.MathOps.Multiply.class)
public Object multiply(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Multiply.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.NearestInt.class)
public Object nearestint(final Object... args) {
return ops().run(net.imagej.ops.MathOps.NearestInt.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Negate.class)
public Object negate(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Negate.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Or.class)
public Object or(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Or.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Power.class)
public Object power(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Power.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Reciprocal.class)
public Object reciprocal(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Reciprocal.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Remainder.class)
public Object remainder(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Remainder.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.RightShift.class)
public Object rightshift(final Object... args) {
return ops().run(net.imagej.ops.MathOps.RightShift.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Round.class)
public Object round(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Round.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sec.class)
public Object sec(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sec.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sech.class)
public Object sech(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sech.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Signum.class)
public Object signum(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Signum.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sin.class)
public Object sin(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sin.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sinc.class)
public Object sinc(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sinc.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.SincPi.class)
public Object sincpi(final Object... args) {
return ops().run(net.imagej.ops.MathOps.SincPi.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sinh.class)
public Object sinh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sinh.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sqr.class)
public Object sqr(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sqr.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Sqrt.class)
public Object sqrt(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Sqrt.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Step.class)
public Object step(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Step.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Subtract.class)
public Object subtract(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Subtract.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Tan.class)
public Object tan(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Tan.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Tanh.class)
public Object tanh(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Tanh.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Ulp.class)
public Object ulp(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Ulp.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.UniformRandom.class)
public Object uniformrandom(final Object... args) {
return ops().run(net.imagej.ops.MathOps.UniformRandom.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.UnsignedRightShift.class)
public Object unsignedrightshift(final Object... args) {
return ops().run(net.imagej.ops.MathOps.UnsignedRightShift.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Xor.class)
public Object xor(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Xor.class, args);
}
@OpMethod(op = net.imagej.ops.MathOps.Zero.class)
public Object zero(final Object... args) {
return ops().run(net.imagej.ops.MathOps.Zero.class, args);
}
// -- Named methods --
@Override
public String getName() {
return "math";
}
}
| Add implementation for Multiply built-in
| src/main/java/net/imagej/ops/math/MathNamespace.java | Add implementation for Multiply built-in | <ide><path>rc/main/java/net/imagej/ops/math/MathNamespace.java
<ide> return ops().run(net.imagej.ops.MathOps.Multiply.class, args);
<ide> }
<ide>
<add> @OpMethod(op = net.imagej.ops.onthefly.ArithmeticOp.MultiplyOp.class)
<add> public Object multiply(final Object result, final Object a, final Object b) {
<add> final Object result_op =
<add> ops().run(net.imagej.ops.onthefly.ArithmeticOp.MultiplyOp.class, result,
<add> a, b);
<add> return result_op;
<add> }
<add>
<add> @OpMethod(op = net.imagej.ops.math.PrimitiveMath.IntegerMultiply.class)
<add> public int multiply(final int a, final int b) {
<add> final int result =
<add> (Integer) ops().run(
<add> net.imagej.ops.math.PrimitiveMath.IntegerMultiply.class, a, b);
<add> return result;
<add> }
<add>
<add> @OpMethod(op = net.imagej.ops.math.PrimitiveMath.LongMultiply.class)
<add> public long multiply(final long a, final long b) {
<add> final long result =
<add> (Long) ops().run(net.imagej.ops.math.PrimitiveMath.LongMultiply.class, a,
<add> b);
<add> return result;
<add> }
<add>
<add> @OpMethod(op = net.imagej.ops.math.PrimitiveMath.FloatMultiply.class)
<add> public float multiply(final float a, final float b) {
<add> final float result =
<add> (Float) ops().run(net.imagej.ops.math.PrimitiveMath.FloatMultiply.class,
<add> a, b);
<add> return result;
<add> }
<add>
<add> @OpMethod(op = net.imagej.ops.math.PrimitiveMath.DoubleMultiply.class)
<add> public double multiply(final double a, final double b) {
<add> final double result =
<add> (Double) ops().run(
<add> net.imagej.ops.math.PrimitiveMath.DoubleMultiply.class, a, b);
<add> return result;
<add> }
<add>
<add> @OpMethod(op = net.imagej.ops.arithmetic.real.RealMultiply.class)
<add> public <I extends RealType<I>, O extends RealType<O>> O multiply(final O out,
<add> final I in, final double constant)
<add> {
<add> @SuppressWarnings("unchecked")
<add> final O result =
<add> (O) ops().run(net.imagej.ops.arithmetic.real.RealMultiply.class, out, in,
<add> constant);
<add> return result;
<add> }
<add>
<ide> @OpMethod(op = net.imagej.ops.MathOps.NearestInt.class)
<ide> public Object nearestint(final Object... args) {
<ide> return ops().run(net.imagej.ops.MathOps.NearestInt.class, args); |
|
Java | bsd-3-clause | e09f12005435aa8d5ac57f5e0a5a5a5bee46936d | 0 | team178/Hawkeye | /*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj.templates;
//Imports
import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.camera.AxisCameraException;
import edu.wpi.first.wpilibj.image.NIVisionException;
import org.usfirst.frc178.DashboardHigh;
import org.usfirst.frc178.Drivetrain;
import org.usfirst.frc178.EnhancedIOFHS;
import org.usfirst.frc178.Sensors;
import org.usfirst.frc178.Tower;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class RobotTemplate extends IterativeRobot
{
//Controls
private Joystick joystick; //main joystick
private Joystick joystickAux; //Aux joystick
private Joystick joystickKiddy; //Kiddie Joystick - not full speed.
private EnhancedIOFHS enhancedIO; //Auxbox
//Driverstation
private DriverStation driverStation; //sends things to the lower driverstation
private DashboardHigh dashboardHigh; //sends things to the upper driverstation
//Movement
private Drivetrain drivetrain;
private Tower tower;
//Sensors
private Sensors sensors;
private CameraFHS camera;
private ImageAnalysis imageAnalysis;
//Watchdog
private Watchdog watchdog;
//Solenoid
private Solenoid intakeSolenoid;
//Misc. Variables
Timer timer, timer2;
double rangeLast = 0;
boolean first = true;
boolean stopped = false;
double rangeBeforeStop = 0;
int luminosityMin;
double numParticles;
DriverStationLCD dsout;
boolean autoFist = true;
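    // Note: "autoFist" (sic) is a first-pass latch; it starts timer2 on the
    // first call to autonomousPeriodic() and is never reset afterwards.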
public void robotInit() {
timer2 = new Timer();
//Controls
joystick = new Joystick(1); // joystick
joystickAux = new Joystick(2); // xbox
joystickKiddy = new Joystick(3); // Kiddy
//DriverStation
driverStation = DriverStation.getInstance();
dashboardHigh = new DashboardHigh();
enhancedIO = new EnhancedIOFHS(driverStation);
//Movement
drivetrain = new Drivetrain(8,4,9,5,joystick,1.0,joystickKiddy);
tower = new Tower(driverStation,6,1,7,3,4,enhancedIO,joystickAux);
//Sensors
sensors = new Sensors();
//Solenoid
//intakeSolenoid = new Solenoid(3);
//Image
imageAnalysis = new ImageAnalysis(AxisCamera.getInstance());
//camera = new CameraFHS(drivetrain, imageAnalysis);
//Watchdog
watchdog = Watchdog.getInstance();
dsout = DriverStationLCD.getInstance();
dsout.updateLCD();
}
public void autonomousPeriodic()
{
        /* // old autonomous (2012, Rebound Rumble)
double range = sensors.getUltrasonicRight().getRangeInches();
System.out.println(range);
double robo_speed_far = 0.3;
double robo_speed_close = 0.10;
if((first || (Math.abs(range-rangeLast) < 20)) && !stopped)
{
rangeBeforeStop = range;
if(first)
{
first = false;
}
if(range < 150)
{
drivetrain.frontLeftSet(-robo_speed_far);
drivetrain.frontRightSet(robo_speed_far);
drivetrain.rearLeftSet(-robo_speed_far);
drivetrain.rearRightSet(robo_speed_far);
}
else if(range >= 150 && range < 180)
{
drivetrain.frontLeftSet(-robo_speed_close);
drivetrain.frontRightSet(robo_speed_close);
drivetrain.rearLeftSet(-robo_speed_close);
drivetrain.rearRightSet(robo_speed_close);
}
else
{
System.out.println("STOP!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);
drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
}
rangeLast = range;
}
else if(stopped)
{
timer.start();
System.out.println(timer.get());
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);
drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
if((Math.abs(range-rangeBeforeStop) < 5) && timer.get() < 5)
{
timer.stop();
timer.reset();
stopped = false;
rangeLast = range;
}
}
else
{
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);
drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
}
*/
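        // Timed two-ball autonomous, driven off timer2 (thresholds in seconds):
        // 0-4: spin up the shooter wheels; 4-5: elevator feeds ball one;
        // 5-6: elevator pauses (presumably to let the shooter recover speed);
        // 6-7: feed ball two; 7-10: shooter and elevator off (the drive/bridge
        // code in that last step is commented out).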
if(autoFist)
{
autoFist = false;
timer2.start();
}
double time = timer2.get();
        // Wait 4 seconds for the shooting motors to power up (denser ball loaded first)
if(time <= 4)
{
tower.setShooterMotors(0.43);
}
        // Run the elevator for 1 second (4-5 s) to feed the first ball
else if(time > 4 && time <= 5)
{
tower.setBallElevator(1.0);
}
// Stop Elevator for a second
else if(time > 5 && time <= 6)
{
tower.setBallElevator(0.0);
        }
        // Run the elevator again for 1 second (6-7 s) to feed the second ball
        // (original tuning note kept from the source: ".35 for 3")
        else if(time > 6 && time <= 7)
{
tower.setBallElevator(1.0);
}
        // Shut the shooter and elevator down from 7 to 10 seconds
else if(time > 7 && time <= 10)
{
tower.setShooterMotors(0.0);
tower.setBallElevator(0.0);
/*drivetrain.rearLeftSet(-0.35);
drivetrain.rearRightSet(0.42);
drivetrain.frontLeftSet(-0.35);
drivetrain.frontRightSet(0.42);
tower.bridgeSolenoid.set(true);*/
}
else if(time > 10)
{
/*drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);*/
timer2.stop();
}
// Update values on Dashboard
dashboardHigh.updateDashboardHigh(drivetrain, 0, sensors.getUltrasonicLeft().getRangeInches(), sensors.getUltrasonicRight().getRangeInches(), 0, luminosityMin, 0, joystick);
tower.startCompressor();
}
public void teleopPeriodic()
{
//Movement
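        // Button 11 is reserved for camera-based target centering; its body is
        // commented out below, so pressing it currently does nothing (and
        // drivetrain.drive() is skipped while it is held).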
if(joystick.getRawButton(11))
{
/*try
{
imageAnalysis.updateImage(0);
camera.centerOnTarget(0, 0);
}
catch (AxisCameraException ex){}
catch (NIVisionException ex){}*/
}
/*else if (joystick.getRawButton(9)) {
if (camera.isCenter()) {
}
}*/
else
{
drivetrain.drive();
}
dsout.println(DriverStationLCD.Line.kUser4, 2, "Slider: "+(int)(enhancedIO.getSlider()*100)+"% ");
tower.cameraLightOn();
tower.runJoystick();
//Dashboard
dashboardHigh.updateDashboardHigh(drivetrain, 0, sensors.getUltrasonicLeft().getRangeInches(), sensors.getUltrasonicRight().getRangeInches(), 0, luminosityMin, 0, joystick);
//Watchdog
watchdog.feed();
dsout.updateLCD();
}
/**
* This function is called periodically during test mode
*/
public void testPeriodic() {
}
} | src/edu/wpi/first/wpilibj/templates/RobotTemplate.java | /*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj.templates;
//Imports
import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.camera.AxisCameraException;
import edu.wpi.first.wpilibj.image.NIVisionException;
import org.usfirst.frc178.DashboardHigh;
import org.usfirst.frc178.Drivetrain;
import org.usfirst.frc178.EnhancedIOFHS;
import org.usfirst.frc178.Sensors;
import org.usfirst.frc178.Tower;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class RobotTemplate extends IterativeRobot
{
//Controls
private Joystick joystick; //main joystick
private Joystick joystickAux; //Aux joystick
private Joystick joystickKiddy; //Kiddie Joystick - not full speed.
private EnhancedIOFHS enhancedIO; //Auxbox
//Driverstation
private DriverStation driverStation; //sends things to the lower driverstation
private DashboardHigh dashboardHigh; //sends things to the upper driverstation
//Movement
private Drivetrain drivetrain;
private Tower tower;
//Sensors
private Sensors sensors;
private CameraFHS camera;
private ImageAnalysis imageAnalysis;
//Watchdog
private Watchdog watchdog;
//Solenoid
private Solenoid intakeSolenoid;
//Misc. Variables
Timer timer, timer2;
double rangeLast = 0;
boolean first = true;
boolean stopped = false;
double rangeBeforeStop = 0;
int luminosityMin;
double numParticles;
DriverStationLCD dsout;
boolean autoFist = true;
public void robotInit() {
timer2 = new Timer();
//Controls
joystick = new Joystick(1); // joystick
joystickAux = new Joystick(2); // xbox
joystickKiddy = new Joystick(3); // Kiddy
//DriverStation
driverStation = DriverStation.getInstance();
dashboardHigh = new DashboardHigh();
enhancedIO = new EnhancedIOFHS(driverStation);
//Movement
drivetrain = new Drivetrain(8,4,9,5,joystick,1.0,joystickKiddy);
tower = new Tower(driverStation,6,1,7,3,4,enhancedIO,joystickAux);
//Sensors
sensors = new Sensors();
//Solenoid
//intakeSolenoid = new Solenoid(3);
//Image
imageAnalysis = new ImageAnalysis(AxisCamera.getInstance());
//camera = new CameraFHS(drivetrain, imageAnalysis);
//Watchdog
watchdog = Watchdog.getInstance();
dsout = DriverStationLCD.getInstance();
dsout.updateLCD();
}
public void autonomousPeriodic()
{
        /* // old autonomous (2012, Rebound Rumble)
double range = sensors.getUltrasonicRight().getRangeInches();
System.out.println(range);
double robo_speed_far = 0.3;
double robo_speed_close = 0.10;
if((first || (Math.abs(range-rangeLast) < 20)) && !stopped)
{
rangeBeforeStop = range;
if(first)
{
first = false;
}
if(range < 150)
{
drivetrain.frontLeftSet(-robo_speed_far);
drivetrain.frontRightSet(robo_speed_far);
drivetrain.rearLeftSet(-robo_speed_far);
drivetrain.rearRightSet(robo_speed_far);
}
else if(range >= 150 && range < 180)
{
drivetrain.frontLeftSet(-robo_speed_close);
drivetrain.frontRightSet(robo_speed_close);
drivetrain.rearLeftSet(-robo_speed_close);
drivetrain.rearRightSet(robo_speed_close);
}
else
{
System.out.println("STOP!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);
drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
}
rangeLast = range;
}
else if(stopped)
{
timer.start();
System.out.println(timer.get());
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);
drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
if((Math.abs(range-rangeBeforeStop) < 5) && timer.get() < 5)
{
timer.stop();
timer.reset();
stopped = false;
rangeLast = range;
}
}
else
{
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);
drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
}
*/
if(autoFist)
{
autoFist = false;
timer2.start();
}
double time = timer2.get();
        // Wait 4 seconds for the shooting motors to power up (denser ball loaded first)
if(time <= 4)
{
tower.setShooterMotors(0.43);
}
        // Run the elevator for 1 second (4-5 s) to feed the first ball
else if(time > 4 && time <= 5)
{
tower.setBallElevator(1.0);
}
// Stop Elevator for a second
else if(time > 5 && time <= 6)
{
tower.setBallElevator(0.0);
        }
        // Run the elevator again for 1 second (6-7 s) to feed the second ball
        // (original tuning note kept from the source: ".35 for 3")
        else if(time > 6 && time <= 7)
{
tower.setBallElevator(1.0);
}
        // Shut the shooter and elevator down from 7 to 10 seconds
else if(time > 7 && time <= 10)
{
tower.setShooterMotors(0.0);
tower.setBallElevator(0.0);
/*drivetrain.rearLeftSet(-0.35);
drivetrain.rearRightSet(0.42);
drivetrain.frontLeftSet(-0.35);
drivetrain.frontRightSet(0.42);
tower.bridgeSolenoid.set(true);*/
}
else if(time > 10)
{
/*drivetrain.rearLeftSet(0.0);
drivetrain.rearRightSet(0.0);
drivetrain.frontLeftSet(0.0);
drivetrain.frontRightSet(0.0);*/
timer2.stop();
}
// Update values on Dashboard
dashboardHigh.updateDashboardHigh(drivetrain, 0, sensors.getUltrasonicLeft().getRangeInches(), sensors.getUltrasonicRight().getRangeInches(), 0, luminosityMin, 0, joystick);
tower.startCompressor();
}
public void teleopPeriodic()
{
//Movement
if(joystick.getRawButton(11))
{
/*try
{
imageAnalysis.updateImage(0);
camera.centerOnTarget(0, 0);
}
catch (AxisCameraException ex){}
catch (NIVisionException ex){}*/
}
/*else if (joystick.getRawButton(9)) {
if (camera.isCenter()) {
}
}*/
else
{
drivetrain.drive();
}
dsout.println(DriverStationLCD.Line.kUser4, 2, "Slider: "+(int)(enhancedIO.getSlider()*100)+"% ");
tower.cameraLightOn();
tower.runJoystick();
//Dashboard
dashboardHigh.updateDashboardHigh(drivetrain, 0, sensors.getUltrasonicLeft().getRangeInches(), sensors.getUltrasonicRight().getRangeInches(), 0, luminosityMin, 0, joystick);
//Watchdog
watchdog.feed();
dsout.updateLCD();
}
/**
* This function is called periodically during test mode
*/
public void testPeriodic() {
}
} | Real Drive code! commented out omni, fixed 4-wheel drive code | src/edu/wpi/first/wpilibj/templates/RobotTemplate.java | Real Drive code! commented out omni, fixed 4-wheel drive code | <ide><path>rc/edu/wpi/first/wpilibj/templates/RobotTemplate.java
<ide> drivetrain.rearRightSet(0.0);
<ide> }
<ide> */
<add>
<ide> if(autoFist)
<ide> {
<ide> autoFist = false; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.