code (stringlengths 5–1.04M) | repo_name (stringlengths 7–108) | path (stringlengths 6–299) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1.04M) |
---|---|---|---|---|---|
/*
* Copyright (c) 2016 NECTEC
* National Electronics and Computer Technology Center, Thailand
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tanrabad.survey.repository.persistence;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import org.junit.Rule;
import org.junit.Test;
import org.tanrabad.survey.base.SurveyDbTestRule;
import java.io.File;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class SurveyLiteDatabaseTest {
@Rule
public SurveyDbTestRule dbTestRule = new SurveyDbTestRule();
@Test
public void testDatabaseProperties() throws Exception {
SQLiteDatabase db = dbTestRule.getReadable();
assertEquals(SurveyLiteDatabase.DB_NAME, new File(db.getPath()).getName());
assertEquals(SurveyLiteDatabase.DB_VERSION, db.getVersion());
}
@Test
public void testGetReadable() throws Exception {
SQLiteDatabase db = dbTestRule.getReadable();
Cursor province = db.query("province", new String[]{"province_code", "name"}, null, null, null, null, null);
Cursor subdistricts = db.query(
"subdistrict", new String[]{"subdistrict_code", "name", "district_code"}, null, null, null, null, null);
assertEquals(52, subdistricts.getCount());
assertTrue(province.moveToFirst());
assertEquals(1, province.getCount());
assertEquals("12", province.getString(0));
assertEquals("นนทบุรี", province.getString(1));
province.close();
subdistricts.close();
}
@Test
public void testWritable() throws Exception {
long code = dbTestRule.getWritable().insert("province", null, getBangkokProvince());
assertTrue(code != -1);
}
private ContentValues getBangkokProvince() {
ContentValues cv = new ContentValues();
cv.put("province_code", "01");
cv.put("name", "กรุงเทพมหานคร");
return cv;
}
@Test
public void testIntegrity() throws Exception {
assertTrue(dbTestRule.getReadable().isDatabaseIntegrityOk());
}
}
| tanrabad/survey | app/src/androidTest/java/org/tanrabad/survey/repository/persistence/SurveyLiteDatabaseTest.java | Java | apache-2.0 | 2,739 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.clojure;
import org.apache.storm.coordination.CoordinatedBolt.FinishedCallback;
import org.apache.storm.generated.StreamInfo;
import org.apache.storm.task.IBolt;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichBolt;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.utils.Utils;
import clojure.lang.IFn;
import clojure.lang.PersistentArrayMap;
import clojure.lang.Keyword;
import clojure.lang.Symbol;
import clojure.lang.RT;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
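/**
 * Adapts a Clojure function to Storm's {@link IRichBolt}: the configured Clojure var
 * is loaded and applied to the constructor parameters to produce the backing
 * {@link IBolt}, to which prepare/execute/cleanup calls are then delegated.
 */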
public class ClojureBolt implements IRichBolt, FinishedCallback {
Map<String, StreamInfo> _fields;
List<String> _fnSpec;
List<String> _confSpec;
List<Object> _params;
IBolt _bolt;
public ClojureBolt(List fnSpec, List confSpec, List<Object> params, Map<String, StreamInfo> fields) {
_fnSpec = fnSpec;
_confSpec = confSpec;
_params = params;
_fields = fields;
}
@Override
public void prepare(final Map stormConf, final TopologyContext context, final OutputCollector collector) {
IFn hof = Utils.loadClojureFn(_fnSpec.get(0), _fnSpec.get(1));
try {
IFn preparer = (IFn) hof.applyTo(RT.seq(_params));
final Map<Keyword,Object> collectorMap = new PersistentArrayMap( new Object[] {
Keyword.intern(Symbol.create("output-collector")), collector,
Keyword.intern(Symbol.create("context")), context});
List<Object> args = new ArrayList<Object>() {{
add(stormConf);
add(context);
add(collectorMap);
}};
_bolt = (IBolt) preparer.applyTo(RT.seq(args));
//this is kind of unnecessary for clojure
try {
_bolt.prepare(stormConf, context, collector);
} catch(AbstractMethodError ame) {
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public void execute(Tuple input) {
_bolt.execute(input);
}
@Override
public void cleanup() {
try {
_bolt.cleanup();
} catch(AbstractMethodError ame) {
}
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
for(String stream: _fields.keySet()) {
StreamInfo info = _fields.get(stream);
declarer.declareStream(stream, info.is_direct(), new Fields(info.get_output_fields()));
}
}
@Override
public void finishedId(Object id) {
if(_bolt instanceof FinishedCallback) {
((FinishedCallback) _bolt).finishedId(id);
}
}
@Override
public Map<String, Object> getComponentConfiguration() {
IFn hof = Utils.loadClojureFn(_confSpec.get(0), _confSpec.get(1));
try {
return (Map) hof.applyTo(RT.seq(_params));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| anshuiisc/storm-Allbolts-wiring | storm-core/src/jvm/org/apache/storm/clojure/ClojureBolt.java | Java | apache-2.0 | 4,059 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.rdf.api;
import java.util.Optional;
/**
* A generalised "quad-like" interface, extended by {@link Quad}.
* <p>
* A QuadLike statement has at least a {@link #getSubject()},
* {@link #getPredicate()}, {@link #getObject()} and {@link #getGraphName()},
* but unlike a {@link Quad} does not have a formalised
* {@link Quad#equals(Object)} or {@link Quad#hashCode()} semantics and is not
* required to be <em>immutable</em> or <em>thread-safe</em>. This interface can
* also be used for <em>generalised quads</em> (e.g. a {@link BlankNode} as
* predicate).
* <p>
* Implementations should specialise which specific {@link RDFTerm} types they
* return by overriding {@link #getSubject()}, {@link #getPredicate()},
* {@link #getObject()} and {@link #getGraphName()}.
*
* @since 0.3.0-incubating
* @see Quad
*/
public interface QuadLike<G extends RDFTerm> extends TripleLike {
/**
* The graph name (graph label) of this statement, if present.
* <p>
* If {@link Optional#isPresent()}, then the {@link Optional#get()} indicates
* the graph name of this statement. If the graph name is not present, e.g.
* the value is {@link Optional#empty()}, it indicates that this Quad is in
* the default graph.
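* <p>
* A non-normative usage sketch (the {@code quad} variable name is assumed):
* <pre>
* if (quad.getGraphName().isPresent()) {
*     // the statement is in the named graph quad.getGraphName().get()
* } else {
*     // the statement is in the default graph
* }
* </pre>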
*
* @return If {@link Optional#isPresent()}, the graph name of this quad,
* otherwise {@link Optional#empty()}, indicating the default graph.
* The graph name is typically an {@link IRI} or {@link BlankNode}.
*/
Optional<G> getGraphName();
}
| ansell/commons-rdf | api/src/main/java/org/apache/commons/rdf/api/QuadLike.java | Java | apache-2.0 | 2,359 |
package ContinuousGridWorld.messages;
import org.rlcommunity.rlglue.codec.RLGlue;
import rlVizLib.messaging.AbstractMessage;
import rlVizLib.messaging.GenericMessage;
import rlVizLib.messaging.MessageUser;
import rlVizLib.messaging.MessageValueType;
import rlVizLib.messaging.NotAnRLVizMessageException;
import rlVizLib.messaging.environment.EnvMessageType;
import rlVizLib.messaging.environment.EnvironmentMessages;
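/**
 * Environment message that requests the ContinuousGridWorld map: it sends the custom
 * "GETCGWMAP" payload to the environment over RL-Glue and wraps the reply in a
 * {@link MapResponse}.
 */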
public class MapRequest extends EnvironmentMessages{
public MapRequest(GenericMessage theMessageObject){
super(theMessageObject);
}
public static MapResponse Execute(){
String theRequest=AbstractMessage.makeMessage(
MessageUser.kEnv.id(),
MessageUser.kBenchmark.id(),
EnvMessageType.kEnvCustom.id(),
MessageValueType.kString.id(),
"GETCGWMAP");
String responseMessage=RLGlue.RL_env_message(theRequest);
MapResponse theResponse;
try {
theResponse = new MapResponse(responseMessage);
} catch (NotAnRLVizMessageException e) {
System.err.println("In CGWMapRequest, the response was not RL-Viz compatible");
theResponse=null;
}
return theResponse;
}
}
| chaostrigger/rl-library | projects/packages/sutton-fall-2008-ai/oldPlan/gridWorldEnv/src/ContinuousGridWorld/messages/MapRequest.java | Java | apache-2.0 | 1,122 |
package liquibase.util;
import liquibase.exception.DatabaseException;
import java.sql.*;
import java.util.Collection;
public abstract class JdbcUtils {
/**
* Constant that indicates an unknown (or unspecified) SQL type.
*
* @see java.sql.Types
*/
public static final int TYPE_UNKNOWN = Integer.MIN_VALUE;
/**
* Close the given JDBC Statement and ignore any thrown exception.
* This is useful for typical finally blocks in manual JDBC code.
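* <p>A typical pattern (the {@code connection} variable is assumed to be an open
* {@link java.sql.Connection}):
* <pre>
* Statement stmt = null;
* try {
*     stmt = connection.createStatement(); // connection: assumed open Connection
*     // ... use the statement ...
* } finally {
*     JdbcUtils.closeStatement(stmt);
* }
* </pre>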
*
* @param stmt the JDBC Statement to close (may be <code>null</code>)
*/
public static void closeStatement(Statement stmt) {
if (stmt != null) {
try {
stmt.close();
}
catch (SQLException ex) {
// logger.debug("Could not close JDBC Statement", ex);
}
catch (Throwable ex) {
// We don't trust the JDBC driver: It might throw RuntimeException or Error.
// logger.debug("Unexpected exception on closing JDBC Statement", ex);
}
}
}
/**
* Close the given JDBC ResultSet and ignore any thrown exception.
* This is useful for typical finally blocks in manual JDBC code.
*
* @param rs the JDBC ResultSet to close (may be <code>null</code>)
*/
public static void closeResultSet(ResultSet rs) {
if (rs != null) {
try {
rs.close();
}
catch (SQLException ex) {
// logger.debug("Could not close JDBC ResultSet", ex);
}
catch (Throwable ex) {
// We don't trust the JDBC driver: It might throw RuntimeException or Error.
// logger.debug("Unexpected exception on closing JDBC ResultSet", ex);
}
}
}
/**
* Retrieve a JDBC column value from a ResultSet, using the most appropriate
* value type. The returned value should be a detached value object, not having
* any ties to the active ResultSet: in particular, it should not be a Blob or
* Clob object but rather a byte array or String representation, respectively.
* <p>Uses the <code>getObject(index)</code> method, but includes additional "hacks"
* to get around Oracle 10g returning a non-standard object for its TIMESTAMP
* datatype and a <code>java.sql.Date</code> for DATE columns leaving out the
* time portion: These columns will explicitly be extracted as standard
* <code>java.sql.Timestamp</code> object.
*
* @param rs is the ResultSet holding the data
* @param index is the column index
* @return the value object
* @throws SQLException if thrown by the JDBC API
* @see java.sql.Blob
* @see java.sql.Clob
* @see java.sql.Timestamp
*/
public static Object getResultSetValue(ResultSet rs, int index) throws SQLException {
Object obj = rs.getObject(index);
if (obj instanceof Blob) {
obj = rs.getBytes(index);
} else if (obj instanceof Clob) {
obj = rs.getString(index);
} else if (obj != null && obj.getClass().getName().startsWith("oracle.sql.TIMESTAMP")) {
obj = rs.getTimestamp(index);
} else if (obj != null && obj.getClass().getName().startsWith("oracle.sql.DATE")) {
String metaDataClassName = rs.getMetaData().getColumnClassName(index);
if ("java.sql.Timestamp".equals(metaDataClassName) ||
"oracle.sql.TIMESTAMP".equals(metaDataClassName)) {
obj = rs.getTimestamp(index);
} else {
obj = rs.getDate(index);
}
} else if (obj instanceof java.sql.Date) {
if ("java.sql.Timestamp".equals(rs.getMetaData().getColumnClassName(index))) {
obj = rs.getTimestamp(index);
}
}
return obj;
}
/**
* Check whether the given SQL type is numeric.
*
* @param sqlType the SQL type to be checked
* @return whether the type is numeric
*/
public static boolean isNumeric(int sqlType) {
return Types.BIT == sqlType || Types.BIGINT == sqlType || Types.DECIMAL == sqlType ||
Types.DOUBLE == sqlType || Types.FLOAT == sqlType || Types.INTEGER == sqlType ||
Types.NUMERIC == sqlType || Types.REAL == sqlType || Types.SMALLINT == sqlType ||
Types.TINYINT == sqlType;
}
/**
* Return a single result object from the given Collection.
* <p>Throws an exception if 0 or more than 1 element found.
* @param results the result Collection (can be <code>null</code>)
* @return the single result object
*/
public static Object requiredSingleResult(Collection results) throws DatabaseException {
int size = (results != null ? results.size() : 0);
if (size == 0) {
throw new DatabaseException("Empty result set, expected one row");
}
if (results.size() > 1) {
throw new DatabaseException("Result set larger than one row");
}
return results.iterator().next();
}
}
| liquibase/BACKUP_FROM_SVN | liquibase-core/src/main/java/liquibase/util/JdbcUtils.java | Java | apache-2.0 | 5,317 |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.exception;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import com.fasterxml.jackson.databind.JsonMappingException;
/**
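* Maps Jackson {@link JsonMappingException}s to an HTTP 400 (Bad Request) response.
*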
* @author Thorben Lindhauer
*
*/
@Provider
public class JsonMappingExceptionHandler implements ExceptionMapper<JsonMappingException> {
@Override
public Response toResponse(JsonMappingException exception) {
InvalidRequestException badRequestException = new InvalidRequestException(Status.BAD_REQUEST,
exception, "");
return ExceptionHandlerHelper.getInstance().getResponse(badRequestException);
}
}
| camunda/camunda-bpm-platform | engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/exception/JsonMappingExceptionHandler.java | Java | apache-2.0 | 1,564 |
package org.mosspaper.prefs;
import org.mosspaper.Config;
import org.mosspaper.R;
import android.content.Context;
import android.content.res.Resources;
import android.content.SharedPreferences;
import android.preference.DialogPreference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
import android.widget.EditText;
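/**
 * A {@link DialogPreference} that lets the user enter a refresh interval as a number
 * plus a unit picked from a spinner, and persists the entered value multiplied by the
 * selected unit factor (taken from R.array.interval_values) as a float.
 */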
public class IntervalPreference extends DialogPreference {
static final String TAG = "IntervalPreference";
private EditText mText;
private Spinner mSpinner;
private int[] typeValues;
private int intervalType;
public IntervalPreference(Context context, AttributeSet attrs) {
super(context, attrs);
setupLayout(context, attrs);
}
public IntervalPreference(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
setupLayout(context, attrs);
}
private void setupLayout(Context context, AttributeSet attrs) {
Resources r = getContext().getResources();
typeValues = r.getIntArray(R.array.interval_values);
setPersistent(true);
setDialogLayoutResource(R.layout.dia_interval);
}
@Override
protected View onCreateDialogView() {
View view = super.onCreateDialogView();
float val = getPersistedFloat(1.0f);
int idx = 0; // seconds
if (val >= 3600) {
idx = 2; // hours
val /= 3600.0f;
} else if (val >= 60) {
idx = 1; // minutes
val /= 60.0f;
}
mText = (EditText) view.findViewById(R.id.interval_string);
mText.setText(String.valueOf(val));
mSpinner = (Spinner) view.findViewById(R.id.interval_spinner);
ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(
getContext(), R.array.interval_types, android.R.layout.simple_spinner_item);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mSpinner.setAdapter(adapter);
mSpinner.setSelection(idx);
mSpinner.setOnItemSelectedListener(new OnItemSelectedListener() {
public void onItemSelected(AdapterView parent, View view, int pos, long id) {
intervalType = typeValues[pos];
}
public void onNothingSelected(AdapterView parent) {
}
});
return view;
}
@Override
protected void onDialogClosed(boolean positiveResult) {
if (positiveResult) {
try {
float interval = Float.parseFloat(mText.getText().toString());
persistFloat(interval * intervalType);
} catch (NumberFormatException e) {
android.util.Log.e(TAG, "", e);
}
}
}
}
| eric-stanley/moss | src/org/mosspaper/prefs/IntervalPreference.java | Java | apache-2.0 | 2,878 |
package dataMapper.diagram.custom.part;
import org.eclipse.gef.GraphicalViewer;
import org.eclipse.gmf.runtime.diagram.ui.internal.parts.DiagramGraphicalViewerKeyHandler;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.ui.IWorkbenchPart;
import dataMapper.diagram.part.DeleteElementAction;
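/**
 * Key handler for the data mapper diagram viewer that wires the DEL key to a
 * {@link DeleteElementAction}.
 */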
public class CustomDiagramGraphicalViewerKeyHandler extends DiagramGraphicalViewerKeyHandler{
private DeleteElementAction deleteAction;
IWorkbenchPart part;
public CustomDiagramGraphicalViewerKeyHandler(IWorkbenchPart part,GraphicalViewer viewer) {
super(viewer);
this.part=part;
deleteAction = new DeleteElementAction(part);
deleteAction.init();
deleteAction.setAccelerator(SWT.DEL);
}
public boolean keyPressed(KeyEvent event) {
switch (event.keyCode) {
case SWT.DEL :
deleteAction.run(null);
return true;
}
return super.keyPressed(event);
}
}
| thiliniish/developer-studio | data-mapper/org.wso2.developerstudio.visualdatamapper.diagram/src/dataMapper/diagram/custom/part/CustomDiagramGraphicalViewerKeyHandler.java | Java | apache-2.0 | 1,039 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Copyright 2004-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elasticsearch.common.lucene.search;
import java.io.*;
import java.util.*;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.*;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.TFIDFSimilarity;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.FastStringReader;
/**
* Generate "more like this" similarity queries.
* Based on this mail:
* <code><pre>
* Lucene does let you access the document frequency of terms, with IndexReader.docFreq().
* Term frequencies can be computed by re-tokenizing the text, which, for a single document,
* is usually fast enough. But looking up the docFreq() of every term in the document is
* probably too slow.
* <p/>
* You can use some heuristics to prune the set of terms, to avoid calling docFreq() too much,
* or at all. Since you're trying to maximize a tf*idf score, you're probably most interested
* in terms with a high tf. Choosing a tf threshold even as low as two or three will radically
* reduce the number of terms under consideration. Another heuristic is that terms with a
* high idf (i.e., a low df) tend to be longer. So you could threshold the terms by the
* number of characters, not selecting anything less than, e.g., six or seven characters.
* With these sorts of heuristics you can usually find small set of, e.g., ten or fewer terms
* that do a pretty good job of characterizing a document.
* <p/>
* It all depends on what you're trying to do. If you're trying to eke out that last percent
* of precision and recall regardless of computational difficulty so that you can win a TREC
* competition, then the techniques I mention above are useless. But if you're trying to
* provide a "more like this" button on a search results page that does a decent job and has
* good performance, such techniques might be useful.
* <p/>
* An efficient, effective "more-like-this" query generator would be a great contribution, if
* anyone's interested. I'd imagine that it would take a Reader or a String (the document's
* text), an Analyzer, and return a set of representative terms using heuristics like those
* above. The frequency and length thresholds could be parameters, etc.
* <p/>
* Doug
* </pre></code>
* <p/>
* <p/>
* <p/>
* <h3>Initial Usage</h3>
* <p/>
* This class has lots of options to try to make it efficient and flexible.
* The simplest possible usage is as follows. The bold
* fragment is specific to this class.
* <p/>
* <pre class="prettyprint">
* <p/>
* IndexReader ir = ...
* IndexSearcher is = ...
* <p/>
* MoreLikeThis mlt = new MoreLikeThis(ir);
* Reader target = ... // orig source of doc you want to find similarities to
* Query query = mlt.like( target);
* <p/>
* Hits hits = is.search(query);
* // now the usual iteration thru 'hits' - the only thing to watch for is to make sure
* //you ignore the doc if it matches your 'target' document, as it should be similar to itself
* <p/>
* </pre>
* <p/>
* Thus you:
* <ol>
* <li> do your normal, Lucene setup for searching,
* <li> create a MoreLikeThis,
* <li> get the text of the doc you want to find similarities to
* <li> then call one of the like() calls to generate a similarity query
* <li> call the searcher to find the similar docs
* </ol>
* <p/>
* <h3>More Advanced Usage</h3>
* <p/>
* You may want to use {@link #setFieldNames setFieldNames(...)} so you can examine
* multiple fields (e.g. body and title) for similarity.
* <p/>
* <p/>
* Depending on the size of your index and the size and makeup of your documents you
* may want to call the other set methods to control how the similarity queries are
* generated:
* <ul>
* <li> {@link #setMinTermFreq setMinTermFreq(...)}
* <li> {@link #setMinDocFreq setMinDocFreq(...)}
* <li> {@link #setMaxDocFreq setMaxDocFreq(...)}
* <li> {@link #setMaxDocFreqPct setMaxDocFreqPct(...)}
* <li> {@link #setMinWordLen setMinWordLen(...)}
* <li> {@link #setMaxWordLen setMaxWordLen(...)}
* <li> {@link #setMaxQueryTerms setMaxQueryTerms(...)}
* <li> {@link #setMaxNumTokensParsed setMaxNumTokensParsed(...)}
* <li> {@link #setStopWords setStopWord(...)}
* </ul>
* <p/>
* <hr>
* <pre>
* Changes: Mark Harwood 29/02/04
* Some bugfixing, some refactoring, some optimisation.
* - bugfix: retrieveTerms(int docNum) was not working for indexes without a termvector -added missing code
* - bugfix: No significant terms being created for fields with a termvector - because
* was only counting one occurrence per term/field pair in calculations(ie not including frequency info from TermVector)
* - refactor: moved common code into isNoiseWord()
* - optimise: when no termvector support available - used maxNumTermsParsed to limit amount of tokenization
* </pre>
*/
public final class XMoreLikeThis {
static {
assert Version.CURRENT.luceneVersion == org.apache.lucene.util.Version.LUCENE_48: "Remove this class once we upgrade to Lucene 4.9";
}
/**
* Default maximum number of tokens to parse in each example doc field that is not stored with TermVector support.
*
* @see #getMaxNumTokensParsed
*/
public static final int DEFAULT_MAX_NUM_TOKENS_PARSED = 5000;
/**
* Ignore terms with less than this frequency in the source doc.
*
* @see #getMinTermFreq
* @see #setMinTermFreq
*/
public static final int DEFAULT_MIN_TERM_FREQ = 2;
/**
* Ignore words which do not occur in at least this many docs.
*
* @see #getMinDocFreq
* @see #setMinDocFreq
*/
public static final int DEFAULT_MIN_DOC_FREQ = 5;
/**
* Ignore words which occur in more than this many docs.
*
* @see #getMaxDocFreq
* @see #setMaxDocFreq
* @see #setMaxDocFreqPct
*/
public static final int DEFAULT_MAX_DOC_FREQ = Integer.MAX_VALUE;
/**
* Boost terms in query based on score.
*
* @see #isBoost
* @see #setBoost
*/
public static final boolean DEFAULT_BOOST = false;
/**
* Default field names. Null is used to specify that the field names should be looked
* up at runtime from the provided reader.
*/
public static final String[] DEFAULT_FIELD_NAMES = new String[]{"contents"};
/**
* Ignore words less than this length or if 0 then this has no effect.
*
* @see #getMinWordLen
* @see #setMinWordLen
*/
public static final int DEFAULT_MIN_WORD_LENGTH = 0;
/**
* Ignore words greater than this length or if 0 then this has no effect.
*
* @see #getMaxWordLen
* @see #setMaxWordLen
*/
public static final int DEFAULT_MAX_WORD_LENGTH = 0;
/**
* Default set of stopwords.
* If null means to allow stop words.
*
* @see #setStopWords
* @see #getStopWords
*/
public static final Set<?> DEFAULT_STOP_WORDS = null;
/**
* Current set of stop words.
*/
private Set<?> stopWords = DEFAULT_STOP_WORDS;
/**
* Return a Query with no more than this many terms.
*
* @see BooleanQuery#getMaxClauseCount
* @see #getMaxQueryTerms
* @see #setMaxQueryTerms
*/
public static final int DEFAULT_MAX_QUERY_TERMS = 25;
/**
* Analyzer that will be used to parse the doc.
*/
private Analyzer analyzer = null;
/**
* Ignore words less frequent that this.
*/
private int minTermFreq = DEFAULT_MIN_TERM_FREQ;
/**
* Ignore words which do not occur in at least this many docs.
*/
private int minDocFreq = DEFAULT_MIN_DOC_FREQ;
/**
* Ignore words which occur in more than this many docs.
*/
private int maxDocFreq = DEFAULT_MAX_DOC_FREQ;
/**
* Should we apply a boost to the Query based on the scores?
*/
private boolean boost = DEFAULT_BOOST;
/**
* Field name we'll analyze.
*/
private String[] fieldNames = DEFAULT_FIELD_NAMES;
/**
* The maximum number of tokens to parse in each example doc field that is not stored with TermVector support
*/
private int maxNumTokensParsed = DEFAULT_MAX_NUM_TOKENS_PARSED;
/**
* Ignore words if less than this len.
*/
private int minWordLen = DEFAULT_MIN_WORD_LENGTH;
/**
* Ignore words if greater than this len.
*/
private int maxWordLen = DEFAULT_MAX_WORD_LENGTH;
/**
* Don't return a query longer than this.
*/
private int maxQueryTerms = DEFAULT_MAX_QUERY_TERMS;
/**
* For idf() calculations.
*/
private TFIDFSimilarity similarity;// = new DefaultSimilarity();
/**
* IndexReader to use
*/
private final IndexReader ir;
/**
* Boost factor to use when boosting the terms
*/
private float boostFactor = 1;
/**
* Returns the boost factor used when boosting terms
*
* @return the boost factor used when boosting terms
* @see #setBoostFactor(float)
*/
public float getBoostFactor() {
return boostFactor;
}
/**
* Sets the boost factor to use when boosting terms
*
* @see #getBoostFactor()
*/
public void setBoostFactor(float boostFactor) {
this.boostFactor = boostFactor;
}
/**
* Constructor requiring an IndexReader.
*/
public XMoreLikeThis(IndexReader ir) {
this(ir, new DefaultSimilarity());
}
public XMoreLikeThis(IndexReader ir, TFIDFSimilarity sim) {
this.ir = ir;
this.similarity = sim;
}
public TFIDFSimilarity getSimilarity() {
return similarity;
}
public void setSimilarity(TFIDFSimilarity similarity) {
this.similarity = similarity;
}
/**
* Returns an analyzer that will be used to parse source doc with. The default analyzer
* is not set.
*
* @return the analyzer that will be used to parse source doc with.
*/
public Analyzer getAnalyzer() {
return analyzer;
}
/**
* Sets the analyzer to use. An analyzer is not required for generating a query with the
* {@link #like(int)} method, all other 'like' methods require an analyzer.
*
* @param analyzer the analyzer to use to tokenize text.
*/
public void setAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
}
/**
* Returns the frequency below which terms will be ignored in the source doc. The default
* frequency is the {@link #DEFAULT_MIN_TERM_FREQ}.
*
* @return the frequency below which terms will be ignored in the source doc.
*/
public int getMinTermFreq() {
return minTermFreq;
}
/**
* Sets the frequency below which terms will be ignored in the source doc.
*
* @param minTermFreq the frequency below which terms will be ignored in the source doc.
*/
public void setMinTermFreq(int minTermFreq) {
this.minTermFreq = minTermFreq;
}
/**
* Returns the frequency at which words will be ignored which do not occur in at least this
* many docs. The default frequency is {@link #DEFAULT_MIN_DOC_FREQ}.
*
* @return the frequency at which words will be ignored which do not occur in at least this
* many docs.
*/
public int getMinDocFreq() {
return minDocFreq;
}
/**
* Sets the frequency at which words will be ignored which do not occur in at least this
* many docs.
*
* @param minDocFreq the frequency at which words will be ignored which do not occur in at
* least this many docs.
*/
public void setMinDocFreq(int minDocFreq) {
this.minDocFreq = minDocFreq;
}
/**
* Returns the maximum frequency in which words may still appear.
* Words that appear in more than this many docs will be ignored. The default frequency is
* {@link #DEFAULT_MAX_DOC_FREQ}.
*
* @return get the maximum frequency at which words are still allowed,
* words which occur in more docs than this are ignored.
*/
public int getMaxDocFreq() {
return maxDocFreq;
}
/**
* Set the maximum frequency in which words may still appear. Words that appear
* in more than this many docs will be ignored.
*
* @param maxFreq the maximum count of documents that a term may appear
* in to be still considered relevant
*/
public void setMaxDocFreq(int maxFreq) {
this.maxDocFreq = maxFreq;
}
/**
* Set the maximum percentage in which words may still appear. Words that appear
* in more than this many percent of all docs will be ignored.
*
* @param maxPercentage the maximum percentage of documents (0-100) that a term may appear
* in to be still considered relevant
*/
public void setMaxDocFreqPct(int maxPercentage) {
this.maxDocFreq = maxPercentage * ir.numDocs() / 100;
}
/**
* Returns whether to boost terms in query based on "score" or not. The default is
* {@link #DEFAULT_BOOST}.
*
* @return whether to boost terms in query based on "score" or not.
* @see #setBoost
*/
public boolean isBoost() {
return boost;
}
/**
* Sets whether to boost terms in query based on "score" or not.
*
* @param boost true to boost terms in query based on "score", false otherwise.
* @see #isBoost
*/
public void setBoost(boolean boost) {
this.boost = boost;
}
/**
* Returns the field names that will be used when generating the 'More Like This' query.
* The default field names that will be used is {@link #DEFAULT_FIELD_NAMES}.
*
* @return the field names that will be used when generating the 'More Like This' query.
*/
public String[] getFieldNames() {
return fieldNames;
}
/**
* Sets the field names that will be used when generating the 'More Like This' query.
* Set this to null for the field names to be determined at runtime from the IndexReader
* provided in the constructor.
*
* @param fieldNames the field names that will be used when generating the 'More Like This'
* query.
*/
public void setFieldNames(String[] fieldNames) {
this.fieldNames = fieldNames;
}
/**
* Returns the minimum word length below which words will be ignored. Set this to 0 for no
* minimum word length. The default is {@link #DEFAULT_MIN_WORD_LENGTH}.
*
* @return the minimum word length below which words will be ignored.
*/
public int getMinWordLen() {
return minWordLen;
}
/**
* Sets the minimum word length below which words will be ignored.
*
* @param minWordLen the minimum word length below which words will be ignored.
*/
public void setMinWordLen(int minWordLen) {
this.minWordLen = minWordLen;
}
/**
* Returns the maximum word length above which words will be ignored. Set this to 0 for no
* maximum word length. The default is {@link #DEFAULT_MAX_WORD_LENGTH}.
*
* @return the maximum word length above which words will be ignored.
*/
public int getMaxWordLen() {
return maxWordLen;
}
/**
* Sets the maximum word length above which words will be ignored.
*
* @param maxWordLen the maximum word length above which words will be ignored.
*/
public void setMaxWordLen(int maxWordLen) {
this.maxWordLen = maxWordLen;
}
/**
* Set the set of stopwords.
* Any word in this set is considered "uninteresting" and ignored.
* Even if your Analyzer allows stopwords, you might want to tell the MoreLikeThis code to ignore them, as
* for the purposes of document similarity it seems reasonable to assume that "a stop word is never interesting".
*
* @param stopWords set of stopwords, if null it means to allow stop words
* @see #getStopWords
*/
public void setStopWords(Set<?> stopWords) {
this.stopWords = stopWords;
}
/**
* Get the current stop words being used.
*
* @see #setStopWords
*/
public Set<?> getStopWords() {
return stopWords;
}
/**
* Returns the maximum number of query terms that will be included in any generated query.
* The default is {@link #DEFAULT_MAX_QUERY_TERMS}.
*
* @return the maximum number of query terms that will be included in any generated query.
*/
public int getMaxQueryTerms() {
return maxQueryTerms;
}
/**
* Sets the maximum number of query terms that will be included in any generated query.
*
* @param maxQueryTerms the maximum number of query terms that will be included in any
* generated query.
*/
public void setMaxQueryTerms(int maxQueryTerms) {
this.maxQueryTerms = maxQueryTerms;
}
/**
* @return The maximum number of tokens to parse in each example doc field that is not stored with TermVector support
* @see #DEFAULT_MAX_NUM_TOKENS_PARSED
*/
public int getMaxNumTokensParsed() {
return maxNumTokensParsed;
}
/**
* @param i The maximum number of tokens to parse in each example doc field that is not stored with TermVector support
*/
public void setMaxNumTokensParsed(int i) {
maxNumTokensParsed = i;
}
/**
* Return a query that will return docs like the passed lucene document ID.
*
* @param docNum the documentID of the lucene doc to generate the 'More Like This" query for.
* @return a query that will return docs like the passed lucene document ID.
*/
public Query like(int docNum) throws IOException {
if (fieldNames == null) {
// gather list of valid fields from lucene
Collection<String> fields = MultiFields.getIndexedFields(ir);
fieldNames = fields.toArray(new String[fields.size()]);
}
return createQuery(retrieveTerms(docNum));
}
/**
* Return a query that will return docs like the passed Reader.
*
* @return a query that will return docs like the passed Reader.
*/
@Deprecated
public Query like(Reader r, String fieldName) throws IOException {
return like(fieldName, r);
}
/**
* Return a query that will return docs like the passed Readers.
* This was added in order to treat multi-value fields.
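* <p>
* For example ({@code mlt}, {@code part1} and {@code part2} are illustrative; an
* analyzer must have been set via {@link #setAnalyzer}):
* <pre>
* Query q = mlt.like("body", new StringReader(part1), new StringReader(part2));
* </pre>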
*
* @return a query that will return docs like the passed Readers.
*/
public Query like(String fieldName, Reader... readers) throws IOException {
Map<String, Int> words = new HashMap<>();
for (Reader r : readers) {
addTermFrequencies(r, words, fieldName);
}
return createQuery(createQueue(words));
}
/**
* Create the More like query from a PriorityQueue
*/
private Query createQuery(PriorityQueue<Object[]> q) {
BooleanQuery query = new BooleanQuery();
Object cur;
int qterms = 0;
float bestScore = 0;
while ((cur = q.pop()) != null) {
Object[] ar = (Object[]) cur;
TermQuery tq = new TermQuery(new Term((String) ar[1], (String) ar[0]));
if (boost) {
if (qterms == 0) {
bestScore = ((Float) ar[2]);
}
float myScore = ((Float) ar[2]);
tq.setBoost(boostFactor * myScore / bestScore);
}
try {
query.add(tq, BooleanClause.Occur.SHOULD);
}
catch (BooleanQuery.TooManyClauses ignore) {
break;
}
qterms++;
if (maxQueryTerms > 0 && qterms >= maxQueryTerms) {
break;
}
}
return query;
}
/**
* Create a PriorityQueue from a word->tf map.
*
* @param words a map of words keyed on the word(String) with Int objects as the values.
*/
private PriorityQueue<Object[]> createQueue(Map<String, Int> words) throws IOException {
// have collected all words in doc and their freqs
int numDocs = ir.numDocs();
FreqQ res = new FreqQ(words.size()); // will order words by score
for (String word : words.keySet()) { // for every word
int tf = words.get(word).x; // term freq in the source doc
if (minTermFreq > 0 && tf < minTermFreq) {
continue; // filter out words that don't occur enough times in the source
}
// go through all the fields and find the largest document frequency
String topField = fieldNames[0];
int docFreq = 0;
for (String fieldName : fieldNames) {
int freq = ir.docFreq(new Term(fieldName, word));
topField = (freq > docFreq) ? fieldName : topField;
docFreq = (freq > docFreq) ? freq : docFreq;
}
if (minDocFreq > 0 && docFreq < minDocFreq) {
continue; // filter out words that don't occur in enough docs
}
if (docFreq > maxDocFreq) {
continue; // filter out words that occur in too many docs
}
if (docFreq == 0) {
continue; // index update problem?
}
float idf = similarity.idf(docFreq, numDocs);
float score = tf * idf;
// only really need 1st 3 entries, other ones are for troubleshooting
res.insertWithOverflow(new Object[]{word, // the word
topField, // the top field
score, // overall score
idf, // idf
docFreq, // freq in all docs
tf
});
}
return res;
}
/**
* Describe the parameters that control how the "more like this" query is formed.
*/
public String describeParams() {
StringBuilder sb = new StringBuilder();
sb.append("\t").append("maxQueryTerms : ").append(maxQueryTerms).append("\n");
sb.append("\t").append("minWordLen : ").append(minWordLen).append("\n");
sb.append("\t").append("maxWordLen : ").append(maxWordLen).append("\n");
sb.append("\t").append("fieldNames : ");
String delim = "";
for (String fieldName : fieldNames) {
sb.append(delim).append(fieldName);
delim = ", ";
}
sb.append("\n");
sb.append("\t").append("boost : ").append(boost).append("\n");
sb.append("\t").append("minTermFreq : ").append(minTermFreq).append("\n");
sb.append("\t").append("minDocFreq : ").append(minDocFreq).append("\n");
return sb.toString();
}
/**
* Find words for a more-like-this query former.
*
* @param docNum the id of the lucene document from which to find terms
*/
public PriorityQueue<Object[]> retrieveTerms(int docNum) throws IOException {
Map<String, Int> termFreqMap = new HashMap<>();
for (String fieldName : fieldNames) {
final Fields vectors = ir.getTermVectors(docNum);
final Terms vector;
if (vectors != null) {
vector = vectors.terms(fieldName);
} else {
vector = null;
}
// field does not store term vector info
if (vector == null) {
Document d = ir.document(docNum);
IndexableField fields[] = d.getFields(fieldName);
for (IndexableField field : fields) {
final String stringValue = field.stringValue();
if (stringValue != null) {
addTermFrequencies(new FastStringReader(stringValue), termFreqMap, fieldName);
}
}
} else {
addTermFrequencies(termFreqMap, vector);
}
}
return createQueue(termFreqMap);
}
/**
* Adds terms and frequencies found in vector into the Map termFreqMap
*
* @param termFreqMap a Map of terms and their frequencies
* @param vector List of terms and their frequencies for a doc/field
*/
private void addTermFrequencies(Map<String, Int> termFreqMap, Terms vector) throws IOException {
final TermsEnum termsEnum = vector.iterator(null);
final CharsRef spare = new CharsRef();
BytesRef text;
while((text = termsEnum.next()) != null) {
UnicodeUtil.UTF8toUTF16(text, spare);
final String term = spare.toString();
if (isNoiseWord(term)) {
continue;
}
final int freq = (int) termsEnum.totalTermFreq();
// increment frequency
Int cnt = termFreqMap.get(term);
if (cnt == null) {
cnt = new Int();
termFreqMap.put(term, cnt);
cnt.x = freq;
} else {
cnt.x += freq;
}
}
}
/**
* Adds term frequencies found by tokenizing text from reader into the Map words
*
* @param r a source of text to be tokenized
* @param termFreqMap a Map of terms and their frequencies
* @param fieldName Used by analyzer for any special per-field analysis
*/
private void addTermFrequencies(Reader r, Map<String, Int> termFreqMap, String fieldName)
throws IOException {
if (analyzer == null) {
throw new UnsupportedOperationException("To use MoreLikeThis without " +
"term vectors, you must provide an Analyzer");
}
TokenStream ts = analyzer.tokenStream(fieldName, r);
try {
int tokenCount = 0;
// for every token
CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
ts.reset();
while (ts.incrementToken()) {
String word = termAtt.toString();
tokenCount++;
if (tokenCount > maxNumTokensParsed) {
break;
}
if (isNoiseWord(word)) {
continue;
}
// increment frequency
Int cnt = termFreqMap.get(word);
if (cnt == null) {
termFreqMap.put(word, new Int());
} else {
cnt.x++;
}
}
ts.end();
} finally {
IOUtils.closeWhileHandlingException(ts);
}
}
/**
* determines if the passed term is likely to be of interest in "more like" comparisons
*
* @param term The word being considered
* @return true if should be ignored, false if should be used in further analysis
*/
private boolean isNoiseWord(String term) {
int len = term.length();
if (minWordLen > 0 && len < minWordLen) {
return true;
}
if (maxWordLen > 0 && len > maxWordLen) {
return true;
}
return stopWords != null && stopWords.contains(term);
}
/**
* Find words for a more-like-this query former.
* The result is a priority queue of arrays with one entry for <b>every word</b> in the document.
* Each array has 6 elements.
* The elements are:
* <ol>
* <li> The word (String)
* <li> The top field that this word comes from (String)
* <li> The score for this word (Float)
* <li> The IDF value (Float)
* <li> The frequency of this word in the index (Integer)
* <li> The frequency of this word in the source document (Integer)
* </ol>
* This is a somewhat "advanced" routine, and in general only the 1st entry in the array is of interest.
* This method is exposed so that you can identify the "interesting words" in a document.
* For an easier method to call see {@link #retrieveInterestingTerms retrieveInterestingTerms()}.
*
* @param r the reader that has the content of the document
* @param fieldName field passed to the analyzer to use when analyzing the content
* @return the most interesting words in the document ordered by score, with the highest scoring, or best entry, first
* @see #retrieveInterestingTerms
*/
public PriorityQueue<Object[]> retrieveTerms(Reader r, String fieldName) throws IOException {
Map<String, Int> words = new HashMap<>();
addTermFrequencies(r, words, fieldName);
return createQueue(words);
}
/**
* @see #retrieveInterestingTerms(java.io.Reader, String)
*/
public String[] retrieveInterestingTerms(int docNum) throws IOException {
ArrayList<Object> al = new ArrayList<>(maxQueryTerms);
PriorityQueue<Object[]> pq = retrieveTerms(docNum);
Object cur;
int lim = maxQueryTerms; // have to be careful, retrieveTerms returns all words but that's probably not useful to our caller...
// we just want to return the top words
while (((cur = pq.pop()) != null) && lim-- > 0) {
Object[] ar = (Object[]) cur;
al.add(ar[0]); // the 1st entry is the interesting word
}
String[] res = new String[al.size()];
return al.toArray(res);
}
/**
* Convenience routine to make it easy to return the most interesting words in a document.
* More advanced users will call {@link #retrieveTerms(Reader, String) retrieveTerms()} directly.
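* <p>
* A rough usage sketch ({@code indexReader}, {@code analyzer} and {@code text} are
* assumed to exist in the caller's scope):
* <pre>
* XMoreLikeThis mlt = new XMoreLikeThis(indexReader);
* mlt.setAnalyzer(analyzer); // required for the Reader-based methods
* mlt.setMaxQueryTerms(10);
* String[] terms = mlt.retrieveInterestingTerms(new StringReader(text), "body");
* </pre>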
*
* @param r the source document
* @param fieldName field passed to analyzer to use when analyzing the content
* @return the most interesting words in the document
* @see #retrieveTerms(java.io.Reader, String)
* @see #setMaxQueryTerms
*/
public String[] retrieveInterestingTerms(Reader r, String fieldName) throws IOException {
ArrayList<Object> al = new ArrayList<>(maxQueryTerms);
PriorityQueue<Object[]> pq = retrieveTerms(r, fieldName);
Object cur;
int lim = maxQueryTerms; // have to be careful, retrieveTerms returns all words but that's probably not useful to our caller...
// we just want to return the top words
while (((cur = pq.pop()) != null) && lim-- > 0) {
Object[] ar = (Object[]) cur;
al.add(ar[0]); // the 1st entry is the interesting word
}
String[] res = new String[al.size()];
return al.toArray(res);
}
/**
* PriorityQueue that orders words by score.
*/
private static class FreqQ extends PriorityQueue<Object[]> {
FreqQ(int s) {
super(s);
}
@Override
protected boolean lessThan(Object[] aa, Object[] bb) {
Float fa = (Float) aa[2];
Float fb = (Float) bb[2];
return fa > fb;
}
}
/**
* Use for frequencies and to avoid renewing Integers.
*/
private static class Int {
int x;
Int() {
x = 1;
}
}
}
| zhaocloud/elasticsearch | src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java | Java | apache-2.0 | 33,340 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.xpack.sql.action.BasicFormatter;
import org.elasticsearch.xpack.sql.proto.ColumnInfo;
import org.elasticsearch.xpack.sql.proto.StringUtils;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
import static org.elasticsearch.xpack.sql.action.BasicFormatter.FormatOption.CLI;
final class JdbcTestUtils {
private JdbcTestUtils() {}
private static final int MAX_WIDTH = 20;
static final String SQL_TRACE = "org.elasticsearch.xpack.sql:TRACE";
static final String JDBC_TIMEZONE = "timezone";
static void logResultSetMetaData(ResultSet rs, Logger logger) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
// header
StringBuilder sb = new StringBuilder();
StringBuilder column = new StringBuilder();
int columns = metaData.getColumnCount();
for (int i = 1; i <= columns; i++) {
if (i > 1) {
sb.append(" | ");
}
column.setLength(0);
column.append(metaData.getColumnName(i));
column.append("(");
column.append(metaData.getColumnTypeName(i));
column.append(")");
sb.append(trimOrPad(column));
}
int l = sb.length();
logger.info(sb.toString());
sb.setLength(0);
for (int i = 0; i < l; i++) {
sb.append("-");
}
logger.info(sb.toString());
}
static void logResultSetData(ResultSet rs, Logger log) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
int columns = metaData.getColumnCount();
while (rs.next()) {
log.info(rowAsString(rs, columns));
}
}
static String resultSetCurrentData(ResultSet rs) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
return rowAsString(rs, metaData.getColumnCount());
}
private static String rowAsString(ResultSet rs, int columns) throws SQLException {
StringBuilder sb = new StringBuilder();
StringBuilder column = new StringBuilder();
for (int i = 1; i <= columns; i++) {
column.setLength(0);
if (i > 1) {
sb.append(" | ");
}
sb.append(trimOrPad(column.append(rs.getString(i))));
}
return sb.toString();
}
private static StringBuilder trimOrPad(StringBuilder buffer) {
if (buffer.length() > MAX_WIDTH) {
buffer.setLength(MAX_WIDTH - 1);
buffer.append("~");
} else {
for (int i = buffer.length(); i < MAX_WIDTH; i++) {
buffer.append(" ");
}
}
return buffer;
}
public static void logLikeCLI(ResultSet rs, Logger logger) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
int columns = metaData.getColumnCount();
List<ColumnInfo> cols = new ArrayList<>(columns);
for (int i = 1; i <= columns; i++) {
cols.add(
new ColumnInfo(
metaData.getTableName(i),
metaData.getColumnName(i),
metaData.getColumnTypeName(i),
metaData.getColumnDisplaySize(i)
)
);
}
List<List<Object>> data = new ArrayList<>();
while (rs.next()) {
List<Object> entry = new ArrayList<>(columns);
for (int i = 1; i <= columns; i++) {
entry.add(rs.getObject(i));
}
data.add(entry);
}
BasicFormatter formatter = new BasicFormatter(cols, data, CLI);
logger.info("\n" + formatter.formatWithHeader(cols, data));
}
/**
* Returns the classpath resources matching a simple pattern ("*.csv").
* It supports folders separated by "/" (e.g. "/some/folder/*.txt").
*
* Currently able to resolve resources inside the classpath either from:
* folders in the file-system (typically IDEs) or
* inside jars (gradle).
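* <p>
* For instance (the pattern shown is illustrative):
* <pre>
* List&lt;URL&gt; csvFiles = JdbcTestUtils.classpathResources("/testdata/*.csv");
* </pre>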
*/
static List<URL> classpathResources(String pattern) throws Exception {
while (pattern.startsWith("/")) {
pattern = pattern.substring(1);
}
Tuple<String, String> split = pathAndName(pattern);
// the root folder searched inside the classpath - default is the root classpath
// default file match
final String root = split.v1();
final String filePattern = split.v2();
String[] resources = System.getProperty("java.class.path").split(System.getProperty("path.separator"));
List<URL> matches = new ArrayList<>();
for (String resource : resources) {
Path path = PathUtils.get(resource);
// check whether we're dealing with a jar
// Java 7 java.nio.file.FileSystem can be used on top of ZIPs/JARs but consumes more memory
// hence the use of the JAR API
if (path.toString().endsWith(".jar")) {
try (JarInputStream jar = getJarStream(path.toUri().toURL())) {
ZipEntry entry = null;
while ((entry = jar.getNextEntry()) != null) {
String name = entry.getName();
Tuple<String, String> entrySplit = pathAndName(name);
if (root.equals(entrySplit.v1()) && Regex.simpleMatch(filePattern, entrySplit.v2())) {
matches.add(new URL("jar:" + path.toUri() + "!/" + name));
}
}
}
}
// normal file access
else if (Files.isDirectory(path)) {
Files.walkFileTree(path, EnumSet.allOf(FileVisitOption.class), 1, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (Regex.simpleMatch(filePattern, file.toString())) {
matches.add(file.toUri().toURL());
}
return FileVisitResult.CONTINUE;
}
});
}
}
return matches;
}
@SuppressForbidden(reason = "need to open jar")
private static JarInputStream getJarStream(URL resource) throws IOException {
URLConnection con = resource.openConnection();
// do not cache files (to avoid keeping file handles around)
con.setUseCaches(false);
return new JarInputStream(con.getInputStream());
}
static Tuple<String, String> pathAndName(String string) {
String folder = StringUtils.EMPTY;
String file = string;
int lastIndexOf = string.lastIndexOf("/");
if (lastIndexOf > 0) {
folder = string.substring(0, lastIndexOf);
if (lastIndexOf + 1 < string.length()) {
file = string.substring(lastIndexOf + 1);
}
}
return new Tuple<>(folder, file);
}
}
| gingerwizard/elasticsearch | x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java | Java | apache-2.0 | 8,111 |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui;
import org.eclipse.jface.dialogs.IDialogPage;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
/**
* IDataSourceConnectionEditor
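* <p>
* A dialog page for viewing and editing a data source's connection settings:
* implementations load the current settings, report whether the entered data is
* complete, and save changes back to the supplied {@link DBPDataSourceContainer}.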
*/
public interface IDataSourceConnectionEditor extends IDialogPage
{
void setSite(IDataSourceConnectionEditorSite site);
boolean isComplete();
void loadSettings();
void saveSettings(DBPDataSourceContainer dataSource);
}
| liuyuanyuan/dbeaver | plugins/org.jkiss.dbeaver.ui/src/org/jkiss/dbeaver/ui/IDataSourceConnectionEditor.java | Java | apache-2.0 | 1,073 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.engine.impl.behavior.impl;
import org.flowable.cmmn.engine.impl.behavior.CoreCmmnActivityBehavior;
import org.flowable.cmmn.engine.impl.persistence.entity.PlanItemInstanceEntity;
import org.flowable.cmmn.engine.impl.util.CommandContextUtil;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.delegate.Expression;
import org.flowable.engine.common.impl.interceptor.CommandContext;
/**
* ActivityBehavior that evaluates an expression when executed. Optionally, it sets the result of the expression as a variable on the execution.
*
* @author Tijs Rademakers
*/
public class PlanItemExpressionActivityBehavior extends CoreCmmnActivityBehavior {
protected String expression;
protected String resultVariable;
public PlanItemExpressionActivityBehavior(String expression, String resultVariable) {
this.expression = expression;
this.resultVariable = resultVariable;
}
@Override
public void execute(CommandContext commandContext, PlanItemInstanceEntity planItemInstanceEntity) {
Object value = null;
try {
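            // evaluate the configured expression against the plan item instance; the result is optionally stored in 'resultVariable' before the plan item is completed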
Expression expressionObject = CommandContextUtil.getCmmnEngineConfiguration(commandContext).getExpressionManager().createExpression(expression);
value = expressionObject.getValue(planItemInstanceEntity);
if (resultVariable != null) {
planItemInstanceEntity.setVariable(resultVariable, value);
}
CommandContextUtil.getAgenda().planCompletePlanItemInstanceOperation(planItemInstanceEntity);
} catch (Exception exc) {
throw new FlowableException(exc.getMessage(), exc);
}
}
}
| marcus-nl/flowable-engine | modules/flowable-cmmn-engine/src/main/java/org/flowable/cmmn/engine/impl/behavior/impl/PlanItemExpressionActivityBehavior.java | Java | apache-2.0 | 2,292 |
/*
* ******************************************************************************
* Copyright 2014-2019 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
package com.spectralogic.ds3client.models;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import java.util.List;
import java.util.ArrayList;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import java.util.UUID;
@JacksonXmlRootElement(namespace = "ListPartsResult")
public class ListPartsResult {
// Variables
@JsonProperty("Bucket")
private String bucket;
@JsonProperty("Key")
private String key;
@JsonProperty("MaxParts")
private int maxParts;
@JsonProperty("NextPartNumberMarker")
private int nextPartNumberMarker;
@JsonProperty("Owner")
private User owner;
@JsonProperty("PartNumberMarker")
private Integer partNumberMarker;
@JsonProperty("Part")
@JacksonXmlElementWrapper(useWrapping = false)
private List<MultiPartUploadPart> parts = new ArrayList<>();
@JsonProperty("IsTruncated")
private boolean truncated;
@JsonProperty("UploadId")
private UUID uploadId;
// Constructor
public ListPartsResult() {
//pass
}
// Getters and Setters
public String getBucket() {
return this.bucket;
}
public void setBucket(final String bucket) {
this.bucket = bucket;
}
public String getKey() {
return this.key;
}
public void setKey(final String key) {
this.key = key;
}
public int getMaxParts() {
return this.maxParts;
}
public void setMaxParts(final int maxParts) {
this.maxParts = maxParts;
}
public int getNextPartNumberMarker() {
return this.nextPartNumberMarker;
}
public void setNextPartNumberMarker(final int nextPartNumberMarker) {
this.nextPartNumberMarker = nextPartNumberMarker;
}
public User getOwner() {
return this.owner;
}
public void setOwner(final User owner) {
this.owner = owner;
}
public Integer getPartNumberMarker() {
return this.partNumberMarker;
}
public void setPartNumberMarker(final Integer partNumberMarker) {
this.partNumberMarker = partNumberMarker;
}
public List<MultiPartUploadPart> getParts() {
return this.parts;
}
public void setParts(final List<MultiPartUploadPart> parts) {
this.parts = parts;
}
public boolean getTruncated() {
return this.truncated;
}
public void setTruncated(final boolean truncated) {
this.truncated = truncated;
}
public UUID getUploadId() {
return this.uploadId;
}
public void setUploadId(final UUID uploadId) {
this.uploadId = uploadId;
}
} | SpectraLogic/ds3_java_sdk | ds3-sdk/src/main/java/com/spectralogic/ds3client/models/ListPartsResult.java | Java | apache-2.0 | 3,547 |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.actions;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.lang.LanguageImportStatements;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.EditorSettingsExternalizable;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import org.jetbrains.annotations.NonNls;
public class OptimizeImportsAction extends AnAction {
private static final @NonNls String HELP_ID = "editing.manageImports";
@Override
public void actionPerformed(AnActionEvent event) {
actionPerformedImpl(event.getDataContext());
}
public static void actionPerformedImpl(final DataContext dataContext) {
final Project project = CommonDataKeys.PROJECT.getData(dataContext);
if (project == null) {
return;
}
PsiDocumentManager.getInstance(project).commitAllDocuments();
final Editor editor = BaseCodeInsightAction.getInjectedEditor(project, CommonDataKeys.EDITOR.getData(dataContext));
final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
PsiFile file = null;
PsiDirectory dir;
if (editor != null){
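      // invoked from an open editor: optimize imports in the file shown in that editor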
file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
if (file == null) return;
dir = file.getContainingDirectory();
}
else if (files != null && ReformatCodeAction.areFiles(files)) {
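      // invoked on a multi-file selection: process all of them, provided they can be made writable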
final ReadonlyStatusHandler.OperationStatus operationStatus = ReadonlyStatusHandler.getInstance(project).ensureFilesWritable(files);
if (!operationStatus.hasReadonlyFiles()) {
new OptimizeImportsProcessor(project, ReformatCodeAction.convertToPsiFiles(files, project), null).run();
}
return;
}
else{
Project projectContext = PlatformDataKeys.PROJECT_CONTEXT.getData(dataContext);
Module moduleContext = LangDataKeys.MODULE_CONTEXT.getData(dataContext);
if (projectContext != null || moduleContext != null) {
final String text;
if (moduleContext != null) {
text = CodeInsightBundle.message("process.scope.module", moduleContext.getName());
}
else {
text = CodeInsightBundle.message("process.scope.project", projectContext.getPresentableUrl());
}
LayoutProjectCodeDialog dialog
= new LayoutProjectCodeDialog(project, null, CodeInsightBundle.message("process.optimize.imports"), text, false);
dialog.show();
if (!dialog.isOK()) return;
if (moduleContext != null) {
new OptimizeImportsProcessor(project, moduleContext).run();
}
else {
new OptimizeImportsProcessor(projectContext).run();
}
return;
}
PsiElement element = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
if (element == null) return;
if (element instanceof PsiDirectoryContainer) {
dir = ((PsiDirectoryContainer)element).getDirectories()[0];
}
else if (element instanceof PsiDirectory) {
dir = (PsiDirectory)element;
}
else{
file = element.getContainingFile();
if (file == null) return;
dir = file.getContainingDirectory();
}
}
boolean processDirectory;
boolean includeSubdirectories;
if (ApplicationManager.getApplication().isUnitTestMode()) {
includeSubdirectories = processDirectory = false;
}
else if (!EditorSettingsExternalizable.getInstance().getOptions().SHOW_OPIMIZE_IMPORTS_DIALOG && file != null) {
includeSubdirectories = processDirectory = false;
}
else {
final LayoutCodeDialog dialog = new LayoutCodeDialog(project, CodeInsightBundle.message("process.optimize.imports"), file, dir, null, HELP_ID);
dialog.show();
if (!dialog.isOK()) return;
EditorSettingsExternalizable.getInstance().getOptions().SHOW_OPIMIZE_IMPORTS_DIALOG = !dialog.isDoNotAskMe();
ReformatCodeAction.updateShowDialogSetting(dialog, "\"Optimize Imports\" dialog disabled");
processDirectory = dialog.isProcessDirectory();
includeSubdirectories = dialog.isIncludeSubdirectories();
}
if (processDirectory){
new OptimizeImportsProcessor(project, dir, includeSubdirectories).run();
}
else{
new OptimizeImportsProcessor(project, file).run();
}
}
@Override
public void update(AnActionEvent event){
if (!LanguageImportStatements.INSTANCE.hasAnyExtensions()) {
event.getPresentation().setVisible(false);
return;
}
Presentation presentation = event.getPresentation();
DataContext dataContext = event.getDataContext();
Project project = CommonDataKeys.PROJECT.getData(dataContext);
if (project == null){
presentation.setEnabled(false);
return;
}
final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
final Editor editor = BaseCodeInsightAction.getInjectedEditor(project, CommonDataKeys.EDITOR.getData(dataContext), false);
if (editor != null){
PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
if (file == null || !isOptimizeImportsAvailable(file)){
presentation.setEnabled(false);
return;
}
}
else if (files != null && ReformatCodeAction.areFiles(files)) {
boolean anyHasOptimizeImports = false;
for (VirtualFile virtualFile : files) {
PsiFile file = PsiManager.getInstance(project).findFile(virtualFile);
if (file == null) {
presentation.setEnabled(false);
return;
}
if (isOptimizeImportsAvailable(file)) {
anyHasOptimizeImports = true;
}
}
if (!anyHasOptimizeImports) {
presentation.setEnabled(false);
return;
}
}
else if (files != null && files.length == 1) {
// skip. Both directories and single files are supported.
}
else if (LangDataKeys.MODULE_CONTEXT.getData(dataContext) == null &&
PlatformDataKeys.PROJECT_CONTEXT.getData(dataContext) == null) {
PsiElement element = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
if (element == null){
presentation.setEnabled(false);
return;
}
if (!(element instanceof PsiDirectory)){
PsiFile file = element.getContainingFile();
if (file == null || !isOptimizeImportsAvailable(file)){
presentation.setEnabled(false);
return;
}
}
}
presentation.setEnabled(true);
}
private static boolean isOptimizeImportsAvailable(final PsiFile file) {
return !LanguageImportStatements.INSTANCE.forFile(file).isEmpty();
}
}
| IllusionRom-deprecated/android_platform_tools_idea | platform/lang-impl/src/com/intellij/codeInsight/actions/OptimizeImportsAction.java | Java | apache-2.0 | 7,509 |
package mil.nga.giat.geowave.core.cli;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.JCommander;
public class VersionUtils
{
private final static Logger LOGGER = LoggerFactory.getLogger(VersionUtils.class);
private static final String BUILD_PROPERTIES_FILE_NAME = "build.properties";
private static final String VERSION_PROPERTY_KEY = "project.version";
public static Properties getBuildProperties() {
final Properties props = new Properties();
try (InputStream stream = VersionUtils.class.getClassLoader().getResourceAsStream(
BUILD_PROPERTIES_FILE_NAME);) {
if (stream != null) {
props.load(stream);
}
return props;
}
catch (final IOException e) {
LOGGER.warn(
"Cannot read GeoWave build properties to show version information",
e);
JCommander.getConsole().print(
"Cannot read GeoWave build properties to show version information: " + e.getMessage());
}
return props;
}
public static String getVersion() {
return getBuildProperties().getProperty(
VERSION_PROPERTY_KEY);
}
public static void printVersionInfo() {
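        // print every build.properties entry to the console; all entries except the first (which carries the leading '{' from Properties.toString()) are sorted alphabetically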
final List<String> buildAndPropertyList = Arrays.asList(getBuildProperties().toString().split(
","));
Collections.sort(buildAndPropertyList.subList(
1,
buildAndPropertyList.size()));
for (String str : buildAndPropertyList) {
JCommander.getConsole().println(
str);
}
}
}
| Becca42/geowave | core/cli/src/main/java/mil/nga/giat/geowave/core/cli/VersionUtils.java | Java | apache-2.0 | 1,592 |
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.plugins.ide.idea;
import org.gradle.api.tasks.Internal;
import org.gradle.internal.xml.XmlTransformer;
import org.gradle.plugins.ide.api.XmlGeneratorTask;
import org.gradle.plugins.ide.idea.model.IdeaModule;
import org.gradle.plugins.ide.idea.model.Module;
import org.gradle.work.DisableCachingByDefault;
import javax.inject.Inject;
import java.io.File;
/**
 * Generates an IDEA module file.
 * <p>
 * If you want to fine-tune the IDEA configuration, please refer to the examples in {@link IdeaModule}.
* <p>
* At this moment nearly all configuration is done via {@link IdeaModule}.
*/
@DisableCachingByDefault(because = "Not made cacheable, yet")
public class GenerateIdeaModule extends XmlGeneratorTask<Module> {
private IdeaModule module;
public GenerateIdeaModule() {}
@Inject
public GenerateIdeaModule(IdeaModule module) {
this.module = module;
}
@Override
protected Module create() {
return new Module(getXmlTransformer(), module.getPathFactory());
}
@Override
protected void configure(Module xmlModule) {
getModule().mergeXmlModule(xmlModule);
}
@Override
public XmlTransformer getXmlTransformer() {
if (module == null) {
return super.getXmlTransformer();
}
return module.getIml().getXmlTransformer();
}
/**
* Configures output *.iml file. It's <b>optional</b> because the task should configure it correctly for you (including making sure it is unique in the multi-module build). If you really need to
* change the output file name it is much easier to do it via the <b>idea.module.name</b> property. <p> Please refer to documentation in {@link IdeaModule} <b>name</b> property. In IntelliJ IDEA
* the module name is the same as the name of the *.iml file.
*/
@Override
public File getOutputFile() {
if (module == null) {
return super.getOutputFile();
}
return module.getOutputFile();
}
@Override
public void setOutputFile(File newOutputFile) {
module.setOutputFile(newOutputFile);
}
/**
* The Idea module model containing the details required to generate the module file.
*/
@Internal
public IdeaModule getModule() {
return module;
}
public void setModule(IdeaModule module) {
this.module = module;
}
}
| gradle/gradle | subprojects/ide/src/main/java/org/gradle/plugins/ide/idea/GenerateIdeaModule.java | Java | apache-2.0 | 3,050 |
/*
* Copyright (c) 2008-2014 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.connection;
import com.mongodb.MongoNamespace;
import com.mongodb.WriteConcern;
import com.mongodb.bulk.DeleteRequest;
import com.mongodb.internal.validator.NoOpFieldNameValidator;
import org.bson.BsonBinaryWriter;
import org.bson.FieldNameValidator;
import org.bson.codecs.EncoderContext;
import org.bson.io.BsonOutput;
import java.util.Collections;
import java.util.List;
/**
* A message for the delete command.
*
 * @mongodb.driver.manual reference/command/delete/#dbcmd.delete Delete Command
*/
class DeleteCommandMessage extends BaseWriteCommandMessage {
private final List<DeleteRequest> deletes;
/**
* Construct an instance.
*
* @param namespace the namespace
* @param ordered whether the writes are ordered
* @param writeConcern the write concern
* @param deletes the list of delete requests
* @param settings the message settings
*/
public DeleteCommandMessage(final MongoNamespace namespace, final boolean ordered, final WriteConcern writeConcern,
final List<DeleteRequest> deletes, final MessageSettings settings) {
super(namespace, ordered, writeConcern, settings);
this.deletes = deletes;
}
@Override
public int getItemCount() {
return deletes.size();
}
@Override
protected FieldNameValidator getFieldNameValidator() {
return new NoOpFieldNameValidator();
}
/**
* Gets the list of requests.
*
* @return the list of requests
*/
public List<DeleteRequest> getRequests() {
return Collections.unmodifiableList(deletes);
}
@Override
protected String getCommandName() {
return "delete";
}
@Override
protected BaseWriteCommandMessage writeTheWrites(final BsonOutput bsonOutput, final int commandStartPosition,
final BsonBinaryWriter writer) {
DeleteCommandMessage nextMessage = null;
writer.writeStartArray("deletes");
for (int i = 0; i < deletes.size(); i++) {
writer.mark();
DeleteRequest deleteRequest = deletes.get(i);
writer.writeStartDocument();
writer.pushMaxDocumentSize(getSettings().getMaxDocumentSize());
writer.writeName("q");
getCodec(deleteRequest.getFilter()).encode(writer, deleteRequest.getFilter(), EncoderContext.builder().build());
writer.writeInt32("limit", deleteRequest.isMulti() ? 0 : 1);
writer.popMaxDocumentSize();
writer.writeEndDocument();
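            // if this delete pushed the message over the size or batch-count limits, roll back the last document and carry the remaining requests over to a follow-up message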
if (exceedsLimits(bsonOutput.getPosition() - commandStartPosition, i + 1)) {
writer.reset();
nextMessage = new DeleteCommandMessage(getWriteNamespace(),
isOrdered(), getWriteConcern(), deletes.subList(i, deletes.size()),
getSettings());
break;
}
}
writer.writeEndArray();
return nextMessage;
}
}
| kay-kim/mongo-java-driver | driver-core/src/main/com/mongodb/connection/DeleteCommandMessage.java | Java | apache-2.0 | 3,715 |
/*
* Copyright 2016 The Johns Hopkins University Applied Physics Laboratory LLC
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.jhuapl.dorset.reporting;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.supercsv.cellprocessor.FmtDate;
import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ParseDate;
import org.supercsv.cellprocessor.ParseInt;
import org.supercsv.cellprocessor.ParseLong;
import org.supercsv.cellprocessor.constraint.NotNull;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.io.CsvBeanReader;
import org.supercsv.io.CsvBeanWriter;
import org.supercsv.io.ICsvBeanReader;
import org.supercsv.io.ICsvBeanWriter;
import org.supercsv.prefs.CsvPreference;
import edu.jhuapl.dorset.reporting.Report;
import edu.jhuapl.dorset.reporting.ReportQuery;
import edu.jhuapl.dorset.reporting.Reporter;
/**
* File Reporter
* <p>
* Stores reports of request handling to a csv file.
* <p>
* This is not intended for significant production use.
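 * <p>
 * Illustrative usage sketch (assumes a default-constructed {@code ReportQuery} matches all stored reports):
 * <pre>
 *   Reporter reporter = new FileReporter("reports.csv");
 *   reporter.store(report);   // some previously built Report
 *   Report[] matches = reporter.retrieve(new ReportQuery());
 * </pre>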
*/
public class FileReporter implements Reporter {
private static final Logger logger = LoggerFactory.getLogger(FileReporter.class);
private static final CsvPreference FORMAT = CsvPreference.EXCEL_PREFERENCE;
private static final String[] FIELDS = {"timestamp", "requestId", "requestText",
"agentName", "responseText", "responseCode", "routeTime", "agentTime"};
public static final String ISO_8601 = "yyyy-MM-dd'T'HH:mm:ssZ";
// timestamp, request id, request text, and route time are the required fields
private static final CellProcessor[] WRITE_PROCESSORS = new CellProcessor[] {
new FmtDate(ISO_8601), new NotNull(), new NotNull(), new Optional(), new Optional(),
new Optional(), new NotNull(), new Optional()};
private static final CellProcessor[] READ_PROCESSORS = new CellProcessor[] {
new ParseDate(ISO_8601), new NotNull(), new NotNull(), new Optional(), new Optional(),
new Optional(new ParseInt()), new ParseLong(), new Optional(new ParseLong())};
private final String filename;
private ICsvBeanWriter csvWriter = null;
private FileWriter fw = null;
private Object writeLock = new Object();
/**
* Create a file reporter
*
* @param filename the filename to write to
*/
public FileReporter(String filename) {
this.filename = filename;
if (Files.exists(Paths.get(filename))) {
try {
fw = new FileWriter(filename, true);
csvWriter = new CsvBeanWriter(fw, FORMAT);
} catch (IOException e) {
logger.error("Unable to open " + filename, e);
}
} else {
try {
fw = new FileWriter(filename);
csvWriter = new CsvBeanWriter(fw, FORMAT);
csvWriter.writeHeader(FIELDS);
csvWriter.flush();
} catch (IOException e) {
logger.error("Unable to create " + filename, e);
}
}
}
@Override
public void store(Report report) {
if (csvWriter == null) {
logger.warn("Unable to store report because the csv writer is not initialized.");
return;
}
try {
synchronized (writeLock) {
csvWriter.write(report, FIELDS, WRITE_PROCESSORS);
csvWriter.flush();
}
} catch (IOException e) {
logger.warn("Unable to store report because of internal csv writer error.", e);
return;
}
}
@Override
public Report[] retrieve(ReportQuery query) {
ICsvBeanReader csvReader = null;
try {
csvReader = new CsvBeanReader(new FileReader(filename), FORMAT);
} catch (FileNotFoundException e) {
logger.warn("Could not find " + filename, e);
return new Report[0];
}
List<Report> reports = new ArrayList<Report>();
try {
DateRangeChecker dateChecker = new DateRangeChecker(query);
AgentChecker agentChecker = new AgentChecker(query);
int limit = query.getLimit();
if (limit == ReportQuery.NO_LIMIT) {
limit = Integer.MAX_VALUE;
}
final String[] header = csvReader.getHeader(true);
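            // stream rows until the limit is reached, keeping only those that pass the date-range and agent filters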
while (reports.size() < limit) {
Report report = csvReader.read(Report.class, header, READ_PROCESSORS);
if (report == null) {
break;
}
if (!dateChecker.isInDateRange(report)) {
continue;
}
if (!agentChecker.isAgent(report)) {
continue;
}
reports.add(report);
}
} catch (IOException e) {
logger.warn("Could not parse reports from " + filename, e);
}
try {
csvReader.close();
} catch (IOException e) {
logger.warn("Unable to close csv file " + filename, e);
}
return reports.toArray(new Report[reports.size()]);
}
class DateRangeChecker {
private Date start;
private Date end;
public DateRangeChecker(ReportQuery query) {
this.start = query.getStartDate();
this.end = query.getEndDate();
}
public boolean isInDateRange(Report report) {
if (start != null) {
if (start.after(report.getTimestamp())) {
return false;
}
}
if (end != null) {
if (end.before(report.getTimestamp())) {
return false;
}
}
return true;
}
}
class AgentChecker {
private Set<String> names;
public AgentChecker(ReportQuery query) {
String[] agentNames = query.getAgentNames();
if (agentNames != null) {
names = new HashSet<String>();
for (String name : agentNames) {
names.add(name);
}
}
}
public boolean isAgent(Report report) {
if (names != null) {
if (report.getAgentName() == null) {
return false;
}
if (!names.contains(report.getAgentName())) {
return false;
}
}
return true;
}
}
}
| DorsetProject/dorset-framework | components/reporters/file-reporter/src/main/java/edu/jhuapl/dorset/reporting/FileReporter.java | Java | apache-2.0 | 7,423 |
package com.example.android.sunshine.app;
import android.content.Context;
import android.database.Cursor;
import android.support.v4.widget.CursorAdapter;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
/**
* {@link ForecastAdapter} exposes a list of weather forecasts
* from a {@link Cursor} to a {@link android.widget.ListView}.
*/
public class ForecastAdapter extends CursorAdapter {
private static final int VIEW_TYPE_COUNT = 2;
private static final int VIEW_TYPE_TODAY = 0;
private static final int VIEW_TYPE_FUTURE_DAY = 1;
/**
* Cache of the children views for a forecast list item.
*/
public static class ViewHolder {
public final ImageView iconView;
public final TextView dateView;
public final TextView descriptionView;
public final TextView highTempView;
public final TextView lowTempView;
public ViewHolder(View view) {
iconView = (ImageView) view.findViewById(R.id.list_item_icon);
dateView = (TextView) view.findViewById(R.id.list_item_date_textview);
descriptionView = (TextView) view.findViewById(R.id.list_item_forecast_textview);
highTempView = (TextView) view.findViewById(R.id.list_item_high_textview);
lowTempView = (TextView) view.findViewById(R.id.list_item_low_textview);
}
}
public ForecastAdapter(Context context, Cursor c, int flags) {
super(context, c, flags);
}
@Override
public View newView(Context context, Cursor cursor, ViewGroup parent) {
// Choose the layout type
int viewType = getItemViewType(cursor.getPosition());
int layoutId = -1;
switch (viewType) {
case VIEW_TYPE_TODAY: {
layoutId = R.layout.list_item_forecast_today;
break;
}
case VIEW_TYPE_FUTURE_DAY: {
layoutId = R.layout.list_item_forecast;
break;
}
}
View view = LayoutInflater.from(context).inflate(layoutId, parent, false);
ViewHolder viewHolder = new ViewHolder(view);
view.setTag(viewHolder);
return view;
}
@Override
public void bindView(View view, Context context, Cursor cursor) {
ViewHolder viewHolder = (ViewHolder) view.getTag();
int viewType = getItemViewType(cursor.getPosition());
switch (viewType) {
case VIEW_TYPE_TODAY: {
// Get weather icon
viewHolder.iconView.setImageResource(Utility.getArtResourceForWeatherCondition(
cursor.getInt(ForecastFragment.COL_WEATHER_CONDITION_ID)));
break;
}
case VIEW_TYPE_FUTURE_DAY: {
// Get weather icon
viewHolder.iconView.setImageResource(Utility.getIconResourceForWeatherCondition(
cursor.getInt(ForecastFragment.COL_WEATHER_CONDITION_ID)));
break;
}
}
// Read date from cursor
long dateInMillis = cursor.getLong(ForecastFragment.COL_WEATHER_DATE);
// Find TextView and set formatted date on it
viewHolder.dateView.setText(Utility.getFriendlyDayString(context, dateInMillis));
// Read weather forecast from cursor
String description = cursor.getString(ForecastFragment.COL_WEATHER_DESC);
// Find TextView and set weather forecast on it
viewHolder.descriptionView.setText(description);
// Read user preference for metric or imperial temperature units
boolean isMetric = Utility.isMetric(context);
// Read high temperature from cursor
double high = cursor.getDouble(ForecastFragment.COL_WEATHER_MAX_TEMP);
viewHolder.highTempView.setText(Utility.formatTemperature(context, high, isMetric));
// Read low temperature from cursor
double low = cursor.getDouble(ForecastFragment.COL_WEATHER_MIN_TEMP);
viewHolder.lowTempView.setText(Utility.formatTemperature(context, low, isMetric));
}
@Override
public int getItemViewType(int position) {
return position == 0 ? VIEW_TYPE_TODAY : VIEW_TYPE_FUTURE_DAY;
}
@Override
public int getViewTypeCount() {
return VIEW_TYPE_COUNT;
}
} | josmarycarrero/MyForecastApp | app/src/main/java/com/example/android/sunshine/app/ForecastAdapter.java | Java | apache-2.0 | 4,420 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.explorer.navigation.qa;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.*;
import org.drools.guvnor.client.common.GenericCallback;
import org.drools.guvnor.client.common.LoadingPopup;
import org.drools.guvnor.client.common.PrettyFormLayout;
import org.drools.guvnor.client.messages.Constants;
import org.drools.guvnor.client.resources.DroolsGuvnorImageResources;
import org.drools.guvnor.client.resources.GuvnorImages;
import org.drools.guvnor.client.rpc.AnalysisReport;
import org.drools.guvnor.client.rpc.VerificationService;
import org.drools.guvnor.client.rpc.VerificationServiceAsync;
/**
* Viewer for, well, analysis !
*/
public class VerifierScreen extends Composite {
private final VerticalPanel layout = new VerticalPanel();
private final String packageUUID;
public VerifierScreen(String packageUUID,
String packageName) {
this.packageUUID = packageUUID;
PrettyFormLayout pf = new PrettyFormLayout();
VerticalPanel vert = new VerticalPanel();
String m = Constants.INSTANCE.AnalysingPackage( packageName );
vert.add( new HTML( m ) );
Button run = new Button( Constants.INSTANCE.RunAnalysis() );
run.addClickHandler( new ClickHandler() {
public void onClick(ClickEvent event) {
runAnalysis();
}
} );
vert.add( run );
pf.addHeader( GuvnorImages.INSTANCE.Analyze(),
vert );
layout.add( pf );
layout.add( new Label() );
layout.setWidth( "100%" );
initWidget( layout );
}
private void runAnalysis() {
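        // run the verifier on the server; when the callback returns, the placeholder label is replaced with the results widget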
LoadingPopup.showMessage( Constants.INSTANCE.AnalysingPackageRunning() );
VerificationServiceAsync verificationService = GWT.create( VerificationService.class );
verificationService.analysePackage( packageUUID,
new GenericCallback<AnalysisReport>() {
public void onSuccess(AnalysisReport rep) {
VerifierResultWidget w = new VerifierResultWidget( rep,
true );
w.setWidth( "100%" );
layout.remove( 1 );
layout.add( w );
LoadingPopup.close();
}
} );
}
}
| psiroky/guvnor | guvnor-webapp-drools/src/main/java/org/drools/guvnor/client/explorer/navigation/qa/VerifierScreen.java | Java | apache-2.0 | 3,097 |
package org.fusesource.process.fabric.commands;
import org.apache.felix.gogo.commands.Option;
import java.net.MalformedURLException;
import java.net.URL;
public abstract class ContainerInstallSupport extends ContainerProcessCommandSupport {
@Option(name="-c", aliases={"--controllerUrl"}, required = false, description = "The optional JSON document URL containing the controller configuration")
protected String controllerJson;
@Option(name="-k", aliases={"--kind"}, required = false, description = "The kind of controller to create")
protected String controllerKind;
protected URL getControllerURL() throws MalformedURLException {
URL controllerUrl = null;
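        // an explicit controller JSON URL wins; otherwise fall back to a bundled "<kind>.json", resolved through the "profile:" URL scheme (presumably registered by the Fabric runtime)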
if (controllerJson != null) {
controllerUrl = new URL(controllerJson);
} else if (controllerKind != null) {
String name = controllerKind + ".json";
controllerUrl = new URL("profile:" + name);
if (controllerUrl == null) {
throw new IllegalStateException("Cannot find controller kind: " + name + " on the classpath");
}
}
return controllerUrl;
}
}
| janstey/fuse | process/process-fabric/src/main/java/org/fusesource/process/fabric/commands/ContainerInstallSupport.java | Java | apache-2.0 | 1,150 |
/**
* Copyright 2009-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.submitted.cache;
import java.io.Reader;
import java.lang.reflect.Field;
import java.sql.Connection;
import org.apache.ibatis.annotations.CacheNamespace;
import org.apache.ibatis.annotations.Property;
import org.apache.ibatis.cache.Cache;
import org.apache.ibatis.cache.CacheException;
import org.apache.ibatis.annotations.CacheNamespaceRef;
import org.apache.ibatis.builder.BuilderException;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
// issue #524
public class CacheTest {
private static SqlSessionFactory sqlSessionFactory;
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Before
public void setUp() throws Exception {
// create a SqlSessionFactory
Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/cache/mybatis-config.xml");
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
reader.close();
// populate in-memory database
SqlSession session = sqlSessionFactory.openSession();
Connection conn = session.getConnection();
reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/cache/CreateDB.sql");
ScriptRunner runner = new ScriptRunner(conn);
runner.setLogWriter(null);
runner.runScript(reader);
reader.close();
session.close();
}
/*
* Test Plan:
* 1) SqlSession 1 executes "select * from A".
* 2) SqlSession 1 closes.
* 3) SqlSession 2 executes "delete from A where id = 1"
* 4) SqlSession 2 executes "select * from A"
*
* Assert:
* Step 4 returns 1 row. (This case fails when caching is enabled.)
*/
@Test
public void testplan1() {
SqlSession sqlSession1 = sqlSessionFactory.openSession(false);
try {
PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession1.close();
}
SqlSession sqlSession2 = sqlSessionFactory.openSession(false);
try {
PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
pm.delete(1);
Assert.assertEquals(1, pm.findAll().size());
} finally {
sqlSession2.commit();
sqlSession2.close();
}
}
/*
* Test Plan:
* 1) SqlSession 1 executes "select * from A".
* 2) SqlSession 1 closes.
* 3) SqlSession 2 executes "delete from A where id = 1"
* 4) SqlSession 2 executes "select * from A"
* 5) SqlSession 2 rollback
* 6) SqlSession 3 executes "select * from A"
*
* Assert:
* Step 6 returns 2 rows.
*/
@Test
public void testplan2() {
SqlSession sqlSession1 = sqlSessionFactory.openSession(false);
try {
PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession1.close();
}
SqlSession sqlSession2 = sqlSessionFactory.openSession(false);
try {
PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
pm.delete(1);
} finally {
sqlSession2.rollback();
sqlSession2.close();
}
SqlSession sqlSession3 = sqlSessionFactory.openSession(false);
try {
PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession3.close();
}
}
/*
* Test Plan with Autocommit on:
* 1) SqlSession 1 executes "select * from A".
* 2) SqlSession 1 closes.
* 3) SqlSession 2 executes "delete from A where id = 1"
* 4) SqlSession 2 closes.
   * 5) SqlSession 3 executes "select * from A".
* 6) SqlSession 3 closes.
*
* Assert:
   * Step 5 returns 1 row.
*/
@Test
public void testplan3() {
SqlSession sqlSession1 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession1.close();
}
SqlSession sqlSession2 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
pm.delete(1);
} finally {
sqlSession2.close();
}
SqlSession sqlSession3 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
Assert.assertEquals(1, pm.findAll().size());
} finally {
sqlSession3.close();
}
}
/*-
* Test case for #405
*
* Test Plan with Autocommit on:
* 1) SqlSession 1 executes "select * from A".
* 2) SqlSession 1 closes.
* 3) SqlSession 2 executes "insert into person (id, firstname, lastname) values (3, hello, world)"
* 4) SqlSession 2 closes.
* 5) SqlSession 3 executes "select * from A".
* 6) SqlSession 3 closes.
*
* Assert:
   * Step 5 returns 3 rows.
*/
@Test
public void shouldInsertWithOptionsFlushesCache() {
SqlSession sqlSession1 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession1.close();
}
SqlSession sqlSession2 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
Person p = new Person(3, "hello", "world");
pm.createWithOptions(p);
} finally {
sqlSession2.close();
}
SqlSession sqlSession3 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
Assert.assertEquals(3, pm.findAll().size());
} finally {
sqlSession3.close();
}
}
/*-
* Test Plan with Autocommit on:
* 1) SqlSession 1 executes select to cache result
* 2) SqlSession 1 closes.
* 3) SqlSession 2 executes insert without flushing cache
* 4) SqlSession 2 closes.
* 5) SqlSession 3 executes select (flushCache = false)
* 6) SqlSession 3 closes.
* 7) SqlSession 4 executes select (flushCache = true)
* 8) SqlSession 4 closes.
*
* Assert:
   * Step 5 returns 2 rows.
   * Step 7 returns 3 rows.
*/
@Test
public void shouldApplyFlushCacheOptions() {
SqlSession sqlSession1 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession1.close();
}
SqlSession sqlSession2 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
Person p = new Person(3, "hello", "world");
pm.createWithoutFlushCache(p);
} finally {
sqlSession2.close();
}
SqlSession sqlSession3 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession3.close();
}
SqlSession sqlSession4 = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession4.getMapper(PersonMapper.class);
Assert.assertEquals(3, pm.findWithFlushCache().size());
} finally {
sqlSession4.close();
}
}
@Test
public void shouldApplyCacheNamespaceRef() {
{
SqlSession sqlSession = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
Person p = new Person(3, "hello", "world");
pm.createWithoutFlushCache(p);
} finally {
sqlSession.close();
}
}
{
SqlSession sqlSession = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession.getMapper(PersonMapper.class);
Assert.assertEquals(2, pm.findAll().size());
} finally {
sqlSession.close();
}
}
{
SqlSession sqlSession = sqlSessionFactory.openSession(true);
try {
ImportantPersonMapper pm = sqlSession.getMapper(ImportantPersonMapper.class);
Assert.assertEquals(3, pm.findWithFlushCache().size());
} finally {
sqlSession.close();
}
}
{
SqlSession sqlSession = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession.getMapper(PersonMapper.class);
Assert.assertEquals(3, pm.findAll().size());
Person p = new Person(4, "foo", "bar");
pm.createWithoutFlushCache(p);
} finally {
sqlSession.close();
}
}
{
SqlSession sqlSession = sqlSessionFactory.openSession(true);
try {
SpecialPersonMapper pm = sqlSession.getMapper(SpecialPersonMapper.class);
Assert.assertEquals(4, pm.findWithFlushCache().size());
} finally {
sqlSession.close();
}
}
{
SqlSession sqlSession = sqlSessionFactory.openSession(true);
try {
PersonMapper pm = sqlSession.getMapper(PersonMapper.class);
Assert.assertEquals(4, pm.findAll().size());
} finally {
sqlSession.close();
}
}
}
@Test
public void shouldApplyCustomCacheProperties() {
CustomCache customCache = unwrap(sqlSessionFactory.getConfiguration().getCache(CustomCacheMapper.class.getName()));
Assert.assertEquals("bar", customCache.getStringValue());
Assert.assertEquals(1, customCache.getIntegerValue().intValue());
Assert.assertEquals(2, customCache.getIntValue());
Assert.assertEquals(3, customCache.getLongWrapperValue().longValue());
Assert.assertEquals(4, customCache.getLongValue());
Assert.assertEquals(5, customCache.getShortWrapperValue().shortValue());
Assert.assertEquals(6, customCache.getShortValue());
Assert.assertEquals((float) 7.1, customCache.getFloatWrapperValue(), 0);
Assert.assertEquals((float)8.1, customCache.getFloatValue(), 0);
Assert.assertEquals(9.01, customCache.getDoubleWrapperValue(), 0);
Assert.assertEquals(10.01, customCache.getDoubleValue(), 0);
Assert.assertEquals((byte)11, customCache.getByteWrapperValue().byteValue());
Assert.assertEquals((byte)12, customCache.getByteValue());
Assert.assertEquals(true, customCache.getBooleanWrapperValue());
Assert.assertEquals(true, customCache.isBooleanValue());
}
@Test
public void shouldErrorUnsupportedProperties() {
expectedException.expect(CacheException.class);
expectedException.expectMessage("Unsupported property type for cache: 'date' of type class java.util.Date");
sqlSessionFactory.getConfiguration().addMapper(CustomCacheUnsupportedPropertyMapper.class);
}
@Test
public void shouldErrorInvalidCacheNamespaceRefAttributesSpecifyBoth() {
expectedException.expect(BuilderException.class);
expectedException.expectMessage("Cannot use both value() and name() attribute in the @CacheNamespaceRef");
sqlSessionFactory.getConfiguration().getMapperRegistry()
.addMapper(InvalidCacheNamespaceRefBothMapper.class);
}
@Test
public void shouldErrorInvalidCacheNamespaceRefAttributesIsEmpty() {
expectedException.expect(BuilderException.class);
expectedException.expectMessage("Should be specified either value() or name() attribute in the @CacheNamespaceRef");
sqlSessionFactory.getConfiguration().getMapperRegistry()
.addMapper(InvalidCacheNamespaceRefEmptyMapper.class);
}
private CustomCache unwrap(Cache cache){
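    // the configured cache is wrapped in decorators, so reflectively pull the underlying CustomCache out of the 'delegate' field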
Field field;
try {
field = cache.getClass().getDeclaredField("delegate");
} catch (NoSuchFieldException e) {
throw new IllegalStateException(e);
}
try {
field.setAccessible(true);
return (CustomCache)field.get(cache);
} catch (IllegalAccessException e) {
throw new IllegalStateException(e);
} finally {
field.setAccessible(false);
}
}
@CacheNamespace(implementation = CustomCache.class, properties = {
@Property(name = "date", value = "2016/11/21")
})
private interface CustomCacheUnsupportedPropertyMapper {
}
@CacheNamespaceRef(value = PersonMapper.class, name = "org.apache.ibatis.submitted.cache.PersonMapper")
private interface InvalidCacheNamespaceRefBothMapper {
}
@CacheNamespaceRef
private interface InvalidCacheNamespaceRefEmptyMapper {
}
}
| qiulim/myBatis-framework-4eclipse | src/test/java/org/apache/ibatis/submitted/cache/CacheTest.java | Java | apache-2.0 | 13,252 |
/**********************************************************************************
* $URL:https://source.sakaiproject.org/svn/osp/trunk/jsf/widgets/src/java/org/theospi/jsf/component/TestComponent.java $
* $Id:TestComponent.java 9134 2006-05-08 20:28:42Z [email protected] $
***********************************************************************************
*
* Copyright (c) 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.theospi.jsf.component;
import javax.faces.component.UIComponent;
import javax.faces.component.UIOutput;
/**
* Created by IntelliJ IDEA.
* User: John Ellis
* Date: Dec 28, 2005
* Time: 12:01:59 PM
* To change this template use File | Settings | File Templates.
*/
public class TestComponent extends UIOutput {
private UIComponent layoutRoot = null;
public TestComponent() {
super();
this.setRendererType("org.theospi.TestComponent");
}
public UIComponent getLayoutRoot() {
return layoutRoot;
}
public void setLayoutRoot(UIComponent layoutRoot) {
this.layoutRoot = layoutRoot;
}
}
| eemirtekin/Sakai-10.6-TR | osp/jsf/widgets/src/java/org/theospi/jsf/component/TestComponent.java | Java | apache-2.0 | 1,728 |
/**
* Copyright 2016 Pinterest, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.deployservice.scm;
import com.pinterest.deployservice.bean.CommitBean;
import com.pinterest.deployservice.common.HTTPClient;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
public class GithubManager extends BaseManager {
public final static String TYPE = "Github";
private final static String UNKNOWN_LOGIN = "UNKNOWN";
private String apiPrefix;
private String urlPrefix;
private Map<String, String> headers = new HashMap<String, String>();
public GithubManager(String token, String apiPrefix, String urlPrefix) {
this.apiPrefix = apiPrefix;
this.urlPrefix = urlPrefix;
headers.put("Authorization", String.format("token %s", token));
}
private String getSha(Map<String, Object> jsonMap) {
return (String) jsonMap.get("sha");
}
private String getLogin(Map<String, Object> jsonMap) {
Map<String, Object> authorMap = (Map<String, Object>) jsonMap.get("author");
if (authorMap != null) {
return (String) authorMap.get("login");
}
return UNKNOWN_LOGIN;
}
private long getDate(Map<String, Object> jsonMap) {
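        // the committer date comes back as an ISO-8601 timestamp without millis (e.g. "2016-01-01T12:00:00Z"); convert it to epoch millis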
Map<String, Object> commiterMap = (Map<String, Object>) jsonMap.get("committer");
String dateGMTStr = (String) commiterMap.get("date");
DateTimeFormatter parser = ISODateTimeFormat.dateTimeNoMillis();
DateTime dt = parser.parseDateTime(dateGMTStr);
return dt.getMillis();
}
private String getMessage(Map<String, Object> jsonMap) {
return (String) jsonMap.get("message");
}
private CommitBean toCommitBean(Map<String, Object> jsonMap, String repo) {
CommitBean CommitBean = new CommitBean();
String sha = getSha(jsonMap);
CommitBean.setSha(sha);
CommitBean.setAuthor(getLogin(jsonMap));
Map<String, Object> commitMap = (Map<String, Object>) jsonMap.get("commit");
CommitBean.setDate(getDate(commitMap));
String message = getMessage(commitMap);
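        // the first line of the commit message becomes the title, the remainder (if any) the body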
String[] parts = message.split("\n", 2);
CommitBean.setTitle(parts[0]);
if (parts.length > 1) {
CommitBean.setMessage(parts[1]);
}
CommitBean.setInfo(generateCommitLink(repo, sha));
return CommitBean;
}
@Override
public String generateCommitLink(String repo, String sha) {
return String.format("%s/%s/commit/%s", urlPrefix, repo, sha);
}
@Override
public String getCommitLinkTemplate() {
return String.format("%s/%%s/commit/%%s", urlPrefix);
}
@Override
public String getUrlPrefix() {
return urlPrefix;
}
@Override
public String getType() {
return TYPE;
}
@Override
public CommitBean getCommit(String repo, String sha) throws Exception {
HTTPClient httpClient = new HTTPClient();
String url = String.format("%s/repos/%s/commits/%s", apiPrefix, repo, sha);
// TODO: Do not RETRY since it will timeout the thrift caller, need to revisit
String jsonPayload = httpClient.get(url, null, null, headers, 1);
GsonBuilder builder = new GsonBuilder();
Map<String, Object>
jsonMap =
builder.create().fromJson(jsonPayload, new TypeToken<HashMap<String, Object>>() {
}.getType());
return toCommitBean(jsonMap, repo);
}
@Override
public Queue<CommitBean> getCommits(String repo, String startSha, boolean keepHead, String path)
throws Exception {
HTTPClient httpClient = new HTTPClient();
String url = String.format("%s/repos/%s/commits", apiPrefix, repo);
// TODO: Do not RETRY since it will timeout the thrift caller, need to revisit
Map<String, String> params = new HashMap<String, String>();
params.put("sha", startSha);
String jsonPayload = httpClient.get(url, null, params, headers, 1);
Queue<CommitBean> CommitBeans = new LinkedList<CommitBean>();
GsonBuilder builder = new GsonBuilder();
Map<String, Object>[]
jsonMaps =
builder.create().fromJson(jsonPayload, new TypeToken<HashMap<String, Object>[]>() {
}.getType());
for (Map<String, Object> jsonMap : jsonMaps) {
if (!keepHead) {
keepHead = true;
continue;
}
CommitBeans.offer(toCommitBean(jsonMap, repo));
}
return CommitBeans;
}
}
| lilida/teletraan | deploy-service/common/src/main/java/com/pinterest/deployservice/scm/GithubManager.java | Java | apache-2.0 | 5,338 |
package org.drools.core;
import org.drools.core.time.impl.DefaultTimerJobFactoryManager;
import org.drools.core.time.impl.TimerJobFactoryManager;
import org.drools.core.time.impl.TrackableTimeJobFactoryManager;
public enum TimerJobFactoryType {
DEFUALT("default") {
public TimerJobFactoryManager createInstance() {
return DefaultTimerJobFactoryManager.instance;
}
},
TRACKABLE("trackable") {
public TimerJobFactoryManager createInstance() {
return new TrackableTimeJobFactoryManager();
}
},
JPA("jpa") {
public TimerJobFactoryManager createInstance() {
try {
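                // loaded reflectively by name: the JPA-backed implementation lives in the separate persistence module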
return (TimerJobFactoryManager)Class.forName("org.drools.persistence.jpa.JpaTimeJobFactoryManager").newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
};
public abstract TimerJobFactoryManager createInstance();
private final String string;
TimerJobFactoryType( String string ) {
this.string = string;
}
public String toExternalForm() {
return this.string;
}
public String toString() {
return this.string;
}
public String getId() {
return this.string;
}
public static TimerJobFactoryType resolveTimerJobFactoryType( String id ) {
if( TRACKABLE.getId().equalsIgnoreCase( id ) ) {
return TRACKABLE;
} else if( DEFUALT.getId().equalsIgnoreCase( id ) ) {
return DEFUALT;
} else if( JPA.getId().equalsIgnoreCase( id ) ) {
return JPA;
}
throw new IllegalArgumentException( "Illegal enum value '" + id + "' for TimerJobFactoryType" );
}
}
| bxf12315/drools | drools-core/src/main/java/org/drools/core/TimerJobFactoryType.java | Java | apache-2.0 | 1,744 |
package net.greghaines.jesque.utils;
import java.util.Collections;
import java.util.HashSet;
import net.greghaines.jesque.Config;
import net.greghaines.jesque.ConfigBuilder;
import net.greghaines.jesque.utils.PoolUtils.PoolWork;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JUnit4Mockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisSentinelPool;
import redis.clients.util.Pool;
public class TestPoolUtils {
private Mockery mockCtx;
private Pool<String> pool;
private PoolWork<String,String> work;
@SuppressWarnings("unchecked")
@Before
public void setUp() {
this.mockCtx = new JUnit4Mockery();
this.mockCtx.setImposteriser(ClassImposteriser.INSTANCE);
this.pool = this.mockCtx.mock(Pool.class);
this.work = this.mockCtx.mock(PoolWork.class);
}
@Test(expected = IllegalArgumentException.class)
public void testDoWorkInPool_NullPool() throws Exception {
PoolUtils.doWorkInPool(null, this.work);
}
@Test(expected = IllegalArgumentException.class)
public void testDoWorkInPool_NullWork() throws Exception {
PoolUtils.doWorkInPool(this.pool, null);
}
@Test
public void testDoWorkInPool() throws Exception {
final String resource = "foo";
final String result = "bar";
this.mockCtx.checking(new Expectations(){{
oneOf(pool).getResource(); will(returnValue(resource));
oneOf(work).doWork(resource); will(returnValue(result));
oneOf(pool).returnResource(resource);
}});
Assert.assertEquals(result, PoolUtils.doWorkInPool(this.pool, this.work));
}
@Test
public void testDoWorkInPoolNicely() throws Exception {
final String resource = "foo";
final String result = "bar";
this.mockCtx.checking(new Expectations(){{
oneOf(pool).getResource(); will(returnValue(resource));
oneOf(work).doWork(resource); will(returnValue(result));
oneOf(pool).returnResource(resource);
}});
Assert.assertEquals(result, PoolUtils.doWorkInPoolNicely(this.pool, this.work));
}
@Test(expected = RuntimeException.class)
public void testDoWorkInPoolNicely_ThrowRuntimeEx() throws Exception {
final String resource = "foo";
final RuntimeException rte = new RuntimeException("foo");
this.mockCtx.checking(new Expectations(){{
oneOf(pool).getResource(); will(returnValue(resource));
oneOf(work).doWork(resource); will(throwException(rte));
oneOf(pool).returnResource(resource);
}});
PoolUtils.doWorkInPoolNicely(this.pool, this.work);
}
@Test(expected = RuntimeException.class)
public void testDoWorkInPoolNicely_ThrowEx() throws Exception {
final String resource = "foo";
final Exception ex = new Exception("foo");
this.mockCtx.checking(new Expectations(){{
oneOf(pool).getResource(); will(returnValue(resource));
oneOf(work).doWork(resource); will(throwException(ex));
oneOf(pool).returnResource(resource);
}});
PoolUtils.doWorkInPoolNicely(this.pool, this.work);
}
@Test
public void testGetDefaultPoolConfig() {
Assert.assertNotNull(PoolUtils.getDefaultPoolConfig());
}
@Test(expected = IllegalArgumentException.class)
public void testCreateJedisPool_NullConfig() {
PoolUtils.createJedisPool(null);
}
@Test
public void testCreateJedisPool() {
final Config config = new ConfigBuilder().build();
final Pool<Jedis> pool = PoolUtils.createJedisPool(config);
Assert.assertNotNull(pool);
Assert.assertTrue(pool instanceof JedisPool);
}
/**
* This will need a sentinel running with the following config
*
* sentinel monitor mymaster 127.0.0.1 6379 1
* sentinel down-after-milliseconds mymaster 6000
* sentinel failover-timeout mymaster 180000
* sentinel parallel-syncs mymaster 1
*
* You should also have a redis-server running to act as the master [mymaster]
*/
@Test
@Ignore("Will only work with sentinel running and travis-ci sentinel support is sketchy at best")
public void testCreateJedisSentinelPool() {
final Config config = new ConfigBuilder().withMasterName("mymaster")
.withSentinels(new HashSet<>(Collections.singletonList("localhost:26379"))).build();
final Pool<Jedis> pool = PoolUtils.createJedisPool(config);
Assert.assertNotNull(pool);
Assert.assertTrue(pool instanceof JedisSentinelPool);
}
@Test(expected = IllegalArgumentException.class)
public void testCreateJedisPool_NullPoolConfig() {
final Config config = new ConfigBuilder().build();
PoolUtils.createJedisPool(config, null);
}
}
| shrayasr/jesque | src/test/java/net/greghaines/jesque/utils/TestPoolUtils.java | Java | apache-2.0 | 5,133 |
package selenium;
import framework.BaseTestClassForBaseJunitTest;
import org.junit.Assert;
import org.junit.Test;
public class JunitBaseTest extends BaseTestClassForBaseJunitTest {
@Test
public void test(){
Assert.assertTrue(true);
}
}
| itaymendel/taurus | examples/selenium/maven-project/src/test/java/selenium/JunitBaseTest.java | Java | apache-2.0 | 258 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.examples.bpmn.servicetask;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.Expression;
import org.flowable.engine.delegate.JavaDelegate;
/**
 * Service task delegate used to verify setter-based injection: the engine is expected to
 * inject the {@code text} expression through {@link #setText(Expression)}, and
 * {@link #execute(DelegateExecution)} fails fast if the setter was never invoked.
 *
 * @author Frederik Heremans
 */
public class ToUpperCaseSetterInjected implements JavaDelegate {
private Expression text;
private boolean setterInvoked;
public void execute(DelegateExecution execution) {
if (!setterInvoked) {
throw new RuntimeException("Setter was not invoked");
}
execution.setVariable("setterVar", ((String) text.getValue(execution)).toUpperCase());
}
public void setText(Expression text) {
setterInvoked = true;
this.text = text;
}
}
| robsoncardosoti/flowable-engine | modules/flowable-engine/src/test/java/org/flowable/examples/bpmn/servicetask/ToUpperCaseSetterInjected.java | Java | apache-2.0 | 1,305 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.services.ejb.test;
import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ejb.EJB;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jbpm.kie.services.impl.KModuleDeploymentUnit;
import org.jbpm.services.api.RuntimeDataService.EntryType;
import org.jbpm.services.api.model.DeploymentUnit;
import org.jbpm.services.api.model.NodeInstanceDesc;
import org.jbpm.services.api.model.ProcessDefinition;
import org.jbpm.services.api.model.ProcessInstanceDesc;
import org.jbpm.services.api.model.UserTaskInstanceDesc;
import org.jbpm.services.api.model.VariableDesc;
import org.jbpm.services.ejb.api.DeploymentServiceEJBLocal;
import org.jbpm.services.ejb.api.ProcessServiceEJBLocal;
import org.jbpm.services.ejb.api.RuntimeDataServiceEJBLocal;
import org.jbpm.services.ejb.api.UserTaskServiceEJBLocal;
import org.jbpm.services.ejb.impl.tx.TransactionalCommandServiceEJBImpl;
import org.jbpm.shared.services.impl.TransactionalCommandService;
import org.jbpm.shared.services.impl.commands.UpdateStringCommand;
import org.jbpm.workflow.instance.impl.WorkflowProcessInstanceImpl;
import org.jbpm.workflow.instance.node.WorkItemNodeInstance;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.api.KieServices;
import org.kie.api.builder.ReleaseId;
import org.kie.api.runtime.process.NodeInstance;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.query.QueryContext;
import org.kie.api.task.model.Status;
import org.kie.api.task.model.TaskSummary;
import org.kie.internal.KieInternalServices;
import org.kie.internal.process.CorrelationKey;
import org.kie.internal.query.QueryFilter;
import org.kie.internal.task.api.AuditTask;
import org.kie.internal.task.api.model.TaskEvent;
import org.kie.scanner.KieMavenRepository;
import static org.junit.Assert.*;
import static org.kie.scanner.KieMavenRepository.getKieMavenRepository;
@RunWith(Arquillian.class)
public class RuntimeDataServiceEJBIntegrationTest extends AbstractTestSupport {
@Deployment
public static WebArchive createDeployment() {
File archive = new File("target/sample-war-ejb-app.war");
if (!archive.exists()) {
throw new IllegalStateException("There is no archive yet generated, run maven build or mvn assembly:assembly");
}
WebArchive war = ShrinkWrap.createFromZipFile(WebArchive.class, archive);
war.addPackage("org.jbpm.services.ejb.test"); // test cases
// deploy test kjar
deployKjar();
return war;
}
private Long processInstanceId = null;
private KModuleDeploymentUnit deploymentUnit = null;
@Before
public void prepare() {
assertNotNull(deploymentService);
deploymentUnit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);
deploymentService.deploy(deploymentUnit);
units.add(deploymentUnit);
assertNotNull(processService);
}
protected static void deployKjar() {
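        // Build an in-memory kjar containing the three test processes and install it into the local Kie maven
        // repository so the test deployment unit can resolve it.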
KieServices ks = KieServices.Factory.get();
ReleaseId releaseId = ks.newReleaseId(GROUP_ID, ARTIFACT_ID, VERSION);
List<String> processes = new ArrayList<String>();
processes.add("processes/EmptyHumanTask.bpmn");
processes.add("processes/humanTask.bpmn");
processes.add("processes/SimpleHTProcess.bpmn2");
InternalKieModule kJar1 = createKieJar(ks, releaseId, processes);
File pom = new File("target/kmodule", "pom.xml");
pom.getParentFile().mkdir();
try {
FileOutputStream fs = new FileOutputStream(pom);
fs.write(getPom(releaseId).getBytes());
fs.close();
        } catch (Exception e) {
            // ignored: if the pom cannot be written, installing the kjar below will fail and surface the problem
        }
KieMavenRepository repository = getKieMavenRepository();
repository.installArtifact(releaseId, kJar1, pom);
}
private List<DeploymentUnit> units = new ArrayList<DeploymentUnit>();
@After
public void cleanup() {
if (processInstanceId != null) {
// let's abort process instance to leave the system in clear state
processService.abortProcessInstance(processInstanceId);
ProcessInstance pi = processService.getProcessInstance(processInstanceId);
assertNull(pi);
}
int deleted = 0;
deleted += commandService.execute(new UpdateStringCommand("delete from NodeInstanceLog nid"));
deleted += commandService.execute(new UpdateStringCommand("delete from ProcessInstanceLog pid"));
deleted += commandService.execute(new UpdateStringCommand("delete from VariableInstanceLog vsd"));
deleted += commandService.execute(new UpdateStringCommand("delete from AuditTaskImpl vsd"));
System.out.println("Deleted " + deleted);
cleanupSingletonSessionId();
if (units != null && !units.isEmpty()) {
for (DeploymentUnit unit : units) {
deploymentService.undeploy(unit);
}
units.clear();
}
}
@EJB
private DeploymentServiceEJBLocal deploymentService;
@EJB
private ProcessServiceEJBLocal processService;
@EJB
private RuntimeDataServiceEJBLocal runtimeDataService;
@EJB(beanInterface=TransactionalCommandServiceEJBImpl.class)
private TransactionalCommandService commandService;
@EJB
private UserTaskServiceEJBLocal userTaskService;
@Test
public void testGetProcessByDeploymentId() {
Collection<ProcessDefinition> definitions = runtimeDataService.getProcessesByDeploymentId(deploymentUnit.getIdentifier(), new QueryContext());
assertNotNull(definitions);
assertEquals(3, definitions.size());
List<String> expectedProcessIds = new ArrayList<String>();
expectedProcessIds.add("org.jbpm.writedocument.empty");
expectedProcessIds.add("org.jbpm.writedocument");
expectedProcessIds.add("org.jboss.qa.bpms.HumanTask");
for (ProcessDefinition def : definitions) {
assertTrue(expectedProcessIds.contains(def.getId()));
}
}
@Test
public void testGetProcessByDeploymentIdAndProcessId() {
ProcessDefinition definition = runtimeDataService
.getProcessesByDeploymentIdProcessId(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(definition);
assertEquals("org.jbpm.writedocument", definition.getId());
}
@Test
public void testGetProcessByFilter() {
Collection<ProcessDefinition> definitions = runtimeDataService.getProcessesByFilter("org.jbpm", new QueryContext());
assertNotNull(definitions);
assertEquals(2, definitions.size());
List<String> expectedProcessIds = new ArrayList<String>();
expectedProcessIds.add("org.jbpm.writedocument.empty");
expectedProcessIds.add("org.jbpm.writedocument");
for (ProcessDefinition def : definitions) {
assertTrue(expectedProcessIds.contains(def.getId()));
}
}
@Test
public void testGetProcessByProcessId() {
Collection<ProcessDefinition> definition = runtimeDataService.getProcessesById("org.jbpm.writedocument");
assertNotNull(definition);
assertEquals(1, definition.size());
assertEquals("org.jbpm.writedocument", definition.iterator().next().getId());
}
@Test
public void testGetProcesses() {
Collection<ProcessDefinition> definitions = runtimeDataService.getProcesses(new QueryContext());
assertNotNull(definitions);
assertEquals(3, definitions.size());
List<String> expectedProcessIds = new ArrayList<String>();
expectedProcessIds.add("org.jbpm.writedocument.empty");
expectedProcessIds.add("org.jbpm.writedocument");
expectedProcessIds.add("org.jboss.qa.bpms.HumanTask");
for (ProcessDefinition def : definitions) {
assertTrue(expectedProcessIds.contains(def.getId()));
}
}
@Test
public void testGetProcessIds() {
Collection<String> definitions = runtimeDataService.getProcessIds(deploymentUnit.getIdentifier(), new QueryContext());
assertNotNull(definitions);
assertEquals(3, definitions.size());
assertTrue(definitions.contains("org.jbpm.writedocument.empty"));
assertTrue(definitions.contains("org.jbpm.writedocument"));
assertTrue(definitions.contains("org.jboss.qa.bpms.HumanTask"));
}
@Test
public void testGetProcessInstances() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(1, (int)instances.iterator().next().getState());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(3, (int)instances.iterator().next().getState());
}
@Test
public void testGetProcessInstancesByState() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for aborted only
states.add(3);
instances = runtimeDataService.getProcessInstances(states, null, new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstances(states, null, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(3, (int)instances.iterator().next().getState());
}
@Test
public void testGetProcessInstancesByStateAndInitiator() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for active only
states.add(1);
instances = runtimeDataService.getProcessInstances(states, "anonymous", new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(1, (int)instances.iterator().next().getState());
instances = runtimeDataService.getProcessInstances(states, "wrongUser", new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstances(states, "anonymous", new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
}
@Test
public void testGetProcessInstancesByDeploymentIdAndState() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for aborted only
states.add(3);
instances = runtimeDataService.getProcessInstancesByDeploymentId(deploymentUnit.getIdentifier(), states, new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByDeploymentId(deploymentUnit.getIdentifier(), states, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(3, (int)instances.iterator().next().getState());
}
@Test
public void testGetProcessInstancesByProcessId() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
instances = runtimeDataService.getProcessInstancesByProcessDefinition("org.jbpm.writedocument", new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
ProcessInstanceDesc instance = instances.iterator().next();
assertEquals(1, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByProcessDefinition("org.jbpm.writedocument", new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
instance = instances.iterator().next();
assertEquals(3, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
}
@Test
public void testGetProcessInstancesByProcessIdAndStatus() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
Long processInstanceIdToAbort = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
List<Integer> statuses = new ArrayList<Integer>();
statuses.add(ProcessInstance.STATE_ACTIVE);
instances = runtimeDataService.getProcessInstancesByProcessDefinition("org.jbpm.writedocument", statuses, new QueryContext());
assertNotNull(instances);
assertEquals(2, instances.size());
for (ProcessInstanceDesc instance : instances) {
assertEquals(ProcessInstance.STATE_ACTIVE, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
}
processService.abortProcessInstance(processInstanceIdToAbort);
instances = runtimeDataService.getProcessInstancesByProcessDefinition("org.jbpm.writedocument", statuses, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
ProcessInstanceDesc instance2 = instances.iterator().next();
assertEquals(ProcessInstance.STATE_ACTIVE, (int)instance2.getState());
assertEquals("org.jbpm.writedocument", instance2.getProcessId());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
statuses.clear();
statuses.add(ProcessInstance.STATE_ABORTED);
instances = runtimeDataService.getProcessInstancesByProcessDefinition("org.jbpm.writedocument", statuses, new QueryContext());
assertNotNull(instances);
assertEquals(2, instances.size());
for (ProcessInstanceDesc instance : instances) {
assertEquals(ProcessInstance.STATE_ABORTED, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
}
}
@Test
public void testGetProcessInstanceById() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
ProcessInstanceDesc instance = runtimeDataService.getProcessInstanceById(processInstanceId);
assertNotNull(instance);
assertEquals(1, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
processService.abortProcessInstance(processInstanceId);
instance = runtimeDataService.getProcessInstanceById(processInstanceId);
processInstanceId = null;
assertNotNull(instance);
assertEquals(3, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
}
@Test
public void testGetProcessInstanceByCorrelationKey() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
CorrelationKey key = KieInternalServices.Factory.get().newCorrelationKeyFactory().newCorrelationKey("my business key");
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", key);
assertNotNull(processInstanceId);
ProcessInstanceDesc instance = runtimeDataService.getProcessInstanceByCorrelationKey(key);
assertNotNull(instance);
assertEquals(1, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
assertEquals("my business key", instance.getCorrelationKey());
List<UserTaskInstanceDesc> tasks = instance.getActiveTasks();
assertNotNull(tasks);
assertEquals(1, tasks.size());
UserTaskInstanceDesc activeTask = tasks.get(0);
assertNotNull(activeTask);
assertEquals(Status.Reserved.name(), activeTask.getStatus());
assertEquals(instance.getId(), activeTask.getProcessInstanceId());
assertEquals(instance.getProcessId(), activeTask.getProcessId());
assertEquals("Write a Document", activeTask.getName());
assertEquals("salaboy", activeTask.getActualOwner());
assertEquals(deploymentUnit.getIdentifier(), activeTask.getDeploymentId());
processService.abortProcessInstance(processInstanceId);
instance = runtimeDataService.getProcessInstanceByCorrelationKey(key);
processInstanceId = null;
assertNull(instance);
}
@Test
public void testGetProcessInstancesByCorrelationKey() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
CorrelationKey key = KieInternalServices.Factory.get().newCorrelationKeyFactory().newCorrelationKey("my business key");
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", key);
assertNotNull(processInstanceId);
Collection<ProcessInstanceDesc> keyedInstances = runtimeDataService.getProcessInstancesByCorrelationKey(key, new QueryContext());
assertNotNull(keyedInstances);
assertEquals(1, keyedInstances.size());
ProcessInstanceDesc instance = keyedInstances.iterator().next();
assertNotNull(instance);
assertEquals(1, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
assertEquals("my business key", instance.getCorrelationKey());
List<UserTaskInstanceDesc> tasks = instance.getActiveTasks();
assertNull(tasks);
processService.abortProcessInstance(processInstanceId);
instance = runtimeDataService.getProcessInstanceByCorrelationKey(key);
processInstanceId = null;
assertNull(instance);
keyedInstances = runtimeDataService.getProcessInstancesByCorrelationKey(key, new QueryContext());
assertNotNull(keyedInstances);
assertEquals(1, keyedInstances.size());
instance = keyedInstances.iterator().next();
assertEquals(3, (int)instance.getState());
assertEquals("org.jbpm.writedocument", instance.getProcessId());
assertEquals("my business key", instance.getCorrelationKey());
}
@Test
public void testGetProcessInstancesByProcessIdAndState() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for aborted only
states.add(3);
instances = runtimeDataService.getProcessInstancesByProcessId(states, "org.jbpm.writedocument", null, new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByProcessId(states, "org.jbpm.writedocument", null, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(3, (int)instances.iterator().next().getState());
}
@Test
public void testGetProcessInstancesByPartialProcessIdAndState() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for aborted only
states.add(3);
instances = runtimeDataService.getProcessInstancesByProcessId(states, "org.jbpm%", null, new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByProcessId(states, "org.jbpm%", null, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(3, (int)instances.iterator().next().getState());
}
@Test
public void testGetProcessInstancesByProcessIdAndStateAndInitiator() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for active only
states.add(1);
instances = runtimeDataService.getProcessInstancesByProcessId(states, "org.jbpm.writedocument", "anonymous", new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(1, (int)instances.iterator().next().getState());
instances = runtimeDataService.getProcessInstancesByProcessId(states, "org.jbpm.writedocument", "wrongUser", new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByProcessId(states, "org.jbpm.writedocument", "anonymous", new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
}
@Test
public void testGetProcessInstancesByProcessNameAndState() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for aborted only
states.add(3);
instances = runtimeDataService.getProcessInstancesByProcessName(states, "humanTaskSample", null, new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByProcessName(states, "humanTaskSample", null, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(3, (int)instances.iterator().next().getState());
}
@Test
public void testGetProcessInstancesByPartialProcessNameAndState() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for aborted only
states.add(3);
instances = runtimeDataService.getProcessInstancesByProcessName(states, "human%", null, new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByProcessName(states, "human%", null, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(3, (int)instances.iterator().next().getState());
}
@Test
public void testGetProcessInstancesByProcessNameAndStateAndInitiator() {
Collection<ProcessInstanceDesc> instances = runtimeDataService.getProcessInstances(new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
List<Integer> states = new ArrayList<Integer>();
// search for active only
states.add(1);
instances = runtimeDataService.getProcessInstancesByProcessName(states, "humanTaskSample", "anonymous", new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
assertEquals(1, (int)instances.iterator().next().getState());
instances = runtimeDataService.getProcessInstancesByProcessName(states, "humanTaskSample", "wrongUser", new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
instances = runtimeDataService.getProcessInstancesByProcessName(states, "humanTaskSample", "anonymous", new QueryContext());
assertNotNull(instances);
assertEquals(0, instances.size());
}
@Test
public void testGetProcessInstanceHistory() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
// get active nodes as history view
Collection<NodeInstanceDesc> instances = runtimeDataService.getProcessInstanceHistoryActive(processInstanceId, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
// get completed nodes as history view
instances = runtimeDataService.getProcessInstanceHistoryCompleted(processInstanceId, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
// get both active and completed nodes as history view
instances = runtimeDataService.getProcessInstanceFullHistory(processInstanceId, new QueryContext());
assertNotNull(instances);
assertEquals(3, instances.size());
// get nodes filtered by type - start
instances = runtimeDataService.getProcessInstanceFullHistoryByType(processInstanceId, EntryType.START, new QueryContext());
assertNotNull(instances);
assertEquals(2, instances.size());
// get nodes filtered by type - end
instances = runtimeDataService.getProcessInstanceFullHistoryByType(processInstanceId, EntryType.END, new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
}
@Test
public void testGetNodeInstanceForWorkItem() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
ProcessInstance instance = processService.getProcessInstance(processInstanceId);
assertNotNull(instance);
Collection<NodeInstance> activeNodes = ((WorkflowProcessInstanceImpl) instance).getNodeInstances();
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
NodeInstance node = activeNodes.iterator().next();
assertNotNull(node);
assertTrue(node instanceof WorkItemNodeInstance);
Long workItemId = ((WorkItemNodeInstance) node).getWorkItemId();
assertNotNull(workItemId);
NodeInstanceDesc desc = runtimeDataService.getNodeInstanceForWorkItem(workItemId);
assertNotNull(desc);
assertEquals(processInstanceId, desc.getProcessInstanceId());
assertEquals("Write a Document", desc.getName());
assertEquals("HumanTaskNode", desc.getNodeType());
}
@Test
public void testGetVariableLogs() {
Map<String, Object> params = new HashMap<String, Object>();
params.put("approval_document", "initial content");
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
assertNotNull(processInstanceId);
Collection<VariableDesc> variableLogs = runtimeDataService.getVariableHistory(processInstanceId, "approval_document", new QueryContext());
assertNotNull(variableLogs);
assertEquals(1, variableLogs.size());
processService.setProcessVariable(processInstanceId, "approval_document", "updated content");
variableLogs = runtimeDataService.getVariableHistory(processInstanceId, "approval_document", new QueryContext());
assertNotNull(variableLogs);
assertEquals(2, variableLogs.size());
processService.setProcessVariable(processInstanceId, "approval_reviewComment", "under review - content");
variableLogs = runtimeDataService.getVariablesCurrentState(processInstanceId);
assertNotNull(variableLogs);
assertEquals(2, variableLogs.size());
for (VariableDesc vDesc : variableLogs) {
if (vDesc.getVariableId().equals("approval_document")) {
assertEquals("updated content", vDesc.getNewValue());
} else if (vDesc.getVariableId().equals("approval_reviewComment")) {
assertEquals("under review - content", vDesc.getNewValue());
}
}
}
@Test
public void testGetTaskByWorkItemId() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
ProcessInstance instance = processService.getProcessInstance(processInstanceId);
assertNotNull(instance);
Collection<NodeInstance> activeNodes = ((WorkflowProcessInstanceImpl) instance).getNodeInstances();
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
NodeInstance node = activeNodes.iterator().next();
assertNotNull(node);
assertTrue(node instanceof WorkItemNodeInstance);
Long workItemId = ((WorkItemNodeInstance) node).getWorkItemId();
assertNotNull(workItemId);
UserTaskInstanceDesc userTask = runtimeDataService.getTaskByWorkItemId(workItemId);
assertNotNull(userTask);
assertEquals(processInstanceId, userTask.getProcessInstanceId());
assertEquals("Write a Document", userTask.getName());
}
@Test
public void testGetTaskById() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
ProcessInstance instance = processService.getProcessInstance(processInstanceId);
assertNotNull(instance);
List<Long> taskIds = runtimeDataService.getTasksByProcessInstanceId(processInstanceId);
assertNotNull(taskIds);
assertEquals(1, taskIds.size());
Long taskId = taskIds.get(0);
UserTaskInstanceDesc userTask = runtimeDataService.getTaskById(taskId);
assertNotNull(userTask);
assertEquals(processInstanceId, userTask.getProcessInstanceId());
assertEquals("Write a Document", userTask.getName());
}
@Test
public void testGetTaskOwned() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jboss.qa.bpms.HumanTask");
assertNotNull(processInstanceId);
ProcessInstance instance = processService.getProcessInstance(processInstanceId);
assertNotNull(instance);
List<TaskSummary> tasks = runtimeDataService.getTasksOwned("john", new QueryFilter(0, 5));
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary userTask = tasks.get(0);
assertNotNull(userTask);
assertEquals(processInstanceId, userTask.getProcessInstanceId());
assertEquals("Hello", userTask.getName());
assertEquals("john", userTask.getActualOwnerId());
assertEquals("Reserved", userTask.getStatusId());
assertNotNull(userTask.getActualOwner());
}
@Test
public void testGetTaskAssignedAsBusinessAdmin() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
processService.getProcessInstance(processInstanceId);
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter(0, 5));
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary userTask = tasks.get(0);
assertNotNull(userTask);
assertEquals(processInstanceId, userTask.getProcessInstanceId());
assertEquals("Write a Document", userTask.getName());
}
@Test
public void testGetTaskAssignedAsBusinessAdminPaging() {
for (int i = 0; i < 10; i++) {
processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
}
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter(0, 5));
assertNotNull(tasks);
assertEquals(5, tasks.size());
TaskSummary userTask = tasks.get(0);
assertNotNull(userTask);
assertEquals("Write a Document", userTask.getName());
Collection<ProcessInstanceDesc> activeProcesses = runtimeDataService.getProcessInstances(new QueryContext(0, 20));
for (ProcessInstanceDesc pi : activeProcesses) {
processService.abortProcessInstance(pi.getId());
}
}
@Test
public void testGetTaskAssignedAsBusinessAdminPagingAndFiltering() {
long processInstanceId = -1;
for (int i = 0; i < 10; i++) {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
}
Map<String, Object> params = new HashMap<String, Object>();
params.put("processInstanceId", processInstanceId);
QueryFilter qf = new QueryFilter( "t.taskData.processInstanceId = :processInstanceId",
params, "t.id", false);
qf.setOffset(0);
qf.setCount(5);
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", qf);
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary userTask = tasks.get(0);
assertNotNull(userTask);
assertEquals("Write a Document", userTask.getName());
assertEquals(processInstanceId, (long)userTask.getProcessInstanceId());
Collection<ProcessInstanceDesc> activeProcesses = runtimeDataService.getProcessInstances(new QueryContext(0, 20));
for (ProcessInstanceDesc pi : activeProcesses) {
processService.abortProcessInstance(pi.getId());
}
}
@Test
public void testGetTasksAssignedAsPotentialOwnerByStatusPagingAndFiltering() {
List<Long> processInstanceIds = new ArrayList<Long>();
for (int i = 0; i < 10; i++) {
processInstanceIds.add(processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"));
}
Map<String, Object> params = new HashMap<String, Object>();
params.put("processInstanceId", processInstanceIds);
QueryFilter qf = new QueryFilter( "t.taskData.processInstanceId in (:processInstanceId)",
params, "t.id", false);
qf.setOffset(0);
qf.setCount(5);
List<Status> statuses = new ArrayList<Status>();
statuses.add(Status.Ready);
statuses.add(Status.Reserved);
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwnerByStatus("salaboy", statuses, qf);
assertNotNull(tasks);
assertEquals(5, tasks.size());
TaskSummary userTask = tasks.get(0);
assertNotNull(userTask);
assertEquals("Write a Document", userTask.getName());
Collection<ProcessInstanceDesc> activeProcesses = runtimeDataService.getProcessInstances(new QueryContext(0, 20));
for (ProcessInstanceDesc pi : activeProcesses) {
processService.abortProcessInstance(pi.getId());
}
}
@Test
public void testTasksByStatusByProcessInstanceIdPagingAndFiltering() {
Long pid = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
List<Status> statuses = new ArrayList<Status>();
statuses.add(Status.Ready);
statuses.add(Status.Reserved);
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwnerByStatus("salaboy", statuses, new QueryFilter(0, 5));
assertNotNull(tasks);
assertEquals(1, tasks.size());
long taskId = tasks.get(0).getId();
userTaskService.start(taskId, "salaboy");
userTaskService.complete(taskId, "salaboy", null);
Map<String, Object> params = new HashMap<String, Object>();
params.put("name", "Review Document");
QueryFilter qf = new QueryFilter( "t.name = :name",
params, "t.id", false);
qf.setOffset(0);
qf.setCount(5);
tasks = runtimeDataService.getTasksByStatusByProcessInstanceId(pid, statuses, qf);
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary userTask = tasks.get(0);
assertNotNull(userTask);
assertEquals("Review Document", userTask.getName());
tasks = runtimeDataService.getTasksByStatusByProcessInstanceId(pid, statuses, new QueryFilter(0, 5));
assertNotNull(tasks);
assertEquals(2, tasks.size());
Collection<ProcessInstanceDesc> activeProcesses = runtimeDataService.getProcessInstances(new QueryContext(0, 20));
for (ProcessInstanceDesc pi : activeProcesses) {
processService.abortProcessInstance(pi.getId());
}
}
@Test
public void testGetTaskAudit() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
ProcessInstance instance = processService.getProcessInstance(processInstanceId);
assertNotNull(instance);
Collection<NodeInstance> activeNodes = ((WorkflowProcessInstanceImpl) instance).getNodeInstances();
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
NodeInstance node = activeNodes.iterator().next();
assertNotNull(node);
assertTrue(node instanceof WorkItemNodeInstance);
Long workItemId = ((WorkItemNodeInstance) node).getWorkItemId();
assertNotNull(workItemId);
List<AuditTask> auditTasks = runtimeDataService.getAllAuditTask("salaboy", new QueryFilter(0, 10));
assertNotNull(auditTasks);
assertEquals(1, auditTasks.size());
assertEquals("Write a Document", auditTasks.get(0).getName());
}
@Test
public void testGetTaskEvents() {
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
assertNotNull(processInstanceId);
ProcessInstance instance = processService.getProcessInstance(processInstanceId);
assertNotNull(instance);
Collection<NodeInstance> activeNodes = ((WorkflowProcessInstanceImpl) instance).getNodeInstances();
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
NodeInstance node = activeNodes.iterator().next();
assertNotNull(node);
assertTrue(node instanceof WorkItemNodeInstance);
Long workItemId = ((WorkItemNodeInstance) node).getWorkItemId();
assertNotNull(workItemId);
UserTaskInstanceDesc userTask = runtimeDataService.getTaskByWorkItemId(workItemId);
assertNotNull(userTask);
List<TaskEvent> auditTasks = runtimeDataService.getTaskEvents(userTask.getTaskId(), new QueryFilter());
assertNotNull(auditTasks);
assertEquals(1, auditTasks.size());
assertEquals(TaskEvent.TaskEventType.ADDED, auditTasks.get(0).getType());
userTaskService.start(userTask.getTaskId(), "salaboy");
auditTasks = runtimeDataService.getTaskEvents(userTask.getTaskId(), new QueryFilter());
assertNotNull(auditTasks);
assertEquals(2, auditTasks.size());
assertEquals(TaskEvent.TaskEventType.ADDED, auditTasks.get(0).getType());
assertEquals(TaskEvent.TaskEventType.STARTED, auditTasks.get(1).getType());
}
@Test
public void testGetProcessInstancesByVariable() {
Map<String, Object> params = new HashMap<String, Object>();
params.put("approval_document", "initial content");
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
assertNotNull(processInstanceId);
Collection<ProcessInstanceDesc> processInstanceLogs = runtimeDataService.getProcessInstancesByVariable("approval_document", null, new QueryContext());
assertNotNull(processInstanceLogs);
assertEquals(1, processInstanceLogs.size());
processService.setProcessVariable(processInstanceId, "approval_document", "updated content");
processInstanceLogs = runtimeDataService.getProcessInstancesByVariable("approval_reviewComment", null, new QueryContext());
assertNotNull(processInstanceLogs);
assertEquals(0, processInstanceLogs.size());
processService.setProcessVariable(processInstanceId, "approval_reviewComment", "under review - content");
processInstanceLogs = runtimeDataService.getProcessInstancesByVariable("approval_reviewComment", null, new QueryContext());
assertNotNull(processInstanceLogs);
assertEquals(1, processInstanceLogs.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
}
@Test
public void testGetProcessInstancesByVariableAndValue() {
Map<String, Object> params = new HashMap<String, Object>();
params.put("approval_document", "initial content");
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
assertNotNull(processInstanceId);
Collection<ProcessInstanceDesc> processInstanceLogs = runtimeDataService.getProcessInstancesByVariableAndValue("approval_document", "initial content", null, new QueryContext());
assertNotNull(processInstanceLogs);
assertEquals(1, processInstanceLogs.size());
processService.setProcessVariable(processInstanceId, "approval_document", "updated content");
processInstanceLogs = runtimeDataService.getProcessInstancesByVariableAndValue("approval_document", "initial content", null, new QueryContext());
assertNotNull(processInstanceLogs);
assertEquals(0, processInstanceLogs.size());
processInstanceLogs = runtimeDataService.getProcessInstancesByVariableAndValue("approval_document", "updated content", null, new QueryContext());
assertNotNull(processInstanceLogs);
assertEquals(1, processInstanceLogs.size());
processInstanceLogs = runtimeDataService.getProcessInstancesByVariableAndValue("approval_document", "updated%", null, new QueryContext());
assertNotNull(processInstanceLogs);
assertEquals(1, processInstanceLogs.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
}
@Test
public void testGetTasksByVariable() {
Map<String, Object> params = new HashMap<String, Object>();
params.put("approval_document", "initial content");
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
assertNotNull(processInstanceId);
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", null);
assertNotNull(tasks);
assertEquals(1, tasks.size());
List<TaskSummary> tasksByVariable = runtimeDataService.
taskSummaryQuery("salaboy")
.variableName("TaskName").build().getResultList();
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
tasksByVariable = runtimeDataService.getTasksByVariable("salaboy", "ReviewComment", null, new QueryContext());
assertNotNull(tasksByVariable);
assertEquals(0, tasksByVariable.size());
long taskId = tasks.get(0).getId();
Map<String, Object> output = new HashMap<String, Object>();
output.put("ReviewComment", "document reviewed");
userTaskService.saveContent(taskId, output);
tasksByVariable = runtimeDataService.getTasksByVariable("salaboy", "ReviewComment", null, new QueryContext());
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
}
@Test
public void testGetTasksByVariableAndValue() {
Map<String, Object> params = new HashMap<String, Object>();
params.put("approval_document", "initial content");
processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
assertNotNull(processInstanceId);
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", null);
assertNotNull(tasks);
assertEquals(1, tasks.size());
List<TaskSummary> tasksByVariable = runtimeDataService.getTasksByVariableAndValue("salaboy", "TaskName", "Write a Document", null, new QueryContext());
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
tasksByVariable = runtimeDataService.getTasksByVariableAndValue("salaboy", "TaskName", "Write", null, new QueryContext());
assertNotNull(tasksByVariable);
assertEquals(0, tasksByVariable.size());
long taskId = tasks.get(0).getId();
Map<String, Object> output = new HashMap<String, Object>();
output.put("ReviewComment", "document reviewed");
userTaskService.saveContent(taskId, output);
tasksByVariable = runtimeDataService.getTasksByVariableAndValue("salaboy", "ReviewComment", "document reviewed", null, new QueryContext());
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
tasksByVariable = runtimeDataService.getTasksByVariableAndValue("salaboy", "ReviewComment", "document%", null, new QueryContext());
assertNotNull(tasksByVariable);
assertEquals(1, tasksByVariable.size());
processService.abortProcessInstance(processInstanceId);
processInstanceId = null;
}
}
| etirelli/jbpm | jbpm-services/jbpm-services-ejb/jbpm-services-ejb-impl/src/test/java/org/jbpm/services/ejb/test/RuntimeDataServiceEJBIntegrationTest.java | Java | apache-2.0 | 49,086 |
package org.core4j.test;
import java.util.Iterator;
import junit.framework.Assert;
import org.core4j.Enumerable;
import org.core4j.Enumerables;
import org.core4j.Func;
import org.core4j.Func1;
import org.core4j.Funcs;
import org.core4j.Predicate1;
import org.core4j.Predicates;
import org.core4j.ReadOnlyIterator;
import org.junit.Test;
public class TestEnumerable {
@Test
public void testEnumerable() {
Assert.assertEquals(5, Enumerable.range(1, 5).count());
Assert.assertEquals((Integer) 1, Enumerable.range(1, 5).first());
Assert.assertEquals((Integer) 5, Enumerable.range(1, 5).last());
Assert.assertEquals((Integer) 3, Enumerable.range(1, 5).elementAt(2));
Assert.assertEquals(null, Enumerable.empty(Integer.class).firstOrNull());
Assert.assertEquals("1", Enumerable.create(1).join(","));
Assert.assertEquals("1,2,3,4,5", Enumerable.range(1, 5).join(","));
Assert.assertEquals("5,4,3,2,1", Enumerable.range(1, 5).reverse().join(","));
Assert.assertEquals("10", Enumerable.range(10, 1).join(","));
Assert.assertEquals("1", Enumerable.range(1, 1000000).take(1).join(","));
Assert.assertEquals("3,4,5", Enumerable.range(1, 5).skip(2).join(","));
Assert.assertEquals("2,3,4,5", Enumerable.range(1, 5).skipWhile(IS_ODD).join(","));
Assert.assertEquals((Integer) 10, Enumerable.range(1, 4).sum(Integer.class));
Enumerable<Integer> one = Enumerable.create(5, 3, 9, 7, 5, 9, 3, 7);
Enumerable<Integer> two = Enumerable.create(8, 3, 6, 4, 4, 9, 1, 0);
Assert.assertEquals((Integer) 0, two.min(IDENTITY));
Assert.assertEquals((Integer) 9, two.max(IDENTITY));
Assert.assertEquals("5,3,9,7", one.distinct().join(","));
Assert.assertEquals("5,3,9,7,8,6,4,1,0", one.union(two).join(","));
Assert.assertEquals("5,3,9,7,5,9,3,7,8,3,6,4,4,9,1,0", one.concat(two).join(","));
Assert.assertEquals("3,9", one.intersect(two).join(","));
Assert.assertEquals("3,9,1", two.where(IS_ODD).join(","));
Assert.assertEquals("8,6,4,4,0", two.where(Predicates.not(IS_ODD)).join(","));
Assert.assertEquals("2,4,6,8,10", Enumerable.range(1, 5).select(TIMES_TWO).join(","));
Assert.assertEquals("onetwothree", Enumerable.create("one", "two", "three").selectMany(CHARS).join(""));
// test using an infinite iterator - none of these methods should materialize the enumerable
Assert.assertEquals("1,1", infinite(1).skip(100).take(2).join(","));
Assert.assertEquals(true, infinite(1).any(IS_ODD));
Assert.assertEquals(true, infinite(1).contains(1));
Assert.assertEquals((Integer) 1, infinite(1).first());
Assert.assertEquals((Integer) 1, infinite(1).elementAt(100));
Assert.assertEquals((Integer) 2, infinite(1).select(TIMES_TWO).first());
Assert.assertEquals((Integer) 1, infinite(1).where(IS_ODD).first());
Assert.assertEquals((Integer) 1, infinite(1).cast(Integer.class).first());
Assert.assertEquals("oneone", infinite("one").selectMany(CHARS).take(6).join(""));
Assert.assertEquals("1,1", infinite(1).concat(infinite(1)).take(2).join(","));
}
private static <T> Enumerable<T> infinite(final T value) {
return Enumerable.createFromIterator(new Func<Iterator<T>>() {
public Iterator<T> apply() {
return new InfiniteIterator<T>(value);
}
});
}
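    // Iterator that yields the same value forever; used to verify that the operators above are lazy and never materialize the sequence.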
private static class InfiniteIterator<T> extends ReadOnlyIterator<T> {
private final T value;
public InfiniteIterator(T value) {
this.value = value;
}
@Override
protected IterationResult<T> advance() throws Exception {
return IterationResult.next(value);
}
}
private static final Func1<Integer, Integer> IDENTITY = Funcs.identity(Integer.class);
private static final Func1<Integer, Integer> TIMES_TWO = new Func1<Integer, Integer>() {
public Integer apply(Integer input) {
return input * 2;
}
};
private static final Predicate1<Integer> IS_ODD = new Predicate1<Integer>() {
public boolean apply(Integer input) {
return input % 2 == 1;
}
};
private static final Func1<String, Enumerable<Character>> CHARS = new Func1<String, Enumerable<Character>>() {
public Enumerable<Character> apply(String input) {
return Enumerables.chars(input);
}
};
}
| rareddy/oreva | common/src/test/java/org/core4j/test/TestEnumerable.java | Java | apache-2.0 | 4,367 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.niochar.charset.additional;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import org.apache.harmony.nio.AddressUtil;
import org.apache.harmony.niochar.CharsetProviderImpl;
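/**
 * Charset implementation for the single-byte IBM870 (EBCDIC) codepage.
 * Decoding and encoding use a JNI fast path for direct buffers when the native
 * library is loaded, and fall back to the table-driven Java loops below.
 */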
public class IBM870 extends Charset {
public IBM870(String csName, String[] aliases) {
super(csName, aliases);
}
public boolean contains(Charset cs) {
        return cs.name().equalsIgnoreCase("IBM367")
                || cs.name().equalsIgnoreCase("IBM870")
                || cs.name().equalsIgnoreCase("iso-8859-2")
                || cs.name().equalsIgnoreCase("US-ASCII");
}
public CharsetDecoder newDecoder() {
return new Decoder(this);
}
public CharsetEncoder newEncoder() {
return new Encoder(this);
}
private static final class Decoder extends CharsetDecoder{
private Decoder(Charset cs){
super(cs, 1, 1);
}
private native int nDecode(char[] array, int arrPosition, int remaining, long outAddr, int absolutePos);
protected CoderResult decodeLoop(ByteBuffer bb, CharBuffer cb){
int cbRemaining = cb.remaining();
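            // Native fast path: when the JNI natives are loaded and the input is a direct buffer,
            // decode straight from the buffer address into the target char array.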
if(CharsetProviderImpl.hasLoadedNatives() && bb.isDirect() && bb.hasRemaining() && cb.hasArray()){
int toProceed = bb.remaining();
int cbPos = cb.position();
int bbPos = bb.position();
boolean throwOverflow = false;
if( cbRemaining < toProceed ) {
toProceed = cbRemaining;
throwOverflow = true;
}
int res = nDecode(cb.array(), cb.arrayOffset()+cbPos, toProceed, AddressUtil.getDirectBufferAddress(bb), bbPos);
bb.position(bbPos+res);
cb.position(cbPos+res);
if(throwOverflow) return CoderResult.OVERFLOW;
}else{
if(bb.hasArray() && cb.hasArray()) {
int rem = bb.remaining();
rem = cbRemaining >= rem ? rem : cbRemaining;
byte[] bArr = bb.array();
char[] cArr = cb.array();
int bStart = bb.position();
int cStart = cb.position();
int i;
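                    // Bytes 0x00-0x03 map to themselves; every other byte value is looked up in arr,
                    // whose first entry corresponds to byte 0x04.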
for(i=bStart; i<bStart+rem; i++) {
char in = (char)(bArr[i] & 0xFF);
if(in >= 4){
int index = (int)in - 4;
cArr[cStart++] = (char)arr[index];
}else {
cArr[cStart++] = (char)(in & 0xFF);
}
}
bb.position(i);
cb.position(cStart);
if(rem == cbRemaining && bb.hasRemaining()) return CoderResult.OVERFLOW;
} else {
while(bb.hasRemaining()){
if( cbRemaining == 0 ) return CoderResult.OVERFLOW;
char in = (char)(bb.get() & 0xFF);
if(in >= 4){
int index = (int)in - 4;
cb.put(arr[index]);
}else {
cb.put((char)(in & 0xFF));
}
cbRemaining--;
}
}
}
return CoderResult.UNDERFLOW;
}
final static char[] arr = {
0x009C,0x0009,0x0086,0x007F,
0x0097,0x008D,0x008E,0x000B,0x000C,0x000D,0x000E,0x000F,
0x0010,0x0011,0x0012,0x0013,0x009D,0x0085,0x0008,0x0087,
0x0018,0x0019,0x0092,0x008F,0x001C,0x001D,0x001E,0x001F,
0x0080,0x0081,0x0082,0x0083,0x0084,0x000A,0x0017,0x001B,
0x0088,0x0089,0x008A,0x008B,0x008C,0x0005,0x0006,0x0007,
0x0090,0x0091,0x0016,0x0093,0x0094,0x0095,0x0096,0x0004,
0x0098,0x0099,0x009A,0x009B,0x0014,0x0015,0x009E,0x001A,
0x0020,0x00A0,0x00E2,0x00E4,0x0163,0x00E1,0x0103,0x010D,
0x00E7,0x0107,0x005B,0x002E,0x003C,0x0028,0x002B,0x0021,
0x0026,0x00E9,0x0119,0x00EB,0x016F,0x00ED,0x00EE,0x013E,
0x013A,0x00DF,0x005D,0x0024,0x002A,0x0029,0x003B,0x005E,
0x002D,0x002F,0x00C2,0x00C4,0x02DD,0x00C1,0x0102,0x010C,
0x00C7,0x0106,0x007C,0x002C,0x0025,0x005F,0x003E,0x003F,
0x02C7,0x00C9,0x0118,0x00CB,0x016E,0x00CD,0x00CE,0x013D,
0x0139,0x0060,0x003A,0x0023,0x0040,0x0027,0x003D,0x0022,
0x02D8,0x0061,0x0062,0x0063,0x0064,0x0065,0x0066,0x0067,
0x0068,0x0069,0x015B,0x0148,0x0111,0x00FD,0x0159,0x015F,
0x00B0,0x006A,0x006B,0x006C,0x006D,0x006E,0x006F,0x0070,
0x0071,0x0072,0x0142,0x0144,0x0161,0x00B8,0x02DB,0x00A4,
0x0105,0x007E,0x0073,0x0074,0x0075,0x0076,0x0077,0x0078,
0x0079,0x007A,0x015A,0x0147,0x0110,0x00DD,0x0158,0x015E,
0x02D9,0x0104,0x017C,0x0162,0x017B,0x00A7,0x017E,0x017A,
0x017D,0x0179,0x0141,0x0143,0x0160,0x00A8,0x00B4,0x00D7,
0x007B,0x0041,0x0042,0x0043,0x0044,0x0045,0x0046,0x0047,
0x0048,0x0049,0x00AD,0x00F4,0x00F6,0x0155,0x00F3,0x0151,
0x007D,0x004A,0x004B,0x004C,0x004D,0x004E,0x004F,0x0050,
0x0051,0x0052,0x011A,0x0171,0x00FC,0x0165,0x00FA,0x011B,
0x005C,0x00F7,0x0053,0x0054,0x0055,0x0056,0x0057,0x0058,
0x0059,0x005A,0x010F,0x00D4,0x00D6,0x0154,0x00D3,0x0150,
0x0030,0x0031,0x0032,0x0033,0x0034,0x0035,0x0036,0x0037,
0x0038,0x0039,0x010E,0x0170,0x00DC,0x0164,0x00DA,0x009F
};
}
private static final class Encoder extends CharsetEncoder{
private Encoder(Charset cs){
super(cs, 1, 1);
}
private native void nEncode(long outAddr, int absolutePos, char[] array, int arrPosition, int[] res);
protected CoderResult encodeLoop(CharBuffer cb, ByteBuffer bb){
int bbRemaining = bb.remaining();
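            // Native fast path: encode directly into the direct output buffer when the JNI natives are available.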
if(CharsetProviderImpl.hasLoadedNatives() && bb.isDirect() && cb.hasRemaining() && cb.hasArray()){
int toProceed = cb.remaining();
int cbPos = cb.position();
int bbPos = bb.position();
boolean throwOverflow = false;
if( bbRemaining < toProceed ) {
toProceed = bbRemaining;
throwOverflow = true;
}
int[] res = {toProceed, 0};
nEncode(AddressUtil.getDirectBufferAddress(bb), bbPos, cb.array(), cb.arrayOffset()+cbPos, res);
if( res[0] <= 0 ) {
bb.position(bbPos-res[0]);
cb.position(cbPos-res[0]);
if(res[1]!=0) {
if(res[1] < 0)
return CoderResult.malformedForLength(-res[1]);
else
return CoderResult.unmappableForLength(res[1]);
}
}else{
bb.position(bbPos+res[0]);
cb.position(cbPos+res[0]);
if(throwOverflow) return CoderResult.OVERFLOW;
}
}else{
if(bb.hasArray() && cb.hasArray()) {
byte[] byteArr = bb.array();
char[] charArr = cb.array();
int rem = cb.remaining();
int byteArrStart = bb.position();
rem = bbRemaining <= rem ? bbRemaining : rem;
int x;
for(x = cb.position(); x < cb.position()+rem; x++) {
char c = charArr[x];
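                        // Characters above U+02DD are outside this codepage: surrogate pairs are reported
                        // as unmappable and lone surrogates as malformed.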
if(c > (char)0x02DD){
if (c >= 0xD800 && c <= 0xDFFF) {
if(x+1 < cb.limit()) {
char c1 = charArr[x+1];
if(c1 >= 0xD800 && c1 <= 0xDFFF) {
cb.position(x); bb.position(byteArrStart);
return CoderResult.unmappableForLength(2);
}
} else {
cb.position(x); bb.position(byteArrStart);
return CoderResult.UNDERFLOW;
}
cb.position(x); bb.position(byteArrStart);
return CoderResult.malformedForLength(1);
}
cb.position(x); bb.position(byteArrStart);
return CoderResult.unmappableForLength(1);
}else{
if(c < 0x04) {
byteArr[byteArrStart++] = (byte)c;
} else {
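                            // Two-level lookup: encodeIndex selects a 256-entry block of arr by the char's high byte;
                            // a negative index or a zero table entry means the character is unmappable.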
int index = (int)c >> 8;
index = encodeIndex[index];
if(index < 0) {
cb.position(x); bb.position(byteArrStart);
return CoderResult.unmappableForLength(1);
}
index <<= 8;
index += (int)c & 0xFF;
if((byte)arr[index] != 0){
byteArr[byteArrStart++] = (byte)arr[index];
}else{
cb.position(x); bb.position(byteArrStart);
return CoderResult.unmappableForLength(1);
}
}
}
}
cb.position(x);
bb.position(byteArrStart);
if(rem == bbRemaining && cb.hasRemaining()) {
return CoderResult.OVERFLOW;
}
} else {
while(cb.hasRemaining()){
if( bbRemaining == 0 ) return CoderResult.OVERFLOW;
char c = cb.get();
if(c > (char)0x02DD){
if (c >= 0xD800 && c <= 0xDFFF) {
if(cb.hasRemaining()) {
char c1 = cb.get();
if(c1 >= 0xD800 && c1 <= 0xDFFF) {
cb.position(cb.position()-2);
return CoderResult.unmappableForLength(2);
} else {
cb.position(cb.position()-1);
}
} else {
cb.position(cb.position()-1);
return CoderResult.UNDERFLOW;
}
cb.position(cb.position()-1);
return CoderResult.malformedForLength(1);
}
cb.position(cb.position()-1);
return CoderResult.unmappableForLength(1);
}else{
if(c < 0x04) {
bb.put((byte)c);
} else {
int index = (int)c >> 8;
index = encodeIndex[index];
if(index < 0) {
cb.position(cb.position()-1);
return CoderResult.unmappableForLength(1);
}
index <<= 8;
index += (int)c & 0xFF;
if((byte)arr[index] != 0){
bb.put((byte)arr[index]);
}else{
cb.position(cb.position()-1);
return CoderResult.unmappableForLength(1);
}
}
bbRemaining--;
}
}
}
}
return CoderResult.UNDERFLOW;
}
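        // The pure-Java paths in encodeLoop map a UTF-16 code unit c to a single IBM870 byte
        // with a two-stage lookup: the high byte of c selects a 256-entry block through
        // encodeIndex (-1 marks a wholly unmappable block), and the block number combined with
        // the low byte of c indexes the flat table arr, where a stored 0 means "no mapping"
        // (code units below 0x04 bypass the table and are written through unchanged):
        //
        //     int block = encodeIndex[c >> 8];                 // -1 -> unmappable
        //     byte b = (byte) arr[(block << 8) + (c & 0xFF)];  //  0 -> unmappable
        //
        // Worked examples from the tables below: 'A' (U+0041) gives encodeIndex[0] = 0 and
        // arr[0x41] = 0xC1; U+0105 gives encodeIndex[1] = 1 and arr[0x105] = 0xA0, matching
        // the decode table above, where byte 0xA0 maps back to U+0105.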
final static char arr[] = {
0x00,0x01,0x02,0x03,0x37,0x2D,0x2E,0x2F,0x16,0x05,0x25,0x0B,0x0C,0x0D,0x0E,0x0F,
0x10,0x11,0x12,0x13,0x3C,0x3D,0x32,0x26,0x18,0x19,0x3F,0x27,0x1C,0x1D,0x1E,0x1F,
0x40,0x4F,0x7F,0x7B,0x5B,0x6C,0x50,0x7D,0x4D,0x5D,0x5C,0x4E,0x6B,0x60,0x4B,0x61,
0xF0,0xF1,0xF2,0xF3,0xF4,0xF5,0xF6,0xF7,0xF8,0xF9,0x7A,0x5E,0x4C,0x7E,0x6E,0x6F,
0x7C,0xC1,0xC2,0xC3,0xC4,0xC5,0xC6,0xC7,0xC8,0xC9,0xD1,0xD2,0xD3,0xD4,0xD5,0xD6,
0xD7,0xD8,0xD9,0xE2,0xE3,0xE4,0xE5,0xE6,0xE7,0xE8,0xE9,0x4A,0xE0,0x5A,0x5F,0x6D,
0x79,0x81,0x82,0x83,0x84,0x85,0x86,0x87,0x88,0x89,0x91,0x92,0x93,0x94,0x95,0x96,
0x97,0x98,0x99,0xA2,0xA3,0xA4,0xA5,0xA6,0xA7,0xA8,0xA9,0xC0,0x6A,0xD0,0xA1,0x07,
0x20,0x21,0x22,0x23,0x24,0x15,0x06,0x17,0x28,0x29,0x2A,0x2B,0x2C,0x09,0x0A,0x1B,
0x30,0x31,0x1A,0x33,0x34,0x35,0x36,0x08,0x38,0x39,0x3A,0x3B,0x04,0x14,0x3E,0xFF,
0x41,0x00,0x00,0x00,0x9F,0x00,0x00,0xB5,0xBD,0x00,0x00,0x00,0x00,0xCA,0x00,0x00,
0x90,0x00,0x00,0x00,0xBE,0x00,0x00,0x00,0x9D,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x65,0x62,0x00,0x63,0x00,0x00,0x68,0x00,0x71,0x00,0x73,0x00,0x75,0x76,0x00,
0x00,0x00,0x00,0xEE,0xEB,0x00,0xEC,0xBF,0x00,0x00,0xFE,0x00,0xFC,0xAD,0x00,0x59,
0x00,0x45,0x42,0x00,0x43,0x00,0x00,0x48,0x00,0x51,0x00,0x53,0x00,0x55,0x56,0x00,
0x00,0x00,0x00,0xCE,0xCB,0x00,0xCC,0xE1,0x00,0x00,0xDE,0x00,0xDC,0x8D,0x00,0x00,
0x00,0x00,0x66,0x46,0xB1,0xA0,0x69,0x49,0x00,0x00,0x00,0x00,0x67,0x47,0xFA,0xEA,
0xAC,0x8C,0x00,0x00,0x00,0x00,0x00,0x00,0x72,0x52,0xDA,0xDF,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x78,0x58,0x00,0x00,0x77,0x57,0x00,
0x00,0xBA,0x9A,0xBB,0x9B,0x00,0x00,0xAB,0x8B,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0xEF,0xCF,0x00,0x00,0xED,0xCD,0x00,0x00,0xAE,0x8E,0xAA,0x8A,0x00,0x00,0xAF,0x8F,
0xBC,0x9C,0xB3,0x44,0xFD,0xDD,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x74,0x54,
0xFB,0xDB,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xB9,0xB7,0xB4,0xB2,0xB8,0xB6,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xB0,0x00,0x9E,0x00,0x64,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00
};
final static int[] encodeIndex = {
0,1,2,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1
};
}
}
| freeVM/freeVM | enhanced/java/classlib/modules/nio_char/src/main/java/org/apache/harmony/niochar/charset/additional/IBM870.java | Java | apache-2.0 | 21,307 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.scimpi.dispatcher.view.action;
import java.util.List;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.metamodel.spec.ObjectSpecification;
import org.apache.isis.core.metamodel.spec.feature.ObjectAction;
import org.apache.isis.core.metamodel.spec.feature.ObjectActionParameter;
import org.apache.isis.viewer.scimpi.dispatcher.AbstractElementProcessor;
import org.apache.isis.viewer.scimpi.dispatcher.ScimpiException;
import org.apache.isis.viewer.scimpi.dispatcher.action.ActionAction;
import org.apache.isis.viewer.scimpi.dispatcher.context.RequestContext;
import org.apache.isis.viewer.scimpi.dispatcher.context.RequestContext.Scope;
import org.apache.isis.viewer.scimpi.dispatcher.edit.FieldEditState;
import org.apache.isis.viewer.scimpi.dispatcher.edit.FormState;
import org.apache.isis.viewer.scimpi.dispatcher.processor.Request;
import org.apache.isis.viewer.scimpi.dispatcher.util.MethodsUtils;
import org.apache.isis.viewer.scimpi.dispatcher.view.edit.FieldFactory;
import org.apache.isis.viewer.scimpi.dispatcher.view.edit.FormFieldBlock;
import org.apache.isis.viewer.scimpi.dispatcher.view.form.HiddenInputField;
import org.apache.isis.viewer.scimpi.dispatcher.view.form.HtmlFormBuilder;
import org.apache.isis.viewer.scimpi.dispatcher.view.form.InputField;
public class ActionForm extends AbstractElementProcessor {
// REVIEW: confirm this rendering context
private final static Where where = Where.OBJECT_FORMS;
@Override
public void process(final Request request) {
final CreateFormParameter parameters = new CreateFormParameter();
parameters.objectId = request.getOptionalProperty(OBJECT);
parameters.methodName = request.getRequiredProperty(METHOD);
parameters.forwardResultTo = request.getOptionalProperty(VIEW);
parameters.forwardVoidTo = request.getOptionalProperty(VOID);
parameters.forwardErrorTo = request.getOptionalProperty(ERROR);
parameters.cancelTo = request.getOptionalProperty(CANCEL_TO);
parameters.showIcon = request.isRequested(SHOW_ICON, showIconByDefault());
parameters.buttonTitle = request.getOptionalProperty(BUTTON_TITLE);
parameters.formTitle = request.getOptionalProperty(FORM_TITLE);
parameters.labelDelimiter = request.getOptionalProperty(LABEL_DELIMITER, ":");
parameters.formId = request.getOptionalProperty(FORM_ID, request.nextFormId());
parameters.resultName = request.getOptionalProperty(RESULT_NAME);
parameters.resultOverride = request.getOptionalProperty(RESULT_OVERRIDE);
parameters.scope = request.getOptionalProperty(SCOPE);
parameters.className = request.getOptionalProperty(CLASS, "action full");
parameters.showMessage = request.isRequested(SHOW_MESSAGE, false);
parameters.completionMessage = request.getOptionalProperty(MESSAGE);
parameters.id = request.getOptionalProperty(ID, parameters.methodName);
createForm(request, parameters);
}
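    // Illustrative only: this processor is registered under the tag name "action-form" (see
    // getName() below), so a Scimpi page would use it roughly as sketched here. The "swf"
    // prefix and the exact attribute spellings are assumptions based on the OBJECT, METHOD,
    // VIEW and BUTTON_TITLE constants read in process(), not something this file defines:
    //
    //   <swf:action-form object="${_result}" method="placeOrder"
    //       view="order-placed.shtml" error="error.shtml" button-title="Place order" />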
public static void createForm(final Request request, final CreateFormParameter parameterObject) {
createForm(request, parameterObject, false);
}
protected static void createForm(final Request request, final CreateFormParameter parameterObject, final boolean withoutProcessing) {
final RequestContext context = request.getContext();
final ObjectAdapter object = MethodsUtils.findObject(context, parameterObject.objectId);
final String version = request.getContext().mapVersion(object);
final ObjectAction action = MethodsUtils.findAction(object, parameterObject.methodName);
// TODO how do we distinguish between overloaded methods?
// REVIEW Is this useful?
if (action.getParameterCount() == 0) {
throw new ScimpiException("Action form can only be used for actions with parameters");
}
if (parameterObject.showMessage && MethodsUtils.isVisible(object, action, where)) {
final String notUsable = MethodsUtils.isUsable(object, action, where);
if (notUsable != null) {
if (!withoutProcessing) {
request.skipUntilClose();
}
request.appendHtml("<div class=\"" + parameterObject.className + "-message\" >");
request.appendAsHtmlEncoded(notUsable);
request.appendHtml("</div>");
return;
}
}
if (!MethodsUtils.isVisibleAndUsable(object, action, where)) {
if (!withoutProcessing) {
request.skipUntilClose();
}
return;
}
final String objectId = context.mapObject(object, Scope.INTERACTION);
final String errorView = context.fullFilePath(parameterObject.forwardErrorTo == null ? context.getResourceFile() : parameterObject.forwardErrorTo);
final String voidView = context.fullFilePath(parameterObject.forwardVoidTo == null ? context.getResourceFile() : parameterObject.forwardVoidTo);
if (false /* action.isContributed() && !action.hasReturn() && parameterObject.resultOverride == null */) {
parameterObject.resultOverride = objectId;
}
final HiddenInputField[] hiddenFields = new HiddenInputField[] { new HiddenInputField("_" + OBJECT, objectId), new HiddenInputField("_" + VERSION, version), new HiddenInputField("_" + FORM_ID, parameterObject.formId), new HiddenInputField("_" + METHOD, parameterObject.methodName),
parameterObject.forwardResultTo == null ? null : new HiddenInputField("_" + VIEW, context.fullFilePath(parameterObject.forwardResultTo)), new HiddenInputField("_" + VOID, voidView), new HiddenInputField("_" + ERROR, errorView),
parameterObject.completionMessage == null ? null : new HiddenInputField("_" + MESSAGE, parameterObject.completionMessage), parameterObject.scope == null ? null : new HiddenInputField("_" + SCOPE, parameterObject.scope),
parameterObject.resultOverride == null ? null : new HiddenInputField("_" + RESULT_OVERRIDE, parameterObject.resultOverride), parameterObject.resultName == null ? null : new HiddenInputField("_" + RESULT_NAME, parameterObject.resultName),
parameterObject.resultName == null ? null : new HiddenInputField(RequestContext.RESULT, (String) request.getContext().getVariable(RequestContext.RESULT)) };
// TODO when the block contains a selector tag it doesn't disable it if
// the field cannot be edited!!!
final FormFieldBlock containedBlock = new FormFieldBlock() {
@Override
public boolean isNullable(final String name) {
final int index = Integer.parseInt(name.substring(5)) - 1;
final ObjectActionParameter param = action.getParameters().get(index);
return param.isOptional();
}
};
request.setBlockContent(containedBlock);
if (!withoutProcessing) {
request.processUtilCloseTag();
}
final FormState entryState = (FormState) context.getVariable(ENTRY_FIELDS);
// TODO the list of included fields should be considered in the next
// method (see EditObject)
final InputField[] formFields = createFields(action, object);
containedBlock.hideExcludedParameters(formFields);
containedBlock.setUpValues(formFields);
initializeFields(context, object, action, formFields);
setDefaults(context, object, action, formFields, entryState, parameterObject.showIcon);
String errors = null;
if (entryState != null && entryState.isForForm(parameterObject.formId)) {
copyEntryState(context, object, action, formFields, entryState);
errors = entryState.getError();
}
overrideWithHtml(context, containedBlock, formFields);
String formTitle;
if (parameterObject.formTitle == null) {
formTitle = action.getName();
} else {
formTitle = parameterObject.formTitle;
}
String buttonTitle = parameterObject.buttonTitle;
if (buttonTitle == null) {
buttonTitle = action.getName();
} else if (buttonTitle.equals("")) {
buttonTitle = "Ok";
}
HtmlFormBuilder.createForm(request, ActionAction.ACTION + ".app", hiddenFields, formFields, parameterObject.className,
parameterObject.id, formTitle, parameterObject.labelDelimiter, action.getDescription(), action.getHelp(), buttonTitle, errors, parameterObject.cancelTo);
request.popBlockContent();
}
private static InputField[] createFields(final ObjectAction action, final ObjectAdapter object) {
final int parameterCount = action.getParameterCount();
final InputField[] fields = new InputField[parameterCount];
for (int i = 0; i < fields.length; i++) {
fields[i] = new InputField(ActionAction.parameterName(i));
}
return fields;
}
private static void initializeFields(final RequestContext context, final ObjectAdapter object, final ObjectAction action, final InputField[] fields) {
final List<ObjectActionParameter> parameters = action.getParameters();
for (int i = 0; i < fields.length; i++) {
final InputField field = fields[i];
final ObjectActionParameter param = parameters.get(i);
if (false /*action.isContributed() && i == 0*/) {
// fields[i].setValue(context.mapObject(object,
// Scope.INTERACTION));
fields[i].setType(InputField.REFERENCE);
fields[i].setHidden(true);
} else {
fields[i].setHelpReference("xxxhelp");
final ObjectAdapter[] optionsForParameter = action.getChoices(object)[i];
FieldFactory.initializeField(context, object, param, optionsForParameter, !param.isOptional(), field);
}
}
}
/**
* Sets up the fields with their initial values
*
* @param showIcon
*/
private static void setDefaults(final RequestContext context, final ObjectAdapter object, final ObjectAction action, final InputField[] fields, final FormState entryState, final boolean showIcon) {
final ObjectAdapter[] defaultValues = action.getDefaults(object);
if (defaultValues == null) {
return;
}
for (int i = 0; i < fields.length; i++) {
final InputField field = fields[i];
final ObjectAdapter defaultValue = defaultValues[i];
final String title = defaultValue == null ? "" : defaultValue.titleString();
if (field.getType() == InputField.REFERENCE) {
final ObjectSpecification objectSpecification = action.getParameters().get(i).getSpecification();
if (defaultValue != null) {
final String imageSegment = showIcon ? "<img class=\"small-icon\" src=\"" + context.imagePath(objectSpecification) + "\" alt=\"" + objectSpecification.getShortIdentifier() + "\"/>" : "";
final String html = imageSegment + title;
final String value = context.mapObject(defaultValue, Scope.INTERACTION);
field.setValue(value);
field.setHtml(html);
}
} else {
field.setValue(title);
}
}
}
private static void copyEntryState(final RequestContext context, final ObjectAdapter object, final ObjectAction action, final InputField[] fields, final FormState entryState) {
for (final InputField field : fields) {
final FieldEditState fieldState = entryState.getField(field.getName());
if (fieldState != null) {
if (field.isEditable()) {
String entry;
entry = fieldState.getEntry();
field.setValue(entry);
}
field.setErrorText(fieldState.getError());
}
}
}
private static void overrideWithHtml(final RequestContext context, final FormFieldBlock containedBlock, final InputField[] formFields) {
for (int i = 0; i < formFields.length; i++) {
final String id = ActionAction.parameterName(i);
if (containedBlock.hasContent(id)) {
final String content = containedBlock.getContent(id);
if (content != null) {
formFields[i].setHtml(content);
formFields[i].setType(InputField.HTML);
}
}
}
}
@Override
public String getName() {
return "action-form";
}
}
| howepeng/isis | mothballed/component/viewer/scimpi/dispatcher/src/main/java/org/apache/isis/viewer/scimpi/dispatcher/view/action/ActionForm.java | Java | apache-2.0 | 13,712 |
package fm.liu.timo.parser.ast.expression.primary.function.spatial;
import java.util.List;
import fm.liu.timo.parser.ast.expression.Expression;
import fm.liu.timo.parser.ast.expression.primary.function.FunctionExpression;
import fm.liu.timo.parser.visitor.Visitor;
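/**
 * AST node for the spatial function ST_CENTROID(expr); the single geometry argument is wrapped
 * into the standard {@link FunctionExpression} argument list.
 */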
public class ST_Centroid extends FunctionExpression {
public ST_Centroid(Expression expr) {
super("ST_CENTROID", wrapList(expr));
}
@Override
public FunctionExpression constructFunction(List<Expression> arguments) {
        throw new UnsupportedOperationException("function of ST_CENTROID has special arguments");
}
@Override
public void accept(Visitor visitor) {
visitor.visit(this);
}
}
| ronghuaxiang/Timo | src/main/parser/fm/liu/timo/parser/ast/expression/primary/function/spatial/ST_Centroid.java | Java | apache-2.0 | 701 |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.util.diff.myers;
/**
* Copy from https://code.google.com/p/java-diff-utils/.
* <p>
* Represents a snake in a diffpath.
* <p>
*
* {@link DiffNode DiffNodes} and {@link Snake Snakes} allow for compression
* of diffpaths, as each snake is represented by a single {@link Snake Snake}
* node and each contiguous series of insertions and deletions is represented
 * by a single {@link DiffNode DiffNode}.
*
* @author <a href="mailto:[email protected]">Juanco Anez</a>
*/
public final class Snake extends PathNode {
/**
* Constructs a snake node.
*
* @param i the position in the original sequence
* @param j the position in the revised sequence
* @param prev the previous node in the path.
*/
public Snake(int i, int j, PathNode prev) {
super(i, j, prev);
}
/**
* {@inheritDoc}
* @return true always
*/
public boolean isSnake() {
return true;
}
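  // Illustrative sketch of how a diff path chains together; it assumes DiffNode exposes the
  // same (i, j, prev) constructor as this class, which this file does not show:
  //
  //   PathNode path = new Snake(0, 0, null);   // leading run of matching elements
  //   path = new DiffNode(2, 1, path);         // a run of deletions/insertions
  //   path = new Snake(5, 4, path);            // trailing matching run; path.isSnake() == true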
}
| ChrisCanCompute/assertj-core | src/main/java/org/assertj/core/util/diff/myers/Snake.java | Java | apache-2.0 | 1541
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.car.app.model;
import static com.google.common.truth.Truth.assertThat;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.style.ForegroundColorSpan;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.internal.DoNotInstrument;
import java.util.ArrayList;
import java.util.List;
/** Tests for {@link CarText}. */
@RunWith(RobolectricTestRunner.class)
@DoNotInstrument
public class CarTextTest {
@Test
public void toCharSequence_noSpans() {
String text = "";
CarText carText = CarText.create(text);
assertThat(carText.toCharSequence().toString()).isEqualTo(text);
text = "Test string";
carText = CarText.create(text);
assertThat(carText.toCharSequence().toString()).isEqualTo(text);
}
@Test
public void toCharSequence_withSpans() {
String text = "Part of this text is red";
SpannableString spannable = new SpannableString(text);
// Add a foreground car color span.
ForegroundCarColorSpan foregroundCarColorSpan = ForegroundCarColorSpan.create(CarColor.RED);
spannable.setSpan(foregroundCarColorSpan, 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// Add a duration span
DurationSpan durationSpan = DurationSpan.create(46);
spannable.setSpan(durationSpan, 10, 12, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// Add a span that will be filtered out.
ForegroundColorSpan foregroundColorSpan = new ForegroundColorSpan(0xffff00);
spannable.setSpan(foregroundColorSpan, 2, 3, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// Create the car text from the spannable and verify it.
CarText carText = CarText.create(spannable);
CharSequence charSequence = carText.toCharSequence();
assertThat(charSequence.toString()).isEqualTo(text);
List<CarSpanInfo> carSpans = getCarSpans(charSequence);
assertThat(carSpans).hasSize(2);
CarSpanInfo carSpan = carSpans.get(0);
assertThat(carSpan.mCarSpan instanceof ForegroundCarColorSpan).isTrue();
assertThat(carSpan.mCarSpan).isEqualTo(foregroundCarColorSpan);
assertThat(carSpan.mStart).isEqualTo(0);
assertThat(carSpan.mEnd).isEqualTo(5);
assertThat(carSpan.mFlags).isEqualTo(Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
carSpan = carSpans.get(1);
assertThat(carSpan.mCarSpan instanceof DurationSpan).isTrue();
assertThat(carSpan.mCarSpan).isEqualTo(durationSpan);
assertThat(carSpan.mStart).isEqualTo(10);
assertThat(carSpan.mEnd).isEqualTo(12);
assertThat(carSpan.mFlags).isEqualTo(Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
@Test
public void variants_toCharSequence_withSpans() {
String text1 = "Part of this text is red";
SpannableString spannable1 = new SpannableString(text1);
ForegroundCarColorSpan foregroundCarColorSpan1 =
ForegroundCarColorSpan.create(CarColor.RED);
spannable1.setSpan(foregroundCarColorSpan1, 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
DurationSpan durationSpan1 = DurationSpan.create(46);
spannable1.setSpan(durationSpan1, 10, 12, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// Create a text where the string is different
String text2 = "Part of this text is blue";
SpannableString spannable2 = new SpannableString(text2);
ForegroundCarColorSpan foregroundCarColorSpan2 =
ForegroundCarColorSpan.create(CarColor.RED);
spannable2.setSpan(foregroundCarColorSpan2, 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
DurationSpan durationSpan2 = DurationSpan.create(46);
spannable2.setSpan(durationSpan2, 10, 12, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// Create the car text from the spannables and verify it.
CarText carText = new CarText.Builder(spannable1).addVariant(spannable2).build();
// Check that we have two variants.
assertThat(carText.toCharSequence()).isNotNull();
assertThat(carText.getVariants()).hasSize(1);
// Check the first variant.
CharSequence charSequence1 = carText.toCharSequence();
assertThat(charSequence1.toString()).isEqualTo(text1);
List<CarSpanInfo> carSpans1 = getCarSpans(charSequence1);
assertThat(carSpans1).hasSize(2);
CarSpanInfo carSpan = carSpans1.get(0);
assertThat(carSpan.mCarSpan instanceof ForegroundCarColorSpan).isTrue();
assertThat(carSpan.mCarSpan).isEqualTo(foregroundCarColorSpan1);
assertThat(carSpan.mStart).isEqualTo(0);
assertThat(carSpan.mEnd).isEqualTo(5);
assertThat(carSpan.mFlags).isEqualTo(Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
carSpan = carSpans1.get(1);
assertThat(carSpan.mCarSpan instanceof DurationSpan).isTrue();
assertThat(carSpan.mCarSpan).isEqualTo(durationSpan1);
assertThat(carSpan.mStart).isEqualTo(10);
assertThat(carSpan.mEnd).isEqualTo(12);
assertThat(carSpan.mFlags).isEqualTo(Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// Check the second variant.
CharSequence charSequence2 = carText.getVariants().get(0);
assertThat(charSequence2.toString()).isEqualTo(text2);
List<CarSpanInfo> carSpans = getCarSpans(charSequence2);
assertThat(carSpans).hasSize(2);
carSpan = carSpans.get(0);
assertThat(carSpan.mCarSpan instanceof ForegroundCarColorSpan).isTrue();
assertThat(carSpan.mCarSpan).isEqualTo(foregroundCarColorSpan2);
assertThat(carSpan.mStart).isEqualTo(0);
assertThat(carSpan.mEnd).isEqualTo(5);
assertThat(carSpan.mFlags).isEqualTo(Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
carSpan = carSpans.get(1);
assertThat(carSpan.mCarSpan instanceof DurationSpan).isTrue();
assertThat(carSpan.mCarSpan).isEqualTo(durationSpan2);
assertThat(carSpan.mStart).isEqualTo(10);
assertThat(carSpan.mEnd).isEqualTo(12);
assertThat(carSpan.mFlags).isEqualTo(Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
@Test
public void equals_and_hashCode() {
String text = "Part of this text is red";
SpannableString spannable = new SpannableString(text);
ForegroundCarColorSpan foregroundCarColorSpan = ForegroundCarColorSpan.create(CarColor.RED);
spannable.setSpan(foregroundCarColorSpan, 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
DurationSpan durationSpan = DurationSpan.create(46);
spannable.setSpan(durationSpan, 10, 12, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
CarText carText1 = CarText.create(spannable);
text = "Part of this text is red";
spannable = new SpannableString(text);
foregroundCarColorSpan = ForegroundCarColorSpan.create(CarColor.RED);
spannable.setSpan(foregroundCarColorSpan, 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
durationSpan = DurationSpan.create(46);
spannable.setSpan(durationSpan, 10, 12, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
CarText carText2 = CarText.create(spannable);
// Create a text where the string is different
text = "Part of this text is blue";
spannable = new SpannableString(text);
foregroundCarColorSpan = ForegroundCarColorSpan.create(CarColor.RED);
spannable.setSpan(foregroundCarColorSpan, 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
durationSpan = DurationSpan.create(46);
spannable.setSpan(durationSpan, 10, 12, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
CarText carText3 = CarText.create(spannable);
// Create a text where the spans change
text = "Part of this text is red";
spannable = new SpannableString(text);
foregroundCarColorSpan = ForegroundCarColorSpan.create(CarColor.RED);
spannable.setSpan(foregroundCarColorSpan, 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
CarText carText4 = CarText.create(spannable);
assertThat(carText1).isEqualTo(carText2);
assertThat(carText1.hashCode()).isEqualTo(carText2.hashCode());
assertThat(carText1).isEqualTo(carText1);
assertThat(carText1.hashCode()).isEqualTo(carText1.hashCode());
assertThat(carText1).isNotEqualTo(carText3);
assertThat(carText1.hashCode()).isNotEqualTo(carText3.hashCode());
assertThat(carText2).isNotEqualTo(carText4);
assertThat(carText2.hashCode()).isNotEqualTo(carText4.hashCode());
assertThat(carText3).isNotEqualTo(carText4);
assertThat(carText3.hashCode()).isNotEqualTo(carText4.hashCode());
}
private static List<CarSpanInfo> getCarSpans(CharSequence charSequence) {
Spanned spanned = (Spanned) charSequence;
List<CarSpanInfo> carSpans = new ArrayList<>();
for (Object span : spanned.getSpans(0, charSequence.length(), Object.class)) {
assertThat(span instanceof CarSpan).isTrue();
CarSpanInfo info = new CarSpanInfo();
info.mCarSpan = (CarSpan) span;
info.mStart = spanned.getSpanStart(span);
info.mEnd = spanned.getSpanEnd(span);
info.mFlags = spanned.getSpanFlags(span);
carSpans.add(info);
}
return carSpans;
}
private static class CarSpanInfo {
CarSpan mCarSpan;
int mStart;
int mEnd;
int mFlags;
}
}
| AndroidX/androidx | car/app/app/src/test/java/androidx/car/app/model/CarTextTest.java | Java | apache-2.0 | 10,053 |
/**
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.infinispan.test;
import javax.naming.InitialContext;
import org.infinispan.Cache;
import org.infinispan.manager.CacheContainer;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.jboss.msc.service.ServiceRegistry;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.swarm.arquillian.DefaultDeployment;
import static org.junit.Assert.assertEquals;
@RunWith(Arquillian.class)
@DefaultDeployment
public class InfinispanRemoteTest {
@ArquillianResource
ServiceRegistry registry;
@Test
public void testBasic() throws Exception {
CacheContainer cacheContainer =
(CacheContainer) new InitialContext().lookup("java:jboss/infinispan/container/server");
Cache cache = cacheContainer.getCache("default");
cache.put("ham", "biscuit");
assertEquals("biscuit", cache.get("ham"));
}
}
| bobmcwhirter/wildfly-swarm | testsuite/testsuite-infinispan/src/test/java/org/wildfly/swarm/infinispan/test/InfinispanRemoteTest.java | Java | apache-2.0 | 1,584 |
package org.activiti.bpmn.model.parse;
import org.activiti.bpmn.model.BaseElement;
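/**
 * A non-fatal problem reported while parsing a BPMN model: the warning message plus the
 * resource it relates to (an element id or XML local name) and the line/column in the source
 * XML where it was detected.
 */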
public class Warning {
protected String warningMessage;
protected String resource;
protected int line;
protected int column;
public Warning(String warningMessage, String localName, int lineNumber, int columnNumber) {
this.warningMessage = warningMessage;
this.resource = localName;
this.line = lineNumber;
this.column = columnNumber;
}
public Warning(String warningMessage, BaseElement element) {
this.warningMessage = warningMessage;
this.resource = element.getId();
line = element.getXmlRowNumber();
column = element.getXmlColumnNumber();
}
  @Override
  public String toString() {
return warningMessage + (resource != null ? " | " + resource : "") + " | line " + line + " | column " + column;
}
}
| roberthafner/flowable-engine | modules/flowable-bpmn-model/src/main/java/org/activiti/bpmn/model/parse/Warning.java | Java | apache-2.0 | 831 |
/*
* Copyright 2020 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.server.grpc;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.Duration;
import java.util.concurrent.CompletionStage;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
import com.example.helloworld.GreeterServiceGrpc.GreeterServiceBlockingStub;
import com.example.helloworld.GreeterServiceHandlerFactory;
import com.example.helloworld.GreeterServiceImpl;
import com.example.helloworld.HelloReply;
import com.example.helloworld.HelloRequest;
import com.google.common.collect.ImmutableList;
import com.linecorp.armeria.client.ClientRequestContextCaptor;
import com.linecorp.armeria.client.Clients;
import com.linecorp.armeria.client.grpc.GrpcClients;
import com.linecorp.armeria.common.SerializationFormat;
import com.linecorp.armeria.common.grpc.GrpcSerializationFormats;
import com.linecorp.armeria.common.logging.RequestLog;
import akka.actor.typed.ActorSystem;
import akka.actor.typed.javadsl.Adapter;
import akka.actor.typed.javadsl.Behaviors;
import akka.grpc.javadsl.WebHandler;
import akka.http.javadsl.ConnectHttp;
import akka.http.javadsl.Http;
import akka.http.javadsl.ServerBinding;
import akka.http.javadsl.model.HttpRequest;
import akka.http.javadsl.model.HttpResponse;
import akka.japi.Function;
import akka.stream.Materializer;
import akka.stream.SystemMaterializer;
class GrpcWebServiceTest {
private static ServerBinding serverBinding;
@BeforeAll
static void setUp() {
final ActorSystem<Object> system = ActorSystem.create(Behaviors.empty(), "GreeterServer");
final Materializer materializer = SystemMaterializer.get(system).materializer();
final Function<HttpRequest, CompletionStage<HttpResponse>> handler =
GreeterServiceHandlerFactory.create(new GreeterServiceImpl(system), system);
final Function<HttpRequest, CompletionStage<HttpResponse>> grpcWebServiceHandlers =
WebHandler.grpcWebHandler(ImmutableList.of(handler), system, materializer);
final CompletionStage<ServerBinding> future =
Http.get(Adapter.toClassic(system))
.bindAndHandleAsync(grpcWebServiceHandlers, ConnectHttp.toHost("127.0.0.1", 0),
materializer);
serverBinding = future.toCompletableFuture().join();
}
@AfterAll
static void tearDown() {
serverBinding.terminate(Duration.ofSeconds(10));
}
@ParameterizedTest
@ArgumentsSource(GrpcProtoWebSerializationFormats.class)
void grpcProtoWebClient(SerializationFormat serializationFormat) {
final String serverUri = serializationFormat.uriText() + "+http://127.0.0.1:" +
serverBinding.localAddress().getPort();
final GreeterServiceBlockingStub blockingStub =
GrpcClients.newClient(serverUri, GreeterServiceBlockingStub.class);
try (ClientRequestContextCaptor captor = Clients.newContextCaptor()) {
final HelloReply armeria =
blockingStub.sayHello(HelloRequest.newBuilder().setName("Armeria").build());
assertThat(armeria.getMessage()).isEqualTo("Hello, Armeria");
final RequestLog requestLog = captor.get().log().whenComplete().join();
assertThat(requestLog.responseContent().toString()).contains("Hello, Armeria");
}
}
private static class GrpcProtoWebSerializationFormats implements ArgumentsProvider {
@Override
public Stream<? extends Arguments> provideArguments(final ExtensionContext context) throws Exception {
return GrpcSerializationFormats.values().stream()
.filter(GrpcSerializationFormats::isGrpcWeb)
.filter(GrpcSerializationFormats::isProto)
.map(Arguments::of);
}
}
}
| line/armeria | it/grpcweb/src/test/java/com/linecorp/armeria/server/grpc/GrpcWebServiceTest.java | Java | apache-2.0 | 4,886 |
package org.hibernate.ogm.hiking.rest;
import java.util.List;
import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import org.hibernate.ogm.hiking.model.business.Order;
import org.hibernate.ogm.hiking.repository.business.OrderRepository;
@Path("/orders")
@Stateless
public class OrderResource {
@Inject
private OrderRepository orderRepository;
public OrderResource() {
}
@POST
@Path("/")
@Consumes("application/json")
@Produces("application/json")
public Order createOrder(Order order) {
order = orderRepository.createOrder(order);
return order;
}
@GET
@Path("/")
@Produces("application/json")
public List<Order> getAllOrders() {
return orderRepository.getAllOrders();
}
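    // For illustration only; the application context root and the JSON shape of Order depend
    // on the deployment and on the Order model, neither of which is shown in this file:
    //
    //   POST {context-root}/orders  with an Order as JSON  -> returns the persisted Order
    //   GET  {context-root}/orders                         -> returns all orders as a JSON array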
}
| hibernate/hibernate-demos | hibernate-ogm/hiking-demo/src/main/java/org/hibernate/ogm/hiking/rest/OrderResource.java | Java | apache-2.0 | 836 |
/*
* Copyright 2014 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.dataflow.nullnesspropagation.testdata;
import static com.google.errorprone.dataflow.nullnesspropagation.NullnessPropagationTest.triggerNullnessChecker;
import static com.google.errorprone.dataflow.nullnesspropagation.NullnessPropagationTest.triggerNullnessCheckerOnPrimitive;
/**
* Dataflow analysis cases for testing transfer functions in nullness propagation, primarily around
* conditionals.
*/
public class NullnessPropagationTransferCases1 {
public void conditionalNot(String foo) {
if (!(foo == null)) {
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(foo);
return;
}
// BUG: Diagnostic contains: (Null)
triggerNullnessChecker(foo);
}
public void conditionalOr1(String foo, String bar) {
if (foo == null || bar == null) {
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(bar);
return;
}
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(foo);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(bar);
}
public void conditionalOr2(String foo, String bar) {
if (foo != null || bar != null) {
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
public void conditionalOr3(String foo) {
if (foo != null || foo != null) {
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(foo);
}
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
public void conditionalOr4(String foo) {
// BUG: Diagnostic contains: (Non-null)
if (foo == null || triggerNullnessChecker(foo) == null) {
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
// BUG: Diagnostic contains: (Null)
if (foo != null || triggerNullnessChecker(foo) != null) {
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
public void conditionalAnd1(String foo, String bar) {
if (foo != null && bar != null) {
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(foo);
}
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
public void conditionalAnd2(String foo) {
if (foo == null && foo != null) {
// BUG: Diagnostic contains: (Bottom)
triggerNullnessChecker(foo);
return;
}
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
public void conditionalAnd3(String foo) {
// BUG: Diagnostic contains: (Null)
if (foo == null && triggerNullnessChecker(foo) == null) {
// Something
}
// BUG: Diagnostic contains: (Non-null)
if (foo != null && triggerNullnessChecker(foo) != null) {
// Something
}
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(foo);
}
public void ternary1(String nullable) {
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(nullable == null ? "" : nullable);
// BUG: Diagnostic contains: (Null)
triggerNullnessChecker(nullable != null ? null : nullable);
}
public void ternary2(boolean test, String nullable) {
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(test ? "yes" : "no");
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(test ? nullable : "");
}
public void valueOfComparisonItself() {
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(1 == 1);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(1 != 1);
boolean b;
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(b = (1 == 1));
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(b = (1 != 1));
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(!b);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(b || b);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(b && b);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessCheckerOnPrimitive(b = !b);
}
public void leastUpperBoundOfNonNullAndUnknown(String param, boolean b) {
if (b) {
param = "foo";
}
// BUG: Diagnostic contains: (Nullable)
triggerNullnessChecker(param);
}
public void stringConcatenation(String a, String b) {
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(a + b);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(null + b);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(a + 5);
// BUG: Diagnostic contains: (Non-null)
triggerNullnessChecker(null + (String) null);
}
}
| google/error-prone | core/src/test/java/com/google/errorprone/dataflow/nullnesspropagation/testdata/NullnessPropagationTransferCases1.java | Java | apache-2.0 | 5,613 |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.tests.components.gridlayout;
import com.vaadin.server.UserError;
import com.vaadin.server.VaadinRequest;
import com.vaadin.tests.components.AbstractReindeerTestUI;
import com.vaadin.tests.components.TestDateField;
import com.vaadin.ui.AbstractDateField;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.GridLayout;
import com.vaadin.ui.Panel;
public class LayoutAfterHidingError extends AbstractReindeerTestUI {
@Override
protected void setup(VaadinRequest request) {
final Panel panel = new Panel();
panel.setWidth("300px");
addComponent(panel);
GridLayout gl = new GridLayout();
gl.setWidth("100%");
panel.setContent(gl);
final AbstractDateField<?, ?> df = new TestDateField();
df.setWidth("100%");
gl.addComponent(df);
Button err = new Button("Set error");
err.addClickListener(new ClickListener() {
@Override
public void buttonClick(ClickEvent event) {
df.setComponentError(new UserError("foo"));
}
});
gl.addComponent(err);
err = new Button("Clear error");
err.addClickListener(new ClickListener() {
@Override
public void buttonClick(ClickEvent event) {
df.setComponentError(null);
}
});
gl.addComponent(err);
}
@Override
protected String getTestDescription() {
return "Setting an error icon for a component in GridLayout and then removing it should properly re-size the component";
}
@Override
protected Integer getTicketNumber() {
return 12011;
}
}
| peterl1084/framework | uitest/src/main/java/com/vaadin/tests/components/gridlayout/LayoutAfterHidingError.java | Java | apache-2.0 | 2,360 |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.common.data;
import com.google.gerrit.common.auth.SignInRequired;
import com.google.gerrit.reviewdb.Change;
import com.google.gerrit.reviewdb.PatchSet;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwtjsonrpc.client.RemoteJsonService;
import com.google.gwtjsonrpc.client.RpcImpl;
import com.google.gwtjsonrpc.client.RpcImpl.Version;
@RpcImpl(version = Version.V2_0)
public interface ChangeDetailService extends RemoteJsonService {
void changeDetail(Change.Id id, AsyncCallback<ChangeDetail> callback);
void includedInDetail(Change.Id id, AsyncCallback<IncludedInDetail> callback);
void patchSetDetail(PatchSet.Id key, AsyncCallback<PatchSetDetail> callback);
@SignInRequired
void patchSetPublishDetail(PatchSet.Id key,
AsyncCallback<PatchSetPublishDetail> callback);
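  // Illustrative client-side call (not part of this interface). It assumes Change.Id has an
  // int-accepting constructor, as used elsewhere in Gerrit:
  //
  //   changeDetailService.changeDetail(new Change.Id(100),
  //       new AsyncCallback<ChangeDetail>() {
  //         @Override public void onSuccess(ChangeDetail detail) { /* render the change */ }
  //         @Override public void onFailure(Throwable caught) { /* show the error */ }
  //       });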
}
| skurfuerst/gerrit | gerrit-common/src/main/java/com/google/gerrit/common/data/ChangeDetailService.java | Java | apache-2.0 | 1,450 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.usages;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.PlatformIcons;
import org.jetbrains.annotations.NotNull;
/**
 * Usage adapter that also records whether the underlying expression reads and/or writes the
 * accessed variable, and selects the matching read / write / read-write access icon.
 *
 * @author Eugene Zhuravlev
 */
public class ReadWriteAccessUsageInfo2UsageAdapter extends UsageInfo2UsageAdapter implements ReadWriteAccessUsage{
private final boolean myAccessedForReading;
private final boolean myAccessedForWriting;
public ReadWriteAccessUsageInfo2UsageAdapter(@NotNull UsageInfo usageInfo, final boolean accessedForReading, final boolean accessedForWriting) {
super(usageInfo);
myAccessedForReading = accessedForReading;
myAccessedForWriting = accessedForWriting;
if (myAccessedForReading && myAccessedForWriting) {
myIcon = PlatformIcons.VARIABLE_RW_ACCESS;
}
else if (myAccessedForWriting) {
myIcon = PlatformIcons.VARIABLE_WRITE_ACCESS; // If icon is changed, don't forget to change UTCompositeUsageNode.getIcon();
}
else if (myAccessedForReading){
myIcon = PlatformIcons.VARIABLE_READ_ACCESS; // If icon is changed, don't forget to change UTCompositeUsageNode.getIcon();
}
}
@Override
public boolean isAccessedForWriting() {
return myAccessedForWriting;
}
@Override
public boolean isAccessedForReading() {
return myAccessedForReading;
}
}
| ThiagoGarciaAlves/intellij-community | platform/usageView/src/com/intellij/usages/ReadWriteAccessUsageInfo2UsageAdapter.java | Java | apache-2.0 | 1,931 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.api.resources;
import java.util.HashSet;
import java.util.Set;
import org.apache.ambari.server.controller.spi.Resource;
import org.junit.Assert;
import org.junit.Test;
/**
* ViewInstanceResourceDefinition tests.
*/
public class ViewInstanceResourceDefinitionTest {
@Test
public void testGetPluralName() throws Exception {
ViewInstanceResourceDefinition viewInstanceResourceDefinition = getViewInstanceResourceDefinition();
Assert.assertEquals("instances", viewInstanceResourceDefinition.getPluralName());
}
@Test
public void testGetSingularName() throws Exception {
ViewInstanceResourceDefinition viewInstanceResourceDefinition = getViewInstanceResourceDefinition();
Assert.assertEquals("instance", viewInstanceResourceDefinition.getSingularName());
}
@Test
public void testGetSubResourceDefinitions() throws Exception {
ViewInstanceResourceDefinition viewInstanceResourceDefinition = getViewInstanceResourceDefinition();
Set<SubResourceDefinition> subResourceDefinitions = viewInstanceResourceDefinition.getSubResourceDefinitions();
Assert.assertEquals(3, subResourceDefinitions.size());
for (SubResourceDefinition subResourceDefinition : subResourceDefinitions) {
Resource.Type type = subResourceDefinition.getType();
Assert.assertTrue(type.name().equals("sub1") || type.name().equals("sub2") || type.equals(Resource.Type.ViewPrivilege));
}
}
public static ViewInstanceResourceDefinition getViewInstanceResourceDefinition() {
Set<SubResourceDefinition> subResourceDefinitions = new HashSet<>();
subResourceDefinitions.add(new SubResourceDefinition(new Resource.Type("sub1")));
subResourceDefinitions.add(new SubResourceDefinition(new Resource.Type("sub2")));
return new ViewInstanceResourceDefinition(subResourceDefinitions);
}
}
| sekikn/ambari | ambari-server/src/test/java/org/apache/ambari/server/api/resources/ViewInstanceResourceDefinitionTest.java | Java | apache-2.0 | 2,670 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.metadata.api;
import java.io.Serializable;
import java.util.Objects;
import org.apache.asterix.common.api.ExtensionId;
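/**
 * Identifies an extension metadata dataset by the id of the owning extension together with the
 * dataset name; equality and hash code are defined over both components.
 */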
public class ExtensionMetadataDatasetId implements Serializable {
private static final long serialVersionUID = 1L;
private final ExtensionId extensionId;
private final String datasetName;
public ExtensionMetadataDatasetId(ExtensionId extensionId, String datasetName) {
this.extensionId = extensionId;
this.datasetName = datasetName;
}
public ExtensionId getExtensionId() {
return extensionId;
}
public String getDatasetName() {
return datasetName;
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o instanceof ExtensionMetadataDatasetId) {
ExtensionMetadataDatasetId otherId = (ExtensionMetadataDatasetId) o;
return Objects.equals(extensionId, otherId.getExtensionId())
&& Objects.equals(datasetName, otherId.getDatasetName());
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(datasetName, extensionId);
}
}
| ecarm002/incubator-asterixdb | asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/ExtensionMetadataDatasetId.java | Java | apache-2.0 | 2,023 |
/**
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.store;
/**
* Represents a portion of a log. Provides the start and end offset of a log
*/
class FileSpan {
private Offset startOffset;
private Offset endOffset;
/**
* Creates a file span with the given start and end offsets.
* @param startOffset the start {@link Offset} of the FileSpan.
* @param endOffset the end {@link Offset} of the FileSpan.
* @throws IllegalArgumentException if {@code endOffset} < {@code startOffset}
*/
FileSpan(Offset startOffset, Offset endOffset) {
if (endOffset.compareTo(startOffset) < 0) {
throw new IllegalArgumentException("File span needs to be positive");
}
this.startOffset = startOffset;
this.endOffset = endOffset;
}
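  // For illustration, given Offset values segmentStart and segmentEnd with
  // segmentEnd.compareTo(segmentStart) > 0:
  //
  //   new FileSpan(segmentStart, segmentEnd);   // spans [segmentStart, segmentEnd]
  //   new FileSpan(segmentStart, segmentStart); // zero-length spans are allowed
  //   new FileSpan(segmentEnd, segmentStart);   // throws IllegalArgumentException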
/**
   * @return the start {@link Offset} represented by this FileSpan. Guaranteed to be &lt;= {@link #getEndOffset()}.
*/
Offset getStartOffset() {
return startOffset;
}
/**
   * @return the end {@link Offset} represented by this FileSpan. Guaranteed to be &gt;= {@link #getStartOffset()}.
*/
Offset getEndOffset() {
return endOffset;
}
@Override
public String toString() {
return "StartOffset=[" + startOffset + "], EndOffset=[" + endOffset + "]";
}
}
| nsivabalan/ambry | ambry-store/src/main/java/com.github.ambry.store/FileSpan.java | Java | apache-2.0 | 1,729 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.testframework.junits.common;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
import javax.cache.Cache;
import javax.cache.CacheException;
import javax.cache.integration.CompletionListener;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSession;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteCompute;
import org.apache.ignite.IgniteEvents;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteMessaging;
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.CacheAtomicWriteOrderMode;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.affinity.AffinityFunction;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.compute.ComputeTask;
import org.apache.ignite.compute.ComputeTaskFuture;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheAdapter;
import org.apache.ignite.internal.processors.cache.GridCacheExplicitLockSpan;
import org.apache.ignite.internal.processors.cache.GridCacheFuture;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheAdapter;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLocalPartition;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionState;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionTopology;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTopologyFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.colocated.GridDhtColocatedCache;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionMap2;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearCacheAdapter;
import org.apache.ignite.internal.processors.cache.local.GridLocalCache;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.PA;
import org.apache.ignite.internal.util.typedef.internal.LT;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.GridAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import org.apache.ignite.transactions.TransactionRollbackException;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.cache.CacheMode.LOCAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.NONE;
import static org.apache.ignite.internal.processors.cache.GridCacheUtils.isNearEnabled;
import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
/**
* Super class for all common tests.
*/
public abstract class GridCommonAbstractTest extends GridAbstractTest {
/** Cache peek modes array that consist of only ONHEAP mode. */
protected static final CachePeekMode[] ONHEAP_PEEK_MODES = new CachePeekMode[] {CachePeekMode.ONHEAP};
/**
* @param startGrid If {@code true}, then grid node will be auto-started.
*/
protected GridCommonAbstractTest(boolean startGrid) {
super(startGrid);
}
/** */
protected GridCommonAbstractTest() {
super(false);
}
/**
* @param idx Grid index.
* @return Cache.
*/
protected <K, V> IgniteCache<K, V> jcache(int idx) {
return grid(idx).cache(null);
}
/**
* @param idx Grid index.
* @param name Cache name.
* @return Cache.
*/
protected <K, V> IgniteCache<K, V> jcache(int idx, String name) {
return grid(idx).cache(name);
}
/**
* @param idx Grid index.
* @return Cache.
*/
protected <K, V> GridCacheAdapter<K, V> internalCache(int idx) {
return ((IgniteKernal)grid(idx)).internalCache(null);
}
/**
* @param idx Grid index.
* @param name Cache name.
* @return Cache.
*/
protected <K, V> GridCacheAdapter<K, V> internalCache(int idx, String name) {
return ((IgniteKernal)grid(idx)).internalCache(name);
}
/**
* @param ignite Grid.
* @param name Cache name.
* @return Cache.
*/
protected <K, V> GridCacheAdapter<K, V> internalCache(Ignite ignite, String name) {
return ((IgniteKernal)ignite).internalCache(name);
}
/**
* @param cache Cache.
* @return Cache.
*/
protected <K, V> GridCacheAdapter<K, V> internalCache(IgniteCache<K, V> cache) {
if (isMultiJvmObject(cache))
throw new UnsupportedOperationException("Oparetion can't be supported automatically for multi jvm " +
"(send closure instead).");
return ((IgniteKernal)cache.unwrap(Ignite.class)).internalCache(cache.getName());
}
/**
* @return Cache.
*/
protected <K, V> IgniteCache<K, V> jcache() {
return grid().cache(null);
}
/**
* @param cache Cache.
     * @return Set of keys in the given cache.
*/
@SuppressWarnings("TypeMayBeWeakened")
protected <K> Set<K> keySet(IgniteCache<K, ?> cache) {
Set<K> res = new HashSet<>();
for (Cache.Entry<K, ?> entry : cache)
res.add(entry.getKey());
return res;
}
/**
* @return Cache.
*/
protected <K, V> GridLocalCache<K, V> local() {
return (GridLocalCache<K, V>)((IgniteKernal)grid()).<K, V>internalCache();
}
/**
* @param cache Cache.
* @return DHT cache.
*/
protected static <K, V> GridDhtCacheAdapter<K, V> dht(IgniteCache<K,V> cache) {
return nearEnabled(cache) ? near(cache).dht() :
((IgniteKernal)cache.unwrap(Ignite.class)).<K, V>internalCache(cache.getName()).context().dht();
}
/**
* @return DHT cache.
*/
protected <K, V> GridDhtCacheAdapter<K, V> dht() {
return this.<K, V>near().dht();
}
/**
* @param idx Grid index.
* @return DHT cache.
*/
protected <K, V> GridDhtCacheAdapter<K, V> dht(int idx) {
return this.<K, V>near(idx).dht();
}
/**
* @param idx Grid index.
* @param cache Cache name.
* @return DHT cache.
*/
protected <K, V> GridDhtCacheAdapter<K, V> dht(int idx, String cache) {
return this.<K, V>near(idx, cache).dht();
}
/**
* @param idx Grid index.
* @param cache Cache name.
* @return Colocated cache.
*/
protected <K, V> GridDhtColocatedCache<K, V> colocated(int idx, String cache) {
return (GridDhtColocatedCache<K, V>)((IgniteKernal)grid(idx)).internalCache(cache);
}
/**
* @param cache Cache.
* @return {@code True} if near cache is enabled.
*/
private static <K, V> boolean nearEnabled(GridCacheAdapter<K, V> cache) {
return isNearEnabled(cache.configuration());
}
/**
* @param cache Cache.
* @return {@code True} if near cache is enabled.
*/
protected static <K, V> boolean nearEnabled(final IgniteCache<K,V> cache) {
CacheConfiguration cfg = GridAbstractTest.executeOnLocalOrRemoteJvm(cache,
new TestCacheCallable<K, V, CacheConfiguration>() {
private static final long serialVersionUID = 0L;
@Override public CacheConfiguration call(Ignite ignite, IgniteCache<K, V> cache) throws Exception {
return ((IgniteKernal)ignite).<K, V>internalCache(cache.getName()).context().config();
}
});
return isNearEnabled(cfg);
}
/**
* @param cache Cache.
* @return Near cache.
*/
private static <K, V> GridNearCacheAdapter<K, V> near(GridCacheAdapter<K, V> cache) {
return cache.context().near();
}
/**
* @param cache Cache.
* @return Near cache.
*/
protected static <K, V> GridNearCacheAdapter<K, V> near(IgniteCache<K,V> cache) {
return ((IgniteKernal)cache.unwrap(Ignite.class)).<K, V>internalCache(cache.getName()).context().near();
}
/**
* @param cache Cache.
* @return Colocated cache.
*/
protected static <K, V> GridDhtColocatedCache<K, V> colocated(IgniteCache<K,V> cache) {
return ((IgniteKernal)cache.unwrap(Ignite.class)).<K, V>internalCache(cache.getName()).context().colocated();
}
/**
* @param cache Cache.
* @param keys Keys.
* @param replaceExistingValues Replace existing values.
* @throws Exception If failed.
*/
@SuppressWarnings("unchecked")
protected static <K> void loadAll(Cache<K, ?> cache, final Set<K> keys, final boolean replaceExistingValues)
throws Exception {
IgniteCache<K, Object> cacheCp = (IgniteCache<K, Object>)cache;
GridAbstractTest.executeOnLocalOrRemoteJvm(cacheCp, new TestCacheRunnable<K, Object>() {
private static final long serialVersionUID = -3030833765012500545L;
@Override public void run(Ignite ignite, IgniteCache<K, Object> cache) throws Exception {
final AtomicReference<Exception> ex = new AtomicReference<>();
final CountDownLatch latch = new CountDownLatch(1);
cache.loadAll(keys, replaceExistingValues, new CompletionListener() {
@Override public void onCompletion() {
latch.countDown();
}
@Override public void onException(Exception e) {
ex.set(e);
latch.countDown();
}
});
latch.await();
if (ex.get() != null)
throw ex.get();
}
});
}
/**
* @param cache Cache.
     * @param key Key.
* @param replaceExistingValues Replace existing values.
* @throws Exception If failed.
*/
protected static <K> void load(Cache<K, ?> cache, K key, boolean replaceExistingValues) throws Exception {
loadAll(cache, Collections.singleton(key), replaceExistingValues);
}
/**
* @return Near cache.
*/
protected <K, V> GridNearCacheAdapter<K, V> near() {
return ((IgniteKernal)grid()).<K, V>internalCache().context().near();
}
/**
* @param idx Grid index.
* @return Near cache.
*/
protected <K, V> GridNearCacheAdapter<K, V> near(int idx) {
return ((IgniteKernal)grid(idx)).<K, V>internalCache().context().near();
}
/**
* @param idx Grid index.
* @return Colocated cache.
*/
protected <K, V> GridDhtColocatedCache<K, V> colocated(int idx) {
return (GridDhtColocatedCache<K, V>)((IgniteKernal)grid(idx)).<K, V>internalCache();
}
/**
* @param idx Grid index.
* @param cache Cache name.
* @return Near cache.
*/
protected <K, V> GridNearCacheAdapter<K, V> near(int idx, String cache) {
return ((IgniteKernal)grid(idx)).<K, V>internalCache(cache).context().near();
}
/** {@inheritDoc} */
@Override protected final boolean isJunitFrameworkClass() {
return true;
}
/** {@inheritDoc} */
@Override protected final void setUp() throws Exception {
// Disable SSL hostname verifier.
HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
@Override public boolean verify(String s, SSLSession sslSes) {
return true;
}
});
getTestCounters().incrementStarted();
super.setUp();
}
/** {@inheritDoc} */
@Override protected final void tearDown() throws Exception {
getTestCounters().incrementStopped();
super.tearDown();
}
/** {@inheritDoc} */
@Override protected final Ignite startGridsMultiThreaded(int cnt) throws Exception {
return startGridsMultiThreaded(cnt, true);
}
/**
* @param cnt Count.
* @param awaitPartMapExchange If we need to await partition map exchange.
* @return Ignite.
* @throws Exception If failed.
*/
protected final Ignite startGridsMultiThreaded(int cnt, boolean awaitPartMapExchange) throws Exception {
Ignite g = super.startGridsMultiThreaded(cnt);
if (awaitPartMapExchange)
awaitPartitionMapExchange();
return g;
}
/**
* @throws InterruptedException If interrupted.
*/
@SuppressWarnings("BusyWait")
protected void awaitPartitionMapExchange() throws InterruptedException {
awaitPartitionMapExchange(false, false);
}
/**
     * @param waitEvicts If {@code true}, waits for evictions to finish.
     * @param waitNode2PartUpdate If {@code true}, waits for the nodes' node2part info update to finish.
* @throws InterruptedException If interrupted.
*/
@SuppressWarnings("BusyWait")
protected void awaitPartitionMapExchange(boolean waitEvicts, boolean waitNode2PartUpdate) throws InterruptedException {
long timeout = 30_000;
for (Ignite g : G.allGrids()) {
IgniteKernal g0 = (IgniteKernal)g;
for (IgniteCacheProxy<?, ?> c : g0.context().cache().jcaches()) {
CacheConfiguration cfg = c.context().config();
if (cfg == null)
continue;
if (cfg.getCacheMode() == PARTITIONED &&
cfg.getRebalanceMode() != NONE &&
g.cluster().nodes().size() > 1) {
AffinityFunction aff = cfg.getAffinity();
GridDhtCacheAdapter<?, ?> dht = dht(c);
GridDhtPartitionTopology top = dht.topology();
for (int p = 0; p < aff.partitions(); p++) {
long start = 0;
for (int i = 0; ; i++) {
boolean match = false;
AffinityTopologyVersion readyVer = dht.context().shared().exchange().readyAffinityVersion();
if (readyVer.topologyVersion() > 0 && c.context().started()) {
// Must map on updated version of topology.
Collection<ClusterNode> affNodes =
dht.context().affinity().assignment(readyVer).idealAssignment().get(p);
int exp = affNodes.size();
GridDhtTopologyFuture topFut = top.topologyVersionFuture();
Collection<ClusterNode> owners = (topFut != null && topFut.isDone()) ?
top.nodes(p, AffinityTopologyVersion.NONE) : Collections.<ClusterNode>emptyList();
int actual = owners.size();
GridDhtLocalPartition loc = top.localPartition(p, readyVer, false);
if (affNodes.size() != owners.size() || !affNodes.containsAll(owners) ||
(waitEvicts && loc != null && loc.state() != GridDhtPartitionState.OWNING)) {
LT.warn(log(), null, "Waiting for topology map update [" +
"grid=" + g.name() +
", cache=" + cfg.getName() +
", cacheId=" + dht.context().cacheId() +
", topVer=" + top.topologyVersion() +
", p=" + p +
", affNodesCnt=" + exp +
", ownersCnt=" + actual +
", affNodes=" + F.nodeIds(affNodes) +
", owners=" + F.nodeIds(owners) +
", topFut=" + topFut +
", locNode=" + g.cluster().localNode() + ']');
}
else
match = true;
}
else {
LT.warn(log(), null, "Waiting for topology map update [" +
"grid=" + g.name() +
", cache=" + cfg.getName() +
", cacheId=" + dht.context().cacheId() +
", topVer=" + top.topologyVersion() +
", started=" + dht.context().started() +
", p=" + p +
", readVer=" + readyVer +
", locNode=" + g.cluster().localNode() + ']');
}
if (!match) {
if (i == 0)
start = System.currentTimeMillis();
if (System.currentTimeMillis() - start > timeout) {
U.dumpThreads(log);
throw new IgniteException("Timeout of waiting for topology map update [" +
"grid=" + g.name() +
", cache=" + cfg.getName() +
", cacheId=" + dht.context().cacheId() +
", topVer=" + top.topologyVersion() +
", p=" + p +
", readVer=" + readyVer +
", locNode=" + g.cluster().localNode() + ']');
}
Thread.sleep(200); // Busy wait.
continue;
}
if (i > 0)
log().warning("Finished waiting for topology map update [grid=" + g.name() +
", p=" + p + ", duration=" + (System.currentTimeMillis() - start) + "ms]");
break;
}
}
if (waitNode2PartUpdate) {
long start = System.currentTimeMillis();
boolean failed = true;
while (failed) {
failed = false;
for (GridDhtPartitionMap2 pMap : top.partitionMap(true).values()) {
if (failed)
break;
for (Map.Entry entry : pMap.entrySet()) {
if (System.currentTimeMillis() - start > timeout) {
U.dumpThreads(log);
throw new IgniteException("Timeout of waiting for partition state update [" +
"grid=" + g.name() +
", cache=" + cfg.getName() +
", cacheId=" + dht.context().cacheId() +
", topVer=" + top.topologyVersion() +
", locNode=" + g.cluster().localNode() + ']');
}
if (entry.getValue() != GridDhtPartitionState.OWNING) {
LT.warn(log(), null,
"Waiting for correct partition state, should be OWNING [state=" +
entry.getValue() + "]");
Thread.sleep(200); // Busy wait.
failed = true;
break;
}
}
}
}
}
}
}
}
}
/**
* @param ignite Node.
*/
public void dumpCacheDebugInfo(Ignite ignite) {
GridKernalContext ctx = ((IgniteKernal)ignite).context();
log.error("Cache information update [node=" + ignite.name() +
", client=" + ignite.configuration().isClientMode() + ']');
GridCacheSharedContext cctx = ctx.cache().context();
log.error("Pending transactions:");
for (IgniteInternalTx tx : cctx.tm().activeTransactions())
log.error(">>> " + tx);
log.error("Pending explicit locks:");
for (GridCacheExplicitLockSpan lockSpan : cctx.mvcc().activeExplicitLocks())
log.error(">>> " + lockSpan);
log.error("Pending cache futures:");
for (GridCacheFuture<?> fut : cctx.mvcc().activeFutures())
log.error(">>> " + fut);
log.error("Pending atomic cache futures:");
for (GridCacheFuture<?> fut : cctx.mvcc().atomicFutures())
log.error(">>> " + fut);
}
/**
* @param cache Cache.
* @return Affinity.
*/
public static <K> Affinity<K> affinity(IgniteCache<K, ?> cache) {
return cache.unwrap(Ignite.class).affinity(cache.getName());
}
/**
* @param cache Cache.
* @return Local node.
*/
public static ClusterNode localNode(IgniteCache<?, ?> cache) {
return cache.unwrap(Ignite.class).cluster().localNode();
}
/**
* @param cache Cache.
* @param cnt Keys count.
* @param startFrom Start value for keys search.
* @return Collection of keys for which given cache is primary.
*/
@SuppressWarnings("unchecked")
protected List<Integer> primaryKeys(IgniteCache<?, ?> cache, final int cnt, final int startFrom) {
return findKeys(cache, cnt, startFrom, 0);
}
/**
     * @param cache Cache.
     * @param cnt Keys count.
     * @param startFrom Start value for keys search.
     * @param type Key type: 0 - primary, 1 - backup, 2 - neither primary nor backup.
     * @return Collection of keys of the requested type.
*/
@SuppressWarnings("unchecked")
protected List<Integer> findKeys(IgniteCache<?, ?> cache, final int cnt, final int startFrom, final int type) {
assert cnt > 0 : cnt;
final List<Integer> found = new ArrayList<>(cnt);
final ClusterNode locNode = localNode(cache);
final Affinity<Integer> aff = (Affinity<Integer>)affinity(cache);
try {
GridTestUtils.waitForCondition(new PA() {
@Override public boolean apply() {
for (int i = startFrom; i < startFrom + 100_000; i++) {
Integer key = i;
boolean ok;
if (type == 0)
ok = aff.isPrimary(locNode, key);
else if (type == 1)
ok = aff.isBackup(locNode, key);
else if (type == 2)
ok = !aff.isPrimaryOrBackup(locNode, key);
else {
fail();
return false;
}
if (ok) {
if (!found.contains(key))
found.add(key);
if (found.size() == cnt)
return true;
}
}
return false;
}
}, 5000);
}
catch (IgniteCheckedException e) {
throw new IgniteException(e);
}
if (found.size() != cnt)
throw new IgniteException("Unable to find " + cnt + " requied keys.");
return found;
}
/**
     * @param iterable Iterable of cache entries.
     * @return Set containing all entries from the iterable.
*/
protected <K, V> Set<Cache.Entry<K, V>> entrySet(Iterable<Cache.Entry<K, V>> iterable){
Set<Cache.Entry<K, V>> set = new HashSet<>();
for (Cache.Entry<K, V> entry : iterable)
set.add(entry);
return set;
}
/**
* @param cache Cache.
* @param cnt Keys count.
* @return Collection of keys for which given cache is primary.
*/
protected List<Integer> primaryKeys(IgniteCache<?, ?> cache, int cnt) {
return primaryKeys(cache, cnt, 1);
}
/**
* @param cache Cache.
* @param cnt Keys count.
* @param startFrom Start value for keys search.
* @return Collection of keys for which given cache is backup.
*/
@SuppressWarnings("unchecked")
protected List<Integer> backupKeys(IgniteCache<?, ?> cache, int cnt, int startFrom) {
return findKeys(cache, cnt, startFrom, 1);
}
/**
* @param cache Cache.
* @param cnt Keys count.
* @param startFrom Start value for keys search.
* @return Collection of keys for which given cache is neither primary nor backup.
* @throws IgniteCheckedException If failed.
*/
@SuppressWarnings("unchecked")
protected List<Integer> nearKeys(IgniteCache<?, ?> cache, int cnt, int startFrom)
throws IgniteCheckedException {
return findKeys(cache, cnt, startFrom, 2);
}
/**
* @param cache Cache.
     * @return Key for which given cache is primary.
* @throws IgniteCheckedException If failed.
*/
protected Integer primaryKey(IgniteCache<?, ?> cache)
throws IgniteCheckedException {
return primaryKeys(cache, 1, 1).get(0);
}
/**
* @param cache Cache.
     * @return Key for which given cache is backup.
* @throws IgniteCheckedException If failed.
*/
protected Integer backupKey(IgniteCache<?, ?> cache)
throws IgniteCheckedException {
return backupKeys(cache, 1, 1).get(0);
}
/**
* @param cache Cache.
* @return Key for which given cache is neither primary nor backup.
* @throws IgniteCheckedException If failed.
*/
protected Integer nearKey(IgniteCache<?, ?> cache)
throws IgniteCheckedException {
return nearKeys(cache, 1, 1).get(0);
}
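    /*
     * A minimal usage sketch of the key helpers above (the cache index and values are arbitrary):
     *
     * IgniteCache<Integer, Integer> cache = jcache(0);
     * Integer primary = primaryKey(cache); // primary copy of this key lives on grid(0)
     * Integer backup = backupKey(cache);   // grid(0) holds a backup copy of this key
     * Integer near = nearKey(cache);       // grid(0) is neither primary nor backup for this key
     * cache.put(primary, 1);
     */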
/**
* @param key Key.
*/
protected <K, V> V dhtPeek(K key) throws IgniteCheckedException {
return localPeek(this.<K, V>dht(), key);
}
/**
* @param idx Index.
* @param key Key.
*/
protected <K, V> V dhtPeek(int idx, K key) throws IgniteCheckedException {
return localPeek(this.<K, V>dht(idx), key);
}
/**
* @param cache Cache.
* @param key Key.
*/
protected <K, V> V nearPeek(IgniteCache<K, V> cache, K key) throws IgniteCheckedException {
return localPeek(near(cache), key);
}
/**
* @param cache Cache.
* @param key Key.
*/
protected static <K, V> V dhtPeek(IgniteCache<K, V> cache, K key) throws IgniteCheckedException {
return localPeek(dht(cache), key);
}
/**
* @param cache Cache.
* @param key Key.
*/
protected static <K, V> V localPeek(GridCacheAdapter<K, V> cache, K key) throws IgniteCheckedException {
return cache.localPeek(key, ONHEAP_PEEK_MODES, null);
}
/**
* @param comp Compute.
* @param task Task.
* @param arg Task argument.
* @return Task future.
* @throws IgniteCheckedException If failed.
*/
protected <R> ComputeTaskFuture<R> executeAsync(IgniteCompute comp, ComputeTask task, @Nullable Object arg)
throws IgniteCheckedException {
comp = comp.withAsync();
assertNull(comp.execute(task, arg));
ComputeTaskFuture<R> fut = comp.future();
assertNotNull(fut);
return fut;
}
/**
* @param comp Compute.
* @param taskName Task name.
* @param arg Task argument.
* @return Task future.
* @throws IgniteCheckedException If failed.
*/
protected <R> ComputeTaskFuture<R> executeAsync(IgniteCompute comp, String taskName, @Nullable Object arg)
throws IgniteCheckedException {
comp = comp.withAsync();
assertNull(comp.execute(taskName, arg));
ComputeTaskFuture<R> fut = comp.future();
assertNotNull(fut);
return fut;
}
/**
* @param comp Compute.
* @param taskCls Task class.
* @param arg Task argument.
* @return Task future.
* @throws IgniteCheckedException If failed.
*/
@SuppressWarnings("unchecked")
protected <R> ComputeTaskFuture<R> executeAsync(IgniteCompute comp, Class taskCls, @Nullable Object arg)
throws IgniteCheckedException {
comp = comp.withAsync();
assertNull(comp.execute(taskCls, arg));
ComputeTaskFuture<R> fut = comp.future();
assertNotNull(fut);
return fut;
}
/**
* @param evts Events.
* @param filter Filter.
* @param types Events types.
* @return Future.
* @throws IgniteCheckedException If failed.
*/
protected <T extends Event> IgniteFuture<T> waitForLocalEvent(IgniteEvents evts,
@Nullable IgnitePredicate<T> filter, @Nullable int... types) throws IgniteCheckedException {
evts = evts.withAsync();
assertTrue(evts.isAsync());
assertNull(evts.waitForLocal(filter, types));
IgniteFuture<T> fut = evts.future();
assertNotNull(fut);
return fut;
}
/**
* @param e Exception.
     * @param exCls Expected exception class.
*/
protected <T extends IgniteException> void assertCacheExceptionWithCause(RuntimeException e, Class<T> exCls) {
if (exCls.isAssignableFrom(e.getClass()))
return;
if (e.getClass() != CacheException.class
|| e.getCause() == null || !exCls.isAssignableFrom(e.getCause().getClass()))
throw e;
}
/**
     * @param cache Cache.
     * @return Cache adapter from the cache context.
*/
protected <K, V> GridCacheAdapter<K, V> cacheFromCtx(IgniteCache<K, V> cache) {
return ((IgniteKernal)cache.unwrap(Ignite.class)).<K, V>internalCache(cache.getName()).context().cache();
}
/**
* @param ignite Grid.
* @return {@link org.apache.ignite.IgniteCompute} for given grid's local node.
*/
protected IgniteCompute forLocal(Ignite ignite) {
return ignite.compute(ignite.cluster().forLocal());
}
/**
* @param prj Projection.
* @return {@link org.apache.ignite.IgniteCompute} for given projection.
*/
protected IgniteCompute compute(ClusterGroup prj) {
return prj.ignite().compute(prj);
}
/**
* @param prj Projection.
* @return {@link org.apache.ignite.IgniteMessaging} for given projection.
*/
protected IgniteMessaging message(ClusterGroup prj) {
return prj.ignite().message(prj);
}
/**
* @param prj Projection.
     * @return {@link org.apache.ignite.IgniteEvents} for given projection.
*/
protected IgniteEvents events(ClusterGroup prj) {
return prj.ignite().events(prj);
}
/**
* @param cfg Configuration.
* @param cacheName Cache name.
* @return Cache configuration.
*/
protected CacheConfiguration cacheConfiguration(IgniteConfiguration cfg, String cacheName) {
for (CacheConfiguration ccfg : cfg.getCacheConfiguration()) {
if (F.eq(cacheName, ccfg.getName()))
return ccfg;
}
fail("Failed to find cache configuration for cache: " + cacheName);
return null;
}
/**
* @param key Key.
* @return Near cache for key.
*/
protected IgniteCache<Integer, Integer> nearCache(Integer key) {
List<Ignite> allGrids = Ignition.allGrids();
assertFalse("There are no alive nodes.", F.isEmpty(allGrids));
Affinity<Integer> aff = allGrids.get(0).affinity(null);
Collection<ClusterNode> nodes = aff.mapKeyToPrimaryAndBackups(key);
for (Ignite ignite : allGrids) {
if (!nodes.contains(ignite.cluster().localNode()))
return ignite.cache(null);
}
fail();
return null;
}
/**
* @param key Key.
* @param cacheName Cache name.
     * @return Primary cache for key.
*/
protected <K, V> IgniteCache<K, V> primaryCache(Object key, String cacheName) {
return primaryNode(key, cacheName).cache(cacheName);
}
/**
* @param key Key.
* @param cacheName Cache name.
     * @return Backup cache for key.
*/
protected IgniteCache<Integer, Integer> backupCache(Integer key, String cacheName) {
return backupNode(key, cacheName).cache(cacheName);
}
/**
* @param key Key.
* @param cacheName Cache name.
* @return Ignite instance which has primary cache for given key.
*/
protected Ignite primaryNode(Object key, String cacheName) {
List<Ignite> allGrids = Ignition.allGrids();
assertFalse("There are no alive nodes.", F.isEmpty(allGrids));
Ignite ignite = allGrids.get(0);
Affinity<Object> aff = ignite.affinity(cacheName);
ClusterNode node = aff.mapKeyToNode(key);
assertNotNull("There are no cache affinity nodes", node);
return grid(node);
}
/**
* @param key Key.
* @param cacheName Cache name.
* @return Ignite instance which has backup cache for given key.
*/
protected Ignite backupNode(Object key, String cacheName) {
List<Ignite> allGrids = Ignition.allGrids();
assertFalse("There are no alive nodes.", F.isEmpty(allGrids));
Ignite ignite = allGrids.get(0);
Affinity<Object> aff = ignite.affinity(cacheName);
Collection<ClusterNode> nodes = aff.mapKeyToPrimaryAndBackups(key);
assertTrue("Expected more than one node for key [key=" + key + ", nodes=" + nodes +']', nodes.size() > 1);
Iterator<ClusterNode> it = nodes.iterator();
it.next(); // Skip primary.
return grid(it.next());
}
/**
* @param key Key.
* @param cacheName Cache name.
* @return Ignite instances which has backup cache for given key.
*/
protected List<Ignite> backupNodes(Object key, String cacheName) {
List<Ignite> allGrids = Ignition.allGrids();
assertFalse("There are no alive nodes.", F.isEmpty(allGrids));
Ignite ignite = allGrids.get(0);
Affinity<Object> aff = ignite.affinity(cacheName);
Collection<ClusterNode> nodes = aff.mapKeyToPrimaryAndBackups(key);
assertTrue("Expected more than one node for key [key=" + key + ", nodes=" + nodes +']', nodes.size() > 1);
Iterator<ClusterNode> it = nodes.iterator();
it.next(); // Skip primary.
List<Ignite> backups = new ArrayList<>(nodes.size() - 1);
while (it.hasNext())
backups.add(grid(it.next()));
return backups;
}
/**
     * In an ATOMIC cache with CLOCK write order mode, if a key is updated from different nodes at the same time,
     * only one update wins and the others are ignored (this can happen in tests even when updates are executed from
     * different nodes sequentially). This delay is used to avoid such lost updates.
*
* @param cache Cache.
* @throws Exception If failed.
*/
protected void atomicClockModeDelay(IgniteCache cache) throws Exception {
CacheConfiguration ccfg = (CacheConfiguration)cache.getConfiguration(CacheConfiguration.class);
if (ccfg.getCacheMode() != LOCAL &&
ccfg.getAtomicityMode() == CacheAtomicityMode.ATOMIC &&
ccfg.getAtomicWriteOrderMode() == CacheAtomicWriteOrderMode.CLOCK)
U.sleep(50);
}
/**
* @param exp Expected.
* @param act Actual.
*/
protected void assertEqualsCollections(Collection<?> exp, Collection<?> act) {
if (exp.size() != act.size())
fail("Collections are not equal:\nExpected:\t" + exp + "\nActual:\t" + act);
Iterator<?> it1 = exp.iterator();
Iterator<?> it2 = act.iterator();
int idx = 0;
while (it1.hasNext()) {
Object item1 = it1.next();
Object item2 = it2.next();
if (!F.eq(item1, item2))
fail("Collections are not equal (position " + idx + "):\nExpected: " + exp + "\nActual: " + act);
idx++;
}
}
/**
* @param ignite Ignite instance.
* @param clo Closure.
* @return Result of closure execution.
* @throws Exception If failed.
*/
protected <T> T doInTransaction(Ignite ignite, Callable<T> clo) throws Exception {
return doInTransaction(ignite, PESSIMISTIC, REPEATABLE_READ, clo);
}
/**
* @param ignite Ignite instance.
* @param concurrency Transaction concurrency.
* @param isolation Transaction isolation.
* @param clo Closure.
* @return Result of closure execution.
* @throws Exception If failed.
*/
protected <T> T doInTransaction(Ignite ignite,
TransactionConcurrency concurrency,
TransactionIsolation isolation,
Callable<T> clo) throws Exception {
while (true) {
try (Transaction tx = ignite.transactions().txStart(concurrency, isolation)) {
T res = clo.call();
tx.commit();
return res;
}
catch (CacheException e) {
if (e.getCause() instanceof ClusterTopologyException) {
ClusterTopologyException topEx = (ClusterTopologyException)e.getCause();
topEx.retryReadyFuture().get();
}
else
throw e;
}
catch (ClusterTopologyException e) {
IgniteFuture<?> fut = e.retryReadyFuture();
fut.get();
}
catch (TransactionRollbackException ignore) {
// Safe to retry right away.
}
}
}
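    /*
     * A minimal usage sketch of doInTransaction() (the cache index, key and value are arbitrary):
     *
     * Integer res = doInTransaction(grid(0), new Callable<Integer>() {
     *     @Override public Integer call() {
     *         IgniteCache<Integer, Integer> cache = jcache(0);
     *         cache.put(1, 42);
     *         return cache.get(1);
     *     }
     * });
     */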
}
| DoudTechData/ignite | modules/core/src/test/java/org/apache/ignite/testframework/junits/common/GridCommonAbstractTest.java | Java | apache-2.0 | 40,340 |
// Copyright 2011 Google Inc. All Rights Reserved.
package com.google.devtools.simple.runtime.components.android;
import com.google.devtools.simple.common.ComponentCategory;
import com.google.devtools.simple.common.PropertyCategory;
import com.google.devtools.simple.common.YaVersion;
import com.google.devtools.simple.runtime.annotations.DesignerComponent;
import com.google.devtools.simple.runtime.annotations.DesignerProperty;
import com.google.devtools.simple.runtime.annotations.SimpleEvent;
import com.google.devtools.simple.runtime.annotations.SimpleFunction;
import com.google.devtools.simple.runtime.annotations.SimpleObject;
import com.google.devtools.simple.runtime.annotations.SimpleProperty;
import com.google.devtools.simple.runtime.annotations.UsesPermissions;
import com.google.devtools.simple.runtime.components.Component;
import com.google.devtools.simple.runtime.components.android.util.ClientLoginHelper;
import com.google.devtools.simple.runtime.components.android.util.IClientLoginHelper;
import com.google.devtools.simple.runtime.components.android.util.SdkLevel;
import com.google.devtools.simple.runtime.components.util.ErrorMessages;
import com.google.devtools.simple.runtime.events.EventDispatcher;
import android.app.Activity;
import android.app.ProgressDialog;
import android.os.AsyncTask;
import android.util.Log;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.HttpConnectionParams;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
/**
 * App Inventor Fusion Tables control.
*/
@DesignerComponent(version = YaVersion.FUSIONTABLESCONTROL_COMPONENT_VERSION,
description = "Non-visible component that communicates with fusiontables",
category = ComponentCategory.EXPERIMENTAL,
nonVisible = true,
iconName = "images/fusiontables.png")
@SimpleObject
@UsesPermissions(permissionNames =
"android.permission.INTERNET," +
"android.permission.ACCOUNT_MANAGER," +
"android.permission.MANAGE_ACCOUNTS," +
"android.permission.GET_ACCOUNTS," +
"android.permission.USE_CREDENTIALS")
public class FusiontablesControl extends AndroidNonvisibleComponent implements Component {
private static final String LOG_TAG = "fusion";
private static final String DIALOG_TEXT = "Choose an account to access FusionTables";
private static final String FUSION_QUERY_URL = "http://www.google.com/fusiontables/api/query";
private static final String FUSIONTABLES_SERVICE = "fusiontables";
private static final int SERVER_TIMEOUT_MS = 30000;
private final Activity activity;
private final IClientLoginHelper requestHelper;
private String query;
public FusiontablesControl(ComponentContainer componentContainer) {
super(componentContainer.$form());
this.activity = componentContainer.$context();
requestHelper = createClientLoginHelper(DIALOG_TEXT, FUSIONTABLES_SERVICE);
}
@DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_STRING,
defaultValue = "show tables")
@SimpleProperty
public void Query(String query) {
this.query = query;
}
@SimpleProperty(
category = PropertyCategory.BEHAVIOR)
public String Query() {
return query;
}
@SimpleFunction
public void DoQuery() {
if (requestHelper != null) {
new QueryProcessor().execute(query);
} else {
form.dispatchErrorOccurredEvent(this, "DoQuery",
ErrorMessages.ERROR_FUNCTIONALITY_NOT_SUPPORTED_FUSIONTABLES_CONTROL);
}
}
@SimpleEvent
public void GotResult(String result) {
// Invoke the application's "GotValue" event handler
EventDispatcher.dispatchEvent(this, "GotResult", result);
}
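  /*
   * A minimal usage sketch (the component container and the table id below are hypothetical):
   *
   * FusiontablesControl fusion = new FusiontablesControl(container);
   * fusion.Query("select * from 1234567");
   * fusion.DoQuery(); // the result is delivered asynchronously via the GotResult event
   */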
/* TODO(user): figure out why this isn't working
@SimpleFunction
public void ForgetLogin() {
if (requestHelper != null) {
requestHelper.forgetAccountName();
}
}
*/
private IClientLoginHelper createClientLoginHelper(String accountPrompt, String service) {
if (SdkLevel.getLevel() >= SdkLevel.LEVEL_ECLAIR) {
HttpClient httpClient = new DefaultHttpClient();
HttpConnectionParams.setSoTimeout(httpClient.getParams(), SERVER_TIMEOUT_MS);
HttpConnectionParams.setConnectionTimeout(httpClient.getParams(), SERVER_TIMEOUT_MS);
return new ClientLoginHelper(activity, service, accountPrompt, httpClient);
}
return null;
}
/**
* Generate a FusionTables POST request
*/
private HttpUriRequest genFusiontablesQuery(String query) throws IOException {
HttpPost request = new HttpPost(FUSION_QUERY_URL);
ArrayList<BasicNameValuePair> pair = new ArrayList<BasicNameValuePair>(1);
pair.add(new BasicNameValuePair("sql", query));
UrlEncodedFormEntity entity = new UrlEncodedFormEntity(pair, "UTF-8");
entity.setContentType("application/x-www-form-urlencoded");
request.setEntity(entity);
return request;
}
/**
* Send the fusiontables request to the server and get back the results.
*
*/
private class QueryProcessor extends AsyncTask<String, Void, String> {
private ProgressDialog progress = null;
@Override
protected void onPreExecute() {
progress = ProgressDialog.show(activity, "Fusiontables", "processing query...", true);
}
/**
* Query the fusiontables server.
     * @return The resultant table, error page, or exception message.
*/
@Override
protected String doInBackground(String... params) {
try {
HttpUriRequest request = genFusiontablesQuery(params[0]);
Log.d(LOG_TAG, "Fetching: " + params[0]);
HttpResponse response = requestHelper.execute(request);
ByteArrayOutputStream outstream = new ByteArrayOutputStream();
response.getEntity().writeTo(outstream);
Log.d(LOG_TAG, "Response: " + response.getStatusLine().toString());
return outstream.toString();
} catch (IOException e) {
e.printStackTrace();
return e.getMessage();
}
}
/**
* Got the results. We could parse the CSV and do something useful with it.
*/
@Override
protected void onPostExecute(String result) {
progress.dismiss();
GotResult(result);
      // (result.startsWith("<HTML>") ? Html.fromHtml(result) : result);
}
}
}
| mark-friedman/app-inventor-from-google-code | src/components/runtime/components/android/FusiontablesControl.java | Java | apache-2.0 | 6,559 |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.rest;
import alluxio.ConfigurationTestUtils;
import alluxio.master.AlluxioJobMasterRestServiceHandler;
import alluxio.master.LocalAlluxioJobCluster;
import alluxio.security.LoginUserTestUtils;
import com.google.common.collect.Maps;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.Map;
import javax.ws.rs.HttpMethod;
/**
* Tests for {@link AlluxioJobMasterRestServiceHandler}.
*/
public final class JobMasterRestApiTest extends RestApiTest {
private static final Map<String, String> NO_PARAMS = Maps.newHashMap();
private LocalAlluxioJobCluster mJobCluster;
@Before
public void before() throws Exception {
mJobCluster = new LocalAlluxioJobCluster();
mJobCluster.start();
mHostname = mJobCluster.getHostname();
mPort = mJobCluster.getMaster().getWebAddress().getPort();
mServicePrefix = AlluxioJobMasterRestServiceHandler.SERVICE_PREFIX;
}
@After
public void after() throws Exception {
mJobCluster.stop();
LoginUserTestUtils.resetLoginUser();
ConfigurationTestUtils.resetConfiguration();
}
@Test
public void getInfo() throws Exception {
new TestCase(mHostname, mPort, getEndpoint(AlluxioJobMasterRestServiceHandler.GET_INFO),
NO_PARAMS, HttpMethod.GET, null).call();
}
}
| apc999/alluxio | tests/src/test/java/alluxio/client/rest/JobMasterRestApiTest.java | Java | apache-2.0 | 1,823 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.servlet;
/**
* Defines an exception that a servlet or filter throws to indicate
* that it is permanently or temporarily unavailable.
*
* <p>When a servlet or filter is permanently unavailable, something is wrong
* with it, and it cannot handle
* requests until some action is taken. For example, a servlet
* might be configured incorrectly, or a filter's state may be corrupted.
* The component should log both the error and the corrective action
* that is needed.
*
* <p>A servlet or filter is temporarily unavailable if it cannot handle
* requests momentarily due to some system-wide problem. For example,
* a third-tier server might not be accessible, or there may be
* insufficient memory or disk storage to handle requests. A system
* administrator may need to take corrective action.
*
* <p>Servlet containers can safely treat both types of unavailable
* exceptions in the same way. However, treating temporary unavailability
* effectively makes the servlet container more robust. Specifically,
* the servlet container might block requests to the servlet or filter for a period
* of time suggested by the exception, rather than rejecting them until
* the servlet container restarts.
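 *
 * <p>A minimal usage sketch (the servlet method body and the database check shown
 * here are hypothetical; only the exception itself belongs to this API):
 *
 * <pre>
 * public void service(ServletRequest req, ServletResponse res)
 *     throws ServletException, IOException {
 *     if (!isDatabaseReachable()) {
 *         // temporarily unavailable; suggest retrying in about 30 seconds
 *         throw new UnavailableException("backing database unreachable", 30);
 *     }
 *     // ... normal request handling ...
 * }
 * </pre>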
*
*
* @author Various
* @version $Version$
*
*/
public class UnavailableException
extends ServletException {
private Servlet servlet; // what's unavailable
private boolean permanent; // needs admin action?
private int seconds; // unavailability estimate
/**
*
* @deprecated As of Java Servlet API 2.2, use {@link
* #UnavailableException(String)} instead.
*
* @param servlet the <code>Servlet</code> instance that is
* unavailable
*
* @param msg a <code>String</code> specifying the
* descriptive message
*
*/
public UnavailableException(Servlet servlet, String msg) {
super(msg);
this.servlet = servlet;
permanent = true;
}
/**
* @deprecated As of Java Servlet API 2.2, use {@link
* #UnavailableException(String, int)} instead.
*
* @param seconds an integer specifying the number of seconds
* the servlet expects to be unavailable; if
* zero or negative, indicates that the servlet
* can't make an estimate
*
* @param servlet the <code>Servlet</code> that is unavailable
*
* @param msg a <code>String</code> specifying the descriptive
* message, which can be written to a log file or
* displayed for the user.
*
*/
public UnavailableException(int seconds, Servlet servlet, String msg) {
super(msg);
this.servlet = servlet;
if (seconds <= 0)
this.seconds = -1;
else
this.seconds = seconds;
permanent = false;
}
/**
*
* Constructs a new exception with a descriptive
* message indicating that the servlet is permanently
* unavailable.
*
* @param msg a <code>String</code> specifying the
* descriptive message
*
*/
public UnavailableException(String msg) {
super(msg);
permanent = true;
}
/**
* Constructs a new exception with a descriptive message
* indicating that the servlet is temporarily unavailable
* and giving an estimate of how long it will be unavailable.
*
* <p>In some cases, the servlet cannot make an estimate. For
* example, the servlet might know that a server it needs is
* not running, but not be able to report how long it will take
* to be restored to functionality. This can be indicated with
* a negative or zero value for the <code>seconds</code> argument.
*
* @param msg a <code>String</code> specifying the
* descriptive message, which can be written
* to a log file or displayed for the user.
*
* @param seconds an integer specifying the number of seconds
* the servlet expects to be unavailable; if
* zero or negative, indicates that the servlet
* can't make an estimate
*
*/
public UnavailableException(String msg, int seconds) {
super(msg);
if (seconds <= 0)
this.seconds = -1;
else
this.seconds = seconds;
permanent = false;
}
/**
*
* Returns a <code>boolean</code> indicating
* whether the servlet is permanently unavailable.
* If so, something is wrong with the servlet, and the
* system administrator must take some corrective action.
*
* @return <code>true</code> if the servlet is
* permanently unavailable; <code>false</code>
* if the servlet is available or temporarily
* unavailable
*
*/
public boolean isPermanent() {
return permanent;
}
/**
* @deprecated As of Java Servlet API 2.2, with no replacement.
*
* Returns the servlet that is reporting its unavailability.
*
* @return the <code>Servlet</code> object that is
* throwing the <code>UnavailableException</code>
*
*/
public Servlet getServlet() {
return servlet;
}
/**
* Returns the number of seconds the servlet expects to
* be temporarily unavailable.
*
* <p>If this method returns a negative number, the servlet
* is permanently unavailable or cannot provide an estimate of
* how long it will be unavailable. No effort is
* made to correct for the time elapsed since the exception was
* first reported.
*
* @return an integer specifying the number of seconds
* the servlet will be temporarily unavailable,
* or a negative number if the servlet is permanently
* unavailable or cannot make an estimate
*
*/
public int getUnavailableSeconds() {
return permanent ? -1 : seconds;
}
}
| salyh/javamailspec | geronimo-servlet_2.5_spec/src/main/java/javax/servlet/UnavailableException.java | Java | apache-2.0 | 6,729 |
/**
*
*/
package org.activejpa.enhancer;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A class loader that emulates the functionality of the agent. To be used for testing in environments where loading the agent is not possible.
*
* WARNING: Use it only for testing. Not recommended for production use.
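 *
 * A minimal usage sketch (the entity class name below is hypothetical):
 * <pre>
 * MyClassLoader loader = new MyClassLoader();
 * Class enhancedClass = loader.loadClass("com.example.model.Order");
 * // instances of enhancedClass carry the enhancements normally applied by the agent
 * </pre>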
*
* @author ganeshs
*
*/
public class MyClassLoader extends ClassLoader {
private DomainClassEnhancer enhancer;
	private boolean initialized;
private List<String> excludedPackages = new ArrayList<String>(Arrays.asList("java.", "javax.", "sun.", "org.mockito"));
private Logger logger = LoggerFactory.getLogger(MyClassLoader.class);
public MyClassLoader() {
this(Thread.currentThread().getContextClassLoader());
}
public MyClassLoader(ClassLoader loader) {
this(loader, null);
}
public MyClassLoader(ClassLoader loader, List<String> excludedPackages) {
super(loader);
if (excludedPackages != null) {
this.excludedPackages.addAll(excludedPackages);
}
}
private void init() {
enhancer = new DomainClassEnhancer();
		initialized = true;
}
@Override
protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
		if (!initialized) {
init();
}
logger.trace("Loading the class " + name);
for (String excluded : excludedPackages) {
if (name.startsWith(excluded)) {
return super.loadClass(name, resolve);
}
}
Class<?> clazz = findLoadedClass(name);
if (clazz != null) {
logger.trace("Class " + name + " is already loaded");
return clazz;
}
byte[] bytes = null;
if (enhancer.canEnhance(name)) {
bytes = enhancer.enhance(this, name);
} else {
InputStream classData = getResourceAsStream(name.replace('.', '/') + ".class");
if(classData == null) {
return super.loadClass(name, resolve);
}
try {
bytes = toByteArray(classData);
} catch (Exception e) {
logger.debug("Failed while converting classdata to bytes for the class " + name, e);
return super.loadClass(name, resolve);
} finally {
if (classData != null) {
try {
classData.close();
} catch (IOException e) {
// Ignore. Can't do much
logger.trace("Failed while closing the classData for the class " + name, e);
}
}
}
}
clazz = defineClass(name, bytes, 0, bytes.length);
if (resolve) {
resolveClass(clazz);
}
return clazz;
}
private static byte[] toByteArray(InputStream is) throws IOException {
ByteArrayOutputStream os = new ByteArrayOutputStream();
byte[] bytes = new byte[4096];
int n = 0;
while (-1 != (n = is.read(bytes))) {
os.write(bytes, 0, n);
}
return os.toByteArray();
}
}
| juanignacionogueira/activejpa | activejpa-core/src/main/java/org/activejpa/enhancer/MyClassLoader.java | Java | apache-2.0 | 2,916 |
package org.apache.lucene.index.memory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.OrdTermState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.RAMDirectory; // for javadocs
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ByteBlockPool;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.IntBlockPool.SliceReader;
import org.apache.lucene.util.IntBlockPool.SliceWriter;
import org.apache.lucene.util.IntBlockPool;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.RecyclingByteBlockAllocator;
import org.apache.lucene.util.RecyclingIntBlockAllocator;
/**
* High-performance single-document main memory Apache Lucene fulltext search index.
*
* <h4>Overview</h4>
*
* This class is a replacement/substitute for a large subset of
* {@link RAMDirectory} functionality. It is designed to
* enable maximum efficiency for on-the-fly matchmaking combining structured and
* fuzzy fulltext search in realtime streaming applications such as Nux XQuery based XML
* message queues, publish-subscribe systems for Blogs/newsfeeds, text chat, data acquisition and
* distribution systems, application level routers, firewalls, classifiers, etc.
* Rather than targeting fulltext search of infrequent queries over huge persistent
* data archives (historic search), this class targets fulltext search of huge
* numbers of queries over comparatively small transient realtime data (prospective
* search).
* For example as in
* <pre class="prettyprint">
* float score = search(String text, Query query)
* </pre>
* <p>
* Each instance can hold at most one Lucene "document", with a document containing
* zero or more "fields", each field having a name and a fulltext value. The
* fulltext value is tokenized (split and transformed) into zero or more index terms
* (aka words) on <code>addField()</code>, according to the policy implemented by an
* Analyzer. For example, Lucene analyzers can split on whitespace, normalize to lower case
* for case insensitivity, ignore common terms with little discriminatory value such as "he", "in", "and" (stop
* words), reduce the terms to their natural linguistic root form such as "fishing"
* being reduced to "fish" (stemming), resolve synonyms/inflexions/thesauri
* (upon indexing and/or querying), etc. For details, see
* <a target="_blank" href="http://today.java.net/pub/a/today/2003/07/30/LuceneIntro.html">Lucene Analyzer Intro</a>.
* <p>
* Arbitrary Lucene queries can be run against this class - see <a target="_blank"
* href="{@docRoot}/../queryparser/org/apache/lucene/queryparser/classic/package-summary.html#package_description">
* Lucene Query Syntax</a>
* as well as <a target="_blank"
* href="http://today.java.net/pub/a/today/2003/11/07/QueryParserRules.html">Query Parser Rules</a>.
* Note that a Lucene query selects on the field names and associated (indexed)
* tokenized terms, not on the original fulltext(s) - the latter are not stored
* but rather thrown away immediately after tokenization.
* <p>
* For some interesting background information on search technology, see Bob Wyman's
* <a target="_blank"
* href="http://bobwyman.pubsub.com/main/2005/05/mary_hodder_poi.html">Prospective Search</a>,
* Jim Gray's
* <a target="_blank" href="http://www.acmqueue.org/modules.php?name=Content&pa=showpage&pid=293&page=4">
* A Call to Arms - Custom subscriptions</a>, and Tim Bray's
* <a target="_blank"
* href="http://www.tbray.org/ongoing/When/200x/2003/07/30/OnSearchTOC">On Search, the Series</a>.
*
*
* <h4>Example Usage</h4>
*
* <pre class="prettyprint">
* Analyzer analyzer = new SimpleAnalyzer(version);
* MemoryIndex index = new MemoryIndex();
* index.addField("content", "Readings about Salmons and other select Alaska fishing Manuals", analyzer);
* index.addField("author", "Tales of James", analyzer);
* QueryParser parser = new QueryParser(version, "content", analyzer);
* float score = index.search(parser.parse("+author:james +salmon~ +fish* manual~"));
* if (score > 0.0f) {
* System.out.println("it's a match");
* } else {
* System.out.println("no match found");
* }
* System.out.println("indexData=" + index.toString());
* </pre>
*
*
* <h4>Example XQuery Usage</h4>
*
* <pre class="prettyprint">
* (: An XQuery that finds all books authored by James that have something to do with "salmon fishing manuals", sorted by relevance :)
* declare namespace lucene = "java:nux.xom.pool.FullTextUtil";
* declare variable $query := "+salmon~ +fish* manual~"; (: any arbitrary Lucene query can go here :)
*
* for $book in /books/book[author="James" and lucene:match(abstract, $query) > 0.0]
* let $score := lucene:match($book/abstract, $query)
* order by $score descending
* return $book
* </pre>
*
*
* <h4>No thread safety guarantees</h4>
*
* An instance can be queried multiple times with the same or different queries,
* but an instance is not thread-safe. If desired use idioms such as:
* <pre class="prettyprint">
* MemoryIndex index = ...
* synchronized (index) {
* // read and/or write index (i.e. add fields and/or query)
* }
* </pre>
*
*
* <h4>Performance Notes</h4>
*
* Internally there's a new data structure geared towards efficient indexing
* and searching, plus the necessary support code to seamlessly plug into the Lucene
* framework.
* <p>
* This class performs very well for very small texts (e.g. 10 chars)
* as well as for large texts (e.g. 10 MB) and everything in between.
* Typically, it is about 10-100 times faster than <code>RAMDirectory</code>.
* Note that <code>RAMDirectory</code> has particularly
* large efficiency overheads for small to medium sized texts, both in time and space.
* Indexing a field with N tokens takes O(N) in the best case, and O(N logN) in the worst
* case. Memory consumption is probably larger than for <code>RAMDirectory</code>.
* <p>
* Example throughput of many simple term queries over a single MemoryIndex:
* ~500000 queries/sec on a MacBook Pro, jdk 1.5.0_06, server VM.
* As always, your mileage may vary.
* <p>
* If you're curious about
* the whereabouts of bottlenecks, run java 1.5 with the non-perturbing '-server
* -agentlib:hprof=cpu=samples,depth=10' flags, then study the trace log and
* correlate its hotspot trailer with its call stack headers (see <a
* target="_blank"
* href="http://java.sun.com/developer/technicalArticles/Programming/HPROF.html">
* hprof tracing </a>).
*
*/
public class MemoryIndex {
/** info for each field: Map<String fieldName, Info field> */
private final HashMap<String,Info> fields = new HashMap<String,Info>();
/** fields sorted ascending by fieldName; lazily computed on demand */
private transient Map.Entry<String,Info>[] sortedFields;
private final boolean storeOffsets;
private static final boolean DEBUG = false;
private final ByteBlockPool byteBlockPool;
private final IntBlockPool intBlockPool;
// private final IntBlockPool.SliceReader postingsReader;
private final IntBlockPool.SliceWriter postingsWriter;
private HashMap<String,FieldInfo> fieldInfos = new HashMap<String,FieldInfo>();
private Counter bytesUsed;
/**
* Sorts term entries into ascending order; also works for
* Arrays.binarySearch() and Arrays.sort()
*/
private static final Comparator<Object> termComparator = new Comparator<Object>() {
@Override
@SuppressWarnings({"unchecked","rawtypes"})
public int compare(Object o1, Object o2) {
if (o1 instanceof Map.Entry<?,?>) o1 = ((Map.Entry<?,?>) o1).getKey();
if (o2 instanceof Map.Entry<?,?>) o2 = ((Map.Entry<?,?>) o2).getKey();
if (o1 == o2) return 0;
return ((Comparable) o1).compareTo((Comparable) o2);
}
};
/**
* Constructs an empty instance.
*/
public MemoryIndex() {
this(false);
}
/**
* Constructs an empty instance that can optionally store the start and end
* character offset of each token term in the text. This can be useful for
* highlighting of hit locations with the Lucene highlighter package.
* Protected until the highlighter package matures, so that this can actually
* be meaningfully integrated.
*
* @param storeOffsets
* whether or not to store the start and end character offset of
* each token term in the text
*/
public MemoryIndex(boolean storeOffsets) {
this(storeOffsets, 0);
}
/**
* Expert: This constructor accepts an upper limit for the number of bytes that should be reused if this instance is {@link #reset()}.
* @param storeOffsets <code>true</code> if offsets should be stored
* @param maxReusedBytes the number of bytes that should remain in the internal memory pools after {@link #reset()} is called
*/
MemoryIndex(boolean storeOffsets, long maxReusedBytes) {
this.storeOffsets = storeOffsets;
this.bytesUsed = Counter.newCounter();
final int maxBufferedByteBlocks = (int)((maxReusedBytes/2) / ByteBlockPool.BYTE_BLOCK_SIZE );
final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT));
assert (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT) <= maxReusedBytes;
byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed));
intBlockPool = new IntBlockPool(new RecyclingIntBlockAllocator(IntBlockPool.INT_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed));
postingsWriter = new SliceWriter(intBlockPool);
}
/**
* Convenience method; Tokenizes the given field text and adds the resulting
* terms to the index; Equivalent to adding an indexed non-keyword Lucene
* {@link org.apache.lucene.document.Field} that is tokenized, not stored,
   * termVectorStored with positions (or termVectorStored with positions and offsets).
*
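   * <p>
   * A typical usage sketch (the field name, sample text, query and the
   * <code>analyzer</code> variable are illustrative only):
   * <pre class="prettyprint">
   * MemoryIndex index = new MemoryIndex();
   * index.addField("title", "Lucene stores this index entirely in memory", analyzer);
   * float score = index.search(new TermQuery(new Term("title", "memory")));
   * </pre>
   *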
* @param fieldName
* a name to be associated with the text
* @param text
* the text to tokenize and index.
* @param analyzer
* the analyzer to use for tokenization
*/
public void addField(String fieldName, String text, Analyzer analyzer) {
if (fieldName == null)
throw new IllegalArgumentException("fieldName must not be null");
if (text == null)
throw new IllegalArgumentException("text must not be null");
if (analyzer == null)
throw new IllegalArgumentException("analyzer must not be null");
TokenStream stream;
try {
stream = analyzer.tokenStream(fieldName, text);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
addField(fieldName, stream, 1.0f, analyzer.getPositionIncrementGap(fieldName), analyzer.getOffsetGap(fieldName));
}
/**
* Convenience method; Creates and returns a token stream that generates a
* token for each keyword in the given collection, "as is", without any
* transforming text analysis. The resulting token stream can be fed into
* {@link #addField(String, TokenStream)}, perhaps wrapped into another
* {@link org.apache.lucene.analysis.TokenFilter}, as desired.
*
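   * <p>
   * For example (the field name and keyword values are illustrative only):
   * <pre class="prettyprint">
   * MemoryIndex index = new MemoryIndex();
   * index.addField("filenames", index.keywordTokenStream(Arrays.asList("a.txt", "b.txt")));
   * </pre>
   *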
* @param keywords
* the keywords to generate tokens for
* @return the corresponding token stream
*/
public <T> TokenStream keywordTokenStream(final Collection<T> keywords) {
// TODO: deprecate & move this method into AnalyzerUtil?
if (keywords == null)
throw new IllegalArgumentException("keywords must not be null");
return new TokenStream() {
private Iterator<T> iter = keywords.iterator();
private int start = 0;
private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
@Override
public boolean incrementToken() {
if (!iter.hasNext()) return false;
T obj = iter.next();
if (obj == null)
throw new IllegalArgumentException("keyword must not be null");
String term = obj.toString();
clearAttributes();
termAtt.setEmpty().append(term);
offsetAtt.setOffset(start, start+termAtt.length());
start += term.length() + 1; // separate words by 1 (blank) character
return true;
}
};
}
/**
* Equivalent to <code>addField(fieldName, stream, 1.0f)</code>.
*
* @param fieldName
* a name to be associated with the text
* @param stream
* the token stream to retrieve tokens from
*/
public void addField(String fieldName, TokenStream stream) {
addField(fieldName, stream, 1.0f);
}
/**
* Iterates over the given token stream and adds the resulting terms to the index;
* Equivalent to adding a tokenized, indexed, termVectorStored, unstored,
* Lucene {@link org.apache.lucene.document.Field}.
* Finally closes the token stream. Note that untokenized keywords can be added with this method via
* {@link #keywordTokenStream(Collection)}, the Lucene <code>KeywordTokenizer</code> or similar utilities.
*
* @param fieldName
* a name to be associated with the text
* @param stream
* the token stream to retrieve tokens from.
* @param boost
* the boost factor for hits for this field
*
* @see org.apache.lucene.document.Field#setBoost(float)
*/
public void addField(String fieldName, TokenStream stream, float boost) {
addField(fieldName, stream, boost, 0);
}
/**
* Iterates over the given token stream and adds the resulting terms to the index;
* Equivalent to adding a tokenized, indexed, termVectorStored, unstored,
* Lucene {@link org.apache.lucene.document.Field}.
* Finally closes the token stream. Note that untokenized keywords can be added with this method via
* {@link #keywordTokenStream(Collection)}, the Lucene <code>KeywordTokenizer</code> or similar utilities.
*
* @param fieldName
* a name to be associated with the text
* @param stream
* the token stream to retrieve tokens from.
* @param boost
* the boost factor for hits for this field
*
* @param positionIncrementGap
* the position increment gap if fields with the same name are added more than once
*
*
* @see org.apache.lucene.document.Field#setBoost(float)
*/
public void addField(String fieldName, TokenStream stream, float boost, int positionIncrementGap) {
addField(fieldName, stream, boost, positionIncrementGap, 1);
}
/**
* Iterates over the given token stream and adds the resulting terms to the index;
* Equivalent to adding a tokenized, indexed, termVectorStored, unstored,
* Lucene {@link org.apache.lucene.document.Field}.
* Finally closes the token stream. Note that untokenized keywords can be added with this method via
* {@link #keywordTokenStream(Collection)}, the Lucene <code>KeywordTokenizer</code> or similar utilities.
*
*
* @param fieldName
* a name to be associated with the text
* @param stream
* the token stream to retrieve tokens from.
* @param boost
* the boost factor for hits for this field
* @param positionIncrementGap
* the position increment gap if fields with the same name are added more than once
* @param offsetGap
* the offset gap if fields with the same name are added more than once
* @see org.apache.lucene.document.Field#setBoost(float)
*/
public void addField(String fieldName, TokenStream stream, float boost, int positionIncrementGap, int offsetGap) {
try {
if (fieldName == null)
throw new IllegalArgumentException("fieldName must not be null");
if (stream == null)
throw new IllegalArgumentException("token stream must not be null");
if (boost <= 0.0f)
throw new IllegalArgumentException("boost factor must be greater than 0.0");
int numTokens = 0;
int numOverlapTokens = 0;
int pos = -1;
final BytesRefHash terms;
final SliceByteStartArray sliceArray;
Info info = null;
long sumTotalTermFreq = 0;
int offset = 0;
if ((info = fields.get(fieldName)) != null) {
numTokens = info.numTokens;
numOverlapTokens = info.numOverlapTokens;
pos = info.lastPosition + positionIncrementGap;
offset = info.lastOffset + offsetGap;
terms = info.terms;
boost *= info.boost;
sliceArray = info.sliceArray;
sumTotalTermFreq = info.sumTotalTermFreq;
} else {
sliceArray = new SliceByteStartArray(BytesRefHash.DEFAULT_CAPACITY);
terms = new BytesRefHash(byteBlockPool, BytesRefHash.DEFAULT_CAPACITY, sliceArray);
}
if (!fieldInfos.containsKey(fieldName)) {
fieldInfos.put(fieldName,
new FieldInfo(fieldName, true, fieldInfos.size(), false, false, false, this.storeOffsets ? IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS , null, null, null));
}
TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
PositionIncrementAttribute posIncrAttribute = stream.addAttribute(PositionIncrementAttribute.class);
OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
BytesRef ref = termAtt.getBytesRef();
stream.reset();
while (stream.incrementToken()) {
termAtt.fillBytesRef();
// if (DEBUG) System.err.println("token='" + term + "'");
numTokens++;
final int posIncr = posIncrAttribute.getPositionIncrement();
if (posIncr == 0)
numOverlapTokens++;
pos += posIncr;
int ord = terms.add(ref);
if (ord < 0) {
ord = (-ord) - 1;
postingsWriter.reset(sliceArray.end[ord]);
} else {
sliceArray.start[ord] = postingsWriter.startNewSlice();
}
sliceArray.freq[ord]++;
sumTotalTermFreq++;
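        // Layout of one posting entry in the int slice: just the position when offsets
        // are not stored, otherwise the position followed by start and end offset.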
if (!storeOffsets) {
postingsWriter.writeInt(pos);
} else {
postingsWriter.writeInt(pos);
postingsWriter.writeInt(offsetAtt.startOffset() + offset);
postingsWriter.writeInt(offsetAtt.endOffset() + offset);
}
sliceArray.end[ord] = postingsWriter.getCurrentOffset();
}
stream.end();
// ensure infos.numTokens > 0 invariant; needed for correct operation of terms()
if (numTokens > 0) {
fields.put(fieldName, new Info(terms, sliceArray, numTokens, numOverlapTokens, boost, pos, offsetAtt.endOffset() + offset, sumTotalTermFreq));
sortedFields = null; // invalidate sorted view, if any
}
} catch (Exception e) { // can never happen
throw new RuntimeException(e);
} finally {
try {
if (stream != null) {
stream.close();
}
} catch (IOException e2) {
throw new RuntimeException(e2);
}
}
}
/**
* Creates and returns a searcher that can be used to execute arbitrary
* Lucene queries and to collect the resulting query results as hits.
*
* @return a searcher
*/
public IndexSearcher createSearcher() {
MemoryIndexReader reader = new MemoryIndexReader();
IndexSearcher searcher = new IndexSearcher(reader); // ensures no auto-close !!
reader.setSearcher(searcher); // to later get hold of searcher.getSimilarity()
return searcher;
}
/**
* Convenience method that efficiently returns the relevance score by
* matching this index against the given Lucene query expression.
*
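   * <p>
   * A minimal sketch (the query shown is illustrative; any Lucene query works):
   * <pre class="prettyprint">
   * float score = index.search(new TermQuery(new Term("content", "lucene")));
   * boolean matches = score > 0.0f;
   * </pre>
   *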
* @param query
* an arbitrary Lucene query to run against this index
   * @return the relevance score of the matchmaking; a number in the range
   *         [0.0 .. 1.0], with 0.0 indicating no match. The higher the number,
   *         the better the match.
*
*/
public float search(Query query) {
if (query == null)
throw new IllegalArgumentException("query must not be null");
IndexSearcher searcher = createSearcher();
try {
final float[] scores = new float[1]; // inits to 0.0f (no match)
searcher.search(query, new Collector() {
private Scorer scorer;
@Override
public void collect(int doc) throws IOException {
scores[0] = scorer.score();
}
@Override
public void setScorer(Scorer scorer) {
this.scorer = scorer;
}
@Override
public boolean acceptsDocsOutOfOrder() {
return true;
}
@Override
public void setNextReader(AtomicReaderContext context) { }
});
float score = scores[0];
return score;
} catch (IOException e) { // can never happen (RAMDirectory)
throw new RuntimeException(e);
} finally {
// searcher.close();
/*
* Note that it is harmless and important for good performance to
* NOT close the index reader!!! This avoids all sorts of
* unnecessary baggage and locking in the Lucene IndexReader
* superclass, all of which is completely unnecessary for this main
* memory index data structure without thread-safety claims.
*
* Wishing IndexReader would be an interface...
*
* Actually with the new tight createSearcher() API auto-closing is now
* made impossible, hence searcher.close() would be harmless and also
* would not degrade performance...
*/
}
}
/**
* Returns a reasonable approximation of the main memory [bytes] consumed by
   * this instance. Useful for smart memory sensitive caches/pools.
* @return the main memory consumption
*/
public long getMemorySize() {
return RamUsageEstimator.sizeOf(this);
}
/** sorts into ascending order (on demand), reusing memory along the way */
private void sortFields() {
if (sortedFields == null) sortedFields = sort(fields);
}
/** returns a view of the given map's entries, sorted ascending by key */
private static <K,V> Map.Entry<K,V>[] sort(HashMap<K,V> map) {
int size = map.size();
@SuppressWarnings("unchecked")
Map.Entry<K,V>[] entries = new Map.Entry[size];
Iterator<Map.Entry<K,V>> iter = map.entrySet().iterator();
for (int i=0; i < size; i++) {
entries[i] = iter.next();
}
if (size > 1) ArrayUtil.introSort(entries, termComparator);
return entries;
}
/**
* Returns a String representation of the index data for debugging purposes.
*
* @return the string representation
*/
@Override
public String toString() {
StringBuilder result = new StringBuilder(256);
sortFields();
int sumPositions = 0;
int sumTerms = 0;
final BytesRef spare = new BytesRef();
for (int i=0; i < sortedFields.length; i++) {
Map.Entry<String,Info> entry = sortedFields[i];
String fieldName = entry.getKey();
Info info = entry.getValue();
info.sortTerms();
result.append(fieldName + ":\n");
SliceByteStartArray sliceArray = info.sliceArray;
int numPositions = 0;
SliceReader postingsReader = new SliceReader(intBlockPool);
for (int j=0; j < info.terms.size(); j++) {
int ord = info.sortedTerms[j];
info.terms.get(ord, spare);
int freq = sliceArray.freq[ord];
result.append("\t'" + spare + "':" + freq + ":");
postingsReader.reset(sliceArray.start[ord], sliceArray.end[ord]);
result.append(" [");
final int iters = storeOffsets ? 3 : 1;
while(!postingsReader.endOfSlice()) {
result.append("(");
for (int k = 0; k < iters; k++) {
result.append(postingsReader.readInt());
if (k < iters-1) {
result.append(", ");
}
}
result.append(")");
if (!postingsReader.endOfSlice()) {
result.append(",");
}
}
result.append("]");
result.append("\n");
numPositions += freq;
}
result.append("\tterms=" + info.terms.size());
result.append(", positions=" + numPositions);
result.append(", memory=" + RamUsageEstimator.humanReadableUnits(RamUsageEstimator.sizeOf(info)));
result.append("\n");
sumPositions += numPositions;
sumTerms += info.terms.size();
}
result.append("\nfields=" + sortedFields.length);
result.append(", terms=" + sumTerms);
result.append(", positions=" + sumPositions);
result.append(", memory=" + RamUsageEstimator.humanReadableUnits(getMemorySize()));
return result.toString();
}
/**
* Index data structure for a field; Contains the tokenized term texts and
* their positions.
*/
private static final class Info {
/**
* Term strings and their positions for this field: Map <String
* termText, ArrayIntList positions>
*/
private final BytesRefHash terms;
private final SliceByteStartArray sliceArray;
/** Terms sorted ascending by term text; computed on demand */
private transient int[] sortedTerms;
/** Number of added tokens for this field */
private final int numTokens;
/** Number of overlapping tokens for this field */
private final int numOverlapTokens;
/** Boost factor for hits for this field */
private final float boost;
private final long sumTotalTermFreq;
/** the last position encountered in this field for multi field support*/
private int lastPosition;
/** the last offset encountered in this field for multi field support*/
private int lastOffset;
public Info(BytesRefHash terms, SliceByteStartArray sliceArray, int numTokens, int numOverlapTokens, float boost, int lastPosition, int lastOffset, long sumTotalTermFreq) {
this.terms = terms;
this.sliceArray = sliceArray;
this.numTokens = numTokens;
this.numOverlapTokens = numOverlapTokens;
this.boost = boost;
this.sumTotalTermFreq = sumTotalTermFreq;
this.lastPosition = lastPosition;
this.lastOffset = lastOffset;
}
public long getSumTotalTermFreq() {
return sumTotalTermFreq;
}
/**
* Sorts hashed terms into ascending order, reusing memory along the
* way. Note that sorting is lazily delayed until required (often it's
* not required at all). If a sorted view is required then hashing +
* sort + binary search is still faster and smaller than TreeMap usage
* (which would be an alternative and somewhat more elegant approach,
* apart from more sophisticated Tries / prefix trees).
*/
public void sortTerms() {
if (sortedTerms == null)
sortedTerms = terms.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
}
public float getBoost() {
return boost;
}
}
///////////////////////////////////////////////////////////////////////////////
// Nested classes:
///////////////////////////////////////////////////////////////////////////////
/**
* Search support for Lucene framework integration; implements all methods
* required by the Lucene IndexReader contracts.
*/
private final class MemoryIndexReader extends AtomicReader {
private IndexSearcher searcher; // needed to find searcher.getSimilarity()
private MemoryIndexReader() {
super(); // avoid as much superclass baggage as possible
}
private Info getInfo(String fieldName) {
return fields.get(fieldName);
}
private Info getInfo(int pos) {
return sortedFields[pos].getValue();
}
@Override
public Bits getLiveDocs() {
return null;
}
@Override
public FieldInfos getFieldInfos() {
return new FieldInfos(fieldInfos.values().toArray(new FieldInfo[fieldInfos.size()]));
}
@Override
public NumericDocValues getNumericDocValues(String field) {
return null;
}
@Override
public BinaryDocValues getBinaryDocValues(String field) {
return null;
}
@Override
public SortedDocValues getSortedDocValues(String field) {
return null;
}
@Override
public SortedSetDocValues getSortedSetDocValues(String field) {
return null;
}
@Override
public Bits getDocsWithField(String field) throws IOException {
return null;
}
private class MemoryFields extends Fields {
@Override
public Iterator<String> iterator() {
return new Iterator<String>() {
int upto = -1;
@Override
public String next() {
upto++;
if (upto >= sortedFields.length) {
throw new NoSuchElementException();
}
return sortedFields[upto].getKey();
}
@Override
public boolean hasNext() {
return upto+1 < sortedFields.length;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public Terms terms(final String field) {
int i = Arrays.binarySearch(sortedFields, field, termComparator);
if (i < 0) {
return null;
} else {
final Info info = getInfo(i);
info.sortTerms();
return new Terms() {
@Override
public TermsEnum iterator(TermsEnum reuse) {
return new MemoryTermsEnum(info);
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public long size() {
return info.terms.size();
}
@Override
public long getSumTotalTermFreq() {
return info.getSumTotalTermFreq();
}
@Override
public long getSumDocFreq() {
// each term has df=1
return info.terms.size();
}
@Override
public int getDocCount() {
return info.terms.size() > 0 ? 1 : 0;
}
@Override
public boolean hasFreqs() {
return true;
}
@Override
public boolean hasOffsets() {
return storeOffsets;
}
@Override
public boolean hasPositions() {
return true;
}
@Override
public boolean hasPayloads() {
return false;
}
};
}
}
@Override
public int size() {
return sortedFields.length;
}
}
@Override
public Fields fields() {
sortFields();
return new MemoryFields();
}
private class MemoryTermsEnum extends TermsEnum {
private final Info info;
private final BytesRef br = new BytesRef();
int termUpto = -1;
public MemoryTermsEnum(Info info) {
this.info = info;
info.sortTerms();
}
private final int binarySearch(BytesRef b, BytesRef bytesRef, int low,
int high, BytesRefHash hash, int[] ords, Comparator<BytesRef> comparator) {
int mid = 0;
while (low <= high) {
mid = (low + high) >>> 1;
hash.get(ords[mid], bytesRef);
final int cmp = comparator.compare(bytesRef, b);
if (cmp < 0) {
low = mid + 1;
} else if (cmp > 0) {
high = mid - 1;
} else {
return mid;
}
}
assert comparator.compare(bytesRef, b) != 0;
return -(low + 1);
}
@Override
public boolean seekExact(BytesRef text) {
termUpto = binarySearch(text, br, 0, info.terms.size()-1, info.terms, info.sortedTerms, BytesRef.getUTF8SortedAsUnicodeComparator());
return termUpto >= 0;
}
@Override
public SeekStatus seekCeil(BytesRef text) {
termUpto = binarySearch(text, br, 0, info.terms.size()-1, info.terms, info.sortedTerms, BytesRef.getUTF8SortedAsUnicodeComparator());
if (termUpto < 0) { // not found; choose successor
termUpto = -termUpto-1;
if (termUpto >= info.terms.size()) {
return SeekStatus.END;
} else {
info.terms.get(info.sortedTerms[termUpto], br);
return SeekStatus.NOT_FOUND;
}
} else {
return SeekStatus.FOUND;
}
}
@Override
public void seekExact(long ord) {
assert ord < info.terms.size();
termUpto = (int) ord;
}
@Override
public BytesRef next() {
termUpto++;
if (termUpto >= info.terms.size()) {
return null;
} else {
info.terms.get(info.sortedTerms[termUpto], br);
return br;
}
}
@Override
public BytesRef term() {
return br;
}
@Override
public long ord() {
return termUpto;
}
@Override
public int docFreq() {
return 1;
}
@Override
public long totalTermFreq() {
return info.sliceArray.freq[info.sortedTerms[termUpto]];
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
if (reuse == null || !(reuse instanceof MemoryDocsEnum)) {
reuse = new MemoryDocsEnum();
}
return ((MemoryDocsEnum) reuse).reset(liveDocs, info.sliceArray.freq[info.sortedTerms[termUpto]]);
}
@Override
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) {
if (reuse == null || !(reuse instanceof MemoryDocsAndPositionsEnum)) {
reuse = new MemoryDocsAndPositionsEnum();
}
final int ord = info.sortedTerms[termUpto];
return ((MemoryDocsAndPositionsEnum) reuse).reset(liveDocs, info.sliceArray.start[ord], info.sliceArray.end[ord], info.sliceArray.freq[ord]);
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public void seekExact(BytesRef term, TermState state) throws IOException {
assert state != null;
this.seekExact(((OrdTermState)state).ord);
}
@Override
public TermState termState() throws IOException {
OrdTermState ts = new OrdTermState();
ts.ord = termUpto;
return ts;
}
}
private class MemoryDocsEnum extends DocsEnum {
private boolean hasNext;
private Bits liveDocs;
private int doc = -1;
private int freq;
public DocsEnum reset(Bits liveDocs, int freq) {
this.liveDocs = liveDocs;
hasNext = true;
doc = -1;
this.freq = freq;
return this;
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() {
if (hasNext && (liveDocs == null || liveDocs.get(0))) {
hasNext = false;
return doc = 0;
} else {
return doc = NO_MORE_DOCS;
}
}
@Override
public int advance(int target) throws IOException {
return slowAdvance(target);
}
@Override
public int freq() throws IOException {
return freq;
}
@Override
public long cost() {
return 1;
}
}
private class MemoryDocsAndPositionsEnum extends DocsAndPositionsEnum {
private int posUpto; // for assert
private boolean hasNext;
private Bits liveDocs;
private int doc = -1;
private SliceReader sliceReader;
private int freq;
private int startOffset;
private int endOffset;
public MemoryDocsAndPositionsEnum() {
this.sliceReader = new SliceReader(intBlockPool);
}
public DocsAndPositionsEnum reset(Bits liveDocs, int start, int end, int freq) {
this.liveDocs = liveDocs;
this.sliceReader.reset(start, end);
posUpto = 0; // for assert
hasNext = true;
doc = -1;
this.freq = freq;
return this;
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() {
if (hasNext && (liveDocs == null || liveDocs.get(0))) {
hasNext = false;
return doc = 0;
} else {
return doc = NO_MORE_DOCS;
}
}
@Override
public int advance(int target) throws IOException {
return slowAdvance(target);
}
@Override
public int freq() throws IOException {
return freq;
}
@Override
public int nextPosition() {
assert posUpto++ < freq;
assert !sliceReader.endOfSlice() : " stores offsets : " + startOffset;
if (storeOffsets) {
int pos = sliceReader.readInt();
startOffset = sliceReader.readInt();
endOffset = sliceReader.readInt();
return pos;
} else {
return sliceReader.readInt();
}
}
@Override
public int startOffset() {
return startOffset;
}
@Override
public int endOffset() {
return endOffset;
}
@Override
public BytesRef getPayload() {
return null;
}
@Override
public long cost() {
return 1;
}
}
@Override
public Fields getTermVectors(int docID) {
if (docID == 0) {
return fields();
} else {
return null;
}
}
private Similarity getSimilarity() {
if (searcher != null) return searcher.getSimilarity();
return IndexSearcher.getDefaultSimilarity();
}
private void setSearcher(IndexSearcher searcher) {
this.searcher = searcher;
}
@Override
public int numDocs() {
if (DEBUG) System.err.println("MemoryIndexReader.numDocs");
return 1;
}
@Override
public int maxDoc() {
if (DEBUG) System.err.println("MemoryIndexReader.maxDoc");
return 1;
}
@Override
public void document(int docID, StoredFieldVisitor visitor) {
if (DEBUG) System.err.println("MemoryIndexReader.document");
// no-op: there are no stored fields
}
@Override
protected void doClose() {
if (DEBUG) System.err.println("MemoryIndexReader.doClose");
}
/** performance hack: cache norms to avoid repeated expensive calculations */
private NumericDocValues cachedNormValues;
private String cachedFieldName;
private Similarity cachedSimilarity;
@Override
public NumericDocValues getNormValues(String field) {
FieldInfo fieldInfo = fieldInfos.get(field);
if (fieldInfo == null || fieldInfo.omitsNorms())
return null;
NumericDocValues norms = cachedNormValues;
Similarity sim = getSimilarity();
if (!field.equals(cachedFieldName) || sim != cachedSimilarity) { // not cached?
Info info = getInfo(field);
int numTokens = info != null ? info.numTokens : 0;
int numOverlapTokens = info != null ? info.numOverlapTokens : 0;
float boost = info != null ? info.getBoost() : 1.0f;
FieldInvertState invertState = new FieldInvertState(field, 0, numTokens, numOverlapTokens, 0, boost);
long value = sim.computeNorm(invertState);
norms = new MemoryIndexNormDocValues(value);
// cache it for future reuse
cachedNormValues = norms;
cachedFieldName = field;
cachedSimilarity = sim;
if (DEBUG) System.err.println("MemoryIndexReader.norms: " + field + ":" + value + ":" + numTokens);
}
return norms;
}
}
/**
* Resets the {@link MemoryIndex} to its initial state and recycles all internal buffers.
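   * <p>
   * Intended reuse pattern, as a sketch (the loop, field name, <code>analyzer</code>
   * and <code>query</code> are illustrative only):
   * <pre class="prettyprint">
   * MemoryIndex index = new MemoryIndex();
   * for (String text : texts) {
   *   index.addField("content", text, analyzer);
   *   float score = index.search(query);
   *   index.reset();
   * }
   * </pre>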
*/
public void reset() {
this.fieldInfos.clear();
this.fields.clear();
this.sortedFields = null;
byteBlockPool.reset(false, false); // no need to 0-fill the buffers
    intBlockPool.reset(true, false); // here we must 0-fill since we use slices
}
private static final class SliceByteStartArray extends DirectBytesStartArray {
int[] start; // the start offset in the IntBlockPool per term
int[] end; // the end pointer in the IntBlockPool for the postings slice per term
int[] freq; // the term frequency
public SliceByteStartArray(int initSize) {
super(initSize);
}
@Override
public int[] init() {
final int[] ord = super.init();
start = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
end = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
freq = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
assert start.length >= ord.length;
assert end.length >= ord.length;
assert freq.length >= ord.length;
return ord;
}
@Override
public int[] grow() {
final int[] ord = super.grow();
if (start.length < ord.length) {
start = ArrayUtil.grow(start, ord.length);
end = ArrayUtil.grow(end, ord.length);
freq = ArrayUtil.grow(freq, ord.length);
}
assert start.length >= ord.length;
assert end.length >= ord.length;
assert freq.length >= ord.length;
return ord;
}
@Override
public int[] clear() {
start = end = null;
return super.clear();
}
}
}
| yintaoxue/read-open-source-code | solr-4.7.2/src/org/apache/lucene/index/memory/MemoryIndex.java | Java | apache-2.0 | 44,614 |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template const.java
// Do not modify
package org.projectfloodlight.openflow.protocol;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
public enum OFBundleCtrlType {
OPEN_REQUEST,
OPEN_REPLY,
CLOSE_REQUEST,
CLOSE_REPLY,
COMMIT_REQUEST,
COMMIT_REPLY,
DISCARD_REQUEST,
DISCARD_REPLY;
}
| floodlight/loxigen-artifacts | openflowj/gen-src/main/java/org/projectfloodlight/openflow/protocol/OFBundleCtrlType.java | Java | apache-2.0 | 1,547 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.transactions;
import org.jetbrains.annotations.*;
/**
 * Synchronization callback for a transaction. You can subscribe to receive transaction
 * state change callbacks by registering a transaction synchronization via the
 * {@link org.apache.ignite.internal.processors.cache.GridCache#txSynchronize(TransactionSynchronization)} method.
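 * <p>
 * A minimal sketch of a synchronization that merely logs state transitions
 * (illustrative only):
 * <pre>
 * TransactionSynchronization sync = new TransactionSynchronization() {
 *     public void onStateChanged(TransactionState prevState, TransactionState newState, Transaction tx) {
 *         System.out.println("Transaction " + tx + " moved from " + prevState + " to " + newState);
 *     }
 * };
 * </pre>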
*/
public interface TransactionSynchronization {
/**
     * State change callback for a transaction. Note that unless the transaction has been
     * completed, it is possible to mark it for <tt>rollbackOnly</tt> by calling
     * {@link Transaction#setRollbackOnly()} on the passed-in transaction.
     * You can check the return value of the {@link Transaction#setRollbackOnly() setRollbackOnly()}
     * method to see whether the transaction was indeed marked for rollback.
     *
     * @param prevState Previous state of the transaction. If the transaction has just been
     *      started, then the previous state is {@code null}.
     * @param newState New state of the transaction. In the majority of cases this will be the
     *      same as {@link Transaction#state() tx.state()}, but it is also possible
     *      that the transaction is marked for rollback concurrently with this method
     *      invocation, in which case <tt>newState</tt> reflects the actual state of the
     *      transition this callback is associated with.
* @param tx Transaction whose state has changed.
*/
public void onStateChanged(@Nullable TransactionState prevState, TransactionState newState, Transaction tx);
}
| gridgain/apache-ignite | modules/core/src/main/java/org/apache/ignite/transactions/TransactionSynchronization.java | Java | apache-2.0 | 2,374 |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl.quickfix;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInsight.daemon.impl.analysis.JavaHighlightUtil;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import java.util.List;
public class DeleteMultiCatchFix implements IntentionAction {
private final PsiTypeElement myTypeElement;
public DeleteMultiCatchFix(final PsiTypeElement typeElement) {
myTypeElement = typeElement;
}
@NotNull
@Override
public String getText() {
return QuickFixBundle.message("delete.catch.text", JavaHighlightUtil.formatType(myTypeElement.getType()));
}
@NotNull
@Override
public String getFamilyName() {
return QuickFixBundle.message("delete.catch.family");
}
@Override
public boolean isAvailable(@NotNull final Project project, final Editor editor, final PsiFile file) {
return myTypeElement != null &&
myTypeElement.isValid() &&
PsiManager.getInstance(project).isInProject(myTypeElement.getContainingFile());
}
@Override
public void invoke(@NotNull final Project project, final Editor editor, final PsiFile file) throws IncorrectOperationException {
if (!FileModificationService.getInstance().prepareFileForWrite(myTypeElement.getContainingFile())) return;
final PsiElement parentType = myTypeElement.getParent();
if (!(parentType instanceof PsiTypeElement)) return;
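    // Determine the range to delete: the selected type element plus the adjacent '|' token,
    // taken from the right side if present, otherwise from the left. If a single type remains
    // afterwards, the disjunction is collapsed into a plain single-type catch parameter.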
final PsiElement first;
final PsiElement last;
final PsiElement right = PsiTreeUtil.skipSiblingsForward(myTypeElement, PsiWhiteSpace.class, PsiComment.class);
if (right instanceof PsiJavaToken && ((PsiJavaToken)right).getTokenType() == JavaTokenType.OR) {
first = myTypeElement;
last = right;
}
else if (right == null) {
final PsiElement left = PsiTreeUtil.skipSiblingsBackward(myTypeElement, PsiWhiteSpace.class, PsiComment.class);
if (!(left instanceof PsiJavaToken)) return;
final IElementType leftType = ((PsiJavaToken)left).getTokenType();
if (leftType != JavaTokenType.OR) return;
first = left;
last = myTypeElement;
}
else {
return;
}
parentType.deleteChildRange(first, last);
final List<PsiTypeElement> typeElements = PsiTreeUtil.getChildrenOfTypeAsList(parentType, PsiTypeElement.class);
if (typeElements.size() == 1) {
final PsiElement parameter = parentType.getParent();
parameter.addRangeAfter(parentType.getFirstChild(), parentType.getLastChild(), parentType);
parentType.delete();
}
}
@Override
public boolean startInWriteAction() {
return true;
}
}
| android-ia/platform_tools_idea | java/java-impl/src/com/intellij/codeInsight/daemon/impl/quickfix/DeleteMultiCatchFix.java | Java | apache-2.0 | 3,583 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.persistence.jpa.entity;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import javax.persistence.Cacheable;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.apache.syncope.core.persistence.api.entity.Schema;
import org.apache.syncope.core.persistence.api.entity.SchemaLabel;
import org.apache.syncope.core.persistence.jpa.validation.entity.SchemaKeyCheck;
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = AbstractSchema.TABLE)
@Cacheable
@SchemaKeyCheck
public abstract class AbstractSchema extends AbstractProvidedKeyEntity implements Schema {
public static final String TABLE = "SyncopeSchema";
private static final long serialVersionUID = -9222344997225831269L;
@OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.LAZY, mappedBy = "schema")
private List<JPASchemaLabel> labels = new ArrayList<>();
@Override
public boolean add(final SchemaLabel label) {
checkType(label, JPASchemaLabel.class);
return this.labels.add((JPASchemaLabel) label);
}
@Override
public Optional<? extends SchemaLabel> getLabel(final Locale locale) {
return labels.stream().filter(label -> label.getLocale().equals(locale)).findFirst();
}
@Override
public List<? extends SchemaLabel> getLabels() {
return labels;
}
}
| apache/syncope | core/persistence-jpa/src/main/java/org/apache/syncope/core/persistence/jpa/entity/AbstractSchema.java | Java | apache-2.0 | 2,445 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.rest.cxf.service;
import java.net.URI;
import java.util.List;
import javax.ws.rs.core.Response;
import org.apache.syncope.common.lib.to.ConnBundleTO;
import org.apache.syncope.common.lib.to.ConnIdObjectClassTO;
import org.apache.syncope.common.lib.to.ConnInstanceTO;
import org.apache.syncope.common.rest.api.RESTHeaders;
import org.apache.syncope.common.rest.api.service.ConnectorService;
import org.apache.syncope.core.logic.ConnectorLogic;
import org.springframework.stereotype.Service;
@Service
public class ConnectorServiceImpl extends AbstractService implements ConnectorService {
protected final ConnectorLogic logic;
public ConnectorServiceImpl(final ConnectorLogic logic) {
this.logic = logic;
}
@Override
public Response create(final ConnInstanceTO connInstanceTO) {
ConnInstanceTO connInstance = logic.create(connInstanceTO);
URI location = uriInfo.getAbsolutePathBuilder().path(connInstance.getKey()).build();
return Response.created(location).
header(RESTHeaders.RESOURCE_KEY, connInstance.getKey()).
build();
}
@Override
public void delete(final String key) {
logic.delete(key);
}
@Override
public List<ConnBundleTO> getBundles(final String lang) {
return logic.getBundles(lang);
}
@Override
public List<ConnIdObjectClassTO> buildObjectClassInfo(
final ConnInstanceTO connInstanceTO, final boolean includeSpecial) {
return logic.buildObjectClassInfo(connInstanceTO, includeSpecial);
}
@Override
public List<ConnInstanceTO> list(final String lang) {
return logic.list(lang);
}
@Override
public ConnInstanceTO read(final String key, final String lang) {
return logic.read(key, lang);
}
@Override
public ConnInstanceTO readByResource(final String resourceName, final String lang) {
return logic.readByResource(resourceName, lang);
}
@Override
public void update(final ConnInstanceTO connInstanceTO) {
logic.update(connInstanceTO);
}
@Override
public void check(final ConnInstanceTO connInstanceTO) {
logic.check(connInstanceTO);
}
@Override
public void reload() {
logic.reload();
}
}
| apache/syncope | core/idm/rest-cxf/src/main/java/org/apache/syncope/core/rest/cxf/service/ConnectorServiceImpl.java | Java | apache-2.0 | 3,130 |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.datastore;
import java.util.List;
/**
* An interface to represent Google Cloud Datastore write operations.
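 * <p>
 * A typical write sketch (how the entity and its key are built is elided here;
 * <code>writer</code> stands for any {@code DatastoreWriter} implementation):
 * <pre>
 * FullEntity<?> entity = ...; // an entity built for a (possibly incomplete) key
 * Entity saved = writer.put(entity); // an id is allocated automatically if the key is incomplete
 * </pre>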
*/
public interface DatastoreWriter {
/**
* Datastore add operation. This method will automatically allocate an id if necessary.
*
* @param entity the entity to add
   * @return an {@code Entity} with the same properties and a key that is either newly allocated
   *     or the same one if the key is already complete
* @throws DatastoreException upon failure
* @throws IllegalArgumentException if the given entity is missing a key
*/
Entity add(FullEntity<?> entity);
/**
   * Datastore add operation. This method will automatically allocate an id for any entity with an
   * incomplete key.
   *
   * @return a list of {@code Entity} ordered by input, with the same properties and a key that
   *     is either newly allocated or the same one if it was already complete
* @throws DatastoreException upon failure
* @throws IllegalArgumentException if any of the given entities is missing a key
* @see #add(FullEntity)
*/
List<Entity> add(FullEntity<?>... entities);
/**
* A Datastore update operation. The operation will fail if an entity with the same key does not
* already exist.
*/
void update(Entity... entities);
/**
   * A Datastore put (a.k.a. upsert) operation. This method will automatically allocate an id if
* necessary.
*
* @param entity the entity to put
   * @return an {@code Entity} with the same properties and a key that is either newly allocated
   *     or the same one if the key is already complete
* @throws DatastoreException upon failure
* @throws IllegalArgumentException if the given entity is missing a key
*/
Entity put(FullEntity<?> entity);
/**
   * A Datastore put (a.k.a. upsert) operation. This method will automatically allocate an id for any
   * entity with an incomplete key.
   *
   * @return a list of updated or inserted {@code Entity}, ordered by input. Returned keys are
   *     either newly allocated or the same ones if they were already complete.
* @throws DatastoreException upon failure
* @throws IllegalArgumentException if any of the given entities is missing a key
*/
List<Entity> put(FullEntity<?>... entities);
/**
   * A Datastore delete operation. It is OK to request deletion of a non-existing entity.
*/
void delete(Key... keys);
}
| tangiel/google-cloud-java | google-cloud-datastore/src/main/java/com/google/cloud/datastore/DatastoreWriter.java | Java | apache-2.0 | 3,018 |
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.client.context;
import org.kaaproject.kaa.client.KaaClient;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
/**
 * Responsible for creating {@link ExecutorService executor} instances for
 * internal SDK usage. Implementations should not manage the life-cycle of the
 * created {@link ExecutorService executors}. Executors will be stopped during
 * {@link KaaClient#stop()}, so {@link ExecutorService executor} instances
 * should either not be cached in the context, or the context should check the
 * shutdown status before returning a cached value.
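 * <p>
 * A minimal implementation sketch (the class name and executor choices are
 * illustrative only, and the shutdown re-check mentioned above is omitted for brevity):
 * <pre>
 * class SingleThreadExecutorContext implements ExecutorContext {
 *     private ExecutorService executor;
 *     private ScheduledExecutorService scheduled;
 *     public void init() {
 *         executor = Executors.newSingleThreadExecutor();
 *         scheduled = Executors.newSingleThreadScheduledExecutor();
 *     }
 *     public void stop() {
 *         executor.shutdown();
 *         scheduled.shutdown();
 *     }
 *     public ExecutorService getLifeCycleExecutor() { return executor; }
 *     public ExecutorService getApiExecutor() { return executor; }
 *     public ExecutorService getCallbackExecutor() { return executor; }
 *     public ScheduledExecutorService getScheduledExecutor() { return scheduled; }
 * }
 * </pre>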
*
* @author Andrew Shvayka
*/
public interface ExecutorContext {
/**
* Initialize executors.
*/
void init();
/**
* Stops executors.
*/
void stop();
/**
* Executes lifecycle events/commands of Kaa client.
*
* @return the lifecycle executor
*/
ExecutorService getLifeCycleExecutor();
/**
   * Executes user API calls to the SDK client, for example serializing log
   * records before submitting them to the transport.
*
* @return the API executor
*/
ExecutorService getApiExecutor();
/**
* Executes callback methods provided by SDK client user.
*
* @return the callback executor
*/
ExecutorService getCallbackExecutor();
/**
   * Executes scheduled tasks (periodically if needed), such as log upload.
*
* @return the scheduled executor
*/
ScheduledExecutorService getScheduledExecutor();
}
| vtkhir/kaa | client/client-multi/client-java-core/src/main/java/org/kaaproject/kaa/client/context/ExecutorContext.java | Java | apache-2.0 | 2,065 |
// Modifications copyright (C) 2017, Baidu.com, Inc.
// Copyright 2017 The Apache Software Foundation
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.baidu.palo.analysis;
/**
* captures info of a single WHEN expr THEN expr clause.
*/
class CaseWhenClause {
private final Expr whenExpr;
private final Expr thenExpr;
public CaseWhenClause(Expr whenExpr, Expr thenExpr) {
super();
this.whenExpr = whenExpr;
this.thenExpr = thenExpr;
}
public Expr getWhenExpr() {
return whenExpr;
}
public Expr getThenExpr() {
return thenExpr;
}
}
| cyongli/palo | fe/src/com/baidu/palo/analysis/CaseWhenClause.java | Java | apache-2.0 | 1,374 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.log4j.rule;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.log4j.spi.LoggingEventFieldResolver;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
/**
* A Rule class which returns the result of
* performing equals against two strings.
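 * <p>
 * A usage sketch, given a {@link LoggingEvent} <code>event</code> (the field and
 * value shown are illustrative; see {@link LoggingEventFieldResolver} for the
 * supported field names):
 * <pre>
 * Rule rule = EqualsRule.getRule("LOGGER", "com.example.MyClass");
 * boolean match = rule.evaluate(event, null);
 * </pre>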
*
* @author Scott Deboy ([email protected])
*/
public class EqualsRule extends AbstractRule {
/**
* Serialization ID.
*/
static final long serialVersionUID = 1712851553477517245L;
/**
* Resolver.
*/
private static final LoggingEventFieldResolver RESOLVER =
LoggingEventFieldResolver.getInstance();
/**
* Value.
*/
private final String value;
/**
* Field.
*/
private final String field;
/**
* Create new instance.
* @param field field
* @param value value
*/
private EqualsRule(final String field, final String value) {
super();
if (!RESOLVER.isField(field)) {
throw new IllegalArgumentException(
"Invalid EQUALS rule - " + field + " is not a supported field");
}
this.field = field;
this.value = value;
}
/**
* Create new instance from top two elements of stack.
* @param stack stack
* @return new instance
*/
public static Rule getRule(final Stack stack) {
if (stack.size() < 2) {
throw new IllegalArgumentException(
"Invalid EQUALS rule - expected two parameters but received "
+ stack.size());
}
String p2 = stack.pop().toString();
String p1 = stack.pop().toString();
return getRule(p1, p2);
}
/**
* Create new instance.
* @param p1 field, special treatment for level and timestamp.
* @param p2 value
* @return new instance
*/
public static Rule getRule(final String p1, final String p2) {
if (p1.equalsIgnoreCase(LoggingEventFieldResolver.LEVEL_FIELD)) {
return LevelEqualsRule.getRule(p2);
} else if (p1.equalsIgnoreCase(LoggingEventFieldResolver.TIMESTAMP_FIELD)) {
return TimestampEqualsRule.getRule(p2);
} else {
return new EqualsRule(p1, p2);
}
}
/** {@inheritDoc} */
public boolean evaluate(final LoggingEvent event, Map matches) {
Object p2 = RESOLVER.getValue(field, event);
boolean result = (p2 != null) && p2.toString().equals(value);
if (result && matches != null) {
Set entries = (Set) matches.get(field.toUpperCase());
if (entries == null) {
entries = new HashSet();
matches.put(field.toUpperCase(), entries);
}
entries.add(value);
}
return result;
}
}
| apache/log4j-extras | src/main/java/org/apache/log4j/rule/EqualsRule.java | Java | apache-2.0 | 3,458 |
/**
* View Models used by Spring MVC REST controllers.
*/
package io.github.jhipster.registry.web.rest.vm;
| huiqiangyang/registry | src/main/java/io/github/jhipster/registry/web/rest/vm/package-info.java | Java | apache-2.0 | 109 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.test;
import java.net.InetAddress;
import java.io.File;
import java.util.HashSet;
import org.apache.bookkeeper.util.IOUtils;
import org.junit.Test;
import org.apache.bookkeeper.conf.ServerConfiguration;
import org.apache.bookkeeper.proto.BookieServer;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class BookieZKExpireTest extends BookKeeperClusterTestCase {
public BookieZKExpireTest() {
super(0);
baseConf.setZkRetryBackoffStartMs(100);
baseConf.setZkRetryBackoffMaxMs(200);
}
@Test
public void testBookieServerZKExpireBehaviour() throws Exception {
BookieServer server = null;
try {
File f = createTempDir("bookieserver", "test");
HashSet<Thread> threadset = new HashSet<Thread>();
int threadCount = Thread.activeCount();
Thread threads[] = new Thread[threadCount*2];
threadCount = Thread.enumerate(threads);
for(int i = 0; i < threadCount; i++) {
if (threads[i].getName().indexOf("SendThread") != -1) {
threadset.add(threads[i]);
}
}
ServerConfiguration conf = newServerConfiguration(PortManager.nextFreePort(),
zkUtil.getZooKeeperConnectString(), f, new File[] { f });
server = new BookieServer(conf);
server.start();
Thread.sleep(10);
Thread sendthread = null;
threadCount = Thread.activeCount();
threads = new Thread[threadCount*2];
threadCount = Thread.enumerate(threads);
for(int i = 0; i < threadCount; i++) {
if (threads[i].getName().indexOf("SendThread") != -1
&& !threadset.contains(threads[i])) {
sendthread = threads[i];
break;
}
}
assertNotNull("Send thread not found", sendthread);
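            // Suspending the ZooKeeper client's SendThread starves session heartbeats long
            // enough for the session to expire; resuming it lets the client reconnect.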
sendthread.suspend();
Thread.sleep(2*10000);
sendthread.resume();
// allow watcher thread to run
Thread.sleep(3000);
assertTrue("Bookie should not shutdown on losing zk session", server.isBookieRunning());
assertTrue("Bookie Server should not shutdown on losing zk session", server.isRunning());
// check the existence of znode
assertNotNull(zkUtil.getZooKeeperClient()
.exists("/ledgers/available/" + InetAddress.getLocalHost().getHostAddress() + ":" + conf.getBookiePort(), false));
} finally {
            if (server != null) {
                server.shutdown();
            }
}
}
}
| twitter/bookkeeper | bookkeeper-server/src/test/java/org/apache/bookkeeper/test/BookieZKExpireTest.java | Java | apache-2.0 | 3,476 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.authorization.composite;
import java.security.Principal;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.jcr.Session;
import javax.jcr.security.AccessControlManager;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.JackrabbitAccessControlList;
import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils;
import org.apache.jackrabbit.oak.AbstractSecurityTest;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.apache.jackrabbit.oak.plugins.tree.impl.ImmutableTree;
import org.apache.jackrabbit.oak.security.authorization.composite.CompositeAuthorizationConfiguration.CompositionType;
import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.AggregatedPermissionProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.RepositoryPermission;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.TreePermission;
import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.util.Text;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
public abstract class AbstractCompositeProviderTest extends AbstractSecurityTest implements NodeTypeConstants, PrivilegeConstants {
static final String ROOT_PATH = PathUtils.ROOT_PATH;
static final String TEST_PATH = "/test";
static final String TEST_CHILD_PATH = "/test/child";
static final String TEST_A_PATH = "/test/a";
static final String TEST_A_B_PATH = "/test/a/b";
static final String TEST_A_B_C_PATH = "/test/a/b/c";
static final String TEST_A_B2_PATH = "/test/a/b2";
static final String TEST_PATH_2 = "/test2";
static final List<String> NODE_PATHS = ImmutableList.of(ROOT_PATH, TEST_PATH, TEST_PATH_2, TEST_CHILD_PATH, TEST_A_PATH, TEST_A_B_PATH, TEST_A_B_C_PATH, TEST_A_B2_PATH);
static final List<String> TP_PATHS = ImmutableList.of(ROOT_PATH, TEST_PATH, TEST_A_PATH, TEST_A_B_PATH, TEST_A_B_C_PATH, TEST_A_B_C_PATH + "/nonexisting");
static final PropertyState PROPERTY_STATE = PropertyStates.createProperty("propName", "val");
static final String[] ALL_ACTIONS = new String[] {
Session.ACTION_READ,
Session.ACTION_ADD_NODE,
JackrabbitSession.ACTION_REMOVE_NODE,
Session.ACTION_SET_PROPERTY,
JackrabbitSession.ACTION_ADD_PROPERTY,
JackrabbitSession.ACTION_MODIFY_PROPERTY,
JackrabbitSession.ACTION_REMOVE_PROPERTY,
Session.ACTION_REMOVE,
JackrabbitSession.ACTION_READ_ACCESS_CONTROL,
JackrabbitSession.ACTION_MODIFY_ACCESS_CONTROL,
JackrabbitSession.ACTION_LOCKING,
JackrabbitSession.ACTION_NODE_TYPE_MANAGEMENT,
JackrabbitSession.ACTION_VERSIONING,
JackrabbitSession.ACTION_USER_MANAGEMENT
};
Map<String, Long> defPermissions;
Map<String, Set<String>> defPrivileges;
Map<String, String[]> defActionsGranted;
Root readOnlyRoot;
@Override
public void before() throws Exception {
super.before();
Tree rootNode = root.getTree("/");
Tree test = TreeUtil.addChild(rootNode, "test", NT_OAK_UNSTRUCTURED);
TreeUtil.addChild(test, "child", NT_OAK_UNSTRUCTURED);
Tree a = TreeUtil.addChild(test, "a", NT_OAK_UNSTRUCTURED);
TreeUtil.addChild(a, "b2", NT_OAK_UNSTRUCTURED);
Tree b = TreeUtil.addChild(a, "b", NT_OAK_UNSTRUCTURED);
TreeUtil.addChild(b, "c", NT_OAK_UNSTRUCTURED);
TreeUtil.addChild(rootNode, "test2", NT_OAK_UNSTRUCTURED);
AccessControlManager acMgr = getAccessControlManager(root);
Principal everyone = EveryonePrincipal.getInstance();
allow(acMgr, everyone, null, JCR_NAMESPACE_MANAGEMENT, JCR_NODE_TYPE_DEFINITION_MANAGEMENT);
allow(acMgr, everyone, TEST_PATH, JCR_READ);
allow(acMgr, everyone, TEST_CHILD_PATH, JCR_READ_ACCESS_CONTROL);
allow(acMgr, everyone, TEST_A_PATH, JCR_WRITE, JCR_VERSION_MANAGEMENT);
deny(acMgr, everyone, TEST_A_B_PATH, REP_REMOVE_PROPERTIES, JCR_REMOVE_NODE);
deny(acMgr, everyone, TEST_A_B_C_PATH, REP_READ_NODES);
root.commit();
defPermissions = ImmutableMap.<String, Long>builder().
put(TEST_PATH, Permissions.READ).
put(TEST_CHILD_PATH,
Permissions.READ |
Permissions.READ_ACCESS_CONTROL).
put(TEST_A_PATH,
Permissions.READ |
Permissions.SET_PROPERTY |
Permissions.MODIFY_CHILD_NODE_COLLECTION |
Permissions.VERSION_MANAGEMENT).
put(TEST_A_B2_PATH,
Permissions.READ |
Permissions.WRITE |
Permissions.MODIFY_CHILD_NODE_COLLECTION |
Permissions.VERSION_MANAGEMENT).
put(TEST_A_B_PATH,
Permissions.READ |
Permissions.ADD_NODE |
Permissions.ADD_PROPERTY |
Permissions.MODIFY_PROPERTY |
Permissions.MODIFY_CHILD_NODE_COLLECTION |
Permissions.VERSION_MANAGEMENT).
put(TEST_A_B_C_PATH,
Permissions.READ_PROPERTY |
Permissions.ADD_NODE |
Permissions.ADD_PROPERTY |
Permissions.MODIFY_PROPERTY |
Permissions.MODIFY_CHILD_NODE_COLLECTION |
Permissions.VERSION_MANAGEMENT).
build();
defPrivileges = ImmutableMap.<String, Set<String>>builder().
put(ROOT_PATH, ImmutableSet.<String>of()).
put(TEST_PATH_2, ImmutableSet.<String>of()).
put(TEST_PATH, ImmutableSet.of(JCR_READ)).
put(TEST_CHILD_PATH, ImmutableSet.of(JCR_READ, JCR_READ_ACCESS_CONTROL)).
put(TEST_A_PATH, ImmutableSet.of(JCR_READ, JCR_WRITE, JCR_VERSION_MANAGEMENT)).
put(TEST_A_B2_PATH, ImmutableSet.of(JCR_READ, JCR_WRITE, JCR_VERSION_MANAGEMENT)).
put(TEST_A_B_PATH, ImmutableSet.of(JCR_READ, JCR_ADD_CHILD_NODES, JCR_REMOVE_CHILD_NODES, REP_ADD_PROPERTIES, REP_ALTER_PROPERTIES, JCR_VERSION_MANAGEMENT)).
put(TEST_A_B_C_PATH, ImmutableSet.of(REP_READ_PROPERTIES, JCR_ADD_CHILD_NODES, JCR_REMOVE_CHILD_NODES, REP_ADD_PROPERTIES, REP_ALTER_PROPERTIES, JCR_VERSION_MANAGEMENT)).
build();
defActionsGranted = ImmutableMap.<String, String[]>builder().
put(TEST_PATH, new String[] {Session.ACTION_READ}).
put(TEST_CHILD_PATH, new String[] {Session.ACTION_READ, JackrabbitSession.ACTION_READ_ACCESS_CONTROL}).
put(TEST_A_PATH, new String[] {Session.ACTION_READ, Session.ACTION_SET_PROPERTY, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_PATH + "/jcr:primaryType", new String[] {Session.ACTION_SET_PROPERTY, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_PATH + "/propName", new String[] {JackrabbitSession.ACTION_ADD_PROPERTY, JackrabbitSession.ACTION_MODIFY_PROPERTY, JackrabbitSession.ACTION_REMOVE_PROPERTY, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_PATH + "/nodeName", new String[] {Session.ACTION_ADD_NODE, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_B2_PATH, new String[] {Session.ACTION_READ, Session.ACTION_ADD_NODE, JackrabbitSession.ACTION_REMOVE_NODE, Session.ACTION_REMOVE, Session.ACTION_SET_PROPERTY, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_B_PATH, new String[] {Session.ACTION_READ, Session.ACTION_ADD_NODE, JackrabbitSession.ACTION_ADD_PROPERTY, JackrabbitSession.ACTION_MODIFY_PROPERTY, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_B_PATH + "/nonExisting", new String[] {Session.ACTION_READ, Session.ACTION_ADD_NODE, JackrabbitSession.ACTION_ADD_PROPERTY, JackrabbitSession.ACTION_MODIFY_PROPERTY, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_B_C_PATH + "/jcr:primaryType", new String[] {Session.ACTION_READ, JackrabbitSession.ACTION_VERSIONING}).
put(TEST_A_B_C_PATH, new String[] {Session.ACTION_ADD_NODE, JackrabbitSession.ACTION_ADD_PROPERTY, JackrabbitSession.ACTION_VERSIONING}).
build();
readOnlyRoot = getRootProvider().createReadOnlyRoot(root);
}
@Override
public void after() throws Exception {
try {
root.refresh();
root.getTree(TEST_PATH).remove();
root.commit();
} finally {
super.after();
}
}
private static void allow(@Nonnull AccessControlManager acMgr,
@Nonnull Principal principal,
@Nullable String path,
@Nonnull String... privilegeNames) throws Exception {
JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, path);
acl.addEntry(principal, AccessControlUtils.privilegesFromNames(acMgr, privilegeNames), true);
acMgr.setPolicy(acl.getPath(), acl);
}
private static void deny(@Nonnull AccessControlManager acMgr,
@Nonnull Principal principal,
@Nullable String path,
@Nonnull String... privilegeNames) throws Exception {
JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, path);
acl.addEntry(principal, AccessControlUtils.privilegesFromNames(acMgr, privilegeNames), false);
acMgr.setPolicy(acl.getPath(), acl);
}
@Nonnull
static String getActionString(@Nonnull String... actions) {
return Text.implode(actions, ",");
}
static void assertCompositeTreePermission(@Nonnull TreePermission tp) {
        assertTrue(tp.getClass() + "", tp instanceof CompositeTreePermission);
}
static void assertCompositeTreePermission(boolean expected, @Nonnull TreePermission tp) {
assertEquals(expected, tp instanceof CompositeTreePermission);
}
abstract AggregatedPermissionProvider getTestPermissionProvider();
boolean reverseOrder() {
return false;
}
List<AggregatedPermissionProvider> getAggregatedProviders(@Nonnull String workspaceName,
@Nonnull AuthorizationConfiguration config,
@Nonnull Set<Principal> principals) {
ImmutableList<AggregatedPermissionProvider> l = ImmutableList.of(
(AggregatedPermissionProvider) config.getPermissionProvider(root, workspaceName, principals),
getTestPermissionProvider());
if (reverseOrder()) {
return l.reverse();
} else {
return l;
}
}
CompositePermissionProvider createPermissionProvider(Principal... principals) {
return createPermissionProvider(ImmutableSet.copyOf(principals));
}
CompositePermissionProvider createPermissionProvider(Set<Principal> principals) {
String workspaceName = root.getContentSession().getWorkspaceName();
AuthorizationConfiguration config = getConfig(AuthorizationConfiguration.class);
return new CompositePermissionProvider(root, getAggregatedProviders(workspaceName, config, principals),
config.getContext(), CompositionType.AND, getRootProvider());
}
CompositePermissionProvider createPermissionProviderOR(Principal... principals) {
return createPermissionProviderOR(ImmutableSet.copyOf(principals));
}
CompositePermissionProvider createPermissionProviderOR(Set<Principal> principals) {
String workspaceName = root.getContentSession().getWorkspaceName();
AuthorizationConfiguration config = getConfig(AuthorizationConfiguration.class);
return new CompositePermissionProvider(root, getAggregatedProviders(workspaceName, config, principals),
config.getContext(), CompositionType.OR, getRootProvider());
}
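    // Note: with CompositionType.AND a permission is expected to be effective only if every aggregated
    // provider grants it, whereas CompositionType.OR should be satisfied as soon as any single provider
    // grants it; the paired *OR test methods below exercise the second variant.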
@Test
public void testRefresh() throws Exception {
createPermissionProvider().refresh();
createPermissionProviderOR().refresh();
}
@Test
public void testHasPrivilegesJcrAll() throws Exception {
PermissionProvider pp = createPermissionProvider();
for (String p : NODE_PATHS) {
Tree tree = readOnlyRoot.getTree(p);
assertFalse(p, pp.hasPrivileges(tree, JCR_ALL));
}
}
@Test
public void testHasPrivilegesJcrAllOR() throws Exception {
PermissionProvider pp = createPermissionProviderOR();
for (String p : NODE_PATHS) {
Tree tree = readOnlyRoot.getTree(p);
assertFalse(p, pp.hasPrivileges(tree, JCR_ALL));
}
}
@Test
public void testHasPrivilegesNone() throws Exception {
PermissionProvider pp = createPermissionProvider();
PermissionProvider ppo = createPermissionProviderOR();
for (String p : NODE_PATHS) {
Tree tree = readOnlyRoot.getTree(p);
assertTrue(p, pp.hasPrivileges(tree));
assertTrue(p, ppo.hasPrivileges(tree));
}
}
@Test
public void testHasPrivilegesOnRepoJcrAll() throws Exception {
PermissionProvider pp = createPermissionProvider();
assertFalse(pp.hasPrivileges(null, JCR_ALL));
PermissionProvider ppo = createPermissionProviderOR();
assertFalse(ppo.hasPrivileges(null, JCR_ALL));
}
@Test
public void testHasPrivilegesOnRepoNone() throws Exception {
PermissionProvider pp = createPermissionProvider();
assertTrue(pp.hasPrivileges(null));
PermissionProvider ppo = createPermissionProviderOR();
assertTrue(ppo.hasPrivileges(null));
}
@Test
public void testIsGrantedAll() throws Exception {
PermissionProvider pp = createPermissionProvider();
PermissionProvider ppo = createPermissionProviderOR();
for (String p : NODE_PATHS) {
Tree tree = readOnlyRoot.getTree(p);
PropertyState ps = tree.getProperty(JcrConstants.JCR_PRIMARYTYPE);
assertFalse(p, pp.isGranted(tree, null, Permissions.ALL));
assertFalse(PathUtils.concat(p, JcrConstants.JCR_PRIMARYTYPE), pp.isGranted(tree, ps, Permissions.ALL));
assertFalse(p, ppo.isGranted(tree, null, Permissions.ALL));
assertFalse(PathUtils.concat(p, JcrConstants.JCR_PRIMARYTYPE), ppo.isGranted(tree, ps, Permissions.ALL));
}
}
@Test
public void testIsGrantedNone() throws Exception {
PermissionProvider pp = createPermissionProvider();
PermissionProvider ppo = createPermissionProviderOR();
for (String p : NODE_PATHS) {
Tree tree = readOnlyRoot.getTree(p);
PropertyState ps = tree.getProperty(JcrConstants.JCR_PRIMARYTYPE);
assertFalse(p, pp.isGranted(tree, null, Permissions.NO_PERMISSION));
assertFalse(PathUtils.concat(p, JcrConstants.JCR_PRIMARYTYPE), pp.isGranted(tree, ps, Permissions.NO_PERMISSION));
assertFalse(p, ppo.isGranted(tree, null, Permissions.NO_PERMISSION));
assertFalse(PathUtils.concat(p, JcrConstants.JCR_PRIMARYTYPE), ppo.isGranted(tree, ps, Permissions.NO_PERMISSION));
}
}
@Test
public void testIsNotGranted() throws Exception {
PermissionProvider pp = createPermissionProvider();
PermissionProvider ppo = createPermissionProviderOR();
for (String p : NODE_PATHS) {
Tree tree = readOnlyRoot.getTree(p);
PropertyState ps = tree.getProperty(JcrConstants.JCR_PRIMARYTYPE);
assertFalse(p, pp.isGranted(tree, null, Permissions.MODIFY_ACCESS_CONTROL));
assertFalse(PathUtils.concat(p, JcrConstants.JCR_PRIMARYTYPE), pp.isGranted(tree, ps, Permissions.MODIFY_ACCESS_CONTROL));
assertFalse(p, ppo.isGranted(tree, null, Permissions.MODIFY_ACCESS_CONTROL));
assertFalse(PathUtils.concat(p, JcrConstants.JCR_PRIMARYTYPE), ppo.isGranted(tree, ps, Permissions.MODIFY_ACCESS_CONTROL));
}
}
@Test
public void testIsGrantedActionNone() throws Exception {
PermissionProvider pp = createPermissionProvider();
PermissionProvider ppo = createPermissionProviderOR();
String actions = "";
for (String nodePath : NODE_PATHS) {
assertFalse(nodePath, pp.isGranted(nodePath, actions));
assertFalse(nodePath, ppo.isGranted(nodePath, actions));
String propPath = PathUtils.concat(nodePath, JcrConstants.JCR_PRIMARYTYPE);
assertFalse(propPath, pp.isGranted(propPath, actions));
assertFalse(propPath, ppo.isGranted(propPath, actions));
String nonExPath = PathUtils.concat(nodePath, "nonExisting");
assertFalse(nonExPath, pp.isGranted(nonExPath, actions));
assertFalse(nonExPath, ppo.isGranted(nonExPath, actions));
}
}
@Test
public void testIsNotGrantedAction() throws Exception {
PermissionProvider pp = createPermissionProvider();
PermissionProvider ppo = createPermissionProviderOR();
String[] actions = new String[]{JackrabbitSession.ACTION_LOCKING, JackrabbitSession.ACTION_MODIFY_ACCESS_CONTROL};
for (String nodePath : NODE_PATHS) {
String actionStr = getActionString(actions);
assertFalse(nodePath, pp.isGranted(nodePath, actionStr));
assertFalse(nodePath, ppo.isGranted(nodePath, actionStr));
String propPath = PathUtils.concat(nodePath, JcrConstants.JCR_PRIMARYTYPE);
assertFalse(propPath, pp.isGranted(propPath, actionStr));
assertFalse(propPath, ppo.isGranted(propPath, actionStr));
String nonExPath = PathUtils.concat(nodePath, "nonExisting");
assertFalse(nonExPath, pp.isGranted(nonExPath, actionStr));
assertFalse(nonExPath, ppo.isGranted(nonExPath, actionStr));
}
}
@Test
public void testGetTreePermissionAllParent() throws Exception {
TreePermission tp = createPermissionProvider().getTreePermission(readOnlyRoot.getTree(TEST_PATH), TreePermission.ALL);
assertSame(TreePermission.ALL, tp);
TreePermission tpo = createPermissionProviderOR().getTreePermission(readOnlyRoot.getTree(TEST_PATH), TreePermission.ALL);
assertSame(TreePermission.ALL, tpo);
}
@Test
public void testGetTreePermissionEmptyParent() throws Exception {
TreePermission tp = createPermissionProvider().getTreePermission(readOnlyRoot.getTree(TEST_PATH), TreePermission.EMPTY);
assertSame(TreePermission.EMPTY, tp);
TreePermission tpo = createPermissionProviderOR().getTreePermission(readOnlyRoot.getTree(TEST_PATH), TreePermission.EMPTY);
assertSame(TreePermission.EMPTY, tpo);
}
@Test
public void testTreePermissionIsGrantedAll() throws Exception {
PermissionProvider pp = createPermissionProvider();
TreePermission parentPermission = TreePermission.EMPTY;
PropertyState ps = PropertyStates.createProperty("propName", "val");
for (String path : TP_PATHS) {
Tree t = readOnlyRoot.getTree(path);
TreePermission tp = pp.getTreePermission(t, parentPermission);
assertFalse(tp.isGranted(Permissions.ALL));
assertFalse(tp.isGranted(Permissions.ALL, ps));
parentPermission = tp;
}
}
@Test
public void testTreePermissionIsGrantedAllOR() throws Exception {
PermissionProvider pp = createPermissionProviderOR();
TreePermission parentPermission = TreePermission.EMPTY;
PropertyState ps = PropertyStates.createProperty("propName", "val");
for (String path : TP_PATHS) {
Tree t = readOnlyRoot.getTree(path);
TreePermission tp = pp.getTreePermission(t, parentPermission);
assertFalse(tp.isGranted(Permissions.ALL));
assertFalse(tp.isGranted(Permissions.ALL, ps));
parentPermission = tp;
}
}
@Test
public void testTreePermissionIsNotGranted() throws Exception {
PermissionProvider pp = createPermissionProvider();
TreePermission parentPermission = TreePermission.EMPTY;
PropertyState ps = PropertyStates.createProperty("propName", "val");
for (String path : TP_PATHS) {
Tree t = readOnlyRoot.getTree(path);
TreePermission tp = pp.getTreePermission(t, parentPermission);
assertFalse(tp.isGranted(Permissions.NO_PERMISSION));
assertFalse(tp.isGranted(Permissions.MODIFY_ACCESS_CONTROL));
assertFalse(tp.isGranted(Permissions.NO_PERMISSION, ps));
assertFalse(tp.isGranted(Permissions.MODIFY_ACCESS_CONTROL, ps));
parentPermission = tp;
}
}
@Test
public void testTreePermissionIsNotGrantedOR() throws Exception {
PermissionProvider pp = createPermissionProviderOR();
TreePermission parentPermission = TreePermission.EMPTY;
PropertyState ps = PropertyStates.createProperty("propName", "val");
for (String path : TP_PATHS) {
Tree t = readOnlyRoot.getTree(path);
TreePermission tp = pp.getTreePermission(t, parentPermission);
assertFalse(tp.isGranted(Permissions.NO_PERMISSION));
assertFalse(tp.isGranted(Permissions.MODIFY_ACCESS_CONTROL));
assertFalse(tp.isGranted(Permissions.NO_PERMISSION, ps));
assertFalse(tp.isGranted(Permissions.MODIFY_ACCESS_CONTROL, ps));
parentPermission = tp;
}
}
@Test
public void testTreePermissionCanReadAll() throws Exception {
PermissionProvider pp = createPermissionProvider();
TreePermission parentPermission = TreePermission.EMPTY;
PermissionProvider ppO = createPermissionProviderOR();
TreePermission parentPermissionO = TreePermission.EMPTY;
for (String path : TP_PATHS) {
TreePermission tp = pp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
assertFalse(tp.canReadAll());
parentPermission = tp;
TreePermission tpO = ppO.getTreePermission(readOnlyRoot.getTree(path), parentPermissionO);
assertFalse(tpO.canReadAll());
parentPermissionO = tpO;
}
}
@Test
public void testTreePermissionCanReadProperties() throws Exception {
PermissionProvider pp = createPermissionProvider();
TreePermission parentPermission = TreePermission.EMPTY;
for (String path : TP_PATHS) {
TreePermission tp = pp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
assertFalse(tp.canReadProperties());
parentPermission = tp;
}
}
@Test
public void testTreePermissionCanReadPropertiesOR() throws Exception {
PermissionProvider pp = createPermissionProviderOR();
TreePermission parentPermission = TreePermission.EMPTY;
for (String path : TP_PATHS) {
TreePermission tp = pp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
assertFalse(tp.canReadProperties());
parentPermission = tp;
}
}
@Test
public void testGetTreePermissionInstance() throws Exception {
PermissionProvider pp = createPermissionProvider();
TreePermission parentPermission = TreePermission.EMPTY;
for (String path : TP_PATHS) {
TreePermission tp = pp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
assertCompositeTreePermission(tp);
parentPermission = tp;
}
}
@Test
public void testGetTreePermissionInstanceOR() throws Exception {
PermissionProvider pp = createPermissionProviderOR();
TreePermission parentPermission = TreePermission.EMPTY;
for (String path : TP_PATHS) {
TreePermission tp = pp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
assertCompositeTreePermission(tp);
parentPermission = tp;
}
}
@Test
public void testTreePermissionGetChild() throws Exception {
List<String> childNames = ImmutableList.of("test", "a", "b", "c", "nonexisting");
Tree rootTree = readOnlyRoot.getTree(ROOT_PATH);
NodeState ns = ((ImmutableTree) rootTree).getNodeState();
TreePermission tp = createPermissionProvider().getTreePermission(rootTree, TreePermission.EMPTY);
for (String cName : childNames) {
ns = ns.getChildNode(cName);
tp = tp.getChildPermission(cName, ns);
assertCompositeTreePermission(tp);
}
}
@Test
public void testTreePermissionGetChildOR() throws Exception {
List<String> childNames = ImmutableList.of("test", "a", "b", "c", "nonexisting");
Tree rootTree = readOnlyRoot.getTree(ROOT_PATH);
NodeState ns = ((ImmutableTree) rootTree).getNodeState();
TreePermission tp = createPermissionProviderOR().getTreePermission(rootTree, TreePermission.EMPTY);
for (String cName : childNames) {
ns = ns.getChildNode(cName);
tp = tp.getChildPermission(cName, ns);
assertCompositeTreePermission(tp);
}
}
@Test
public void testGetRepositoryPermissionInstance() throws Exception {
RepositoryPermission rp = createPermissionProvider().getRepositoryPermission();
assertTrue(rp.getClass().getName().endsWith("CompositeRepositoryPermission"));
RepositoryPermission rpO = createPermissionProviderOR().getRepositoryPermission();
assertTrue(rpO.getClass().getName().endsWith("CompositeRepositoryPermission"));
}
@Test
public void testRepositoryPermissionIsNotGranted() throws Exception {
RepositoryPermission rp = createPermissionProvider().getRepositoryPermission();
assertFalse(rp.isGranted(Permissions.PRIVILEGE_MANAGEMENT));
        assertFalse(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT | Permissions.PRIVILEGE_MANAGEMENT));
assertFalse(rp.isGranted(Permissions.WORKSPACE_MANAGEMENT));
assertFalse(rp.isGranted(Permissions.ALL));
assertFalse(rp.isGranted(Permissions.NO_PERMISSION));
}
@Test
public void testRepositoryPermissionIsNotGrantedOR() throws Exception {
RepositoryPermission rp = createPermissionProviderOR().getRepositoryPermission();
assertFalse(rp.isGranted(Permissions.PRIVILEGE_MANAGEMENT));
        assertFalse(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT | Permissions.PRIVILEGE_MANAGEMENT));
assertFalse(rp.isGranted(Permissions.WORKSPACE_MANAGEMENT));
assertFalse(rp.isGranted(Permissions.ALL));
assertFalse(rp.isGranted(Permissions.NO_PERMISSION));
}
}
| mduerig/jackrabbit-oak | oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/composite/AbstractCompositeProviderTest.java | Java | apache-2.0 | 29,168 |
/*
* Copyright 2008-2012 Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.logging;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.ListTable;
import com.intellij.codeInspection.ui.ListWrappingTableModel;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.ui.UiUtils;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class LoggerInitializedWithForeignClassInspection extends BaseInspection {
@NonNls private static final String DEFAULT_LOGGER_CLASS_NAMES =
"org.apache.log4j.Logger,org.slf4j.LoggerFactory,org.apache.commons.logging.LogFactory,java.util.logging.Logger";
@NonNls private static final String DEFAULT_FACTORY_METHOD_NAMES = "getLogger,getLogger,getLog,getLogger";
@SuppressWarnings({"PublicField"})
public String loggerClassName = DEFAULT_LOGGER_CLASS_NAMES;
  private final List<String> loggerFactoryClassNames = new ArrayList<String>();
@SuppressWarnings({"PublicField"})
public String loggerFactoryMethodName = DEFAULT_FACTORY_METHOD_NAMES;
  private final List<String> loggerFactoryMethodNames = new ArrayList<String>();
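  // Illustration of the pattern this inspection reports (class and logger names are made up):
  //
  //   public class Foo {
  //       // flagged: the logger is initialized with a foreign class literal
  //       private static final Logger LOG = Logger.getLogger(Bar.class);
  //   }
  //
  // The quick fix replaces the foreign class literal with the containing class, i.e. Foo.class here.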
@Override
@NotNull
public String getDisplayName() {
return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.display.name");
}
@Override
@NotNull
protected String buildErrorString(Object... infos) {
return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.problem.descriptor");
}
@Override
public JComponent createOptionsPanel() {
final ListTable table = new ListTable(
new ListWrappingTableModel(Arrays.asList(loggerFactoryClassNames, loggerFactoryMethodNames),
InspectionGadgetsBundle.message("logger.factory.class.name"),
InspectionGadgetsBundle.message("logger.factory.method.name")));
return UiUtils.createAddRemovePanel(table);
}
@Override
@Nullable
protected InspectionGadgetsFix buildFix(Object... infos) {
return new LoggerInitializedWithForeignClassFix((String)infos[0]);
}
private static class LoggerInitializedWithForeignClassFix extends InspectionGadgetsFix {
private final String newClassName;
private LoggerInitializedWithForeignClassFix(String newClassName) {
this.newClassName = newClassName;
}
@Override
@NotNull
public String getName() {
return InspectionGadgetsBundle.message(
"logger.initialized.with.foreign.class.quickfix",
newClassName);
}
@Override
protected void doFix(Project project, ProblemDescriptor descriptor) throws IncorrectOperationException {
final PsiElement element = descriptor.getPsiElement();
if (!(element instanceof PsiClassObjectAccessExpression)) {
return;
}
final PsiClassObjectAccessExpression classObjectAccessExpression = (PsiClassObjectAccessExpression)element;
replaceExpression(classObjectAccessExpression, newClassName + ".class");
}
}
@Override
public BaseInspectionVisitor buildVisitor() {
return new LoggerInitializedWithForeignClassVisitor();
}
private class LoggerInitializedWithForeignClassVisitor extends BaseInspectionVisitor {
@Override
public void visitClassObjectAccessExpression(PsiClassObjectAccessExpression expression) {
super.visitClassObjectAccessExpression(expression);
PsiElement parent = expression.getParent();
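      // The class literal may be passed to the factory method either directly, as in getLogger(Foo.class),
      // or behind a getName() call, as in getLogger(Foo.class.getName()); the branch below unwraps the
      // latter form so both variants are checked against the containing class.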
if (parent instanceof PsiReferenceExpression) {
final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)parent;
if (!expression.equals(referenceExpression.getQualifierExpression())) {
return;
}
final String name = referenceExpression.getReferenceName();
if (!"getName".equals(name)) {
return;
}
final PsiElement grandParent = referenceExpression.getParent();
if (!(grandParent instanceof PsiMethodCallExpression)) {
return;
}
final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent;
final PsiExpressionList list = methodCallExpression.getArgumentList();
if (list.getExpressions().length != 0) {
return;
}
parent = methodCallExpression.getParent();
}
if (!(parent instanceof PsiExpressionList)) {
return;
}
final PsiElement grandParent = parent.getParent();
if (!(grandParent instanceof PsiMethodCallExpression)) {
return;
}
final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent;
final PsiExpressionList argumentList = methodCallExpression.getArgumentList();
final PsiExpression[] expressions = argumentList.getExpressions();
if (expressions.length != 1) {
return;
}
final PsiClass containingClass = PsiTreeUtil.getParentOfType(expression, PsiClass.class);
if (containingClass == null) {
return;
}
final String containingClassName = containingClass.getName();
if (containingClassName == null) {
return;
}
final PsiMethod method = methodCallExpression.resolveMethod();
if (method == null) {
return;
}
final PsiClass aClass = method.getContainingClass();
if (aClass == null) {
return;
}
final String className = aClass.getQualifiedName();
final int index = loggerFactoryClassNames.indexOf(className);
if (index < 0) {
return;
}
final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
final String referenceName = methodExpression.getReferenceName();
final String loggerFactoryMethodName = loggerFactoryMethodNames.get(index);
if (!loggerFactoryMethodName.equals(referenceName)) {
return;
}
final PsiTypeElement operand = expression.getOperand();
final PsiType type = operand.getType();
if (!(type instanceof PsiClassType)) {
return;
}
final PsiClassType classType = (PsiClassType)type;
final PsiClass initializerClass = classType.resolve();
if (initializerClass == null) {
return;
}
if (containingClass.equals(initializerClass)) {
return;
}
registerError(expression, containingClassName);
}
}
@Override
public void readSettings(@NotNull Element element) throws InvalidDataException {
super.readSettings(element);
parseString(loggerClassName, loggerFactoryClassNames);
parseString(loggerFactoryMethodName, loggerFactoryMethodNames);
if (loggerFactoryClassNames.size() != loggerFactoryMethodNames.size()) {
parseString(DEFAULT_LOGGER_CLASS_NAMES, loggerFactoryClassNames);
parseString(DEFAULT_FACTORY_METHOD_NAMES, loggerFactoryMethodNames);
}
}
@Override
public void writeSettings(@NotNull Element element) throws WriteExternalException {
loggerClassName = formatString(loggerFactoryClassNames);
loggerFactoryMethodName = formatString(loggerFactoryMethodNames);
super.writeSettings(element);
  }
}
| android-ia/platform_tools_idea | plugins/InspectionGadgets/src/com/siyeh/ig/logging/LoggerInitializedWithForeignClassInspection.java | Java | apache-2.0 | 8,250 |
package com.alibaba.dubbo.rpc.protocol.thrift;
import java.util.HashMap;
import java.util.Map;
/**
* @author <a href="mailto:[email protected]">kimi</a>
*/
public enum ThriftType {
BOOL, BYTE, I16, I32, I64, DOUBLE, STRING;
private static final Map<Class<?>, ThriftType> types =
new HashMap<Class<?>, ThriftType>();
    static {
        put(boolean.class, BOOL);
        put(Boolean.class, BOOL);
        put(byte.class, BYTE);
        put(Byte.class, BYTE);
        put(short.class, I16);
        put(Short.class, I16);
        put(int.class, I32);
        put(Integer.class, I32);
        put(long.class, I64);
        put(Long.class, I64);
        put(double.class, DOUBLE);
        put(Double.class, DOUBLE);
        put(String.class, STRING);
    }
public static ThriftType get(Class<?> key) {
if (key != null) {
return types.get(key);
}
throw new NullPointerException("key == null");
}
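    // Typical usage (illustrative only): ThriftType.get(boolean.class) and ThriftType.get(Boolean.class)
    // both yield BOOL, while passing null results in a NullPointerException.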
private static void put(Class<?> key, ThriftType value) {
types.put(key, value);
}
}
| way-way/dubbo | dubbo-rpc/dubbo-rpc-thrift/src/main/java/com/alibaba/dubbo/rpc/protocol/thrift/ThriftType.java | Java | apache-2.0 | 816 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.hbase.source;
import org.apache.flink.annotation.Experimental;
import org.apache.flink.api.common.io.InputFormat;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
/**
 * {@link InputFormat} subclass that wraps access to HBase tables ({@link HTable}).
*/
@Experimental
public abstract class HBaseInputFormat<T extends Tuple> extends AbstractTableInputFormat<T> {
private static final long serialVersionUID = 1L;
/**
* Returns an instance of Scan that retrieves the required subset of records from the HBase table.
	 * @return The appropriate instance of Scan for this use case.
*/
protected abstract Scan getScanner();
/**
	 * Returns the name of the table to be read.
	 * Each instance of a TableInputFormat derivative reads exactly one table.
* @return The name of the table
*/
protected abstract String getTableName();
/**
* The output from HBase is always an instance of {@link Result}.
	 * This method copies the data from the Result instance into the required {@link Tuple}.
* @param r The Result instance from HBase that needs to be converted
* @return The appropriate instance of {@link Tuple} that contains the needed information.
*/
protected abstract T mapResultToTuple(Result r);
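	// A minimal subclass might look like the following sketch (the table, column family and qualifier
	// names are placeholders; Bytes is org.apache.hadoop.hbase.util.Bytes, Tuple2 is Flink's tuple type):
	//
	//   public class MyTableSource extends HBaseInputFormat<Tuple2<String, String>> {
	//       protected Scan getScanner() {
	//           return new Scan().addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));
	//       }
	//       protected String getTableName() {
	//           return "my-table";
	//       }
	//       protected Tuple2<String, String> mapResultToTuple(Result r) {
	//           return Tuple2.of(Bytes.toString(r.getRow()),
	//               Bytes.toString(r.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"))));
	//       }
	//   }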
/**
* Creates a {@link Scan} object and opens the {@link HTable} connection.
* These are opened here because they are needed in the createInputSplits
* which is called before the openInputFormat method.
* So the connection is opened in {@link #configure(Configuration)} and closed in {@link #closeInputFormat()}.
*
* @param parameters The configuration that is to be used
* @see Configuration
*/
@Override
public void configure(Configuration parameters) {
table = createTable();
if (table != null) {
scan = getScanner();
}
}
/**
* Create an {@link HTable} instance and set it into this format.
*/
private HTable createTable() {
LOG.info("Initializing HBaseConfiguration");
//use files found in the classpath
org.apache.hadoop.conf.Configuration hConf = HBaseConfiguration.create();
try {
return new HTable(hConf, getTableName());
} catch (Exception e) {
LOG.error("Error instantiating a new HTable instance", e);
}
return null;
}
protected T mapResultToOutType(Result r) {
return mapResultToTuple(r);
}
}
| hequn8128/flink | flink-connectors/flink-connector-hbase/src/main/java/org/apache/flink/connector/hbase/source/HBaseInputFormat.java | Java | apache-2.0 | 3,348 |
/*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.util;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
/**
* Miscellaneous class utility methods. Mainly for internal use within the framework; consider <a
* href="http://commons.apache.org/lang/" target="_blank">Apache Commons Lang</a> for a more comprehensive suite of
* class utilities.
*
* @author Juergen Hoeller
* @author Keith Donald
* @author Rob Harrop
* @author Sam Brannen
* @author Roy Clarkson
* @since 1.0
* @see TypeUtils
* @see ReflectionUtils
*/
@SuppressWarnings("unchecked")
public abstract class ClassUtils {
/** Suffix for array class names: "[]" */
public static final String ARRAY_SUFFIX = "[]";
/** Prefix for internal array class names: "[" */
private static final String INTERNAL_ARRAY_PREFIX = "[";
/** Prefix for internal non-primitive array class names: "[L" */
private static final String NON_PRIMITIVE_ARRAY_PREFIX = "[L";
/** The package separator character '.' */
private static final char PACKAGE_SEPARATOR = '.';
/** The inner class separator character '$' */
private static final char INNER_CLASS_SEPARATOR = '$';
/** The CGLIB class separator character "$$" */
public static final String CGLIB_CLASS_SEPARATOR = "$$";
/** The ".class" file suffix */
public static final String CLASS_FILE_SUFFIX = ".class";
/**
* Map with primitive wrapper type as key and corresponding primitive type as value, for example: Integer.class ->
* int.class.
*/
private static final Map<Class<?>, Class<?>> primitiveWrapperTypeMap = new HashMap<Class<?>, Class<?>>(8);
/**
* Map with primitive type as key and corresponding wrapper type as value, for example: int.class -> Integer.class.
*/
private static final Map<Class<?>, Class<?>> primitiveTypeToWrapperMap = new HashMap<Class<?>, Class<?>>(8);
/**
	 * Map with primitive type name as key and corresponding primitive type as value, for example: "int" -> int.class.
*/
private static final Map<String, Class<?>> primitiveTypeNameMap = new HashMap<String, Class<?>>(32);
/**
* Map with common "java.lang" class name as key and corresponding Class as value. Primarily for efficient
* deserialization of remote invocations.
*/
private static final Map<String, Class<?>> commonClassCache = new HashMap<String, Class<?>>(32);
static {
primitiveWrapperTypeMap.put(Boolean.class, boolean.class);
primitiveWrapperTypeMap.put(Byte.class, byte.class);
primitiveWrapperTypeMap.put(Character.class, char.class);
primitiveWrapperTypeMap.put(Double.class, double.class);
primitiveWrapperTypeMap.put(Float.class, float.class);
primitiveWrapperTypeMap.put(Integer.class, int.class);
primitiveWrapperTypeMap.put(Long.class, long.class);
primitiveWrapperTypeMap.put(Short.class, short.class);
for (Map.Entry<Class<?>, Class<?>> entry : primitiveWrapperTypeMap.entrySet()) {
primitiveTypeToWrapperMap.put(entry.getValue(), entry.getKey());
registerCommonClasses(entry.getKey());
}
Set<Class<?>> primitiveTypes = new HashSet<Class<?>>(32);
primitiveTypes.addAll(primitiveWrapperTypeMap.values());
primitiveTypes.addAll(Arrays.asList(boolean[].class, byte[].class, char[].class, double[].class, float[].class, int[].class, long[].class, short[].class));
primitiveTypes.add(void.class);
for (Class<?> primitiveType : primitiveTypes) {
primitiveTypeNameMap.put(primitiveType.getName(), primitiveType);
}
registerCommonClasses(Boolean[].class, Byte[].class, Character[].class, Double[].class, Float[].class, Integer[].class, Long[].class, Short[].class);
registerCommonClasses(Number.class, Number[].class, String.class, String[].class, Object.class, Object[].class, Class.class, Class[].class);
registerCommonClasses(Throwable.class, Exception.class, RuntimeException.class, Error.class, StackTraceElement.class, StackTraceElement[].class);
}
/**
* Register the given common classes with the ClassUtils cache.
*/
private static void registerCommonClasses(Class<?>... commonClasses) {
for (Class<?> clazz : commonClasses) {
commonClassCache.put(clazz.getName(), clazz);
}
}
/**
* Return the default ClassLoader to use: typically the thread context ClassLoader, if available; the ClassLoader
* that loaded the ClassUtils class will be used as fallback.
* <p>
* Call this method if you intend to use the thread context ClassLoader in a scenario where you absolutely need a
* non-null ClassLoader reference: for example, for class path resource loading (but not necessarily for
* <code>Class.forName</code>, which accepts a <code>null</code> ClassLoader reference as well).
* @return the default ClassLoader (never <code>null</code>)
* @see java.lang.Thread#getContextClassLoader()
*/
public static ClassLoader getDefaultClassLoader() {
ClassLoader cl = null;
try {
cl = Thread.currentThread().getContextClassLoader();
} catch (Throwable ex) {
// Cannot access thread context ClassLoader - falling back to system class loader...
}
if (cl == null) {
// No thread context class loader -> use class loader of this class.
cl = ClassUtils.class.getClassLoader();
}
return cl;
}
/**
* Override the thread context ClassLoader with the environment's bean ClassLoader if necessary, i.e. if the bean
* ClassLoader is not equivalent to the thread context ClassLoader already.
* @param classLoaderToUse the actual ClassLoader to use for the thread context
* @return the original thread context ClassLoader, or <code>null</code> if not overridden
*/
public static ClassLoader overrideThreadContextClassLoader(ClassLoader classLoaderToUse) {
Thread currentThread = Thread.currentThread();
ClassLoader threadContextClassLoader = currentThread.getContextClassLoader();
if (classLoaderToUse != null && !classLoaderToUse.equals(threadContextClassLoader)) {
currentThread.setContextClassLoader(classLoaderToUse);
return threadContextClassLoader;
} else {
return null;
}
}
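	// Typical usage sketch (illustrative; targetClassLoader is a placeholder): override the context
	// ClassLoader for a bounded piece of work and restore the original one afterwards.
	//
	//   ClassLoader original = ClassUtils.overrideThreadContextClassLoader(targetClassLoader);
	//   try {
	//       // work that relies on the thread context ClassLoader
	//   }
	//   finally {
	//       if (original != null) {
	//           Thread.currentThread().setContextClassLoader(original);
	//       }
	//   }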
/**
* Replacement for <code>Class.forName()</code> that also returns Class instances for primitives (like "int") and
* array class names (like "String[]").
* <p>
* Always uses the default class loader: that is, preferably the thread context class loader, or the ClassLoader
* that loaded the ClassUtils class as fallback.
* @param name the name of the Class
* @return Class instance for the supplied name
* @throws ClassNotFoundException if the class was not found
* @throws LinkageError if the class file could not be loaded
* @see Class#forName(String, boolean, ClassLoader)
* @see #getDefaultClassLoader()
* @deprecated as of Spring 3.0, in favor of specifying a ClassLoader explicitly: see
* {@link #forName(String, ClassLoader)}
*/
@Deprecated
public static Class<?> forName(String name) throws ClassNotFoundException, LinkageError {
return forName(name, getDefaultClassLoader());
}
/**
	 * Replacement for <code>Class.forName()</code> that also returns Class instances for primitives (e.g. "int") and
* array class names (e.g. "String[]"). Furthermore, it is also capable of resolving inner class names in Java
* source style (e.g. "java.lang.Thread.State" instead of "java.lang.Thread$State").
* @param name the name of the Class
* @param classLoader the class loader to use (may be <code>null</code>, which indicates the default class loader)
* @return Class instance for the supplied name
* @throws ClassNotFoundException if the class was not found
* @throws LinkageError if the class file could not be loaded
* @see Class#forName(String, boolean, ClassLoader)
*/
public static Class<?> forName(String name, ClassLoader classLoader) throws ClassNotFoundException, LinkageError {
Assert.notNull(name, "Name must not be null");
Class<?> clazz = resolvePrimitiveClassName(name);
if (clazz == null) {
clazz = commonClassCache.get(name);
}
if (clazz != null) {
return clazz;
}
// "java.lang.String[]" style arrays
if (name.endsWith(ARRAY_SUFFIX)) {
String elementClassName = name.substring(0, name.length() - ARRAY_SUFFIX.length());
Class<?> elementClass = forName(elementClassName, classLoader);
return Array.newInstance(elementClass, 0).getClass();
}
// "[Ljava.lang.String;" style arrays
if (name.startsWith(NON_PRIMITIVE_ARRAY_PREFIX) && name.endsWith(";")) {
String elementName = name.substring(NON_PRIMITIVE_ARRAY_PREFIX.length(), name.length() - 1);
Class<?> elementClass = forName(elementName, classLoader);
return Array.newInstance(elementClass, 0).getClass();
}
// "[[I" or "[[Ljava.lang.String;" style arrays
if (name.startsWith(INTERNAL_ARRAY_PREFIX)) {
String elementName = name.substring(INTERNAL_ARRAY_PREFIX.length());
Class<?> elementClass = forName(elementName, classLoader);
return Array.newInstance(elementClass, 0).getClass();
}
ClassLoader classLoaderToUse = classLoader;
if (classLoaderToUse == null) {
classLoaderToUse = getDefaultClassLoader();
}
try {
return classLoaderToUse.loadClass(name);
} catch (ClassNotFoundException ex) {
int lastDotIndex = name.lastIndexOf('.');
if (lastDotIndex != -1) {
String innerClassName = name.substring(0, lastDotIndex) + '$' + name.substring(lastDotIndex + 1);
try {
return classLoaderToUse.loadClass(innerClassName);
} catch (ClassNotFoundException ex2) {
// swallow - let original exception get through
}
}
throw ex;
}
}
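	// A few illustrative resolutions (class names chosen for the example):
	//
	//   ClassUtils.forName("int", null)                    -> int.class
	//   ClassUtils.forName("java.lang.String[]", null)     -> String[].class
	//   ClassUtils.forName("java.lang.Thread.State", null) -> Thread.State.class (via the inner-class fallback)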
/**
* Resolve the given class name into a Class instance. Supports primitives (like "int") and array class names (like
* "String[]").
* <p>
* This is effectively equivalent to the <code>forName</code> method with the same arguments, with the only
* difference being the exceptions thrown in case of class loading failure.
* @param className the name of the Class
* @param classLoader the class loader to use (may be <code>null</code>, which indicates the default class loader)
* @return Class instance for the supplied name
* @throws IllegalArgumentException if the class name was not resolvable (that is, the class could not be found or
* the class file could not be loaded)
* @see #forName(String, ClassLoader)
*/
public static Class<?> resolveClassName(String className, ClassLoader classLoader) throws IllegalArgumentException {
try {
return forName(className, classLoader);
} catch (ClassNotFoundException ex) {
throw new IllegalArgumentException("Cannot find class [" + className + "]", ex);
} catch (LinkageError ex) {
throw new IllegalArgumentException("Error loading class [" + className + "]: problem with class file or dependent class.", ex);
}
}
/**
* Resolve the given class name as primitive class, if appropriate, according to the JVM's naming rules for
* primitive classes.
* <p>
* Also supports the JVM's internal class names for primitive arrays. Does <i>not</i> support the "[]" suffix
* notation for primitive arrays; this is only supported by {@link #forName(String, ClassLoader)}.
* @param name the name of the potentially primitive class
* @return the primitive class, or <code>null</code> if the name does not denote a primitive class or primitive
* array class
*/
public static Class<?> resolvePrimitiveClassName(String name) {
Class<?> result = null;
// Most class names will be quite long, considering that they
// SHOULD sit in a package, so a length check is worthwhile.
if (name != null && name.length() <= 8) {
// Could be a primitive - likely.
result = primitiveTypeNameMap.get(name);
}
return result;
}
/**
* Determine whether the {@link Class} identified by the supplied name is present and can be loaded. Will return
* <code>false</code> if either the class or one of its dependencies is not present or cannot be loaded.
* @param className the name of the class to check
* @return whether the specified class is present
* @deprecated as of Spring 2.5, in favor of {@link #isPresent(String, ClassLoader)}
*/
@Deprecated
public static boolean isPresent(String className) {
return isPresent(className, getDefaultClassLoader());
}
/**
* Determine whether the {@link Class} identified by the supplied name is present and can be loaded. Will return
* <code>false</code> if either the class or one of its dependencies is not present or cannot be loaded.
* @param className the name of the class to check
* @param classLoader the class loader to use (may be <code>null</code>, which indicates the default class loader)
* @return whether the specified class is present
*/
public static boolean isPresent(String className, ClassLoader classLoader) {
try {
forName(className, classLoader);
return true;
} catch (Throwable ex) {
// Class or one of its dependencies is not present...
return false;
}
}
/**
* Return the user-defined class for the given instance: usually simply the class of the given instance, but the
* original class in case of a CGLIB-generated subclass.
* @param instance the instance to check
* @return the user-defined class
*/
public static Class<?> getUserClass(Object instance) {
Assert.notNull(instance, "Instance must not be null");
return getUserClass(instance.getClass());
}
/**
* Return the user-defined class for the given class: usually simply the given class, but the original class in case
* of a CGLIB-generated subclass.
* @param clazz the class to check
* @return the user-defined class
*/
public static Class<?> getUserClass(Class<?> clazz) {
if (clazz != null && clazz.getName().contains(CGLIB_CLASS_SEPARATOR)) {
Class<?> superClass = clazz.getSuperclass();
if (superClass != null && !Object.class.equals(superClass)) {
return superClass;
}
}
return clazz;
}
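	// Example (the CGLIB class name is illustrative): for a generated subclass such as
	// com.example.Foo$$EnhancerByCGLIB$$1a2b3c, getUserClass returns com.example.Foo; for a plain
	// class it simply returns the class itself.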
/**
* Check whether the given class is cache-safe in the given context, i.e. whether it is loaded by the given
* ClassLoader or a parent of it.
* @param clazz the class to analyze
* @param classLoader the ClassLoader to potentially cache metadata in
*/
public static boolean isCacheSafe(Class<?> clazz, ClassLoader classLoader) {
Assert.notNull(clazz, "Class must not be null");
ClassLoader target = clazz.getClassLoader();
if (target == null) {
return false;
}
ClassLoader cur = classLoader;
if (cur == target) {
return true;
}
while (cur != null) {
cur = cur.getParent();
if (cur == target) {
return true;
}
}
return false;
}
/**
* Get the class name without the qualified package name.
* @param className the className to get the short name for
* @return the class name of the class without the package name
* @throws IllegalArgumentException if the className is empty
*/
public static String getShortName(String className) {
Assert.hasLength(className, "Class name must not be empty");
int lastDotIndex = className.lastIndexOf(PACKAGE_SEPARATOR);
int nameEndIndex = className.indexOf(CGLIB_CLASS_SEPARATOR);
if (nameEndIndex == -1) {
nameEndIndex = className.length();
}
String shortName = className.substring(lastDotIndex + 1, nameEndIndex);
shortName = shortName.replace(INNER_CLASS_SEPARATOR, PACKAGE_SEPARATOR);
return shortName;
}
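	// Examples (illustrative): getShortName("java.lang.String") returns "String", and
	// getShortName("java.util.Map$Entry") returns "Map.Entry" because the inner-class
	// separator is converted to a dot.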
/**
* Get the class name without the qualified package name.
* @param clazz the class to get the short name for
* @return the class name of the class without the package name
*/
public static String getShortName(Class<?> clazz) {
return getShortName(getQualifiedName(clazz));
}
/**
* Determine the name of the class file, relative to the containing package: e.g. "String.class"
* @param clazz the class
* @return the file name of the ".class" file
*/
public static String getClassFileName(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
String className = clazz.getName();
int lastDotIndex = className.lastIndexOf(PACKAGE_SEPARATOR);
return className.substring(lastDotIndex + 1) + CLASS_FILE_SUFFIX;
}
/**
* Determine the name of the package of the given class: e.g. "java.lang" for the <code>java.lang.String</code>
* class.
* @param clazz the class
* @return the package name, or the empty String if the class is defined in the default package
*/
public static String getPackageName(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
String className = clazz.getName();
int lastDotIndex = className.lastIndexOf(PACKAGE_SEPARATOR);
return (lastDotIndex != -1 ? className.substring(0, lastDotIndex) : "");
}
/**
* Return the qualified name of the given class: usually simply the class name, but component type class name + "[]"
* for arrays.
* @param clazz the class
* @return the qualified name of the class
*/
public static String getQualifiedName(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
if (clazz.isArray()) {
return getQualifiedNameForArray(clazz);
} else {
return clazz.getName();
}
}
/**
* Build a nice qualified name for an array: component type class name + "[]".
* @param clazz the array class
* @return a qualified name for the array class
*/
private static String getQualifiedNameForArray(Class<?> clazz) {
StringBuilder result = new StringBuilder();
while (clazz.isArray()) {
clazz = clazz.getComponentType();
result.append(ClassUtils.ARRAY_SUFFIX);
}
result.insert(0, clazz.getName());
return result.toString();
}
/**
* Return the qualified name of the given method, consisting of fully qualified interface/class name + "." + method
* name.
* @param method the method
* @return the qualified name of the method
*/
public static String getQualifiedMethodName(Method method) {
Assert.notNull(method, "Method must not be null");
return method.getDeclaringClass().getName() + "." + method.getName();
}
/**
* Return a descriptive name for the given object's type: usually simply the class name, but component type class
* name + "[]" for arrays, and an appended list of implemented interfaces for JDK proxies.
* @param value the value to introspect
* @return the qualified name of the class
*/
public static String getDescriptiveType(Object value) {
if (value == null) {
return null;
}
Class<?> clazz = value.getClass();
if (Proxy.isProxyClass(clazz)) {
StringBuilder result = new StringBuilder(clazz.getName());
result.append(" implementing ");
Class<?>[] ifcs = clazz.getInterfaces();
for (int i = 0; i < ifcs.length; i++) {
result.append(ifcs[i].getName());
if (i < ifcs.length - 1) {
result.append(',');
}
}
return result.toString();
} else if (clazz.isArray()) {
return getQualifiedNameForArray(clazz);
} else {
return clazz.getName();
}
}
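	// Example (interface and proxy class names are illustrative): for a JDK proxy implementing
	// com.example.MyService the result is something like
	// "com.sun.proxy.$Proxy4 implementing com.example.MyService"; for a String[] value it is
	// "java.lang.String[]".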
/**
* Check whether the given class matches the user-specified type name.
* @param clazz the class to check
* @param typeName the type name to match
*/
public static boolean matchesTypeName(Class<?> clazz, String typeName) {
return (typeName != null && (typeName.equals(clazz.getName()) || typeName.equals(clazz.getSimpleName()) || (clazz.isArray() && typeName.equals(getQualifiedNameForArray(clazz)))));
}
/**
* Determine whether the given class has a public constructor with the given signature.
* <p>
* Essentially translates <code>NoSuchMethodException</code> to "false".
* @param clazz the clazz to analyze
* @param paramTypes the parameter types of the method
* @return whether the class has a corresponding constructor
* @see java.lang.Class#getMethod
*/
public static boolean hasConstructor(Class<?> clazz, Class<?>... paramTypes) {
return (getConstructorIfAvailable(clazz, paramTypes) != null);
}
/**
* Determine whether the given class has a public constructor with the given signature, and return it if available
* (else return <code>null</code>).
* <p>
* Essentially translates <code>NoSuchMethodException</code> to <code>null</code>.
* @param clazz the clazz to analyze
* @param paramTypes the parameter types of the method
* @return the constructor, or <code>null</code> if not found
* @see java.lang.Class#getConstructor
*/
public static <T> Constructor<T> getConstructorIfAvailable(Class<T> clazz, Class<?>... paramTypes) {
Assert.notNull(clazz, "Class must not be null");
try {
return clazz.getConstructor(paramTypes);
} catch (NoSuchMethodException ex) {
return null;
}
}
/**
* Determine whether the given class has a method with the given signature.
* <p>
* Essentially translates <code>NoSuchMethodException</code> to "false".
* @param clazz the clazz to analyze
* @param methodName the name of the method
* @param paramTypes the parameter types of the method
* @return whether the class has a corresponding method
* @see java.lang.Class#getMethod
*/
public static boolean hasMethod(Class<?> clazz, String methodName, Class<?>... paramTypes) {
return (getMethodIfAvailable(clazz, methodName, paramTypes) != null);
}
/**
* Determine whether the given class has a method with the given signature, and return it if available (else throws
* an <code>IllegalStateException</code>).
* <p>
* Essentially translates <code>NoSuchMethodException</code> to <code>IllegalStateException</code>.
* @param clazz the clazz to analyze
* @param methodName the name of the method
* @param paramTypes the parameter types of the method
* @return the method (never <code>null</code>)
* @throws IllegalStateException if the method has not been found
* @see java.lang.Class#getMethod
*/
public static Method getMethod(Class<?> clazz, String methodName, Class<?>... paramTypes) {
Assert.notNull(clazz, "Class must not be null");
Assert.notNull(methodName, "Method name must not be null");
try {
return clazz.getMethod(methodName, paramTypes);
} catch (NoSuchMethodException ex) {
throw new IllegalStateException("Expected method not found: " + ex);
}
}
/**
* Determine whether the given class has a method with the given signature, and return it if available (else return
* <code>null</code>).
* <p>
* Essentially translates <code>NoSuchMethodException</code> to <code>null</code>.
* @param clazz the clazz to analyze
* @param methodName the name of the method
* @param paramTypes the parameter types of the method
* @return the method, or <code>null</code> if not found
* @see java.lang.Class#getMethod
*/
public static Method getMethodIfAvailable(Class<?> clazz, String methodName, Class<?>... paramTypes) {
Assert.notNull(clazz, "Class must not be null");
Assert.notNull(methodName, "Method name must not be null");
try {
return clazz.getMethod(methodName, paramTypes);
} catch (NoSuchMethodException ex) {
return null;
}
}
/**
* Return the number of methods with a given name (with any argument types), for the given class and/or its
* superclasses. Includes non-public methods.
* @param clazz the clazz to check
* @param methodName the name of the method
* @return the number of methods with the given name
*/
public static int getMethodCountForName(Class<?> clazz, String methodName) {
Assert.notNull(clazz, "Class must not be null");
Assert.notNull(methodName, "Method name must not be null");
int count = 0;
Method[] declaredMethods = clazz.getDeclaredMethods();
for (Method method : declaredMethods) {
if (methodName.equals(method.getName())) {
count++;
}
}
Class<?>[] ifcs = clazz.getInterfaces();
for (Class<?> ifc : ifcs) {
count += getMethodCountForName(ifc, methodName);
}
if (clazz.getSuperclass() != null) {
count += getMethodCountForName(clazz.getSuperclass(), methodName);
}
return count;
}
/**
* Does the given class or one of its superclasses at least have one or more methods with the supplied name (with
* any argument types)? Includes non-public methods.
* @param clazz the clazz to check
* @param methodName the name of the method
* @return whether there is at least one method with the given name
*/
public static boolean hasAtLeastOneMethodWithName(Class<?> clazz, String methodName) {
Assert.notNull(clazz, "Class must not be null");
Assert.notNull(methodName, "Method name must not be null");
Method[] declaredMethods = clazz.getDeclaredMethods();
for (Method method : declaredMethods) {
if (method.getName().equals(methodName)) {
return true;
}
}
Class<?>[] ifcs = clazz.getInterfaces();
for (Class<?> ifc : ifcs) {
if (hasAtLeastOneMethodWithName(ifc, methodName)) {
return true;
}
}
return (clazz.getSuperclass() != null && hasAtLeastOneMethodWithName(clazz.getSuperclass(), methodName));
}
/**
* Return a public static method of a class.
 * @param clazz the class which defines the method
 * @param methodName the static method name
 * @param args the parameter types to the method
 * @return the static method, or <code>null</code> if no static method was found
 * @throws IllegalArgumentException if the clazz or the method name is <code>null</code>
*/
public static Method getStaticMethod(Class<?> clazz, String methodName, Class<?>... args) {
Assert.notNull(clazz, "Class must not be null");
Assert.notNull(methodName, "Method name must not be null");
try {
Method method = clazz.getMethod(methodName, args);
return Modifier.isStatic(method.getModifiers()) ? method : null;
} catch (NoSuchMethodException ex) {
return null;
}
}
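    // Illustrative behaviour (comment only): Integer.valueOf(String) is public and static, so it
    // is returned; Integer.intValue() exists but is not static, so the helper yields null for it.
    //
    //   ClassUtils.getStaticMethod(Integer.class, "valueOf", String.class); // non-null
    //   ClassUtils.getStaticMethod(Integer.class, "intValue");              // null (not static)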
/**
* Check if the given class represents a primitive wrapper, i.e. Boolean, Byte, Character, Short, Integer, Long,
* Float, or Double.
* @param clazz the class to check
* @return whether the given class is a primitive wrapper class
*/
public static boolean isPrimitiveWrapper(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
return primitiveWrapperTypeMap.containsKey(clazz);
}
/**
* Check if the given class represents a primitive (i.e. boolean, byte, char, short, int, long, float, or double) or
* a primitive wrapper (i.e. Boolean, Byte, Character, Short, Integer, Long, Float, or Double).
* @param clazz the class to check
* @return whether the given class is a primitive or primitive wrapper class
*/
public static boolean isPrimitiveOrWrapper(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
return (clazz.isPrimitive() || isPrimitiveWrapper(clazz));
}
/**
* Check if the given class represents an array of primitives, i.e. boolean, byte, char, short, int, long, float, or
* double.
* @param clazz the class to check
* @return whether the given class is a primitive array class
*/
public static boolean isPrimitiveArray(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
return (clazz.isArray() && clazz.getComponentType().isPrimitive());
}
/**
* Check if the given class represents an array of primitive wrappers, i.e. Boolean, Byte, Character, Short,
* Integer, Long, Float, or Double.
* @param clazz the class to check
* @return whether the given class is a primitive wrapper array class
*/
public static boolean isPrimitiveWrapperArray(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
return (clazz.isArray() && isPrimitiveWrapper(clazz.getComponentType()));
}
/**
* Resolve the given class if it is a primitive class, returning the corresponding primitive wrapper type instead.
* @param clazz the class to check
* @return the original class, or a primitive wrapper for the original primitive type
*/
public static Class<?> resolvePrimitiveIfNecessary(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
return (clazz.isPrimitive() && clazz != void.class ? primitiveTypeToWrapperMap.get(clazz) : clazz);
}
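    // Illustrative behaviour (comment only): primitives are boxed, everything else passes through.
    //
    //   ClassUtils.resolvePrimitiveIfNecessary(int.class);    // Integer.class
    //   ClassUtils.resolvePrimitiveIfNecessary(void.class);   // void.class (explicitly excluded)
    //   ClassUtils.resolvePrimitiveIfNecessary(String.class); // String.class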
/**
* Check if the right-hand side type may be assigned to the left-hand side type, assuming setting by reflection.
* Considers primitive wrapper classes as assignable to the corresponding primitive types.
* @param lhsType the target type
* @param rhsType the value type that should be assigned to the target type
* @return if the target type is assignable from the value type
* @see TypeUtils#isAssignable
*/
@SuppressWarnings("rawtypes")
public static boolean isAssignable(Class<?> lhsType, Class<?> rhsType) {
Assert.notNull(lhsType, "Left-hand side type must not be null");
Assert.notNull(rhsType, "Right-hand side type must not be null");
if (lhsType.isAssignableFrom(rhsType)) {
return true;
}
if (lhsType.isPrimitive()) {
Class resolvedPrimitive = primitiveWrapperTypeMap.get(rhsType);
if (resolvedPrimitive != null && lhsType.equals(resolvedPrimitive)) {
return true;
}
} else {
Class resolvedWrapper = primitiveTypeToWrapperMap.get(rhsType);
if (resolvedWrapper != null && lhsType.isAssignableFrom(resolvedWrapper)) {
return true;
}
}
return false;
}
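    // Illustrative behaviour (comment only): unlike Class.isAssignableFrom alone, a primitive and
    // its wrapper are treated as mutually assignable, matching what reflective setting allows.
    //
    //   ClassUtils.isAssignable(Number.class, Integer.class); // true (plain reference widening)
    //   ClassUtils.isAssignable(int.class, Integer.class);    // true (unboxing)
    //   ClassUtils.isAssignable(Integer.class, int.class);    // true (boxing)
    //   ClassUtils.isAssignable(int.class, Long.class);       // false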
/**
* Determine if the given type is assignable from the given value, assuming setting by reflection. Considers
* primitive wrapper classes as assignable to the corresponding primitive types.
* @param type the target type
* @param value the value that should be assigned to the type
* @return if the type is assignable from the value
*/
public static boolean isAssignableValue(Class<?> type, Object value) {
Assert.notNull(type, "Type must not be null");
return (value != null ? isAssignable(type, value.getClass()) : !type.isPrimitive());
}
/**
* Convert a "/"-based resource path to a "."-based fully qualified class name.
* @param resourcePath the resource path pointing to a class
* @return the corresponding fully qualified class name
*/
public static String convertResourcePathToClassName(String resourcePath) {
Assert.notNull(resourcePath, "Resource path must not be null");
return resourcePath.replace('/', '.');
}
/**
* Convert a "."-based fully qualified class name to a "/"-based resource path.
* @param className the fully qualified class name
* @return the corresponding resource path, pointing to the class
*/
public static String convertClassNameToResourcePath(String className) {
Assert.notNull(className, "Class name must not be null");
return className.replace('.', '/');
}
/**
* Return a path suitable for use with <code>ClassLoader.getResource</code> (also suitable for use with
* <code>Class.getResource</code> by prepending a slash ('/') to the return value). Built by taking the package of
* the specified class file, converting all dots ('.') to slashes ('/'), adding a trailing slash if necessary, and
* concatenating the specified resource name to this. <br/>
* As such, this function may be used to build a path suitable for loading a resource file that is in the same
* package as a class file, although {@link org.springframework.core.io.ClassPathResource} is usually even more
* convenient.
* @param clazz the Class whose package will be used as the base
* @param resourceName the resource name to append. A leading slash is optional.
* @return the built-up resource path
* @see java.lang.ClassLoader#getResource
* @see java.lang.Class#getResource
*/
public static String addResourcePathToPackagePath(Class<?> clazz, String resourceName) {
Assert.notNull(resourceName, "Resource name must not be null");
if (!resourceName.startsWith("/")) {
return classPackageAsResourcePath(clazz) + "/" + resourceName;
}
return classPackageAsResourcePath(clazz) + resourceName;
}
/**
* Given an input class object, return a string which consists of the class's package name as a pathname, i.e., all
* dots ('.') are replaced by slashes ('/'). Neither a leading nor trailing slash is added. The result could be
* concatenated with a slash and the name of a resource and fed directly to <code>ClassLoader.getResource()</code>.
* For it to be fed to <code>Class.getResource</code> instead, a leading slash would also have to be prepended to
* the returned value.
* @param clazz the input class. A <code>null</code> value or the default (empty) package will result in an empty
* string ("") being returned.
* @return a path which represents the package name
* @see ClassLoader#getResource
* @see Class#getResource
*/
public static String classPackageAsResourcePath(Class<?> clazz) {
if (clazz == null) {
return "";
}
String className = clazz.getName();
int packageEndIndex = className.lastIndexOf('.');
if (packageEndIndex == -1) {
return "";
}
String packageName = className.substring(0, packageEndIndex);
return packageName.replace('.', '/');
}
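    // Illustrative behaviour (comment only); "com.example.Foo" is a made-up class name.
    //
    //   classPackageAsResourcePath(com.example.Foo.class)                    -> "com/example"
    //   addResourcePathToPackagePath(com.example.Foo.class, "messages.xml")  -> "com/example/messages.xml"
    //   addResourcePathToPackagePath(com.example.Foo.class, "/messages.xml") -> "com/example/messages.xml"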
/**
* Build a String that consists of the names of the classes/interfaces in the given array.
* <p>
* Basically like <code>AbstractCollection.toString()</code>, but stripping the "class "/"interface " prefix before
* every class name.
* @param classes a Collection of Class objects (may be <code>null</code>)
* @return a String of form "[com.foo.Bar, com.foo.Baz]"
* @see java.util.AbstractCollection#toString()
*/
@SuppressWarnings("rawtypes")
public static String classNamesToString(Class... classes) {
return classNamesToString(Arrays.asList(classes));
}
/**
* Build a String that consists of the names of the classes/interfaces in the given collection.
* <p>
* Basically like <code>AbstractCollection.toString()</code>, but stripping the "class "/"interface " prefix before
* every class name.
* @param classes a Collection of Class objects (may be <code>null</code>)
* @return a String of form "[com.foo.Bar, com.foo.Baz]"
* @see java.util.AbstractCollection#toString()
*/
@SuppressWarnings("rawtypes")
public static String classNamesToString(Collection<Class> classes) {
if (CollectionUtils.isEmpty(classes)) {
return "[]";
}
StringBuilder sb = new StringBuilder("[");
for (Iterator<Class> it = classes.iterator(); it.hasNext();) {
Class clazz = it.next();
sb.append(clazz.getName());
if (it.hasNext()) {
sb.append(", ");
}
}
sb.append("]");
return sb.toString();
}
/**
* Return all interfaces that the given instance implements as array, including ones implemented by superclasses.
* @param instance the instance to analyze for interfaces
* @return all interfaces that the given instance implements as array
*/
@SuppressWarnings("rawtypes")
public static Class[] getAllInterfaces(Object instance) {
Assert.notNull(instance, "Instance must not be null");
return getAllInterfacesForClass(instance.getClass());
}
/**
* Return all interfaces that the given class implements as array, including ones implemented by superclasses.
* <p>
* If the class itself is an interface, it gets returned as sole interface.
* @param clazz the class to analyze for interfaces
* @return all interfaces that the given object implements as array
*/
public static Class<?>[] getAllInterfacesForClass(Class<?> clazz) {
return getAllInterfacesForClass(clazz, null);
}
/**
* Return all interfaces that the given class implements as array, including ones implemented by superclasses.
* <p>
* If the class itself is an interface, it gets returned as sole interface.
* @param clazz the class to analyze for interfaces
* @param classLoader the ClassLoader that the interfaces need to be visible in (may be <code>null</code> when
* accepting all declared interfaces)
* @return all interfaces that the given object implements as array
*/
@SuppressWarnings("rawtypes")
public static Class<?>[] getAllInterfacesForClass(Class<?> clazz, ClassLoader classLoader) {
Set<Class> ifcs = getAllInterfacesForClassAsSet(clazz, classLoader);
return ifcs.toArray(new Class[ifcs.size()]);
}
/**
* Return all interfaces that the given instance implements as Set, including ones implemented by superclasses.
* @param instance the instance to analyze for interfaces
* @return all interfaces that the given instance implements as Set
*/
@SuppressWarnings("rawtypes")
public static Set<Class> getAllInterfacesAsSet(Object instance) {
Assert.notNull(instance, "Instance must not be null");
return getAllInterfacesForClassAsSet(instance.getClass());
}
/**
* Return all interfaces that the given class implements as Set, including ones implemented by superclasses.
* <p>
* If the class itself is an interface, it gets returned as sole interface.
* @param clazz the class to analyze for interfaces
* @return all interfaces that the given object implements as Set
*/
@SuppressWarnings("rawtypes")
public static Set<Class> getAllInterfacesForClassAsSet(Class clazz) {
return getAllInterfacesForClassAsSet(clazz, null);
}
/**
* Return all interfaces that the given class implements as Set, including ones implemented by superclasses.
* <p>
* If the class itself is an interface, it gets returned as sole interface.
* @param clazz the class to analyze for interfaces
* @param classLoader the ClassLoader that the interfaces need to be visible in (may be <code>null</code> when
* accepting all declared interfaces)
* @return all interfaces that the given object implements as Set
*/
@SuppressWarnings("rawtypes")
public static Set<Class> getAllInterfacesForClassAsSet(Class clazz, ClassLoader classLoader) {
Assert.notNull(clazz, "Class must not be null");
if (clazz.isInterface() && isVisible(clazz, classLoader)) {
return Collections.singleton(clazz);
}
Set<Class> interfaces = new LinkedHashSet<Class>();
while (clazz != null) {
Class<?>[] ifcs = clazz.getInterfaces();
for (Class<?> ifc : ifcs) {
interfaces.addAll(getAllInterfacesForClassAsSet(ifc, classLoader));
}
clazz = clazz.getSuperclass();
}
return interfaces;
}
/**
* Create a composite interface Class for the given interfaces, implementing the given interfaces in one single
* Class.
* <p>
* This implementation builds a JDK proxy class for the given interfaces.
* @param interfaces the interfaces to merge
* @param classLoader the ClassLoader to create the composite Class in
* @return the merged interface as Class
* @see java.lang.reflect.Proxy#getProxyClass
*/
public static Class<?> createCompositeInterface(Class<?>[] interfaces, ClassLoader classLoader) {
Assert.notEmpty(interfaces, "Interfaces must not be empty");
Assert.notNull(classLoader, "ClassLoader must not be null");
return Proxy.getProxyClass(classLoader, interfaces);
}
/**
* Check whether the given class is visible in the given ClassLoader.
* @param clazz the class to check (typically an interface)
* @param classLoader the ClassLoader to check against (may be <code>null</code>, in which case this method will
* always return <code>true</code>)
*/
public static boolean isVisible(Class<?> clazz, ClassLoader classLoader) {
if (classLoader == null) {
return true;
}
try {
Class<?> actualClass = classLoader.loadClass(clazz.getName());
return (clazz == actualClass);
// Else: different interface class found...
} catch (ClassNotFoundException ex) {
// No interface class found...
return false;
}
}
}
| bboyfeiyu/spring-android | spring-android-core/src/main/java/org/springframework/util/ClassUtils.java | Java | apache-2.0 | 39,998 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.unit.hierarchy.discovery.combined;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.util.AbstractCollection;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.RandomAccess;
import java.util.Set;
import org.jboss.weld.tests.unit.hierarchy.discovery.Types;
import org.jboss.weld.util.reflection.HierarchyDiscovery;
import org.junit.Test;
public class CombinedHierarchyTest {
@Test
public void testInterfaceTypesResolved() {
Set<Type> expectedTypes = new HashSet<Type>();
expectedTypes.add(Object.class);
expectedTypes.add(Serializable.class);
expectedTypes.add(RandomAccess.class);
expectedTypes.add(Cloneable.class);
expectedTypes.add(Types.newParameterizedType(AbstractCollection.class, Integer.class));
expectedTypes.add(Types.newParameterizedType(Collection.class, Integer.class));
expectedTypes.add(Types.newParameterizedType(AbstractList.class, Integer.class));
expectedTypes.add(Types.newParameterizedType(List.class, Integer.class));
expectedTypes.add(Types.newParameterizedType(ArrayList.class, Integer.class));
expectedTypes.add(Types.newParameterizedType(Iterable.class, Integer.class));
HierarchyDiscovery discovery = new HierarchyDiscovery(Types.newParameterizedType(ArrayList.class, Integer.class));
Types.assertTypeSetMatches(expectedTypes, discovery.getTypeClosure());
}
}
| antoinesd/weld-core | impl/src/test/java/org/jboss/weld/tests/unit/hierarchy/discovery/combined/CombinedHierarchyTest.java | Java | apache-2.0 | 2,340 |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.cache.partition;
import com.gemstone.gemfire.cache.Region;
/**
* <p>Utility class that implements all methods in <code>PartitionListener</code>
* with empty implementations. Applications can subclass this class and only
 * override the methods of interest.</p>
 *
 * <p>If a subclass is declared in a Cache XML file, it must also implement {@link com.gemstone.gemfire.cache.Declarable}
* </p>
*
 * Note: Please contact VMware support before using these APIs.
*
* @author Barry Oglesby
*
* @since 6.6.2
*/
public class PartitionListenerAdapter implements PartitionListener {
public void afterPrimary(int bucketId) {
}
public void afterRegionCreate(Region<?, ?> region) {
}
public void afterBucketRemoved(int bucketId, Iterable<?> keys) {
}
public void afterBucketCreated(int bucketId, Iterable<?> keys) {
}
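  // Typical use (illustrative comment only; "LoggingPartitionListener" is a made-up name): extend
  // the adapter and override just the callback of interest, e.g.
  //
  //   public class LoggingPartitionListener extends PartitionListenerAdapter {
  //     @Override
  //     public void afterPrimary(int bucketId) {
  //       System.out.println("now primary for bucket " + bucketId);
  //     }
  //   }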
}
| papicella/snappy-store | gemfire-core/src/main/java/com/gemstone/gemfire/cache/partition/PartitionListenerAdapter.java | Java | apache-2.0 | 1,535 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.shared.resource;
import java.io.InputStream;
import java.net.URL;
import java.util.Comparator;
/**
 * Base class for resource loaders. Resource loaders can look up resources
* as URLs from arbitrary locations, including JAR files.
*/
public abstract class ResourceLoader
{
public static final String VERSION_INVALID = "INVALID";
private String _prefix;
public ResourceLoader(String prefix)
{
_prefix = prefix;
}
public abstract String getResourceVersion(String path);
/**
 * Return the maximum available version found (if one exists) or
 * return null if no version is available.
*/
public abstract String getLibraryVersion(String path);
/**
 * Return the URL of the resource described by the given resource meta,
 * or null if the resource cannot be resolved by this loader.
*/
public abstract URL getResourceURL(ResourceMeta resourceMeta);
public abstract InputStream getResourceInputStream(ResourceMeta resourceMeta);
public abstract ResourceMeta createResourceMeta(String prefix, String libraryName, String libraryVersion,
String resourceName, String resourceVersion);
public ResourceMeta createResourceMeta(String prefix, String libraryName,
String libraryVersion, String resourceName, String resourceVersion, String contractName)
{
return createResourceMeta(prefix, libraryName, libraryVersion, resourceName, resourceVersion);
}
public abstract boolean libraryExists(String libraryName);
public boolean resourceExists(ResourceMeta resourceMeta)
{
return (getResourceURL(resourceMeta) != null);
}
/*
public URL getResourceURL(String resourceId)
{
throw new UnsupportedOperationException(
"An implementation for getResourceURL(String resourceId) method is required for JSF 2.2");
}
public boolean resourceIdExists(String resourceId)
{
return (getResourceURL(resourceId) != null);
}*/
private Comparator<String> _versionComparator = null;
protected Comparator<String> getVersionComparator()
{
if (_versionComparator == null)
{
_versionComparator = new VersionComparator();
}
return _versionComparator;
}
protected void setVersionComparator(Comparator<String> versionComparator)
{
_versionComparator = versionComparator;
}
public static class VersionComparator implements Comparator<String>
{
public int compare(String s1, String s2)
{
int n1 = 0;
int n2 = 0;
String o1 = s1;
String o2 = s2;
boolean p1 = true;
boolean p2 = true;
while (n1 == n2 && (p1 || p2))
{
int i1 = o1.indexOf('_');
int i2 = o2.indexOf('_');
if (i1 < 0)
{
if (o1.length() > 0)
{
p1 = false;
n1 = Integer.valueOf(o1);
o1 = "";
}
else
{
p1 = false;
n1 = 0;
}
}
else
{
n1 = Integer.valueOf(o1.substring(0, i1));
o1 = o1.substring(i1 + 1);
}
if (i2 < 0)
{
if (o2.length() > 0)
{
p2 = false;
n2 = Integer.valueOf(o2);
o2 = "";
}
else
{
p2 = false;
n2 = 0;
}
}
else
{
n2 = Integer.valueOf(o2.substring(0, i2));
o2 = o2.substring(i2 + 1);
}
}
if (n1 == n2)
{
return s1.length() - s2.length();
}
return n1 - n2;
}
}
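    // Illustrative ordering (comment only): '_'-separated segments are compared numerically, so
    // "2_0" sorts before "10_0" even though plain string comparison would reverse them, and
    // "1_0" sorts before "1_0_1" because equal numeric prefixes fall back to string length.
    //
    //   new VersionComparator().compare("2_0", "10_0");  // < 0
    //   new VersionComparator().compare("1_0", "1_0_1"); // < 0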
public String getPrefix()
{
return _prefix;
}
public void setPrefix(String prefix)
{
_prefix = prefix;
}
}
| kulinski/myfaces | shared/src/main/java/org/apache/myfaces/shared/resource/ResourceLoader.java | Java | apache-2.0 | 5,162 |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.widget.escalator;
import java.util.Map;
import com.vaadin.client.widgets.Escalator;
/**
* A representation of the columns in an instance of {@link Escalator}.
*
* @since 7.4
* @author Vaadin Ltd
* @see Escalator#getColumnConfiguration()
*/
public interface ColumnConfiguration {
/**
* Removes columns at certain indices.
* <p>
* If any of the removed columns were frozen, the number of frozen columns
* will be reduced by the number of the removed columns that were frozen.
* <p>
* <em>Note:</em> This method simply removes the given columns, and does not
* do much of anything else. Especially if you have column spans, you
* probably need to run {@link #refreshColumns(int, int)} or
* {@link RowContainer#refreshRows(int, int)}
*
* @param index
* the index of the first column to be removed
* @param numberOfColumns
 *            the number of columns to remove, starting from {@code index}
* @throws IndexOutOfBoundsException
* if the entire range of removed columns is not currently
* present in the escalator
* @throws IllegalArgumentException
* if <code>numberOfColumns</code> is less than 1.
*/
public void removeColumns(int index, int numberOfColumns)
throws IndexOutOfBoundsException, IllegalArgumentException;
/**
* Adds columns at a certain index.
* <p>
* The new columns will be inserted between the column at the index, and the
* column before (an index of 0 means that the columns are inserted at the
* beginning). Therefore, the columns at the index and afterwards will be
* moved to the right.
* <p>
* The contents of the inserted columns will be queried from the respective
* cell renderers in the header, body and footer.
* <p>
* If there are frozen columns and the first added column is to the left of
* the last frozen column, the number of frozen columns will be increased by
* the number of inserted columns.
* <p>
* <em>Note:</em> Only the contents of the inserted columns will be
* rendered. If inserting new columns affects the contents of existing
* columns (e.g. you have column spans),
* {@link RowContainer#refreshRows(int, int)} or
* {@link #refreshColumns(int, int)} needs to be called as appropriate.
*
* @param index
* the index of the column before which new columns are inserted,
* or {@link #getColumnCount()} to add new columns at the end
* @param numberOfColumns
* the number of columns to insert after the <code>index</code>
* @throws IndexOutOfBoundsException
* if <code>index</code> is not an integer in the range
* <code>[0..{@link #getColumnCount()}]</code>
* @throws IllegalArgumentException
* if {@code numberOfColumns} is less than 1.
*/
public void insertColumns(int index, int numberOfColumns)
throws IndexOutOfBoundsException, IllegalArgumentException;
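    // Illustrative call sequence (comment only): with five existing columns, inserting two columns
    // at index 1 shifts the old columns 1..4 to indices 3..6; spanned content still needs an
    // explicit refresh afterwards, e.g.
    //
    //   columnConfiguration.insertColumns(1, 2);
    //   columnConfiguration.refreshColumns(1, 2);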
/**
* Returns the number of columns in the escalator.
*
* @return the number of columns in the escalator
*/
public int getColumnCount();
/**
* Sets the number of leftmost columns that are not affected by horizontal
* scrolling.
*
* @param count
* the number of columns to freeze
*
* @throws IllegalArgumentException
* if the column count is < 0 or > the number of columns
*
*/
public void setFrozenColumnCount(int count) throws IllegalArgumentException;
/**
* Get the number of leftmost columns that are not affected by horizontal
* scrolling.
*
* @return the number of frozen columns
*/
public int getFrozenColumnCount();
/**
* Sets (or unsets) an explicit width for a column.
*
* @param index
* the index of the column for which to set a width
* @param px
* the number of pixels the indicated column should be, or a
* negative number to let the escalator decide
* @throws IllegalArgumentException
* if <code>index</code> is not a valid column index
*/
public void setColumnWidth(int index, double px)
throws IllegalArgumentException;
/**
* Returns the user-defined width of a column.
*
* @param index
* the index of the column for which to retrieve the width
* @return the column's width in pixels, or a negative number if the width
* is implicitly decided by the escalator
* @throws IllegalArgumentException
* if <code>index</code> is not a valid column index
*/
public double getColumnWidth(int index) throws IllegalArgumentException;
/**
* Sets widths for a set of columns.
*
* @param indexWidthMap
* a map from column index to its respective width to be set. If
* the given width for a column index is negative, the column is
* resized-to-fit.
* @throws IllegalArgumentException
* if {@code indexWidthMap} is {@code null}
* @throws IllegalArgumentException
* if any column index in {@code indexWidthMap} is invalid
* @throws NullPointerException
* If any value in the map is <code>null</code>
*/
public void setColumnWidths(Map<Integer, Double> indexWidthMap)
throws IllegalArgumentException;
/**
* Returns the actual width of a column.
*
* @param index
* the index of the column for which to retrieve the width
* @return the column's actual width in pixels
* @throws IllegalArgumentException
* if <code>index</code> is not a valid column index
*/
public double getColumnWidthActual(int index)
throws IllegalArgumentException;
/**
* Refreshes a range of columns in the current row containers in each Escalator
* section.
* <p>
* The data for the refreshed columns is queried from the current cell
* renderer.
*
* @param index
* the index of the first column that will be updated
* @param numberOfColumns
* the number of columns to update, starting from the index
* @throws IndexOutOfBoundsException
* if any integer number in the range
* <code>[index..(index+numberOfColumns)]</code> is not an
* existing column index.
* @throws IllegalArgumentException
* if {@code numberOfColumns} is less than 1.
* @see RowContainer#setEscalatorUpdater(EscalatorUpdater)
* @see Escalator#getHeader()
* @see Escalator#getBody()
* @see Escalator#getFooter()
*/
public void refreshColumns(int index, int numberOfColumns)
throws IndexOutOfBoundsException, IllegalArgumentException;
}
| Darsstar/framework | client/src/main/java/com/vaadin/client/widget/escalator/ColumnConfiguration.java | Java | apache-2.0 | 7,702 |
/* Copyright 2016 Google Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.codegen.transformer.nodejs;
import com.google.api.codegen.config.FieldConfig;
import com.google.api.codegen.transformer.ModelTypeNameConverter;
import com.google.api.codegen.util.TypeName;
import com.google.api.codegen.util.TypeNameConverter;
import com.google.api.codegen.util.TypedValue;
import com.google.api.codegen.util.js.JSTypeTable;
import com.google.api.tools.framework.model.EnumValue;
import com.google.api.tools.framework.model.ProtoElement;
import com.google.api.tools.framework.model.TypeRef;
import com.google.common.collect.ImmutableMap;
import com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type;
public class NodeJSModelTypeNameConverter implements ModelTypeNameConverter {
/** A map from primitive types in proto to NodeJS counterparts. */
private static final ImmutableMap<Type, String> PRIMITIVE_TYPE_MAP =
ImmutableMap.<Type, String>builder()
.put(Type.TYPE_BOOL, "boolean")
.put(Type.TYPE_DOUBLE, "number")
.put(Type.TYPE_FLOAT, "number")
.put(Type.TYPE_INT64, "number")
.put(Type.TYPE_UINT64, "number")
.put(Type.TYPE_SINT64, "number")
.put(Type.TYPE_FIXED64, "number")
.put(Type.TYPE_SFIXED64, "number")
.put(Type.TYPE_INT32, "number")
.put(Type.TYPE_UINT32, "number")
.put(Type.TYPE_SINT32, "number")
.put(Type.TYPE_FIXED32, "number")
.put(Type.TYPE_SFIXED32, "number")
.put(Type.TYPE_STRING, "string")
.put(Type.TYPE_BYTES, "string")
.build();
  /** A map from primitive types in proto to their zero values in NodeJS. */
private static final ImmutableMap<Type, String> PRIMITIVE_ZERO_VALUE =
ImmutableMap.<Type, String>builder()
.put(Type.TYPE_BOOL, "false")
.put(Type.TYPE_DOUBLE, "0.0")
.put(Type.TYPE_FLOAT, "0.0")
.put(Type.TYPE_INT64, "0")
.put(Type.TYPE_UINT64, "0")
.put(Type.TYPE_SINT64, "0")
.put(Type.TYPE_FIXED64, "0")
.put(Type.TYPE_SFIXED64, "0")
.put(Type.TYPE_INT32, "0")
.put(Type.TYPE_UINT32, "0")
.put(Type.TYPE_SINT32, "0")
.put(Type.TYPE_FIXED32, "0")
.put(Type.TYPE_SFIXED32, "0")
.put(Type.TYPE_STRING, "\'\'")
.put(Type.TYPE_BYTES, "\'\'")
.build();
private TypeNameConverter typeNameConverter;
public NodeJSModelTypeNameConverter(String implicitPackageName) {
this.typeNameConverter = new JSTypeTable(implicitPackageName);
}
@Override
public TypeName getTypeNameInImplicitPackage(String shortName) {
return typeNameConverter.getTypeNameInImplicitPackage(shortName);
}
@Override
public TypeName getTypeName(TypeRef type) {
if (type.isMap()) {
return new TypeName("Object");
} else if (type.isRepeated()) {
TypeName elementTypeName = getTypeNameForElementType(type);
return new TypeName("", "", "%i[]", elementTypeName);
} else {
return getTypeNameForElementType(type);
}
}
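  // Illustrative mapping (comment only), assuming the usual proto field kinds:
  //   repeated int32 field      -> "number[]"
  //   map<string, string> field -> "Object"
  //   message/enum field        -> the message or enum type name resolved via getTypeName(ProtoElement)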
/**
* Returns the NodeJS representation of a type, without cardinality. If the type is a primitive,
* getTypeNameForElementType returns it in unboxed form.
*/
@Override
public TypeName getTypeNameForElementType(TypeRef type) {
String primitiveTypeName = PRIMITIVE_TYPE_MAP.get(type.getKind());
if (primitiveTypeName != null) {
return new TypeName(primitiveTypeName);
}
switch (type.getKind()) {
case TYPE_MESSAGE:
return getTypeName(type.getMessageType());
case TYPE_ENUM:
return getTypeName(type.getEnumType());
default:
throw new IllegalArgumentException("unknown type kind: " + type.getKind());
}
}
@Override
public TypeName getTypeName(ProtoElement elem) {
return typeNameConverter.getTypeName(elem.getFullName());
}
/**
   * Returns the NodeJS representation of a zero value for that type, to be used in generated code samples.
*/
@Override
public TypedValue getSnippetZeroValue(TypeRef type) {
// Don't call getTypeName; we don't need to import these.
if (type.isMap()) {
return TypedValue.create(new TypeName("Object"), "{}");
}
if (type.isRepeated()) {
return TypedValue.create(new TypeName("Array"), "[]");
}
if (PRIMITIVE_ZERO_VALUE.containsKey(type.getKind())) {
return TypedValue.create(getTypeName(type), PRIMITIVE_ZERO_VALUE.get(type.getKind()));
}
if (type.isMessage()) {
return TypedValue.create(getTypeName(type), "{}");
}
if (type.isEnum()) {
return getEnumValue(type, type.getEnumType().getValues().get(0));
}
return TypedValue.create(new TypeName(""), "null");
}
@Override
public TypedValue getImplZeroValue(TypeRef type) {
return getSnippetZeroValue(type);
}
@Override
public String renderPrimitiveValue(TypeRef type, String value) {
Type primitiveType = type.getKind();
if (!PRIMITIVE_TYPE_MAP.containsKey(primitiveType)) {
throw new IllegalArgumentException(
"Initial values are only supported for primitive types, got type "
+ type
+ ", with value "
+ value);
}
switch (primitiveType) {
case TYPE_BOOL:
return value.toLowerCase();
case TYPE_STRING:
case TYPE_BYTES:
return "\'" + value + "\'";
default:
// Types that do not need to be modified (e.g. TYPE_INT32) are handled
// here
return value;
}
}
@Override
public TypeName getTypeNameForTypedResourceName(
FieldConfig fieldConfig, String typedResourceShortName) {
throw new UnsupportedOperationException(
"getTypeNameForTypedResourceName not supported by NodeJS");
}
@Override
public TypeName getTypeNameForResourceNameElementType(
FieldConfig fieldConfig, String typedResourceShortName) {
throw new UnsupportedOperationException(
"getTypeNameForResourceNameElementType not supported by NodeJS");
}
@Override
public TypedValue getEnumValue(TypeRef type, EnumValue value) {
return TypedValue.create(getTypeName(type), "%s." + value.getSimpleName());
}
}
| saicheems/discovery-artifact-manager | toolkit/src/main/java/com/google/api/codegen/transformer/nodejs/NodeJSModelTypeNameConverter.java | Java | apache-2.0 | 6,784 |
package org.jvnet.jaxb2_commons.xml.bind.model.concrete;
import org.jvnet.jaxb2_commons.xml.bind.model.MAnyElementPropertyInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MClassInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MPropertyInfoVisitor;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MPropertyInfoOrigin;
public class CMAnyElementPropertyInfo<T, C extends T> extends
CMPropertyInfo<T, C> implements MAnyElementPropertyInfo<T, C> {
private final boolean mixed;
private final boolean domAllowed;
private final boolean typedObjectAllowed;
public CMAnyElementPropertyInfo(MPropertyInfoOrigin origin,
MClassInfo<T, C> classInfo, String privateName, boolean collection,
boolean required, boolean mixed, boolean domAllowed,
boolean typedObjectAllowed) {
super(origin, classInfo, privateName, collection, required);
this.mixed = mixed;
this.domAllowed = domAllowed;
this.typedObjectAllowed = typedObjectAllowed;
}
public boolean isMixed() {
return mixed;
}
public boolean isDomAllowed() {
return domAllowed;
}
public boolean isTypedObjectAllowed() {
return typedObjectAllowed;
}
public <V> V acceptPropertyInfoVisitor(MPropertyInfoVisitor<T, C, V> visitor) {
return visitor.visitAnyElementPropertyInfo(this);
}
}
| highsource/jaxb2-basics | runtime/src/main/java/org/jvnet/jaxb2_commons/xml/bind/model/concrete/CMAnyElementPropertyInfo.java | Java | bsd-2-clause | 1,277 |
/* Generated By:JJTree: Do not edit this line. ASTConstructorDeclaration.java */
package net.sourceforge.pmd.lang.java.ast;
public class ASTConstructorDeclaration extends AbstractJavaAccessNode {
public ASTConstructorDeclaration(int id) {
super(id);
}
public ASTConstructorDeclaration(JavaParser p, int id) {
super(p, id);
}
public ASTFormalParameters getParameters() {
        return (ASTFormalParameters) (jjtGetChild(0) instanceof ASTFormalParameters ? jjtGetChild(0) : jjtGetChild(1));
}
public int getParameterCount() {
return getParameters().getParameterCount();
}
/**
* Accept the visitor. *
*/
public Object jjtAccept(JavaParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
private boolean containsComment;
public boolean containsComment() {
return this.containsComment;
}
public void setContainsComment() {
this.containsComment = true;
}
}
| daejunpark/jsaf | third_party/pmd/src/main/java/net/sourceforge/pmd/lang/java/ast/ASTConstructorDeclaration.java | Java | bsd-3-clause | 999 |
/*
* Copyright (c) 2009-2013, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.admindistribution.nhin.proxy;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType;
import gov.hhs.fha.nhinc.messaging.client.CONNECTClient;
import gov.hhs.fha.nhinc.messaging.service.port.ServicePortDescriptor;
import gov.hhs.fha.nhinc.nhinadmindistribution.RespondingGatewayAdministrativeDistributionPortType;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants.GATEWAY_API_LEVEL;
import oasis.names.tc.emergency.edxl.de._1.EDXLDistribution;
import org.junit.Test;
/**
* @author msw
*
*/
public class NhinAdminDistributionProxyWebServiceSecuredImplTest {
@SuppressWarnings("unchecked")
private final CONNECTClient<RespondingGatewayAdministrativeDistributionPortType> client = mock(CONNECTClient.class);
private final EDXLDistribution body = mock(EDXLDistribution.class);
private final AssertionType assertion = mock(AssertionType.class);
private final NhinTargetSystemType target = mock(NhinTargetSystemType.class);
@Test
public void testNoMtom() {
NhinAdminDistributionProxyWebServiceSecuredImpl impl = getImpl();
GATEWAY_API_LEVEL apiLevel = GATEWAY_API_LEVEL.LEVEL_g0;
impl.sendAlertMessage(body, assertion, target, apiLevel);
verify(client, never()).enableMtom();
}
    @Test
    public void testMtom() {
NhinAdminDistributionProxyWebServiceSecuredImpl impl = getImpl();
GATEWAY_API_LEVEL apiLevel = GATEWAY_API_LEVEL.LEVEL_g1;
impl.sendAlertMessage(body, assertion, target, apiLevel);
verify(client).enableMtom();
}
private NhinAdminDistributionProxyWebServiceSecuredImpl getImpl() {
return new NhinAdminDistributionProxyWebServiceSecuredImpl() {
/*
* (non-Javadoc)
*
* @see gov.hhs.fha.nhinc.admindistribution.nhin.proxy.NhinAdminDistributionProxyWebServiceSecuredImpl#
* getCONNECTClientSecured(gov.hhs.fha.nhinc.messaging.service.port.ServicePortDescriptor, java.lang.String,
* gov.hhs.fha.nhinc.common.nhinccommon.AssertionType, java.lang.String, java.lang.String)
*/
@Override
protected CONNECTClient<RespondingGatewayAdministrativeDistributionPortType> getCONNECTClientSecured(
ServicePortDescriptor<RespondingGatewayAdministrativeDistributionPortType> portDescriptor,
String url, AssertionType assertion, String target, String serviceName) {
return client;
}
};
}
}
| sailajaa/CONNECT | Product/Production/Services/AdminDistributionCore/src/test/java/gov/hhs/fha/nhinc/admindistribution/nhin/proxy/NhinAdminDistributionProxyWebServiceSecuredImplTest.java | Java | bsd-3-clause | 4,392 |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.material;
import com.jme3.asset.AssetKey;
import com.jme3.asset.AssetProcessor;
public class MaterialProcessor implements AssetProcessor {
@Override
public Object postProcess(AssetKey key, Object obj) {
return null;
}
@Override
public Object createClone(Object obj) {
return ((Material) obj).clone();
}
}
| zzuegg/jmonkeyengine | jme3-core/src/main/java/com/jme3/material/MaterialProcessor.java | Java | bsd-3-clause | 1,992 |
/*
* This file is part of choco-solver, http://choco-solver.org/
*
* Copyright (c) 2019, IMT Atlantique. All rights reserved.
*
* Licensed under the BSD 4-clause license.
*
* See LICENSE file in the project root for full license information.
*/
package org.chocosolver.util.objects.setDataStructures.iterable;
import org.chocosolver.solver.variables.IntVar;
import org.chocosolver.util.iterators.DisposableRangeIterator;
import java.util.Arrays;
/**
*
* <p>
* Project: choco.
* @author Charles Prud'homme
* @since 25/01/2016.
*/
public class IntIterableSetUtils {
/**
* Copy the domain of <i>var</i> into an {@link IntIterableRangeSet}.
* @param var an integer variable
* @return set to transfer values to
*/
public static IntIterableRangeSet extract(IntVar var) {
IntIterableRangeSet set = new IntIterableRangeSet();
copyIn(var, set);
return set;
}
/**
* Copy the domain of <i>var</i> in <i>set</i>.
* First, it clears <i>set</i>, then it fills it with the value in <i>var</i>.
*
* @param var an integer variable
* @param set set to transfer values to
*/
public static void copyIn(IntVar var, IntIterableRangeSet set) {
set.clear();
DisposableRangeIterator rit = var.getRangeIterator(true);
while (rit.hasNext()) {
int lb = rit.min();
int ub = rit.max();
set.pushRange(lb,ub);
rit.next();
}
rit.dispose();
}
/**
* @param set on which the complement is based
* @param lbu lower bound (inclusive) of the universe
* @param ubu upper bound (inclusive) of the universe
* @return the complement of this set wrt to universe set [<i>lbu</i>, <i>ubu</i>].
* Values smaller than <i>lbu</i> and greater than <i>ubu</i> are ignored.
*/
public static IntIterableRangeSet complement(IntIterableRangeSet set, int lbu, int ubu) {
assert lbu <= ubu;
IntIterableRangeSet t = new IntIterableRangeSet();
t.ELEMENTS = new int[set.SIZE + 2];
int i = 0;
int lb = lbu;
while (i < set.SIZE && set.ELEMENTS[i] <= lbu) {
i += 2;
lb = set.ELEMENTS[i - 1] + 1;
}
if (i == set.SIZE) {
if (lb <= ubu) {
t.pushRange(lb, ubu);
}// else: empty set
} else {
assert set.ELEMENTS[i] > lb;
t.pushRange(lb, set.ELEMENTS[i++] - 1);
while (i < set.SIZE - 2 && set.ELEMENTS[i] < ubu) {
t.pushRange(set.ELEMENTS[i++] + 1, set.ELEMENTS[i++] - 1);
}
if (set.ELEMENTS[i] < ubu) {
t.pushRange(set.ELEMENTS[i] + 1, ubu);
}
}
return t;
}
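    // Worked example (comment only; the no-arg constructor and add(int) come from IntIterableRangeSet):
    //
    //   IntIterableRangeSet s = new IntIterableRangeSet();
    //   for (int v : new int[]{0, 1, 2, 3, 7, 8, 9}) { s.add(v); }      // s = {0..3, 7..9}
    //   IntIterableSetUtils.complement(s, 0, 10);                       // -> {4..6, 10}
    //
    // Values outside the universe [lbu, ubu] are simply ignored.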
/**
* @param set1 a set of ints
* @param set2 a set of ints
     * @return a set {a + b | a in set1, b in set2}
*/
public static IntIterableRangeSet plus(IntIterableRangeSet set1, IntIterableRangeSet set2) {
IntIterableRangeSet t = new IntIterableRangeSet();
plus(t, set1, set2);
return t;
}
/**
* Set <i>setr</i> to {a + b | a in <i>set1</i>, b in <i>set2</i>}
*
* @param setr set of ints
* @param set1 a set of ints
* @param set2 a set of ints
*/
public static void plus(IntIterableRangeSet setr, IntIterableRangeSet set1, IntIterableRangeSet set2) {
setr.clear();
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
setr.grow(set1.SIZE);
int i = 0, j = 0;
int[] is = new int[s2];
Arrays.fill(is, -1);
is[0] = 0;
int lb = set1.ELEMENTS[0] + set2.ELEMENTS[0];
int ub = set1.ELEMENTS[1] + set2.ELEMENTS[1];
do {
boolean extend = false;
for (int k = i; k <= j; k++) {
int _lb = set1.ELEMENTS[is[k] << 1] + set2.ELEMENTS[k << 1];
if (lb <= _lb && _lb <= ub + 1) {
ub = Math.max(set1.ELEMENTS[(is[k] << 1) + 1] + set2.ELEMENTS[(k << 1) + 1], ub);
extend = true;
// add neighbors to evaluate
// 1. left neighbor
if (k < s2 - 1 && k == j) {
is[k + 1]++;
if (is[k + 1] == 0) {
j++;
}
}
// 2. bottom neighbor
is[k]++;
if (is[k] == s1) {
i++;
}
}
}
if (!extend) {
setr.pushRange(lb, ub);
lb = Integer.MAX_VALUE;
for (int k = i; k <= j; k++) {
int _lb = set1.ELEMENTS[is[k] << 1] + set2.ELEMENTS[k << 1];
if (lb > _lb) {
lb = _lb;
ub = set1.ELEMENTS[(is[k] << 1) + 1] + set2.ELEMENTS[(k << 1) + 1];
}
}
}
} while (is[s2 - 1] < s1);
setr.pushRange(lb, ub);
}
}
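    // Worked example (comment only): with set1 = {0..1, 5..5} and set2 = {0..2},
    // plus(setr, set1, set2) stores {0..3, 5..7} in setr: [0,1]+[0,2] = [0,3] and
    // [5,5]+[0,2] = [5,7]; overlapping or adjacent sum ranges are merged as they are pushed.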
/**
* Set <i>setr</i> to {a + b | a in <i>set1</i>, b in [<i>l</i>..<i>u</i>]}
*
* @param setr set of ints
* @param set1 a set of ints
* @param l an int
* @param u an int
*/
public static void plus(IntIterableRangeSet setr, IntIterableRangeSet set1, int l, int u) {
setr.clear();
int s1 = set1.SIZE >> 1;
if (s1 > 0 && l <= u) {
int k = 0;
setr.grow(set1.SIZE);
int lb = set1.ELEMENTS[0] + l;
int ub = set1.ELEMENTS[1] + u;
for (; k < s1; k++) {
int _lb = set1.ELEMENTS[k << 1] + l;
if (lb <= _lb && _lb <= ub + 1) {
ub = Math.max(set1.ELEMENTS[(k << 1) + 1] + u, ub);
} else {
setr.pushRange(lb, ub);
lb = set1.ELEMENTS[k << 1] + l;
ub = set1.ELEMENTS[(k << 1) + 1] + u;
}
}
setr.pushRange(lb,ub);
}
}
/**
* @param set1 a set of ints
* @param set2 a set of ints
     * @return a set {a - b | a in set1, b in set2}
*/
public static IntIterableRangeSet minus(IntIterableRangeSet set1, IntIterableRangeSet set2) {
IntIterableRangeSet t = new IntIterableRangeSet();
minus(t, set1, set2);
return t;
}
/**
* Set <i>setr</i> to {a - b | a in <i>set1</i>, b in <i>set2</i>}
*
* @param setr set of ints
* @param set1 a set of ints
* @param set2 a set of ints
*/
public static void minus(IntIterableRangeSet setr, IntIterableRangeSet set1, IntIterableRangeSet set2) {
setr.clear();
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
setr.grow(set1.SIZE);
int i = s2 - 1, j = s2 - 1;
int[] is = new int[s2];
Arrays.fill(is, -1);
is[s2 - 1] = 0;
int lb = set1.ELEMENTS[0] - set2.ELEMENTS[((s2 - 1) << 1) + 1];
int ub = set1.ELEMENTS[1] - set2.ELEMENTS[(s2 - 1) << 1];
do {
boolean extend = false;
for (int k = j; k >= i; k--) {
int _lb = set1.ELEMENTS[is[k] << 1] - set2.ELEMENTS[(k << 1) + 1];
if (lb <= _lb && _lb <= ub + 1) {
ub = Math.max(set1.ELEMENTS[(is[k] << 1) + 1] - set2.ELEMENTS[(k << 1)], ub);
extend = true;
// add neighbors to evaluate
// 1. left neighbor
if (k > 0 && k == i) {
is[k - 1]++;
if (is[k - 1] == 0) {
i--;
}
}
// 2. bottom neighbor
is[k]++;
if (is[k] == s1) {
j--;
}
}
}
if (!extend) {
setr.pushRange(lb, ub);
lb = Integer.MAX_VALUE;
for (int k = i; k <= j; k++) {
int _lb = set1.ELEMENTS[is[k] << 1] - set2.ELEMENTS[(k << 1) + 1];
if (lb > _lb) {
lb = _lb;
ub = set1.ELEMENTS[(is[k] << 1) + 1] - set2.ELEMENTS[k << 1];
}
}
}
} while (is[0] < s1);
setr.pushRange(lb, ub);
}
}
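    // Worked example (comment only): with set1 = {0..3} and set2 = {1..2},
    // minus(setr, set1, set2) stores {-2..2} in setr: the smallest difference is 0 - 2 = -2,
    // the largest is 3 - 1 = 2, and every value in between is reachable.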
/**
* Set <i>setr</i> to {a - b | a in <i>set1</i>, b in [<i>l</i>..<i>u</i>]}
*
* @param setr set of ints
* @param set1 a set of ints
* @param l an int
* @param u an int
*/
public static void minus(IntIterableRangeSet setr, IntIterableRangeSet set1, int l, int u) {
setr.clear();
int s1 = set1.SIZE >> 1;
if (s1 > 0 && l <= u) {
setr.grow(set1.SIZE);
int k = s1;
int lb = set1.ELEMENTS[0] - u;
int ub = set1.ELEMENTS[1] - l;
for (; k >= 0; k--) {
int _lb = set1.ELEMENTS[k << 1] - u;
if (lb <= _lb && _lb <= ub + 1) {
ub = Math.max(set1.ELEMENTS[(k << 1) + 1] - l, ub);
} else {
setr.pushRange(lb, ub);
lb = set1.ELEMENTS[k << 1] - u;
ub = set1.ELEMENTS[(k << 1) + 1] - l;
}
}
setr.pushRange(lb, ub);
}
}
/**
* @param set1 a set of ints
* @param set2 a set of ints
     * @return the set set1 ∩ set2
*/
public static IntIterableRangeSet intersection(IntIterableRangeSet set1, IntIterableRangeSet set2) {
IntIterableRangeSet t = new IntIterableRangeSet();
intersection(t, set1, set2);
return t;
}
/**
* Set <i>setr</i> to <i>set1</i> ∩ <i>set2</i>
*
* @param setr set of ints
* @param set1 a set of ints
* @param set2 a set of ints
*/
@SuppressWarnings("Duplicates")
public static void intersection(IntIterableRangeSet setr, IntIterableRangeSet set1, IntIterableRangeSet set2) {
setr.clear();
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
setr.grow(set1.SIZE);
int i = 0, j = 0;
int lbi, ubi, lbj, ubj, lb, ub;
lbi = set1.ELEMENTS[0];
ubi = set1.ELEMENTS[1];
lbj = set2.ELEMENTS[0];
ubj = set2.ELEMENTS[1];
while (i < s1 && j < s2) {
if ((lbi <= lbj && lbj <= ubi) || (lbj <= lbi && lbi <= ubj)) {
lb = Math.max(lbi, lbj);
ub = Math.min(ubi, ubj);
setr.pushRange(lb, ub);
}
if (ubi <= ubj && ++i < s1) {
lbi = set1.ELEMENTS[i << 1];
ubi = set1.ELEMENTS[(i << 1) + 1];
}else if (ubj <= ubi && ++j < s2) {
lbj = set2.ELEMENTS[j << 1];
ubj = set2.ELEMENTS[(j << 1) + 1];
}
}
}
}
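    // Worked example (comment only): with set1 = {0..5, 10..12} and set2 = {3..10},
    // intersection(setr, set1, set2) stores {3..5, 10..10} in setr: each pair of overlapping
    // ranges contributes [max(lb1, lb2), min(ub1, ub2)].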
/**
* Set <i>setr</i> to <i>set1</i> ∩ [<i>from</i>,<i>to</i>]
*
* @param setr set of ints
* @param set1 a set of ints
* @param from lower bound of an interval
* @param to upper bound of an interval
*/
@SuppressWarnings("Duplicates")
public static void intersection(IntIterableRangeSet setr, IntIterableRangeSet set1, int from, int to) {
setr.clear();
int s1 = set1.SIZE >> 1;
int s2 = from <= to ? 1 : 0;
if (s1 > 0 && s2 > 0) {
setr.grow(set1.SIZE);
int i = 0, j = 0;
int lbi, ubi, lbj, ubj, lb, ub;
lbi = set1.ELEMENTS[0];
ubi = set1.ELEMENTS[1];
lbj = from;
ubj = to;
while (i < s1 && j < s2) {
if ((lbi <= lbj && lbj <= ubi) || (lbj <= lbi && lbi <= ubj)) {
lb = Math.max(lbi, lbj);
ub = Math.min(ubi, ubj);
setr.pushRange(lb, ub);
}
if (ubi <= ubj && ++i < s1) {
lbi = set1.ELEMENTS[i << 1];
ubi = set1.ELEMENTS[(i << 1) + 1];
}else if(ubj <= ubi){
j++;
}
}
}
}
/**
* Set <i>set1</i> to <i>set1</i> ∩ <i>set2</i>
*
* @param set1 a set of ints
* @param set2 a set of ints
* @return <tt>true</tt> if <i>set1</i> has changed
*/
@SuppressWarnings("Duplicates")
public static boolean intersectionOf(IntIterableRangeSet set1, IntIterableRangeSet set2) {
boolean change = false;
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
int i = 0, j = 0;
int s = 0, c = 0;
int[] e = new int[set1.SIZE];
int lbi, ubi, lbj, ubj, lb, ub;
lbi = set1.ELEMENTS[0];
ubi = set1.ELEMENTS[1];
lbj = set2.ELEMENTS[0];
ubj = set2.ELEMENTS[1];
while (i < s1 && j < s2) {
if ((lbi <= lbj && lbj <= ubi) || (lbj <= lbi && lbi <= ubj)) {
lb = Math.max(lbi, lbj);
ub = Math.min(ubi, ubj);
if (s + 2 > e.length) {
// overflow-conscious code
int oldCapacity = e.length;
int newCapacity = oldCapacity + (oldCapacity >> 1);
if (newCapacity < s + 2)
newCapacity = s + 2;
// minCapacity is usually close to size, so this is a win:
e = Arrays.copyOf(e, newCapacity);
}
e[s++] = lb;
e[s++] = ub;
c += ub - lb + 1;
change = true;
}
if (ubi <= ubj && ++i < s1) {
lbi = set1.ELEMENTS[i << 1];
ubi = set1.ELEMENTS[(i << 1) + 1];
}else if (ubj <= ubi && ++j < s2) {
lbj = set2.ELEMENTS[j << 1];
ubj = set2.ELEMENTS[(j << 1) + 1];
}
}
set1.ELEMENTS = e;
set1.SIZE = s;
change |= (set1.CARDINALITY != c);
set1.CARDINALITY = c;
}else{
change = set1.CARDINALITY > 0;
set1.clear();
}
return change;
}
/**
* @param set1 a set of ints
* @param set2 a set of ints
     * @return the set set1 ∪ set2
*/
public static IntIterableRangeSet union(IntIterableRangeSet set1, IntIterableRangeSet set2) {
IntIterableRangeSet t = new IntIterableRangeSet();
union(t, set1, set2);
return t;
}
/**
* Set <i>setr</i> to <i>set1</i> ∪ <i>set2</i>
*
* @param setr set of ints
* @param set1 a set of ints
* @param set2 a set of ints
*/
@SuppressWarnings("Duplicates")
public static void union(IntIterableRangeSet setr, IntIterableRangeSet set1, IntIterableRangeSet set2) {
setr.clear();
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
setr.grow(set1.SIZE);
int i = 0, j = 0;
int lbi, ubi, lbj, ubj, lb, ub;
lb = lbi = set1.ELEMENTS[0];
ub = ubi = set1.ELEMENTS[1];
lbj = set2.ELEMENTS[0];
ubj = set2.ELEMENTS[1];
if(lb > lbj){
lb = lbj;
ub = ubj;
}
boolean extend;
while (i < s1 || j < s2) {
extend = false;
if (lb - 1 <= lbi && lbi <= ub + 1) {
ub = Math.max(ub, ubi);
extend = i < s1;
if(++i < s1){
lbi = set1.ELEMENTS[i << 1];
ubi = set1.ELEMENTS[(i << 1) + 1];
}
}
if (lb - 1 <= lbj && lbj <= ub + 1) {
ub = Math.max(ub, ubj);
extend |= j < s2;
if(++j < s2){
lbj = set2.ELEMENTS[j << 1];
ubj = set2.ELEMENTS[(j << 1) + 1];
}
}
if(!extend){
setr.pushRange(lb, ub);
if(i < s1) {
lb = lbi;
ub = ubi;
if(j < s2 && lbi > lbj){
lb = lbj;
ub = ubj;
}
}else if(j < s2){
lb = lbj;
ub = ubj;
}
}
}
setr.pushRange(lb, ub);
}
}
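    // Worked example (comment only): with set1 = {0..2, 7..9} and set2 = {3..5},
    // union(setr, set1, set2) stores {0..5, 7..9} in setr: [0,2] and [3,5] are adjacent
    // (3 <= 2 + 1), so they are merged into one range before [7,9] is copied over.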
/**
     * Set <i>set1</i> to <i>set1</i> ∪ <i>set2</i>
*
* @param set1 a set of ints
* @param set2 a set of ints
* @return <tt>true</tt> if <i>set1</i> has changed
*/
@SuppressWarnings("Duplicates")
public static boolean unionOf(IntIterableRangeSet set1, IntIterableRangeSet set2) {
boolean change = false;
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
int i = 0, j = 0;
int s = 0, c = 0;
int[] e = new int[set1.SIZE];
int lbi, ubi, lbj, ubj, lb, ub;
lb = lbi = set1.ELEMENTS[0];
ub = ubi = set1.ELEMENTS[1];
lbj = set2.ELEMENTS[0];
ubj = set2.ELEMENTS[1];
if (lb > lbj) {
lb = lbj;
ub = ubj;
}
boolean extend;
while (i < s1 || j < s2) {
extend = false;
if (lb - 1 <= lbi && lbi <= ub + 1) {
ub = Math.max(ub, ubi);
extend = i < s1;
if (++i < s1) {
lbi = set1.ELEMENTS[i << 1];
ubi = set1.ELEMENTS[(i << 1) + 1];
}
}
if (lb - 1 <= lbj && lbj <= ub + 1) {
ub = Math.max(ub, ubj);
extend |= j < s2;
if (++j < s2) {
lbj = set2.ELEMENTS[j << 1];
ubj = set2.ELEMENTS[(j << 1) + 1];
}
}
if (!extend) {
if (s + 2 > e.length) {
// overflow-conscious code
int oldCapacity = e.length;
int newCapacity = oldCapacity + (oldCapacity >> 1);
if (newCapacity < s + 2)
newCapacity = s + 2;
// minCapacity is usually close to size, so this is a win:
e = Arrays.copyOf(e, newCapacity);
}
e[s++] = lb;
e[s++] = ub;
c += ub - lb + 1;
if (i < s1) {
lb = lbi;
ub = ubi;
if (j < s2 && lbi > lbj) {
lb = lbj;
ub = ubj;
}
} else if (j < s2) {
lb = lbj;
ub = ubj;
}
}
}
if (s + 2 > e.length) {
// overflow-conscious code
int oldCapacity = e.length;
int newCapacity = oldCapacity + (oldCapacity >> 1);
if (newCapacity < s + 2)
newCapacity = s + 2;
// minCapacity is usually close to size, so this is a win:
e = Arrays.copyOf(e, newCapacity);
}
e[s++] = lb;
e[s++] = ub;
c += ub - lb + 1;
set1.ELEMENTS = e;
set1.SIZE = s;
change = (set1.CARDINALITY != c);
set1.CARDINALITY = c;
} else {
if(s2 > 0){
set1.grow(set2.SIZE);
System.arraycopy(set2.ELEMENTS, 0, set1.ELEMENTS, 0, set2.SIZE);
set1.SIZE = set2.SIZE;
set1.CARDINALITY = set2.CARDINALITY;
change = true;
}
}
return change;
}
/**
     * Put all values of <i>var</i> into <i>set</i>.
* TODO: more efficient operation
*
* @param set a set of ints
     * @param var an integer variable
*/
public static void union(IntIterableRangeSet set, IntVar var) {
int ub = var.getUB();
for (int v = var.getLB(); v <= ub; v = var.nextValue(v)) {
set.add(v);
}
}
/**
* @param var a variable
* @param set a set
     * @return <i>true</i> if <i>var</i> is included in <i>set</i>,
* <i>false</i> otherwise.
*/
@SuppressWarnings("Duplicates")
public static boolean includedIn(IntVar var, IntIterableRangeSet set) {
int s1 = var.getDomainSize();
int s2 = set.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
int j = 0;
int lbi, ubi, lbj, ubj;
lbi = var.getLB();
ubi = var.nextValueOut(lbi) - 1;
lbj = set.ELEMENTS[0];
ubj = set.ELEMENTS[1];
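            // Walk the variable's domain range by range ([lbi, ubi], via nextValue/nextValueOut)
            // while advancing through the set's ranges: every domain range must lie entirely
            // inside one of the set's ranges, otherwise the variable is not included.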
while (lbi < Integer.MAX_VALUE && j < s2) {
if (ubj < lbi && ++j < s2) {
lbj = set.ELEMENTS[j << 1];
ubj = set.ELEMENTS[(j << 1) + 1];
}else if(lbj <= lbi && ubi <= ubj){
if((lbi = var.nextValue(ubi)) < Integer.MAX_VALUE) {
ubi = var.nextValueOut(lbi) - 1;
}
}else{
return false;
}
}
}
return s2 > 0;
}
/**
* @param set1 a set
* @param set2 a set
     * @return <i>true</i> if <i>set1</i> is included in <i>set2</i>,
* <i>false</i> otherwise.
*/
@SuppressWarnings("Duplicates")
public static boolean includedIn(IntIterableRangeSet set1, IntIterableRangeSet set2) {
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
int i = 0, j = 0;
int lbi, ubi, lbj, ubj;
lbi = set1.ELEMENTS[0];
ubi = set1.ELEMENTS[1];
lbj = set2.ELEMENTS[0];
ubj = set2.ELEMENTS[1];
while (i < s1 && j < s2) {
if (ubj < lbi && ++j < s2) {
lbj = set2.ELEMENTS[j << 1];
ubj = set2.ELEMENTS[(j << 1) + 1];
}else if (lbj <= lbi && ubi <= ubj) {
if(++i < s1) {
lbi = set1.ELEMENTS[i << 1];
ubi = set1.ELEMENTS[(i << 1) + 1];
}
}else{
return false;
}
}
}
return s2 > 0;
}
/**
* @param var a variable
* @param set a set
     * @return <i>true</i> if <i>var</i> is not included in <i>set</i>,
* <i>false</i> otherwise.
*/
@SuppressWarnings("Duplicates")
public static boolean notIncludedIn(IntVar var, IntIterableRangeSet set) {
int s1 = var.getDomainSize();
int s2 = set.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
DisposableRangeIterator rit = var.getRangeIterator(true);
int j = 0;
int lbi, ubi, lbj, ubj;
lbi = rit.min();
ubi = rit.max();
rit.next();
lbj = set.ELEMENTS[0];
ubj = set.ELEMENTS[1];
while (rit.hasNext() && j < s2) {
if ((lbi <= lbj && lbj <= ubi) || (lbj <= lbi && lbi <= ubj)) {
rit.dispose();
return true;
}
if (ubi <= ubj && rit.hasNext()) {
lbi = rit.min();
ubi = rit.max();
rit.next();
}else if (ubj <= ubi && ++j < s2) {
lbj = set.ELEMENTS[j << 1];
ubj = set.ELEMENTS[(j << 1) + 1];
}
}
rit.dispose();
}
return false;
}
/**
* @param var a variable
* @param set a set
* @return <i>true</i> if intersection of <i>var</i> and <i>set</i> is not empty,
* <i>false</i> otherwise.
*/
@SuppressWarnings("Duplicates")
public static boolean intersect(IntVar var, IntIterableRangeSet set) {
int s1 = var.getDomainSize();
int s2 = set.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
int j = 0;
int lbi, ubi, lbj, ubj;
lbi = var.getLB();
ubi = var.nextValueOut(lbi) - 1;
lbj = set.ELEMENTS[0];
ubj = set.ELEMENTS[1];
while (lbi < Integer.MAX_VALUE && j < s2) {
if ((lbi <= lbj && lbj <= ubi) || (lbj <= lbi && lbi <= ubj)) {
return true;
}
if (ubi <= ubj && (lbi = var.nextValue(ubi)) < Integer.MAX_VALUE) {
ubi = var.nextValueOut(lbi) - 1;
}else if (ubj <= ubi && ++j < s2) {
lbj = set.ELEMENTS[j << 1];
ubj = set.ELEMENTS[(j << 1) + 1];
}
}
}
return false;
}
/**
* @param set1 a set
* @param set2 a set
* @return <i>true</i> if intersection of <i>set1</i> and <i>set2</i> is not empty,
* <i>false</i> otherwise.
*/
@SuppressWarnings("Duplicates")
public static boolean intersect(IntIterableRangeSet set1, IntIterableRangeSet set2) {
int s1 = set1.SIZE >> 1;
int s2 = set2.SIZE >> 1;
if (s1 > 0 && s2 > 0) {
int i = 0, j = 0;
int lbi, ubi, lbj, ubj;
lbi = set1.ELEMENTS[0];
ubi = set1.ELEMENTS[1];
lbj = set2.ELEMENTS[0];
ubj = set2.ELEMENTS[1];
while (i < s1 && j < s2) {
if ((lbi <= lbj && lbj <= ubi) || (lbj <= lbi && lbi <= ubj)) {
return true;
}
if (ubi <= ubj && ++i < s1) {
lbi = set1.ELEMENTS[i << 1];
ubi = set1.ELEMENTS[(i << 1) + 1];
}else if (ubj <= ubi && ++j < s2) {
lbj = set2.ELEMENTS[j << 1];
ubj = set2.ELEMENTS[(j << 1) + 1];
}
}
}
return false;
}
}
| chocoteam/choco3 | src/main/java/org/chocosolver/util/objects/setDataStructures/iterable/IntIterableSetUtils.java | Java | bsd-3-clause | 27,456 |
/*
* [The "BSD licence"]
* Copyright (c) 2013-2015 Dandelion
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of Dandelion nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.github.dandelion.datatables.core.option.processor.export;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.dandelion.core.DandelionException;
import com.github.dandelion.core.option.OptionProcessingContext;
import com.github.dandelion.core.util.StringUtils;
import com.github.dandelion.datatables.core.export.ExportConf;
/**
* <p>
 * Processor that configures the file name within the {@link ExportConf}
* corresponding to the export format.
* </p>
*
* @author Thibault Duchateau
* @since 1.1.0
*/
public class ExportFileNameProcessor extends AbstractExportOptionProcessor {
private static Logger logger = LoggerFactory.getLogger(ExportFileNameProcessor.class);
@Override
protected Object getProcessedValue(OptionProcessingContext context) {
String valueAsString = context.getValueAsString();
if (StringUtils.isNotBlank(valueAsString)) {
// Extract the export format
String exportFormat = getExportFormat(context);
if (StringUtils.isNotBlank(exportFormat)) {
logger.debug("Export format found: \"{}\"", exportFormat);
ExportConf exportConf = getExportConf(exportFormat, context);
exportConf.setFileName(valueAsString);
}
else {
throw new DandelionException("Format " + exportFormat + " unknown");
}
}
// The value is not used later during the processing but returned anyway
// mainly for logging purpose
return valueAsString;
}
} | atreeyang/dandelion-datatables | datatables-core/src/main/java/com/github/dandelion/datatables/core/option/processor/export/ExportFileNameProcessor.java | Java | bsd-3-clause | 3,067 |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengles;
import java.nio.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryStack.*;
import static org.lwjgl.system.MemoryUtil.*;
/**
* Native bindings to the <a target="_blank" href="https://www.khronos.org/registry/OpenGL/extensions/EXT/EXT_draw_buffers.txt">EXT_draw_buffers</a> extension.
*
* <p>This extension increases the number of available framebuffer object color attachment points, extends OpenGL ES 2.0 to allow multiple output colors, and
* provides a mechanism for directing those outputs to multiple color buffers.</p>
*
* <p>This extension is similar to the combination of the GL_NV_draw_buffers and GL_NV_fbo_color_attachments extensions, but imposes certain restrictions
* informed by the OpenGL ES 3.0 API.</p>
*
* <p>Requires {@link GLES20 GLES 2.0}.</p>
*/
public class EXTDrawBuffers {
static { GLES.initialize(); }
/** Accepted by the {@code pname} parameter of GetIntegerv. */
public static final int GL_MAX_COLOR_ATTACHMENTS_EXT = 0x8CDF;
/** Accepted by the {@code pname} parameters of GetIntegerv and GetFloatv. */
public static final int
GL_MAX_DRAW_BUFFERS_EXT = 0x8824,
GL_DRAW_BUFFER0_EXT = 0x8825,
GL_DRAW_BUFFER1_EXT = 0x8826,
GL_DRAW_BUFFER2_EXT = 0x8827,
GL_DRAW_BUFFER3_EXT = 0x8828,
GL_DRAW_BUFFER4_EXT = 0x8829,
GL_DRAW_BUFFER5_EXT = 0x882A,
GL_DRAW_BUFFER6_EXT = 0x882B,
GL_DRAW_BUFFER7_EXT = 0x882C,
GL_DRAW_BUFFER8_EXT = 0x882D,
GL_DRAW_BUFFER9_EXT = 0x882E,
GL_DRAW_BUFFER10_EXT = 0x882F,
GL_DRAW_BUFFER11_EXT = 0x8830,
GL_DRAW_BUFFER12_EXT = 0x8831,
GL_DRAW_BUFFER13_EXT = 0x8832,
GL_DRAW_BUFFER14_EXT = 0x8833,
GL_DRAW_BUFFER15_EXT = 0x8834;
/**
* Accepted by the {@code attachment} parameter of FramebufferRenderbuffer, FramebufferTexture2D and GetFramebufferAttachmentParameteriv, and by the
* {@code bufs} parameter of DrawBuffersEXT.
*/
public static final int
GL_COLOR_ATTACHMENT0_EXT = 0x8CE0,
GL_COLOR_ATTACHMENT1_EXT = 0x8CE1,
GL_COLOR_ATTACHMENT2_EXT = 0x8CE2,
GL_COLOR_ATTACHMENT3_EXT = 0x8CE3,
GL_COLOR_ATTACHMENT4_EXT = 0x8CE4,
GL_COLOR_ATTACHMENT5_EXT = 0x8CE5,
GL_COLOR_ATTACHMENT6_EXT = 0x8CE6,
GL_COLOR_ATTACHMENT7_EXT = 0x8CE7,
GL_COLOR_ATTACHMENT8_EXT = 0x8CE8,
GL_COLOR_ATTACHMENT9_EXT = 0x8CE9,
GL_COLOR_ATTACHMENT10_EXT = 0x8CEA,
GL_COLOR_ATTACHMENT11_EXT = 0x8CEB,
GL_COLOR_ATTACHMENT12_EXT = 0x8CEC,
GL_COLOR_ATTACHMENT13_EXT = 0x8CED,
GL_COLOR_ATTACHMENT14_EXT = 0x8CEE,
GL_COLOR_ATTACHMENT15_EXT = 0x8CEF;
protected EXTDrawBuffers() {
throw new UnsupportedOperationException();
}
// --- [ glDrawBuffersEXT ] ---
public static native void nglDrawBuffersEXT(int n, long bufs);
public static void glDrawBuffersEXT(@NativeType("GLenum const *") IntBuffer bufs) {
nglDrawBuffersEXT(bufs.remaining(), memAddress(bufs));
}
public static void glDrawBuffersEXT(@NativeType("GLenum const *") int buf) {
MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
try {
IntBuffer bufs = stack.ints(buf);
nglDrawBuffersEXT(1, memAddress(bufs));
} finally {
stack.setPointer(stackPointer);
}
}
/** Array version of: {@link #glDrawBuffersEXT DrawBuffersEXT} */
public static void glDrawBuffersEXT(@NativeType("GLenum const *") int[] bufs) {
long __functionAddress = GLES.getICD().glDrawBuffersEXT;
if (CHECKS) {
check(__functionAddress);
}
callPV(bufs.length, bufs, __functionAddress);
}
} | LWJGL-CI/lwjgl3 | modules/lwjgl/opengles/src/generated/java/org/lwjgl/opengles/EXTDrawBuffers.java | Java | bsd-3-clause | 4,079 |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.crash.browser;
import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.app.ApplicationExitInfo;
import android.content.Context;
import android.os.Build;
import androidx.annotation.IntDef;
import org.chromium.base.ContextUtils;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.metrics.RecordHistogram;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.List;
/**
* Wrapper to get the process exit reason of a dead process with same UID as current from
* ActivityManager, and record to UMA.
*/
public class ProcessExitReasonFromSystem {
/**
* Get the exit reason of the most recent chrome process that died and had |pid| as the process
     * ID. Only available on R+ devices; returns -1 otherwise.
* @return ApplicationExitInfo.Reason
*/
@TargetApi(Build.VERSION_CODES.R)
public static int getExitReason(int pid) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
return -1;
}
ActivityManager am =
(ActivityManager) ContextUtils.getApplicationContext().getSystemService(
Context.ACTIVITY_SERVICE);
// Set maxNum to 1 since we want the latest reason with the pid.
List<ApplicationExitInfo> reasons =
am.getHistoricalProcessExitReasons(/*package_name=*/null, pid, /*maxNum=*/1);
if (reasons.isEmpty() || reasons.get(0) == null || reasons.get(0).getPid() != pid) {
return -1;
}
return reasons.get(0).getReason();
}
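    // Illustrative use only (the pid and histogram name below are placeholders, not values
    // defined elsewhere in Chromium):
    //   int reason = ProcessExitReasonFromSystem.getExitReason(trackedChildPid);
    //   ProcessExitReasonFromSystem.recordAsEnumHistogram("MyFeature.ChildExitReason", reason);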
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
@IntDef({ExitReason.REASON_ANR, ExitReason.REASON_CRASH, ExitReason.REASON_CRASH_NATIVE,
ExitReason.REASON_DEPENDENCY_DIED, ExitReason.REASON_EXCESSIVE_RESOURCE_USAGE,
ExitReason.REASON_EXIT_SELF, ExitReason.REASON_INITIALIZATION_FAILURE,
ExitReason.REASON_LOW_MEMORY, ExitReason.REASON_OTHER,
ExitReason.REASON_PERMISSION_CHANGE, ExitReason.REASON_SIGNALED,
ExitReason.REASON_UNKNOWN, ExitReason.REASON_USER_REQUESTED,
ExitReason.REASON_USER_STOPPED})
@Retention(RetentionPolicy.SOURCE)
@interface ExitReason {
int REASON_ANR = 0;
int REASON_CRASH = 1;
int REASON_CRASH_NATIVE = 2;
int REASON_DEPENDENCY_DIED = 3;
int REASON_EXCESSIVE_RESOURCE_USAGE = 4;
int REASON_EXIT_SELF = 5;
int REASON_INITIALIZATION_FAILURE = 6;
int REASON_LOW_MEMORY = 7;
int REASON_OTHER = 8;
int REASON_PERMISSION_CHANGE = 9;
int REASON_SIGNALED = 10;
int REASON_UNKNOWN = 11;
int REASON_USER_REQUESTED = 12;
int REASON_USER_STOPPED = 13;
int NUM_ENTRIES = 14;
}
@CalledByNative
private static void recordExitReasonToUma(int pid, String umaName) {
recordAsEnumHistogram(umaName, getExitReason(pid));
}
/**
* Records the given |systemReason| (given by #getExitReason) to UMA with the given |umaName|.
* @see #getExitReason
*/
public static void recordAsEnumHistogram(String umaName, int systemReason) {
@ExitReason
int reason;
switch (systemReason) {
case ApplicationExitInfo.REASON_ANR:
reason = ExitReason.REASON_ANR;
break;
case ApplicationExitInfo.REASON_CRASH:
reason = ExitReason.REASON_CRASH;
break;
case ApplicationExitInfo.REASON_CRASH_NATIVE:
reason = ExitReason.REASON_CRASH_NATIVE;
break;
case ApplicationExitInfo.REASON_DEPENDENCY_DIED:
reason = ExitReason.REASON_DEPENDENCY_DIED;
break;
case ApplicationExitInfo.REASON_EXCESSIVE_RESOURCE_USAGE:
reason = ExitReason.REASON_EXCESSIVE_RESOURCE_USAGE;
break;
case ApplicationExitInfo.REASON_EXIT_SELF:
reason = ExitReason.REASON_EXIT_SELF;
break;
case ApplicationExitInfo.REASON_INITIALIZATION_FAILURE:
reason = ExitReason.REASON_INITIALIZATION_FAILURE;
break;
case ApplicationExitInfo.REASON_LOW_MEMORY:
reason = ExitReason.REASON_LOW_MEMORY;
break;
case ApplicationExitInfo.REASON_OTHER:
reason = ExitReason.REASON_OTHER;
break;
case ApplicationExitInfo.REASON_PERMISSION_CHANGE:
reason = ExitReason.REASON_PERMISSION_CHANGE;
break;
case ApplicationExitInfo.REASON_SIGNALED:
reason = ExitReason.REASON_SIGNALED;
break;
case ApplicationExitInfo.REASON_UNKNOWN:
reason = ExitReason.REASON_UNKNOWN;
break;
case ApplicationExitInfo.REASON_USER_REQUESTED:
reason = ExitReason.REASON_USER_REQUESTED;
break;
case ApplicationExitInfo.REASON_USER_STOPPED:
reason = ExitReason.REASON_USER_STOPPED;
break;
default:
// Reason is unavailable in current platform, so skip recording.
return;
}
RecordHistogram.recordEnumeratedHistogram(umaName, reason, ExitReason.NUM_ENTRIES);
}
};
| nwjs/chromium.src | components/crash/android/java/src/org/chromium/components/crash/browser/ProcessExitReasonFromSystem.java | Java | bsd-3-clause | 5,698 |
package rallyws;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.junit.Before;
import org.junit.Test;
import rallywidget.ListWidgetItem;
import rallyws.api.Task;
import rallyws.api.HierarchicalRequirement;
import rallyws.api.Iteration;
import javax.xml.rpc.ServiceException;
import java.rmi.RemoteException;
import java.util.List;
public class RallyWSTest {
private RallyService service;
private final static String PROJECT_NAME = "Riders on the Storm";
/*
    These tests are not intended to be run in an automated suite; they depend on data that varies over time.
*/
@Before
public void setup() throws RemoteException, ServiceException {
service = new RallyService();
service.login("[email protected]", "griffith.5");
}
@Test
public void testLogin() throws RemoteException{
assertEquals("Keith Combs", service.getDisplayName());
}
@Test
public void testRequestsWithoutLogin() throws RemoteException {
RallyService service = new RallyService();
assertNull(service.getDisplayName());
assertEquals(0, service.getTasks(null, null).size());
assertEquals(0, service.getItems(null).size());
}
@Test
public void testTaskCount() throws RemoteException {
List tasks = service.getTasks(null, null);
assertEquals(78, tasks.size());
}
@Test
public void testTaskOwner() throws RemoteException {
List<Task> tasks = service.getTasks(null, null);
for (Task task : tasks) {
assertEquals("[email protected]", task.getOwner());
}
}
@Test
public void testGetItems() throws RemoteException {
List<ListWidgetItem> widgetItemList = service.getItems(null);
List tasks = service.getTasks(null, null);
assertEquals(78, widgetItemList.size());
for (int i = 0; i < widgetItemList.size(); i++) {
ListWidgetItem listWidgetItem = widgetItemList.get(i);
Task task = (Task) tasks.get(i);
assertEquals(task.getName(), listWidgetItem.getName());
assertEquals(RallyService.RALLY_TASK_URL + task.getObjectID(), listWidgetItem.getUri().toString());
}
}
@Test
public void testStateFilter() throws RemoteException {
List<ListWidgetItem> widgetItemList = service.getItems("(State != Completed)");
List tasks = service.getTasks("(State != Completed)", null);
assertEquals(13, widgetItemList.size());
for (int i = 0; i < widgetItemList.size(); i++) {
ListWidgetItem listWidgetItem = widgetItemList.get(i);
Task task = (Task) tasks.get(i);
assertEquals(task.getName(), listWidgetItem.getName());
assertEquals(RallyService.RALLY_TASK_URL + task.getObjectID(), listWidgetItem.getUri().toString());
}
}
@Test
public void testGetStories() throws RemoteException {
Iteration iteration = service.getCurrentIteration(PROJECT_NAME);
List<HierarchicalRequirement> stories = service.getStories("(((Iteration.ObjectID = " + iteration.getObjectID() + ") and (ScheduleState != Accepted)) and (ScheduleState != Completed))");
assertEquals(8, stories.size());
}
@Test
public void testUpdateTask() throws RemoteException {
Iteration iteration = service.getCurrentIteration(PROJECT_NAME);
List tasks = service.getTasks("((State != Completed) and (Iteration.ObjectID = " + iteration.getObjectID() + "))", null);
Task myTask = (Task) tasks.get(0);
String initialState = myTask.getState();
myTask.setState(TaskStateEnum.Completed.toString());
service.updateRally(myTask);
// verify results
// put it back where it was
//myTask.setState(initialState);
//service.updateRally(myTask);
}
@Test
public void testRemainingWorkHours() throws RemoteException {
Double todo = service.getTotalToDo(PROJECT_NAME);
assertEquals((Double)13.0, todo);
}
@Test
public void testDaysRemaining() throws RemoteException{
Double hours = service.getRemainingTime(PROJECT_NAME);
System.out.println("hours = " + hours);
}
}
| steveonjava/widgetfx | widgets/RallyWS/test/rallyws/RallyWSTest.java | Java | bsd-3-clause | 4,417 |
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.util.LinkedList;
/** Java wrapper for a C++ MediaStreamInterface. */
public class MediaStream {
public final LinkedList<AudioTrack> audioTracks;
public final LinkedList<VideoTrack> videoTracks;
public final LinkedList<VideoTrack> preservedVideoTracks;
// Package-protected for PeerConnection.
final long nativeStream;
public MediaStream(long nativeStream) {
audioTracks = new LinkedList<AudioTrack>();
videoTracks = new LinkedList<VideoTrack>();
preservedVideoTracks = new LinkedList<VideoTrack>();
this.nativeStream = nativeStream;
}
public boolean addTrack(AudioTrack track) {
if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
audioTracks.add(track);
return true;
}
return false;
}
public boolean addTrack(VideoTrack track) {
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
videoTracks.add(track);
return true;
}
return false;
}
  // Tracks added via addTrack() are automatically released once MediaStream.dispose()
  // is called. If a video track needs to be preserved after the MediaStream is destroyed,
  // it should be added to the MediaStream using the addPreservedTrack() call.
public boolean addPreservedTrack(VideoTrack track) {
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
preservedVideoTracks.add(track);
return true;
}
return false;
}
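  // Illustrative note: a locally created track that must outlive this stream (e.g. one reused
  // after renegotiation) should be added with addPreservedTrack(), so dispose() detaches it
  // without releasing it; tracks added with addTrack() are released by dispose().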
public boolean removeTrack(AudioTrack track) {
audioTracks.remove(track);
return nativeRemoveAudioTrack(nativeStream, track.nativeTrack);
}
public boolean removeTrack(VideoTrack track) {
videoTracks.remove(track);
preservedVideoTracks.remove(track);
return nativeRemoveVideoTrack(nativeStream, track.nativeTrack);
}
public void dispose() {
// Remove and release previously added audio and video tracks.
while (!audioTracks.isEmpty()) {
AudioTrack track = audioTracks.getFirst();
removeTrack(track);
track.dispose();
}
while (!videoTracks.isEmpty()) {
VideoTrack track = videoTracks.getFirst();
removeTrack(track);
track.dispose();
}
// Remove, but do not release preserved video tracks.
while (!preservedVideoTracks.isEmpty()) {
removeTrack(preservedVideoTracks.getFirst());
}
free(nativeStream);
}
public String label() {
return nativeLabel(nativeStream);
}
public String toString() {
return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
}
private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
private static native String nativeLabel(long nativeStream);
private static native void free(long nativeStream);
}
| Alkalyne/webrtctrunk | sdk/android/api/org/webrtc/MediaStream.java | Java | bsd-3-clause | 3,448 |
/*
* Copyright (c) 2016, 2019, Gluon Software
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse
* or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.devoxx.views;
import com.devoxx.DevoxxApplication;
import com.devoxx.DevoxxView;
import com.devoxx.model.Conference;
import com.devoxx.model.Location;
import com.devoxx.service.Service;
import com.devoxx.views.helper.PoiLayer;
import com.devoxx.views.helper.Util;
import com.gluonhq.charm.glisten.afterburner.GluonPresenter;
import com.gluonhq.charm.glisten.control.AppBar;
import com.gluonhq.charm.glisten.control.FloatingActionButton;
import com.gluonhq.charm.glisten.mvc.View;
import com.gluonhq.charm.glisten.visual.MaterialDesignIcon;
import com.gluonhq.connect.GluonObservableObject;
import com.gluonhq.maps.MapLayer;
import com.gluonhq.maps.MapPoint;
import com.gluonhq.maps.MapView;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.value.ChangeListener;
import javafx.fxml.FXML;
import javafx.scene.Group;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.image.ImageView;
import javafx.scene.layout.Region;
import javafx.scene.layout.StackPane;
import javax.inject.Inject;
public class VenuePresenter extends GluonPresenter<DevoxxApplication> {
    private static final double DEFAULT_ZOOM = 15.0;
@Inject
private Service service;
@FXML
private View venue;
@FXML
private MapView mapView;
@FXML
private Region imageSpacer;
@FXML
private ImageView imageView;
@FXML
private Label name;
@FXML
private Label address1;
@FXML
private Label address2;
private MapLayer venueMarker;
private final ChangeListener<Number> widthListener = (observable, oldValue, newValue) -> resizeImages();
private final ChangeListener<Number> heightListener = (observable, oldValue, newValue) -> resizeImages();
private FloatingActionButton webActionButton;
public void initialize() {
ReadOnlyObjectProperty<Conference> conferenceProperty = service.conferenceProperty();
conferenceProperty.addListener((observableValue, oldVenue, conference) -> {
if (conference != null) {
fetchLocationAndUpdateVenue(conference);
}
});
if (conferenceProperty.get() != null) {
fetchLocationAndUpdateVenue(conferenceProperty.get());
}
venue.setOnShowing(event -> {
AppBar appBar = getApp().getAppBar();
appBar.setNavIcon(getApp().getNavMenuButton());
appBar.setTitleText(DevoxxView.VENUE.getTitle());
// FixME: The following is a hack to reset zoom value (OTN-254)
mapView.setZoom(10.0);
mapView.setZoom(DEFAULT_ZOOM);
// randomly change image on each showing
imageView.setImage(Util.getMediaBackgroundImage());
venue.getScene().widthProperty().addListener(widthListener);
venue.getScene().heightProperty().addListener(heightListener);
resizeImages();
});
venue.setOnHiding(event -> {
venue.getScene().widthProperty().removeListener(widthListener);
venue.getScene().heightProperty().removeListener(heightListener);
});
}
private void fetchLocationAndUpdateVenue(Conference conference) {
createFloatingActionButtons(conference);
if (service.isNewCfpURL()) {
final Location location = createLocationFrom(conference);
updateVenueInformation(conference, location);
} else {
final GluonObservableObject<Location> location = service.retrieveLocation();
if (location.isInitialized()) {
updateVenueInformation(conference, location.get());
}
location.initializedProperty().addListener((o, ov, nv) -> {
if (nv) {
updateVenueInformation(conference, location.get());
}
});
}
}
private void updateVenueInformation(Conference conference, Location location) {
name.setText(location.getName());
address1.setText(getLocationAddress1(location));
address2.setText(getLocationAddress2(location));
MapPoint venuePoint = new MapPoint(location.getLatitude(), location.getLongitude());
mapView.setCenter(venuePoint);
mapView.setZoom(DEFAULT_ZOOM);
if (venueMarker != null) {
mapView.removeLayer(venueMarker);
}
venueMarker = createVenueMarker(venuePoint);
mapView.addLayer(venueMarker);
String url = conference.getWebsite();
if (url == null || url.isEmpty()) {
webActionButton.hide();
}
resizeImages();
}
private String getLocationAddress1(Location location) {
StringBuilder address = new StringBuilder();
if (location.getAddress1() != null && !location.getAddress1().equals("")) {
address.append(location.getAddress1());
}
if (location.getAddress2() != null && !location.getAddress2().equals("")) {
if (!address.toString().equals("")) {
address.append("\n");
}
address.append(location.getAddress2());
}
return address.toString();
}
private String getLocationAddress2(Location location) {
StringBuilder address = new StringBuilder();
if (location.getCity() != null && !location.getCity().equals("")) {
if (!address.toString().equals("")) {
address.append("\n");
}
address.append(location.getCity());
if (location.getCountry() != null && !location.getCountry().equals("")) {
address.append(", ");
address.append(location.getCountry());
}
} else if (location.getCountry() != null && !location.getCountry().equals("")) {
if (!address.toString().equals("")) {
address.append("\n");
}
address.append(location.getCountry());
}
return address.toString();
}
private void createFloatingActionButtons(Conference conference) {
webActionButton = Util.createWebLaunchFAB(() -> conference.getWebsite());
webActionButton.getStyleClass().add("secondary");
webActionButton.showOn(venue);
}
private MapLayer createVenueMarker(MapPoint venue) {
PoiLayer answer = new PoiLayer();
answer.getStyleClass().add("poi-layer");
Node marker = MaterialDesignIcon.ROOM.graphic();
marker.getStyleClass().add("marker");
Group box = new Group(marker);
box.getStyleClass().add("marker-container");
// FIXME: Better Solution ?
// StackPane added because of OTN-320.
// Avoids Group to translate when zoom in / zoom out events takes place
answer.addPoint(venue, new StackPane(box));
return answer;
}
private void resizeImages() {
if (venue == null || venue.getScene() == null) {
return;
}
double newWidth = venue.getScene().getWidth();
double newHeight = venue.getScene().getHeight() - getApp().getAppBar().getHeight(); // Exclude the AppBar
// Resize and translate ImageView
// Resize imageSpacer and stop expanding when a maxHeight is reached.
Util.resizeImageViewAndImageSpacer(imageSpacer, imageView, newWidth, newHeight / 3.5);
}
private Location createLocationFrom(Conference conference) {
final Location location = new Location();
location.setName(conference.getName());
location.setAddress1(conference.getLocationAddress());
location.setCity(conference.getLocationCity());
location.setCountry(conference.getLocationCountry());
location.setLatitude(conference.getVenueLatitude());
location.setLongitude(conference.getVenueLongitude());
return location;
}
}
| devoxx/MyDevoxxGluon | DevoxxClientMobile/src/main/java/com/devoxx/views/VenuePresenter.java | Java | bsd-3-clause | 9,442 |
package com.ericsson.research.transport.ws.spi;
/*
* ##_BEGIN_LICENSE_##
* Transport Abstraction Package (trap)
* ----------
* Copyright (C) 2014 Ericsson AB
* ----------
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the Ericsson AB nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ##_END_LICENSE_##
*/
import com.ericsson.research.transport.ws.WSException;
public class WSHixieFrame extends WSAbstractFrame
{
private static final byte[] CLOSE_PAYLOAD = new byte[] { (byte) 0xFF, 0 };
protected WSHixieFrame()
{
}
public WSHixieFrame(byte type, byte[] payload)
{
super(type);
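        // Hixie-76 style framing, as implemented below:
        //   text frames:   0x00 <payload bytes> 0xFF
        //   binary frames: 0x80 <length in base-128, 7 bits per byte, high bit set on all but
        //                  the last byte, most-significant group first> <payload bytes>
        //   close frame:   0xFF 0x00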
switch (type)
{
case BINARY_FRAME:
int j = 1;
for (int i = 28; i >= 0; i = i - 7)
{
byte b = (byte) ((payload.length >>> i) & 0x7F);
if (b > 0 || j > 1 || i == 0)
{
if (i > 0)
b |= 0x80;
if (j == 1)
this.payload = new byte[payload.length + 2 + (i / 7)];
this.payload[j++] = b;
}
}
this.payload[0] = (byte) 0x80;
System.arraycopy(payload, 0, this.payload, j, payload.length);
break;
case TEXT_FRAME:
this.payload = new byte[payload.length + 2];
this.payload[0] = 0;
System.arraycopy(payload, 0, this.payload, 1, payload.length);
this.payload[payload.length + 1] = (byte) 0xFF;
break;
case CLOSE_FRAME:
this.payload = CLOSE_PAYLOAD;
break;
default:
throw new IllegalStateException("Invalid payload type");
}
}
public int deserialize(byte[] data, int length) throws WSException
{
if (this.l1 == -1)
throw new IllegalStateException("Already deserialized");
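        // Incremental parse: 'pos' is the index of the next byte to inspect, so returning 0 means
        // more data is needed. For binary frames 'len' caches the decoded payload length and 'l1'
        // the offset of the first payload byte; on completion the total number of consumed bytes
        // is returned and 'l1' is set to -1 to mark the frame as fully deserialized.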
for (;;)
{
if (length <= this.pos)
return 0;
switch (this.pos)
{
case 0:
if ((data[0] & 0x80) == 0)
this.type = TEXT_FRAME;
else if ((data[0] & 0x7F) == 0)
this.type = BINARY_FRAME;
else
this.type = CLOSE_FRAME;
this.pos++;
break;
case 1:
if (this.type == CLOSE_FRAME)
{
this.l1 = -1;
return 2;
}
default:
switch (this.type)
{
case BINARY_FRAME:
if (this.len == -1)
{
if ((data[this.pos] & 0x80) == 0)
{
this.len = data[1] & 0x7F;
for (int i = 2; i <= this.pos; i++)
this.len = (this.len << 7) | (data[i] & 0x7F);
this.l1 = (byte) (this.pos + 1);
this.pos += this.len;
continue;
}
}
else
{
this.payload = new byte[this.len];
if (this.len > 0)
System.arraycopy(data, this.l1, this.payload, 0, this.len);
this.l1 = -1;
return this.pos + 1;
}
break;
case TEXT_FRAME:
if (data[this.pos] == (byte) 0xFF)
{
this.payload = new byte[this.pos - 1];
if (this.pos > 1)
System.arraycopy(data, 1, this.payload, 0, this.pos - 1);
this.l1 = -1;
return this.pos + 1;
}
break;
}
this.pos++;
}
}
}
}
| EricssonResearch/trap | trap-network/trap-network-websockets-api/src/main/java/com/ericsson/research/transport/ws/spi/WSHixieFrame.java | Java | bsd-3-clause | 5,956 |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.security.apikey;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.schema.PropertyType;
import org.hisp.dhis.schema.annotation.Property;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
/**
* @author Morten Svanæs <[email protected]>
*/
public class IpAllowedList extends ApiTokenAttribute implements Serializable
{
private Set<String> allowedIps = new HashSet<>();
public IpAllowedList()
{
super( "IpAllowedList" );
}
@JsonProperty
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
@Property( value = PropertyType.COLLECTION, required = Property.Value.TRUE )
public Set<String> getAllowedIps()
{
return allowedIps;
}
public void setAllowedIps( Set<String> allowedIps )
{
this.allowedIps = allowedIps;
}
public static IpAllowedList of( String... values )
{
final IpAllowedList ipAllowedList = new IpAllowedList();
ipAllowedList.setAllowedIps( new HashSet<>(
Stream.of( values ).collect( Collectors.toSet() ) ) );
return ipAllowedList;
}
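    // Illustrative usage (the addresses are examples only):
    //   IpAllowedList allowed = IpAllowedList.of( "192.168.0.10", "10.0.0.1" );
    // The resulting instance is then stored as one of the attributes of an API token.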
@Override
public boolean equals( Object o )
{
if ( this == o )
{
return true;
}
if ( o == null || getClass() != o.getClass() )
{
return false;
}
IpAllowedList that = (IpAllowedList) o;
return Objects.equals( allowedIps, that.allowedIps );
}
@Override
public int hashCode()
{
return Objects.hash( allowedIps );
}
}
| dhis2/dhis2-core | dhis-2/dhis-api/src/main/java/org/hisp/dhis/security/apikey/IpAllowedList.java | Java | bsd-3-clause | 3,370 |
package org.motechproject.event.listener;
import org.motechproject.event.MotechEvent;
public interface EventRelay {
void sendEventMessage(MotechEvent motechEvent);
}
| motech/motech-server-pillreminder | platform/event/src/main/java/org/motechproject/event/listener/EventRelay.java | Java | bsd-3-clause | 172 |
package io.tracee.binding.httpclient;
import io.tracee.Tracee;
import io.tracee.TraceeConstants;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.InetSocketAddress;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
public class TraceeHttpClientIT {
private Server server;
private String serverEndpoint;
@Test
public void testWritesToServerAndParsesResponse() throws IOException {
final HttpClient unit = TraceeHttpClientDecorator.wrap(new HttpClient());
GetMethod getMethod = new GetMethod(serverEndpoint);
Tracee.getBackend().put("beforeRequest", "yip");
unit.executeMethod(getMethod);
assertThat(getMethod.getStatusCode(), equalTo(HttpServletResponse.SC_NO_CONTENT));
assertThat(Tracee.getBackend().get("responseFromServer"), equalTo("yesSir"));
}
@Before
public void startJetty() throws Exception {
server = new Server(new InetSocketAddress("127.0.0.1", 0));
server.setHandler(requestHandler);
server.start();
serverEndpoint = "http://"+server.getConnectors()[0].getName();
}
private final Handler requestHandler = new AbstractHandler() {
@Override
public void handle(String s, Request request, HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws IOException, ServletException {
final String incomingTraceeHeader = request.getHeader(TraceeConstants.TPIC_HEADER);
assertThat(incomingTraceeHeader, equalTo("beforeRequest=yip"));
httpServletResponse.setHeader(TraceeConstants.TPIC_HEADER, "responseFromServer=yesSir");
httpServletResponse.setStatus(HttpServletResponse.SC_NO_CONTENT);
request.setHandled(true);
}
};
@After
public void stopJetty() throws Exception {
if (server != null) {
server.stop();
server.join();
}
}
}
| hypery2k/tracee | binding/httpclient/src/test/java/io/tracee/binding/httpclient/TraceeHttpClientIT.java | Java | bsd-3-clause | 2,269 |
package org.devgateway.ocds.persistence.mongo.reader;
import org.apache.commons.digester3.Digester;
import org.apache.commons.digester3.binder.AbstractRulesModule;
import org.apache.commons.digester3.binder.DigesterLoader;
import org.devgateway.ocds.persistence.mongo.Release;
import org.devgateway.ocds.persistence.mongo.repository.main.ReleaseRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* @author idobre
* @since 6/27/16
*/
public abstract class XMLFileImport implements XMLFile {
@Autowired
private ReleaseRepository releaseRepository;
@Async
public void process(final InputStream inputStream) throws IOException, SAXException {
DigesterLoader digesterLoader = DigesterLoader.newLoader(getAbstractRulesModule());
Digester digester = digesterLoader.newDigester();
// Push this object onto Digester's stack to handle object save operation (call saveRelease method)
digester.push(this);
digester.parse(inputStream);
}
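    // Note for implementers: the concrete subclass supplies the parsing rules through
    // getAbstractRulesModule(). A typical module creates a Release when its element starts,
    // populates it while the element is read, and hands the finished object to saveRelease(...)
    // on the end event; the instance pushed above sits at the top of the Digester stack for
    // exactly that purpose.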
@Async
public void process(final File file) throws IOException, SAXException {
process(new FileInputStream(file));
}
/**
* This function should be called on 'end' event when we have a complete Release object.
*
* @param obj
*/
public void saveRelease(final Object obj) {
if (obj instanceof Release) {
Release release = processRelease((Release) obj);
if (release.getId() == null) {
releaseRepository.insert(release);
} else {
releaseRepository.save(release);
}
}
}
/**
* Function used to post-process a release in case we need to append new information, like ocid.
*
* @param release
* @return
*/
protected abstract Release processRelease(Release release);
protected abstract AbstractRulesModule getAbstractRulesModule();
}
| devgateway/oc-explorer | persistence-mongodb/src/main/java/org/devgateway/ocds/persistence/mongo/reader/XMLFileImport.java | Java | mit | 2,133 |
package com.alexstyl.android.widget;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * An id of a Widget
*/
@Retention(RetentionPolicy.SOURCE)
public @interface AppWidgetId {
}
| alexstyl/Memento-Calendar | android_common/src/main/java/com/alexstyl/android/widget/AppWidgetId.java | Java | mit | 220 |
/*
* @(#)CopyAction.java
*
* Copyright (c) 1996-2010 by the original authors of JHotDraw and all its
* contributors. All rights reserved.
*
* You may not use, copy or modify this file, except in compliance with the
* license agreement you entered into with the copyright holders. For details
* see accompanying license terms.
*/
package org.jhotdraw.app.action.edit;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import org.jhotdraw.gui.datatransfer.ClipboardUtil;
import org.jhotdraw.util.*;
/**
 * Copies the selected region and places its contents into the system clipboard.
* <p>
* This action acts on the last {@link org.jhotdraw.gui.EditableComponent} /
* {@code JTextComponent} which had the focus when the {@code ActionEvent}
* was generated.
* <p>
* This action is called when the user selects the Copy item in the Edit
* menu. The menu item is automatically created by the application.
* <p>
* If you want this behavior in your application, you have to create an action
* with this ID and put it in your {@code ApplicationModel} in method
* {@link org.jhotdraw.app.ApplicationModel#initApplication}.
*
* @author Werner Randelshofer
* @version $Id: CopyAction.java 717 2010-11-21 12:30:57Z rawcoder $
*/
public class CopyAction extends AbstractSelectionAction {
public final static String ID = "edit.copy";
/** Creates a new instance which acts on the currently focused component. */
public CopyAction() {
this(null);
}
/** Creates a new instance which acts on the specified component.
*
* @param target The target of the action. Specify null for the currently
* focused component.
*/
public CopyAction(@Nullable JComponent target) {
super(target);
ResourceBundleUtil labels = ResourceBundleUtil.getBundle("org.jhotdraw.app.Labels");
labels.configureAction(this, ID);
}
@Override
public void actionPerformed(ActionEvent evt) {
JComponent c = target;
if (c == null && (KeyboardFocusManager.getCurrentKeyboardFocusManager().
getPermanentFocusOwner() instanceof JComponent)) {
c = (JComponent) KeyboardFocusManager.getCurrentKeyboardFocusManager().
getPermanentFocusOwner();
}
// Note: copying is allowed for disabled components
if (c != null) {
c.getTransferHandler().exportToClipboard(
c,
ClipboardUtil.getClipboard(),
TransferHandler.COPY);
}
}
}
| runqingz/umple | Umplificator/UmplifiedProjects/jhotdraw7/src/main/java/org/jhotdraw/app/action/edit/CopyAction.java | Java | mit | 2,615 |
/*******************************************************************************
* Copyright (c) 2006, 2009 David A Carlson
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David A Carlson (XMLmodeling.com) - initial API and implementation
*******************************************************************************/
package org.openhealthtools.mdht.emf.hl7.mif2;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Association Annotations</b></em>'.
* <!-- end-user-doc -->
*
* <!-- begin-model-doc -->
* Descriptive information about the containing association.
* UML: A collector for the comments and constraints associated with an Association. (Consider rendering the definition or description annotation into ModelElement.documentation)
* <!-- end-model-doc -->
*
*
* @see org.openhealthtools.mdht.emf.hl7.mif2.Mif2Package#getAssociationAnnotations()
* @model extendedMetaData="name='AssociationAnnotations' kind='elementOnly'"
* @generated
*/
public interface AssociationAnnotations extends Annotations {
} // AssociationAnnotations
| drbgfc/mdht | hl7/plugins/org.openhealthtools.mdht.emf.hl7.mif2/src/org/openhealthtools/mdht/emf/hl7/mif2/AssociationAnnotations.java | Java | epl-1.0 | 1,339 |
/*******************************************************************************
* Copyright (c) 2010-present Sonatype, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Stuart McCulloch (Sonatype, Inc.) - initial API and implementation
*
* Minimal facade required to be binary-compatible with legacy Plexus API
*******************************************************************************/
package org.codehaus.plexus.component.composition;
public final class CycleDetectedInComponentGraphException
extends Exception
{
private static final long serialVersionUID = 1L;
public CycleDetectedInComponentGraphException( final String message )
{
super( message );
}
public CycleDetectedInComponentGraphException( final String message, final Throwable detail )
{
super( message, detail );
}
}
| eclipse/sisu.plexus | org.eclipse.sisu.plexus/src/org/codehaus/plexus/component/composition/CycleDetectedInComponentGraphException.java | Java | epl-1.0 | 1,070 |
/* Alloy Analyzer 4 -- Copyright (c) 2006-2009, Felix Chang
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
* merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
* OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package edu.mit.csail.sdg.alloy4;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.util.Vector;
import javax.swing.Icon;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListCellRenderer;
import javax.swing.border.EmptyBorder;
/** Graphical combobox.
*
* <p><b>Thread Safety:</b> Can be called only by the AWT event thread.
*/
public class OurCombobox extends JComboBox {
/** This ensures the class can be serialized reliably. */
private static final long serialVersionUID = 0;
/** This caches a preconstructed JLabel that is used for the rendering of each Combo value. */
private static JLabel jlabel;
/** Subclass can override this method to provide the custom text for any given value (It should return "" if no text is needed) */
public String do_getText(Object value) { return String.valueOf(value); }
/** Subclass can override this method to provide the custom icon for any given value (It should return null if no icon is needed) */
public Icon do_getIcon(Object value) { return null; }
/** Subclass can override this method to react upon selection change. */
public void do_changed(Object newValue) { }
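    // Illustrative subclass wiring custom rendering and a selection callback (the 'values'
    // array is a placeholder):
    //   OurCombobox box = new OurCombobox(values) {
    //       @Override public String do_getText(Object v) { return v == null ? "(none)" : v.toString(); }
    //       @Override public void do_changed(Object v) { /* react to the new selection */ }
    //   };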
    /** This helper method makes a copy of the list, and then optionally prepends null at the beginning of the list. */
private static Vector<Object> do_copy (Object[] list, boolean addNull) {
Vector<Object> answer = new Vector<Object>(list.length + (addNull ? 1 : 0));
if (addNull) answer.add(null);
for(int i=0; i<list.length; i++) answer.add(list[i]);
return answer;
}
/** Constructs a new OurCombobox object.
* @param list - the list of allowed values
*/
public OurCombobox (Object[] list) { this(false, list, 0, 0, null); }
/** Constructs a new OurCombobox object.
* @param addNull - whether we should prepend null onto the beginning of the list of allowed values
* @param list - the list of allowed values
* @param width - the width to use (if width==0 and height==0, then we ignore this parameter)
* @param height - the height to use (if width==0 and height==0, then we ignore this parameter)
* @param initialValue - if nonnull it is the initial value to choose in this combo box
*/
public OurCombobox (boolean addNull, Object[] list, int width, int height, Object initialValue) {
super(do_copy(list, addNull));
setFont(OurUtil.getVizFont());
setRenderer(new ListCellRenderer() {
public Component getListCellRendererComponent(JList list, Object value, int i, boolean selected, boolean focused) {
if (jlabel == null) jlabel = OurUtil.label("", Color.BLACK, Color.WHITE, new EmptyBorder(0, 2, 0, 0));
jlabel.setText(do_getText(value));
jlabel.setIcon(do_getIcon(value));
jlabel.setBackground(selected ? list.getSelectionBackground() : list.getBackground());
jlabel.setForeground(selected ? list.getSelectionForeground() : list.getForeground());
return jlabel;
}
});
if (width != 0 || height != 0) { // Make some platform-specific adjustments which should make the combobox look nicer
if (Util.onWindows() && height > 25) height = 25; // Otherwise, the height is too big on Windows
setPreferredSize(new Dimension(width, height));
setMaximumSize(new Dimension(width, height));
if (!Util.onWindows() && !Util.onMac()) setBorder(new EmptyBorder(4, 3, 4, 0));
}
if (initialValue != null) { setSelectedItem(initialValue); }
addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) { do_changed(getSelectedItem()); }
});
}
}
| ModelWriter/WP3 | Source/eu.modelwriter.visualization.test/src/edu/mit/csail/sdg/alloy4/OurCombobox.java | Java | epl-1.0 | 4,958 |
/*******************************************************************************
* Copyright (c) 2017 Red Hat, Inc and others.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Red Hat, Inc - initial API and implementation
*******************************************************************************/
package org.eclipse.reddeer.eclipse.jst.servlet.ui.project.facet;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.reddeer.common.wait.WaitWhile;
import org.eclipse.reddeer.core.reference.ReferencedComposite;
import org.eclipse.reddeer.jface.wizard.WizardPage;
import org.eclipse.reddeer.swt.api.Shell;
import org.eclipse.reddeer.swt.api.TreeItem;
import org.eclipse.reddeer.swt.condition.ShellIsAvailable;
import org.eclipse.reddeer.swt.impl.button.OkButton;
import org.eclipse.reddeer.swt.impl.button.PushButton;
import org.eclipse.reddeer.swt.impl.shell.DefaultShell;
import org.eclipse.reddeer.swt.impl.text.DefaultText;
import org.eclipse.reddeer.swt.impl.text.LabeledText;
import org.eclipse.reddeer.swt.impl.tree.DefaultTree;
import org.eclipse.reddeer.swt.impl.tree.DefaultTreeItem;
/**
* The second wizard page for creating web project.
*/
public class WebProjectSecondPage extends WizardPage{
public WebProjectSecondPage(ReferencedComposite referencedComposite) {
super(referencedComposite);
}
/**
* Edits the source folders on build path.
*
* @param sourceFolder the source folder
	 * @param newValue the new value
*/
	public WebProjectSecondPage editSourceFoldersOnBuildPath(String sourceFolder, String newValue){
new DefaultTreeItem(new DefaultTree(this), sourceFolder).select();
new PushButton(this, "Edit...").click();
Shell editShell = new DefaultShell("Edit Source Folder");
		new DefaultText(editShell).setText(newValue);
new OkButton(editShell).click();
new WaitWhile(new ShellIsAvailable(editShell));
return this;
}
/**
* Removes the source folders on build path.
*
* @param sourceFolder the source folder
*/
public WebProjectSecondPage removeSourceFoldersOnBuildPath(String sourceFolder){
new DefaultTreeItem(new DefaultTree(this), sourceFolder).select();
new PushButton(this, "Remove").click();
return this;
}
/**
* Adds the source folders on build path.
*
	 * @param newValue the new value
*/
	public WebProjectSecondPage addSourceFoldersOnBuildPath(String newValue){
new PushButton(this, "Add Folder...").click();
Shell addShell = new DefaultShell("Add Source Folder");
		new DefaultText(addShell).setText(newValue);
new OkButton(addShell).click();
new WaitWhile(new ShellIsAvailable(addShell));
return this;
}
/**
* Sets the default output folder.
*
* @param folder the new default output folder
*/
public WebProjectSecondPage setDefaultOutputFolder(String folder){
new LabeledText(this, "Default output folder:").setText(folder);
return this;
}
/**
* Gets the source folders.
*
* @return the source folders
*/
public List<String> getSourceFolders(){
List<String> toReturn = new ArrayList<String>();
for(TreeItem item: new DefaultTree(this).getAllItems()){
toReturn.add(item.getText());
}
return toReturn;
}
}
| jboss-reddeer/reddeer | plugins/org.eclipse.reddeer.eclipse/src/org/eclipse/reddeer/eclipse/jst/servlet/ui/project/facet/WebProjectSecondPage.java | Java | epl-1.0 | 3,384 |
/*******************************************************************************
* Copyright (c) 2014, 2014 Bruno Medeiros and other Contributors.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Bruno Medeiros - initial API and implementation
*******************************************************************************/
package melnorme.lang.ide.ui.tools.console;
import static melnorme.utilbox.core.Assert.AssertNamespace.assertTrue;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.text.source.ISharedTextColors;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.console.IOConsole;
import org.eclipse.ui.console.IOConsoleOutputStream;
import melnorme.util.swt.jface.text.ColorManager;
public abstract class AbstractProcessMessageConsole extends IOConsole {
public static class ProcessMessageConsole extends AbstractProcessMessageConsole {
protected ProcessMessageConsole(String name, ImageDescriptor imageDescriptor) {
super(name, imageDescriptor);
postToUI_initOutputStreamColors();
}
}
public final IOConsoleOutputStream stdOut;
public final IOConsoleOutputStream stdErr;
public volatile boolean disposed;
/**
	 * Note: subclasses must call {@link #postToUI_initOutputStreamColors()} after all members
* have been initialized.
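	 * <p>
	 * A minimal sketch of a concrete subclass (hypothetical; the class name and color value
	 * are illustrative only):
	 * <pre>
	 * public class MyToolConsole extends AbstractProcessMessageConsole {
	 *     protected MyToolConsole(String name, ImageDescriptor image) {
	 *         super(name, image);
	 *         postToUI_initOutputStreamColors();
	 *     }
	 *     &#064;Override
	 *     protected void ui_initStreamColors() {
	 *         super.ui_initStreamColors();
	 *         stdErr.setColor(getColorManager().getColor(new RGB(200, 0, 0)));
	 *     }
	 * }
	 * </pre>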
*/
protected AbstractProcessMessageConsole(String name, ImageDescriptor imageDescriptor) {
super(name, imageDescriptor);
stdOut = newOutputStream();
stdErr = newOutputStream();
stdErr.setActivateOnWrite(true);
}
protected void postToUI_initOutputStreamColors() {
// BM: it's not clear to me if a Color can be created outside UI thread, so do asyncExec
		// I would think one can't, but some Platform code (ProcessConsole) does freely create Color instances
		// outside the UI thread, so maybe the asyncExec is not necessary.
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
ui_initStreamColors();
}
});
}
/** Initialize stream colors. This method is only called in the UI thread. */
protected void ui_initStreamColors() {
assertTrue(disposed == false);
}
/**
	 * Disposes this console by delegating the actual disposal to the UI thread.
*/
@Override
protected final void dispose() {
// Disposing in UI thread is one way to solve certain concurrency issues arising with the use of this class.
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
disposed = true;
disposeDo();
}
});
}
protected void disposeDo() {
assertTrue(Display.getCurrent() != null);
super.dispose(); // run actual dispose code
}
protected ISharedTextColors getColorManager() {
return ColorManager.getDefault();
}
} | rayyang2000/goclipse | plugin_ide.ui/src-lang/melnorme/lang/ide/ui/tools/console/AbstractProcessMessageConsole.java | Java | epl-1.0 | 3,042 |
/*******************************************************************************
* Copyright (c) 2017 Red Hat, Inc and others.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Red Hat, Inc - initial API and implementation
*******************************************************************************/
package org.eclipse.reddeer.swt.condition;
import org.eclipse.reddeer.common.condition.AbstractWaitCondition;
import org.eclipse.reddeer.swt.api.Control;
/**
* Condition is met when specified control is enabled.
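 * <p>
 * Typically combined with a RedDeer wait runner, for example (a sketch assuming a
 * {@code PushButton} instance named {@code finishButton} and the {@code WaitUntil} and
 * {@code TimePeriod} classes from the RedDeer common plug-in):
 * <pre>
 * new WaitUntil(new ControlIsEnabled(finishButton), TimePeriod.DEFAULT);
 * finishButton.click();
 * </pre>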
*
* @author [email protected]
*
*/
public class ControlIsEnabled extends AbstractWaitCondition {
private Control<?> control;
private Control<?> resultControl;
/**
	 * Constructs a ControlIsEnabled wait condition.
* Condition is met when specified control is enabled.
*
* @param control control which should be enabled to let the condition pass
*/
public ControlIsEnabled(Control<?> control) {
this.control = control;
}
@Override
public boolean test() {
if (control.isEnabled()) {
this.resultControl = control;
return true;
}
return false;
}
@Override
public String description() {
return "control is enabled";
}
@SuppressWarnings("unchecked")
@Override
public Control<?> getResult() {
return this.resultControl;
}
}
| jboss-reddeer/reddeer | plugins/org.eclipse.reddeer.swt/src/org/eclipse/reddeer/swt/condition/ControlIsEnabled.java | Java | epl-1.0 | 1,491 |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.wst.jsdt.internal.compiler.ast;
import org.eclipse.wst.jsdt.core.ast.IASTNode;
import org.eclipse.wst.jsdt.core.ast.IArrayTypeReference;
import org.eclipse.wst.jsdt.core.compiler.CharOperation;
import org.eclipse.wst.jsdt.internal.compiler.ASTVisitor;
import org.eclipse.wst.jsdt.internal.compiler.lookup.BlockScope;
import org.eclipse.wst.jsdt.internal.compiler.lookup.ClassScope;
import org.eclipse.wst.jsdt.internal.compiler.lookup.Scope;
import org.eclipse.wst.jsdt.internal.compiler.lookup.TypeBinding;
public class ArrayTypeReference extends SingleTypeReference implements IArrayTypeReference {
public int dimensions;
public int originalSourceEnd;
	/**
	 * Creates an array type reference.
	 * @param source the type name
	 * @param dimensions the number of array dimensions
	 * @param pos the source positions encoded as {@code (start << 32) + end}
	 */
public ArrayTypeReference(char[] source, int dimensions, long pos) {
super(source, pos);
this.originalSourceEnd = this.sourceEnd;
this.dimensions = dimensions ;
}
public int dimensions() {
return dimensions;
}
/**
* @return char[][]
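	 *   For example, a reference to {@code Foo} with 2 dimensions yields an array containing
	 *   the single element {@code "Foo[][]"}.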
*/
public char [][] getParameterizedTypeName(){
int dim = this.dimensions;
char[] dimChars = new char[dim*2];
for (int i = 0; i < dim; i++) {
int index = i*2;
dimChars[index] = '[';
dimChars[index+1] = ']';
}
return new char[][]{ CharOperation.concat(token, dimChars) };
}
protected TypeBinding getTypeBinding(Scope scope) {
if (this.resolvedType != null) return this.resolvedType;
TypeBinding leafComponentType = scope.getType(token);
return scope.createArrayType(leafComponentType, dimensions);
}
public StringBuffer printExpression(int indent, StringBuffer output){
super.printExpression(indent, output);
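		// a varargs reference prints its last dimension as "..." instead of "[]"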
if ((this.bits & IsVarArgs) != 0) {
for (int i= 0 ; i < dimensions - 1; i++) {
output.append("[]"); //$NON-NLS-1$
}
output.append("..."); //$NON-NLS-1$
} else {
for (int i= 0 ; i < dimensions; i++) {
output.append("[]"); //$NON-NLS-1$
}
}
return output;
}
public void traverse(ASTVisitor visitor, BlockScope scope) {
visitor.visit(this, scope);
visitor.endVisit(this, scope);
}
public void traverse(ASTVisitor visitor, ClassScope scope) {
visitor.visit(this, scope);
visitor.endVisit(this, scope);
}
public int getASTType() {
return IASTNode.ARRAY_TYPE_REFERENCE;
}
}
| boniatillo-com/PhaserEditor | source/thirdparty/jsdt/org.eclipse.wst.jsdt.core/src/org/eclipse/wst/jsdt/internal/compiler/ast/ArrayTypeReference.java | Java | epl-1.0 | 2,863 |
/*******************************************************************************
* Copyright (c) 2012-2015 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.projectimport.zip;
import org.eclipse.che.ide.api.mvp.View;
import com.google.inject.ImplementedBy;
import javax.validation.constraints.NotNull;
/**
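 * View of the ZIP project importer wizard page (MVP pattern): a presenter implements
 * {@link ActionDelegate} and is notified by the view about user input.
 * <p>
 * A minimal sketch of the presenter side (hypothetical class name; {@code setDelegate}
 * is assumed to come from the generic {@code View} contract):
 * <pre>
 * class ZipImporterPagePresenter implements ZipImporterPageView.ActionDelegate {
 *     ZipImporterPagePresenter(ZipImporterPageView view) {
 *         view.setDelegate(this);
 *     }
 *     public void projectNameChanged(String name) { }
 *     public void projectUrlChanged(String url) { }
 *     public void projectDescriptionChanged(String description) { }
 *     public void projectVisibilityChanged(boolean visible) { }
 *     public void skipFirstLevelChanged(boolean isSkipFirstLevel) { }
 * }
 * </pre>
 *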
* @author Roman Nikitenko
*/
@ImplementedBy(ZipImporterPageViewImpl.class)
public interface ZipImporterPageView extends View<ZipImporterPageView.ActionDelegate> {
public interface ActionDelegate {
/** Performs any actions appropriate in response to the user having changed the project's name. */
void projectNameChanged(@NotNull String name);
/** Performs any actions appropriate in response to the user having changed the project's URL. */
void projectUrlChanged(@NotNull String url);
/** Performs any actions appropriate in response to the user having changed the project's description. */
void projectDescriptionChanged(@NotNull String projectDescriptionValue);
/** Performs any actions appropriate in response to the user having changed the project's visibility. */
void projectVisibilityChanged(boolean visible);
        /** Performs any actions appropriate in response to the user having toggled the skip-first-level option. */
void skipFirstLevelChanged(boolean isSkipFirstLevel);
}
/** Show the name error. */
void showNameError();
/** Hide the name error. */
void hideNameError();
/** Show URL error. */
void showUrlError(@NotNull String message);
/** Hide URL error. */
void hideUrlError();
/**
* Set the project's URL.
*
* @param url
* the project's URL to set
*/
void setProjectUrl(@NotNull String url);
/**
* Get the project's name value.
*
* @return {@link String} project's name
*/
@NotNull
String getProjectName();
/**
* Set the project's name value.
*
* @param projectName
* project's name to set
*/
void setProjectName(@NotNull String projectName);
void setProjectDescription(@NotNull String projectDescription);
/** Give focus to project's URL input. */
void focusInUrlInput();
/**
* Set the enable state of the inputs.
*
* @param isEnabled
* <code>true</code> if enabled, <code>false</code> if disabled
*/
void setInputsEnableState(boolean isEnabled);
    /** Returns whether the user has selected to skip the first level of the archive. */
boolean isSkipFirstLevelSelected();
void setSkipFirstLevel(boolean skip);
void setVisibility(boolean visible);
}
| Ori-Libhaber/che-core | ide/che-core-ide-app/src/main/java/org/eclipse/che/ide/projectimport/zip/ZipImporterPageView.java | Java | epl-1.0 | 3,044 |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.wst.jsdt.internal.compiler.ast;
import org.eclipse.wst.jsdt.core.ast.IASTNode;
import org.eclipse.wst.jsdt.core.ast.ISingleTypeReference;
import org.eclipse.wst.jsdt.internal.compiler.ASTVisitor;
import org.eclipse.wst.jsdt.internal.compiler.lookup.BlockScope;
import org.eclipse.wst.jsdt.internal.compiler.lookup.ClassScope;
import org.eclipse.wst.jsdt.internal.compiler.lookup.ReferenceBinding;
import org.eclipse.wst.jsdt.internal.compiler.lookup.Scope;
import org.eclipse.wst.jsdt.internal.compiler.lookup.TypeBinding;
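/**
 * A type reference made of a single identifier, for example {@code Foo}, as opposed to a
 * qualified reference such as {@code a.b.Foo}.
 */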
public class SingleTypeReference extends TypeReference implements ISingleTypeReference {
public char[] token;
public SingleTypeReference(char[] source, long pos) {
token = source;
sourceStart = (int) (pos>>>32) ;
sourceEnd = (int) (pos & 0x00000000FFFFFFFFL) ;
}
public TypeReference copyDims(int dim){
//return a type reference copy of me with some dimensions
//warning : the new type ref has a null binding
return new ArrayTypeReference(token, dim,(((long)sourceStart)<<32)+sourceEnd);
}
public char[] getLastToken() {
return this.token;
}
protected TypeBinding getTypeBinding(Scope scope) {
if (this.resolvedType != null)
return this.resolvedType;
this.resolvedType = scope.getType(token);
if (scope.kind == Scope.CLASS_SCOPE && this.resolvedType.isValidBinding())
if (((ClassScope) scope).detectHierarchyCycle(this.resolvedType, this))
return null;
return this.resolvedType;
}
public char [][] getTypeName() {
return new char[][] { token };
}
public StringBuffer printExpression(int indent, StringBuffer output){
return output.append(token);
}
public TypeBinding resolveTypeEnclosing(BlockScope scope, ReferenceBinding enclosingType) {
TypeBinding memberType = scope.getMemberType(token, enclosingType);
if (!memberType.isValidBinding()) {
this.resolvedType = memberType;
return null;
}
if (isTypeUseDeprecated(memberType, scope))
scope.problemReporter().deprecatedType(memberType, this);
return this.resolvedType = memberType;
}
public void traverse(ASTVisitor visitor, BlockScope scope) {
visitor.visit(this, scope);
visitor.endVisit(this, scope);
}
public void traverse(ASTVisitor visitor, ClassScope scope) {
visitor.visit(this, scope);
visitor.endVisit(this, scope);
}
public int getASTType() {
return IASTNode.SINGLE_TYPE_REFERENCE;
}
}
| boniatillo-com/PhaserEditor | source/thirdparty/jsdt/org.eclipse.wst.jsdt.core/src/org/eclipse/wst/jsdt/internal/compiler/ast/SingleTypeReference.java | Java | epl-1.0 | 2,941 |