Column statistics: method (string, lengths 13-441k), clean_method (string, 7-313k), doc (string, 17-17.3k), comment (string, 3-1.42k), method_name (string, 1-273), extra (dict), imports (sequence), imports_info (string, 19-34.8k), cluster_imports_info (string, 15-3.66k), libraries (sequence), libraries_info (string, 6-661), id (int64, 0-2.92M).

method | clean_method | doc | comment | method_name | extra | imports | imports_info | cluster_imports_info | libraries | libraries_info | id
---|---|---|---|---|---|---|---|---|---|---|---|
File file(); | File file(); | /**
* Obtains the configuration file which stores the data
* held in the config.
*
* @return the config as a file
*/ | Obtains the configuration file which stores the data held in the config | file | {
"repo_name": "nickrobson/TridentSDK",
"path": "src/main/java/net/tridentsdk/config/Config.java",
"license": "apache-2.0",
"size": 3195
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 1,545,709 |
public static SimpleToken nextToken(String expression, int index, boolean allowEscape, TokenType... filter) {
return doNextToken(expression, index, allowEscape, filter);
} | static SimpleToken function(String expression, int index, boolean allowEscape, TokenType... filter) { return doNextToken(expression, index, allowEscape, filter); } | /**
* Create the next token
*
* @param expression the input expression
* @param index the current index
* @param allowEscape whether to allow escapes
* @param filter defines the accepted token types to be returned (character is always used as fallback)
* @return the created token, will always return a token
*/ | Create the next token | nextToken | {
"repo_name": "sabre1041/camel",
"path": "camel-core/src/main/java/org/apache/camel/language/simple/SimpleTokenizer.java",
"license": "apache-2.0",
"size": 12766
} | [
"org.apache.camel.language.simple.types.SimpleToken",
"org.apache.camel.language.simple.types.TokenType"
] | import org.apache.camel.language.simple.types.SimpleToken; import org.apache.camel.language.simple.types.TokenType; | import org.apache.camel.language.simple.types.*; | [
"org.apache.camel"
] | org.apache.camel; | 1,097,585 |
public IFile getFile(IPath path) {
return this.workspace.getRoot().getFile(path);
} | IFile function(IPath path) { return this.workspace.getRoot().getFile(path); } | /** Replies the file with the given name.
*
* @param path the name of the file.
* @return the file.
*/ | Replies the file with the given name | getFile | {
"repo_name": "jgfoster/sarl",
"path": "tests/io.sarl.tests.api.ui/src/io/sarl/tests/api/WorkbenchTestHelper.java",
"license": "apache-2.0",
"size": 41142
} | [
"org.eclipse.core.resources.IFile",
"org.eclipse.core.runtime.IPath"
] | import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.IPath; | import org.eclipse.core.resources.*; import org.eclipse.core.runtime.*; | [
"org.eclipse.core"
] | org.eclipse.core; | 329,565 |
void onRendererClosedMojoConnection();
}
public PaymentRequestService(RenderFrameHost renderFrameHost,
@Nullable PaymentRequestClient client, Runnable onClosedListener, Delegate delegate,
Supplier<PaymentAppServiceBridge> paymentAppServiceBridgeSupplier) {
assert renderFrameHost != null;
assert onClosedListener != null;
assert delegate != null;
mRenderFrameHost = renderFrameHost;
mClient = client;
mOnClosedListener = onClosedListener;
mDelegate = delegate;
mHasClosed = false;
mPaymentAppServiceBridgeSupplier = paymentAppServiceBridgeSupplier;
} | void onRendererClosedMojoConnection(); } public PaymentRequestService(RenderFrameHost renderFrameHost, @Nullable PaymentRequestClient client, Runnable onClosedListener, Delegate delegate, Supplier<PaymentAppServiceBridge> paymentAppServiceBridgeSupplier) { assert renderFrameHost != null; assert onClosedListener != null; assert delegate != null; mRenderFrameHost = renderFrameHost; mClient = client; mOnClosedListener = onClosedListener; mDelegate = delegate; mHasClosed = false; mPaymentAppServiceBridgeSupplier = paymentAppServiceBridgeSupplier; } | /**
* Called when the renderer is closing the mojo connection (e.g. upon show promise
* rejection).
*/ | Called when the renderer is closing the mojo connection (e.g. upon show promise rejection) | onRendererClosedMojoConnection | {
"repo_name": "scheib/chromium",
"path": "components/payments/content/android/java/src/org/chromium/components/payments/PaymentRequestService.java",
"license": "bsd-3-clause",
"size": 80595
} | [
"androidx.annotation.Nullable",
"org.chromium.base.supplier.Supplier",
"org.chromium.content_public.browser.RenderFrameHost",
"org.chromium.payments.mojom.PaymentRequestClient"
] | import androidx.annotation.Nullable; import org.chromium.base.supplier.Supplier; import org.chromium.content_public.browser.RenderFrameHost; import org.chromium.payments.mojom.PaymentRequestClient; | import androidx.annotation.*; import org.chromium.base.supplier.*; import org.chromium.content_public.browser.*; import org.chromium.payments.mojom.*; | [
"androidx.annotation",
"org.chromium.base",
"org.chromium.content_public",
"org.chromium.payments"
] | androidx.annotation; org.chromium.base; org.chromium.content_public; org.chromium.payments; | 2,761,242 |
public GoogleApiClient.Builder createApiClientBuilder()
{
if (mSetupDone)
{
String error = "GameHelper: you called GameHelper.createApiClientBuilder() after "
+ "calling setup. You can only get a client builder BEFORE performing setup.";
logError(error);
throw new IllegalStateException(error);
}
GoogleApiClient.Builder builder = new GoogleApiClient.Builder(mActivity, this, this);
if (0 != (mRequestedClients & CLIENT_GAMES))
{
builder.addApi(Games.API, mGamesApiOptions);
builder.addScope(Games.SCOPE_GAMES);
}
if (0 != (mRequestedClients & CLIENT_PLUS))
{
builder.addApi(Plus.API);
builder.addScope(Plus.SCOPE_PLUS_LOGIN);
}
if (0 != (mRequestedClients & CLIENT_APPSTATE))
{
builder.addApi(AppStateManager.API);
builder.addScope(AppStateManager.SCOPE_APP_STATE);
}
if (0 != (mRequestedClients & CLIENT_SNAPSHOT))
{
builder.addScope(Drive.SCOPE_APPFOLDER);
builder.addApi(Drive.API);
}
mGoogleApiClientBuilder = builder;
return builder;
} | GoogleApiClient.Builder function() { if (mSetupDone) { String error = STR + STR; logError(error); throw new IllegalStateException(error); } GoogleApiClient.Builder builder = new GoogleApiClient.Builder(mActivity, this, this); if (0 != (mRequestedClients & CLIENT_GAMES)) { builder.addApi(Games.API, mGamesApiOptions); builder.addScope(Games.SCOPE_GAMES); } if (0 != (mRequestedClients & CLIENT_PLUS)) { builder.addApi(Plus.API); builder.addScope(Plus.SCOPE_PLUS_LOGIN); } if (0 != (mRequestedClients & CLIENT_APPSTATE)) { builder.addApi(AppStateManager.API); builder.addScope(AppStateManager.SCOPE_APP_STATE); } if (0 != (mRequestedClients & CLIENT_SNAPSHOT)) { builder.addScope(Drive.SCOPE_APPFOLDER); builder.addApi(Drive.API); } mGoogleApiClientBuilder = builder; return builder; } | /**
* Creates a GoogleApiClient.Builder for use with @link{#setup}. Normally,
* you do not have to do this; use this method only if you need to make
* nonstandard setup (e.g. adding extra scopes for other APIs) on the
* GoogleApiClient.Builder before calling @link{#setup}.
*/ | Creates a GoogleApiClient.Builder for use with @link{#setup}. Normally, you do not have to do this; use this method only if you need to make nonstandard setup (e.g. adding extra scopes for other APIs) on the GoogleApiClient.Builder before calling @link{#setup} | createApiClientBuilder | {
"repo_name": "PorkyPixels/Cocos-Helper",
"path": "External Cocos Helper Android Frameworks/Frameworks/GooglePlayServices/GooglePlayServicesGameHelper.java",
"license": "mit",
"size": 41515
} | [
"com.google.android.gms.appstate.AppStateManager",
"com.google.android.gms.common.api.GoogleApiClient",
"com.google.android.gms.drive.Drive",
"com.google.android.gms.games.Games",
"com.google.android.gms.plus.Plus"
] | import com.google.android.gms.appstate.AppStateManager; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.drive.Drive; import com.google.android.gms.games.Games; import com.google.android.gms.plus.Plus; | import com.google.android.gms.appstate.*; import com.google.android.gms.common.api.*; import com.google.android.gms.drive.*; import com.google.android.gms.games.*; import com.google.android.gms.plus.*; | [
"com.google.android"
] | com.google.android; | 347,527 |
public GsonBuilder serializeNulls() {
this.serializeNulls = true;
return this;
}
/**
* Enabling this feature will only change the serialized form if the map key is
* a complex type (i.e. non-primitive) in its <strong>serialized</strong> JSON
* form. The default implementation of map serialization uses {@code toString()}
* on the key; however, when this is called then one of the following cases
* apply:
*
* <h3>Maps as JSON objects</h3>
* For this case, assume that a type adapter is registered to serialize and
* deserialize some {@code Point} class, which contains an x and y coordinate,
* to/from the JSON Primitive string value {@code "(x,y)"}. The Java map would
* then be serialized as a {@link JsonObject}.
*
* <p>Below is an example:
* <pre> {@code
* Gson gson = new GsonBuilder()
* .register(Point.class, new MyPointTypeAdapter())
* .enableComplexMapKeySerialization()
* .create();
*
* Map<Point, String> original = new LinkedHashMap<Point, String>();
* original.put(new Point(5, 6), "a");
* original.put(new Point(8, 8), "b");
* System.out.println(gson.toJson(original, type));
* }</pre>
* The above code prints this JSON object:<pre> {@code
* {
* "(5,6)": "a",
* "(8,8)": "b"
* }
* }</pre>
*
* <h3>Maps as JSON arrays</h3>
* For this case, assume that a type adapter was NOT registered for some
* {@code Point} class, but rather the default Gson serialization is applied.
* In this case, some {@code new Point(2,3)} would serialize as {@code
* {"x":2,"y":5}}.
*
* <p>Given the assumption above, a {@code Map<Point, String>} will be
* serialized as an array of arrays (can be viewed as an entry set of pairs).
*
* <p>Below is an example of serializing complex types as JSON arrays:
* <pre> {@code
* Gson gson = new GsonBuilder()
* .enableComplexMapKeySerialization()
* .create();
*
* Map<Point, String> original = new LinkedHashMap<Point, String>();
* original.put(new Point(5, 6), "a");
* original.put(new Point(8, 8), "b");
* System.out.println(gson.toJson(original, type));
* } | GsonBuilder function() { this.serializeNulls = true; return this; } /** * Enabling this feature will only change the serialized form if the map key is * a complex type (i.e. non-primitive) in its <strong>serialized</strong> JSON * form. The default implementation of map serialization uses {@code toString()} * on the key; however, when this is called then one of the following cases * apply: * * <h3>Maps as JSON objects</h3> * For this case, assume that a type adapter is registered to serialize and * deserialize some {@code Point} class, which contains an x and y coordinate, * to/from the JSON Primitive string value {@code "(x,y)"}. The Java map would * then be serialized as a {@link JsonObject}. * * <p>Below is an example: * <pre> { * Gson gson = new GsonBuilder() * .register(Point.class, new MyPointTypeAdapter()) * .enableComplexMapKeySerialization() * .create(); * * Map<Point, String> original = new LinkedHashMap<Point, String>(); * original.put(new Point(5, 6), "a"); * original.put(new Point(8, 8), "b"); * System.out.println(gson.toJson(original, type)); * }</pre> * The above code prints this JSON object:<pre> { * { * "(5,6)": "a", * "(8,8)": "b" * } * }</pre> * * <h3>Maps as JSON arrays</h3> * For this case, assume that a type adapter was NOT registered for some * {@code Point} class, but rather the default Gson serialization is applied. * In this case, some {@code new Point(2,3)} would serialize as { * {"x":2,"y":5}}. * * <p>Given the assumption above, a {@code Map<Point, String>} will be * serialize as an array of arrays (can be viewed as an entry set of pairs). * * <p>Below is an example of serializing complex types as JSON arrays: * <pre> { * Gson gson = new GsonBuilder() * .enableComplexMapKeySerialization() * .create(); * * Map<Point, String> original = new LinkedHashMap<Point, String>(); * original.put(new Point(5, 6), "a"); * original.put(new Point(8, 8), "b"); * System.out.println(gson.toJson(original, type)); * } | /**
* Configure Gson to serialize null fields. By default, Gson omits all fields that are null
* during serialization.
*
* @return a reference to this {@code GsonBuilder} object to fulfill the "Builder" pattern
* @since 1.2
*/ | Configure Gson to serialize null fields. By default, Gson omits all fields that are null during serialization | serializeNulls | {
"repo_name": "vnc-biz/zcs-lib-gson",
"path": "src/main/java/com/google/gson/GsonBuilder.java",
"license": "apache-2.0",
"size": 30535
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,895,988 |
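A brief, self-contained sketch of the behavior documented above; the `User` class and its field values are made up for illustration.

```java
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

public class SerializeNullsDemo {
    // Simple bean with a deliberately null field (illustrative only).
    static class User {
        String name = "alice";
        String email = null;
    }

    public static void main(String[] args) {
        Gson defaultGson = new Gson();
        Gson nullFriendly = new GsonBuilder().serializeNulls().create();

        System.out.println(defaultGson.toJson(new User()));   // {"name":"alice"} - null field omitted
        System.out.println(nullFriendly.toJson(new User()));  // {"name":"alice","email":null}
    }
}
```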
private int findNumberOfChannels(MidiDevice synth)
{
int number = DEFAULT_NR_OF_CHANNELS;
if (synth instanceof Synthesizer)
{
MidiChannel[] channels = ((Synthesizer)synth).getChannels();
while (number < channels.length && channels[number] != null)
{
number++;
}
}
return number;
}
| int function(MidiDevice synth) { int number = DEFAULT_NR_OF_CHANNELS; if (synth instanceof Synthesizer) { MidiChannel[] channels = ((Synthesizer)synth).getChannels(); while (number < channels.length && channels[number] != null) { number++; } } return number; } | /**
* Returns number of channels supported by synthesizer.
* @param synth
* the synthesizer device
* @return the number of channels
*/ | Returns number of channels supported by synthesizer | findNumberOfChannels | {
"repo_name": "megoldsby/intune",
"path": "src/java/intune/PlayTuned.java",
"license": "apache-2.0",
"size": 41009
} | [
"javax.sound.midi.MidiChannel",
"javax.sound.midi.MidiDevice",
"javax.sound.midi.Synthesizer"
] | import javax.sound.midi.MidiChannel; import javax.sound.midi.MidiDevice; import javax.sound.midi.Synthesizer; | import javax.sound.midi.*; | [
"javax.sound"
] | javax.sound; | 1,335,400 |
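A small runnable sketch of the same idea using only `javax.sound.midi` from the JDK; it asks the default synthesizer for its channels directly rather than going through the `PlayTuned` helper, and simply counts non-null entries.

```java
import javax.sound.midi.MidiChannel;
import javax.sound.midi.MidiDevice;
import javax.sound.midi.MidiSystem;
import javax.sound.midi.Synthesizer;

public class ChannelCountDemo {
    public static void main(String[] args) throws Exception {
        MidiDevice device = MidiSystem.getSynthesizer();
        int channels = 0;
        if (device instanceof Synthesizer) {
            // getChannels() may contain null entries for channels the synth does not support.
            for (MidiChannel ch : ((Synthesizer) device).getChannels()) {
                if (ch != null) {
                    channels++;
                }
            }
        }
        System.out.println("Synthesizer supports " + channels + " channels");
    }
}
```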
public void addToCars(final CarDto carDto) {
checkDisposed();
carDto.setManufacturer(this);
} | void function(final CarDto carDto) { checkDisposed(); carDto.setManufacturer(this); } | /**
* Adds the given carDto to this object. <p>
* Since the reference is a composition reference, the opposite reference <code>CarDto#manufacturer</code> of the <code>carDto</code> will be handled automatically and no further coding is required to keep them in sync.<p>
* See {@link CarDto#setManufacturer(CarDto)}.
*
* @param carDto - the property
* @throws RuntimeException if instance is <code>disposed</code>
*
*/ | Adds the given carDto to this object. Since the reference is a composition reference, the opposite reference <code>CarDto#manufacturer</code> of the <code>carDto</code> will be handled automatically and no further coding is required to keep them in sync. See <code>CarDto#setManufacturer(CarDto)</code> | addToCars | {
"repo_name": "lunifera/lunifera-dsl",
"path": "org.lunifera.dsl.entity.xtext.tests/src-gen/org/lunifera/dsl/entity/xtext/tests/model/testcarstore2/ManufacturerDto.java",
"license": "epl-1.0",
"size": 6800
} | [
"org.lunifera.dsl.entity.xtext.tests.model.testcarstore2.CarDto"
] | import org.lunifera.dsl.entity.xtext.tests.model.testcarstore2.CarDto; | import org.lunifera.dsl.entity.xtext.tests.model.testcarstore2.*; | [
"org.lunifera.dsl"
] | org.lunifera.dsl; | 783,741 |
@Override
public void add(String q) throws BestQueryException, SQLException {
Candidate c = new Candidate(q);
add(c);
} | void function(String q) throws BestQueryException, SQLException { Candidate c = new Candidate(q); add(c); } | /**
* Allows a Query to be added to this tracker.
*
* @param q a query String to be added to the tracker
* @throws BestQueryException if the current best Query is the best we think we are going to get
* @throws SQLException if error occurs in the underlying database
*/ | Allows a Query to be added to this tracker | add | {
"repo_name": "joshkh/intermine",
"path": "intermine/objectstore/main/src/org/intermine/sql/precompute/BestQueryExplainer.java",
"license": "lgpl-2.1",
"size": 12000
} | [
"java.sql.SQLException"
] | import java.sql.SQLException; | import java.sql.*; | [
"java.sql"
] | java.sql; | 2,263,065 |
protected String[] getRunCommand(String command, String groupId,
String userName, Path pidFile, Configuration config, Resource resource) {
if (Shell.WINDOWS) {
return getRunCommandForWindows(command, groupId, userName, pidFile,
config, resource);
} else {
return getRunCommandForOther(command, config);
}
} | String[] function(String command, String groupId, String userName, Path pidFile, Configuration config, Resource resource) { if (Shell.WINDOWS) { return getRunCommandForWindows(command, groupId, userName, pidFile, config, resource); } else { return getRunCommandForOther(command, config); } } | /**
* Return a command line to execute the given command in the OS shell.
* On Windows, the {@code groupId} parameter can be used to launch
* and associate the given GID with a process group. On
* non-Windows hosts, the {@code groupId} parameter is ignored.
*
* @param command the command to execute
* @param groupId the job owner's GID for Windows. On other operating systems
* it is ignored.
* @param userName the job owner's username for Windows. On other operating
* systems it is ignored.
* @param pidFile the path to the container's PID file on Windows. On other
* operating systems it is ignored.
* @param config the configuration
* @param resource on Windows this parameter controls memory and CPU limits.
* If null, no limits are set. On other operating systems it is ignored.
* @return the command line to execute
*/ | Return a command line to execute the given command in the OS shell. On Windows, the {code}groupId{code} parameter can be used to launch and associate the given GID with a process group. On non-Windows hosts, the {code}groupId{code} parameter is ignored | getRunCommand | {
"repo_name": "ChetnaChaudhari/hadoop",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java",
"license": "apache-2.0",
"size": 30709
} | [
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.util.Shell",
"org.apache.hadoop.yarn.api.records.Resource"
] | import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.Shell; import org.apache.hadoop.yarn.api.records.Resource; | import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.util.*; import org.apache.hadoop.yarn.api.records.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 673,768 |
public void addCollectionCount(String key, String description, Object param,
String anchor) {
if (param instanceof PathQuery) {
try {
storage.put(key, createWrapper("integer", executor.count((PathQuery)
param), anchor, description, null));
} catch (ObjectStoreException e) {
LOG.error("Problem running PathQuery " + e.toString());
}
} else if (param instanceof String) {
Collection<?> coll = null;
try {
coll = (Collection<?>) imObj.getFieldValue(param.toString());
if (coll != null) {
storage.put(key, createWrapper("integer", coll.size(), anchor,
description, null));
}
} catch (IllegalAccessException e) {
LOG.error("The field " + param + " does not exist");
}
} else {
storage.put(key, createWrapper("unknown", param, anchor, description, null));
}
} | void function(String key, String description, Object param, String anchor) { if (param instanceof PathQuery) { try { storage.put(key, createWrapper(STR, executor.count((PathQuery) param), anchor, description, null)); } catch (ObjectStoreException e) { LOG.error(STR + e.toString()); } } else if (param instanceof String) { Collection<?> coll = null; try { coll = (Collection<?>) imObj.getFieldValue(param.toString()); if (coll != null) { storage.put(key, createWrapper(STR, coll.size(), anchor, description, null)); } } catch (IllegalAccessException e) { LOG.error(STR + param + STR); } } else { storage.put(key, createWrapper(STR, param, anchor, description, null)); } } | /**
* Add collection count to the summary.
* @param key to show under in the summary
* @param description to show under the title
* @param param can be a fieldName or a PathQuery
* @param anchor says where we will scroll onlick, an ID attr of the target element
*/ | Add collection count to the summary | addCollectionCount | {
"repo_name": "julie-sullivan/phytomine",
"path": "bio/webapp/src/org/intermine/bio/web/displayer/MetabolicGeneSummaryDisplayer.java",
"license": "lgpl-2.1",
"size": 15458
} | [
"java.util.Collection",
"org.intermine.objectstore.ObjectStoreException",
"org.intermine.pathquery.PathQuery"
] | import java.util.Collection; import org.intermine.objectstore.ObjectStoreException; import org.intermine.pathquery.PathQuery; | import java.util.*; import org.intermine.objectstore.*; import org.intermine.pathquery.*; | [
"java.util",
"org.intermine.objectstore",
"org.intermine.pathquery"
] | java.util; org.intermine.objectstore; org.intermine.pathquery; | 1,289,309 |
protected GroundedSingleAction translateAction(GroundedSingleAction a, Map <String,String> matching){
String [] newParams = new String[a.params.length];
for(int i = 0; i < a.params.length; i++){
newParams[i] = matching.get(a.params[i]);
}
return new GroundedSingleAction(worldAgentName, a.action, newParams);
} | GroundedSingleAction function(GroundedSingleAction a, Map <String,String> matching){ String [] newParams = new String[a.params.length]; for(int i = 0; i < a.params.length; i++){ newParams[i] = matching.get(a.params[i]); } return new GroundedSingleAction(worldAgentName, a.action, newParams); } | /**
* Takes an input action and mapping objects in the source state for the action to objects in another state
* and returns an action with its object parameters mapped to the matched objects.
* @param a the input action
* @param matching the matching between objects from the source state in which the action was generated to objects in another state.
* @return an action with its object parameters mapped according to the state object matching.
*/ | Takes an input action and mapping objects in the source state for the action to objects in another state and returns an action with its object parameters mapped to the matched objects | translateAction | {
"repo_name": "bhilliard/burlap",
"path": "src/burlap/behavior/stochasticgame/agents/naiveq/SGNaiveQLAgent.java",
"license": "lgpl-3.0",
"size": 11755
} | [
"burlap.oomdp.stochasticgames.GroundedSingleAction",
"java.util.Map"
] | import burlap.oomdp.stochasticgames.GroundedSingleAction; import java.util.Map; | import burlap.oomdp.stochasticgames.*; import java.util.*; | [
"burlap.oomdp.stochasticgames",
"java.util"
] | burlap.oomdp.stochasticgames; java.util; | 1,179,015 |
public Header nextHeader()
throws NoSuchElementException {
final int current = this.currentIndex;
if (current < 0) {
throw new NoSuchElementException("Iteration already finished.");
}
this.currentIndex = findNext(current);
return this.allHeaders[current];
} | Header function() throws NoSuchElementException { final int current = this.currentIndex; if (current < 0) { throw new NoSuchElementException(STR); } this.currentIndex = findNext(current); return this.allHeaders[current]; } | /**
* Obtains the next header from this iteration.
*
* @return the next header in this iteration
*
* @throws NoSuchElementException if there are no more headers
*/ | Obtains the next header from this iteration | nextHeader | {
"repo_name": "alinvasile/httpcore",
"path": "httpcore/src/main/java/org/apache/http/message/BasicHeaderIterator.java",
"license": "apache-2.0",
"size": 5167
} | [
"java.util.NoSuchElementException",
"org.apache.http.Header"
] | import java.util.NoSuchElementException; import org.apache.http.Header; | import java.util.*; import org.apache.http.*; | [
"java.util",
"org.apache.http"
] | java.util; org.apache.http; | 189,515 |
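A minimal sketch of iterating headers with HttpCore's `BasicHeaderIterator`, assuming the 4.x `org.apache.http` API shown in the imports above; the header values are made up.

```java
import org.apache.http.Header;
import org.apache.http.HeaderIterator;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicHeaderIterator;

public class HeaderIterationDemo {
    public static void main(String[] args) {
        Header[] headers = {
            new BasicHeader("Content-Type", "application/json"),
            new BasicHeader("Accept", "application/json")
        };
        // Passing null as the name iterates over all headers.
        HeaderIterator it = new BasicHeaderIterator(headers, null);
        while (it.hasNext()) {
            Header h = it.nextHeader();   // throws NoSuchElementException once exhausted
            System.out.println(h.getName() + ": " + h.getValue());
        }
    }
}
```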
public Append add(byte [] family, byte [] qualifier, byte [] value) {
KeyValue kv = new KeyValue(this.row, family, qualifier, this.ts, KeyValue.Type.Put, value);
return add(kv);
} | Append function(byte [] family, byte [] qualifier, byte [] value) { KeyValue kv = new KeyValue(this.row, family, qualifier, this.ts, KeyValue.Type.Put, value); return add(kv); } | /**
* Add the specified column and value to this Append operation.
* @param family family name
* @param qualifier column qualifier
* @param value value to append to specified column
* @return this
*/ | Add the specified column and value to this Append operation | add | {
"repo_name": "Jackygq1982/hbase_src",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java",
"license": "apache-2.0",
"size": 4440
} | [
"org.apache.hadoop.hbase.KeyValue"
] | import org.apache.hadoop.hbase.KeyValue; | import org.apache.hadoop.hbase.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,803,308 |
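A short construction-only sketch of the documented call, assuming the classic HBase client API (`Bytes` utility, `Append(byte[] row)` constructor); submitting the mutation would additionally require a live cluster and a table handle.

```java
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendDemo {
    public static void main(String[] args) {
        // Build an Append that appends "-suffix" to the cf:col cell of row "row1".
        Append append = new Append(Bytes.toBytes("row1"));
        append.add(Bytes.toBytes("cf"), Bytes.toBytes("col"), Bytes.toBytes("-suffix"));
        System.out.println("Append targets row: " + Bytes.toString(append.getRow()));
        // With a live cluster this would be sent via the table's append(...) call.
    }
}
```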
List<AndesBinding> getBindingsStoredForExchange(String exchangeName) throws AndesException; | List<AndesBinding> getBindingsStoredForExchange(String exchangeName) throws AndesException; | /**
* Get bindings stored for some exchange.
*
* @return a list of bindings belonging to the exchange
* @throws AndesException
*/ | Get bindings stored for some exchange | getBindingsStoredForExchange | {
"repo_name": "Asitha/andes",
"path": "modules/andes-core/broker/src/main/java/org/wso2/andes/kernel/AndesContextStore.java",
"license": "apache-2.0",
"size": 14259
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,187,053 |
public static void initTable(final AmazonDynamoDB dynamoClient, final String dynamoTable,
final long readCapacity, final long writeCapacity,
List<AttributeDefinition> attributes, List<KeySchemaElement> keySchema,
final Collection<GlobalSecondaryIndex> gsi) throws Exception {
try {
DescribeTableResult res = safeDescribeTable(dynamoClient, dynamoTable);
if (!res.getTable().getTableStatus().equals("ACTIVE")) {
waitForTableActive(dynamoClient, dynamoTable);
}
} catch (ResourceInUseException r) {
waitForTableActive(dynamoClient, dynamoTable);
} catch (ResourceNotFoundException e) {
LOG.info(String.format(
"Table %s Not Found - Creating with %s Reads/sec & %s Writes/sec", dynamoTable,
readCapacity, writeCapacity));
CreateTableRequest createTableRequest = new CreateTableRequest().withTableName(
dynamoTable).withProvisionedThroughput(
new ProvisionedThroughput().withReadCapacityUnits(readCapacity).withWriteCapacityUnits(
writeCapacity)).withKeySchema(keySchema).withAttributeDefinitions(
attributes);
if (gsi != null)
createTableRequest.withGlobalSecondaryIndexes(gsi);
// create the table
try {
safeCreateTable(dynamoClient, createTableRequest);
} catch (Exception ex) {
LOG.error(ex);
throw e;
}
// wait for it to go to active state
waitForTableActive(dynamoClient, dynamoTable);
}
} | static void function(final AmazonDynamoDB dynamoClient, final String dynamoTable, final long readCapacity, final long writeCapacity, List<AttributeDefinition> attributes, List<KeySchemaElement> keySchema, final Collection<GlobalSecondaryIndex> gsi) throws Exception { try { DescribeTableResult res = safeDescribeTable(dynamoClient, dynamoTable); if (!res.getTable().getTableStatus().equals(STR)) { waitForTableActive(dynamoClient, dynamoTable); } } catch (ResourceInUseException r) { waitForTableActive(dynamoClient, dynamoTable); } catch (ResourceNotFoundException e) { LOG.info(String.format( STR, dynamoTable, readCapacity, writeCapacity)); CreateTableRequest createTableRequest = new CreateTableRequest().withTableName( dynamoTable).withProvisionedThroughput( new ProvisionedThroughput().withReadCapacityUnits(readCapacity).withWriteCapacityUnits( writeCapacity)).withKeySchema(keySchema).withAttributeDefinitions( attributes); if (gsi != null) createTableRequest.withGlobalSecondaryIndexes(gsi); try { safeCreateTable(dynamoClient, createTableRequest); } catch (Exception ex) { LOG.error(ex); throw e; } waitForTableActive(dynamoClient, dynamoTable); } } | /**
* Creates a table in Dynamo DB with the requested read and write capacity,
* attributes, key schema and GSI's. This method will block until the table
* is Active in Dynamo DB.
*
* @param dynamoClient Dynamo DB Client to use for connection to Dynamo DB.
* @param dynamoTable The table name to create in Dynamo DB.
* @param readCapacity The requested amount of read IOPS to be provisioned.
* @param writeCapacity The requested amount of write IOPS to be
* provisioned.
* @param attributes Attribute Names which must be indicated to create the
* key schema and/or GSI's.
* @param keySchema The keys used for the primary key of the table.
* @param gsi List of Global Secondary Indexes to be created on the table
* @throws Exception
*/ | Creates a table in Dynamo DB with the requested read and write capacity, attributes, key schema and GSI's. This method will block until the table is Active in Dynamo DB | initTable | {
"repo_name": "workanandr/amazon-kinesis-aggregators",
"path": "src/com/amazonaws/services/kinesis/aggregators/datastore/DynamoUtils.java",
"license": "apache-2.0",
"size": 17028
} | [
"com.amazonaws.services.dynamodbv2.AmazonDynamoDB",
"com.amazonaws.services.dynamodbv2.model.AttributeDefinition",
"com.amazonaws.services.dynamodbv2.model.CreateTableRequest",
"com.amazonaws.services.dynamodbv2.model.DescribeTableResult",
"com.amazonaws.services.dynamodbv2.model.GlobalSecondaryIndex",
"com.amazonaws.services.dynamodbv2.model.KeySchemaElement",
"com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput",
"com.amazonaws.services.dynamodbv2.model.ResourceInUseException",
"com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException",
"java.util.Collection",
"java.util.List"
] | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB; import com.amazonaws.services.dynamodbv2.model.AttributeDefinition; import com.amazonaws.services.dynamodbv2.model.CreateTableRequest; import com.amazonaws.services.dynamodbv2.model.DescribeTableResult; import com.amazonaws.services.dynamodbv2.model.GlobalSecondaryIndex; import com.amazonaws.services.dynamodbv2.model.KeySchemaElement; import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput; import com.amazonaws.services.dynamodbv2.model.ResourceInUseException; import com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException; import java.util.Collection; import java.util.List; | import com.amazonaws.services.dynamodbv2.*; import com.amazonaws.services.dynamodbv2.model.*; import java.util.*; | [
"com.amazonaws.services",
"java.util"
] | com.amazonaws.services; java.util; | 563,039 |
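A hedged invocation sketch for the utility above; the client construction, table name, key attribute, and throughput figures are all hypothetical, and the call blocks until the table reaches ACTIVE.

```java
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
import com.amazonaws.services.dynamodbv2.model.KeyType;
import com.amazonaws.services.dynamodbv2.model.ScalarAttributeType;

import java.util.Arrays;
import java.util.List;

public class InitTableDemo {
    public static void main(String[] args) throws Exception {
        // Default client; assumes credentials and region come from the environment.
        AmazonDynamoDB client = AmazonDynamoDBClientBuilder.defaultClient();

        // Single hash key named "aggregateKey" (hypothetical schema).
        List<AttributeDefinition> attributes = Arrays.asList(
                new AttributeDefinition().withAttributeName("aggregateKey")
                        .withAttributeType(ScalarAttributeType.S));
        List<KeySchemaElement> keySchema = Arrays.asList(
                new KeySchemaElement().withAttributeName("aggregateKey")
                        .withKeyType(KeyType.HASH));

        // No global secondary indexes; 10 read / 5 write capacity units (illustrative values).
        DynamoUtils.initTable(client, "my-aggregates", 10L, 5L, attributes, keySchema, null);
    }
}
```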
@SuppressWarnings("unchecked")
private void publishAggregateValues(SuperStepContext context) {
for (Entry<String, AggregateValue> entry : this.aggregateResults.entrySet()) {
context.addAggregateValues(entry.getKey(), entry.getValue());
}
} | @SuppressWarnings(STR) void function(SuperStepContext context) { for (Entry<String, AggregateValue> entry : this.aggregateResults.entrySet()) { context.addAggregateValues(entry.getKey(), entry.getValue()); } } | /**
* To publish the aggregate values into the super step context for the
* bsp.initBeforeSuperStep for the next super step.
*
* @param context
* SuperStepContext
*/ | To publish the aggregate values into the super step context for the bsp.initBeforeSuperStep for the next super step | publishAggregateValues | {
"repo_name": "LiuJianan/Graduate-Graph",
"path": "src/java/com/chinamobile/bcbsp/bspstaff/BSPStaff.java",
"license": "apache-2.0",
"size": 138171
} | [
"com.chinamobile.bcbsp.api.AggregateValue",
"java.util.Map"
] | import com.chinamobile.bcbsp.api.AggregateValue; import java.util.Map; | import com.chinamobile.bcbsp.api.*; import java.util.*; | [
"com.chinamobile.bcbsp",
"java.util"
] | com.chinamobile.bcbsp; java.util; | 1,144,037 |
private static int estimateCallCost(Node fnNode, boolean referencesThis) {
Node argsNode = NodeUtil.getFnParameters(fnNode);
int numArgs = argsNode.getChildCount();
int callCost = NAME_COST_ESTIMATE + PAREN_COST;
if (numArgs > 0) {
callCost += (numArgs * NAME_COST_ESTIMATE) + ((numArgs - 1) * COMMA_COST);
}
if (referencesThis) {
// TODO(johnlenz): Update this if we start supporting inlining
// other functions that reference this.
// The only functions that reference this that are currently inlined
// are those that are called via ".call" with an explicit "this".
callCost += 5 + 5; // ".call" + "this,"
}
return callCost;
} | static int function(Node fnNode, boolean referencesThis) { Node argsNode = NodeUtil.getFnParameters(fnNode); int numArgs = argsNode.getChildCount(); int callCost = NAME_COST_ESTIMATE + PAREN_COST; if (numArgs > 0) { callCost += (numArgs * NAME_COST_ESTIMATE) + ((numArgs - 1) * COMMA_COST); } if (referencesThis) { callCost += 5 + 5; } return callCost; } | /**
* Gets an estimate of the cost in characters of making the function call:
* the sum of the identifiers and the separators.
* @param referencesThis
*/ | Gets an estimate of the cost in characters of making the function call: the sum of the identifiers and the separators | estimateCallCost | {
"repo_name": "jayli/kissy",
"path": "tools/module-compiler/src/com/google/javascript/jscomp/FunctionInjector.java",
"license": "mit",
"size": 30544
} | [
"com.google.javascript.rhino.Node"
] | import com.google.javascript.rhino.Node; | import com.google.javascript.rhino.*; | [
"com.google.javascript"
] | com.google.javascript; | 714,575 |
public Collection<P> getList(NodeRef nodeRef, Set<QName> classQNames, QName assocTypeQName)
{
checkAssocType(assocTypeQName);
Collection<P> policies = new HashSet<P>();
for (QName classQName : classQNames)
{
P policy = factory.create(new ClassFeatureBehaviourBinding(dictionary, nodeRef, classQName, assocTypeQName));
if (policy instanceof PolicyList)
{
policies.addAll(((PolicyList<P>)policy).getPolicies());
}
else
{
policies.add(policy);
}
}
return policies;
} | Collection<P> function(NodeRef nodeRef, Set<QName> classQNames, QName assocTypeQName) { checkAssocType(assocTypeQName); Collection<P> policies = new HashSet<P>(); for (QName classQName : classQNames) { P policy = factory.create(new ClassFeatureBehaviourBinding(dictionary, nodeRef, classQName, assocTypeQName)); if (policy instanceof PolicyList) { policies.addAll(((PolicyList<P>)policy).getPolicies()); } else { policies.add(policy); } } return policies; } | /**
* Gets the <tt>Policy</tt> instances for all the given Classes and Associations
*
* @param nodeRef the node reference
* @param classQNames the class qualified names
* @param assocTypeQName the association type qualified name
* @return Return the policies
*/ | Gets the Policy instances for all the given Classes and Associations | getList | {
"repo_name": "nguyentienlong/community-edition",
"path": "projects/repository/source/java/org/alfresco/repo/policy/AssociationPolicyDelegate.java",
"license": "lgpl-3.0",
"size": 7683
} | [
"java.util.Collection",
"java.util.HashSet",
"java.util.Set",
"org.alfresco.service.cmr.repository.NodeRef",
"org.alfresco.service.namespace.QName"
] | import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.namespace.QName; | import java.util.*; import org.alfresco.service.cmr.repository.*; import org.alfresco.service.namespace.*; | [
"java.util",
"org.alfresco.service"
] | java.util; org.alfresco.service; | 1,081,228 |
public AssessmentGradingData getLastAssessmentGradingByAgentId(String publishedAssessmentId, String agentIdString); | AssessmentGradingData function(String publishedAssessmentId, String agentIdString); | /**
* Get the grading data for the last submission of this agent.
* @param publishedAssessmentId
* @param agentIdString
* @return
*/ | Get the grading data for the last submission of this agent | getLastAssessmentGradingByAgentId | {
"repo_name": "eemirtekin/Sakai-10.6-TR",
"path": "samigo/samigo-api/src/java/org/sakaiproject/tool/assessment/shared/api/grading/GradingServiceAPI.java",
"license": "apache-2.0",
"size": 4990
} | [
"org.sakaiproject.tool.assessment.data.dao.grading.AssessmentGradingData"
] | import org.sakaiproject.tool.assessment.data.dao.grading.AssessmentGradingData; | import org.sakaiproject.tool.assessment.data.dao.grading.*; | [
"org.sakaiproject.tool"
] | org.sakaiproject.tool; | 2,628,112 |
@BodyParser.Of(BodyParser.Json.class)
public Result getComptebyId(String loginprofil){
Compte Comptetrouve=models.utilisateur.DAOUtilisateur.comptebyid(models.utilisateur.DAOUtilisateur.idcomptebylogin(loginprofil));
try{
final ObjectMapper mapper = new ObjectMapper();
String r = mapper.writeValueAsString( Comptetrouve);
JsonNode result = Json.parse(r);
return (Result) ok(result);
}
catch(IOException e){
e.printStackTrace();
return (Result) internalServerError(e.toString());
}
} | @BodyParser.Of(BodyParser.Json.class) Result function(String loginprofil){ Compte Comptetrouve=models.utilisateur.DAOUtilisateur.comptebyid(models.utilisateur.DAOUtilisateur.idcomptebylogin(loginprofil)); try{ final ObjectMapper mapper = new ObjectMapper(); String r = mapper.writeValueAsString( Comptetrouve); JsonNode result = Json.parse(r); return (Result) ok(result); } catch(IOException e){ e.printStackTrace(); return (Result) internalServerError(e.toString()); } } | /**
* Searches for a profile
* @param idcompterecherche
* @return an account
*/ | Searches for a profile | getComptebyId | {
"repo_name": "aniela75/twitter",
"path": "app/controllers/Home.java",
"license": "mit",
"size": 2194
} | [
"com.fasterxml.jackson.databind.JsonNode",
"com.fasterxml.jackson.databind.ObjectMapper",
"java.io.IOException"
] | import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; | import com.fasterxml.jackson.databind.*; import java.io.*; | [
"com.fasterxml.jackson",
"java.io"
] | com.fasterxml.jackson; java.io; | 712,815 |
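The core of the controller above is a Jackson round-trip (object to JSON string, string back to a tree). A standalone sketch of that piece, with a made-up `Compte`-like bean, looks like this.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonRoundTripDemo {
    // Stand-in for the Compte entity (illustrative only).
    public static class Account {
        public String login = "alice";
        public int followers = 42;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = mapper.writeValueAsString(new Account()); // serialize to a JSON string
        JsonNode node = mapper.readTree(json);                  // parse back into a tree
        System.out.println(node.get("login").asText());         // alice
    }
}
```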
public DataSource unMarshalDataSourceXml() throws custom.mondrian.xmla.exception.XmlaException {
try {
DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(olapConfig));
Document doc = db.parse(is);
Element olapElement = doc.getDocumentElement();
// Elements under <olap>/<server>
Element serverElement = (Element) olapElement.getElementsByTagName("server").item(0);
String host = serverElement.getElementsByTagName("host").item(0).getTextContent();
String port = serverElement.getElementsByTagName("port").item(0).getTextContent();
String context = serverElement.getElementsByTagName("web-context").item(0).getTextContent();
String uri = "http://" + host + ":" + port + context;
String relativeCatalogPath = olapElement.getElementsByTagName("schemas-folder").item(0).getTextContent();
catalogRoot = System.getProperty("ads_olap_root") + "/xmla" + relativeCatalogPath;
Element jdbcElement = (Element) olapElement.getElementsByTagName("jdbc").item(0);
String jdbcUri = jdbcElement.getElementsByTagName("uri").item(0).getTextContent();
//insert login credential to jdbcUri
if(credential != null)
jdbcUri = jdbcUri +"?" + credential;
String jdbcDriver = jdbcElement.getElementsByTagName("driver").item(0).getTextContent();
Catalogs catalogs = new Catalogs();
for (String catalogName : getCatalogs(catalogRoot)) {
DataSourceInfo ds = new DataSourceInfo(jdbcUri, jdbcDriver);
Catalog catalog = new Catalog(catalogName, relativeCatalogPath, ds);
catalogs.addCatalog(catalog);
}
// populate DataSource
DataSource dataSource = new DataSource(catalogs, uri);
return dataSource;
} catch (ParserConfigurationException | SAXException | IOException e) {
// TODO Auto-generated catch block
throw new XmlaException("0", "0", "Fail the unmarshal datasource xml string ", null);
}
} | DataSource function() throws custom.mondrian.xmla.exception.XmlaException { try { DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder(); InputSource is = new InputSource(); is.setCharacterStream(new StringReader(olapConfig)); Document doc = db.parse(is); Element olapElement = doc.getDocumentElement(); Element serverElement = (Element) olapElement.getElementsByTagName(STR).item(0); String host = serverElement.getElementsByTagName("host").item(0).getTextContent(); String port = serverElement.getElementsByTagName("port").item(0).getTextContent(); String context = serverElement.getElementsByTagName(STR).item(0).getTextContent(); String uri = STRschemas-folderSTRads_olap_rootSTR/xmlaSTRjdbcSTRuriSTR?STRdriverSTR0STR0STRFail the unmarshal datasource xml string ", null); } } | /**
* Unmarshal the serialized DSINFO with olap.xml to entity objects: Catalogs, Catalog,
* DatasourceInfo
*
* @return data source
*
*/ | Unmarshal the serialized DSINFO with olap.xml to entity objects: Catalogs, Catalog, DatasourceInfo | unMarshalDataSourceXml | {
"repo_name": "OpenlinkFinancial/MXMLABridge",
"path": "src/custom/mondrian/xmla/dataSource/DataSourceProcessor.java",
"license": "epl-1.0",
"size": 6759
} | [
"java.io.StringReader",
"javax.xml.parsers.DocumentBuilder",
"javax.xml.parsers.DocumentBuilderFactory",
"org.w3c.dom.Document",
"org.w3c.dom.Element",
"org.xml.sax.InputSource"
] | import java.io.StringReader; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.InputSource; | import java.io.*; import javax.xml.parsers.*; import org.w3c.dom.*; import org.xml.sax.*; | [
"java.io",
"javax.xml",
"org.w3c.dom",
"org.xml.sax"
] | java.io; javax.xml; org.w3c.dom; org.xml.sax; | 605,894 |
public void batchUpdateEntityTypes(
com.google.cloud.dialogflow.v2.BatchUpdateEntityTypesRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
asyncUnaryCall(
getChannel().newCall(getBatchUpdateEntityTypesMethodHelper(), getCallOptions()),
request,
responseObserver);
} | void function( com.google.cloud.dialogflow.v2.BatchUpdateEntityTypesRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { asyncUnaryCall( getChannel().newCall(getBatchUpdateEntityTypesMethodHelper(), getCallOptions()), request, responseObserver); } | /**
*
*
* <pre>
* Updates/Creates multiple entity types in the specified agent.
* Operation <response:
* [BatchUpdateEntityTypesResponse][google.cloud.dialogflow.v2.BatchUpdateEntityTypesResponse],
* metadata: [google.protobuf.Struct][google.protobuf.Struct]>
* </pre>
*/ | <code> Updates/Creates multiple entity types in the specified agent. Operation <response: [BatchUpdateEntityTypesResponse][google.cloud.dialogflow.v2.BatchUpdateEntityTypesResponse], metadata: [google.protobuf.Struct][google.protobuf.Struct]> </code> | batchUpdateEntityTypes | {
"repo_name": "vam-google/google-cloud-java",
"path": "google-api-grpc/grpc-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/EntityTypesGrpc.java",
"license": "apache-2.0",
"size": 71064
} | [
"io.grpc.stub.ClientCalls",
"io.grpc.stub.ServerCalls"
] | import io.grpc.stub.ClientCalls; import io.grpc.stub.ServerCalls; | import io.grpc.stub.*; | [
"io.grpc.stub"
] | io.grpc.stub; | 1,158,369 |
protected double[] getUtilizationHistory() {
double[] utilizationHistory = new double[PowerVmSteady.HISTORY_LENGTH];
double hostMips = getTotalMips();
for (PowerVmSteady vm : this.<PowerVmSteady> getVmList()) {
for (int i = 0; i < vm.getUtilizationHistory().size(); i++) {
utilizationHistory[i] += vm.getUtilizationHistory().get(i) * vm.getMips() / hostMips;
}
}
return MathUtil.trimZeroTail(utilizationHistory);
}
| double[] function() { double[] utilizationHistory = new double[PowerVmSteady.HISTORY_LENGTH]; double hostMips = getTotalMips(); for (PowerVmSteady vm : this.<PowerVmSteady> getVmList()) { for (int i = 0; i < vm.getUtilizationHistory().size(); i++) { utilizationHistory[i] += vm.getUtilizationHistory().get(i) * vm.getMips() / hostMips; } } return MathUtil.trimZeroTail(utilizationHistory); } | /**
* Gets the host utilization history.
*
* @return the host utilization history
*/ | Gets the host utilization history | getUtilizationHistory | {
"repo_name": "swethapts/cloudsim",
"path": "sources/org/cloudbus/cloudsim/power/PowerHostUtilizationHistorySteady.java",
"license": "lgpl-3.0",
"size": 2683
} | [
"org.cloudbus.cloudsim.util.MathUtil"
] | import org.cloudbus.cloudsim.util.MathUtil; | import org.cloudbus.cloudsim.util.*; | [
"org.cloudbus.cloudsim"
] | org.cloudbus.cloudsim; | 2,625,074 |
public List<AdminEmailAttributes> getSentAdminEmails() {
return adminEmailsLogic.getSentAdminEmails();
} | List<AdminEmailAttributes> function() { return adminEmailsLogic.getSentAdminEmails(); } | /**
* Gets all admin emails that have been sent and not in trash bin.
*
* @see AdminEmailsLogic#getSentAdminEmails()
*/ | Gets all admin emails that have been sent and not in trash bin | getSentAdminEmails | {
"repo_name": "thenaesh/teammates",
"path": "src/main/java/teammates/logic/api/Logic.java",
"license": "gpl-2.0",
"size": 87996
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 602,719 |
public static MozuClient deleteExtendedPropertyClient(String orderId, String key) throws Exception
{
return deleteExtendedPropertyClient( orderId, key, null, null);
} | static MozuClient function(String orderId, String key) throws Exception { return deleteExtendedPropertyClient( orderId, key, null, null); } | /**
* orders-extendedproperties Delete DeleteExtendedProperty description DOCUMENT_HERE
* <p><pre><code>
* MozuClient mozuClient=DeleteExtendedPropertyClient( orderId, key);
* client.setBaseAddress(url);
* client.executeRequest();
* </code></pre></p>
* @param key
* @param orderId Unique identifier of the order.
* @return Mozu.Api.MozuClient
*/ | orders-extendedproperties Delete DeleteExtendedProperty description DOCUMENT_HERE <code><code> MozuClient mozuClient=DeleteExtendedPropertyClient( orderId, key); client.setBaseAddress(url); client.executeRequest(); </code></code> | deleteExtendedPropertyClient | {
"repo_name": "johngatti/mozu-java",
"path": "mozu-java-core/src/main/java/com/mozu/api/clients/commerce/orders/ExtendedPropertyClient.java",
"license": "mit",
"size": 17386
} | [
"com.mozu.api.MozuClient"
] | import com.mozu.api.MozuClient; | import com.mozu.api.*; | [
"com.mozu.api"
] | com.mozu.api; | 1,462,491 |
private void postLogin() throws IOException {
logger.info("PRG interface now connected");
_session.setCallback(new NormalResponseCallback());
_callback.statusChanged(ThingStatus.ONLINE, ThingStatusDetail.NONE, null);
} | void function() throws IOException { logger.info(STR); _session.setCallback(new NormalResponseCallback()); _callback.statusChanged(ThingStatus.ONLINE, ThingStatusDetail.NONE, null); } | /**
* Post successful login stuff - mark us online and refresh from the switch
*
* @throws IOException
*/ | Post successful login stuff - mark us online and refresh from the switch | postLogin | {
"repo_name": "kwave/openhab2-addons",
"path": "addons/binding/org.openhab.binding.lutron/src/main/java/org/openhab/binding/lutron/internal/grxprg/PrgProtocolHandler.java",
"license": "epl-1.0",
"size": 45437
} | [
"java.io.IOException",
"org.eclipse.smarthome.core.thing.ThingStatus",
"org.eclipse.smarthome.core.thing.ThingStatusDetail"
] | import java.io.IOException; import org.eclipse.smarthome.core.thing.ThingStatus; import org.eclipse.smarthome.core.thing.ThingStatusDetail; | import java.io.*; import org.eclipse.smarthome.core.thing.*; | [
"java.io",
"org.eclipse.smarthome"
] | java.io; org.eclipse.smarthome; | 314,282 |
@SuppressWarnings("deprecation")
public static void clientRegisterFilter(final String beanName, final int order) {
Map<Integer, Filter> orderedFilters = SpringSecurityUtils.getConfiguredOrderedFilters();
Filter oldFilter = orderedFilters.get(order);
if (oldFilter != null) {
throw new IllegalArgumentException("Cannot register filter '" + beanName + "' at position " + order + "; '"
+ oldFilter + "' is already registered in that position");
}
Filter filter = getBean(beanName);
orderedFilters.put(order, filter);
FilterChainProxy filterChain = getFilterChainProxy();
Map<RequestMatcher, List<Filter>> filterChainMap = filterChain.getFilterChainMap();
Map<RequestMatcher, List<Filter>> fixedFilterChainMap = mergeFilterChainMap(orderedFilters, filter, order,
filterChainMap);
filterChain.setFilterChainMap(fixedFilterChainMap);
LOG.trace("Client registered bean '{}' as a filter at order {}", beanName, order);
LOG.trace("Updated filter chain: {}", fixedFilterChainMap);
} | @SuppressWarnings(STR) static void function(final String beanName, final int order) { Map<Integer, Filter> orderedFilters = SpringSecurityUtils.getConfiguredOrderedFilters(); Filter oldFilter = orderedFilters.get(order); if (oldFilter != null) { throw new IllegalArgumentException(STR + beanName + STR + order + STR + oldFilter + STR); } Filter filter = getBean(beanName); orderedFilters.put(order, filter); FilterChainProxy filterChain = getFilterChainProxy(); Map<RequestMatcher, List<Filter>> filterChainMap = filterChain.getFilterChainMap(); Map<RequestMatcher, List<Filter>> fixedFilterChainMap = mergeFilterChainMap(orderedFilters, filter, order, filterChainMap); filterChain.setFilterChainMap(fixedFilterChainMap); LOG.trace(STR, beanName, order); LOG.trace(STR, fixedFilterChainMap); } | /**
* Register a filter in a specified position in the chain.
* <p/>
* Note - this is for use in application code after the plugin has initialized,
* e.g. in BootStrap where you want to register a custom filter in the correct
* order without dealing with the existing configured filters.
*
* @param beanName the Spring bean name of the filter
* @param order the position (see {@link SecurityFilterPosition})
*/ | Register a filter in a specified position in the chain. Note - this is for use in application code after the plugin has initialized, e.g. in BootStrap where you want to register a custom filter in the correct order without dealing with the existing configured filters | clientRegisterFilter | {
"repo_name": "puaykai/noodles",
"path": "target/work/plugins/spring-security-core-2.0.0/src/java/grails/plugin/springsecurity/SpringSecurityUtils.java",
"license": "mit",
"size": 27888
} | [
"java.util.List",
"java.util.Map",
"javax.servlet.Filter",
"org.springframework.security.web.FilterChainProxy",
"org.springframework.security.web.util.matcher.RequestMatcher"
] | import java.util.List; import java.util.Map; import javax.servlet.Filter; import org.springframework.security.web.FilterChainProxy; import org.springframework.security.web.util.matcher.RequestMatcher; | import java.util.*; import javax.servlet.*; import org.springframework.security.web.*; import org.springframework.security.web.util.matcher.*; | [
"java.util",
"javax.servlet",
"org.springframework.security"
] | java.util; javax.servlet; org.springframework.security; | 748,585 |
@NotNull
private static String changeHttpsToHttp(@NotNull String url) {
String prefix = "https";
if (url.startsWith(prefix)) {
return "http" + url.substring(prefix.length());
}
return url;
} | static String function(@NotNull String url) { String prefix = "https"; if (url.startsWith(prefix)) { return "http" + url.substring(prefix.length()); } return url; } | /**
* If the url scheme is HTTPS, store it as HTTP in the database, not to make user enter and remember same credentials twice.
*/ | If the url scheme is HTTPS, store it as HTTP in the database, not to make user enter and remember same credentials twice | changeHttpsToHttp | {
"repo_name": "amith01994/intellij-community",
"path": "plugins/git4idea/src/git4idea/commands/GitHttpGuiAuthenticator.java",
"license": "apache-2.0",
"size": 11413
} | [
"org.jetbrains.annotations.NotNull"
] | import org.jetbrains.annotations.NotNull; | import org.jetbrains.annotations.*; | [
"org.jetbrains.annotations"
] | org.jetbrains.annotations; | 2,324,706 |
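The transformation itself is simple string handling; a standalone, runnable sketch of the same logic (without the IntelliJ annotations) is shown below.

```java
public class HttpsToHttpDemo {
    // Mirrors the method above: swap a leading "https" scheme for "http", leave other URLs alone.
    static String changeHttpsToHttp(String url) {
        String prefix = "https";
        return url.startsWith(prefix) ? "http" + url.substring(prefix.length()) : url;
    }

    public static void main(String[] args) {
        System.out.println(changeHttpsToHttp("https://example.com/repo.git")); // http://example.com/repo.git
        System.out.println(changeHttpsToHttp("git@example.com:repo.git"));     // unchanged
    }
}
```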
public static String encodeToString(byte[] input, int flags) {
try {
return new String(encode(input, flags), "US-ASCII");
} catch (UnsupportedEncodingException e) {
// US-ASCII is guaranteed to be available.
throw new AssertionError(e);
}
} | static String function(byte[] input, int flags) { try { return new String(encode(input, flags), STR); } catch (UnsupportedEncodingException e) { throw new AssertionError(e); } } | /**
* Base64-encode the given data and return a newly allocated String with the result.
*
* @param input the data to encode
* @param flags controls certain features of the encoded output. Passing {@code DEFAULT} results
* in output that adheres to RFC 2045.
*/ | Base64-encode the given data and return a newly allocated String with the result | encodeToString | {
"repo_name": "lehinevych/PatternAndroid",
"path": "twittterclone/deps/DataDroid/src/android/support/util/Base64.java",
"license": "mit",
"size": 29366
} | [
"java.io.UnsupportedEncodingException"
] | import java.io.UnsupportedEncodingException; | import java.io.*; | [
"java.io"
] | java.io; | 394,467 |
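A hedged usage sketch, assuming this DataDroid backport mirrors the standard `android.util.Base64` flag constants (`DEFAULT`, `NO_WRAP`); it compiles against the backported class shown in the row's imports.

```java
import android.support.util.Base64;

public class Base64Demo {
    public static void main(String[] args) {
        byte[] payload = "hello world".getBytes(java.nio.charset.StandardCharsets.US_ASCII);

        // DEFAULT produces RFC 2045-style output (with line terminators); NO_WRAP emits a single line.
        String wrapped = Base64.encodeToString(payload, Base64.DEFAULT);
        String singleLine = Base64.encodeToString(payload, Base64.NO_WRAP);

        System.out.println(singleLine);                          // aGVsbG8gd29ybGQ=
        System.out.println(wrapped.trim().equals(singleLine));   // true for input this short
    }
}
```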
View v = convertView;
if (v == null) {
LayoutInflater vi;
vi = LayoutInflater.from(getContext());
v = vi.inflate(R.layout.item_list_row, parent, false);
}
Assessment p = getItem(position);
if (p != null) {
TextView gradeTextView = (TextView) v.findViewById(R.id.itemRowGrade);
TextView weightTextView = (TextView) v.findViewById(R.id.itemRowWeight);
if (gradeTextView != null) {
gradeTextView.setText(String.valueOf(p.getGrade()));
}
if (weightTextView != null) {
weightTextView.setText(String.valueOf(p.getWeight()));
}
}
return v;
} | View v = convertView; if (v == null) { LayoutInflater vi; vi = LayoutInflater.from(getContext()); v = vi.inflate(R.layout.item_list_row, parent, false); } Assessment p = getItem(position); if (p != null) { TextView gradeTextView = (TextView) v.findViewById(R.id.itemRowGrade); TextView weightTextView = (TextView) v.findViewById(R.id.itemRowWeight); if (gradeTextView != null) { gradeTextView.setText(String.valueOf(p.getGrade())); } if (weightTextView != null) { weightTextView.setText(String.valueOf(p.getWeight())); } } return v; } | /**
* Gets the view at a given position.
*
* @param position position of view
* @param convertView the reference view
* @param parent the parent group of the view
* @return the view
*/ | Gets the view at a given position | getView | {
"repo_name": "kaozgamer/GradeCalculator",
"path": "app/src/main/java/com/thunderboltsoft/finalgradecalculator/adapters/ListAdapter.java",
"license": "apache-2.0",
"size": 2299
} | [
"android.view.LayoutInflater",
"android.view.View",
"android.widget.TextView",
"com.thunderboltsoft.finalgradecalculator.models.Assessment"
] | import android.view.LayoutInflater; import android.view.View; import android.widget.TextView; import com.thunderboltsoft.finalgradecalculator.models.Assessment; | import android.view.*; import android.widget.*; import com.thunderboltsoft.finalgradecalculator.models.*; | [
"android.view",
"android.widget",
"com.thunderboltsoft.finalgradecalculator"
] | android.view; android.widget; com.thunderboltsoft.finalgradecalculator; | 2,803,729 |
private void initCachedState() {
dataEmptyKeyValueRef =
new ColumnReference(emptyKeyValueCFPtr.copyBytesIfNecessary(),
QueryConstants.EMPTY_COLUMN_BYTES);
indexQualifiers = Lists.newArrayListWithExpectedSize(this.coveredColumns.size());
for (ColumnReference ref : coveredColumns) {
indexQualifiers.add(new ImmutableBytesPtr(IndexUtil.getIndexColumnName(
ref.getFamily(), ref.getQualifier())));
} | void function() { dataEmptyKeyValueRef = new ColumnReference(emptyKeyValueCFPtr.copyBytesIfNecessary(), QueryConstants.EMPTY_COLUMN_BYTES); indexQualifiers = Lists.newArrayListWithExpectedSize(this.coveredColumns.size()); for (ColumnReference ref : coveredColumns) { indexQualifiers.add(new ImmutableBytesPtr(IndexUtil.getIndexColumnName( ref.getFamily(), ref.getQualifier()))); } | /**
* Init calculated state reading/creating
*/ | Init calculated state reading/creating | initCachedState | {
"repo_name": "cloudera-labs/phoenix",
"path": "phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java",
"license": "apache-2.0",
"size": 64424
} | [
"com.google.common.collect.Lists",
"org.apache.phoenix.hbase.index.covered.update.ColumnReference",
"org.apache.phoenix.hbase.index.util.ImmutableBytesPtr",
"org.apache.phoenix.query.QueryConstants",
"org.apache.phoenix.util.IndexUtil"
] | import com.google.common.collect.Lists; import org.apache.phoenix.hbase.index.covered.update.ColumnReference; import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.util.IndexUtil; | import com.google.common.collect.*; import org.apache.phoenix.hbase.index.covered.update.*; import org.apache.phoenix.hbase.index.util.*; import org.apache.phoenix.query.*; import org.apache.phoenix.util.*; | [
"com.google.common",
"org.apache.phoenix"
] | com.google.common; org.apache.phoenix; | 1,664,624 |
static boolean isAnalyticsElement(Vertex vertex) {
return CommonOLAP.analyticsElements.contains(getVertexType(vertex));
} | static boolean isAnalyticsElement(Vertex vertex) { return CommonOLAP.analyticsElements.contains(getVertexType(vertex)); } | /**
* Whether the Tinkerpop vertex has a Grakn type property reserved for analytics.
*
* @param vertex the Tinkerpop vertex
* @return if the type is reserved or not
*/ | Whether the Tinkerpop vertex has a Grakn type property reserved for analytics | isAnalyticsElement | {
"repo_name": "fppt/mindmapsdb",
"path": "grakn-graql/src/main/java/ai/grakn/graql/internal/analytics/Utility.java",
"license": "gpl-3.0",
"size": 2049
} | [
"org.apache.tinkerpop.gremlin.structure.Vertex"
] | import org.apache.tinkerpop.gremlin.structure.Vertex; | import org.apache.tinkerpop.gremlin.structure.*; | [
"org.apache.tinkerpop"
] | org.apache.tinkerpop; | 491,873 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<WebhookInner>> listByAutomationAccountSinglePageAsync(
String resourceGroupName, String automationAccountName, String filter, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (automationAccountName == null) {
return Mono
.error(new IllegalArgumentException("Parameter automationAccountName is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
final String apiVersion = "2015-10-31";
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.listByAutomationAccount(
this.client.getEndpoint(),
resourceGroupName,
automationAccountName,
filter,
this.client.getSubscriptionId(),
apiVersion,
accept,
context)
.map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null));
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<PagedResponse<WebhookInner>> function( String resourceGroupName, String automationAccountName, String filter, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (automationAccountName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } final String apiVersion = STR; final String accept = STR; context = this.client.mergeContext(context); return service .listByAutomationAccount( this.client.getEndpoint(), resourceGroupName, automationAccountName, filter, this.client.getSubscriptionId(), apiVersion, accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } | /**
* Retrieve a list of webhooks.
*
* @param resourceGroupName Name of an Azure Resource group.
* @param automationAccountName The name of the automation account.
* @param filter The filter to apply on the operation.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response model for the list webhook operation.
*/ | Retrieve a list of webhooks | listByAutomationAccountSinglePageAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/automation/azure-resourcemanager-automation/src/main/java/com/azure/resourcemanager/automation/implementation/WebhooksClientImpl.java",
"license": "mit",
"size": 63264
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedResponse",
"com.azure.core.http.rest.PagedResponseBase",
"com.azure.core.util.Context",
"com.azure.resourcemanager.automation.fluent.models.WebhookInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.util.Context; import com.azure.resourcemanager.automation.fluent.models.WebhookInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.automation.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 1,244,790 |
static Set<Class<? extends Annotation>> annotationsToTypes(Annotation[] annotations) {
Set<Class<? extends Annotation>> result = new HashSet<Class<? extends Annotation>>();
for (Annotation annotation : annotations) {
result.add(annotation.annotationType());
}
return result;
} | static Set<Class<? extends Annotation>> annotationsToTypes(Annotation[] annotations) { Set<Class<? extends Annotation>> result = new HashSet<Class<? extends Annotation>>(); for (Annotation annotation : annotations) { result.add(annotation.annotationType()); } return result; } | /**
 * Extracts the annotation types ({@link Annotation#annotationType()}) from the supplied
* annotations.
 */ | Extracts the annotation types (<code>Annotation#annotationType()</code>) from the supplied annotations | annotationsToTypes | {
"repo_name": "google/desugar_jdk_libs",
"path": "jdk11/src/libcore/luni/src/test/java/libcore/java/lang/reflect/annotations/AnnotatedElementTestSupport.java",
"license": "gpl-2.0",
"size": 13763
} | [
"java.lang.annotation.Annotation",
"java.util.HashSet",
"java.util.Set"
] | import java.lang.annotation.Annotation; import java.util.HashSet; import java.util.Set; | import java.lang.annotation.*; import java.util.*; | [
"java.lang",
"java.util"
] | java.lang; java.util; | 805,616 |
public Builder addExpand(String element) {
if (this.expand == null) {
this.expand = new ArrayList<>();
}
this.expand.add(element);
return this;
} | Builder function(String element) { if (this.expand == null) { this.expand = new ArrayList<>(); } this.expand.add(element); return this; } | /**
* Add an element to `expand` list. A list is initialized for the first `add/addAll` call, and
 * subsequent calls add additional elements to the original list. See {@link
* BankAccountUpdateOnAccountParams#expand} for the field documentation.
 */ | Add an element to `expand` list. A list is initialized for the first `add/addAll` call, and subsequent calls add additional elements to the original list. See <code>BankAccountUpdateOnAccountParams#expand</code> for the field documentation | addExpand | {
"repo_name": "stripe/stripe-java",
"path": "src/main/java/com/stripe/param/BankAccountUpdateOnAccountParams.java",
"license": "mit",
"size": 7095
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 366,556 |
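A minimal, self-contained sketch of the lazy-initialization builder pattern that the addExpand record above describes: the backing list is created on the first add call and later calls append to it. The ExpandParamsBuilder class, its field names, and the example values are hypothetical illustrations, not the library's actual API.

import java.util.ArrayList;
import java.util.List;

class ExpandParamsBuilder {
    private List<String> expand; // stays null until the first addExpand call

    public ExpandParamsBuilder addExpand(String element) {
        if (this.expand == null) {
            this.expand = new ArrayList<>(); // initialized on first add
        }
        this.expand.add(element);
        return this; // returning `this` allows chained calls
    }

    public List<String> getExpand() {
        return expand;
    }

    public static void main(String[] args) {
        ExpandParamsBuilder b = new ExpandParamsBuilder()
                .addExpand("customer")   // first call creates the list
                .addExpand("account");   // later calls append to it
        System.out.println(b.getExpand()); // prints [customer, account]
    }
}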
private static String getStringFromRawResource(Context context, int resourceId) {
String result = null;
InputStream is = context.getResources().openRawResource(resourceId);
if (is != null) {
StringBuilder sb = new StringBuilder();
String line;
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
while ((line = reader.readLine()) != null) {
sb.append(line).append("\n");
}
} catch (IOException e) {
Log.w("ApplicationUtils", String.format("Unable to load resource %s: %s", resourceId, e.getMessage()));
} finally {
try {
is.close();
} catch (IOException e) {
Log.w("ApplicationUtils", String.format("Unable to load resource %s: %s", resourceId, e.getMessage()));
}
}
result = sb.toString();
} else {
result = "";
}
return result;
}
| static String function(Context context, int resourceId) { String result = null; InputStream is = context.getResources().openRawResource(resourceId); if (is != null) { StringBuilder sb = new StringBuilder(); String line; try { BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8")); while ((line = reader.readLine()) != null) { sb.append(line).append("\n"); } } catch (IOException e) { Log.w(STR, String.format(STR, resourceId, e.getMessage())); } finally { try { is.close(); } catch (IOException e) { Log.w(STR, String.format(STR, resourceId, e.getMessage())); } } result = sb.toString(); } else { result = ""; } return result; } | /**
* Load a raw string resource.
* @param context The current context.
* @param resourceId The resource id.
* @return The loaded string.
*/ | Load a raw string resource | getStringFromRawResource | {
"repo_name": "jeesmon/ml-browser",
"path": "src/org/zirco/utils/ApplicationUtils.java",
"license": "gpl-3.0",
"size": 19642
} | [
"android.content.Context",
"android.util.Log",
"java.io.BufferedReader",
"java.io.IOException",
"java.io.InputStream",
"java.io.InputStreamReader"
] | import android.content.Context; import android.util.Log; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; | import android.content.*; import android.util.*; import java.io.*; | [
"android.content",
"android.util",
"java.io"
] | android.content; android.util; java.io; | 1,369,569 |
public void addCallToActionButton(Button postBackButton) {
this.callToActions.add(postBackButton);
}
| void function(Button postBackButton) { this.callToActions.add(postBackButton); } | /**
* Adds the call to action button.
*
* @param postBackButton the post back button
*/ | Adds the call to action button | addCallToActionButton | {
"repo_name": "Aurasphere/facebot",
"path": "src/main/java/co/aurasphere/botmill/fb/model/api/messengerprofile/persistentmenu/CallToActionNested.java",
"license": "mit",
"size": 3626
} | [
"co.aurasphere.botmill.fb.model.outcoming.template.button.Button"
] | import co.aurasphere.botmill.fb.model.outcoming.template.button.Button; | import co.aurasphere.botmill.fb.model.outcoming.template.button.*; | [
"co.aurasphere.botmill"
] | co.aurasphere.botmill; | 1,196,681 |
@Test
public void test245ModifyUserRappLocalityUnderReconcile() throws Exception {
final String TEST_NAME = "test245ModifyUserRappLocalityUnderReconcile";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
Task task = taskManager.createTaskInstance(TestUserTemplate.class.getName() + "." + TEST_NAME);
OperationResult result = task.getResult();
PrismObject<UserType> userBefore = modelService.getObject(UserType.class, USER_RAPP_OID, null, task, result);
display("User before", userBefore);
ObjectDelta<UserType> objectDelta = createModifyUserReplaceDelta(USER_RAPP_OID, new ItemPath(UserType.F_LOCALITY),
PrismTestUtil.createPolyString("Six feet under"));
Collection<ObjectDelta<? extends ObjectType>> deltas = MiscSchemaUtil.createCollection(objectDelta);
ModelExecuteOptions options = ModelExecuteOptions.createReconcile();
// WHEN
modelService.executeChanges(deltas, options, task, result);
// THEN
result.computeStatus();
TestUtil.assertSuccess(result);
PrismObject<UserType> userAfter = modelService.getObject(UserType.class, USER_RAPP_OID, null, task, result);
display("User after", userAfter);
assertAssignedAccount(userAfter, RESOURCE_DUMMY_BLUE_OID);
assertAssignedRole(userAfter, ROLE_RASTAMAN_OID);
assertAssignments(userAfter, 2);
UserType userAfterType = userAfter.asObjectable();
assertLinks(userAfter, 1);
assertEquals("Wrong timezone", "High Seas/Six feet under", userAfterType.getTimezone());
assertEquals("Wrong locale", "WE", userAfterType.getLocale());
assertEquals("Unexpected value of employeeNumber",
"D3ADB33F", userAfterType.getEmployeeNumber());
assertEquals("Wrong costCenter", "CC-RAPP", userAfterType.getCostCenter());
}
| void function() throws Exception { final String TEST_NAME = STR; TestUtil.displayTestTile(this, TEST_NAME); Task task = taskManager.createTaskInstance(TestUserTemplate.class.getName() + "." + TEST_NAME); OperationResult result = task.getResult(); PrismObject<UserType> userBefore = modelService.getObject(UserType.class, USER_RAPP_OID, null, task, result); display(STR, userBefore); ObjectDelta<UserType> objectDelta = createModifyUserReplaceDelta(USER_RAPP_OID, new ItemPath(UserType.F_LOCALITY), PrismTestUtil.createPolyString(STR)); Collection<ObjectDelta<? extends ObjectType>> deltas = MiscSchemaUtil.createCollection(objectDelta); ModelExecuteOptions options = ModelExecuteOptions.createReconcile(); modelService.executeChanges(deltas, options, task, result); result.computeStatus(); TestUtil.assertSuccess(result); PrismObject<UserType> userAfter = modelService.getObject(UserType.class, USER_RAPP_OID, null, task, result); display(STR, userAfter); assertAssignedAccount(userAfter, RESOURCE_DUMMY_BLUE_OID); assertAssignedRole(userAfter, ROLE_RASTAMAN_OID); assertAssignments(userAfter, 2); UserType userAfterType = userAfter.asObjectable(); assertLinks(userAfter, 1); assertEquals(STR, STR, userAfterType.getTimezone()); assertEquals(STR, "WE", userAfterType.getLocale()); assertEquals(STR, STR, userAfterType.getEmployeeNumber()); assertEquals(STR, STR, userAfterType.getCostCenter()); } | /**
 * Similar to test244, but also uses the reconcile option.
* MID-3040
 */ | Similar to test244, but also uses the reconcile option. MID-3040 | test245ModifyUserRappLocalityUnderReconcile | {
"repo_name": "PetrGasparik/midpoint",
"path": "model/model-intest/src/test/java/com/evolveum/midpoint/model/intest/TestUserTemplate.java",
"license": "apache-2.0",
"size": 93837
} | [
"com.evolveum.midpoint.model.api.ModelExecuteOptions",
"com.evolveum.midpoint.prism.PrismObject",
"com.evolveum.midpoint.prism.delta.ObjectDelta",
"com.evolveum.midpoint.prism.path.ItemPath",
"com.evolveum.midpoint.prism.util.PrismTestUtil",
"com.evolveum.midpoint.schema.result.OperationResult",
"com.evolveum.midpoint.schema.util.MiscSchemaUtil",
"com.evolveum.midpoint.task.api.Task",
"com.evolveum.midpoint.test.IntegrationTestTools",
"com.evolveum.midpoint.test.util.TestUtil",
"com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType",
"com.evolveum.midpoint.xml.ns._public.common.common_3.UserType",
"java.util.Collection",
"org.testng.AssertJUnit"
] | import com.evolveum.midpoint.model.api.ModelExecuteOptions; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.util.PrismTestUtil; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.MiscSchemaUtil; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.test.IntegrationTestTools; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; import java.util.Collection; import org.testng.AssertJUnit; | import com.evolveum.midpoint.model.api.*; import com.evolveum.midpoint.prism.*; import com.evolveum.midpoint.prism.delta.*; import com.evolveum.midpoint.prism.path.*; import com.evolveum.midpoint.prism.util.*; import com.evolveum.midpoint.schema.result.*; import com.evolveum.midpoint.schema.util.*; import com.evolveum.midpoint.task.api.*; import com.evolveum.midpoint.test.*; import com.evolveum.midpoint.test.util.*; import com.evolveum.midpoint.xml.ns._public.common.common_3.*; import java.util.*; import org.testng.*; | [
"com.evolveum.midpoint",
"java.util",
"org.testng"
] | com.evolveum.midpoint; java.util; org.testng; | 1,952,583 |
public Interval getRecordInterval();
| Interval function(); | /**
* Returns the interval in which this object is known.
*/ | Returns the interval in which this object is known | getRecordInterval | {
"repo_name": "klr8/bitemporal",
"path": "src/main/java/com/ervacon/bitemporal/Bitemporal.java",
"license": "bsd-3-clause",
"size": 1786
} | [
"org.threeten.extra.Interval"
] | import org.threeten.extra.Interval; | import org.threeten.extra.*; | [
"org.threeten.extra"
] | org.threeten.extra; | 300,469 |
public synchronized boolean readOnce(Datapoint datapoint) {
if (datapoint == null) {
sLogger.error("Argument datapoint cannot be null");
return false;
}
if (mReadQueue.size() > KNXConnection.getMaxRefreshQueueEntries()) {
sLogger.error("Maximum number of permissible reading queue entries reached ('{}'). Ignoring new entries.",
KNXConnection.getMaxRefreshQueueEntries());
return false;
}
sLogger.debug("Datapoint '{}': one time reading scheduled.", datapoint.getName());
return mReadQueue.add(datapoint);
} | synchronized boolean function(Datapoint datapoint) { if (datapoint == null) { sLogger.error(STR); return false; } if (mReadQueue.size() > KNXConnection.getMaxRefreshQueueEntries()) { sLogger.error(STR, KNXConnection.getMaxRefreshQueueEntries()); return false; } sLogger.debug(STR, datapoint.getName()); return mReadQueue.add(datapoint); } | /**
* Schedules immediate and one-time reading of a <code>Datapoint</code>.
*
* @param datapoint the <code>Datapoint</code> to read
 * @return true if the read was scheduled; false if the datapoint is null or the queue is full.
*/ | Schedules immediate and one-time reading of a <code>Datapoint</code> | readOnce | {
"repo_name": "cdjackson/openhab",
"path": "bundles/binding/org.openhab.binding.knx/src/main/java/org/openhab/binding/knx/internal/bus/KNXBusReaderScheduler.java",
"license": "epl-1.0",
"size": 12333
} | [
"org.openhab.binding.knx.internal.connection.KNXConnection"
] | import org.openhab.binding.knx.internal.connection.KNXConnection; | import org.openhab.binding.knx.internal.connection.*; | [
"org.openhab.binding"
] | org.openhab.binding; | 1,556,239 |
public BillingResponseListResultInner withVmSizeFilters(List<VmSizeCompatibilityFilterV2> vmSizeFilters) {
this.vmSizeFilters = vmSizeFilters;
return this;
} | BillingResponseListResultInner function(List<VmSizeCompatibilityFilterV2> vmSizeFilters) { this.vmSizeFilters = vmSizeFilters; return this; } | /**
 * Set the vmSizeFilters property: The virtual machine filtering mode. Effectively this can enable or disable
 * the virtual machine sizes in a particular set.
*
* @param vmSizeFilters the vmSizeFilters value to set.
* @return the BillingResponseListResultInner object itself.
 */ | Set the vmSizeFilters property: The virtual machine filtering mode. Effectively this can enable or disable the virtual machine sizes in a particular set | withVmSizeFilters | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/hdinsight/azure-resourcemanager-hdinsight/src/main/java/com/azure/resourcemanager/hdinsight/fluent/models/BillingResponseListResultInner.java",
"license": "mit",
"size": 5483
} | [
"com.azure.resourcemanager.hdinsight.models.VmSizeCompatibilityFilterV2",
"java.util.List"
] | import com.azure.resourcemanager.hdinsight.models.VmSizeCompatibilityFilterV2; import java.util.List; | import com.azure.resourcemanager.hdinsight.models.*; import java.util.*; | [
"com.azure.resourcemanager",
"java.util"
] | com.azure.resourcemanager; java.util; | 1,524,501 |
public Builder setDocumentStore(DocumentStore documentStore) {
this.documentStoreSupplier = ofInstance(documentStore);
return this;
} | Builder function(DocumentStore documentStore) { this.documentStoreSupplier = ofInstance(documentStore); return this; } | /**
* Set the document store to use. By default an in-memory store is used.
*
* @param documentStore the document store
* @return this
*/ | Set the document store to use. By default an in-memory store is used | setDocumentStore | {
"repo_name": "chetanmeh/jackrabbit-oak",
"path": "oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java",
"license": "apache-2.0",
"size": 54533
} | [
"com.google.common.base.Suppliers"
] | import com.google.common.base.Suppliers; | import com.google.common.base.*; | [
"com.google.common"
] | com.google.common; | 376,000 |
@SuppressWarnings("unchecked")
public static List<Comment> getCommentList(final CompilationUnit node) {
return node.getCommentList();
}
| @SuppressWarnings(STR) static List<Comment> function(final CompilationUnit node) { return node.getCommentList(); } | /**
 * Genericized version of the equivalent JDT method.
*
* @param node the node on which to call the equivalent JDT method
* @return a List of expressions
* @see CompilationUnit#getCommentList()
 */ | Genericized version of the equivalent JDT method | getCommentList | {
"repo_name": "rpau/AutoRefactor",
"path": "plugin/src/main/java/org/autorefactor/jdt/internal/corext/dom/ASTNodes.java",
"license": "epl-1.0",
"size": 104241
} | [
"java.util.List",
"org.eclipse.jdt.core.dom.Comment",
"org.eclipse.jdt.core.dom.CompilationUnit"
] | import java.util.List; import org.eclipse.jdt.core.dom.Comment; import org.eclipse.jdt.core.dom.CompilationUnit; | import java.util.*; import org.eclipse.jdt.core.dom.*; | [
"java.util",
"org.eclipse.jdt"
] | java.util; org.eclipse.jdt; | 2,516,275 |
AaptCommandBuilder thenAdd(String flag, Optional<Path> value); | AaptCommandBuilder thenAdd(String flag, Optional<Path> value); | /**
* Adds a single flag and associated path value to the builder if the value is non-null and the
* condition was true.
*
* @see AaptCommandBuilder#add(String,Optional)
*/ | Adds a single flag and associated path value to the builder if the value is non-null and the condition was true | thenAdd | {
"repo_name": "dslomov/bazel",
"path": "src/tools/android/java/com/google/devtools/build/android/AaptCommandBuilder.java",
"license": "apache-2.0",
"size": 10652
} | [
"java.nio.file.Path",
"java.util.Optional"
] | import java.nio.file.Path; import java.util.Optional; | import java.nio.file.*; import java.util.*; | [
"java.nio",
"java.util"
] | java.nio; java.util; | 2,300,927 |
public boolean isDimBlocked() {
final PowerManager pm = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
return WakeLockManager.getInstance(pm).isLocked();
} | boolean function() { final PowerManager pm = (PowerManager) context.getSystemService(Context.POWER_SERVICE); return WakeLockManager.getInstance(pm).isLocked(); } | /**
* Checks if dim is blocked.
*/ | Checks if dim is blocked | isDimBlocked | {
"repo_name": "yyunikov/android-dim-block",
"path": "src/main/java/com/yyunikov/dimblock/controller/DimPreferenceController.java",
"license": "apache-2.0",
"size": 3597
} | [
"android.content.Context",
"android.os.PowerManager",
"com.yyunikov.dimblock.base.WakeLockManager"
] | import android.content.Context; import android.os.PowerManager; import com.yyunikov.dimblock.base.WakeLockManager; | import android.content.*; import android.os.*; import com.yyunikov.dimblock.base.*; | [
"android.content",
"android.os",
"com.yyunikov.dimblock"
] | android.content; android.os; com.yyunikov.dimblock; | 829,574 |
private NgramDPState computeFinalTransition(NgramDPState state, Accumulator acc) {
float res = 0.0f;
LinkedList<Integer> currentNgram = new LinkedList<Integer>();
int[] leftContext = state.getLeftLMStateWords();
int[] rightContext = state.getRightLMStateWords();
for (int i = 0; i < leftContext.length; i++) {
int t = leftContext[i];
currentNgram.add(t);
if (currentNgram.size() >= 2) { // start from bigram
float prob = this.languageModel.ngramLogProbability(Support.toArray(currentNgram),
currentNgram.size());
res += prob;
}
if (currentNgram.size() == this.ngramOrder)
currentNgram.removeFirst();
}
// Tell the accumulator
acc.add(name, res);
// State is the same
return new NgramDPState(leftContext, rightContext);
} | NgramDPState function(NgramDPState state, Accumulator acc) { float res = 0.0f; LinkedList<Integer> currentNgram = new LinkedList<Integer>(); int[] leftContext = state.getLeftLMStateWords(); int[] rightContext = state.getRightLMStateWords(); for (int i = 0; i < leftContext.length; i++) { int t = leftContext[i]; currentNgram.add(t); if (currentNgram.size() >= 2) { float prob = this.languageModel.ngramLogProbability(Support.toArray(currentNgram), currentNgram.size()); res += prob; } if (currentNgram.size() == this.ngramOrder) currentNgram.removeFirst(); } acc.add(name, res); return new NgramDPState(leftContext, rightContext); } | /**
* This function differs from regular transitions because we incorporate the cost of incomplete
* left-hand ngrams, as well as including the start- and end-of-sentence markers (if they were
* requested when the object was created).
*
* @param state the dynamic programming state
* @return the final transition probability (including incomplete n-grams)
*/ | This function differs from regular transitions because we incorporate the cost of incomplete left-hand ngrams, as well as including the start- and end-of-sentence markers (if they were requested when the object was created) | computeFinalTransition | {
"repo_name": "gwenniger/joshua",
"path": "src/joshua/decoder/ff/lm/LanguageModelFF.java",
"license": "lgpl-2.1",
"size": 10102
} | [
"java.util.LinkedList"
] | import java.util.LinkedList; | import java.util.*; | [
"java.util"
] | java.util; | 942,545 |
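A small, self-contained sketch of the sliding-window idea used by computeFinalTransition in the record above: walk the left context, start scoring once a bigram is available, and trim the window once it reaches the n-gram order. It is deliberately simplified to bigrams; the toy log-probability table, the unseen-ngram penalty, and the class name are assumptions for illustration only, not the Joshua language-model API.

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

class NgramWindowSketch {
    // Hypothetical bigram log-probabilities keyed by "w1 w2".
    static final Map<String, Double> BIGRAM_LOGPROB = new HashMap<>();
    static {
        BIGRAM_LOGPROB.put("<s> the", -0.5);
        BIGRAM_LOGPROB.put("the cat", -1.2);
        BIGRAM_LOGPROB.put("cat sat", -1.7);
    }

    static double scoreLeftContext(String[] leftContext, int ngramOrder) {
        LinkedList<String> window = new LinkedList<>();
        double total = 0.0;
        for (String word : leftContext) {
            window.add(word);
            if (window.size() >= 2) { // start scoring from bigrams, as in the record above
                String key = window.get(window.size() - 2) + " " + window.getLast();
                total += BIGRAM_LOGPROB.getOrDefault(key, -10.0); // unseen-ngram penalty (assumed)
            }
            if (window.size() == ngramOrder) {
                window.removeFirst(); // keep the window at most ngramOrder words wide
            }
        }
        return total;
    }

    public static void main(String[] args) {
        String[] left = {"<s>", "the", "cat", "sat"};
        System.out.println(scoreLeftContext(left, 2)); // sums the three bigram scores: -3.4
    }
}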
private String appendToSource(String appendDir, String srcDir) {
StringBuilder builder = new StringBuilder(srcDir);
srcDir = (appendDir == null) ? srcDir : builder.append(Path.SEPARATOR).append(appendDir)
.toString();
return srcDir;
} | String function(String appendDir, String srcDir) { StringBuilder builder = new StringBuilder(srcDir); srcDir = (appendDir == null) ? srcDir : builder.append(Path.SEPARATOR).append(appendDir) .toString(); return srcDir; } | /**
* Append dir to source dir
* @param appendDir
* @param srcDir
* @return
*/ | Append dir to source dir | appendToSource | {
"repo_name": "wangbin83-gmail-com/hive-1.1.0-cdh5.4.8",
"path": "ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java",
"license": "apache-2.0",
"size": 46999
} | [
"org.apache.hadoop.fs.Path"
] | import org.apache.hadoop.fs.Path; | import org.apache.hadoop.fs.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 2,666,510 |
@Override
public void parse(DLNAMediaInfo media, File file) {
if (media == null) {
throw new IllegalArgumentException("media cannot be null");
}
if (file == null) {
throw new IllegalArgumentException("file cannot be null");
}
OutputParams params = new OutputParams(configuration);
params.log = true;
String[] cmdArray = new String[4];
cmdArray[0] = PlayerFactory.getPlayerExecutable(ID);
cmdArray[1] = "-i";
cmdArray[2] = "-v";
cmdArray[3] = file.getAbsolutePath();
ProcessWrapperImpl pw = new ProcessWrapperImpl(cmdArray, params, true, false);
pw.runInSameThread();
List<String> list = pw.getOtherResults();
Pattern pattern = Pattern.compile("^Output size:\\s*(\\d+)\\s*x\\s*(\\d+)");
Matcher matcher;
for (String s : list) {
matcher = pattern.matcher(s);
if (matcher.find()) {
media.setWidth(Integer.parseInt(matcher.group(1)));
media.setHeight(Integer.parseInt(matcher.group(2)));
if (LOGGER.isTraceEnabled()) {
LOGGER.trace(
"Parsed resolution {} x {} for image \"{}\" from DCRaw output",
Integer.parseInt(matcher.group(1)),
Integer.parseInt(matcher.group(2)),
file.getPath()
);
}
break;
}
}
} | void function(DLNAMediaInfo media, File file) { if (media == null) { throw new IllegalArgumentException(STR); } if (file == null) { throw new IllegalArgumentException(STR); } OutputParams params = new OutputParams(configuration); params.log = true; String[] cmdArray = new String[4]; cmdArray[0] = PlayerFactory.getPlayerExecutable(ID); cmdArray[1] = "-i"; cmdArray[2] = "-v"; cmdArray[3] = file.getAbsolutePath(); ProcessWrapperImpl pw = new ProcessWrapperImpl(cmdArray, params, true, false); pw.runInSameThread(); List<String> list = pw.getOtherResults(); Pattern pattern = Pattern.compile(STR); Matcher matcher; for (String s : list) { matcher = pattern.matcher(s); if (matcher.find()) { media.setWidth(Integer.parseInt(matcher.group(1))); media.setHeight(Integer.parseInt(matcher.group(2))); if (LOGGER.isTraceEnabled()) { LOGGER.trace( STR{}\STR, Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2)), file.getPath() ); } break; } } } | /**
* Parses {@code file} and stores the result in {@code media}.
*
* @param media the {@link DLNAMediaInfo} instance to store the parse
* results in.
* @param file the {@link File} to parse.
*/ | Parses file and stores the result in media | parse | {
"repo_name": "DigitalMediaServer/DigitalMediaServer",
"path": "src/main/java/net/pms/encoders/DCRaw.java",
"license": "gpl-2.0",
"size": 14828
} | [
"java.io.File",
"java.util.List",
"java.util.regex.Matcher",
"java.util.regex.Pattern",
"net.pms.dlna.DLNAMediaInfo",
"net.pms.io.OutputParams",
"net.pms.io.ProcessWrapperImpl"
] | import java.io.File; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import net.pms.dlna.DLNAMediaInfo; import net.pms.io.OutputParams; import net.pms.io.ProcessWrapperImpl; | import java.io.*; import java.util.*; import java.util.regex.*; import net.pms.dlna.*; import net.pms.io.*; | [
"java.io",
"java.util",
"net.pms.dlna",
"net.pms.io"
] | java.io; java.util; net.pms.dlna; net.pms.io; | 1,740,952 |
public void testTxRecordsConsistency() throws Exception {
System.setProperty(IgniteSystemProperties.IGNITE_WAL_LOG_TX_RECORDS, "true");
IgniteEx ignite = (IgniteEx) startGrids(3);
ignite.active(true);
try {
final String cacheName = "transactional";
CacheConfiguration<Object, Object> cacheConfiguration = new CacheConfiguration<>(cacheName)
.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL)
.setAffinity(new RendezvousAffinityFunction(false, 32))
.setCacheMode(CacheMode.PARTITIONED)
.setRebalanceMode(CacheRebalanceMode.SYNC)
.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC)
.setBackups(0);
ignite.createCache(cacheConfiguration);
IgniteCache<Object, Object> cache = ignite.cache(cacheName);
GridCacheSharedContext<Object, Object> sharedCtx = ignite.context().cache().context();
GridCacheDatabaseSharedManager db = (GridCacheDatabaseSharedManager)sharedCtx.database();
db.waitForCheckpoint("test");
db.enableCheckpoints(false).get();
// Log something to know where to start.
WALPointer startPtr = sharedCtx.wal().log(new MemoryRecoveryRecord(U.currentTimeMillis()));
final int transactions = 100;
final int operationsPerTransaction = 40;
Random random = new Random();
for (int t = 1; t <= transactions; t++) {
Transaction tx = ignite.transactions().txStart(
TransactionConcurrency.OPTIMISTIC, TransactionIsolation.READ_COMMITTED);
for (int op = 0; op < operationsPerTransaction; op++) {
int key = random.nextInt(1000) + 1;
Object value;
if (random.nextBoolean())
value = randomString(random) + key;
else
value = new BigObject(key);
cache.put(key, value);
}
if (random.nextBoolean()) {
tx.commit();
}
else {
tx.rollback();
}
if (t % 50 == 0)
log.info("Finished transaction " + t);
}
Set<GridCacheVersion> activeTransactions = new HashSet<>();
// Check that all DataRecords are within PREPARED and COMMITTED tx records.
try (WALIterator it = sharedCtx.wal().replay(startPtr)) {
while (it.hasNext()) {
IgniteBiTuple<WALPointer, WALRecord> tup = it.next();
WALRecord rec = tup.get2();
if (rec instanceof TxRecord) {
TxRecord txRecord = (TxRecord) rec;
GridCacheVersion txId = txRecord.nearXidVersion();
switch (txRecord.state()) {
case PREPARED:
assert !activeTransactions.contains(txId) : "Transaction is already present " + txRecord;
activeTransactions.add(txId);
break;
case COMMITTED:
assert activeTransactions.contains(txId) : "No PREPARE marker for transaction " + txRecord;
activeTransactions.remove(txId);
break;
case ROLLED_BACK:
activeTransactions.remove(txId);
break;
default:
throw new IllegalStateException("Unknown Tx state of record " + txRecord);
}
} else if (rec instanceof DataRecord) {
DataRecord dataRecord = (DataRecord) rec;
for (DataEntry entry : dataRecord.writeEntries()) {
GridCacheVersion txId = entry.nearXidVersion();
assert activeTransactions.contains(txId) : "No transaction for entry " + entry;
}
}
}
}
}
finally {
System.clearProperty(IgniteSystemProperties.IGNITE_WAL_LOG_TX_RECORDS);
stopAllGrids();
}
} | void function() throws Exception { System.setProperty(IgniteSystemProperties.IGNITE_WAL_LOG_TX_RECORDS, "true"); IgniteEx ignite = (IgniteEx) startGrids(3); ignite.active(true); try { final String cacheName = STR; CacheConfiguration<Object, Object> cacheConfiguration = new CacheConfiguration<>(cacheName) .setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL) .setAffinity(new RendezvousAffinityFunction(false, 32)) .setCacheMode(CacheMode.PARTITIONED) .setRebalanceMode(CacheRebalanceMode.SYNC) .setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC) .setBackups(0); ignite.createCache(cacheConfiguration); IgniteCache<Object, Object> cache = ignite.cache(cacheName); GridCacheSharedContext<Object, Object> sharedCtx = ignite.context().cache().context(); GridCacheDatabaseSharedManager db = (GridCacheDatabaseSharedManager)sharedCtx.database(); db.waitForCheckpoint("test"); db.enableCheckpoints(false).get(); WALPointer startPtr = sharedCtx.wal().log(new MemoryRecoveryRecord(U.currentTimeMillis())); final int transactions = 100; final int operationsPerTransaction = 40; Random random = new Random(); for (int t = 1; t <= transactions; t++) { Transaction tx = ignite.transactions().txStart( TransactionConcurrency.OPTIMISTIC, TransactionIsolation.READ_COMMITTED); for (int op = 0; op < operationsPerTransaction; op++) { int key = random.nextInt(1000) + 1; Object value; if (random.nextBoolean()) value = randomString(random) + key; else value = new BigObject(key); cache.put(key, value); } if (random.nextBoolean()) { tx.commit(); } else { tx.rollback(); } if (t % 50 == 0) log.info(STR + t); } Set<GridCacheVersion> activeTransactions = new HashSet<>(); try (WALIterator it = sharedCtx.wal().replay(startPtr)) { while (it.hasNext()) { IgniteBiTuple<WALPointer, WALRecord> tup = it.next(); WALRecord rec = tup.get2(); if (rec instanceof TxRecord) { TxRecord txRecord = (TxRecord) rec; GridCacheVersion txId = txRecord.nearXidVersion(); switch (txRecord.state()) { case PREPARED: assert !activeTransactions.contains(txId) : STR + txRecord; activeTransactions.add(txId); break; case COMMITTED: assert activeTransactions.contains(txId) : STR + txRecord; activeTransactions.remove(txId); break; case ROLLED_BACK: activeTransactions.remove(txId); break; default: throw new IllegalStateException(STR + txRecord); } } else if (rec instanceof DataRecord) { DataRecord dataRecord = (DataRecord) rec; for (DataEntry entry : dataRecord.writeEntries()) { GridCacheVersion txId = entry.nearXidVersion(); assert activeTransactions.contains(txId) : STR + entry; } } } } } finally { System.clearProperty(IgniteSystemProperties.IGNITE_WAL_LOG_TX_RECORDS); stopAllGrids(); } } | /**
* Test that all DataRecord WAL records are within transaction boundaries - PREPARED and COMMITTED markers.
*
* @throws Exception If any fail.
*/ | Test that all DataRecord WAL records are within transaction boundaries - PREPARED and COMMITTED markers | testTxRecordsConsistency | {
"repo_name": "WilliamDo/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/db/wal/IgniteWalRecoveryTest.java",
"license": "apache-2.0",
"size": 49960
} | [
"java.util.HashSet",
"java.util.Random",
"java.util.Set",
"org.apache.ignite.IgniteCache",
"org.apache.ignite.IgniteSystemProperties",
"org.apache.ignite.cache.CacheAtomicityMode",
"org.apache.ignite.cache.CacheMode",
"org.apache.ignite.cache.CacheRebalanceMode",
"org.apache.ignite.cache.CacheWriteSynchronizationMode",
"org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction",
"org.apache.ignite.configuration.CacheConfiguration",
"org.apache.ignite.internal.IgniteEx",
"org.apache.ignite.internal.pagemem.wal.WALIterator",
"org.apache.ignite.internal.pagemem.wal.WALPointer",
"org.apache.ignite.internal.pagemem.wal.record.DataEntry",
"org.apache.ignite.internal.pagemem.wal.record.DataRecord",
"org.apache.ignite.internal.pagemem.wal.record.MemoryRecoveryRecord",
"org.apache.ignite.internal.pagemem.wal.record.TxRecord",
"org.apache.ignite.internal.pagemem.wal.record.WALRecord",
"org.apache.ignite.internal.processors.cache.GridCacheSharedContext",
"org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager",
"org.apache.ignite.internal.processors.cache.version.GridCacheVersion",
"org.apache.ignite.internal.util.typedef.internal.U",
"org.apache.ignite.lang.IgniteBiTuple",
"org.apache.ignite.transactions.Transaction",
"org.apache.ignite.transactions.TransactionConcurrency",
"org.apache.ignite.transactions.TransactionIsolation"
] | import java.util.HashSet; import java.util.Random; import java.util.Set; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.cache.CacheRebalanceMode; import org.apache.ignite.cache.CacheWriteSynchronizationMode; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.pagemem.wal.WALIterator; import org.apache.ignite.internal.pagemem.wal.WALPointer; import org.apache.ignite.internal.pagemem.wal.record.DataEntry; import org.apache.ignite.internal.pagemem.wal.record.DataRecord; import org.apache.ignite.internal.pagemem.wal.record.MemoryRecoveryRecord; import org.apache.ignite.internal.pagemem.wal.record.TxRecord; import org.apache.ignite.internal.pagemem.wal.record.WALRecord; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; | import java.util.*; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cache.affinity.rendezvous.*; import org.apache.ignite.configuration.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.pagemem.wal.*; import org.apache.ignite.internal.pagemem.wal.record.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.cache.persistence.*; import org.apache.ignite.internal.processors.cache.version.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.lang.*; import org.apache.ignite.transactions.*; | [
"java.util",
"org.apache.ignite"
] | java.util; org.apache.ignite; | 2,364,127 |
@RequestMapping(CRUDControllerConfig.STD_CALL_COMMAND)
public ModelAndView call(@ModelAttribute("form") PCRProductForm form, @RequestParam("subFlowId") int subFlowId) {
return super.doCall(form, subFlowId);
}
/**
* {@inheritDoc} | @RequestMapping(CRUDControllerConfig.STD_CALL_COMMAND) ModelAndView function(@ModelAttribute("form") PCRProductForm form, @RequestParam(STR) int subFlowId) { return super.doCall(form, subFlowId); } /** * {@inheritDoc} | /**
* Calls a subflow.
*
* @param form The object containing the values of the entity to be saved.
* @param subFlowId The id of the flow to call in the subFlowUrls array.
* @return The next view to go to. It is a forward to the entry action of the subflow.
*/ | Calls a subflow | call | {
"repo_name": "NCIP/calims",
"path": "calims2-webapp/src/java/gov/nih/nci/calims2/ui/inventory/pcrproduct/PCRProductController.java",
"license": "bsd-3-clause",
"size": 18330
} | [
"gov.nih.nci.calims2.ui.generic.crud.CRUDControllerConfig",
"org.springframework.web.bind.annotation.ModelAttribute",
"org.springframework.web.bind.annotation.RequestMapping",
"org.springframework.web.bind.annotation.RequestParam",
"org.springframework.web.servlet.ModelAndView"
] | import gov.nih.nci.calims2.ui.generic.crud.CRUDControllerConfig; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.servlet.ModelAndView; | import gov.nih.nci.calims2.ui.generic.crud.*; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.*; | [
"gov.nih.nci",
"org.springframework.web"
] | gov.nih.nci; org.springframework.web; | 718,726 |
public void testTTReservingInHeterogenousEnvironment()
throws IOException {
// 2 taskTrackers, 4 map slots on one and 3 map slot on another.
taskTrackerManager = new FakeTaskTrackerManager(1, 4, 0);
taskTrackerManager.addTaskTracker("tt2", 3, 0);
taskTrackerManager.addQueues(new String[] { "default" });
ArrayList<FakeQueueInfo> queues = new ArrayList<FakeQueueInfo>();
queues.add(new FakeQueueInfo("default", 100.0f, true, 100));
resConf.setFakeQueues(queues);
scheduler.setTaskTrackerManager(taskTrackerManager);
// enabled memory-based scheduling
// Normal job in the cluster would be 2GB maps/reduces
// Max allowed map memory would be 8GB.
scheduler.getConf().setLong(
JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY, 8 * 1024);
scheduler.getConf().setLong(
JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 2 * 1024);
scheduler.getConf().setLong(
JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY, 8 * 1024);
scheduler.getConf().setLong(
JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 2 * 1024);
scheduler.setResourceManagerConf(resConf);
scheduler.start();
LOG.debug("Submit a memory(7GB vmem maps/reduces) job of "
+ "2 map & 0 red tasks");
JobConf jConf = new JobConf(conf);
jConf = new JobConf(conf);
// We require 7GB maps, so thats worth 4 slots on the cluster.
jConf.setMemoryForMapTask(7 * 1024);
jConf.setMemoryForReduceTask(1 * 1024);
// Hence, 4 + 4 slots are required totally, for two tasks.
jConf.setNumMapTasks(2);
jConf.setNumReduceTasks(0);
jConf.setQueueName("default");
jConf.setUser("u1");
FakeJobInProgress job = submitJobAndInit(JobStatus.PREP, jConf);
// Heartbeating the trackers
scheduler.assignTasks(tracker("tt1"));
scheduler.assignTasks(tracker("tt2"));
scheduler.updateQueueUsageForTests();
LOG.info(job.getSchedulingInfo());
// tt2 can at most run 3 slots while each map task of this job requires
// at least 4 minimum slots to run.
// tt2 should not at all be reserved, hence. Since it would be a waste of
// slots for other jobs.
assertEquals("Tracker tt2 got reserved unnecessarily.",
0, scheduler.getMapScheduler().getNumReservedTaskTrackers(job));
assertEquals(
// Should be running only one map task worth four slots,
// and no reservations.
CapacityTaskScheduler.getJobQueueSchedInfo(1, 4, 0, 0, 0, 0),
(String) job.getSchedulingInfo());
jConf = new JobConf(conf);
// Try submitting a 3-slot worthy job, targeting tt2
// 5 GB should be worth 3 slots (2GB/map)
jConf.setMemoryForMapTask(5 * 1024);
jConf.setMemoryForReduceTask(1 * 1024);
// Just one task, targetting an unreserved tt2
jConf.setNumMapTasks(1);
jConf.setNumReduceTasks(0);
jConf.setQueueName("default");
jConf.setUser("u1");
submitJobAndInit(JobStatus.PREP, jConf);
// TT2 should get assigned.
checkAssignment("tt2", "attempt_test_0002_m_000001_0 on tt2");
} | void function() throws IOException { taskTrackerManager = new FakeTaskTrackerManager(1, 4, 0); taskTrackerManager.addTaskTracker("tt2", 3, 0); taskTrackerManager.addQueues(new String[] { STR }); ArrayList<FakeQueueInfo> queues = new ArrayList<FakeQueueInfo>(); queues.add(new FakeQueueInfo(STR, 100.0f, true, 100)); resConf.setFakeQueues(queues); scheduler.setTaskTrackerManager(taskTrackerManager); scheduler.getConf().setLong( JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY, 8 * 1024); scheduler.getConf().setLong( JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 2 * 1024); scheduler.getConf().setLong( JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY, 8 * 1024); scheduler.getConf().setLong( JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 2 * 1024); scheduler.setResourceManagerConf(resConf); scheduler.start(); LOG.debug(STR + STR); JobConf jConf = new JobConf(conf); jConf = new JobConf(conf); jConf.setMemoryForMapTask(7 * 1024); jConf.setMemoryForReduceTask(1 * 1024); jConf.setNumMapTasks(2); jConf.setNumReduceTasks(0); jConf.setQueueName(STR); jConf.setUser("u1"); FakeJobInProgress job = submitJobAndInit(JobStatus.PREP, jConf); scheduler.assignTasks(tracker("tt1")); scheduler.assignTasks(tracker("tt2")); scheduler.updateQueueUsageForTests(); LOG.info(job.getSchedulingInfo()); assertEquals(STR, 0, scheduler.getMapScheduler().getNumReservedTaskTrackers(job)); assertEquals( CapacityTaskScheduler.getJobQueueSchedInfo(1, 4, 0, 0, 0, 0), (String) job.getSchedulingInfo()); jConf = new JobConf(conf); jConf.setMemoryForMapTask(5 * 1024); jConf.setMemoryForReduceTask(1 * 1024); jConf.setNumMapTasks(1); jConf.setNumReduceTasks(0); jConf.setQueueName(STR); jConf.setUser("u1"); submitJobAndInit(JobStatus.PREP, jConf); checkAssignment("tt2", STR); } | /**
* Test to verify that TTs are not reserved in case the required memory
* exceeds the total availability of memory on TT.
* @throws IOException
*/ | Test to verify that TTs are not reserved in case the required memory exceeds the total availability of memory on TT | testTTReservingInHeterogenousEnvironment | {
"repo_name": "gndpig/hadoop-1.2",
"path": "src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java",
"license": "apache-2.0",
"size": 148941
} | [
"java.io.IOException",
"java.util.ArrayList"
] | import java.io.IOException; import java.util.ArrayList; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 577,889 |
public void clickToSeeMoreDocumentResults()
{
try
{
WebElement expandDocumentResults = drone.findAndWait(By.cssSelector(MORE_RESULTS));
expandDocumentResults.click();
}
catch (NoSuchElementException nse)
{
logger.error("No see more results icon " + nse);
throw new PageException("Unable to find see more results icon.", nse);
}
catch (TimeoutException te)
{
logger.error("Unable to find see more results icon. " + te);
throw new PageException("Unable to find see more results icon. ", te);
}
}
| void function() { try { WebElement expandDocumentResults = drone.findAndWait(By.cssSelector(MORE_RESULTS)); expandDocumentResults.click(); } catch (NoSuchElementException nse) { logger.error(STR + nse); throw new PageException(STR, nse); } catch (TimeoutException te) { logger.error(STR + te); throw new PageException(STR, te); } } | /**
* Clicks on see more results arrow
*
* @return
*/ | Clicks on see more results arrow | clickToSeeMoreDocumentResults | {
"repo_name": "loftuxab/community-edition-old",
"path": "projects/share-po/src/main/java/org/alfresco/po/share/search/LiveSearchDropdown.java",
"license": "lgpl-3.0",
"size": 12281
} | [
"org.alfresco.webdrone.exception.PageException",
"org.openqa.selenium.By",
"org.openqa.selenium.NoSuchElementException",
"org.openqa.selenium.TimeoutException",
"org.openqa.selenium.WebElement"
] | import org.alfresco.webdrone.exception.PageException; import org.openqa.selenium.By; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.TimeoutException; import org.openqa.selenium.WebElement; | import org.alfresco.webdrone.exception.*; import org.openqa.selenium.*; | [
"org.alfresco.webdrone",
"org.openqa.selenium"
] | org.alfresco.webdrone; org.openqa.selenium; | 2,900,590 |
@Override public void exitFunction(@NotNull PoCoParser.FunctionContext ctx) { } | @Override public void exitFunction(@NotNull PoCoParser.FunctionContext ctx) { } | /**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/ | The default implementation does nothing | enterFunction | {
"repo_name": "Corjuh/PoCo-Compiler",
"path": "src/com/poco/PoCoParser/PoCoParserBaseListener.java",
"license": "lgpl-2.1",
"size": 18510
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 1,869,345 |
@Input @Optional
public String getWindowTitle() {
return windowTitle;
} | @Input String function() { return windowTitle; } | /**
* Returns the text to appear in the window title.
*/ | Returns the text to appear in the window title | getWindowTitle | {
"repo_name": "HenryHarper/Acquire-Reboot",
"path": "gradle/src/scala/org/gradle/api/tasks/scala/ScalaDocOptions.java",
"license": "mit",
"size": 5538
} | [
"org.gradle.api.tasks.Input"
] | import org.gradle.api.tasks.Input; | import org.gradle.api.tasks.*; | [
"org.gradle.api"
] | org.gradle.api; | 50,753 |
public static WebArchive kitchensink() {
return ShrinkWrap.createFromZipFile(WebArchive.class, new File(KITCHENSINK));
} | static WebArchive function() { return ShrinkWrap.createFromZipFile(WebArchive.class, new File(KITCHENSINK)); } | /**
* Creates deployment which is sent to the container upon test's start.
*
* @return war file which is deployed while testing, the whole application in our case
*/ | Creates deployment which is sent to the container upon test's start | kitchensink | {
"repo_name": "jboss-developer/jboss-wfk-quickstarts",
"path": "spring-kitchensink-asyncrequestmapping/functional-tests/src/test/java/org/jboss/as/quickstarts/kitchensink/test/Deployments.java",
"license": "apache-2.0",
"size": 1618
} | [
"java.io.File",
"org.jboss.shrinkwrap.api.ShrinkWrap",
"org.jboss.shrinkwrap.api.spec.WebArchive"
] | import java.io.File; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.WebArchive; | import java.io.*; import org.jboss.shrinkwrap.api.*; import org.jboss.shrinkwrap.api.spec.*; | [
"java.io",
"org.jboss.shrinkwrap"
] | java.io; org.jboss.shrinkwrap; | 2,072,754 |
public static <A extends Annotation> A findAnnotation(Class<?> clazz, Class<A> annotationType) {
Assert.notNull(clazz, "Class must not be null");
A annotation = clazz.getAnnotation(annotationType);
if (annotation != null) {
return annotation;
}
for (Class<?> ifc : clazz.getInterfaces()) {
annotation = findAnnotation(ifc, annotationType);
if (annotation != null) {
return annotation;
}
}
if (clazz.getSuperclass() == null || Object.class.equals(clazz.getSuperclass())) {
return null;
}
return findAnnotation(clazz.getSuperclass(), annotationType);
} | static <A extends Annotation> A function(Class<?> clazz, Class<A> annotationType) { Assert.notNull(clazz, STR); A annotation = clazz.getAnnotation(annotationType); if (annotation != null) { return annotation; } for (Class<?> ifc : clazz.getInterfaces()) { annotation = findAnnotation(ifc, annotationType); if (annotation != null) { return annotation; } } if (clazz.getSuperclass() == null Object.class.equals(clazz.getSuperclass())) { return null; } return findAnnotation(clazz.getSuperclass(), annotationType); } | /**
* Find a single {@link Annotation} of <code>annotationType</code> from the
* supplied {@link Class}, traversing its interfaces and super classes
* if no annotation can be found on the given class itself.
* <p>This method explicitly handles class-level annotations which are not
* declared as {@link java.lang.annotation.Inherited inherited} as well as
* annotations on interfaces.
*
* @param clazz the class to look for annotations on
* @param annotationType the annotation class to look for
* @return the annotation of the given type found, or <code>null</code>
*/ | Find a single <code>Annotation</code> of <code>annotationType</code> from the supplied <code>Class</code>, traversing its interfaces and super classes if no annotation can be found on the given class itself. This method explicitly handles class-level annotations which are not declared as <code>java.lang.annotation.Inherited inherited</code> as well as annotations on interfaces | findAnnotation | {
"repo_name": "Gigaspaces/xap-openspaces",
"path": "src/main/java/org/openspaces/core/util/AnnotationUtils.java",
"license": "apache-2.0",
"size": 15824
} | [
"java.lang.annotation.Annotation",
"org.springframework.util.Assert"
] | import java.lang.annotation.Annotation; import org.springframework.util.Assert; | import java.lang.annotation.*; import org.springframework.util.*; | [
"java.lang",
"org.springframework.util"
] | java.lang; org.springframework.util; | 2,771,373 |
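A self-contained sketch of why the interface/superclass traversal in the findAnnotation record above matters: Class.getAnnotation alone misses an annotation placed on an implemented interface, while a recursive lookup like the one in the record finds it. The annotation and class names below are invented for the demo, and the helper is a trimmed re-statement of the record's logic rather than the OpenSpaces class itself.

import java.lang.annotation.Annotation;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

class FindAnnotationDemo {

    @Retention(RetentionPolicy.RUNTIME)
    @interface Marker {}

    @Marker
    interface Tagged {}

    static class Impl implements Tagged {}

    // Trimmed version of the lookup in the record: check the class itself, then its
    // interfaces, then recurse into the superclass.
    static <A extends Annotation> A findAnnotation(Class<?> clazz, Class<A> annotationType) {
        A annotation = clazz.getAnnotation(annotationType);
        if (annotation != null) {
            return annotation;
        }
        for (Class<?> ifc : clazz.getInterfaces()) {
            annotation = findAnnotation(ifc, annotationType);
            if (annotation != null) {
                return annotation;
            }
        }
        if (clazz.getSuperclass() == null || Object.class.equals(clazz.getSuperclass())) {
            return null;
        }
        return findAnnotation(clazz.getSuperclass(), annotationType);
    }

    public static void main(String[] args) {
        System.out.println(Impl.class.getAnnotation(Marker.class));   // null: getAnnotation ignores interfaces
        System.out.println(findAnnotation(Impl.class, Marker.class)); // non-null: found on the Tagged interface
    }
}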
public void doEdit_group(RunData data, Context context)
{
SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());
state.setAttribute("mode", "editGroup");
String id = data.getParameters().getString("id");
// get the group
Site site = (Site) state.getAttribute("site");
Group group = site.getGroup(id);
state.setAttribute("group", group);
} // doEdit_group | void function(RunData data, Context context) { SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid()); state.setAttribute("mode", STR); String id = data.getParameters().getString("id"); Site site = (Site) state.getAttribute("site"); Group group = site.getGroup(id); state.setAttribute("group", group); } | /**
* Edit an existing group.
*/ | Edit an existing group | doEdit_group | {
"repo_name": "kingmook/sakai",
"path": "site/site-tool/tool/src/java/org/sakaiproject/site/tool/AdminSitesAction.java",
"license": "apache-2.0",
"size": 77028
} | [
"org.sakaiproject.cheftool.Context",
"org.sakaiproject.cheftool.JetspeedRunData",
"org.sakaiproject.cheftool.RunData",
"org.sakaiproject.event.api.SessionState",
"org.sakaiproject.site.api.Group",
"org.sakaiproject.site.api.Site"
] | import org.sakaiproject.cheftool.Context; import org.sakaiproject.cheftool.JetspeedRunData; import org.sakaiproject.cheftool.RunData; import org.sakaiproject.event.api.SessionState; import org.sakaiproject.site.api.Group; import org.sakaiproject.site.api.Site; | import org.sakaiproject.cheftool.*; import org.sakaiproject.event.api.*; import org.sakaiproject.site.api.*; | [
"org.sakaiproject.cheftool",
"org.sakaiproject.event",
"org.sakaiproject.site"
] | org.sakaiproject.cheftool; org.sakaiproject.event; org.sakaiproject.site; | 2,459,033 |
T next() throws StandardException,IOException; | T next() throws StandardException,IOException; | /**
 * Get the next element in the iteration
* @return the next element in the iteration, or {@code null} if no
* more elements exist
*
* @throws StandardException
* @throws IOException
 */ | Get the next element in the iteration | next | {
"repo_name": "splicemachine/spliceengine",
"path": "splice_machine/src/main/java/com/splicemachine/derby/utils/StandardIterator.java",
"license": "agpl-3.0",
"size": 1822
} | [
"com.splicemachine.db.iapi.error.StandardException",
"java.io.IOException"
] | import com.splicemachine.db.iapi.error.StandardException; import java.io.IOException; | import com.splicemachine.db.iapi.error.*; import java.io.*; | [
"com.splicemachine.db",
"java.io"
] | com.splicemachine.db; java.io; | 155,133 |
public WorkflowDocument createWorkflowDocument(String documentTypeName, Person workflowUser)
throws WorkflowException; | WorkflowDocument function(String documentTypeName, Person workflowUser) throws WorkflowException; | /**
* Given a documentTypeName and workflowUser, returns a new workflowDocument from the workflow
* server.
*
* @param documentTypeName
* @param workflowUser
* @return newly-created workflowDocument instance
* @throws IllegalArgumentException if the given documentTypeName is blank
* @throws IllegalArgumentException if the given workflowUser is null or contains no id
* @throws ResourceUnavailableException
*/ | Given a documentTypeName and workflowUser, returns a new workflowDocument from the workflow server | createWorkflowDocument | {
"repo_name": "ua-eas/ua-rice-2.1.9",
"path": "krad/krad-web-framework/src/main/java/org/kuali/rice/krad/workflow/service/WorkflowDocumentService.java",
"license": "apache-2.0",
"size": 10974
} | [
"org.kuali.rice.kew.api.WorkflowDocument",
"org.kuali.rice.kew.api.exception.WorkflowException",
"org.kuali.rice.kim.api.identity.Person"
] | import org.kuali.rice.kew.api.WorkflowDocument; import org.kuali.rice.kew.api.exception.WorkflowException; import org.kuali.rice.kim.api.identity.Person; | import org.kuali.rice.kew.api.*; import org.kuali.rice.kew.api.exception.*; import org.kuali.rice.kim.api.identity.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 2,473,973 |
public void setNotificationPolicy(Boolean isEnabled) {
//Create a Runnable thread that will download needed playlist and video data
ThreadManager
.getInstance()
.sendVoidTask(new Thread(() -> {
settings.setNotificationPolicy(isEnabled);
if (ThreadManager.getExecutionPermission()) {
saveToJson();
}
}), TASK_TYPE.SETTING);
} | void function(Boolean isEnabled) { ThreadManager .getInstance() .sendVoidTask(new Thread(() -> { settings.setNotificationPolicy(isEnabled); if (ThreadManager.getExecutionPermission()) { saveToJson(); } }), TASK_TYPE.SETTING); } | /**
* Sets notification policy and saves it in SettingsManager
*
* @param isEnabled Should notifications be enabled
*/ | Sets notification policy and saves it in SettingsManager | setNotificationPolicy | {
"repo_name": "Open96/JYpm",
"path": "jypm-settings-manager/src/main/java/com/github/open96/jypm/settings/SettingsManager.java",
"license": "apache-2.0",
"size": 15102
} | [
"com.github.open96.jypm.thread.ThreadManager"
] | import com.github.open96.jypm.thread.ThreadManager; | import com.github.open96.jypm.thread.*; | [
"com.github.open96"
] | com.github.open96; | 1,098,669 |
private void withdrawFromBank() {
if (bankComms == null) {
msgTextBox.append("[*ERR*] Not logged in.\n");
return;
}
String credits = null;
while (true) {
credits = JOptionPane.showInputDialog("Credits to withdraw:", credits);
if (credits == null)
return;
try {
final int dummy = Integer.parseInt(credits);
if (dummy <= 0)
throw new NumberFormatException();
break;
} catch (final NumberFormatException e) {
JOptionPane.showMessageDialog(null, "Credits must be a positive integer.", "Invalid credits", JOptionPane.ERROR_MESSAGE);
}
}
final int currentCredits = hashChain.getLength();
if (currentCredits > 0) {
final Stack<byte[]> payment = hashChain.getNextCredits(currentCredits);
bankComms.sendPacket(DecryptedPacket.CMD_PAYMENT, payment.size() + ";" + Base64.encodeBase64String(payment.peek()));
}
System.out.println(Integer.parseInt(credits) + currentCredits);
getNewHashChain(Integer.parseInt(credits) + currentCredits);
}
| void function() { if (bankComms == null) { msgTextBox.append(STR); return; } String credits = null; while (true) { credits = JOptionPane.showInputDialog(STR, credits); if (credits == null) return; try { final int dummy = Integer.parseInt(credits); if (dummy <= 0) throw new NumberFormatException(); break; } catch (final NumberFormatException e) { JOptionPane.showMessageDialog(null, STR, STR, JOptionPane.ERROR_MESSAGE); } } final int currentCredits = hashChain.getLength(); if (currentCredits > 0) { final Stack<byte[]> payment = hashChain.getNextCredits(currentCredits); bankComms.sendPacket(DecryptedPacket.CMD_PAYMENT, payment.size() + ";" + Base64.encodeBase64String(payment.peek())); } System.out.println(Integer.parseInt(credits) + currentCredits); getNewHashChain(Integer.parseInt(credits) + currentCredits); } | /**
* Withdraw additional credits from the {@link Bank} and generate a new
* (larger) {@link CryptoCreditHashChain}.
*/ | Withdraw additional credits from the <code>Bank</code> and generate a new (larger) <code>CryptoCreditHashChain</code> | withdrawFromBank | {
"repo_name": "joshuaspence/StealthNet",
"path": "src/StealthNet/Client.java",
"license": "mit",
"size": 72896
} | [
"java.util.Stack",
"javax.swing.JOptionPane",
"org.apache.commons.codec.binary.Base64"
] | import java.util.Stack; import javax.swing.JOptionPane; import org.apache.commons.codec.binary.Base64; | import java.util.*; import javax.swing.*; import org.apache.commons.codec.binary.*; | [
"java.util",
"javax.swing",
"org.apache.commons"
] | java.util; javax.swing; org.apache.commons; | 1,201,958 |
public static AngleSensor create(DoubleSupplier angleSupplier) {
return new AngleSensor() {
private volatile double zero = 0; | static AngleSensor function(DoubleSupplier angleSupplier) { return new AngleSensor() { private volatile double zero = 0; | /**
* Construct a new AngleSensor based on a {@link DoubleSupplier}.
*
* @param angleSupplier the supplier method for the sensor
* @return the new AngleSensor
*/ | Construct a new AngleSensor based on a <code>DoubleSupplier</code> | create | {
"repo_name": "m3rcuriel/controve",
"path": "src/main/java/com/m3rcuriel/controve/components/AngleSensor.java",
"license": "mpl-2.0",
"size": 2079
} | [
"java.util.function.DoubleSupplier"
] | import java.util.function.DoubleSupplier; | import java.util.function.*; | [
"java.util"
] | java.util; | 1,412,165 |
private void destroyCleanUp(RegionEventImpl event, int serials[]) {
String rId = getRegionIdentifier();
try {
if (logger.isDebugEnabled()) {
logger.debug("PartitionedRegion#destroyCleanUp: Destroying region: {}", getFullPath());
}
sendDestroyRegionMessage(event, serials);
try {
// if this event is global destruction of the region everywhere, remove
// it from the pr root configuration
if (null != getPRRoot()) {
getPRRoot().destroy(rId);
}
} catch (EntryNotFoundException ex) {
if (logger.isDebugEnabled()) {
logger.debug("PartitionedRegion#destroyCleanup: caught exception", ex);
}
} catch (CancelException ignore) {
// ignore; metadata not accessible
}
} finally {
if (logger.isDebugEnabled()) {
logger.debug("PartitionedRegion#destroyCleanUp: " + "Destroyed region: {}", getFullPath());
}
}
} | void function(RegionEventImpl event, int serials[]) { String rId = getRegionIdentifier(); try { if (logger.isDebugEnabled()) { logger.debug(STR, getFullPath()); } sendDestroyRegionMessage(event, serials); try { if (null != getPRRoot()) { getPRRoot().destroy(rId); } } catch (EntryNotFoundException ex) { if (logger.isDebugEnabled()) { logger.debug(STR, ex); } } catch (CancelException ignore) { } } finally { if (logger.isDebugEnabled()) { logger.debug(STR + STR, getFullPath()); } } } | /**
* This method: <br>
* Sends DestroyRegionMessage to other nodes <br>
* Removes this PartitionedRegion from allPartitionedRegions <br>
* Destroys bucket2node region <br>
*
* @param event the RegionEvent that triggered the region clean up
*
* @see DestroyPartitionedRegionMessage
*/ | This method: Sends DestroyRegionMessage to other nodes Removes this PartitionedRegion from allPartitionedRegions Destroys bucket2node region | destroyCleanUp | {
"repo_name": "smgoller/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/PartitionedRegion.java",
"license": "apache-2.0",
"size": 379988
} | [
"org.apache.geode.CancelException",
"org.apache.geode.cache.EntryNotFoundException"
] | import org.apache.geode.CancelException; import org.apache.geode.cache.EntryNotFoundException; | import org.apache.geode.*; import org.apache.geode.cache.*; | [
"org.apache.geode"
] | org.apache.geode; | 782,203 |
public boolean execute(final String action,
JSONArray args,
CallbackContext callbackContext) throws JSONException {
mCallbackContext = callbackContext;
Log.v(TAG, "OnyxPlugin action: " + action);
mExecuteAction = action;
mArgs = args.getJSONObject(0);
if (!mArgs.has("onyxLicense") || !mArgs.has("action")) {
mPluginResult = new PluginResult(PluginResult.Status.ERROR);
mCallbackContext.error("Missing required parameters");
mCallbackContext.sendPluginResult(mPluginResult);
return true;
}
if (action.equalsIgnoreCase(PluginAction.MATCH.getKey())) {
mPluginAction = PluginAction.MATCH;
} else if (action.equalsIgnoreCase(PluginAction.CAPTURE.getKey())) {
mPluginAction = PluginAction.CAPTURE;
}
if (null != mPluginAction) {
switch (mPluginAction) {
case MATCH:
doMatch();
break;
case CAPTURE:
launchOnyx();
break;
}
} else {
onError("Invalid plugin action.");
}
return true;
    } | boolean function(final String action, JSONArray args, CallbackContext callbackContext) throws JSONException { mCallbackContext = callbackContext; Log.v(TAG, STR + action); mExecuteAction = action; mArgs = args.getJSONObject(0); if (!mArgs.has(STR) || !mArgs.has(STR)) { mPluginResult = new PluginResult(PluginResult.Status.ERROR); mCallbackContext.error(STR); mCallbackContext.sendPluginResult(mPluginResult); return true; } if (action.equalsIgnoreCase(PluginAction.MATCH.getKey())) { mPluginAction = PluginAction.MATCH; } else if (action.equalsIgnoreCase(PluginAction.CAPTURE.getKey())) { mPluginAction = PluginAction.CAPTURE; } if (null != mPluginAction) { switch (mPluginAction) { case MATCH: doMatch(); break; case CAPTURE: launchOnyx(); break; } } else { onError(STR); } return true; } | /**
* Executes the request and returns PluginResult.
*
* @param action The action to execute.
* @param args JSONArry of arguments for the plugin.
* @param callbackContext The callback id used when calling back into JavaScript.
* @return A PluginResult object with a status and message.
*/ | Executes the request and returns PluginResult | execute | {
"repo_name": "DFTinc/cordova-plugin-onyx",
"path": "src/android/OnyxPlugin.java",
"license": "apache-2.0",
"size": 9623
} | [
"android.util.Log",
"org.apache.cordova.CallbackContext",
"org.apache.cordova.PluginResult",
"org.json.JSONArray",
"org.json.JSONException"
] | import android.util.Log; import org.apache.cordova.CallbackContext; import org.apache.cordova.PluginResult; import org.json.JSONArray; import org.json.JSONException; | import android.util.*; import org.apache.cordova.*; import org.json.*; | [
"android.util",
"org.apache.cordova",
"org.json"
] | android.util; org.apache.cordova; org.json; | 1,287,089 |
@Test
public void testVarProperty() throws IOException
{
final GlobalDoc global = parse("varProperty.js");
assertEquals(1, global.getVariables().size());
final VariableDoc property = global.getVariables().iterator().next();
assertEquals("varProp", property.getName());
assertEquals("Property description.", property.getDescription());
assertEquals("number", property.getType().toExpression());
assertSame(global, property.getNamespace());
} | void function() throws IOException { final GlobalDoc global = parse(STR); assertEquals(1, global.getVariables().size()); final VariableDoc property = global.getVariables().iterator().next(); assertEquals(STR, property.getName()); assertEquals(STR, property.getDescription()); assertEquals(STR, property.getType().toExpression()); assertSame(global, property.getNamespace()); } | /**
* Tests parsing a var declared property.
*
* @throws IOException
* If JavaScript parsing fails.
*/ | Tests parsing a var declared property | testVarProperty | {
"repo_name": "kayahr/jasdoc",
"path": "src/test/java/de/ailis/jasdoc/doc/DocParserTest.java",
"license": "gpl-3.0",
"size": 38080
} | [
"java.io.IOException",
"org.junit.Assert"
] | import java.io.IOException; import org.junit.Assert; | import java.io.*; import org.junit.*; | [
"java.io",
"org.junit"
] | java.io; org.junit; | 1,303,709 |
public LinkedList<Beacon> getDetectedNearablesAndBeacons() {
LinkedList<Beacon> allBeaconsAndNearables = new LinkedList<>(detectedBeacons);
allBeaconsAndNearables.addAll(detectedNearables);
return allBeaconsAndNearables;
} | LinkedList<Beacon> function() { LinkedList<Beacon> allBeaconsAndNearables = new LinkedList<>(detectedBeacons); allBeaconsAndNearables.addAll(detectedNearables); return allBeaconsAndNearables; } | /**
* Returns list of all detected estimote Beacons and estimote Nearables.
*
* @return
*/ | Returns list of all detected estimote Beacons and estimote Nearables | getDetectedNearablesAndBeacons | {
"repo_name": "IoSL-INav/android",
"path": "app/src/main/java/de/tu_berlin/indoornavigation/singletons/LocationSharingSingleton.java",
"license": "apache-2.0",
"size": 10800
} | [
"de.tu_berlin.indoornavigation.entities.Beacon",
"java.util.LinkedList"
] | import de.tu_berlin.indoornavigation.entities.Beacon; import java.util.LinkedList; | import de.tu_berlin.indoornavigation.entities.*; import java.util.*; | [
"de.tu_berlin.indoornavigation",
"java.util"
] | de.tu_berlin.indoornavigation; java.util; | 627,022 |
public List<ParamTag> getParams()
{
return m_params;
} | List<ParamTag> function() { return m_params; } | /**
* Get the List of ParamTags defined by this node.
*
* @return the List of ParamTags
*/ | Get the List of ParamTags defined by this node | getParams | {
"repo_name": "bponsler/rosjava_roslaunch",
"path": "src/org/ros/rosjava/roslaunch/parsing/NodeTag.java",
"license": "bsd-3-clause",
"size": 16436
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,124,814 |
Map<Post, User> getAuthorsOfPosts(Post[] posts); | Map<Post, User> getAuthorsOfPosts(Post[] posts); | /**
* for each post, this method downloads the User that
* authored it and then maps the post to the author.
*
* @param posts the posts in question
* @return the map of all the posts and their authors.
*/ | for each post, this method downloads the User that authored it and then maps the post to the author | getAuthorsOfPosts | {
"repo_name": "nareddyt/Bitter",
"path": "app/src/main/java/gitmad/bitter/data/UserProvider.java",
"license": "gpl-2.0",
"size": 1958
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,781,751 |
private Object construct(Constructor<?> con, Object[] parameterValues) {
for(IOCListener l : listeners){
l.listenConstruct(con, parameterValues);
}
try {
return con.newInstance(parameterValues);
} catch (Exception e) {
return null;
}
} | Object function(Constructor<?> con, Object[] parameterValues) { for(IOCListener l : listeners){ l.listenConstruct(con, parameterValues); } try { return con.newInstance(parameterValues); } catch (Exception e) { return null; } } | /**
* construct with given class and parameter values.
*
* @param con constructor to call
* @param parameterValues parameter values
* @return new instance generated by the constructor and pv
*/ | construct with given class and parameter values | construct | {
"repo_name": "wkgcass/common",
"path": "Pure/src/cass/pure/ioc/IOCController.java",
"license": "mit",
"size": 10177
} | [
"java.lang.reflect.Constructor"
] | import java.lang.reflect.Constructor; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 524,601 |
@SideOnly(Side.CLIENT)
public int getSlotFor(ItemStack stack)
{
for (int i = 0; i < this.mainInventory.size(); ++i)
{
if (!((ItemStack)this.mainInventory.get(i)).isEmpty() && this.stackEqualExact(stack, (ItemStack)this.mainInventory.get(i)))
{
return i;
}
}
return -1;
} | @SideOnly(Side.CLIENT) int function(ItemStack stack) { for (int i = 0; i < this.mainInventory.size(); ++i) { if (!((ItemStack)this.mainInventory.get(i)).isEmpty() && this.stackEqualExact(stack, (ItemStack)this.mainInventory.get(i))) { return i; } } return -1; } | /**
* Finds the stack or an equivalent one in the main inventory
*/ | Finds the stack or an equivalent one in the main inventory | getSlotFor | {
"repo_name": "SuperUnitato/UnLonely",
"path": "build/tmp/recompileMc/sources/net/minecraft/entity/player/InventoryPlayer.java",
"license": "lgpl-2.1",
"size": 27035
} | [
"net.minecraft.item.ItemStack",
"net.minecraftforge.fml.relauncher.Side",
"net.minecraftforge.fml.relauncher.SideOnly"
] | import net.minecraft.item.ItemStack; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; | import net.minecraft.item.*; import net.minecraftforge.fml.relauncher.*; | [
"net.minecraft.item",
"net.minecraftforge.fml"
] | net.minecraft.item; net.minecraftforge.fml; | 1,680,754 |
@Test
public void testQueryNonEscapedSemiColon() throws Throwable {
String SCRIPT_TEXT = "drop table if exists nonEscapedSemiColon;create table nonEscapedSemiColon "
+ "(key int, value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ';';show tables;";
String EXPECTED_PATTERN = "nonescapedsemicolon";
List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
testScriptFile(SCRIPT_TEXT, argList, EXPECTED_PATTERN, true);
//look for the " nonEscapedSemiColon " in the query text not the table name which comes
//in the result
EXPECTED_PATTERN = " nonEscapedSemiColon ";
testScriptFile(SCRIPT_TEXT, argList, OutStream.ERR, EXPECTED_PATTERN, true);
testScriptFile(SCRIPT_TEXT, argList, OutStream.OUT, EXPECTED_PATTERN, false);
} | void function() throws Throwable { String SCRIPT_TEXT = STR + STR; String EXPECTED_PATTERN = STR; List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL()); testScriptFile(SCRIPT_TEXT, argList, EXPECTED_PATTERN, true); EXPECTED_PATTERN = STR; testScriptFile(SCRIPT_TEXT, argList, OutStream.ERR, EXPECTED_PATTERN, true); testScriptFile(SCRIPT_TEXT, argList, OutStream.OUT, EXPECTED_PATTERN, false); } | /**
* Test that Beeline queries don't treat semicolons inside quotations as query-ending characters.
*/ | Test that Beeline queries don't treat semicolons inside quotations as query-ending characters | testQueryNonEscapedSemiColon | {
"repo_name": "vineetgarg02/hive",
"path": "itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java",
"license": "apache-2.0",
"size": 46744
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 982,531 |
public void deleteBucket(DeleteBucketRequest deleteBucketRequest)
throws AmazonClientException, AmazonServiceException; | void function(DeleteBucketRequest deleteBucketRequest) throws AmazonClientException, AmazonServiceException; | /**
* <p>
* Deletes the specified bucket. All objects (and all object versions, if versioning
* was ever enabled) in the bucket must be deleted before the bucket itself
* can be deleted.
* </p>
* <p>
* Only the owner of a bucket can delete it, regardless of the bucket's
* access control policy (ACL).
* </p>
*
* @param deleteBucketRequest
* The request object containing all options for deleting an Amazon S3
* bucket.
* @throws AmazonClientException
* If any errors are encountered in the client while making the
* request or handling the response.
* @throws AmazonServiceException
* If any errors occurred in Amazon S3 while processing the
* request.
*
* @see AmazonS3#deleteBucket(String)
*/ | Deletes the specified bucket. All objects (and all object versions, if versioning was ever enabled) in the bucket must be deleted before the bucket itself can be deleted. Only the owner of a bucket can delete it, regardless of the bucket's access control policy (ACL). | deleteBucket | {
"repo_name": "mhurne/aws-sdk-java",
"path": "aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/AmazonS3.java",
"license": "apache-2.0",
"size": 211153
} | [
"com.amazonaws.AmazonClientException",
"com.amazonaws.AmazonServiceException",
"com.amazonaws.services.s3.model.DeleteBucketRequest"
] | import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.services.s3.model.DeleteBucketRequest; | import com.amazonaws.*; import com.amazonaws.services.s3.model.*; | [
"com.amazonaws",
"com.amazonaws.services"
] | com.amazonaws; com.amazonaws.services; | 2,453,440 |
public int skipBytes(int n) throws IOException {
long pos = getStreamPosition();
seek(pos + n);
return (int)(getStreamPosition() - pos);
} | int function(int n) throws IOException { long pos = getStreamPosition(); seek(pos + n); return (int)(getStreamPosition() - pos); } | /**
* Advances the current stream position by calling
* <code>seek(getStreamPosition() + n)</code>.
*
* <p> The bit offset is reset to zero.
*
* @param n the number of bytes to seek forward.
*
* @return an <code>int</code> representing the number of bytes
* skipped.
*
* @exception IOException if <code>getStreamPosition</code>
* throws an <code>IOException</code> when computing either
* the starting or ending position.
*/ | Advances the current stream position by calling <code>seek(getStreamPosition() + n)</code>. The bit offset is reset to zero | skipBytes | {
"repo_name": "shun634501730/java_source_cn",
"path": "src_en/javax/imageio/stream/ImageInputStreamImpl.java",
"license": "apache-2.0",
"size": 27381
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,303,129 |
public void setBookmarkIcon(Icon icon) {
removeBookmarkTrackingIcons();
bookmarkIcon = icon;
repaint();
}
| void function(Icon icon) { removeBookmarkTrackingIcons(); bookmarkIcon = icon; repaint(); } | /**
* Sets the icon to use for bookmarks. Any previous bookmark icons
* are removed.
*
* @param icon The new bookmark icon. If this is <code>null</code>,
* bookmarking is effectively disabled.
* @see #getBookmarkIcon()
* @see #isBookmarkingEnabled()
*/ | Sets the icon to use for bookmarks. Any previous bookmark icons are removed | setBookmarkIcon | {
"repo_name": "thomasgalvin/ThirdParty",
"path": "RText/RText-Editor/src/main/java/org/fife/ui/rtextarea/IconRowHeader.java",
"license": "apache-2.0",
"size": 21561
} | [
"javax.swing.Icon"
] | import javax.swing.Icon; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 2,066,632 |
public void onTick()
{
final GuiScreen guiScreen = Minecraft.getMinecraft().currentScreen;
if (guiScreen != null)
{
onTickInGui(guiScreen);
}
} | void function() { final GuiScreen guiScreen = Minecraft.getMinecraft().currentScreen; if (guiScreen != null) { onTickInGui(guiScreen); } } | /**
* Runs this tick handler if appropriate.
*/ | Runs this tick handler if appropriate | onTick | {
"repo_name": "MrPonyCaptain/minecraft-comes-alive",
"path": "Minecraft/1.7.10/src/main/java/mca/core/forge/ClientTickHandler.java",
"license": "gpl-3.0",
"size": 5474
} | [
"net.minecraft.client.Minecraft",
"net.minecraft.client.gui.GuiScreen"
] | import net.minecraft.client.Minecraft; import net.minecraft.client.gui.GuiScreen; | import net.minecraft.client.*; import net.minecraft.client.gui.*; | [
"net.minecraft.client"
] | net.minecraft.client; | 2,869,347 |
Collection<Truck> findByTwitterId(String screenName); | Collection<Truck> findByTwitterId(String screenName); | /**
* Returns the trucks by their twitter handle. It is possible for more than one truck to be
* associated with a twitter handle
* @param screenName screenName
* @return the list of trucks associated with the twitter handle (or empty if none found)
*/ | Returns the trucks by their twitter handle. It is possible for more than one truck to be associated with a twitter handle | findByTwitterId | {
"repo_name": "aviolette/foodtrucklocator",
"path": "main/src/main/java/foodtruck/dao/TruckDAO.java",
"license": "mit",
"size": 1711
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 82,941 |
public LibraryHandleAdapter getLibraryHandleAdapter( )
{
return getLibraryHandleAdapter( SessionHandleAdapter.getInstance( )
.getReportDesignHandle( ) );
} | LibraryHandleAdapter function( ) { return getLibraryHandleAdapter( SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) ); } | /**
* Get library handle adapter
*
* @return LibraryHandleAdapter
*/ | Get library handle adapter | getLibraryHandleAdapter | {
"repo_name": "sguan-actuate/birt",
"path": "UI/org.eclipse.birt.report.designer.core/src/org/eclipse/birt/report/designer/core/model/schematic/HandleAdapterFactory.java",
"license": "epl-1.0",
"size": 18803
} | [
"org.eclipse.birt.report.designer.core.model.LibraryHandleAdapter",
"org.eclipse.birt.report.designer.core.model.SessionHandleAdapter"
] | import org.eclipse.birt.report.designer.core.model.LibraryHandleAdapter; import org.eclipse.birt.report.designer.core.model.SessionHandleAdapter; | import org.eclipse.birt.report.designer.core.model.*; | [
"org.eclipse.birt"
] | org.eclipse.birt; | 1,948,569 |
public LanguageModel getLanguageModel()
{
return languageModel;
} | LanguageModel function() { return languageModel; } | /**
* Retrieves the language model for this linguist
*
* @return the language model (or null if there is none)
*/ | Retrieves the language model for this linguist | getLanguageModel | {
"repo_name": "Strauss5805/MyDocks",
"path": "src/PostProcessor/SphinxBased/MyLexTreeLinguist.java",
"license": "agpl-3.0",
"size": 59953
} | [
"edu.cmu.sphinx.linguist.language.ngram.LanguageModel"
] | import edu.cmu.sphinx.linguist.language.ngram.LanguageModel; | import edu.cmu.sphinx.linguist.language.ngram.*; | [
"edu.cmu.sphinx"
] | edu.cmu.sphinx; | 1,063,471 |
return new ArrayList(this.values());
}
| return new ArrayList(this.values()); } | /**
* Gets the groups as a list of groups
*
* @return A list of groups
*/ | Gets the groups as a list of groups | getGroupsList | {
"repo_name": "davidlad123/spine",
"path": "spine/build/spine-0.9-src/src/com/zphinx/spine/utils/ActiveGroups.java",
"license": "gpl-3.0",
"size": 10587
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 2,307,454 |
MainApp.instance = this;
MainApp.tokenRequestInterceptor = new TokenRequestInterceptor();
// Set OkHttpClient
HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor();
interceptor.setLevel(HttpLoggingInterceptor.Level.BODY);
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.addInterceptor(interceptor);
builder.addInterceptor(MainApp.tokenRequestInterceptor);
okHttpClient = builder.build();
// Set Retrofit
GsonBuilder gsonBuilder = new GsonBuilder();
gsonBuilder.registerTypeAdapter(Date.class, new DateDeserializer());
gsonBuilder.excludeFieldsWithoutExposeAnnotation();
Gson gson = gsonBuilder.create();
Builder retrofitBuilder = new Retrofit.Builder();
retrofitBuilder.baseUrl(Globals.API_SERVER_URL);
retrofitBuilder.addConverterFactory(GsonConverterFactory.create(gson));
retrofitBuilder.client(okHttpClient);
MainApp.retrofit = retrofitBuilder.build();
RestService restService = retrofit.create(RestService.class);
// Set EventBus
MainApp.eventBus = new EventBus();
// Set RssApi
MainApp.rssApi = new RssApiImpl(restService, MainApp.eventBus);
this.primaryStage = primaryStage;
SplashScreenScene scene = new SplashScreenScene(this);
scene.launchSplashScreen();
} | MainApp.instance = this; MainApp.tokenRequestInterceptor = new TokenRequestInterceptor(); HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor(); interceptor.setLevel(HttpLoggingInterceptor.Level.BODY); OkHttpClient.Builder builder = new OkHttpClient.Builder(); builder.addInterceptor(interceptor); builder.addInterceptor(MainApp.tokenRequestInterceptor); okHttpClient = builder.build(); GsonBuilder gsonBuilder = new GsonBuilder(); gsonBuilder.registerTypeAdapter(Date.class, new DateDeserializer()); gsonBuilder.excludeFieldsWithoutExposeAnnotation(); Gson gson = gsonBuilder.create(); Builder retrofitBuilder = new Retrofit.Builder(); retrofitBuilder.baseUrl(Globals.API_SERVER_URL); retrofitBuilder.addConverterFactory(GsonConverterFactory.create(gson)); retrofitBuilder.client(okHttpClient); MainApp.retrofit = retrofitBuilder.build(); RestService restService = retrofit.create(RestService.class); MainApp.eventBus = new EventBus(); MainApp.rssApi = new RssApiImpl(restService, MainApp.eventBus); this.primaryStage = primaryStage; SplashScreenScene scene = new SplashScreenScene(this); scene.launchSplashScreen(); } | /**
* Starts the primary stage. Launches the Splash Screen Scene.
*/ | Starts the primary stage. Launches the Splash Screen Scene | start | {
"repo_name": "RSSAggregatorProject/DesktopApp",
"path": "src/com/rssaggregator/desktop/MainApp.java",
"license": "apache-2.0",
"size": 4018
} | [
"com.google.common.eventbus.EventBus",
"com.google.gson.Gson",
"com.google.gson.GsonBuilder",
"com.rssaggregator.desktop.network.RestService",
"com.rssaggregator.desktop.network.RssApiImpl",
"com.rssaggregator.desktop.utils.DateDeserializer",
"com.rssaggregator.desktop.utils.Globals",
"com.rssaggregator.desktop.utils.TokenRequestInterceptor",
"java.util.Date"
] | import com.google.common.eventbus.EventBus; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.rssaggregator.desktop.network.RestService; import com.rssaggregator.desktop.network.RssApiImpl; import com.rssaggregator.desktop.utils.DateDeserializer; import com.rssaggregator.desktop.utils.Globals; import com.rssaggregator.desktop.utils.TokenRequestInterceptor; import java.util.Date; | import com.google.common.eventbus.*; import com.google.gson.*; import com.rssaggregator.desktop.network.*; import com.rssaggregator.desktop.utils.*; import java.util.*; | [
"com.google.common",
"com.google.gson",
"com.rssaggregator.desktop",
"java.util"
] | com.google.common; com.google.gson; com.rssaggregator.desktop; java.util; | 2,364,871 |
private static MethodCache createMethodCache(Class<?> classToReflect, Log log) {
//
// Build a list of all elements in the class hierarchy. This one is bottom-first (i.e. we start
// with the actual declaring class and its interfaces and then move up (superclass etc.) until we
// hit java.lang.Object. That is important because it will give us the methods of the declaring class
// which might in turn be abstract further up the tree.
//
// We also ignore all SecurityExceptions that might happen due to SecurityManager restrictions (prominently
// hit with Tomcat 5.5).
//
// We can also omit all that complicated getPublic, getAccessible and upcast logic that the class map had up
// until Velocity 1.4. As we always reflect all elements of the tree (that's what we have a cache for), we will
// hit the public elements sooner or later because we reflect all the public elements anyway.
//
// Ah, the miracles of Java for(;;) ...
MethodCache cache = new MethodCache();
for (; classToReflect != null; classToReflect = classToReflect.getSuperclass()) {
if (Modifier.isPublic(classToReflect.getModifiers())) {
populateMethodCacheWith(cache, classToReflect, log);
}
Class<?>[] interfaces = classToReflect.getInterfaces();
for (int i = 0; i < interfaces.length; i++) {
populateMethodCacheWithInterface(cache, interfaces[i], log);
}
}
return cache;
} | static MethodCache function(Class<?> classToReflect, Log log) { MethodCache cache = new MethodCache(); for (; classToReflect != null; classToReflect = classToReflect.getSuperclass()) { if (Modifier.isPublic(classToReflect.getModifiers())) { populateMethodCacheWith(cache, classToReflect, log); } Class<?>[] interfaces = classToReflect.getInterfaces(); for (int i = 0; i < interfaces.length; i++) { populateMethodCacheWithInterface(cache, interfaces[i], log); } } return cache; } | /**
* Populate the Map of direct hits. These are taken from all the public methods
* that our class, its parents and their implemented interfaces provide.
* @param classToReflect the class to cache
* @param log the Log
* @return a newly allocated & filled up cache
*/ | Populate the Map of direct hits. These are taken from all the public methods that our class, its parents and their implemented interfaces provide | createMethodCache | {
"repo_name": "InsomniaxGaming/OWHInternals",
"path": "src/org/apache/commons/jexl2/internal/introspection/ClassMap.java",
"license": "gpl-2.0",
"size": 13949
} | [
"java.lang.reflect.Modifier",
"org.apache.commons.logging.Log"
] | import java.lang.reflect.Modifier; import org.apache.commons.logging.Log; | import java.lang.reflect.*; import org.apache.commons.logging.*; | [
"java.lang",
"org.apache.commons"
] | java.lang; org.apache.commons; | 157,125 |
private void startWaitingForReplicaCalls(List<Action<Row>> actionsForReplicaThread) {
long startTime = EnvironmentEdgeManager.currentTime();
ReplicaCallIssuingRunnable replicaRunnable = new ReplicaCallIssuingRunnable(
actionsForReplicaThread, startTime);
if (primaryCallTimeoutMicroseconds == 0) {
// Start replica calls immediately.
replicaRunnable.run();
} else {
// Start the thread that may kick off replica gets.
// TODO: we could do it on the same thread, but it's a user thread, might be a bad idea.
try {
pool.submit(replicaRunnable);
} catch (RejectedExecutionException ree) {
LOG.warn("#" + id + ", replica task was rejected by the pool - no replica calls", ree);
}
}
} | void function(List<Action<Row>> actionsForReplicaThread) { long startTime = EnvironmentEdgeManager.currentTime(); ReplicaCallIssuingRunnable replicaRunnable = new ReplicaCallIssuingRunnable( actionsForReplicaThread, startTime); if (primaryCallTimeoutMicroseconds == 0) { replicaRunnable.run(); } else { try { pool.submit(replicaRunnable); } catch (RejectedExecutionException ree) { LOG.warn("#" + id + STR, ree); } } } | /**
* Starts waiting to issue replica calls on a different thread; or issues them immediately.
*/ | Starts waiting to issue replica calls on a different thread; or issues them immediately | startWaitingForReplicaCalls | {
"repo_name": "narendragoyal/hbase",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java",
"license": "apache-2.0",
"size": 74609
} | [
"java.util.List",
"java.util.concurrent.RejectedExecutionException",
"org.apache.hadoop.hbase.util.EnvironmentEdgeManager"
] | import java.util.List; import java.util.concurrent.RejectedExecutionException; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; | import java.util.*; import java.util.concurrent.*; import org.apache.hadoop.hbase.util.*; | [
"java.util",
"org.apache.hadoop"
] | java.util; org.apache.hadoop; | 68,592 |
void setAdminCmsObject(CmsObject adminCms); | void setAdminCmsObject(CmsObject adminCms); | /**
* Sets the admin CmsObject.
*
* @param adminCms a CmsObject with admin privileges
*/ | Sets the admin CmsObject | setAdminCmsObject | {
"repo_name": "alkacon/opencms-core",
"path": "src/org/opencms/configuration/I_CmsNeedsAdminCmsObject.java",
"license": "lgpl-2.1",
"size": 1581
} | [
"org.opencms.file.CmsObject"
] | import org.opencms.file.CmsObject; | import org.opencms.file.*; | [
"org.opencms.file"
] | org.opencms.file; | 2,032,944 |
EClass getExtensionType(); | EClass getExtensionType(); | /**
* Returns the meta object for class '{@link org.w3._2001.schema.ExtensionType <em>Extension Type</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Extension Type</em>'.
* @see org.w3._2001.schema.ExtensionType
* @generated
*/ | Returns the meta object for class '<code>org.w3._2001.schema.ExtensionType Extension Type</code>'. | getExtensionType | {
"repo_name": "geotools/geotools",
"path": "modules/ogc/net.opengis.wps/src/org/w3/_2001/schema/SchemaPackage.java",
"license": "lgpl-2.1",
"size": 433240
} | [
"org.eclipse.emf.ecore.EClass"
] | import org.eclipse.emf.ecore.EClass; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,878,487 |
@Override
ValueNode bindExpression(
FromList fromList, SubqueryList subqueryList, List<AggregateNode> aggregates)
throws StandardException
{
TypeId operandType;
bindOperand(fromList, subqueryList, aggregates);
operandType = operand.getTypeId();
switch (operandType.getJDBCTypeId())
{
case Types.CHAR:
case Types.VARCHAR:
case Types.BINARY:
case Types.VARBINARY:
case Types.LONGVARBINARY:
case Types.LONGVARCHAR:
case Types.BLOB:
case Types.CLOB:
break;
default:
throw StandardException.newException(SQLState.LANG_UNARY_FUNCTION_BAD_TYPE,
getOperatorString(),
operandType.getSQLTypeName());
}
setType(new DataTypeDescriptor(
TypeId.INTEGER_ID,
operand.getTypeServices().isNullable()
)
);
return this;
} | ValueNode bindExpression( FromList fromList, SubqueryList subqueryList, List<AggregateNode> aggregates) throws StandardException { TypeId operandType; bindOperand(fromList, subqueryList, aggregates); operandType = operand.getTypeId(); switch (operandType.getJDBCTypeId()) { case Types.CHAR: case Types.VARCHAR: case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: case Types.LONGVARCHAR: case Types.BLOB: case Types.CLOB: break; default: throw StandardException.newException(SQLState.LANG_UNARY_FUNCTION_BAD_TYPE, getOperatorString(), operandType.getSQLTypeName()); } setType(new DataTypeDescriptor( TypeId.INTEGER_ID, operand.getTypeServices().isNullable() ) ); return this; } | /**
* Bind this operator
*
* @param fromList The query's FROM list
* @param subqueryList The subquery list being built as we find SubqueryNodes
* @param aggregates The aggregate list being built as we find AggregateNodes
*
* @return The new top of the expression tree.
*
* @exception StandardException Thrown on error
*/ | Bind this operator | bindExpression | {
"repo_name": "apache/derby",
"path": "java/org.apache.derby.engine/org/apache/derby/impl/sql/compile/LengthOperatorNode.java",
"license": "apache-2.0",
"size": 4217
} | [
"java.sql.Types",
"java.util.List",
"org.apache.derby.iapi.types.DataTypeDescriptor",
"org.apache.derby.iapi.types.TypeId",
"org.apache.derby.shared.common.error.StandardException",
"org.apache.derby.shared.common.reference.SQLState"
] | import java.sql.Types; import java.util.List; import org.apache.derby.iapi.types.DataTypeDescriptor; import org.apache.derby.iapi.types.TypeId; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.shared.common.reference.SQLState; | import java.sql.*; import java.util.*; import org.apache.derby.iapi.types.*; import org.apache.derby.shared.common.error.*; import org.apache.derby.shared.common.reference.*; | [
"java.sql",
"java.util",
"org.apache.derby"
] | java.sql; java.util; org.apache.derby; | 2,745,108 |
public void gotoLine(int line) {
Element element =
getDocument().getDefaultRootElement().getElement(line);
if (element == null) { return; }
int pos = element.getStartOffset();
setCaretPosition(pos);
} | void function(int line) { Element element = getDocument().getDefaultRootElement().getElement(line); if (element == null) { return; } int pos = element.getStartOffset(); setCaretPosition(pos); } | /** Move the cursor to the specified line
 * if an exception occurs the cursor does not change
* @param line the specified line number
 */ | Move the cursor to the specified line if an exception occurs the cursor does not change | gotoLine | {
"repo_name": "adufilie/flex-sdk",
"path": "modules/thirdparty/batik/sources/org/apache/flex/forks/batik/util/gui/xmleditor/XMLTextEditor.java",
"license": "apache-2.0",
"size": 3386
} | [
"javax.swing.text.Element"
] | import javax.swing.text.Element; | import javax.swing.text.*; | [
"javax.swing"
] | javax.swing; | 1,088,514 |
@Override
public boolean onMenuItemClick(MenuItem item)
{
return false;
} | boolean function(MenuItem item) { return false; } | /**
* This method will be invoked when a menu item is clicked if the item itself did
* not already handle the event.
*
* @param item {@link MenuItem} that was clicked
* @return <code>true</code> if the event was handled, <code>false</code> otherwise.
*/ | This method will be invoked when a menu item is clicked if the item itself did not already handle the event | onMenuItemClick | {
"repo_name": "Mithrandir21/RelationshipPoints",
"path": "app/src/main/java/com/bahram/relationshippoints/GUI/TimelineList/Timeline/TimelineEvents/TimelineViewHolders/TimelineAchievement.java",
"license": "gpl-3.0",
"size": 4158
} | [
"android.view.MenuItem"
] | import android.view.MenuItem; | import android.view.*; | [
"android.view"
] | android.view; | 1,330,049 |
public void testChoiceXmlContentDefinitionCreation() throws Exception {
CmsObject cms = getCmsObject();
echo("Testing XML content definition object generation for a schema that contains xsd:choice");
CmsXmlEntityResolver resolver = new CmsXmlEntityResolver(cms);
// fire "clear cache" event to clear up previously cached schemas
OpenCms.fireCmsEvent(new CmsEvent(I_CmsEventListener.EVENT_CLEAR_CACHES, new HashMap<String, Object>()));
// read and cache the sub-schemas
cacheXmlSchema(
"org/opencms/xml/content/xmlcontent-choice-definition-1-subA.xsd",
"http://www.opencms.org/choice-definition1-subA.xsd");
cacheXmlSchema(
"org/opencms/xml/content/xmlcontent-choice-definition-1-subB.xsd",
"http://www.opencms.org/choice-definition1-subB.xsd");
cacheXmlSchema(
"org/opencms/xml/content/xmlcontent-choice-definition-1-subC.xsd",
"http://www.opencms.org/choice-definition1-subC.xsd");
// now read the XML from the given file and store it in the resolver
String schema = CmsFileUtil.readFile(
"org/opencms/xml/content/xmlcontent-choice-definition-1.xsd",
CmsEncoder.ENCODING_UTF_8);
// the point of this test really is that there is no exception thrown here if xsd:choice is in the schema
CmsXmlContentDefinition definition = CmsXmlContentDefinition.unmarshal(
schema,
"http://www.opencms.org/testChoice1.xsd",
resolver);
System.out.println(definition.getSchema().asXML());
CmsXmlEntityResolver.cacheSystemId(
"http://www.opencms.org/testChoice1.xsd",
definition.getSchema().asXML().getBytes(CmsEncoder.ENCODING_UTF_8));
assertSame(
"Content definition sequence not of required type SEQUENCE",
definition.getSequenceType(),
CmsXmlContentDefinition.SequenceType.SEQUENCE);
assertTrue(
"Content definition sequence choice maxOccurs is " + definition.getChoiceMaxOccurs() + " but must be 0",
definition.getChoiceMaxOccurs() == 0);
// now read the XML content
byte[] content = CmsFileUtil.readFile("org/opencms/xml/content/xmlcontent-choice-1.xml");
// validate the XML structure
CmsXmlUtils.validateXmlStructure(content, resolver);
// now create an XML content from the file with the xsd:choice content definition
CmsXmlContentFactory.unmarshal(
new String(content, CmsEncoder.ENCODING_UTF_8),
CmsEncoder.ENCODING_UTF_8,
resolver);
CmsXmlNestedContentDefinition nestA = (CmsXmlNestedContentDefinition)definition.getSchemaType("ChoiceTestA");
CmsXmlNestedContentDefinition nestB = (CmsXmlNestedContentDefinition)definition.getSchemaType("ChoiceTestB");
CmsXmlNestedContentDefinition nestC = (CmsXmlNestedContentDefinition)definition.getSchemaType("ChoiceTestC");
CmsXmlContentDefinition testA = nestA.getNestedContentDefinition();
CmsXmlContentDefinition testB = nestB.getNestedContentDefinition();
CmsXmlContentDefinition testC = nestC.getNestedContentDefinition();
assertSame(
"Choice sequence A not of required type MULTIPLE_CHOICE",
testA.getSequenceType(),
CmsXmlContentDefinition.SequenceType.MULTIPLE_CHOICE);
assertTrue(
"Choice sequence A maxOccurs is " + testA.getChoiceMaxOccurs() + " but must be 5",
testA.getChoiceMaxOccurs() == 5);
assertSame(
"Choice sequence B not of required type MULTIPLE_CHOICE",
testB.getSequenceType(),
CmsXmlContentDefinition.SequenceType.MULTIPLE_CHOICE);
assertTrue(
"Choice sequence B maxOccurs is " + testB.getChoiceMaxOccurs() + " but must be 5",
testB.getChoiceMaxOccurs() == 5);
assertSame(
"Choice sequence C not of required type SINGLE_CHOICE",
testC.getSequenceType(),
CmsXmlContentDefinition.SequenceType.SINGLE_CHOICE);
assertTrue(
"Choice sequence C maxOccurs is " + testC.getChoiceMaxOccurs() + " but must be 1",
testC.getChoiceMaxOccurs() == 1);
    } | void function() throws Exception { CmsObject cms = getCmsObject(); echo(STR); CmsXmlEntityResolver resolver = new CmsXmlEntityResolver(cms); OpenCms.fireCmsEvent(new CmsEvent(I_CmsEventListener.EVENT_CLEAR_CACHES, new HashMap<String, Object>())); cacheXmlSchema( STR, STR); cacheXmlSchema( STR, STR); cacheXmlSchema( STR, STR); String schema = CmsFileUtil.readFile( STR, CmsEncoder.ENCODING_UTF_8); CmsXmlContentDefinition definition = CmsXmlContentDefinition.unmarshal( schema, STR, resolver); System.out.println(definition.getSchema().asXML()); CmsXmlEntityResolver.cacheSystemId( STR, definition.getSchema().asXML().getBytes(CmsEncoder.ENCODING_UTF_8)); assertSame( STR, definition.getSequenceType(), CmsXmlContentDefinition.SequenceType.SEQUENCE); assertTrue( STR + definition.getChoiceMaxOccurs() + STR, definition.getChoiceMaxOccurs() == 0); byte[] content = CmsFileUtil.readFile(STR); CmsXmlUtils.validateXmlStructure(content, resolver); CmsXmlContentFactory.unmarshal( new String(content, CmsEncoder.ENCODING_UTF_8), CmsEncoder.ENCODING_UTF_8, resolver); CmsXmlNestedContentDefinition nestA = (CmsXmlNestedContentDefinition)definition.getSchemaType(STR); CmsXmlNestedContentDefinition nestB = (CmsXmlNestedContentDefinition)definition.getSchemaType(STR); CmsXmlNestedContentDefinition nestC = (CmsXmlNestedContentDefinition)definition.getSchemaType(STR); CmsXmlContentDefinition testA = nestA.getNestedContentDefinition(); CmsXmlContentDefinition testB = nestB.getNestedContentDefinition(); CmsXmlContentDefinition testC = nestC.getNestedContentDefinition(); assertSame( STR, testA.getSequenceType(), CmsXmlContentDefinition.SequenceType.MULTIPLE_CHOICE); assertTrue( STR + testA.getChoiceMaxOccurs() + STR, testA.getChoiceMaxOccurs() == 5); assertSame( STR, testB.getSequenceType(), CmsXmlContentDefinition.SequenceType.MULTIPLE_CHOICE); assertTrue( STR + testB.getChoiceMaxOccurs() + STR, testB.getChoiceMaxOccurs() == 5); assertSame( STR, testC.getSequenceType(), CmsXmlContentDefinition.SequenceType.SINGLE_CHOICE); assertTrue( STR + testC.getChoiceMaxOccurs() + STR, testC.getChoiceMaxOccurs() == 1); } | /**
* Tests XML content definition object generation for a schema that contains xsd:choice.<p>
*
* @throws Exception in case something goes wrong
*/ | Tests XML content definition object generation for a schema that contains xsd:choice | testChoiceXmlContentDefinitionCreation | {
"repo_name": "ggiudetti/opencms-core",
"path": "test/org/opencms/xml/content/TestCmsXmlContentChoice.java",
"license": "lgpl-2.1",
"size": 17462
} | [
"java.util.HashMap",
"org.opencms.file.CmsObject",
"org.opencms.main.CmsEvent",
"org.opencms.main.OpenCms",
"org.opencms.util.CmsFileUtil",
"org.opencms.xml.CmsXmlEntityResolver"
] | import java.util.HashMap; import org.opencms.file.CmsObject; import org.opencms.main.CmsEvent; import org.opencms.main.OpenCms; import org.opencms.util.CmsFileUtil; import org.opencms.xml.CmsXmlEntityResolver; | import java.util.*; import org.opencms.file.*; import org.opencms.main.*; import org.opencms.util.*; import org.opencms.xml.*; | [
"java.util",
"org.opencms.file",
"org.opencms.main",
"org.opencms.util",
"org.opencms.xml"
] | java.util; org.opencms.file; org.opencms.main; org.opencms.util; org.opencms.xml; | 2,093,976 |
EReference getPath_Link(); | EReference getPath_Link(); | /**
* Returns the meta object for the containment reference list '{@link jp.pizzafactory.model.spacewireos.channelinfo.Path#getLink <em>Link</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Link</em>'.
* @see jp.pizzafactory.model.spacewireos.channelinfo.Path#getLink()
* @see #getPath()
* @generated
*/ | Returns the meta object for the containment reference list '<code>jp.pizzafactory.model.spacewireos.channelinfo.Path#getLink Link</code>'. | getPath_Link | {
"repo_name": "SpaceWireOS-Modeler/MetaModel",
"path": "jp.pizzafactory.model.spacewireos/src/jp/pizzafactory/model/spacewireos/channelinfo/ChannelInfoPackage.java",
"license": "epl-1.0",
"size": 39912
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,796,288 |
public static Calendar getFirstDayOfNextYear() {
Calendar cal = Calendar.getInstance();
cal.set(Calendar.DAY_OF_YEAR, 1);
cal.set(Calendar.YEAR, cal.get(Calendar.YEAR) + 1);
return truncateToMidnight(cal);
} | static Calendar function() { Calendar cal = Calendar.getInstance(); cal.set(Calendar.DAY_OF_YEAR, 1); cal.set(Calendar.YEAR, cal.get(Calendar.YEAR) + 1); return truncateToMidnight(cal); } | /**
* Returns the midnight Calendar for the first day next year.
*/ | Returns the midnight Calendar for the first day next year | getFirstDayOfNextYear | {
"repo_name": "bakrus/openhab",
"path": "bundles/binding/org.openhab.binding.astro/src/main/java/org/openhab/binding/astro/internal/util/DateTimeUtils.java",
"license": "epl-1.0",
"size": 4686
} | [
"java.util.Calendar"
] | import java.util.Calendar; | import java.util.*; | [
"java.util"
] | java.util; | 815,767 |
ISensorManager getSensorManager(); | ISensorManager getSensorManager(); | /**
 * Returns the application's sensor manager,
 * behind the ISensorManager interface.
 *
 * @return The application's sensor manager.
 */ | Returns the application's sensor manager, behind the ISensorManager interface | getSensorManager | {
"repo_name": "tobiatesan/serleena-android",
"path": "serleena/app/src/main/java/com/kyloth/serleena/presenters/ISerleenaActivity.java",
"license": "mit",
"size": 2833
} | [
"com.kyloth.serleena.sensors.ISensorManager"
] | import com.kyloth.serleena.sensors.ISensorManager; | import com.kyloth.serleena.sensors.*; | [
"com.kyloth.serleena"
] | com.kyloth.serleena; | 2,460,764 |
@Test
public void testLookupEmptyAtrid() throws Exception
{
Entry entry = connection.lookup( "cn=test,ou=system", ( String[] ) null );
assertNotNull( entry );
// We should have 3 attributes
assertEquals( 3, entry.size() );
// Check that all the user attributes are present
assertEquals( "test", entry.get( "cn" ).getString() );
assertEquals( "sn_test", entry.get( "sn" ).getString() );
assertTrue( entry.contains( "objectClass", "top", "person" ) );
} | void function() throws Exception { Entry entry = connection.lookup( STR, ( String[] ) null ); assertNotNull( entry ); assertEquals( 3, entry.size() ); assertEquals( "test", entry.get( "cn" ).getString() ); assertEquals( STR, entry.get( "sn" ).getString() ); assertTrue( entry.contains( STR, "top", STR ) ); } | /**
* Test a lookup( Dn, []) operation
*/ | Test a lookup( Dn, []) operation | testLookupEmptyAtrid | {
"repo_name": "lucastheisen/apache-directory-server",
"path": "core-integ/src/test/java/org/apache/directory/server/core/operations/lookup/LookupIT.java",
"license": "apache-2.0",
"size": 7984
} | [
"org.apache.directory.api.ldap.model.entry.Entry",
"org.junit.Assert"
] | import org.apache.directory.api.ldap.model.entry.Entry; import org.junit.Assert; | import org.apache.directory.api.ldap.model.entry.*; import org.junit.*; | [
"org.apache.directory",
"org.junit"
] | org.apache.directory; org.junit; | 198,698 |
public double setWorkUnitEstSizes(Map<String, List<WorkUnit>> workUnitsByTopic) {
double totalEstDataSize = 0;
for (List<WorkUnit> workUnitsForTopic : workUnitsByTopic.values()) {
for (WorkUnit workUnit : workUnitsForTopic) {
setWorkUnitEstSize(workUnit);
totalEstDataSize += getWorkUnitEstSize(workUnit);
}
}
return totalEstDataSize;
} | double function(Map<String, List<WorkUnit>> workUnitsByTopic) { double totalEstDataSize = 0; for (List<WorkUnit> workUnitsForTopic : workUnitsByTopic.values()) { for (WorkUnit workUnit : workUnitsForTopic) { setWorkUnitEstSize(workUnit); totalEstDataSize += getWorkUnitEstSize(workUnit); } } return totalEstDataSize; } | /**
* Calculate the total size of the workUnits and set the estimated size for each workUnit
* @param workUnitsByTopic
* @return the total size of the input workUnits
*/ | Calculate the total size of the workUnits and set the estimated size for each workUnit | setWorkUnitEstSizes | {
"repo_name": "arjun4084346/gobblin",
"path": "gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/workunit/packer/KafkaWorkUnitPacker.java",
"license": "apache-2.0",
"size": 19465
} | [
"java.util.List",
"java.util.Map",
"org.apache.gobblin.source.workunit.WorkUnit"
] | import java.util.List; import java.util.Map; import org.apache.gobblin.source.workunit.WorkUnit; | import java.util.*; import org.apache.gobblin.source.workunit.*; | [
"java.util",
"org.apache.gobblin"
] | java.util; org.apache.gobblin; | 1,930,419 |
protected void emit_iNSelt_WSTerminalRuleCall_4_q(EObject semanticObject, ISynNavigable transition, List<INode> nodes) {
acceptNodes(transition, nodes);
}
| void function(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } | /**
* Syntax:
* WS?
*/ | Syntax: WS | emit_iNSelt_WSTerminalRuleCall_4_q | {
"repo_name": "cooked/NDT",
"path": "sc.ndt.editor.bmodes.bmi/src-gen/sc/ndt/editor/bmodes/serializer/BmodesbmiSyntacticSequencer.java",
"license": "gpl-3.0",
"size": 75631
} | [
"java.util.List",
"org.eclipse.emf.ecore.EObject",
"org.eclipse.xtext.nodemodel.INode",
"org.eclipse.xtext.serializer.analysis.ISyntacticSequencerPDAProvider"
] | import java.util.List; import org.eclipse.emf.ecore.EObject; import org.eclipse.xtext.nodemodel.INode; import org.eclipse.xtext.serializer.analysis.ISyntacticSequencerPDAProvider; | import java.util.*; import org.eclipse.emf.ecore.*; import org.eclipse.xtext.nodemodel.*; import org.eclipse.xtext.serializer.analysis.*; | [
"java.util",
"org.eclipse.emf",
"org.eclipse.xtext"
] | java.util; org.eclipse.emf; org.eclipse.xtext; | 2,356,556 |
public BigInteger[] getBigIntegerValues() throws TypeMismatchException; | BigInteger[] function() throws TypeMismatchException; | /**
* Gets the values of this object as a {@link BigInteger} value.
*
* @return the values of this object as a {@link BigInteger} array, whose
* elements may be null; or an empty array if this object has no
* value.
* @throws TypeMismatchException
* if the type of this object is not {@link Type#BIG_INTEGER}.
*/ | Gets the values of this object as a <code>BigInteger</code> value | getBigIntegerValues | {
"repo_name": "Haixing-Hu/commons",
"path": "src/main/java/com/github/haixing_hu/util/value/MultiValues.java",
"license": "apache-2.0",
"size": 81305
} | [
"com.github.haixing_hu.lang.TypeMismatchException",
"java.math.BigInteger"
] | import com.github.haixing_hu.lang.TypeMismatchException; import java.math.BigInteger; | import com.github.haixing_hu.lang.*; import java.math.*; | [
"com.github.haixing_hu",
"java.math"
] | com.github.haixing_hu; java.math; | 2,409,812 |
List<Path> dataLocationDump(Table table) throws InterruptedException, IOException, HiveException {
List<Path> extTableLocations = new LinkedList<>();
if (!shouldWrite()) {
return extTableLocations;
}
if (!TableType.EXTERNAL_TABLE.equals(table.getTableType())) {
throw new IllegalArgumentException(
"only External tables can be writen via this writer, provided table is " + table
.getTableType());
}
Path fullyQualifiedDataLocation =
PathBuilder.fullyQualifiedHDFSUri(table.getDataLocation(), FileSystem.get(hiveConf));
write(lineFor(table.getTableName(), fullyQualifiedDataLocation, hiveConf));
extTableLocations.add(fullyQualifiedDataLocation);
if (table.isPartitioned()) {
List<Partition> partitions;
try {
partitions = Hive.get(hiveConf).getPartitions(table);
} catch (HiveException e) {
if (e.getCause() instanceof NoSuchObjectException) {
// If table is dropped when dump in progress, just skip partitions data location dump
LOG.debug(e.getMessage());
return extTableLocations;
}
throw e;
}
for (Partition partition : partitions) {
boolean partitionLocOutsideTableLoc = !FileUtils.isPathWithinSubtree(
partition.getDataLocation(), table.getDataLocation()
);
if (partitionLocOutsideTableLoc) {
fullyQualifiedDataLocation = PathBuilder
.fullyQualifiedHDFSUri(partition.getDataLocation(), FileSystem.get(hiveConf));
write(lineFor(table.getTableName(), fullyQualifiedDataLocation, hiveConf));
extTableLocations.add(fullyQualifiedDataLocation);
}
}
}
return extTableLocations;
} | List<Path> dataLocationDump(Table table) throws InterruptedException, IOException, HiveException { List<Path> extTableLocations = new LinkedList<>(); if (!shouldWrite()) { return extTableLocations; } if (!TableType.EXTERNAL_TABLE.equals(table.getTableType())) { throw new IllegalArgumentException( STR + table .getTableType()); } Path fullyQualifiedDataLocation = PathBuilder.fullyQualifiedHDFSUri(table.getDataLocation(), FileSystem.get(hiveConf)); write(lineFor(table.getTableName(), fullyQualifiedDataLocation, hiveConf)); extTableLocations.add(fullyQualifiedDataLocation); if (table.isPartitioned()) { List<Partition> partitions; try { partitions = Hive.get(hiveConf).getPartitions(table); } catch (HiveException e) { if (e.getCause() instanceof NoSuchObjectException) { LOG.debug(e.getMessage()); return extTableLocations; } throw e; } for (Partition partition : partitions) { boolean partitionLocOutsideTableLoc = !FileUtils.isPathWithinSubtree( partition.getDataLocation(), table.getDataLocation() ); if (partitionLocOutsideTableLoc) { fullyQualifiedDataLocation = PathBuilder .fullyQualifiedHDFSUri(partition.getDataLocation(), FileSystem.get(hiveConf)); write(lineFor(table.getTableName(), fullyQualifiedDataLocation, hiveConf)); extTableLocations.add(fullyQualifiedDataLocation); } } } return extTableLocations; } | /**
   * This will dump a single line per external table. It can include additional lines for the same
   * table if the table is partitioned and the partition location is outside the table.
   * It returns a list of all the external table locations.
   */ | This will dump a single line per external table. It can include additional lines for the same table if the table is partitioned and the partition location is outside the table. It returns a list of all the external table locations | dataLocationDump | {
"repo_name": "anishek/hive",
"path": "ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplExternalTables.java",
"license": "apache-2.0",
"size": 12815
} | [
"java.io.IOException",
"java.util.LinkedList",
"java.util.List",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hive.common.FileUtils",
"org.apache.hadoop.hive.metastore.TableType",
"org.apache.hadoop.hive.metastore.api.NoSuchObjectException",
"org.apache.hadoop.hive.ql.metadata.Hive",
"org.apache.hadoop.hive.ql.metadata.HiveException",
"org.apache.hadoop.hive.ql.metadata.Partition",
"org.apache.hadoop.hive.ql.metadata.Table",
"org.apache.hadoop.hive.ql.parse.repl.PathBuilder"
] | import java.io.IOException; import java.util.LinkedList; import java.util.List; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.repl.PathBuilder; | import java.io.*; import java.util.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hive.common.*; import org.apache.hadoop.hive.metastore.*; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.ql.metadata.*; import org.apache.hadoop.hive.ql.parse.repl.*; | [
"java.io",
"java.util",
"org.apache.hadoop"
] | java.io; java.util; org.apache.hadoop; | 251,350 |
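A minimal sketch of the check dataLocationDump applies above: a partition only gets its own line in the external-tables dump when its data location is not within the table's base location, tested with FileUtils.isPathWithinSubtree. The HDFS paths below are hypothetical and stand in for the locations the real method reads from the Hive Table and Partition objects.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;

public class PartitionLocationCheck {
    // True when the partition lives outside the table location and therefore
    // needs its own line in the external-tables metadata dump.
    static boolean needsOwnLine(Path partitionLocation, Path tableLocation) {
        return !FileUtils.isPathWithinSubtree(partitionLocation, tableLocation);
    }

    public static void main(String[] args) {
        Path tableLocation = new Path("hdfs://nn:8020/warehouse/ext_sales");        // assumed table location
        Path inside = new Path("hdfs://nn:8020/warehouse/ext_sales/dt=2021-01-01"); // under the table location
        Path outside = new Path("hdfs://nn:8020/landing/sales/dt=2021-01-01");      // outside the table location

        System.out.println(needsOwnLine(inside, tableLocation));  // false -> no extra line
        System.out.println(needsOwnLine(outside, tableLocation)); // true  -> extra line written
    }
}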
public void close()
throws ResourceException; | void function() throws ResourceException; | /**
* Closes the connection at the application level.
*/ | Closes the connection at the application level | close | {
"repo_name": "christianchristensen/resin",
"path": "modules/jca/src/javax/resource/cci/Connection.java",
"license": "gpl-2.0",
"size": 2129
} | [
"javax.resource.ResourceException"
] | import javax.resource.ResourceException; | import javax.resource.*; | [
"javax.resource"
] | javax.resource; | 1,053,479 |
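A minimal usage sketch for the close() contract documented above, assuming a container-managed javax.resource.cci.ConnectionFactory bound at a hypothetical JNDI name; the point is that the application releases the handle in a finally block once its work is done.

import javax.naming.InitialContext;
import javax.resource.cci.Connection;
import javax.resource.cci.ConnectionFactory;

public class CciCloseExample {
    public void doWork() throws Exception {
        // Hypothetical JNDI name; supplied by the deployment descriptor in a real application.
        ConnectionFactory factory =
                (ConnectionFactory) new InitialContext().lookup("java:comp/env/eis/MyEIS");
        Connection connection = factory.getConnection();
        try {
            // ... use connection.createInteraction() to talk to the EIS ...
        } finally {
            // Application-level close: returns the handle to the resource adapter.
            connection.close();
        }
    }
}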
@WebMethod
@WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202202")
@RequestWrapper(localName = "performSiteAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v202202", className = "com.google.api.ads.admanager.jaxws.v202202.SiteServiceInterfaceperformSiteAction")
@ResponseWrapper(localName = "performSiteActionResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202202", className = "com.google.api.ads.admanager.jaxws.v202202.SiteServiceInterfaceperformSiteActionResponse")
public UpdateResult performSiteAction(
@WebParam(name = "siteAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v202202")
SiteAction siteAction,
@WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202202")
Statement filterStatement)
throws ApiException_Exception
; | @WebResult(name = "rval", targetNamespace = STRperformSiteActionSTRhttps: @ResponseWrapper(localName = "performSiteActionResponseSTRhttps: UpdateResult function( @WebParam(name = "siteActionSTRhttps: SiteAction siteAction, @WebParam(name = "filterStatementSTRhttps: Statement filterStatement) throws ApiException_Exception ; | /**
*
* Performs actions on {@link Site} objects that match the given {@link Statement#query}.
*
* @param siteAction the action to perform
* @param filterStatement a Publisher Query Language statement used to filter a set of sites
* @return the result of the action performed
*
*
* @param filterStatement
* @param siteAction
* @return
* returns com.google.api.ads.admanager.jaxws.v202202.UpdateResult
* @throws ApiException_Exception
*/ | Performs actions on <code>Site</code> objects that match the given <code>Statement#query</code> | performSiteAction | {
"repo_name": "googleads/googleads-java-lib",
"path": "modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202202/SiteServiceInterface.java",
"license": "apache-2.0",
"size": 7428
} | [
"javax.jws.WebParam",
"javax.jws.WebResult",
"javax.xml.ws.ResponseWrapper"
] | import javax.jws.WebParam; import javax.jws.WebResult; import javax.xml.ws.ResponseWrapper; | import javax.jws.*; import javax.xml.ws.*; | [
"javax.jws",
"javax.xml"
] | javax.jws; javax.xml; | 2,627,886 |
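A hedged sketch of calling performSiteAction on the generated stub above. The service wiring, the PQL WHERE clause, and the concrete SiteAction instance are assumptions; check the Ad Manager v202202 client library for the actions it actually ships.

import com.google.api.ads.admanager.jaxws.v202202.ApiException_Exception;
import com.google.api.ads.admanager.jaxws.v202202.SiteAction;
import com.google.api.ads.admanager.jaxws.v202202.SiteServiceInterface;
import com.google.api.ads.admanager.jaxws.v202202.Statement;
import com.google.api.ads.admanager.jaxws.v202202.UpdateResult;

public class PerformSiteActionSketch {
    // Applies the given action to the sites matched by a PQL filter and
    // returns how many sites were changed.
    static long applyToSite(SiteServiceInterface siteService, SiteAction action, long siteId)
            throws ApiException_Exception {
        Statement filter = new Statement();
        filter.setQuery("WHERE id = " + siteId); // hypothetical filter on a single site id
        UpdateResult result = siteService.performSiteAction(action, filter);
        return result.getNumChanges();
    }
}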
public static Predicate<LivingEntity> hasNotPotionEffect(Effect effect) {
return e -> !(e.isPotionActive(effect));
}
| static Predicate<LivingEntity> function(Effect effect) { return e -> !(e.isPotionActive(effect)); } | /**
* Returns {@linkplain Predicate} which returns true if
* {@linkplain LivingEntity} doesn't have an active potion effect.
*
* @param effect potion effect to test.
*
* @return {@linkplain Predicate} which returns true if
* {@linkplain LivingEntity} doesn't have an active potion effect.
*/ | Returns Predicate which returns true if LivingEntity doesn't have an active potion effect | hasNotPotionEffect | {
"repo_name": "athrane/bassebombecraft",
"path": "src/main/java/bassebombecraft/util/function/Predicates.java",
"license": "gpl-3.0",
"size": 4390
} | [
"java.util.function.Predicate",
"net.minecraft.entity.LivingEntity",
"net.minecraft.potion.Effect"
] | import java.util.function.Predicate; import net.minecraft.entity.LivingEntity; import net.minecraft.potion.Effect; | import java.util.function.*; import net.minecraft.entity.*; import net.minecraft.potion.*; | [
"java.util",
"net.minecraft.entity",
"net.minecraft.potion"
] | java.util; net.minecraft.entity; net.minecraft.potion; | 791,721 |
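A small usage sketch for the predicate documented above, filtering a list of entities down to those without poison. It assumes 1.15/1.16-era MCP names (net.minecraft.potion.Effects.POISON) and statically imports the factory from the Predicates class this record comes from.

import java.util.List;
import java.util.stream.Collectors;
import net.minecraft.entity.LivingEntity;
import net.minecraft.potion.Effects;
import static bassebombecraft.util.function.Predicates.hasNotPotionEffect;

public class PredicateUsageSketch {
    // Keeps only the entities that are NOT currently affected by poison.
    static List<LivingEntity> withoutPoison(List<LivingEntity> candidates) {
        return candidates.stream()
                .filter(hasNotPotionEffect(Effects.POISON))
                .collect(Collectors.toList());
    }
}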