method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
sequence | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
sequence | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
@Override
public java.sql.Connection unwrapConnection(java.sql.Connection connection){
try {
return connection.getMetaData().getConnection();
} catch (java.sql.SQLException e){
((DatabaseSessionImpl)getDatabaseSession()).log(SessionLog.WARNING, SessionLog.CONNECTION, "cannot_unwrap_connection", e);
return connection;
}
} | java.sql.Connection function(java.sql.Connection connection){ try { return connection.getMetaData().getConnection(); } catch (java.sql.SQLException e){ ((DatabaseSessionImpl)getDatabaseSession()).log(SessionLog.WARNING, SessionLog.CONNECTION, STR, e); return connection; } } | /**
* INTERNAL: This method is used to unwrap the connection wrapped by
* the application server. TopLink needs this unwrapped connection for certain
* database vendor specific support. (i.e. TIMESTAMPTZ,NCHAR,XMLTYPE)
*
 * By default we will use the connection's metadata to try to get the connection.
*/ | the application server. TopLink needs this unwrapped connection for certain database vendor specific support. (i.e. TIMESTAMPTZ,NCHAR,XMLTYPE) Be default we will use the connection's metadata to try to get the connection | unwrapConnection | {
"repo_name": "gameduell/eclipselink.runtime",
"path": "foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/platform/server/ServerPlatformBase.java",
"license": "epl-1.0",
"size": 24474
} | [
"java.sql.SQLException",
"org.eclipse.persistence.internal.sessions.DatabaseSessionImpl",
"org.eclipse.persistence.logging.SessionLog"
] | import java.sql.SQLException; import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl; import org.eclipse.persistence.logging.SessionLog; | import java.sql.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.logging.*; | [
"java.sql",
"org.eclipse.persistence"
] | java.sql; org.eclipse.persistence; | 1,767,033 |
/**
 * Convenience overload: reads a comma-separated list of extension names from
 * the given URL parameter and delegates to
 * {@link #getActivateExtension(com.alibaba.dubbo.common.URL, String[], String)}.
 *
 * @param url   url
 * @param key   url parameter key which holds the extension point names
 * @param group group
 * @return extension list which are activated.
 */
public List<T> getActivateExtension(URL url, String key, String group) {
    String value = url.getParameter(key);
    String[] names;
    if (value == null || value.length() == 0) {
        // No names configured for this key: let the delegate apply defaults.
        names = null;
    } else {
        names = Constants.COMMA_SPLIT_PATTERN.split(value);
    }
    return getActivateExtension(url, names, group);
}
| List<T> function(URL url, String key, String group) { String value = url.getParameter(key); return getActivateExtension(url, value == null value.length() == 0 ? null : Constants.COMMA_SPLIT_PATTERN.split(value), group); } | /**
* This is equivalent to <pre>
* getActivateExtension(url, url.getParameter(key).split(","), null);
* </pre>
*
* @see #getActivateExtension(com.alibaba.dubbo.common.URL, String[], String)
* @param url url
* @param key url parameter key which used to get extension point names
* @param group group
* @return extension list which are activated.
*/ | This is equivalent to <code> getActivateExtension(url, url.getParameter(key).split(","), null); </code> | getActivateExtension | {
"repo_name": "hl198181/dubbo",
"path": "dubbo-common/src/main/java/com/alibaba/dubbo/common/extension/ExtensionLoader.java",
"license": "apache-2.0",
"size": 43476
} | [
"com.alibaba.dubbo.common.Constants",
"java.util.List"
] | import com.alibaba.dubbo.common.Constants; import java.util.List; | import com.alibaba.dubbo.common.*; import java.util.*; | [
"com.alibaba.dubbo",
"java.util"
] | com.alibaba.dubbo; java.util; | 2,828,662 |
public ConfigProgram getBuilderProgram()
{
return _program;
} | ConfigProgram function() { return _program; } | /**
* Returns the program.
*/ | Returns the program | getBuilderProgram | {
"repo_name": "mdaniel/svn-caucho-com-resin",
"path": "modules/resin/src/com/caucho/env/deploy/DeployConfig.java",
"license": "gpl-2.0",
"size": 6215
} | [
"com.caucho.config.program.ConfigProgram"
] | import com.caucho.config.program.ConfigProgram; | import com.caucho.config.program.*; | [
"com.caucho.config"
] | com.caucho.config; | 596,106 |
View getSkipButton() {
return mButtonSkip;
} | View getSkipButton() { return mButtonSkip; } | /**
* Get skip button.
*
* @return skip button
*/ | Get skip button | getSkipButton | {
"repo_name": "Cleveroad/slidingtutorial-android",
"path": "lib/src/main/java/com/cleveroad/slidingtutorial/TutorialImpl.java",
"license": "mit",
"size": 17989
} | [
"android.view.View"
] | import android.view.View; | import android.view.*; | [
"android.view"
] | android.view; | 976,376 |
protected void createSymbolicLink(Path link, Path target) throws IOException {
createSymbolicLink(link, target.asFragment());
} | void function(Path link, Path target) throws IOException { createSymbolicLink(link, target.asFragment()); } | /**
* Indirection to create links so we can test FileSystems that do not support
* link creation. For example, JavaFileSystemTest overrides this method
* and creates the link with an alternate FileSystem.
*/ | Indirection to create links so we can test FileSystems that do not support link creation. For example, JavaFileSystemTest overrides this method and creates the link with an alternate FileSystem | createSymbolicLink | {
"repo_name": "dropbox/bazel",
"path": "src/test/java/com/google/devtools/build/lib/vfs/FileSystemTest.java",
"license": "apache-2.0",
"size": 45968
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,335,365 |
/**
 * Bencodes a single value onto the stream for the torrent file, dispatching
 * on the runtime type of {@code o}. Supported types are String, Map,
 * byte[] and Number; anything else aborts with an Error.
 *
 * @param o   value to encode (String, Map, byte[] or Number)
 * @param out destination stream
 * @throws IOException if writing to the stream fails
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private void encodeObject(Object o, OutputStream out) throws IOException {
    // The supported types are mutually exclusive, so the check order
    // does not affect which branch runs.
    if (o instanceof String) {
        encodeString((String) o, out);
    } else if (o instanceof Map) {
        encodeMap((Map) o, out);
    } else if (o instanceof byte[]) {
        encodeBytes((byte[]) o, out);
    } else if (o instanceof Number) {
        encodeLong(((Number) o).longValue(), out);
    } else {
        throw new Error("Unencodable type");
    }
}
| @SuppressWarnings({ STR, STR }) void function(Object o, OutputStream out) throws IOException { if (o instanceof String) encodeString((String)o, out); else if (o instanceof Map) encodeMap((Map)o, out); else if (o instanceof byte[]) encodeBytes((byte[])o, out); else if (o instanceof Number) encodeLong(((Number) o).longValue(), out); else throw new Error(STR); } | /**
* Utility method to create torrent file
* @param o
* @param out
* @throws IOException
*/ | Utility method to create torrent file | encodeObject | {
"repo_name": "UnaCloud/UnaCloud2",
"path": "UnaCloudFileManager/src/java/uniandes/unacloud/file/net/torrent/TorrentTracker.java",
"license": "gpl-2.0",
"size": 8542
} | [
"java.io.IOException",
"java.io.OutputStream",
"java.util.Map"
] | import java.io.IOException; import java.io.OutputStream; import java.util.Map; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 87,546 |
public Set<StandardPredicate> getRegisteredPredicates(); | Set<StandardPredicate> function(); | /**
* Returns the set of StandardPredicates registered with this DataStore.
*/ | Returns the set of StandardPredicates registered with this DataStore | getRegisteredPredicates | {
"repo_name": "linqs/psl",
"path": "psl-core/src/main/java/org/linqs/psl/database/DataStore.java",
"license": "apache-2.0",
"size": 5252
} | [
"java.util.Set",
"org.linqs.psl.model.predicate.StandardPredicate"
] | import java.util.Set; import org.linqs.psl.model.predicate.StandardPredicate; | import java.util.*; import org.linqs.psl.model.predicate.*; | [
"java.util",
"org.linqs.psl"
] | java.util; org.linqs.psl; | 895,783 |
boolean hasAttribute(String name, Predicate<Object> value); | boolean hasAttribute(String name, Predicate<Object> value); | /**
* Has attribute boolean.
*
* @param name the name
* @param value the value
* @return true /false
*/ | Has attribute boolean | hasAttribute | {
"repo_name": "rrenomeron/cas",
"path": "api/cas-server-core-api-authentication/src/main/java/org/apereo/cas/authentication/AuthenticationBuilder.java",
"license": "apache-2.0",
"size": 6202
} | [
"java.util.function.Predicate"
] | import java.util.function.Predicate; | import java.util.function.*; | [
"java.util"
] | java.util; | 1,370,607 |
/**
 * Almacena un step con sus scripts en un remote storage.
 *
 * Stores (or updates) a named step with its ordered scripts inside the XML
 * definition of a remote storage, then rewrites the storage XML.
 *
 * @param nameStorage name of the remote storage that owns the step
 * @param nameStep    name of the step to create or update
 * @param scripts     scripts indexed by execution order
 * @param mount       whether this step mounts the storage
 * @param edit        true when editing an existing step; false when creating
 * @throws Exception if a step with the same name already exists (when not
 *                   editing) or if the storage definition cannot be written
 */
@SuppressWarnings("unchecked")
public static void saveRemoteStorageStep(String nameStorage, String nameStep, Map<Integer, String> scripts, boolean mount, boolean edit) throws Exception {
    try {
        // Creating a new step must not silently overwrite an existing one.
        boolean exists = existsStorageStep(nameStorage, nameStep);
        if (!edit && exists) {
            throw new Exception("Another storage step already exists with that name");
        }
        Map<String, Object> storage = getRemoteStorage(nameStorage);
        Map<String, Object> steps;
        if (storage.get("steps") != null) {
            steps = (Map<String, Object>) storage.get("steps");
        } else {
            steps = new TreeMap<String, Object>();
        }
        Map<String, Map<String, String>> vars = null;
        if (storage.get("variables") != null) {
            vars = (Map<String, Map<String, String>>) storage.get("variables");
        }
        Map<String, Object> step = new HashMap<String, Object>();
        step.put("name", nameStep);
        // The XML layer expects the flag serialized as the strings "true"/"false".
        step.put("mount", mount ? "true" : "false");
        step.put("scripts", scripts);
        steps.put(nameStep, step);
        writeRemoteStorageXML(nameStorage, (String) storage.get("typeConnection"), (String) storage.get("typeStorage"), vars, steps);
    } catch (Exception ex) {
        logger.error("Error guardando remote storage step: storage: {} step: {}. Ex: {}", new Object[]{nameStorage, nameStep, ex.getMessage()});
        throw new Exception("Error saving remote storage step: storage:"+nameStorage+" step:"+nameStep+". Ex:"+ex.getMessage());
    }
}
| @SuppressWarnings(STR) static void function(String nameStorage, String nameStep, Map<Integer, String> scripts, boolean mount, boolean edit) throws Exception { try { boolean exists = false; exists = existsStorageStep(nameStorage, nameStep); if (!edit && exists) { throw new Exception (STR); } Map<String, Map<String, String>> vars = null; Map<String, Object> steps = null; Map<String,Object> storage = getRemoteStorage(nameStorage); if (storage.get("steps") != null) { steps = (Map<String, Object>) storage.get("steps"); } else { steps = new TreeMap<String, Object>(); } if (storage.get(STR) != null) { vars = (Map<String, Map<String, String>>) storage.get(STR); } Map<String, Object> step = new HashMap<String, Object>(); step.put("name", nameStep); if (mount) step.put("mount", "true"); else step.put("mount", "false"); step.put(STR, scripts); steps.put(nameStep, step); writeRemoteStorageXML(nameStorage, (String) storage.get(STR), (String) storage.get(STR), vars, steps); } catch (Exception ex) { logger.error(STR, new Object[]{nameStorage, nameStep, ex.getMessage()}); throw new Exception(STR+nameStorage+STR+nameStep+STR+ex.getMessage()); } } | /**
* Almacena un step con sus scripts en un remote storage
* @param nameStorage
* @param nameStep
* @param scripts
* @param edit
* @throws Exception
*/ | Almacena un step con sus scripts en un remote storage | saveRemoteStorageStep | {
"repo_name": "WhiteBearSolutions/WBSAirback",
"path": "src/com/whitebearsolutions/imagine/wbsairback/advanced/RemoteStorageManager.java",
"license": "apache-2.0",
"size": 37148
} | [
"java.util.HashMap",
"java.util.Map",
"java.util.TreeMap"
] | import java.util.HashMap; import java.util.Map; import java.util.TreeMap; | import java.util.*; | [
"java.util"
] | java.util; | 1,595,756 |
@FIXVersion(introduced="4.4")
@TagNumRef(tagNum=TagNum.UnderlyingSymbolSfx)
public String getUnderlyingSymbolSfx() {
return underlyingSymbolSfx;
} | @FIXVersion(introduced="4.4") @TagNumRef(tagNum=TagNum.UnderlyingSymbolSfx) String function() { return underlyingSymbolSfx; } | /**
* Message field getter.
* @return field value
*/ | Message field getter | getUnderlyingSymbolSfx | {
"repo_name": "marvisan/HadesFIX",
"path": "Model/src/main/java/net/hades/fix/message/comp/UnderlyingInstrument.java",
"license": "gpl-3.0",
"size": 91408
} | [
"net.hades.fix.message.anno.FIXVersion",
"net.hades.fix.message.anno.TagNumRef",
"net.hades.fix.message.type.TagNum"
] | import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.TagNum; | import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*; | [
"net.hades.fix"
] | net.hades.fix; | 710,954 |
/**
 * Maps a point, given in this view's local coordinates, to the adapter
 * position of the child that contains it. Children are scanned from the
 * last-drawn (topmost) to the first so that overlapping children resolve
 * to the one drawn on top.
 *
 * @param x X in local coordinate
 * @param y Y in local coordinate
 * @return The position of the item which contains the specified point, or
 *         {@link #INVALID_POSITION} if the point does not intersect an item.
 */
public int pointToPosition(int x, int y) {
    Rect hitRect = mTouchFrame;
    if (hitRect == null) {
        // Lazily create the scratch rect; it is reused across calls.
        mTouchFrame = new Rect();
        hitRect = mTouchFrame;
    }
    for (int i = getChildCount() - 1; i >= 0; i--) {
        final View child = getChildAt(i);
        if (child.getVisibility() != View.VISIBLE) {
            continue;
        }
        child.getHitRect(hitRect);
        if (hitRect.contains(x, y)) {
            return mFirstPosition + i;
        }
    }
    return INVALID_POSITION;
}
/**
* Maps a point to a the rowId of the item which intersects that point.
*
* @param x X in local coordinate
* @param y Y in local coordinate
* @return The rowId of the item which contains the specified point, or {@link #INVALID_ROW_ID} | int function(int x, int y) { Rect frame = mTouchFrame; if (frame == null) { mTouchFrame = new Rect(); frame = mTouchFrame; } final int count = getChildCount(); for (int i = count - 1; i >= 0; i--) { final View child = getChildAt(i); if (child.getVisibility() == View.VISIBLE) { child.getHitRect(frame); if (frame.contains(x, y)) { return mFirstPosition + i; } } } return INVALID_POSITION; } /** * Maps a point to a the rowId of the item which intersects that point. * * @param x X in local coordinate * @param y Y in local coordinate * @return The rowId of the item which contains the specified point, or {@link #INVALID_ROW_ID} | /**
* Maps a point to a position in the list.
*
* @param x X in local coordinate
* @param y Y in local coordinate
* @return The position of the item which contains the specified point, or
* {@link #INVALID_POSITION} if the point does not intersect an item.
*/ | Maps a point to a position in the list | pointToPosition | {
"repo_name": "daimajia/EverMemo",
"path": "libraries/ExGridView/src/com/huewu/pla/lib/internal/PLA_AbsListView.java",
"license": "mit",
"size": 91245
} | [
"android.graphics.Rect",
"android.view.View"
] | import android.graphics.Rect; import android.view.View; | import android.graphics.*; import android.view.*; | [
"android.graphics",
"android.view"
] | android.graphics; android.view; | 2,368,027 |
TDImportResult importFile(String database, String table, File file); | TDImportResult importFile(String database, String table, File file); | /**
* Import a msgpack.gz file upon target table
* @param database target database
* @param table target table
* @param file source msgpack.gz formatted file
* @return TDImportResult which contains a unique import id and md5
*/ | Import a msgpack.gz file upon target table | importFile | {
"repo_name": "treasure-data/td-client-java",
"path": "src/main/java/com/treasuredata/client/TDClientApi.java",
"license": "apache-2.0",
"size": 15031
} | [
"com.treasuredata.client.model.TDImportResult",
"java.io.File"
] | import com.treasuredata.client.model.TDImportResult; import java.io.File; | import com.treasuredata.client.model.*; import java.io.*; | [
"com.treasuredata.client",
"java.io"
] | com.treasuredata.client; java.io; | 2,367,199 |
/**
 * Returns the Java 2D coordinate for a category, shrinking and shifting the
 * data area first when the plot's renderer draws a 3D effect.
 *
 * @param anchor  the anchor point within the category band.
 * @param category  the category index.
 * @param categoryCount  the category count.
 * @param area  the data area.
 * @param edge  the location of the axis.
 *
 * @return The coordinate.
 */
public double getCategoryJava2DCoordinate(CategoryAnchor anchor,
                                          int category,
                                          int categoryCount,
                                          Rectangle2D area,
                                          RectangleEdge edge) {
    Rectangle2D adjustedArea = area;
    CategoryPlot plot = (CategoryPlot) getPlot();
    CategoryItemRenderer renderer = plot.getRenderer();
    if (renderer instanceof Effect3D) {
        // Reduce the area by the 3D offsets, shifting the origin according
        // to which edge the axis is attached to.
        Effect3D e3D = (Effect3D) renderer;
        double adjX = area.getMinX();
        double adjY = area.getMinY();
        double adjW = area.getWidth() - e3D.getXOffset();
        double adjH = area.getHeight() - e3D.getYOffset();
        if (edge == RectangleEdge.LEFT || edge == RectangleEdge.BOTTOM) {
            adjY += e3D.getYOffset();
        }
        else if (edge == RectangleEdge.RIGHT || edge == RectangleEdge.TOP) {
            adjX += e3D.getXOffset();
        }
        adjustedArea = new Rectangle2D.Double(adjX, adjY, adjW, adjH);
    }
    if (anchor == CategoryAnchor.START) {
        return getCategoryStart(category, categoryCount, adjustedArea, edge);
    }
    if (anchor == CategoryAnchor.MIDDLE) {
        return getCategoryMiddle(category, categoryCount, adjustedArea, edge);
    }
    if (anchor == CategoryAnchor.END) {
        return getCategoryEnd(category, categoryCount, adjustedArea, edge);
    }
    // Unknown anchor: preserve the original fallback of 0.0.
    return 0.0;
}
| double function(CategoryAnchor anchor, int category, int categoryCount, Rectangle2D area, RectangleEdge edge) { double result = 0.0; Rectangle2D adjustedArea = area; CategoryPlot plot = (CategoryPlot) getPlot(); CategoryItemRenderer renderer = plot.getRenderer(); if (renderer instanceof Effect3D) { Effect3D e3D = (Effect3D) renderer; double adjustedX = area.getMinX(); double adjustedY = area.getMinY(); double adjustedW = area.getWidth() - e3D.getXOffset(); double adjustedH = area.getHeight() - e3D.getYOffset(); if (edge == RectangleEdge.LEFT edge == RectangleEdge.BOTTOM) { adjustedY += e3D.getYOffset(); } else if (edge == RectangleEdge.RIGHT edge == RectangleEdge.TOP) { adjustedX += e3D.getXOffset(); } adjustedArea = new Rectangle2D.Double(adjustedX, adjustedY, adjustedW, adjustedH); } if (anchor == CategoryAnchor.START) { result = getCategoryStart(category, categoryCount, adjustedArea, edge); } else if (anchor == CategoryAnchor.MIDDLE) { result = getCategoryMiddle(category, categoryCount, adjustedArea, edge); } else if (anchor == CategoryAnchor.END) { result = getCategoryEnd(category, categoryCount, adjustedArea, edge); } return result; } | /**
* Returns the Java 2D coordinate for a category.
*
* @param anchor the anchor point.
* @param category the category index.
* @param categoryCount the category count.
* @param area the data area.
* @param edge the location of the axis.
*
* @return The coordinate.
*/ | Returns the Java 2D coordinate for a category | getCategoryJava2DCoordinate | {
"repo_name": "ilyessou/jfreechart",
"path": "source/org/jfree/chart/axis/CategoryAxis3D.java",
"license": "lgpl-2.1",
"size": 8911
} | [
"java.awt.geom.Rectangle2D",
"org.jfree.chart.Effect3D",
"org.jfree.chart.plot.CategoryPlot",
"org.jfree.chart.renderer.category.CategoryItemRenderer",
"org.jfree.chart.util.RectangleEdge"
] | import java.awt.geom.Rectangle2D; import org.jfree.chart.Effect3D; import org.jfree.chart.plot.CategoryPlot; import org.jfree.chart.renderer.category.CategoryItemRenderer; import org.jfree.chart.util.RectangleEdge; | import java.awt.geom.*; import org.jfree.chart.*; import org.jfree.chart.plot.*; import org.jfree.chart.renderer.category.*; import org.jfree.chart.util.*; | [
"java.awt",
"org.jfree.chart"
] | java.awt; org.jfree.chart; | 1,399,043 |
/**
 * Observes the {@link Employee} event, fired only AFTER_FAILURE, i.e. when
 * the surrounding transaction rolled back. Logs a rollback message and
 * records it via {@link EmployeeService#addRollbackMsg(String)}.
 *
 * @param emp payload: the employee whose deletion was rolled back
 */
void processTxFailure(@Observes(during = TransactionPhase.AFTER_FAILURE) Employee emp) {
String msg = "*** An error occurred and deletion of emp # " + emp.getId() + " was roll-backed";
LOG.info(msg);
service.addRollbackMsg(msg);
}
/**
* Observes {@link Employee} event type in case of transaction success.
* Log a success message and use it to invoke {@link EmployeeService#addCommitMsg(String)} | void processTxFailure(@Observes(during = TransactionPhase.AFTER_FAILURE) Employee emp) { String msg = STR + emp.getId() + STR; LOG.info(msg); service.addRollbackMsg(msg); } /** * Observes {@link Employee} event type in case of transaction success. * Log a success message and use it to invoke {@link EmployeeService#addCommitMsg(String)} | /**
* Observes {@link Employee} event type in case of transaction failure.
* Log a failure message and use it to invoke {@link EmployeeService#addRollbackMsg(String)}
*
* @param emp payload
*/ | Observes <code>Employee</code> event type in case of transaction failure. Log a failure message and use it to invoke <code>EmployeeService#addRollbackMsg(String)</code> | processTxFailure | {
"repo_name": "wildfly-swarm/wildfly-swarm-examples",
"path": "jpa-jaxrs-cdi/jpa-jaxrs-cdi-jta/src/main/java/org/wildfly/swarm/examples/jpajaxrscdijta/EmployeeObservers.java",
"license": "apache-2.0",
"size": 1743
} | [
"javax.enterprise.event.Observes",
"javax.enterprise.event.TransactionPhase"
] | import javax.enterprise.event.Observes; import javax.enterprise.event.TransactionPhase; | import javax.enterprise.event.*; | [
"javax.enterprise"
] | javax.enterprise; | 1,197,158 |
@Test
public void testOtherMechsFail() throws Exception {
for (String s : Arrays.asList("ANONYMOUS", "", "1" + getMechanism(), getMechanism() + "1", "DIGEST-MD5", "DIGEST-SHA", "DIGEST-SHA-256", "DIGEST-SHA-384",
"DIGEST-SHA-512", "PLAIN", "SCRAM-SHA-1", "JBOSS-LOCAL-USER")) {
if (! getMechanism().equals(s)) {
assertMechFails(s);
}
}
} | void function() throws Exception { for (String s : Arrays.asList(STR, STR1STR1STRDIGEST-MD5STRDIGEST-SHASTRDIGEST-SHA-256STRDIGEST-SHA-384STRDIGEST-SHA-512STRPLAINSTRSCRAM-SHA-1STRJBOSS-LOCAL-USER")) { if (! getMechanism().equals(s)) { assertMechFails(s); } } } | /**
* Tests that client fails to use other mechanisms than the server allows.
*/ | Tests that client fails to use other mechanisms than the server allows | testOtherMechsFail | {
"repo_name": "yersan/wildfly-core",
"path": "testsuite/elytron/src/test/java/org/wildfly/test/integration/elytron/sasl/mgmt/AbstractMgmtSaslTestBase.java",
"license": "lgpl-2.1",
"size": 18002
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 2,868,689 |
@Override
public Object convertDataFromString( String pol, ValueMetaInterface convertMeta, String nullIf, String ifNull,
int trim_type ) throws KettleValueException {
if ( convertMeta == null ) {
throw new KettleValueException( "API coding error: convertMeta input parameter should not be equals to null" );
}
// null handling and conversion of value to null
//
String null_value = nullIf;
int inValueType = convertMeta.getType();
int outValueType = getType();
if ( null_value == null ) {
switch ( inValueType ) {
case ValueMetaInterface.TYPE_BOOLEAN:
null_value = Const.NULL_BOOLEAN;
break;
case ValueMetaInterface.TYPE_STRING:
null_value = Const.NULL_STRING;
break;
case ValueMetaInterface.TYPE_BIGNUMBER:
null_value = Const.NULL_BIGNUMBER;
break;
case ValueMetaInterface.TYPE_NUMBER:
null_value = Const.NULL_NUMBER;
break;
case ValueMetaInterface.TYPE_INTEGER:
null_value = Const.NULL_INTEGER;
break;
case ValueMetaInterface.TYPE_DATE:
null_value = Const.NULL_DATE;
break;
case ValueMetaInterface.TYPE_BINARY:
null_value = Const.NULL_BINARY;
break;
default:
null_value = Const.NULL_NONE;
break;
}
}
// See if we need to convert a null value into a String
// For example, we might want to convert null into "Empty".
//
if ( !Utils.isEmpty( ifNull ) ) {
// Note that you can't pull the pad method up here as a nullComp variable
// because you could get an NPE since you haven't checked isEmpty(pol)
// yet!
if ( Utils.isEmpty( pol )
|| pol.equalsIgnoreCase( Const.rightPad( new StringBuilder( null_value ), pol.length() ) ) ) {
pol = ifNull;
}
}
// See if the polled value is empty
// In that case, we have a null value on our hands...
boolean isStringValue = outValueType == Value.VALUE_TYPE_STRING;
Object emptyValue = isStringValue ? Const.NULL_STRING : null;
Boolean isEmptyAndNullDiffer = convertStringToBoolean(
Const.NVL( System.getProperty( Const.KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL, "N" ), "N" ) );
if ( pol == null && isStringValue && isEmptyAndNullDiffer ) {
pol = Const.NULL_STRING;
}
if ( pol == null ) {
return null;
} else if ( Utils.isEmpty( pol ) && !isStringValue ) {
return null;
} else {
// if the null_value is specified, we try to match with that.
//
if ( !Utils.isEmpty( null_value ) ) {
if ( null_value.length() <= pol.length() ) {
// If the polled value is equal to the spaces right-padded null_value,
// we have a match
//
if ( pol.equalsIgnoreCase( Const.rightPad( new StringBuilder( null_value ), pol.length() ) ) ) {
return emptyValue;
}
}
} else {
// Verify if there are only spaces in the polled value...
// We consider that empty as well...
//
if ( Const.onlySpaces( pol ) ) {
return emptyValue;
}
}
}
// Trimming
StringBuilder strpol;
switch ( trim_type ) {
case ValueMetaInterface.TRIM_TYPE_LEFT:
strpol = new StringBuilder( pol );
while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) {
strpol.deleteCharAt( 0 );
}
pol = strpol.toString();
break;
case ValueMetaInterface.TRIM_TYPE_RIGHT:
strpol = new StringBuilder( pol );
while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) {
strpol.deleteCharAt( strpol.length() - 1 );
}
pol = strpol.toString();
break;
case ValueMetaInterface.TRIM_TYPE_BOTH:
strpol = new StringBuilder( pol );
while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) {
strpol.deleteCharAt( 0 );
}
while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) {
strpol.deleteCharAt( strpol.length() - 1 );
}
pol = strpol.toString();
break;
default:
break;
}
// On with the regular program...
// Simply call the ValueMeta routines to do the conversion
// We need to do some effort here: copy all
//
return convertData( convertMeta, pol );
} | Object function( String pol, ValueMetaInterface convertMeta, String nullIf, String ifNull, int trim_type ) throws KettleValueException { if ( convertMeta == null ) { throw new KettleValueException( STR ); } int inValueType = convertMeta.getType(); int outValueType = getType(); if ( null_value == null ) { switch ( inValueType ) { case ValueMetaInterface.TYPE_BOOLEAN: null_value = Const.NULL_BOOLEAN; break; case ValueMetaInterface.TYPE_STRING: null_value = Const.NULL_STRING; break; case ValueMetaInterface.TYPE_BIGNUMBER: null_value = Const.NULL_BIGNUMBER; break; case ValueMetaInterface.TYPE_NUMBER: null_value = Const.NULL_NUMBER; break; case ValueMetaInterface.TYPE_INTEGER: null_value = Const.NULL_INTEGER; break; case ValueMetaInterface.TYPE_DATE: null_value = Const.NULL_DATE; break; case ValueMetaInterface.TYPE_BINARY: null_value = Const.NULL_BINARY; break; default: null_value = Const.NULL_NONE; break; } } if ( Utils.isEmpty( pol ) pol.equalsIgnoreCase( Const.rightPad( new StringBuilder( null_value ), pol.length() ) ) ) { pol = ifNull; } } boolean isStringValue = outValueType == Value.VALUE_TYPE_STRING; Object emptyValue = isStringValue ? 
Const.NULL_STRING : null; Boolean isEmptyAndNullDiffer = convertStringToBoolean( Const.NVL( System.getProperty( Const.KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL, "N" ), "N" ) ); if ( pol == null && isStringValue && isEmptyAndNullDiffer ) { pol = Const.NULL_STRING; } if ( pol == null ) { return null; } else if ( Utils.isEmpty( pol ) && !isStringValue ) { return null; } else { if ( null_value.length() <= pol.length() ) { return emptyValue; } } } else { return emptyValue; } } } StringBuilder strpol; switch ( trim_type ) { case ValueMetaInterface.TRIM_TYPE_LEFT: strpol = new StringBuilder( pol ); while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) { strpol.deleteCharAt( 0 ); } pol = strpol.toString(); break; case ValueMetaInterface.TRIM_TYPE_RIGHT: strpol = new StringBuilder( pol ); while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) { strpol.deleteCharAt( strpol.length() - 1 ); } pol = strpol.toString(); break; case ValueMetaInterface.TRIM_TYPE_BOTH: strpol = new StringBuilder( pol ); while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) { strpol.deleteCharAt( 0 ); } while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) { strpol.deleteCharAt( strpol.length() - 1 ); } pol = strpol.toString(); break; default: break; } } | /**
* Convert the specified string to the data type specified in this object.
*
* @param pol
* the string to be converted
* @param convertMeta
* the metadata of the object (only string type) to be converted
* @param nullIf
* set the object to null if pos equals nullif (IgnoreCase)
* @param ifNull
* set the object to ifNull when pol is empty or null
* @param trim_type
* the trim type to be used (ValueMetaInterface.TRIM_TYPE_XXX)
* @return the object in the data type of this value metadata object
* @throws KettleValueException
* in case there is a data conversion error
*/ | Convert the specified string to the data type specified in this object | convertDataFromString | {
"repo_name": "stepanovdg/pentaho-kettle",
"path": "core/src/main/java/org/pentaho/di/core/row/value/ValueMetaBase.java",
"license": "apache-2.0",
"size": 179272
} | [
"org.pentaho.di.compatibility.Value",
"org.pentaho.di.core.Const",
"org.pentaho.di.core.exception.KettleValueException",
"org.pentaho.di.core.row.ValueMetaInterface",
"org.pentaho.di.core.util.Utils"
] | import org.pentaho.di.compatibility.Value; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.Utils; | import org.pentaho.di.compatibility.*; import org.pentaho.di.core.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*; import org.pentaho.di.core.util.*; | [
"org.pentaho.di"
] | org.pentaho.di; | 867,450 |
public void setUnemploymentamount(final java.math.BigDecimal unemploymentamount) {
this.unemploymentamount = unemploymentamount;
} | void function(final java.math.BigDecimal unemploymentamount) { this.unemploymentamount = unemploymentamount; } | /**
* Set the value related to the column: unemploymentamount.
* @param unemploymentamount the unemploymentamount value you wish to set
*/ | Set the value related to the column: unemploymentamount | setUnemploymentamount | {
"repo_name": "servinglynk/hmis-lynk-open-source",
"path": "hmis-model-v2015/src/main/java/com/servinglynk/hmis/warehouse/model/v2015/Incomeandsources.java",
"license": "mpl-2.0",
"size": 42167
} | [
"java.math.BigDecimal"
] | import java.math.BigDecimal; | import java.math.*; | [
"java.math"
] | java.math; | 1,257,006 |
private static ValidationResult checkVmNumaNodeCount(int numaNodeCount, int cpuCores) {
if (cpuCores < numaNodeCount) {
return new ValidationResult(EngineMessage.VM_NUMA_NODE_MORE_NODES_THAN_CPUS,
String.format("$numaNodes %d", numaNodeCount),
String.format("$cpus %d", cpuCores));
}
return ValidationResult.VALID;
} | static ValidationResult function(int numaNodeCount, int cpuCores) { if (cpuCores < numaNodeCount) { return new ValidationResult(EngineMessage.VM_NUMA_NODE_MORE_NODES_THAN_CPUS, String.format(STR, numaNodeCount), String.format(STR, cpuCores)); } return ValidationResult.VALID; } | /**
* Check if we have enough virtual cpus for the virtual numa nodes
*
* @param numaNodeCount number of virtual numa nodes
* @param cpuCores number of virtual cpu cores
* @return the validation result
*/ | Check if we have enough virtual cpus for the virtual numa nodes | checkVmNumaNodeCount | {
"repo_name": "walteryang47/ovirt-engine",
"path": "backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/numa/vm/NumaValidator.java",
"license": "apache-2.0",
"size": 10781
} | [
"org.ovirt.engine.core.bll.ValidationResult",
"org.ovirt.engine.core.common.errors.EngineMessage"
] | import org.ovirt.engine.core.bll.ValidationResult; import org.ovirt.engine.core.common.errors.EngineMessage; | import org.ovirt.engine.core.bll.*; import org.ovirt.engine.core.common.errors.*; | [
"org.ovirt.engine"
] | org.ovirt.engine; | 1,603,410 |
public void checkForCorruption() {
Set<GraphNode> s = Collections.newSetFromMap(new IdentityHashMap());
// put all the nodes in the idendity backed set
for (Iterator<GraphNode> it = this.nodeIterator(); it.hasNext(); ) {
s.add(it.next());
}
// now again traverse and make sure all the parents and children
// of each node exist in the set
for (Iterator<GraphNode> it = this.nodeIterator(); it.hasNext(); ) {
GraphNode node = it.next();
for (GraphNode parent : node.getParents()) {
// contains operation is on basis of underlying IdentityHashMap
if (!s.contains(parent)) {
throw new RuntimeException(complain("Parent", node, parent));
}
}
for (GraphNode child : node.getChildren()) {
if (!s.contains(child)) {
throw new RuntimeException(complain("Child", node, child));
}
}
}
} | void function() { Set<GraphNode> s = Collections.newSetFromMap(new IdentityHashMap()); for (Iterator<GraphNode> it = this.nodeIterator(); it.hasNext(); ) { s.add(it.next()); } for (Iterator<GraphNode> it = this.nodeIterator(); it.hasNext(); ) { GraphNode node = it.next(); for (GraphNode parent : node.getParents()) { if (!s.contains(parent)) { throw new RuntimeException(complain(STR, node, parent)); } } for (GraphNode child : node.getChildren()) { if (!s.contains(child)) { throw new RuntimeException(complain("Child", node, child)); } } } } | /**
* Checks the underlying graph structure for any corruption. Corruption can be where a parent or
* a child of a node refers to an object, that is not in underlying graph node list.
*
* @throws RuntimeException in case of corruption.
*/ | Checks the underlying graph structure for any corruption. Corruption can be where a parent or a child of a node refers to an object, that is not in underlying graph node list | checkForCorruption | {
"repo_name": "pegasus-isi/pegasus",
"path": "src/edu/isi/pegasus/planner/classes/ADag.java",
"license": "apache-2.0",
"size": 34906
} | [
"edu.isi.pegasus.planner.partitioner.graph.GraphNode",
"java.util.Collections",
"java.util.IdentityHashMap",
"java.util.Iterator",
"java.util.Set"
] | import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.Collections; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.Set; | import edu.isi.pegasus.planner.partitioner.graph.*; import java.util.*; | [
"edu.isi.pegasus",
"java.util"
] | edu.isi.pegasus; java.util; | 1,042,775 |
private HRegionServer getOtherRegionServer(final MiniHBaseCluster cluster,
final HRegionServer notThisOne) {
for (RegionServerThread rst: cluster.getRegionServerThreads()) {
HRegionServer hrs = rst.getRegionServer();
if (hrs.getServerName().equals(notThisOne.getServerName())) continue;
return hrs;
}
return null;
} | HRegionServer function(final MiniHBaseCluster cluster, final HRegionServer notThisOne) { for (RegionServerThread rst: cluster.getRegionServerThreads()) { HRegionServer hrs = rst.getRegionServer(); if (hrs.getServerName().equals(notThisOne.getServerName())) continue; return hrs; } return null; } | /**
* Find regionserver other than the one passed.
* Can't rely on indexes into list of regionservers since crashed servers
* occupy an index.
* @param cluster
* @param notThisOne
* @return A regionserver that is not <code>notThisOne</code> or null if none
* found
*/ | Find regionserver other than the one passed. Can't rely on indexes into list of regionservers since crashed servers occupy an index | getOtherRegionServer | {
"repo_name": "centiteo/hbase",
"path": "src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java",
"license": "apache-2.0",
"size": 12021
} | [
"org.apache.hadoop.hbase.MiniHBaseCluster",
"org.apache.hadoop.hbase.util.JVMClusterUtil"
] | import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.util.JVMClusterUtil; | import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,517,589 |
public OutputStream getSerialOutputStream() {
return outStream;
} | OutputStream function() { return outStream; } | /**
* Get the serial port output stream
*
* @return The serial port output stream
*/ | Get the serial port output stream | getSerialOutputStream | {
"repo_name": "rfdrake/opennms",
"path": "features/sms-reflector/rxtx-commands/src/main/java/org/opennms/rxtx/test/internal/LoopbackEventTest.java",
"license": "gpl-2.0",
"size": 9939
} | [
"java.io.OutputStream"
] | import java.io.OutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,049,579 |
protected void closeOpenController()
{
// MLH TODO: Be nice if we didn't have to import ControllerRegistry
// because of its interface use of DefaultController!
Controller c =
ControllerRegistry.ONLY.findOpenController(getModelObject());
if (c != null) {
c.dispose();
}
} | void function() { Controller c = ControllerRegistry.ONLY.findOpenController(getModelObject()); if (c != null) { c.dispose(); } } | /**
* Close the registered open controller for the associated model object.
* Used when the object or an ancestor has been removed from the model.
*
* @author mlh
*/ | Close the registered open controller for the associated model object. Used when the object or an ancestor has been removed from the model | closeOpenController | {
"repo_name": "cogtool/cogtool",
"path": "java/edu/cmu/cs/hcii/cogtool/ui/DefaultUI.java",
"license": "lgpl-2.1",
"size": 28392
} | [
"edu.cmu.cs.hcii.cogtool.controller.Controller",
"edu.cmu.cs.hcii.cogtool.controller.ControllerRegistry"
] | import edu.cmu.cs.hcii.cogtool.controller.Controller; import edu.cmu.cs.hcii.cogtool.controller.ControllerRegistry; | import edu.cmu.cs.hcii.cogtool.controller.*; | [
"edu.cmu.cs"
] | edu.cmu.cs; | 2,171,655 |
private void initSSLSocketFactoryEx(SSLContext ctx)
throws NoSuchAlgorithmException, KeyManagementException {
sslCtxt = ctx;
protocols = getProtocolList();
} | void function(SSLContext ctx) throws NoSuchAlgorithmException, KeyManagementException { sslCtxt = ctx; protocols = getProtocolList(); } | /**
* Initializes the SSL Socket Factory Extension.
*
* @param ctx the SSL context
* @throws NoSuchAlgorithmException thrown when an algorithm is not
* supported
* @throws KeyManagementException thrown if initialization fails
*/ | Initializes the SSL Socket Factory Extension | initSSLSocketFactoryEx | {
"repo_name": "awhitford/DependencyCheck",
"path": "utils/src/main/java/org/owasp/dependencycheck/utils/SSLSocketFactoryEx.java",
"license": "apache-2.0",
"size": 10078
} | [
"java.security.KeyManagementException",
"java.security.NoSuchAlgorithmException",
"javax.net.ssl.SSLContext"
] | import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import javax.net.ssl.SSLContext; | import java.security.*; import javax.net.ssl.*; | [
"java.security",
"javax.net"
] | java.security; javax.net; | 2,675,498 |
private void resetSeenObjects() {
objectsRead = new ArrayList<Object>();
nextHandle = baseWireHandle;
primitiveData = emptyStream;
} | void function() { objectsRead = new ArrayList<Object>(); nextHandle = baseWireHandle; primitiveData = emptyStream; } | /**
* Reset the collection of objects already loaded by the receiver.
*/ | Reset the collection of objects already loaded by the receiver | resetSeenObjects | {
"repo_name": "JSDemos/android-sdk-20",
"path": "src/java/io/ObjectInputStream.java",
"license": "apache-2.0",
"size": 92845
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 2,007,274 |
public IDataset getGuide_current();
| IDataset function(); | /**
* Guide field coil current in "on" state"
* <p>
* <b>Type:</b> NX_FLOAT
* <b>Units:</b> NX_CURRENT
* </p>
*
* @return the value.
*/ | Guide field coil current in "on" state" Type: NX_FLOAT Units: NX_CURRENT | getGuide_current | {
"repo_name": "xen-0/dawnsci",
"path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXflipper.java",
"license": "epl-1.0",
"size": 8472
} | [
"org.eclipse.january.dataset.IDataset"
] | import org.eclipse.january.dataset.IDataset; | import org.eclipse.january.dataset.*; | [
"org.eclipse.january"
] | org.eclipse.january; | 1,841,697 |
@Inline("$1.log(java.util.logging.Level.INFO, String.valueOf($2), $3)")
public static void info(final Logger log, final Object msg,
final Throwable error) {
log.log(Level.INFO, String.valueOf(msg), error);
}
| @Inline(STR) static void function(final Logger log, final Object msg, final Throwable error) { log.log(Level.INFO, String.valueOf(msg), error); } | /**
* Log an INFO message.
* @param msg The message
* @param error The error
*/ | Log an INFO message | info | {
"repo_name": "skunkiferous/Util",
"path": "xtend/src/main/java/com/blockwithme/util/xtend/JavaUtilLoggingExtension.java",
"license": "apache-2.0",
"size": 7206
} | [
"java.util.logging.Level",
"java.util.logging.Logger",
"org.eclipse.xtext.xbase.lib.Inline"
] | import java.util.logging.Level; import java.util.logging.Logger; import org.eclipse.xtext.xbase.lib.Inline; | import java.util.logging.*; import org.eclipse.xtext.xbase.lib.*; | [
"java.util",
"org.eclipse.xtext"
] | java.util; org.eclipse.xtext; | 431,349 |
public List<Org> findValidated(){
EqualsFilter classFilter = new EqualsFilter("objectClass", "groupOfMembers");
EqualsFilter validatedFilter = new EqualsFilter("businessCategory", Org.STATUS_REGISTERED);
AndFilter filter = new AndFilter();
filter.and(classFilter);
filter.and(validatedFilter);
return ldapTemplate.search(this.orgsSearchBaseDN, filter.encode(), new OrgsDao.OrgAttributesMapper());
} | List<Org> function(){ EqualsFilter classFilter = new EqualsFilter(STR, STR); EqualsFilter validatedFilter = new EqualsFilter(STR, Org.STATUS_REGISTERED); AndFilter filter = new AndFilter(); filter.and(classFilter); filter.and(validatedFilter); return ldapTemplate.search(this.orgsSearchBaseDN, filter.encode(), new OrgsDao.OrgAttributesMapper()); } | /**
* Search for validated organizations defined in ldap.
*
* @return list of validated organizations
*/ | Search for validated organizations defined in ldap | findValidated | {
"repo_name": "MSHE-Ledoux/georchestra-geosync",
"path": "ldapadmin/src/main/java/org/georchestra/ldapadmin/ds/OrgsDao.java",
"license": "gpl-3.0",
"size": 14359
} | [
"java.util.List",
"org.georchestra.ldapadmin.dto.Org",
"org.springframework.ldap.filter.AndFilter",
"org.springframework.ldap.filter.EqualsFilter"
] | import java.util.List; import org.georchestra.ldapadmin.dto.Org; import org.springframework.ldap.filter.AndFilter; import org.springframework.ldap.filter.EqualsFilter; | import java.util.*; import org.georchestra.ldapadmin.dto.*; import org.springframework.ldap.filter.*; | [
"java.util",
"org.georchestra.ldapadmin",
"org.springframework.ldap"
] | java.util; org.georchestra.ldapadmin; org.springframework.ldap; | 1,199,970 |
public void delete(Connection _con) throws SQLException
{
String sql = "delete from T_LangCodes where F_Code=?";
PreparedStatement stmt = null;
try
{
stmt = con_.prepareStatement(sql);
stmt.setString(1,code_);
stmt.executeUpdate();
}
catch (SQLException e)
{
log_.severe(e.toString());
throw e;
}
finally
{
try {
if (stmt != null) stmt.close();
}
catch (SQLException x)
{
}
}
} | void function(Connection _con) throws SQLException { String sql = STR; PreparedStatement stmt = null; try { stmt = con_.prepareStatement(sql); stmt.setString(1,code_); stmt.executeUpdate(); } catch (SQLException e) { log_.severe(e.toString()); throw e; } finally { try { if (stmt != null) stmt.close(); } catch (SQLException x) { } } } | /**
* Delete current record by PK with a connection.
*
*/ | Delete current record by PK with a connection | delete | {
"repo_name": "tedwen/transmem",
"path": "src/com/transmem/data/db/LangCodes.java",
"license": "apache-2.0",
"size": 6090
} | [
"java.sql.Connection",
"java.sql.PreparedStatement",
"java.sql.SQLException"
] | import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; | import java.sql.*; | [
"java.sql"
] | java.sql; | 2,235,945 |
private boolean validatePost(final JsonObject requestBody) {
LOG.trace("Start FlowListResourceValidator#validatePost()");
boolean isValid = false;
setInvalidParameter(VtnServiceJsonConsts.FLOWLIST);
if (requestBody.has(VtnServiceJsonConsts.FLOWLIST)
&& requestBody.get(VtnServiceJsonConsts.FLOWLIST)
.isJsonObject()) {
final JsonObject flowList = requestBody
.getAsJsonObject(VtnServiceJsonConsts.FLOWLIST);
// validation for madatory key: fl_name
setInvalidParameter(VtnServiceJsonConsts.FLNAME);
if (flowList.has(VtnServiceJsonConsts.FLNAME)
&& flowList.getAsJsonPrimitive(VtnServiceJsonConsts.FLNAME)
.getAsString() != null
&& !flowList
.getAsJsonPrimitive(VtnServiceJsonConsts.FLNAME)
.getAsString().isEmpty()) {
isValid = validator.isValidMaxLengthAlphaNum(flowList
.getAsJsonPrimitive(VtnServiceJsonConsts.FLNAME)
.getAsString(), VtnServiceJsonConsts.LEN_32);
}
// validation for key: ip_version(optional)
if (isValid) {
setInvalidParameter(VtnServiceJsonConsts.IPVERSION);
if (flowList.has(VtnServiceJsonConsts.IPVERSION)
&& flowList.getAsJsonPrimitive(
VtnServiceJsonConsts.IPVERSION).getAsString() != null
&& !flowList
.getAsJsonPrimitive(
VtnServiceJsonConsts.IPVERSION)
.getAsString().isEmpty()) {
final String ipVersion = flowList.getAsJsonPrimitive(
VtnServiceJsonConsts.IPVERSION).getAsString();
isValid = ipVersion
.equalsIgnoreCase(VtnServiceJsonConsts.IP)
|| ipVersion
.equalsIgnoreCase(VtnServiceJsonConsts.IPV6);
} else {
flowList.remove(VtnServiceJsonConsts.IPVERSION);
flowList.addProperty(VtnServiceJsonConsts.IPVERSION,
VtnServiceJsonConsts.IP);
isValid = true;
}
}
}
LOG.trace("Complete FlowListResourceValidator#validatePost()");
return isValid;
} | boolean function(final JsonObject requestBody) { LOG.trace(STR); boolean isValid = false; setInvalidParameter(VtnServiceJsonConsts.FLOWLIST); if (requestBody.has(VtnServiceJsonConsts.FLOWLIST) && requestBody.get(VtnServiceJsonConsts.FLOWLIST) .isJsonObject()) { final JsonObject flowList = requestBody .getAsJsonObject(VtnServiceJsonConsts.FLOWLIST); setInvalidParameter(VtnServiceJsonConsts.FLNAME); if (flowList.has(VtnServiceJsonConsts.FLNAME) && flowList.getAsJsonPrimitive(VtnServiceJsonConsts.FLNAME) .getAsString() != null && !flowList .getAsJsonPrimitive(VtnServiceJsonConsts.FLNAME) .getAsString().isEmpty()) { isValid = validator.isValidMaxLengthAlphaNum(flowList .getAsJsonPrimitive(VtnServiceJsonConsts.FLNAME) .getAsString(), VtnServiceJsonConsts.LEN_32); } if (isValid) { setInvalidParameter(VtnServiceJsonConsts.IPVERSION); if (flowList.has(VtnServiceJsonConsts.IPVERSION) && flowList.getAsJsonPrimitive( VtnServiceJsonConsts.IPVERSION).getAsString() != null && !flowList .getAsJsonPrimitive( VtnServiceJsonConsts.IPVERSION) .getAsString().isEmpty()) { final String ipVersion = flowList.getAsJsonPrimitive( VtnServiceJsonConsts.IPVERSION).getAsString(); isValid = ipVersion .equalsIgnoreCase(VtnServiceJsonConsts.IP) ipVersion .equalsIgnoreCase(VtnServiceJsonConsts.IPV6); } else { flowList.remove(VtnServiceJsonConsts.IPVERSION); flowList.addProperty(VtnServiceJsonConsts.IPVERSION, VtnServiceJsonConsts.IP); isValid = true; } } } LOG.trace(STR); return isValid; } | /**
* Validate post request Json object for FlowList API.
*
* @param requestBody
* the request Json object
* @return true, if successful
*/ | Validate post request Json object for FlowList API | validatePost | {
"repo_name": "opendaylight/vtn",
"path": "coordinator/java/vtn-javaapi/src/org/opendaylight/vtn/javaapi/validation/logical/FlowListResourceValidator.java",
"license": "epl-1.0",
"size": 8766
} | [
"com.google.gson.JsonObject",
"org.opendaylight.vtn.javaapi.constants.VtnServiceJsonConsts"
] | import com.google.gson.JsonObject; import org.opendaylight.vtn.javaapi.constants.VtnServiceJsonConsts; | import com.google.gson.*; import org.opendaylight.vtn.javaapi.constants.*; | [
"com.google.gson",
"org.opendaylight.vtn"
] | com.google.gson; org.opendaylight.vtn; | 1,944,485 |
public static OrderDTO delete(OrderDTO order, Integer itemId) {
for (OrderLineDTO line : order.getLines()) {
if (line.getItemId().equals(itemId)) {
line.setDeleted(1);
}
}
return order;
} | static OrderDTO function(OrderDTO order, Integer itemId) { for (OrderLineDTO line : order.getLines()) { if (line.getItemId().equals(itemId)) { line.setDeleted(1); } } return order; } | /**
* Marks all lines from the order with a matching item ID as deleted.
*
* @param order order to delete lines from
* @param itemId item ID to delete
* @return order with line deleted
*/ | Marks all lines from the order with a matching item ID as deleted | delete | {
"repo_name": "liquidJbilling/LT-Jbilling-MsgQ-3.1",
"path": "src/java/com/sapienter/jbilling/server/order/OrderHelper.java",
"license": "agpl-3.0",
"size": 14728
} | [
"com.sapienter.jbilling.server.order.db.OrderDTO",
"com.sapienter.jbilling.server.order.db.OrderLineDTO"
] | import com.sapienter.jbilling.server.order.db.OrderDTO; import com.sapienter.jbilling.server.order.db.OrderLineDTO; | import com.sapienter.jbilling.server.order.db.*; | [
"com.sapienter.jbilling"
] | com.sapienter.jbilling; | 1,115,269 |
@Override public void enterArrayTarget(@NotNull FragmentParser.ArrayTargetContext ctx) { } | @Override public void enterArrayTarget(@NotNull FragmentParser.ArrayTargetContext ctx) { } | /**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/ | The default implementation does nothing | exitParExpr | {
"repo_name": "wouwouwou/module_8",
"path": "src/main/java/pp/block4/cc/cfg/FragmentBaseListener.java",
"license": "apache-2.0",
"size": 9582
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 48,943 |
public void mAssignmentCallback (MResourceAssignment assignment)
{
m_mAssignment = assignment;
if (m_createNew)
dispose();
else
displayCalendar();
} // mAssignmentCallback
| void function (MResourceAssignment assignment) { m_mAssignment = assignment; if (m_createNew) dispose(); else displayCalendar(); } | /**
* Callback.
* Called from VSchedulePanel after VAssignmentDialog finished
* @param assignment New/Changed Assignment
*/ | Callback. Called from VSchedulePanel after VAssignmentDialog finished | mAssignmentCallback | {
"repo_name": "neuroidss/adempiere",
"path": "client/src/org/compiere/apps/search/InfoSchedule.java",
"license": "gpl-2.0",
"size": 14786
} | [
"org.compiere.model.MResourceAssignment"
] | import org.compiere.model.MResourceAssignment; | import org.compiere.model.*; | [
"org.compiere.model"
] | org.compiere.model; | 489,061 |
@ApiDocItem
public FileCollection getDirectDependencies (final Project project,
final Collection<String> listOfConfigurations,
final boolean withTransitiveDependencies) {
FileCollection directDependencies = project.files();
for (String next: listOfConfigurations) {
Configuration nextConfigTransitiveFalse = project.getConfigurations().getByName(next).copyRecursive().setTransitive(withTransitiveDependencies);
directDependencies = directDependencies.plus(nextConfigTransitiveFalse);
}
return directDependencies;
} | FileCollection function (final Project project, final Collection<String> listOfConfigurations, final boolean withTransitiveDependencies) { FileCollection directDependencies = project.files(); for (String next: listOfConfigurations) { Configuration nextConfigTransitiveFalse = project.getConfigurations().getByName(next).copyRecursive().setTransitive(withTransitiveDependencies); directDependencies = directDependencies.plus(nextConfigTransitiveFalse); } return directDependencies; } | /**
* get direct dependencies from configurations
*
* @param project Gradle Project
* @param listOfConfigurations list of configuration names were the dependencies are taken from
* @param withTransitiveDependencies true: include transitive dependencies, false: only direct ones
* @return FileCollection with requested dependencies
*/ | get direct dependencies from configurations | getDirectDependencies | {
"repo_name": "moley/leguan",
"path": "leguan-gradleutils/src/main/java/org/leguan/gradleutils/GradleClasspathUtils.java",
"license": "apache-2.0",
"size": 3789
} | [
"java.util.Collection",
"org.gradle.api.Project",
"org.gradle.api.artifacts.Configuration",
"org.gradle.api.file.FileCollection"
] | import java.util.Collection; import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.FileCollection; | import java.util.*; import org.gradle.api.*; import org.gradle.api.artifacts.*; import org.gradle.api.file.*; | [
"java.util",
"org.gradle.api"
] | java.util; org.gradle.api; | 852,797 |
public Builder put(String resourceUrl, AuthzStatus status) {
Preconditions.checkNotNull(resourceUrl);
Preconditions.checkNotNull(status);
map.put(resourceUrl, status);
return this;
} | Builder function(String resourceUrl, AuthzStatus status) { Preconditions.checkNotNull(resourceUrl); Preconditions.checkNotNull(status); map.put(resourceUrl, status); return this; } | /**
* Add an entry to the builder.
*
* @param resourceUrl The resourceUrl for the new entry.
* @param status The status for the new entry.
* @return The builder, for convenience.
*/ | Add an entry to the builder | put | {
"repo_name": "googlegsa/secmgr",
"path": "src/main/java/com/google/enterprise/secmgr/modules/AuthzResult.java",
"license": "apache-2.0",
"size": 10470
} | [
"com.google.common.base.Preconditions",
"com.google.enterprise.secmgr.common.AuthzStatus"
] | import com.google.common.base.Preconditions; import com.google.enterprise.secmgr.common.AuthzStatus; | import com.google.common.base.*; import com.google.enterprise.secmgr.common.*; | [
"com.google.common",
"com.google.enterprise"
] | com.google.common; com.google.enterprise; | 1,005,232 |
public com.mozu.api.contracts.customer.CustomerAuthTicket refreshUserAuthTicket(String refreshToken, String responseFields) throws Exception
{
MozuClient<com.mozu.api.contracts.customer.CustomerAuthTicket> client = com.mozu.api.clients.commerce.customer.CustomerAuthTicketClient.refreshUserAuthTicketClient( refreshToken, responseFields);
client.setContext(_apiContext);
client.executeRequest();
return client.getResult();
} | com.mozu.api.contracts.customer.CustomerAuthTicket function(String refreshToken, String responseFields) throws Exception { MozuClient<com.mozu.api.contracts.customer.CustomerAuthTicket> client = com.mozu.api.clients.commerce.customer.CustomerAuthTicketClient.refreshUserAuthTicketClient( refreshToken, responseFields); client.setContext(_apiContext); client.executeRequest(); return client.getResult(); } | /**
* Refreshes an existing authentication ticket for a customer account by providing the refresh token string.
* <p><pre><code>
* CustomerAuthTicket customerauthticket = new CustomerAuthTicket();
* CustomerAuthTicket customerAuthTicket = customerauthticket.refreshUserAuthTicket( refreshToken, responseFields);
* </code></pre></p>
* @param refreshToken Alphanumeric string used for access tokens. This token refreshes access for accounts by generating a new developer or application account authentication ticket after an access token expires.
* @param responseFields Use this field to include those fields which are not included by default.
* @return com.mozu.api.contracts.customer.CustomerAuthTicket
* @see com.mozu.api.contracts.customer.CustomerAuthTicket
*/ | Refreshes an existing authentication ticket for a customer account by providing the refresh token string. <code><code> CustomerAuthTicket customerauthticket = new CustomerAuthTicket(); CustomerAuthTicket customerAuthTicket = customerauthticket.refreshUserAuthTicket( refreshToken, responseFields); </code></code> | refreshUserAuthTicket | {
"repo_name": "lakshmi-nair/mozu-java",
"path": "mozu-java-core/src/main/java/com/mozu/api/resources/commerce/customer/CustomerAuthTicketResource.java",
"license": "mit",
"size": 6721
} | [
"com.mozu.api.MozuClient"
] | import com.mozu.api.MozuClient; | import com.mozu.api.*; | [
"com.mozu.api"
] | com.mozu.api; | 1,608,465 |
@Test
public void autowireWhenCustomLoginPageIsSlashLoginThenNoDefaultLoginPageGeneratingFilterIsWired()
throws Exception {
this.spring.configLocations(this.xml("ForSec2919")).autowire();
this.mvc.perform(get("/login")).andExpect(content().string("teapot"));
assertThat(getFilter(this.spring.getContext(), DefaultLoginPageGeneratingFilter.class)).isNull();
} | void function() throws Exception { this.spring.configLocations(this.xml(STR)).autowire(); this.mvc.perform(get(STR)).andExpect(content().string(STR)); assertThat(getFilter(this.spring.getContext(), DefaultLoginPageGeneratingFilter.class)).isNull(); } | /**
* SEC-2919 - DefaultLoginGeneratingFilter incorrectly used if login-url="/login"
*/ | SEC-2919 - DefaultLoginGeneratingFilter incorrectly used if login-url="/login" | autowireWhenCustomLoginPageIsSlashLoginThenNoDefaultLoginPageGeneratingFilterIsWired | {
"repo_name": "fhanik/spring-security",
"path": "config/src/test/java/org/springframework/security/config/http/FormLoginConfigTests.java",
"license": "apache-2.0",
"size": 9035
} | [
"org.assertj.core.api.Assertions",
"org.springframework.security.web.authentication.ui.DefaultLoginPageGeneratingFilter"
] | import org.assertj.core.api.Assertions; import org.springframework.security.web.authentication.ui.DefaultLoginPageGeneratingFilter; | import org.assertj.core.api.*; import org.springframework.security.web.authentication.ui.*; | [
"org.assertj.core",
"org.springframework.security"
] | org.assertj.core; org.springframework.security; | 698,894 |
@Reference(service = IPersistenceProvider.class, policy = ReferencePolicy.DYNAMIC, policyOption = ReferencePolicyOption.GREEDY,
cardinality = ReferenceCardinality.MANDATORY, target = "(com.ibm.ws.ui.persistence.provider=FILE)")
protected synchronized void setIPersistenceProviderFILE(final IPersistenceProvider provider) {
persistenceProviderFile = provider;
Tr.info(tc, "STORAGE_INITIALIZED", "FILE");
clearCachedInstances();
} | @Reference(service = IPersistenceProvider.class, policy = ReferencePolicy.DYNAMIC, policyOption = ReferencePolicyOption.GREEDY, cardinality = ReferenceCardinality.MANDATORY, target = STR) synchronized void function(final IPersistenceProvider provider) { persistenceProviderFile = provider; Tr.info(tc, STR, "FILE"); clearCachedInstances(); } | /**
* Only set the FILE persistence provider. This service is required.
*
* @param provider
*/ | Only set the FILE persistence provider. This service is required | setIPersistenceProviderFILE | {
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.ws.ui/src/com/ibm/ws/ui/internal/v1/pojo/POJOLoaderService.java",
"license": "epl-1.0",
"size": 23461
} | [
"com.ibm.websphere.ras.Tr",
"com.ibm.ws.ui.persistence.IPersistenceProvider",
"org.osgi.service.component.annotations.Reference",
"org.osgi.service.component.annotations.ReferenceCardinality",
"org.osgi.service.component.annotations.ReferencePolicy",
"org.osgi.service.component.annotations.ReferencePolicyOption"
] | import com.ibm.websphere.ras.Tr; import com.ibm.ws.ui.persistence.IPersistenceProvider; import org.osgi.service.component.annotations.Reference; import org.osgi.service.component.annotations.ReferenceCardinality; import org.osgi.service.component.annotations.ReferencePolicy; import org.osgi.service.component.annotations.ReferencePolicyOption; | import com.ibm.websphere.ras.*; import com.ibm.ws.ui.persistence.*; import org.osgi.service.component.annotations.*; | [
"com.ibm.websphere",
"com.ibm.ws",
"org.osgi.service"
] | com.ibm.websphere; com.ibm.ws; org.osgi.service; | 1,429,491 |
@Indexable(type = IndexableType.REINDEX)
public benefit_rating_lkp updatebenefit_rating_lkp(
benefit_rating_lkp benefit_rating_lkp) throws SystemException {
return updatebenefit_rating_lkp(benefit_rating_lkp, true);
} | @Indexable(type = IndexableType.REINDEX) benefit_rating_lkp function( benefit_rating_lkp benefit_rating_lkp) throws SystemException { return updatebenefit_rating_lkp(benefit_rating_lkp, true); } | /**
* Updates the benefit_rating_lkp in the database or adds it if it does not yet exist. Also notifies the appropriate model listeners.
*
* @param benefit_rating_lkp the benefit_rating_lkp
* @return the benefit_rating_lkp that was updated
* @throws SystemException if a system exception occurred
*/ | Updates the benefit_rating_lkp in the database or adds it if it does not yet exist. Also notifies the appropriate model listeners | updatebenefit_rating_lkp | {
"repo_name": "iucn-whp/world-heritage-outlook",
"path": "portlets/iucn-dbservice-portlet/docroot/WEB-INF/src/com/iucn/whp/dbservice/service/base/benefit_rating_lkpLocalServiceBaseImpl.java",
"license": "gpl-2.0",
"size": 175742
} | [
"com.liferay.portal.kernel.exception.SystemException",
"com.liferay.portal.kernel.search.Indexable",
"com.liferay.portal.kernel.search.IndexableType"
] | import com.liferay.portal.kernel.exception.SystemException; import com.liferay.portal.kernel.search.Indexable; import com.liferay.portal.kernel.search.IndexableType; | import com.liferay.portal.kernel.exception.*; import com.liferay.portal.kernel.search.*; | [
"com.liferay.portal"
] | com.liferay.portal; | 1,159,837 |
@SuppressWarnings("unchecked") // compiler confused about varargs and generics.
private void addImplicitElements(XmlDocument lowerPriorityDocument,
MergingReport.Builder mergingReport) {
// if this document is an overlay, tolerate the absence of uses-sdk and do not
// assume implicit minimum versions.
Optional<XmlElement> usesSdk = getByTypeAndKey(
ManifestModel.NodeTypes.USES_SDK, null);
if (mType == Type.OVERLAY && !usesSdk.isPresent()) {
return;
}
// check that the uses-sdk element does not have any tools:node instruction.
if (usesSdk.isPresent()) {
XmlElement usesSdkElement = usesSdk.get();
if (usesSdkElement.getOperationType() != NodeOperationType.MERGE) {
mergingReport
.addMessage(
new SourceFilePosition(
getSourceFile(),
usesSdkElement.getPosition()),
MergingReport.Record.Severity.ERROR,
"uses-sdk element cannot have a \"tools:node\" attribute");
return;
}
}
int thisTargetSdk = getApiLevelFromAttribute(getTargetSdkVersion());
// when we are importing a library, we should never use the build.gradle injected
// values (only valid for overlay, main manifest) so use the raw versions coming from
// the AndroidManifest.xml
int libraryTargetSdk = getApiLevelFromAttribute(
lowerPriorityDocument.getFileType() == Type.LIBRARY
? lowerPriorityDocument.getRawTargetSdkVersion()
: lowerPriorityDocument.getTargetSdkVersion());
// if library is using a code name rather than an API level, make sure this document target
// sdk version is using the same code name.
String libraryTargetSdkVersion = lowerPriorityDocument.getTargetSdkVersion();
if (!Character.isDigit(libraryTargetSdkVersion.charAt(0))) {
// this is a code name, ensure this document uses the same code name.
if (!libraryTargetSdkVersion.equals(getTargetSdkVersion())) {
mergingReport.addMessage(getSourceFile(), MergingReport.Record.Severity.ERROR,
String.format(
"uses-sdk:targetSdkVersion %1$s cannot be different than version "
+ "%2$s declared in library %3$s",
getTargetSdkVersion(),
libraryTargetSdkVersion,
lowerPriorityDocument.getSourceFile().print(false)
)
);
return;
}
}
// same for minSdkVersion, if the library is using a code name, the application must
// also be using the same code name.
String libraryMinSdkVersion = lowerPriorityDocument.getRawMinSdkVersion();
if (!Character.isDigit(libraryMinSdkVersion.charAt(0))) {
// this is a code name, ensure this document uses the same code name.
if (!libraryMinSdkVersion.equals(getMinSdkVersion())) {
mergingReport.addMessage(getSourceFile(), MergingReport.Record.Severity.ERROR,
String.format(
"uses-sdk:minSdkVersion %1$s cannot be different than version "
+ "%2$s declared in library %3$s",
getMinSdkVersion(),
libraryMinSdkVersion,
lowerPriorityDocument.getSourceFile().print(false)
)
);
return;
}
}
if (!checkUsesSdkMinVersion(lowerPriorityDocument, mergingReport)) {
String error = String.format(
"uses-sdk:minSdkVersion %1$s cannot be smaller than version "
+ "%2$s declared in library %3$s\n"
+ "\tSuggestion: use tools:overrideLibrary=\"%4$s\" to force usage",
getMinSdkVersion(),
lowerPriorityDocument.getRawMinSdkVersion(),
lowerPriorityDocument.getSourceFile().print(false),
lowerPriorityDocument.getPackageName());
if (usesSdk.isPresent()) {
mergingReport.addMessage(
new SourceFilePosition(getSourceFile(), usesSdk.get().getPosition()),
MergingReport.Record.Severity.ERROR,
error);
} else {
mergingReport.addMessage(
getSourceFile(), MergingReport.Record.Severity.ERROR, error);
}
return;
}
// if the merged document target SDK is equal or smaller than the library's, nothing to do.
if (thisTargetSdk <= libraryTargetSdk) {
return;
}
// There is no need to add any implied permissions when targeting an old runtime.
if (thisTargetSdk < 4) {
return;
}
boolean hasWriteToExternalStoragePermission =
lowerPriorityDocument.getByTypeAndKey(
USES_PERMISSION, permission("WRITE_EXTERNAL_STORAGE")).isPresent();
if (libraryTargetSdk < 4) {
addIfAbsent(mergingReport.getActionRecorder(),
USES_PERMISSION,
permission("WRITE_EXTERNAL_STORAGE"),
lowerPriorityDocument.getPackageName() + " has a targetSdkVersion < 4");
hasWriteToExternalStoragePermission = true;
addIfAbsent(mergingReport.getActionRecorder(),
USES_PERMISSION,
permission("READ_PHONE_STATE"),
lowerPriorityDocument.getPackageName() + " has a targetSdkVersion < 4");
}
// If the application has requested WRITE_EXTERNAL_STORAGE, we will
// force them to always take READ_EXTERNAL_STORAGE as well. We always
// do this (regardless of target API version) because we can't have
// an app with write permission but not read permission.
if (hasWriteToExternalStoragePermission) {
addIfAbsent(mergingReport.getActionRecorder(),
USES_PERMISSION,
permission("READ_EXTERNAL_STORAGE"),
lowerPriorityDocument.getPackageName() + " requested WRITE_EXTERNAL_STORAGE");
}
// Pre-JellyBean call log permission compatibility.
if (thisTargetSdk >= 16 && libraryTargetSdk < 16) {
if (lowerPriorityDocument.getByTypeAndKey(
USES_PERMISSION, permission("READ_CONTACTS")).isPresent()) {
addIfAbsent(mergingReport.getActionRecorder(),
USES_PERMISSION, permission("READ_CALL_LOG"),
lowerPriorityDocument.getPackageName()
+ " has targetSdkVersion < 16 and requested READ_CONTACTS");
}
if (lowerPriorityDocument.getByTypeAndKey(
USES_PERMISSION, permission("WRITE_CONTACTS")).isPresent()) {
addIfAbsent(mergingReport.getActionRecorder(),
USES_PERMISSION, permission("WRITE_CALL_LOG"),
lowerPriorityDocument.getPackageName()
+ " has targetSdkVersion < 16 and requested WRITE_CONTACTS");
}
}
} | @SuppressWarnings(STR) void function(XmlDocument lowerPriorityDocument, MergingReport.Builder mergingReport) { Optional<XmlElement> usesSdk = getByTypeAndKey( ManifestModel.NodeTypes.USES_SDK, null); if (mType == Type.OVERLAY && !usesSdk.isPresent()) { return; } if (usesSdk.isPresent()) { XmlElement usesSdkElement = usesSdk.get(); if (usesSdkElement.getOperationType() != NodeOperationType.MERGE) { mergingReport .addMessage( new SourceFilePosition( getSourceFile(), usesSdkElement.getPosition()), MergingReport.Record.Severity.ERROR, STRtools:node\STR); return; } } int thisTargetSdk = getApiLevelFromAttribute(getTargetSdkVersion()); int libraryTargetSdk = getApiLevelFromAttribute( lowerPriorityDocument.getFileType() == Type.LIBRARY ? lowerPriorityDocument.getRawTargetSdkVersion() : lowerPriorityDocument.getTargetSdkVersion()); String libraryTargetSdkVersion = lowerPriorityDocument.getTargetSdkVersion(); if (!Character.isDigit(libraryTargetSdkVersion.charAt(0))) { if (!libraryTargetSdkVersion.equals(getTargetSdkVersion())) { mergingReport.addMessage(getSourceFile(), MergingReport.Record.Severity.ERROR, String.format( STR + STR, getTargetSdkVersion(), libraryTargetSdkVersion, lowerPriorityDocument.getSourceFile().print(false) ) ); return; } } String libraryMinSdkVersion = lowerPriorityDocument.getRawMinSdkVersion(); if (!Character.isDigit(libraryMinSdkVersion.charAt(0))) { if (!libraryMinSdkVersion.equals(getMinSdkVersion())) { mergingReport.addMessage(getSourceFile(), MergingReport.Record.Severity.ERROR, String.format( STR + STR, getMinSdkVersion(), libraryMinSdkVersion, lowerPriorityDocument.getSourceFile().print(false) ) ); return; } } if (!checkUsesSdkMinVersion(lowerPriorityDocument, mergingReport)) { String error = String.format( STR + STR + STR%4$s\STR, getMinSdkVersion(), lowerPriorityDocument.getRawMinSdkVersion(), lowerPriorityDocument.getSourceFile().print(false), lowerPriorityDocument.getPackageName()); if (usesSdk.isPresent()) { 
mergingReport.addMessage( new SourceFilePosition(getSourceFile(), usesSdk.get().getPosition()), MergingReport.Record.Severity.ERROR, error); } else { mergingReport.addMessage( getSourceFile(), MergingReport.Record.Severity.ERROR, error); } return; } if (thisTargetSdk <= libraryTargetSdk) { return; } if (thisTargetSdk < 4) { return; } boolean hasWriteToExternalStoragePermission = lowerPriorityDocument.getByTypeAndKey( USES_PERMISSION, permission(STR)).isPresent(); if (libraryTargetSdk < 4) { addIfAbsent(mergingReport.getActionRecorder(), USES_PERMISSION, permission(STR), lowerPriorityDocument.getPackageName() + STR); hasWriteToExternalStoragePermission = true; addIfAbsent(mergingReport.getActionRecorder(), USES_PERMISSION, permission(STR), lowerPriorityDocument.getPackageName() + STR); } if (hasWriteToExternalStoragePermission) { addIfAbsent(mergingReport.getActionRecorder(), USES_PERMISSION, permission(STR), lowerPriorityDocument.getPackageName() + STR); } if (thisTargetSdk >= 16 && libraryTargetSdk < 16) { if (lowerPriorityDocument.getByTypeAndKey( USES_PERMISSION, permission(STR)).isPresent()) { addIfAbsent(mergingReport.getActionRecorder(), USES_PERMISSION, permission(STR), lowerPriorityDocument.getPackageName() + STR); } if (lowerPriorityDocument.getByTypeAndKey( USES_PERMISSION, permission(STR)).isPresent()) { addIfAbsent(mergingReport.getActionRecorder(), USES_PERMISSION, permission(STR), lowerPriorityDocument.getPackageName() + STR); } } } | /**
* Add all implicit elements from the passed lower priority document that are
* required in the target SDK.
*/ | Add all implicit elements from the passed lower priority document that are required in the target SDK | addImplicitElements | {
"repo_name": "tranleduy2000/javaide",
"path": "aosp/manifest-merger/src/main/java/com/android/manifmerger/XmlDocument.java",
"license": "gpl-3.0",
"size": 23681
} | [
"com.android.ide.common.blame.SourceFilePosition",
"com.google.common.base.Optional"
] | import com.android.ide.common.blame.SourceFilePosition; import com.google.common.base.Optional; | import com.android.ide.common.blame.*; import com.google.common.base.*; | [
"com.android.ide",
"com.google.common"
] | com.android.ide; com.google.common; | 234,155 |
public static boolean checkpw(String plaintext, String hashed) {
byte hashed_bytes[];
byte try_bytes[];
try {
String try_pw = hashpw(plaintext, hashed );
hashed_bytes = hashed.getBytes("UTF-8");
try_bytes = try_pw.getBytes("UTF-8");
} catch (UnsupportedEncodingException uee) {
System.out.println(uee.getMessage());
uee.printStackTrace();
return false;
}
if (hashed_bytes.length != try_bytes.length)
return false;
byte ret = 0;
for (int i = 0; i < try_bytes.length; i++)
ret |= hashed_bytes[i] ^ try_bytes[i];
return ret == 0;
}
| static boolean function(String plaintext, String hashed) { byte hashed_bytes[]; byte try_bytes[]; try { String try_pw = hashpw(plaintext, hashed ); hashed_bytes = hashed.getBytes("UTF-8"); try_bytes = try_pw.getBytes("UTF-8"); } catch (UnsupportedEncodingException uee) { System.out.println(uee.getMessage()); uee.printStackTrace(); return false; } if (hashed_bytes.length != try_bytes.length) return false; byte ret = 0; for (int i = 0; i < try_bytes.length; i++) ret = hashed_bytes[i] ^ try_bytes[i]; return ret == 0; } | /**
* Check that a plaintext password matches a previously hashed
* one
* @param plaintext the plaintext password to verify
* @param hashed the previously-hashed password
* @return true if the passwords match, false otherwise
*/ | Check that a plaintext password matches a previously hashed one | checkpw | {
"repo_name": "PuppyRush/WidgetStore",
"path": "src/org/mindrot/jbcrypt/BCrypt.java",
"license": "apache-2.0",
"size": 28908
} | [
"java.io.UnsupportedEncodingException"
] | import java.io.UnsupportedEncodingException; | import java.io.*; | [
"java.io"
] | java.io; | 1,627,228 |
@com.idevicesinc.sweetblue.annotations.Advanced
public HistoricalData newHistoricalData(final byte[] data, final EpochTime epochTime)
{
final BleDeviceConfig.HistoricalDataFactory factory_device = conf_node().historicalDataFactory;
final BleDeviceConfig.HistoricalDataFactory factory_mngr = conf_mngr().historicalDataFactory;
final BleDeviceConfig.HistoricalDataFactory factory = factory_device != null ? factory_device : factory_mngr;
if( factory != null )
{
return factory.newHistoricalData(data, epochTime);
}
else
{
return new HistoricalData(data, epochTime);
}
} | @com.idevicesinc.sweetblue.annotations.Advanced HistoricalData function(final byte[] data, final EpochTime epochTime) { final BleDeviceConfig.HistoricalDataFactory factory_device = conf_node().historicalDataFactory; final BleDeviceConfig.HistoricalDataFactory factory_mngr = conf_mngr().historicalDataFactory; final BleDeviceConfig.HistoricalDataFactory factory = factory_device != null ? factory_device : factory_mngr; if( factory != null ) { return factory.newHistoricalData(data, epochTime); } else { return new HistoricalData(data, epochTime); } } | /**
* Returns a new {@link com.idevicesinc.sweetblue.utils.HistoricalData} instance using
* {@link com.idevicesinc.sweetblue.BleDeviceConfig#historicalDataFactory} if available.
*/ | Returns a new <code>com.idevicesinc.sweetblue.utils.HistoricalData</code> instance using <code>com.idevicesinc.sweetblue.BleDeviceConfig#historicalDataFactory</code> if available | newHistoricalData | {
"repo_name": "iDevicesInc/SweetBlue",
"path": "library/src/main/java/com/idevicesinc/sweetblue/BleNode.java",
"license": "gpl-3.0",
"size": 37318
} | [
"com.idevicesinc.sweetblue.utils.EpochTime",
"com.idevicesinc.sweetblue.utils.HistoricalData"
] | import com.idevicesinc.sweetblue.utils.EpochTime; import com.idevicesinc.sweetblue.utils.HistoricalData; | import com.idevicesinc.sweetblue.utils.*; | [
"com.idevicesinc.sweetblue"
] | com.idevicesinc.sweetblue; | 1,896,871 |
public void setCamera (final Camera cam) {
if (camera == null) throw new GdxRuntimeException("Call begin() first.");
if (renderables.size > 0) flush();
camera = cam;
}
| void function (final Camera cam) { if (camera == null) throw new GdxRuntimeException(STR); if (renderables.size > 0) flush(); camera = cam; } | /** Change the camera in between {@link #begin(Camera)} and {@link #end()}. This causes the batch to be flushed. Can only be
* called after the call to {@link #begin(Camera)} and before the call to {@link #end()}.
* @param cam The new camera to use. */ | Change the camera in between <code>#begin(Camera)</code> and <code>#end()</code>. This causes the batch to be flushed. Can only be called after the call to <code>#begin(Camera)</code> and before the call to <code>#end()</code> | setCamera | {
"repo_name": "MikkelTAndersen/libgdx",
"path": "gdx/src/com/badlogic/gdx/graphics/g3d/ModelBatch.java",
"license": "apache-2.0",
"size": 17496
} | [
"com.badlogic.gdx.graphics.Camera",
"com.badlogic.gdx.utils.GdxRuntimeException"
] | import com.badlogic.gdx.graphics.Camera; import com.badlogic.gdx.utils.GdxRuntimeException; | import com.badlogic.gdx.graphics.*; import com.badlogic.gdx.utils.*; | [
"com.badlogic.gdx"
] | com.badlogic.gdx; | 2,626,088 |
private void reduceResults(final List<AnalysisResultFuture> results,
final Map<ComponentJob, AnalyzerResult> resultMap,
final List<AnalysisResultReductionException> reductionErrors) {
if (_hasRun.get()) {
// already reduced
return;
}
_hasRun.set(true);
for (AnalysisResultFuture result : results) {
if (result.isErrornous()) {
logger.error("Encountered errorneous slave result. Result reduction will stop. Result={}", result);
final List<Throwable> errors = result.getErrors();
if (!errors.isEmpty()) {
final Throwable firstError = errors.get(0);
logger.error(
"Encountered error before reducing results (showing stack trace of invoking the reducer): "
+ firstError.getMessage(), new Throwable());
_analysisListener.errorUknown(_masterJob, firstError);
}
// error occurred!
return;
}
}
final Collection<AnalyzerJob> analyzerJobs = _masterJob.getAnalyzerJobs();
for (AnalyzerJob masterAnalyzerJob : analyzerJobs) {
final Collection<AnalyzerResult> slaveResults = new ArrayList<AnalyzerResult>();
logger.info("Reducing {} slave results for component: {}", results.size(), masterAnalyzerJob);
for (AnalysisResultFuture result : results) {
final Map<ComponentJob, AnalyzerResult> slaveResultMap = result.getResultMap();
final List<AnalyzerJob> slaveAnalyzerJobs = CollectionUtils2.filterOnClass(slaveResultMap.keySet(),
AnalyzerJob.class);
final AnalyzerJobHelper analyzerJobHelper = new AnalyzerJobHelper(slaveAnalyzerJobs);
final AnalyzerJob slaveAnalyzerJob = analyzerJobHelper.getAnalyzerJob(masterAnalyzerJob);
if (slaveAnalyzerJob == null) {
throw new IllegalStateException("Could not resolve slave component matching [" + masterAnalyzerJob
+ "] in slave result: " + result);
}
final AnalyzerResult analyzerResult = result.getResult(slaveAnalyzerJob);
slaveResults.add(analyzerResult);
}
reduce(masterAnalyzerJob, slaveResults, resultMap, reductionErrors);
}
} | void function(final List<AnalysisResultFuture> results, final Map<ComponentJob, AnalyzerResult> resultMap, final List<AnalysisResultReductionException> reductionErrors) { if (_hasRun.get()) { return; } _hasRun.set(true); for (AnalysisResultFuture result : results) { if (result.isErrornous()) { logger.error(STR, result); final List<Throwable> errors = result.getErrors(); if (!errors.isEmpty()) { final Throwable firstError = errors.get(0); logger.error( STR + firstError.getMessage(), new Throwable()); _analysisListener.errorUknown(_masterJob, firstError); } return; } } final Collection<AnalyzerJob> analyzerJobs = _masterJob.getAnalyzerJobs(); for (AnalyzerJob masterAnalyzerJob : analyzerJobs) { final Collection<AnalyzerResult> slaveResults = new ArrayList<AnalyzerResult>(); logger.info(STR, results.size(), masterAnalyzerJob); for (AnalysisResultFuture result : results) { final Map<ComponentJob, AnalyzerResult> slaveResultMap = result.getResultMap(); final List<AnalyzerJob> slaveAnalyzerJobs = CollectionUtils2.filterOnClass(slaveResultMap.keySet(), AnalyzerJob.class); final AnalyzerJobHelper analyzerJobHelper = new AnalyzerJobHelper(slaveAnalyzerJobs); final AnalyzerJob slaveAnalyzerJob = analyzerJobHelper.getAnalyzerJob(masterAnalyzerJob); if (slaveAnalyzerJob == null) { throw new IllegalStateException(STR + masterAnalyzerJob + STR + result); } final AnalyzerResult analyzerResult = result.getResult(slaveAnalyzerJob); slaveResults.add(analyzerResult); } reduce(masterAnalyzerJob, slaveResults, resultMap, reductionErrors); } } | /**
* Reduces all the analyzer results of an analysis
*
* @param results
* @param resultMap
* @param reductionErrors
*/ | Reduces all the analyzer results of an analysis | reduceResults | {
"repo_name": "datacleaner/AnalyzerBeans",
"path": "env/cluster/src/main/java/org/eobjects/analyzer/cluster/DistributedAnalysisResultReducer.java",
"license": "lgpl-3.0",
"size": 8148
} | [
"java.util.ArrayList",
"java.util.Collection",
"java.util.List",
"java.util.Map",
"org.eobjects.analyzer.job.AnalyzerJob",
"org.eobjects.analyzer.job.AnalyzerJobHelper",
"org.eobjects.analyzer.job.ComponentJob",
"org.eobjects.analyzer.job.runner.AnalysisResultFuture",
"org.eobjects.analyzer.result.AnalyzerResult",
"org.eobjects.analyzer.util.CollectionUtils2"
] | import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import org.eobjects.analyzer.job.AnalyzerJob; import org.eobjects.analyzer.job.AnalyzerJobHelper; import org.eobjects.analyzer.job.ComponentJob; import org.eobjects.analyzer.job.runner.AnalysisResultFuture; import org.eobjects.analyzer.result.AnalyzerResult; import org.eobjects.analyzer.util.CollectionUtils2; | import java.util.*; import org.eobjects.analyzer.job.*; import org.eobjects.analyzer.job.runner.*; import org.eobjects.analyzer.result.*; import org.eobjects.analyzer.util.*; | [
"java.util",
"org.eobjects.analyzer"
] | java.util; org.eobjects.analyzer; | 309,414 |
protected boolean supportsConcurrentChildInstantiation(ScopeImpl flowScope) {
CoreActivityBehavior<?> behavior = flowScope.getActivityBehavior();
return behavior == null || !(behavior instanceof SequentialMultiInstanceActivityBehavior);
} | boolean function(ScopeImpl flowScope) { CoreActivityBehavior<?> behavior = flowScope.getActivityBehavior(); return behavior == null !(behavior instanceof SequentialMultiInstanceActivityBehavior); } | /**
* Cannot create more than inner instance in a sequential MI construct
*/ | Cannot create more than inner instance in a sequential MI construct | supportsConcurrentChildInstantiation | {
"repo_name": "AlexMinsk/camunda-bpm-platform",
"path": "engine/src/main/java/org/camunda/bpm/engine/impl/cmd/AbstractInstantiationCmd.java",
"license": "apache-2.0",
"size": 15909
} | [
"org.camunda.bpm.engine.impl.bpmn.behavior.SequentialMultiInstanceActivityBehavior",
"org.camunda.bpm.engine.impl.core.delegate.CoreActivityBehavior",
"org.camunda.bpm.engine.impl.pvm.process.ScopeImpl"
] | import org.camunda.bpm.engine.impl.bpmn.behavior.SequentialMultiInstanceActivityBehavior; import org.camunda.bpm.engine.impl.core.delegate.CoreActivityBehavior; import org.camunda.bpm.engine.impl.pvm.process.ScopeImpl; | import org.camunda.bpm.engine.impl.bpmn.behavior.*; import org.camunda.bpm.engine.impl.core.delegate.*; import org.camunda.bpm.engine.impl.pvm.process.*; | [
"org.camunda.bpm"
] | org.camunda.bpm; | 677,004 |
@NonNull
public Builder addFilterNamespaces(@NonNull String... namespaces) {
Preconditions.checkNotNull(namespaces);
resetIfBuilt();
return addFilterNamespaces(Arrays.asList(namespaces));
} | Builder function(@NonNull String... namespaces) { Preconditions.checkNotNull(namespaces); resetIfBuilt(); return addFilterNamespaces(Arrays.asList(namespaces)); } | /**
* Adds a namespace filter to {@link SearchSpec} Entry. Only search for documents that
* have the specified namespaces.
* <p>If unset, the query will search over all namespaces.
*/ | Adds a namespace filter to <code>SearchSpec</code> Entry. Only search for documents that have the specified namespaces. If unset, the query will search over all namespaces | addFilterNamespaces | {
"repo_name": "AndroidX/androidx",
"path": "appsearch/appsearch/src/main/java/androidx/appsearch/app/SearchSpec.java",
"license": "apache-2.0",
"size": 29551
} | [
"androidx.annotation.NonNull",
"androidx.core.util.Preconditions",
"java.util.Arrays"
] | import androidx.annotation.NonNull; import androidx.core.util.Preconditions; import java.util.Arrays; | import androidx.annotation.*; import androidx.core.util.*; import java.util.*; | [
"androidx.annotation",
"androidx.core",
"java.util"
] | androidx.annotation; androidx.core; java.util; | 1,260,853 |
public static TransactionResponse doAuthorizationAndCapture(
Map<String, String> parameters) throws PayUException,
InvalidParametersException, ConnectionException {
return doPayment(parameters, TransactionType.AUTHORIZATION_AND_CAPTURE, HttpClientHelper.SOCKET_TIMEOUT);
} | static TransactionResponse function( Map<String, String> parameters) throws PayUException, InvalidParametersException, ConnectionException { return doPayment(parameters, TransactionType.AUTHORIZATION_AND_CAPTURE, HttpClientHelper.SOCKET_TIMEOUT); } | /**
* Do an authorization and capture transaction
*
* @param parameters
* The parameters to be sent to the server
* @return The transaction response to the request sent
* @throws PayUException
* @throws ConnectionException
* @throws InvalidParametersException
*/ | Do an authorization and capture transaction | doAuthorizationAndCapture | {
"repo_name": "juanalvarez123/payu-latam-java-payments-sdk",
"path": "src/main/java/com/payu/sdk/PayUPayments.java",
"license": "mit",
"size": 21798
} | [
"com.payu.sdk.exceptions.ConnectionException",
"com.payu.sdk.exceptions.InvalidParametersException",
"com.payu.sdk.exceptions.PayUException",
"com.payu.sdk.helper.HttpClientHelper",
"com.payu.sdk.model.TransactionResponse",
"com.payu.sdk.model.TransactionType",
"java.util.Map"
] | import com.payu.sdk.exceptions.ConnectionException; import com.payu.sdk.exceptions.InvalidParametersException; import com.payu.sdk.exceptions.PayUException; import com.payu.sdk.helper.HttpClientHelper; import com.payu.sdk.model.TransactionResponse; import com.payu.sdk.model.TransactionType; import java.util.Map; | import com.payu.sdk.exceptions.*; import com.payu.sdk.helper.*; import com.payu.sdk.model.*; import java.util.*; | [
"com.payu.sdk",
"java.util"
] | com.payu.sdk; java.util; | 838,040 |
@Test
public void testGetPage() {
UUID id = UUID.randomUUID();
ItemStack stack = new ItemStack(Material.WOOD);
IBankItem bankItem = getBankItem(id, stack, 128);
bankItem.setItemsPerPage(2);
List<IBankItem> items = bankItem.getPage(1);
assertEquals(2, items.size());
bankItem.setItemsPerPage(1);
items = bankItem.getPage(1);
assertEquals(1, items.size());
} | void function() { UUID id = UUID.randomUUID(); ItemStack stack = new ItemStack(Material.WOOD); IBankItem bankItem = getBankItem(id, stack, 128); bankItem.setItemsPerPage(2); List<IBankItem> items = bankItem.getPage(1); assertEquals(2, items.size()); bankItem.setItemsPerPage(1); items = bankItem.getPage(1); assertEquals(1, items.size()); } | /**
* Make sure {@link getPage} works correctly.
*/ | Make sure <code>getPage</code> works correctly | testGetPage | {
"repo_name": "JCThePants/NucleusFramework",
"path": "tests/src/com/jcwhatever/nucleus/providers/bankitems/IBankItemTest.java",
"license": "mit",
"size": 8428
} | [
"java.util.List",
"java.util.UUID",
"org.bukkit.Material",
"org.bukkit.inventory.ItemStack",
"org.junit.Assert"
] | import java.util.List; import java.util.UUID; import org.bukkit.Material; import org.bukkit.inventory.ItemStack; import org.junit.Assert; | import java.util.*; import org.bukkit.*; import org.bukkit.inventory.*; import org.junit.*; | [
"java.util",
"org.bukkit",
"org.bukkit.inventory",
"org.junit"
] | java.util; org.bukkit; org.bukkit.inventory; org.junit; | 1,551,267 |
@Test
public void testDeserializeNull() throws IOException {
Mockito.when(this.parser.getText()).thenReturn(null);
final JsonDateDeserializer deserializer = new JsonDateDeserializer();
final Date date = deserializer.deserialize(this.parser, this.context);
Assert.assertNull(date);
} | void function() throws IOException { Mockito.when(this.parser.getText()).thenReturn(null); final JsonDateDeserializer deserializer = new JsonDateDeserializer(); final Date date = deserializer.deserialize(this.parser, this.context); Assert.assertNull(date); } | /**
* Test the de-serialization method with null.
*
* @throws IOException When exception happens during serialization
*/ | Test the de-serialization method with null | testDeserializeNull | {
"repo_name": "irontable/genie",
"path": "genie-common/src/test/java/com/netflix/genie/common/util/JsonDateDeserializerUnitTests.java",
"license": "apache-2.0",
"size": 3151
} | [
"java.io.IOException",
"java.util.Date",
"org.junit.Assert",
"org.mockito.Mockito"
] | import java.io.IOException; import java.util.Date; import org.junit.Assert; import org.mockito.Mockito; | import java.io.*; import java.util.*; import org.junit.*; import org.mockito.*; | [
"java.io",
"java.util",
"org.junit",
"org.mockito"
] | java.io; java.util; org.junit; org.mockito; | 408,849 |
public void setJobDetail(JobDetail jobDetail) {
this.jobDetail = jobDetail;
} | void function(JobDetail jobDetail) { this.jobDetail = jobDetail; } | /**
* Set the JobDetail that this trigger should be associated with.
* <p>This is typically used with a bean reference if the JobDetail
* is a Spring-managed bean. Alternatively, the trigger can also
* be associated with a job by name and group.
* @see #setJobName
* @see #setJobGroup
*/ | Set the JobDetail that this trigger should be associated with. This is typically used with a bean reference if the JobDetail is a Spring-managed bean. Alternatively, the trigger can also be associated with a job by name and group | setJobDetail | {
"repo_name": "raedle/univis",
"path": "lib/springframework-1.2.8/src/org/springframework/scheduling/quartz/CronTriggerBean.java",
"license": "lgpl-2.1",
"size": 4079
} | [
"org.quartz.JobDetail"
] | import org.quartz.JobDetail; | import org.quartz.*; | [
"org.quartz"
] | org.quartz; | 483,081 |
@Test
public void whenAddStringToListContainerThenGetItIn() {
SimpleContainer<String> list = new ListContainer<>();
list.add(new String("one"));
list.add(new String("two"));
list.add(new String("three"));
String result = list.get(2);
assertThat(result, is("three"));
} | void function() { SimpleContainer<String> list = new ListContainer<>(); list.add(new String("one")); list.add(new String("two")); list.add(new String("three")); String result = list.get(2); assertThat(result, is("three")); } | /**
* Test add String to container and get it.
*/ | Test add String to container and get it | whenAddStringToListContainerThenGetItIn | {
"repo_name": "dimir2/vivanov",
"path": "part2/ch2/src/test/java/ru/job4j/multithreading/monitor/list/ListContainerTest.java",
"license": "apache-2.0",
"size": 4084
} | [
"org.hamcrest.core.Is",
"org.junit.Assert"
] | import org.hamcrest.core.Is; import org.junit.Assert; | import org.hamcrest.core.*; import org.junit.*; | [
"org.hamcrest.core",
"org.junit"
] | org.hamcrest.core; org.junit; | 1,106,016 |
private CigarElement getNextIndelCigarElement() {
if ( isBeforeDeletionStart() ) {
final CigarElement element = getNextOnGenomeCigarElement();
Utils.validateArg(element != null && element.getOperator() == CigarOperator.D, () -> "Immediately before deletion but the next cigar element isn't a deletion " + element);
return element;
} else if ( isBeforeInsertion() ) {
final CigarElement element = getBetweenNextPosition().get(0);
Utils.validateArg(element.getOperator() == CigarOperator.I, () -> "Immediately before insertion but the next cigar element isn't an insertion " + element);
return element;
} else {
return null;
}
} | CigarElement function() { if ( isBeforeDeletionStart() ) { final CigarElement element = getNextOnGenomeCigarElement(); Utils.validateArg(element != null && element.getOperator() == CigarOperator.D, () -> STR + element); return element; } else if ( isBeforeInsertion() ) { final CigarElement element = getBetweenNextPosition().get(0); Utils.validateArg(element.getOperator() == CigarOperator.I, () -> STR + element); return element; } else { return null; } } | /**
* Helpful function to get the immediately following cigar element, for an insertion or deletion
*
* if this state precedes a deletion (i.e., next position on genome) or insertion (immediately between
* this and the next position) returns the CigarElement corresponding to this event. Otherwise returns
* null.
*
* @return a CigarElement, or null if the next alignment state isn't an insertion or deletion.
*/ | Helpful function to get the immediately following cigar element, for an insertion or deletion if this state precedes a deletion (i.e., next position on genome) or insertion (immediately between this and the next position) returns the CigarElement corresponding to this event. Otherwise returns null | getNextIndelCigarElement | {
"repo_name": "BGI-flexlab/SOAPgaeaDevelopment4.0",
"path": "src/main/java/org/bgi/flexlab/gaea/tools/haplotypecaller/pileup/PileupElement.java",
"license": "gpl-3.0",
"size": 20929
} | [
"org.bgi.flexlab.gaea.util.Utils"
] | import org.bgi.flexlab.gaea.util.Utils; | import org.bgi.flexlab.gaea.util.*; | [
"org.bgi.flexlab"
] | org.bgi.flexlab; | 883,494 |
public static boolean isProxy(ServiceReference sr) {
return sr.getProperty(MenuOperationsProxy.GVNIX_PROXY_COMPONENT) != null;
} | static boolean function(ServiceReference sr) { return sr.getProperty(MenuOperationsProxy.GVNIX_PROXY_COMPONENT) != null; } | /**
* Check if service reference is the service proxy <br>
* Uses {@link MenuOperationsProxy#GVNIX_PROXY_COMPONENT} service property.
*
* @param sr
* @return
*/ | Check if service reference is the service proxy Uses <code>MenuOperationsProxy#GVNIX_PROXY_COMPONENT</code> service property | isProxy | {
"repo_name": "osroca/gvnix",
"path": "addon-web-mvc-menu/src/main/java/org/gvnix/web/menu/roo/addon/FilterMenuOperationsHook.java",
"license": "gpl-3.0",
"size": 5910
} | [
"org.osgi.framework.ServiceReference"
] | import org.osgi.framework.ServiceReference; | import org.osgi.framework.*; | [
"org.osgi.framework"
] | org.osgi.framework; | 282,778 |
void anyGetter(JMethod getter); | void anyGetter(JMethod getter); | /**
* Add the necessary annotation to mark a Java method as the getter for
* additional JSON property values that do not match any of the other
* property names found in the bean.
*
* @param getter
* the method that will be used to get the values of additional
* properties
*/ | Add the necessary annotation to mark a Java method as the getter for additional JSON property values that do not match any of the other property names found in the bean | anyGetter | {
"repo_name": "SiftScience/jsonschema2pojo",
"path": "jsonschema2pojo-core/src/main/java/org/jsonschema2pojo/Annotator.java",
"license": "apache-2.0",
"size": 6250
} | [
"com.sun.codemodel.JMethod"
] | import com.sun.codemodel.JMethod; | import com.sun.codemodel.*; | [
"com.sun.codemodel"
] | com.sun.codemodel; | 2,582,737 |
private void deployCustomSequences(API api, String tenantDomain, Environment environment)
throws APIManagementException,
AxisFault {
if (isSequenceDefined(api.getInSequence()) || isSequenceDefined(api.getOutSequence())) {
try {
PrivilegedCarbonContext.startTenantFlow();
if(tenantDomain != null && !tenantDomain.equals("")){
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
else{
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
}
int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
if (isSequenceDefined(api.getInSequence())) {
deployInSequence(api, tenantId, tenantDomain, environment);
}
if (isSequenceDefined(api.getOutSequence())) {
deployOutSequence(api, tenantId, tenantDomain, environment);
}
} catch (Exception e) {
String msg = "Error in deploying the sequence to gateway";
log.error(msg, e);
throw new APIManagementException(msg);
}
finally {
PrivilegedCarbonContext.endTenantFlow();
}
}
} | void function(API api, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { if (isSequenceDefined(api.getInSequence()) isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !tenantDomain.equals(STRError in deploying the sequence to gateway"; log.error(msg, e); throw new APIManagementException(msg); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } | /**
* Get the specified in/out sequences from api object
*
* @param api
* -API object
* @param tenantDomain
* @param environment
* @throws APIManagementException
* @throws AxisFault
*/ | Get the specified in/out sequences from api object | deployCustomSequences | {
"repo_name": "thushara35/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIGatewayManager.java",
"license": "apache-2.0",
"size": 22827
} | [
"org.apache.axis2.AxisFault",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.impl.dto.Environment",
"org.wso2.carbon.context.PrivilegedCarbonContext"
] | import org.apache.axis2.AxisFault; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.context.PrivilegedCarbonContext; | import org.apache.axis2.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.impl.dto.*; import org.wso2.carbon.context.*; | [
"org.apache.axis2",
"org.wso2.carbon"
] | org.apache.axis2; org.wso2.carbon; | 1,371,311 |
public void addStanzaDroppedListener(StanzaListener listener) {
stanzaDroppedListeners.add(listener);
} | void function(StanzaListener listener) { stanzaDroppedListeners.add(listener); } | /**
* Add a Stanza dropped listener.
* <p>
* Those listeners will be invoked every time a Stanza has been dropped due to a failed SM resume. They will not get
* automatically removed. If at least one StanzaDroppedListener is configured, no attempt will be made to retransmit
* the Stanzas.
* </p>
*
* @param listener the listener to add.
* @since 4.3.3
*/ | Add a Stanza dropped listener. Those listeners will be invoked every time a Stanza has been dropped due to a failed SM resume. They will not get automatically removed. If at least one StanzaDroppedListener is configured, no attempt will be made to retransmit the Stanzas. | addStanzaDroppedListener | {
"repo_name": "igniterealtime/Smack",
"path": "smack-tcp/src/main/java/org/jivesoftware/smack/tcp/XMPPTCPConnection.java",
"license": "apache-2.0",
"size": 86816
} | [
"org.jivesoftware.smack.StanzaListener"
] | import org.jivesoftware.smack.StanzaListener; | import org.jivesoftware.smack.*; | [
"org.jivesoftware.smack"
] | org.jivesoftware.smack; | 945,833 |
@Override
public Adapter createGetFixedRateConversionAdapter() {
if (getFixedRateConversionItemProvider == null) {
getFixedRateConversionItemProvider = new GetFixedRateConversionItemProvider(this);
}
return getFixedRateConversionItemProvider;
}
protected GetFixedCurrencyConversionItemProvider getFixedCurrencyConversionItemProvider; | Adapter function() { if (getFixedRateConversionItemProvider == null) { getFixedRateConversionItemProvider = new GetFixedRateConversionItemProvider(this); } return getFixedRateConversionItemProvider; } protected GetFixedCurrencyConversionItemProvider getFixedCurrencyConversionItemProvider; | /**
* This creates an adapter for a {@link com.odcgroup.t24.enquiry.enquiry.GetFixedRateConversion}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This creates an adapter for a <code>com.odcgroup.t24.enquiry.enquiry.GetFixedRateConversion</code>. | createGetFixedRateConversionAdapter | {
"repo_name": "debabratahazra/DS",
"path": "designstudio/components/t24/core/com.odcgroup.t24.enquiry.model.edit/src/com/odcgroup/t24/enquiry/enquiry/provider/EnquiryItemProviderAdapterFactory.java",
"license": "epl-1.0",
"size": 78683
} | [
"org.eclipse.emf.common.notify.Adapter"
] | import org.eclipse.emf.common.notify.Adapter; | import org.eclipse.emf.common.notify.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 311,158 |
@Test(timeout=60000)
public void testEvictorVisiting() throws Exception {
checkEvictorVisiting(true);
checkEvictorVisiting(false);
} | @Test(timeout=60000) void function() throws Exception { checkEvictorVisiting(true); checkEvictorVisiting(false); } | /**
* Verifies that the evictor visits objects in expected order
* and frequency.
*
* @throws Exception May occur in some failure modes
*/ | Verifies that the evictor visits objects in expected order and frequency | testEvictorVisiting | {
"repo_name": "lovejinstar/POOL2.LINE",
"path": "src/test/java/org/apache/commons/pool2/impl/TestGenericKeyedObjectPool.java",
"license": "apache-2.0",
"size": 78342
} | [
"org.junit.Test"
] | import org.junit.Test; | import org.junit.*; | [
"org.junit"
] | org.junit; | 1,460,880 |
@Test
void testConstructorSurface()
{
final ImageBuffer surface = Graphics.createImageBuffer(64, 32);
final SpriteAnimated sprite = new SpriteAnimatedImpl(surface, 16, 8);
assertTrue(sprite.isLoaded());
assertEquals(surface, sprite.getSurface());
assertEquals(64, sprite.getWidth());
assertEquals(32, sprite.getHeight());
assertEquals(4, sprite.getTileWidth());
assertEquals(4, sprite.getTileHeight());
assertEquals(16, sprite.getFramesHorizontal());
assertEquals(8, sprite.getFramesVertical());
}
| void testConstructorSurface() { final ImageBuffer surface = Graphics.createImageBuffer(64, 32); final SpriteAnimated sprite = new SpriteAnimatedImpl(surface, 16, 8); assertTrue(sprite.isLoaded()); assertEquals(surface, sprite.getSurface()); assertEquals(64, sprite.getWidth()); assertEquals(32, sprite.getHeight()); assertEquals(4, sprite.getTileWidth()); assertEquals(4, sprite.getTileHeight()); assertEquals(16, sprite.getFramesHorizontal()); assertEquals(8, sprite.getFramesVertical()); } | /**
* Test constructor with surface.
*/ | Test constructor with surface | testConstructorSurface | {
"repo_name": "b3dgs/lionengine",
"path": "lionengine-core/src/test/java/com/b3dgs/lionengine/graphic/drawable/SpriteAnimatedTest.java",
"license": "gpl-3.0",
"size": 28639
} | [
"com.b3dgs.lionengine.UtilAssert",
"com.b3dgs.lionengine.graphic.Graphics",
"com.b3dgs.lionengine.graphic.ImageBuffer"
] | import com.b3dgs.lionengine.UtilAssert; import com.b3dgs.lionengine.graphic.Graphics; import com.b3dgs.lionengine.graphic.ImageBuffer; | import com.b3dgs.lionengine.*; import com.b3dgs.lionengine.graphic.*; | [
"com.b3dgs.lionengine"
] | com.b3dgs.lionengine; | 2,214,993 |
List<PlayerOrder> list(); | List<PlayerOrder> list(); | /**
* Listing all the PlayerOrders from the database.
*
* @return a list of all the PlayerOrders that exist inside the table PlayerOrders.
*/ | Listing all the PlayerOrders from the database | list | {
"repo_name": "EaW1805/data",
"path": "src/main/java/com/eaw1805/data/managers/beans/PlayerOrderManagerBean.java",
"license": "mit",
"size": 7401
} | [
"com.eaw1805.data.model.PlayerOrder",
"java.util.List"
] | import com.eaw1805.data.model.PlayerOrder; import java.util.List; | import com.eaw1805.data.model.*; import java.util.*; | [
"com.eaw1805.data",
"java.util"
] | com.eaw1805.data; java.util; | 1,596,909 |
@Test(expectedExceptions = { LDAPException.class })
public void testBindStringWrongPassword()
throws Exception
{
if (! isDirectoryInstanceAvailable())
{
throw new LDAPException(ResultCode.CONNECT_ERROR);
}
String password;
if (getTestBindPassword().equals("wrong"))
{
password = "notright";
}
else
{
password = "wrong";
}
LDAPConnection conn = getUnauthenticatedConnection();
try
{
assertTrue(conn.isConnected());
assertNotNull(conn.getConnectedAddress());
assertNotNull(conn.getConnectedIPAddress());
assertNotNull(conn.getConnectedInetAddress());
assertTrue((conn.getConnectedPort() >= 1) &&
(conn.getConnectedPort() <= 65535));
assertNotNull(conn.toString());
conn.bind(getTestBindDN(), password);
}
finally
{
conn.close();
assertFalse(conn.isConnected());
assertNull(conn.getConnectedAddress());
assertNull(conn.getConnectedIPAddress());
assertNull(conn.getConnectedInetAddress());
assertTrue(conn.getConnectedPort() < 0);
assertNotNull(conn.toString());
}
} | @Test(expectedExceptions = { LDAPException.class }) void function() throws Exception { if (! isDirectoryInstanceAvailable()) { throw new LDAPException(ResultCode.CONNECT_ERROR); } String password; if (getTestBindPassword().equals("wrong")) { password = STR; } else { password = "wrong"; } LDAPConnection conn = getUnauthenticatedConnection(); try { assertTrue(conn.isConnected()); assertNotNull(conn.getConnectedAddress()); assertNotNull(conn.getConnectedIPAddress()); assertNotNull(conn.getConnectedInetAddress()); assertTrue((conn.getConnectedPort() >= 1) && (conn.getConnectedPort() <= 65535)); assertNotNull(conn.toString()); conn.bind(getTestBindDN(), password); } finally { conn.close(); assertFalse(conn.isConnected()); assertNull(conn.getConnectedAddress()); assertNull(conn.getConnectedIPAddress()); assertNull(conn.getConnectedInetAddress()); assertTrue(conn.getConnectedPort() < 0); assertNotNull(conn.toString()); } } | /**
* Tests the {@code bind} method variant that takes a string DN and a string
* password, using the wrong password.
* <BR><BR>
* Access to a Directory Server instance is required for complete processing.
*
* @throws Exception If an unexpected problem occurs.
*/ | Tests the bind method variant that takes a string DN and a string password, using the wrong password. Access to a Directory Server instance is required for complete processing | testBindStringWrongPassword | {
"repo_name": "UnboundID/ldapsdk",
"path": "tests/unit/src/com/unboundid/ldap/sdk/LDAPConnectionTestCase.java",
"license": "gpl-2.0",
"size": 157011
} | [
"org.testng.annotations.Test"
] | import org.testng.annotations.Test; | import org.testng.annotations.*; | [
"org.testng.annotations"
] | org.testng.annotations; | 2,454,887 |
public Properties parse(InputStream in) throws IOException; | Properties function(InputStream in) throws IOException; | /**
* Parses the data from the supplied {@link InputStream}.
*
* @param in
* The InputStream from which to read the data.
* @throws IOException
* If an I/O error occurred while data was read from the
* InputStream.
*/ | Parses the data from the supplied <code>InputStream</code> | parse | {
"repo_name": "blazegraph/database",
"path": "rdf-properties/src/main/java/com/bigdata/rdf/properties/PropertiesParser.java",
"license": "gpl-2.0",
"size": 3659
} | [
"java.io.IOException",
"java.io.InputStream",
"java.util.Properties"
] | import java.io.IOException; import java.io.InputStream; import java.util.Properties; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 730,014 |
private Result pDeclarators(final int yyStart) throws IOException {
JeannieParserColumn yyColumn = (JeannieParserColumn)column(yyStart);
if (null == yyColumn.chunk7) yyColumn.chunk7 = new Chunk7();
if (null == yyColumn.chunk7.fDeclarators)
yyColumn.chunk7.fDeclarators = pDeclarators$1(yyStart);
return yyColumn.chunk7.fDeclarators;
} | Result function(final int yyStart) throws IOException { JeannieParserColumn yyColumn = (JeannieParserColumn)column(yyStart); if (null == yyColumn.chunk7) yyColumn.chunk7 = new Chunk7(); if (null == yyColumn.chunk7.fDeclarators) yyColumn.chunk7.fDeclarators = pDeclarators$1(yyStart); return yyColumn.chunk7.fDeclarators; } | /**
* Parse nonterminal xtc.lang.jeannie.JeannieJava.Declarators.
*
* @param yyStart The index.
* @return The result.
* @throws IOException Signals an I/O error.
*/ | Parse nonterminal xtc.lang.jeannie.JeannieJava.Declarators | pDeclarators | {
"repo_name": "wandoulabs/xtc-rats",
"path": "xtc-core/src/main/java/xtc/lang/jeannie/JeannieParser.java",
"license": "lgpl-2.1",
"size": 647687
} | [
"java.io.IOException",
"xtc.parser.Result"
] | import java.io.IOException; import xtc.parser.Result; | import java.io.*; import xtc.parser.*; | [
"java.io",
"xtc.parser"
] | java.io; xtc.parser; | 2,001,547 |
public Object clone() throws CloneNotSupportedException {
KeyToGroupMap result = (KeyToGroupMap) super.clone();
result.defaultGroup
= (Comparable) KeyToGroupMap.clone(this.defaultGroup);
result.groups = (List) KeyToGroupMap.clone(this.groups);
result.keyToGroupMap = (Map) KeyToGroupMap.clone(this.keyToGroupMap);
return result;
} | Object function() throws CloneNotSupportedException { KeyToGroupMap result = (KeyToGroupMap) super.clone(); result.defaultGroup = (Comparable) KeyToGroupMap.clone(this.defaultGroup); result.groups = (List) KeyToGroupMap.clone(this.groups); result.keyToGroupMap = (Map) KeyToGroupMap.clone(this.keyToGroupMap); return result; } | /**
* Returns a clone of the map.
*
* @return A clone.
*
* @throws CloneNotSupportedException if there is a problem cloning the
* map.
*/ | Returns a clone of the map | clone | {
"repo_name": "ibestvina/multithread-centiscape",
"path": "CentiScaPe2.1/src/main/java/org/jfree/data/KeyToGroupMap.java",
"license": "mit",
"size": 10234
} | [
"java.util.List",
"java.util.Map"
] | import java.util.List; import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 915,963 |
double getDouble(String columnLabel) throws SQLException;
/**
* Retrieves the value of the designated column in the current row
* of this <code>ResultSet</code> object as
* a <code>java.math.BigDecimal</code> in the Java programming language.
*
* @param columnLabel the label for the column specified with the SQL AS clause. If the SQL AS clause was not specified, then the label is the name of the column
* @param scale the number of digits to the right of the decimal point
* @return the column value; if the value is SQL <code>NULL</code>, the
* value returned is <code>null</code>
* @exception SQLException if the columnLabel is not valid;
* if a database access error occurs or this method is
* called on a closed result set
* @exception SQLFeatureNotSupportedException if the JDBC driver does not support
* this method
* @deprecated Use {@code getBigDecimal(int columnIndex)}
* or {@code getBigDecimal(String columnLabel)} | double getDouble(String columnLabel) throws SQLException; /** * Retrieves the value of the designated column in the current row * of this <code>ResultSet</code> object as * a <code>java.math.BigDecimal</code> in the Java programming language. * * @param columnLabel the label for the column specified with the SQL AS clause. If the SQL AS clause was not specified, then the label is the name of the column * @param scale the number of digits to the right of the decimal point * @return the column value; if the value is SQL <code>NULL</code>, the * value returned is <code>null</code> * @exception SQLException if the columnLabel is not valid; * if a database access error occurs or this method is * called on a closed result set * @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @deprecated Use {@code getBigDecimal(int columnIndex)} * or {@code getBigDecimal(String columnLabel)} | /**
* Retrieves the value of the designated column in the current row
* of this <code>ResultSet</code> object as
* a <code>double</code> in the Java programming language.
*
* @param columnLabel the label for the column specified with the SQL AS clause. If the SQL AS clause was not specified, then the label is the name of the column
* @return the column value; if the value is SQL <code>NULL</code>, the
* value returned is <code>0</code>
* @exception SQLException if the columnLabel is not valid;
* if a database access error occurs or this method is
* called on a closed result set
*/ | Retrieves the value of the designated column in the current row of this <code>ResultSet</code> object as a <code>double</code> in the Java programming language | getDouble | {
"repo_name": "workarounds/auto-value-result-set",
"path": "processor/src/test/java/java/sql/ResultSet.java",
"license": "apache-2.0",
"size": 200453
} | [
"java.math.BigDecimal"
] | import java.math.BigDecimal; | import java.math.*; | [
"java.math"
] | java.math; | 585,979 |
private static Properties loadPropertiesFromClasspath() {
List<String> validNames = Arrays.asList("StanfordCoreNLP", "edu.stanford.nlp.pipeline.StanfordCoreNLP");
for (String name : validNames) {
Properties props = loadProperties(name);
if (props != null)
return props;
}
throw new RuntimeException("ERROR: Could not find properties file in the classpath!");
} | static Properties function() { List<String> validNames = Arrays.asList(STR, STR); for (String name : validNames) { Properties props = loadProperties(name); if (props != null) return props; } throw new RuntimeException(STR); } | /**
* Finds the properties file in the classpath and loads the properties from there.
*
* @return The found properties object (must be not-null)
* @throws RuntimeException
* If no properties file can be found on the classpath
*/ | Finds the properties file in the classpath and loads the properties from there | loadPropertiesFromClasspath | {
"repo_name": "begab/kpe",
"path": "src/edu/stanford/nlp/pipeline/SzTECoreNLP.java",
"license": "gpl-3.0",
"size": 43420
} | [
"java.util.Arrays",
"java.util.List",
"java.util.Properties"
] | import java.util.Arrays; import java.util.List; import java.util.Properties; | import java.util.*; | [
"java.util"
] | java.util; | 944,384 |
void markSuccess(long duration) {
eventNotifier.markEvent(HystrixEventType.SUCCESS, key);
counter.increment(HystrixRollingNumberEvent.SUCCESS);
} | void markSuccess(long duration) { eventNotifier.markEvent(HystrixEventType.SUCCESS, key); counter.increment(HystrixRollingNumberEvent.SUCCESS); } | /**
* When a {@link HystrixCommand} successfully completes it will call this method to report its success along with how long the execution took.
*
* @param duration command duration
*/ | When a <code>HystrixCommand</code> successfully completes it will call this method to report its success along with how long the execution took | markSuccess | {
"repo_name": "npccsb/Hystrix",
"path": "hystrix-core/src/main/java/com/netflix/hystrix/HystrixCommandMetrics.java",
"license": "apache-2.0",
"size": 21573
} | [
"com.netflix.hystrix.util.HystrixRollingNumberEvent"
] | import com.netflix.hystrix.util.HystrixRollingNumberEvent; | import com.netflix.hystrix.util.*; | [
"com.netflix.hystrix"
] | com.netflix.hystrix; | 860,804 |
public void claim(URI taskId)
throws IllegalArgumentFault, IllegalAccessFault, IllegalStateFault, RemoteException, IllegalOperationFault {
String errMsg = "Error occurred while performing claim operation";
try {
stub.claim(taskId);
} catch (Exception ex) {
handleException(errMsg, ex);
}
} | void function(URI taskId) throws IllegalArgumentFault, IllegalAccessFault, IllegalStateFault, RemoteException, IllegalOperationFault { String errMsg = STR; try { stub.claim(taskId); } catch (Exception ex) { handleException(errMsg, ex); } } | /**
* Claim task operation.
*
* @param taskId : The ID of the task to be claimed.
* @throws org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalArgumentFault :
* @throws org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalAccessFault :
* @throws org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalStateFault :
* @throws java.rmi.RemoteException :
* @throws org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalOperationFault :
*/ | Claim task operation | claim | {
"repo_name": "madhawa-gunasekara/product-ei",
"path": "samples/business-process-samples/product/humantask-ui-webapp/src/main/java/org/wso2/bps/humantask/sample/clients/HumanTaskClientAPIServiceClient.java",
"license": "apache-2.0",
"size": 11171
} | [
"java.rmi.RemoteException",
"org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalAccessFault",
"org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalArgumentFault",
"org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalOperationFault",
"org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalStateFault"
] | import java.rmi.RemoteException; import org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalAccessFault; import org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalArgumentFault; import org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalOperationFault; import org.wso2.carbon.humantask.stub.ui.task.client.api.IllegalStateFault; | import java.rmi.*; import org.wso2.carbon.humantask.stub.ui.task.client.api.*; | [
"java.rmi",
"org.wso2.carbon"
] | java.rmi; org.wso2.carbon; | 1,015,135 |
@Test
public void testSocketFactoryFailure() throws IOException {
final SocketFactory mockFactory = createMock(SocketFactory.class);
final IOException thrown = new IOException("Injected.");
expect(mockFactory.createSocket()).andThrow(thrown);
replay(mockFactory);
final MongoClientConfiguration config = new MongoClientConfiguration();
config.setSocketFactory(mockFactory);
final Cluster cluster = new Cluster(config, ClusterType.STAND_ALONE);
final Server server = cluster.add(ourServer.getInetSocketAddress());
try {
connect(server, config);
fail("Should have thrown an IOException");
}
catch (final IOException good) {
assertThat(good, sameInstance(thrown));
}
verify(mockFactory);
} | void function() throws IOException { final SocketFactory mockFactory = createMock(SocketFactory.class); final IOException thrown = new IOException(STR); expect(mockFactory.createSocket()).andThrow(thrown); replay(mockFactory); final MongoClientConfiguration config = new MongoClientConfiguration(); config.setSocketFactory(mockFactory); final Cluster cluster = new Cluster(config, ClusterType.STAND_ALONE); final Server server = cluster.add(ourServer.getInetSocketAddress()); try { connect(server, config); fail(STR); } catch (final IOException good) { assertThat(good, sameInstance(thrown)); } verify(mockFactory); } | /**
* Test method for {@link TransportConnection#TransportConnection} .
*
* @throws IOException
* On a failure connecting to the Mock MongoDB server.
*/ | Test method for <code>TransportConnection#TransportConnection</code> | testSocketFactoryFailure | {
"repo_name": "allanbank/mongodb-async-driver",
"path": "src/test/java/com/allanbank/mongodb/client/connection/socket/AbstractTransportConnectionTestCases.java",
"license": "apache-2.0",
"size": 125000
} | [
"com.allanbank.mongodb.MongoClientConfiguration",
"com.allanbank.mongodb.client.ClusterType",
"com.allanbank.mongodb.client.state.Cluster",
"com.allanbank.mongodb.client.state.Server",
"java.io.IOException",
"javax.net.SocketFactory",
"org.easymock.EasyMock",
"org.hamcrest.Matchers",
"org.junit.Assert"
] | import com.allanbank.mongodb.MongoClientConfiguration; import com.allanbank.mongodb.client.ClusterType; import com.allanbank.mongodb.client.state.Cluster; import com.allanbank.mongodb.client.state.Server; import java.io.IOException; import javax.net.SocketFactory; import org.easymock.EasyMock; import org.hamcrest.Matchers; import org.junit.Assert; | import com.allanbank.mongodb.*; import com.allanbank.mongodb.client.*; import com.allanbank.mongodb.client.state.*; import java.io.*; import javax.net.*; import org.easymock.*; import org.hamcrest.*; import org.junit.*; | [
"com.allanbank.mongodb",
"java.io",
"javax.net",
"org.easymock",
"org.hamcrest",
"org.junit"
] | com.allanbank.mongodb; java.io; javax.net; org.easymock; org.hamcrest; org.junit; | 2,402,729 |
public ServiceCall putError201NoProvisioningStatePayloadAsync(final ServiceCallback<Product> serviceCallback) throws IllegalArgumentException {
if (serviceCallback == null) {
throw new IllegalArgumentException("ServiceCallback is required for async calls.");
} | ServiceCall function(final ServiceCallback<Product> serviceCallback) throws IllegalArgumentException { if (serviceCallback == null) { throw new IllegalArgumentException(STR); } | /**
* Long running put request, service returns a 201 to the initial request with no payload.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if callback is null
* @return the {@link ServiceCall} object
*/ | Long running put request, service returns a 201 to the initial request with no payload | putError201NoProvisioningStatePayloadAsync | {
"repo_name": "xingwu1/autorest",
"path": "AutoRest/Generators/Java/Azure.Java.Tests/src/main/java/fixtures/lro/LROSADsOperationsImpl.java",
"license": "mit",
"size": 236888
} | [
"com.microsoft.rest.ServiceCall",
"com.microsoft.rest.ServiceCallback"
] | import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 374,039 |
public void accessLevelChanged(final AjaxBehaviorEvent event) {
RolePreference preference = pagesModel.getRowData().getPreference();
int prefAccess = -1;
String selectedAccess = pagesModel.getRowData().getSelectedAccessLevel();
if (selectedAccess.equals(PageAccessMapping.NO_ACCESS)) {
prefAccess = -1;
} else if (selectedAccess.equals(PageAccessMapping.READ_ONLY)) {
prefAccess = 0;
} else if (selectedAccess.equals(PageAccessMapping.READ_WRITE)) {
prefAccess = 1;
}
preference.setAccess(prefAccess);
boolean updated = roleService.updatePreference(preference);
if (updated) {
UserRole userRole = getCurrentUser().getUserRole();
if (preference.getUserRole().getRoleName().equals(userRole.getRoleName())) {
for (RolePreference currPref : userRole.getPreferences()) {
if (currPref.getPageName().equals(preference.getPageName())) {
currPref.setAccess(prefAccess);
break;
}
}
}
FacesContext.getCurrentInstance().addMessage(null,
new FacesMessage(FacesMessage.SEVERITY_INFO, "INFO", "Access Level Changed."));
}
} | void function(final AjaxBehaviorEvent event) { RolePreference preference = pagesModel.getRowData().getPreference(); int prefAccess = -1; String selectedAccess = pagesModel.getRowData().getSelectedAccessLevel(); if (selectedAccess.equals(PageAccessMapping.NO_ACCESS)) { prefAccess = -1; } else if (selectedAccess.equals(PageAccessMapping.READ_ONLY)) { prefAccess = 0; } else if (selectedAccess.equals(PageAccessMapping.READ_WRITE)) { prefAccess = 1; } preference.setAccess(prefAccess); boolean updated = roleService.updatePreference(preference); if (updated) { UserRole userRole = getCurrentUser().getUserRole(); if (preference.getUserRole().getRoleName().equals(userRole.getRoleName())) { for (RolePreference currPref : userRole.getPreferences()) { if (currPref.getPageName().equals(preference.getPageName())) { currPref.setAccess(prefAccess); break; } } } FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "INFO", STR)); } } | /**
* Access level changed.
*
* @param event the event
*/ | Access level changed | accessLevelChanged | {
"repo_name": "beiyuxinke/CONNECT",
"path": "Product/Production/Adapters/General/CONNECTAdminGUI/src/main/java/gov/hhs/fha/nhinc/admingui/managed/ManageRoleBean.java",
"license": "bsd-3-clause",
"size": 7338
} | [
"gov.hhs.fha.nhinc.admingui.services.persistence.jpa.entity.RolePreference",
"gov.hhs.fha.nhinc.admingui.services.persistence.jpa.entity.UserRole",
"javax.faces.application.FacesMessage",
"javax.faces.context.FacesContext",
"javax.faces.event.AjaxBehaviorEvent"
] | import gov.hhs.fha.nhinc.admingui.services.persistence.jpa.entity.RolePreference; import gov.hhs.fha.nhinc.admingui.services.persistence.jpa.entity.UserRole; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.faces.event.AjaxBehaviorEvent; | import gov.hhs.fha.nhinc.admingui.services.persistence.jpa.entity.*; import javax.faces.application.*; import javax.faces.context.*; import javax.faces.event.*; | [
"gov.hhs.fha",
"javax.faces"
] | gov.hhs.fha; javax.faces; | 47,590 |
public void setSelected(boolean selected) {
// bug 411
if (widget == null || widget.isDisposed())
return;
if (selected) {
((TreeItem) widget).setBackground(ColorManager.LIGHTGRAY);
} else {
((TreeItem) widget).setBackground(ColorManager.WHITE);
}
// refreshVisuals();
} | void function(boolean selected) { if (widget == null widget.isDisposed()) return; if (selected) { ((TreeItem) widget).setBackground(ColorManager.LIGHTGRAY); } else { ((TreeItem) widget).setBackground(ColorManager.WHITE); } } | /**
* If selected, set the element in bold.
*/ | If selected, set the element in bold | setSelected | {
"repo_name": "McGill-DP-Group/seg.jUCMNav",
"path": "src/seg/jUCMNav/editparts/strategyTreeEditparts/EvaluationStrategyTreeEditPart.java",
"license": "epl-1.0",
"size": 4091
} | [
"org.eclipse.swt.widgets.TreeItem"
] | import org.eclipse.swt.widgets.TreeItem; | import org.eclipse.swt.widgets.*; | [
"org.eclipse.swt"
] | org.eclipse.swt; | 2,488,241 |
public Date getCreatedDate() throws FedoraException; | Date function() throws FedoraException; | /**
* Get the creation date of this Resource.
**/ | Get the creation date of this Resource | getCreatedDate | {
"repo_name": "mikedurbin/fcrepo4-client",
"path": "fcrepo-client/src/main/java/org/fcrepo/client/FedoraResource.java",
"license": "apache-2.0",
"size": 3124
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 223,390 |
public TreeMap getGroupsForSite(){
String siteId = ToolManager.getCurrentPlacement().getContext();
return getGroupsForSite(siteId);
}
| TreeMap function(){ String siteId = ToolManager.getCurrentPlacement().getContext(); return getGroupsForSite(siteId); } | /**
* Returns all groups for site
* @return
*/ | Returns all groups for site | getGroupsForSite | {
"repo_name": "harfalm/Sakai-10.1",
"path": "samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/facade/PublishedAssessmentFacadeQueries.java",
"license": "apache-2.0",
"size": 132218
} | [
"java.util.TreeMap",
"org.sakaiproject.tool.cover.ToolManager"
] | import java.util.TreeMap; import org.sakaiproject.tool.cover.ToolManager; | import java.util.*; import org.sakaiproject.tool.cover.*; | [
"java.util",
"org.sakaiproject.tool"
] | java.util; org.sakaiproject.tool; | 1,328,609 |
@DoesServiceRequest
public boolean createIfNotExists(QueueRequestOptions options, OperationContext opContext) throws StorageException {
options = QueueRequestOptions.populateAndApplyDefaults(options, this.queueServiceClient);
boolean exists = this.exists(true, options, opContext);
if (exists) {
return false;
}
else {
try {
this.create(options, opContext);
return true;
}
catch (StorageException e) {
if (e.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT
&& StorageErrorCodeStrings.QUEUE_ALREADY_EXISTS.equals(e.getErrorCode())) {
return false;
}
else {
throw e;
}
}
}
} | boolean function(QueueRequestOptions options, OperationContext opContext) throws StorageException { options = QueueRequestOptions.populateAndApplyDefaults(options, this.queueServiceClient); boolean exists = this.exists(true, options, opContext); if (exists) { return false; } else { try { this.create(options, opContext); return true; } catch (StorageException e) { if (e.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT && StorageErrorCodeStrings.QUEUE_ALREADY_EXISTS.equals(e.getErrorCode())) { return false; } else { throw e; } } } } | /**
* Creates the queue if it does not already exist, using the specified request options and operation context.
*
* @param options
* A {@link QueueRequestOptions} object that specifies any additional options for the request. Specifying
* <code>null</code> will use the default request options from the associated service client (
* {@link CloudQueueClient}).
* @param opContext
* An {@link OperationContext} object that represents the context for the current operation. This object
* is used to track requests to the storage service, and to provide additional runtime information about
* the operation.
*
* @return A value of <code>true</code> if the queue is created in the storage service, otherwise <code>false</code>
* .
*
* @throws StorageException
* If a storage service error occurred during the operation.
*/ | Creates the queue if it does not already exist, using the specified request options and operation context | createIfNotExists | {
"repo_name": "iterate-ch/azure-storage-java",
"path": "microsoft-azure-storage/src/com/microsoft/azure/storage/queue/CloudQueue.java",
"license": "apache-2.0",
"size": 83549
} | [
"com.microsoft.azure.storage.OperationContext",
"com.microsoft.azure.storage.StorageErrorCodeStrings",
"com.microsoft.azure.storage.StorageException",
"java.net.HttpURLConnection"
] | import com.microsoft.azure.storage.OperationContext; import com.microsoft.azure.storage.StorageErrorCodeStrings; import com.microsoft.azure.storage.StorageException; import java.net.HttpURLConnection; | import com.microsoft.azure.storage.*; import java.net.*; | [
"com.microsoft.azure",
"java.net"
] | com.microsoft.azure; java.net; | 1,324,215 |
public static double getExcelDate(LocalDateTime date, boolean use1904windowing) {
int year = date.getYear();
int dayOfYear = date.getDayOfYear();
int hour = date.getHour();
int minute = date.getMinute();
int second = date.getSecond();
int milliSecond = date.getNano()/1_000_000;
return internalGetExcelDate(year, dayOfYear, hour, minute, second, milliSecond, use1904windowing);
} | static double function(LocalDateTime date, boolean use1904windowing) { int year = date.getYear(); int dayOfYear = date.getDayOfYear(); int hour = date.getHour(); int minute = date.getMinute(); int second = date.getSecond(); int milliSecond = date.getNano()/1_000_000; return internalGetExcelDate(year, dayOfYear, hour, minute, second, milliSecond, use1904windowing); } | /**
* Given a LocalDateTime, converts it into a double representing its internal Excel representation,
* which is the number of days since 1/1/1900. Fractional days represent hours, minutes, and seconds.
*
* @return Excel representation of Date (-1 if error - test for error by checking for less than 0.1)
* @param date the Date
* @param use1904windowing Should 1900 or 1904 date windowing be used?
*/ | Given a LocalDateTime, converts it into a double representing its internal Excel representation, which is the number of days since 1/1/1900. Fractional days represent hours, minutes, and seconds | getExcelDate | {
"repo_name": "lamsfoundation/lams",
"path": "3rdParty_sources/poi/org/apache/poi/ss/usermodel/DateUtil.java",
"license": "gpl-2.0",
"size": 40897
} | [
"java.time.LocalDateTime"
] | import java.time.LocalDateTime; | import java.time.*; | [
"java.time"
] | java.time; | 1,822,409 |
public static void uploadImageFromStorage(URL fsName, Configuration conf,
NNStorage storage, NameNodeFile nnf, long txid) throws IOException {
URL url = new URL(fsName, ImageServlet.PATH_SPEC);
long startTime = Time.monotonicNow();
try {
uploadImage(url, conf, storage, nnf, txid);
} catch (HttpPutFailedException e) {
if (e.getResponseCode() == HttpServletResponse.SC_CONFLICT) {
// this is OK - this means that a previous attempt to upload
// this checkpoint succeeded even though we thought it failed.
LOG.info("Image upload with txid " + txid +
" conflicted with a previous image upload to the " +
"same NameNode. Continuing...", e);
return;
} else {
throw e;
}
}
double xferSec = Math.max(
((float) (Time.monotonicNow() - startTime)) / 1000.0, 0.001);
LOG.info("Uploaded image with txid " + txid + " to namenode at " + fsName
+ " in " + xferSec + " seconds");
} | static void function(URL fsName, Configuration conf, NNStorage storage, NameNodeFile nnf, long txid) throws IOException { URL url = new URL(fsName, ImageServlet.PATH_SPEC); long startTime = Time.monotonicNow(); try { uploadImage(url, conf, storage, nnf, txid); } catch (HttpPutFailedException e) { if (e.getResponseCode() == HttpServletResponse.SC_CONFLICT) { LOG.info(STR + txid + STR + STR, e); return; } else { throw e; } } double xferSec = Math.max( ((float) (Time.monotonicNow() - startTime)) / 1000.0, 0.001); LOG.info(STR + txid + STR + fsName + STR + xferSec + STR); } | /**
* Requests that the NameNode download an image from this node.
*
* @param fsName the http address for the remote NN
* @param conf Configuration
* @param storage the storage directory to transfer the image from
* @param nnf the NameNodeFile type of the image
* @param txid the transaction ID of the image to be uploaded
*/ | Requests that the NameNode download an image from this node | uploadImageFromStorage | {
"repo_name": "songweijia/fffs",
"path": "sources/hadoop-2.4.1-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java",
"license": "apache-2.0",
"size": 20966
} | [
"java.io.IOException",
"javax.servlet.http.HttpServletResponse",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hdfs.server.namenode.NNStorage",
"org.apache.hadoop.util.Time"
] | import java.io.IOException; import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.server.namenode.NNStorage; import org.apache.hadoop.util.Time; | import java.io.*; import javax.servlet.http.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.util.*; | [
"java.io",
"javax.servlet",
"org.apache.hadoop"
] | java.io; javax.servlet; org.apache.hadoop; | 1,591,973 |
public static void getElementLabel(IJavaElement element, long flags, StringBuilder builder) {
new JavaElementLabelComposer(builder).appendElementLabel(element, flags);
}
| static void function(IJavaElement element, long flags, StringBuilder builder) { new JavaElementLabelComposer(builder).appendElementLabel(element, flags); } | /**
* Returns the label for a Java element with the flags as defined by this class.
*
* @param element the element to render
* @param flags the rendering flags
* @param builder the buffer to append the resulting label to
*/ | Returns the label for a Java element with the flags as defined by this class | getElementLabel | {
"repo_name": "othomann/java-language-server",
"path": "org.jboss.tools.vscode.java/src/org/jboss/tools/vscode/java/internal/hover/JavaElementLabels.java",
"license": "epl-1.0",
"size": 19086
} | [
"org.eclipse.jdt.core.IJavaElement"
] | import org.eclipse.jdt.core.IJavaElement; | import org.eclipse.jdt.core.*; | [
"org.eclipse.jdt"
] | org.eclipse.jdt; | 2,463,792 |
boolean applies(Item item);
| boolean applies(Item item); | /**
* Whether or not the policy applies to the specified item.
*/ | Whether or not the policy applies to the specified item | applies | {
"repo_name": "genjosanzo/galaxy-ce",
"path": "api/src/main/java/org/mule/galaxy/policy/Policy.java",
"license": "gpl-2.0",
"size": 1414
} | [
"org.mule.galaxy.Item"
] | import org.mule.galaxy.Item; | import org.mule.galaxy.*; | [
"org.mule.galaxy"
] | org.mule.galaxy; | 2,671,664 |
public Path getOldLogDir() {
return this.oldLogDir;
} | Path function() { return this.oldLogDir; } | /**
* Get the directory where old logs go
* @return the dir
*/ | Get the directory where old logs go | getOldLogDir | {
"repo_name": "lilonglai/hbase-0.96.2",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java",
"license": "apache-2.0",
"size": 24903
} | [
"org.apache.hadoop.fs.Path"
] | import org.apache.hadoop.fs.Path; | import org.apache.hadoop.fs.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 464,789 |
public ItemLabelPosition getSeriesPositiveItemLabelPosition(int series) {
// otherwise look up the position table
ItemLabelPosition position = (ItemLabelPosition) this.positiveItemLabelPositionList
.get(series);
if (position == null) {
position = this.basePositiveItemLabelPosition;
}
return position;
} | ItemLabelPosition function(int series) { ItemLabelPosition position = (ItemLabelPosition) this.positiveItemLabelPositionList .get(series); if (position == null) { position = this.basePositiveItemLabelPosition; } return position; } | /**
* Returns the item label position for all positive values in a series.
*
* @param series
* the series index (zero-based).
*
* @return The item label position (never <code>null</code>).
*
* @see #setSeriesPositiveItemLabelPosition(int, ItemLabelPosition)
*/ | Returns the item label position for all positive values in a series | getSeriesPositiveItemLabelPosition | {
"repo_name": "djun100/afreechart",
"path": "src/org/afree/chart/renderer/AbstractRenderer.java",
"license": "lgpl-3.0",
"size": 122597
} | [
"org.afree.chart.labels.ItemLabelPosition"
] | import org.afree.chart.labels.ItemLabelPosition; | import org.afree.chart.labels.*; | [
"org.afree.chart"
] | org.afree.chart; | 420,020 |
public static String getGraphvizString(final Configuration config,
final boolean showImpl, final boolean showLegend) {
final GraphvizConfigVisitor visitor = new GraphvizConfigVisitor(config, showImpl, showLegend);
final Node root = config.getClassHierarchy().getNamespace();
Walk.preorder(visitor, visitor, root);
return visitor.toString();
} | static String function(final Configuration config, final boolean showImpl, final boolean showLegend) { final GraphvizConfigVisitor visitor = new GraphvizConfigVisitor(config, showImpl, showLegend); final Node root = config.getClassHierarchy().getNamespace(); Walk.preorder(visitor, visitor, root); return visitor.toString(); } | /**
* Produce a Graphviz DOT string for a given TANG configuration.
*
* @param config TANG configuration object.
* @param showImpl If true, plot IS-A edges for know implementations.
* @param showLegend If true, add legend to the plot.
* @return configuration graph represented as a string in Graphviz DOT format.
*/ | Produce a Graphviz DOT string for a given TANG configuration | getGraphvizString | {
"repo_name": "taegeonum/incubator-reef",
"path": "lang/java/reef-tang/tang/src/main/java/org/apache/reef/tang/util/walk/graphviz/GraphvizConfigVisitor.java",
"license": "apache-2.0",
"size": 8503
} | [
"org.apache.reef.tang.Configuration",
"org.apache.reef.tang.types.Node",
"org.apache.reef.tang.util.walk.Walk"
] | import org.apache.reef.tang.Configuration; import org.apache.reef.tang.types.Node; import org.apache.reef.tang.util.walk.Walk; | import org.apache.reef.tang.*; import org.apache.reef.tang.types.*; import org.apache.reef.tang.util.walk.*; | [
"org.apache.reef"
] | org.apache.reef; | 1,564,146 |
public static long get1D( DenseMatrix64F A , int n ) {
long before = System.currentTimeMillis();
double total = 0;
for( int iter = 0; iter < n; iter++ ) {
int index = 0;
for( int i = 0; i < A.numRows; i++ ) {
int end = index+A.numCols;
while( index != end ) {
total += A.get(index++);
}
}
}
long after = System.currentTimeMillis();
// print to ensure that ensure that an overly smart compiler does not optimize out
// the whole function and to show that both produce the same results.
System.out.println(total);
return after-before;
} | static long function( DenseMatrix64F A , int n ) { long before = System.currentTimeMillis(); double total = 0; for( int iter = 0; iter < n; iter++ ) { int index = 0; for( int i = 0; i < A.numRows; i++ ) { int end = index+A.numCols; while( index != end ) { total += A.get(index++); } } } long after = System.currentTimeMillis(); System.out.println(total); return after-before; } | /**
* Get by index is used here.
*/ | Get by index is used here | get1D | {
"repo_name": "phuongtg/efficient-java-matrix-library",
"path": "benchmarks/src/org/ejml/BenchmarkInliningGetSet.java",
"license": "apache-2.0",
"size": 4406
} | [
"org.ejml.data.DenseMatrix64F"
] | import org.ejml.data.DenseMatrix64F; | import org.ejml.data.*; | [
"org.ejml.data"
] | org.ejml.data; | 744,237 |
public boolean removeAssignment(AssignmentModel assignment) throws IOException {
int rowAffected = databaseConnection.update("DELETE FROM Assignment WHERE AssignmentId = ?",
assignment.getAssignmentID());
FilesystemInterface.deleteAssignmentFiles(assignment);
return rowAffected > 0;
} | boolean function(AssignmentModel assignment) throws IOException { int rowAffected = databaseConnection.update(STR, assignment.getAssignmentID()); FilesystemInterface.deleteAssignmentFiles(assignment); return rowAffected > 0; } | /**
* Removes an assignment from the database.
*
* @param assignment The assignment to remove
* @return true if the assignment were removed, else false.
*/ | Removes an assignment from the database | removeAssignment | {
"repo_name": "student-capture/student-capture",
"path": "src/main/java/studentcapture/assignment/AssignmentDAO.java",
"license": "mit",
"size": 15919
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,395,091 |
@Test
public void whenGetAllWithEmptySetIsUsed_thenNearCacheShouldNotBePopulated() {
NearCacheTestContext<Integer, String, NK, NV> context = createContext();
assumeThatMethodIsAvailable(context.nearCacheAdapter, DataStructureMethods.GET_ALL);
// populate the data structure
populateDataAdapter(context);
assertNearCacheSize(context, 0);
assertNearCacheStats(context, 0, 0, 0);
// use getAll() with an empty set, which should not populate the Near Cache
context.nearCacheAdapter.getAll(Collections.<Integer>emptySet());
assertNearCacheSize(context, 0);
assertNearCacheStats(context, 0, 0, 0);
} | void function() { NearCacheTestContext<Integer, String, NK, NV> context = createContext(); assumeThatMethodIsAvailable(context.nearCacheAdapter, DataStructureMethods.GET_ALL); populateDataAdapter(context); assertNearCacheSize(context, 0); assertNearCacheStats(context, 0, 0, 0); context.nearCacheAdapter.getAll(Collections.<Integer>emptySet()); assertNearCacheSize(context, 0); assertNearCacheStats(context, 0, 0, 0); } | /**
* Checks that the Near Cache is not populated when {@link DataStructureMethods#GET_ALL} is used with an empty key set.
*/ | Checks that the Near Cache is not populated when <code>DataStructureMethods#GET_ALL</code> is used with an empty key set | whenGetAllWithEmptySetIsUsed_thenNearCacheShouldNotBePopulated | {
"repo_name": "emrahkocaman/hazelcast",
"path": "hazelcast/src/test/java/com/hazelcast/internal/nearcache/AbstractNearCacheBasicTest.java",
"license": "apache-2.0",
"size": 46469
} | [
"com.hazelcast.internal.adapter.DataStructureAdapter",
"com.hazelcast.internal.nearcache.NearCacheTestUtils",
"java.util.Collections"
] | import com.hazelcast.internal.adapter.DataStructureAdapter; import com.hazelcast.internal.nearcache.NearCacheTestUtils; import java.util.Collections; | import com.hazelcast.internal.adapter.*; import com.hazelcast.internal.nearcache.*; import java.util.*; | [
"com.hazelcast.internal",
"java.util"
] | com.hazelcast.internal; java.util; | 657,268 |
EAttribute getSPRepresentation_Title(); | EAttribute getSPRepresentation_Title(); | /**
* Returns the meta object for the attribute '{@link fr.obeo.dsl.sPrototyper.SPRepresentation#getTitle <em>Title</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Title</em>'.
* @see fr.obeo.dsl.sPrototyper.SPRepresentation#getTitle()
* @see #getSPRepresentation()
* @generated
*/ | Returns the meta object for the attribute '<code>fr.obeo.dsl.sPrototyper.SPRepresentation#getTitle Title</code>'. | getSPRepresentation_Title | {
"repo_name": "glefur/s-prototyper",
"path": "plugins/fr.obeo.dsl.sprototyper/src-gen/fr/obeo/dsl/sPrototyper/SPrototyperPackage.java",
"license": "apache-2.0",
"size": 98928
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,402,527 |
public ParameterService getParameterService() {
return parameterService;
} | ParameterService function() { return parameterService; } | /**
* Gets the parameterService attribute.
* @return Returns the parameterService.
*/ | Gets the parameterService attribute | getParameterService | {
"repo_name": "rashikpolus/MIT_KC",
"path": "coeus-impl/src/main/java/org/kuali/coeus/common/budget/impl/rate/BudgetRateServiceDecorator.java",
"license": "agpl-3.0",
"size": 17459
} | [
"org.kuali.rice.coreservice.framework.parameter.ParameterService"
] | import org.kuali.rice.coreservice.framework.parameter.ParameterService; | import org.kuali.rice.coreservice.framework.parameter.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 95,794 |
protected Answer execute(UnregisterNicCommand cmd) {
s_logger.info("Executing resource UnregisterNicCommand: " + _gson.toJson(cmd));
if (_guestTrafficInfo == null) {
return new Answer(cmd, false, "No Guest Traffic Info found, unable to determine where to clean up");
}
try {
if (_guestTrafficInfo.getVirtualSwitchType() != VirtualSwitchType.StandardVirtualSwitch) {
// For now we only need to cleanup the nvp specific portgroups
// on the standard switches
return new Answer(cmd, true, "Nothing to do");
}
s_logger.debug("Cleaning up portgroup " + cmd.getNicUuid() + " on switch " + _guestTrafficInfo.getVirtualSwitchName());
VmwareContext context = getServiceContext();
VmwareHypervisorHost host = getHyperHost(context);
ManagedObjectReference clusterMO = host.getHyperHostCluster();
// Get a list of all the hosts in this cluster
@SuppressWarnings("unchecked")
List<ManagedObjectReference> hosts = (List<ManagedObjectReference>)context.getVimClient().getDynamicProperty(clusterMO, "host");
if (hosts == null) {
return new Answer(cmd, false, "No hosts in cluster, which is pretty weird");
}
for (ManagedObjectReference hostMOR : hosts) {
HostMO hostMo = new HostMO(context, hostMOR);
hostMo.deletePortGroup(cmd.getNicUuid().toString());
s_logger.debug("Removed portgroup " + cmd.getNicUuid() + " from host " + hostMo.getHostName());
}
return new Answer(cmd, true, "Unregistered resources for NIC " + cmd.getNicUuid());
} catch (Exception e) {
if (e instanceof RemoteException) {
s_logger.warn("Encounter remote exception to vCenter, invalidate VMware session context");
invalidateServiceContext();
}
String msg = "UnregisterVMCommand failed due to " + VmwareHelper.getExceptionMessage(e);
s_logger.error(msg);
return new Answer(cmd, false, msg);
}
} | Answer function(UnregisterNicCommand cmd) { s_logger.info(STR + _gson.toJson(cmd)); if (_guestTrafficInfo == null) { return new Answer(cmd, false, STR); } try { if (_guestTrafficInfo.getVirtualSwitchType() != VirtualSwitchType.StandardVirtualSwitch) { return new Answer(cmd, true, STR); } s_logger.debug(STR + cmd.getNicUuid() + STR + _guestTrafficInfo.getVirtualSwitchName()); VmwareContext context = getServiceContext(); VmwareHypervisorHost host = getHyperHost(context); ManagedObjectReference clusterMO = host.getHyperHostCluster(); @SuppressWarnings(STR) List<ManagedObjectReference> hosts = (List<ManagedObjectReference>)context.getVimClient().getDynamicProperty(clusterMO, "host"); if (hosts == null) { return new Answer(cmd, false, STR); } for (ManagedObjectReference hostMOR : hosts) { HostMO hostMo = new HostMO(context, hostMOR); hostMo.deletePortGroup(cmd.getNicUuid().toString()); s_logger.debug(STR + cmd.getNicUuid() + STR + hostMo.getHostName()); } return new Answer(cmd, true, STR + cmd.getNicUuid()); } catch (Exception e) { if (e instanceof RemoteException) { s_logger.warn(STR); invalidateServiceContext(); } String msg = STR + VmwareHelper.getExceptionMessage(e); s_logger.error(msg); return new Answer(cmd, false, msg); } } | /**
* UnregisterNicCommand is used to remove a portgroup created for this
* specific nic. The portgroup will have the name set to the UUID of the
* nic. Introduced to cleanup the portgroups created for each nic that is
* plugged into an lswitch (Nicira NVP plugin)
*
* @param cmd
* @return
*/ | UnregisterNicCommand is used to remove a portgroup created for this specific nic. The portgroup will have the name set to the UUID of the nic. Introduced to cleanup the portgroups created for each nic that is plugged into an lswitch (Nicira NVP plugin) | execute | {
"repo_name": "resmo/cloudstack",
"path": "plugins/hypervisors/vmware/src/com/cloud/hypervisor/vmware/resource/VmwareResource.java",
"license": "apache-2.0",
"size": 272433
} | [
"com.cloud.agent.api.Answer",
"com.cloud.agent.api.UnregisterNicCommand",
"com.cloud.hypervisor.vmware.mo.HostMO",
"com.cloud.hypervisor.vmware.mo.VirtualSwitchType",
"com.cloud.hypervisor.vmware.mo.VmwareHypervisorHost",
"com.cloud.hypervisor.vmware.util.VmwareContext",
"com.cloud.hypervisor.vmware.util.VmwareHelper",
"com.vmware.vim25.ManagedObjectReference",
"java.rmi.RemoteException",
"java.util.List"
] | import com.cloud.agent.api.Answer; import com.cloud.agent.api.UnregisterNicCommand; import com.cloud.hypervisor.vmware.mo.HostMO; import com.cloud.hypervisor.vmware.mo.VirtualSwitchType; import com.cloud.hypervisor.vmware.mo.VmwareHypervisorHost; import com.cloud.hypervisor.vmware.util.VmwareContext; import com.cloud.hypervisor.vmware.util.VmwareHelper; import com.vmware.vim25.ManagedObjectReference; import java.rmi.RemoteException; import java.util.List; | import com.cloud.agent.api.*; import com.cloud.hypervisor.vmware.mo.*; import com.cloud.hypervisor.vmware.util.*; import com.vmware.vim25.*; import java.rmi.*; import java.util.*; | [
"com.cloud.agent",
"com.cloud.hypervisor",
"com.vmware.vim25",
"java.rmi",
"java.util"
] | com.cloud.agent; com.cloud.hypervisor; com.vmware.vim25; java.rmi; java.util; | 2,444,098 |
CompositorAnimationHandler getAnimationHandler(); | CompositorAnimationHandler getAnimationHandler(); | /**
* Get the handler responsible for running the compositor's animations.
* @return The {@link CompositorAnimationHandler}.
*/ | Get the handler responsible for running the compositor's animations | getAnimationHandler | {
"repo_name": "chromium/chromium",
"path": "chrome/android/java/src/org/chromium/chrome/browser/compositor/layouts/LayoutUpdateHost.java",
"license": "bsd-3-clause",
"size": 4235
} | [
"org.chromium.chrome.browser.layouts.animation.CompositorAnimationHandler"
] | import org.chromium.chrome.browser.layouts.animation.CompositorAnimationHandler; | import org.chromium.chrome.browser.layouts.animation.*; | [
"org.chromium.chrome"
] | org.chromium.chrome; | 2,714,485 |
public Map<String, Kost2Row> getKost2Rows()
{
return kost2Rows;
} | Map<String, Kost2Row> function() { return kost2Rows; } | /**
* Key is the shortDisplayName of Kost2DO. The Map is a TreeMap sorted by the keys.
*/ | Key is the shortDisplayName of Kost2DO. The Map is a TreeMap sorted by the keys | getKost2Rows | {
"repo_name": "linqingyicen/projectforge-webapp",
"path": "src/main/java/org/projectforge/fibu/MonthlyEmployeeReport.java",
"license": "gpl-3.0",
"size": 12122
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,446,188 |
@Override
protected Void doInBackground(Void... v) {
try {
cpuSpyApp.getCpuStateMonitor().updateStates();
} catch (CpuStateMonitor.CpuStateMonitorException e) {
Log.e(TAG, "FrequencyTable: Problem getting CPU states");
}
if (cpuSpyAppLITTLE != null) {
try {
cpuSpyAppLITTLE.getCpuStateMonitor().updateStates();
} catch (CpuStateMonitor.CpuStateMonitorException e) {
Log.e(TAG, "FrequencyTable: Problem getting CPU LITTLE states");
}
}
return null;
} | Void function(Void... v) { try { cpuSpyApp.getCpuStateMonitor().updateStates(); } catch (CpuStateMonitor.CpuStateMonitorException e) { Log.e(TAG, STR); } if (cpuSpyAppLITTLE != null) { try { cpuSpyAppLITTLE.getCpuStateMonitor().updateStates(); } catch (CpuStateMonitor.CpuStateMonitorException e) { Log.e(TAG, STR); } } return null; } | /**
* Stuff to do on a separate thread
*/ | Stuff to do on a separate thread | doInBackground | {
"repo_name": "TeamTwisted/KernelAdiutor",
"path": "app/src/main/java/com/grarak/kerneladiutor/fragments/information/FrequencyTableFragment.java",
"license": "apache-2.0",
"size": 13185
} | [
"android.util.Log",
"com.bvalosek.cpuspy.CpuStateMonitor"
] | import android.util.Log; import com.bvalosek.cpuspy.CpuStateMonitor; | import android.util.*; import com.bvalosek.cpuspy.*; | [
"android.util",
"com.bvalosek.cpuspy"
] | android.util; com.bvalosek.cpuspy; | 2,179,370 |
public MetaProperty<LocalDate> lastNoticeDate() {
return lastNoticeDate;
} | MetaProperty<LocalDate> function() { return lastNoticeDate; } | /**
* The meta-property for the {@code lastNoticeDate} property.
* @return the meta-property, not null
*/ | The meta-property for the lastNoticeDate property | lastNoticeDate | {
"repo_name": "OpenGamma/Strata",
"path": "modules/product/src/main/java/com/opengamma/strata/product/bond/ResolvedBondFuture.java",
"license": "apache-2.0",
"size": 35094
} | [
"java.time.LocalDate",
"org.joda.beans.MetaProperty"
] | import java.time.LocalDate; import org.joda.beans.MetaProperty; | import java.time.*; import org.joda.beans.*; | [
"java.time",
"org.joda.beans"
] | java.time; org.joda.beans; | 1,950,124 |
private void assertFailureWithErrorMessage(String expected) {
try {
ObjectMapper mapper = new ObjectMapper();
JsonNode result = mapper.readTree(resp.getOutput());
assertEquals("errorMessage", expected, getFieldValue(result, "errorMessage").asText());
assertEquals("status", SC_INTERNAL_SERVER_ERROR, resp.getStatus());
} catch (IOException e) {
fail(e.toString());
}
} | void function(String expected) { try { ObjectMapper mapper = new ObjectMapper(); JsonNode result = mapper.readTree(resp.getOutput()); assertEquals(STR, expected, getFieldValue(result, STR).asText()); assertEquals(STR, SC_INTERNAL_SERVER_ERROR, resp.getStatus()); } catch (IOException e) { fail(e.toString()); } } | /**
* The response should be a JSONObject that contained this error-message,
* and the status should be set to INTERNAL_SERVER_ERROR.
*/ | The response should be a JSONObject that contained this error-message, and the status should be set to INTERNAL_SERVER_ERROR | assertFailureWithErrorMessage | {
"repo_name": "vivo-project/Vitro",
"path": "api/src/test/java/edu/cornell/mannlib/vitro/webapp/controller/json/JsonServletTest.java",
"license": "bsd-3-clause",
"size": 8595
} | [
"com.fasterxml.jackson.databind.JsonNode",
"com.fasterxml.jackson.databind.ObjectMapper",
"java.io.IOException",
"org.junit.Assert"
] | import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import org.junit.Assert; | import com.fasterxml.jackson.databind.*; import java.io.*; import org.junit.*; | [
"com.fasterxml.jackson",
"java.io",
"org.junit"
] | com.fasterxml.jackson; java.io; org.junit; | 2,292,981 |
public ProcessingComponentConfiguration getSerializerConfiguration() {
return this.serializerConfiguration;
} | ProcessingComponentConfiguration function() { return this.serializerConfiguration; } | /**
* The configuration for the serializer.
*/ | The configuration for the serializer | getSerializerConfiguration | {
"repo_name": "MRivas-XumaK/slingBuild",
"path": "contrib/extensions/rewriter/src/main/java/org/apache/sling/rewriter/impl/ProcessorConfigurationImpl.java",
"license": "apache-2.0",
"size": 18680
} | [
"org.apache.sling.rewriter.ProcessingComponentConfiguration"
] | import org.apache.sling.rewriter.ProcessingComponentConfiguration; | import org.apache.sling.rewriter.*; | [
"org.apache.sling"
] | org.apache.sling; | 2,148,451 |
@Test
public void testReplayEditsAfterAbortingFlush() throws IOException {
final TableName tableName =
TableName.valueOf("testReplayEditsAfterAbortingFlush");
final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName);
deleteDir(basedir);
final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
HBaseTestingUtility.closeRegionAndWAL(region3);
// Write countPerFamily edits into the three families. Do a flush on one
// of the families during the load of edits so its seqid is not same as
// others to test we do right thing when different seqids.
WAL wal = createWAL(this.conf, hbaseRootDir, logName);
RegionServerServices rsServices = Mockito.mock(RegionServerServices.class);
Mockito.doReturn(false).when(rsServices).isAborted();
when(rsServices.getServerName()).thenReturn(ServerName.valueOf("foo", 10, 10));
when(rsServices.getConfiguration()).thenReturn(conf);
Configuration customConf = new Configuration(this.conf);
customConf.set(DefaultStoreEngine.DEFAULT_STORE_FLUSHER_CLASS_KEY,
CustomStoreFlusher.class.getName());
HRegion region =
HRegion.openHRegion(this.hbaseRootDir, hri, htd, wal, customConf, rsServices, null);
int writtenRowCount = 10;
List<HColumnDescriptor> families = new ArrayList<>(htd.getFamilies());
for (int i = 0; i < writtenRowCount; i++) {
Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i)));
put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
Bytes.toBytes("val"));
region.put(put);
}
// Now assert edits made it in.
RegionScanner scanner = region.getScanner(new Scan());
assertEquals(writtenRowCount, getScannedCount(scanner));
// Let us flush the region
CustomStoreFlusher.throwExceptionWhenFlushing.set(true);
try {
region.flush(true);
fail("Injected exception hasn't been thrown");
} catch (IOException e) {
LOG.info("Expected simulated exception when flushing region, {}", e.getMessage());
// simulated to abort server
Mockito.doReturn(true).when(rsServices).isAborted();
region.setClosing(false); // region normally does not accept writes after
// DroppedSnapshotException. We mock around it for this test.
}
// writing more data
int moreRow = 10;
for (int i = writtenRowCount; i < writtenRowCount + moreRow; i++) {
Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i)));
put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
Bytes.toBytes("val"));
region.put(put);
}
writtenRowCount += moreRow;
// call flush again
CustomStoreFlusher.throwExceptionWhenFlushing.set(false);
try {
region.flush(true);
} catch (IOException t) {
LOG.info("Expected exception when flushing region because server is stopped,"
+ t.getMessage());
}
region.close(true);
wal.shutdown();
// Let us try to split and recover
runWALSplit(this.conf);
WAL wal2 = createWAL(this.conf, hbaseRootDir, logName);
Mockito.doReturn(false).when(rsServices).isAborted();
HRegion region2 =
HRegion.openHRegion(this.hbaseRootDir, hri, htd, wal2, this.conf, rsServices, null);
scanner = region2.getScanner(new Scan());
assertEquals(writtenRowCount, getScannedCount(scanner));
} | void function() throws IOException { final TableName tableName = TableName.valueOf(STR); final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName); final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName); deleteDir(basedir); final HTableDescriptor htd = createBasic3FamilyHTD(tableName); HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd); HBaseTestingUtility.closeRegionAndWAL(region3); WAL wal = createWAL(this.conf, hbaseRootDir, logName); RegionServerServices rsServices = Mockito.mock(RegionServerServices.class); Mockito.doReturn(false).when(rsServices).isAborted(); when(rsServices.getServerName()).thenReturn(ServerName.valueOf("foo", 10, 10)); when(rsServices.getConfiguration()).thenReturn(conf); Configuration customConf = new Configuration(this.conf); customConf.set(DefaultStoreEngine.DEFAULT_STORE_FLUSHER_CLASS_KEY, CustomStoreFlusher.class.getName()); HRegion region = HRegion.openHRegion(this.hbaseRootDir, hri, htd, wal, customConf, rsServices, null); int writtenRowCount = 10; List<HColumnDescriptor> families = new ArrayList<>(htd.getFamilies()); for (int i = 0; i < writtenRowCount; i++) { Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i))); put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"), Bytes.toBytes("val")); region.put(put); } RegionScanner scanner = region.getScanner(new Scan()); assertEquals(writtenRowCount, getScannedCount(scanner)); CustomStoreFlusher.throwExceptionWhenFlushing.set(true); try { region.flush(true); fail(STR); } catch (IOException e) { LOG.info(STR, e.getMessage()); Mockito.doReturn(true).when(rsServices).isAborted(); region.setClosing(false); } int moreRow = 10; for (int i = writtenRowCount; i < writtenRowCount + moreRow; i++) { Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i))); put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"), Bytes.toBytes("val")); region.put(put); } writtenRowCount 
+= moreRow; CustomStoreFlusher.throwExceptionWhenFlushing.set(false); try { region.flush(true); } catch (IOException t) { LOG.info(STR + t.getMessage()); } region.close(true); wal.shutdown(); runWALSplit(this.conf); WAL wal2 = createWAL(this.conf, hbaseRootDir, logName); Mockito.doReturn(false).when(rsServices).isAborted(); HRegion region2 = HRegion.openHRegion(this.hbaseRootDir, hri, htd, wal2, this.conf, rsServices, null); scanner = region2.getScanner(new Scan()); assertEquals(writtenRowCount, getScannedCount(scanner)); } | /**
* Test that we could recover the data correctly after aborting flush. In the
* test, first we abort flush after writing some data, then writing more data
* and flush again, at last verify the data.
* @throws IOException
*/ | Test that we could recover the data correctly after aborting flush. In the test, first we abort flush after writing some data, then writing more data and flush again, at last verify the data | testReplayEditsAfterAbortingFlush | {
"repo_name": "ultratendency/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java",
"license": "apache-2.0",
"size": 50346
} | [
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hbase.HBaseTestingUtility",
"org.apache.hadoop.hbase.HColumnDescriptor",
"org.apache.hadoop.hbase.HRegionInfo",
"org.apache.hadoop.hbase.HTableDescriptor",
"org.apache.hadoop.hbase.ServerName",
"org.apache.hadoop.hbase.TableName",
"org.apache.hadoop.hbase.client.Put",
"org.apache.hadoop.hbase.client.Scan",
"org.apache.hadoop.hbase.regionserver.DefaultStoreEngine",
"org.apache.hadoop.hbase.regionserver.HRegion",
"org.apache.hadoop.hbase.regionserver.RegionScanner",
"org.apache.hadoop.hbase.regionserver.RegionServerServices",
"org.apache.hadoop.hbase.util.Bytes",
"org.apache.hadoop.hbase.util.FSUtils",
"org.junit.Assert",
"org.mockito.Mockito"
] | import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.junit.Assert; import org.mockito.Mockito; | import java.io.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.regionserver.*; import org.apache.hadoop.hbase.util.*; import org.junit.*; import org.mockito.*; | [
"java.io",
"java.util",
"org.apache.hadoop",
"org.junit",
"org.mockito"
] | java.io; java.util; org.apache.hadoop; org.junit; org.mockito; | 386,776 |
@Nullable
private ProgramRuntimeService.RuntimeInfo findRuntimeInfo(Id.Program identifier, @Nullable String runId) {
Map<RunId, ProgramRuntimeService.RuntimeInfo> runtimeInfos = runtimeService.list(identifier.getType());
if (runId != null) {
return runtimeInfos.get(RunIds.fromString(runId));
}
for (ProgramRuntimeService.RuntimeInfo info : runtimeInfos.values()) {
if (identifier.equals(info.getProgramId())) {
return info;
}
}
return null;
} | ProgramRuntimeService.RuntimeInfo function(Id.Program identifier, @Nullable String runId) { Map<RunId, ProgramRuntimeService.RuntimeInfo> runtimeInfos = runtimeService.list(identifier.getType()); if (runId != null) { return runtimeInfos.get(RunIds.fromString(runId)); } for (ProgramRuntimeService.RuntimeInfo info : runtimeInfos.values()) { if (identifier.equals(info.getProgramId())) { return info; } } return null; } | /**
* Temporarily protected. Should be made private when all v3 APIs (webapp in this case) have been implemented.
*/ | Temporarily protected. Should be made private when all v3 APIs (webapp in this case) have been implemented | findRuntimeInfo | {
"repo_name": "mpouttuclarke/cdap",
"path": "cdap-app-fabric/src/main/java/co/cask/cdap/gateway/handlers/ProgramLifecycleHttpHandler.java",
"license": "apache-2.0",
"size": 69089
} | [
"co.cask.cdap.app.runtime.ProgramRuntimeService",
"co.cask.cdap.common.app.RunIds",
"co.cask.cdap.proto.Id",
"java.util.Map",
"javax.annotation.Nullable",
"org.apache.twill.api.RunId"
] | import co.cask.cdap.app.runtime.ProgramRuntimeService; import co.cask.cdap.common.app.RunIds; import co.cask.cdap.proto.Id; import java.util.Map; import javax.annotation.Nullable; import org.apache.twill.api.RunId; | import co.cask.cdap.app.runtime.*; import co.cask.cdap.common.app.*; import co.cask.cdap.proto.*; import java.util.*; import javax.annotation.*; import org.apache.twill.api.*; | [
"co.cask.cdap",
"java.util",
"javax.annotation",
"org.apache.twill"
] | co.cask.cdap; java.util; javax.annotation; org.apache.twill; | 344,303 |
public static synchronized void info(String... message)
{
Check.notNull(message);
if (LEVELS.contains(INFORMATION))
{
LOGGER.logp(Level.INFO, null, null, getMessage(message));
}
}
| static synchronized void function(String... message) { Check.notNull(message); if (LEVELS.contains(INFORMATION)) { LOGGER.logp(Level.INFO, null, null, getMessage(message)); } } | /**
* Display an informative verbose message to standard output.
*
* @param message The list of messages (must not be <code>null</code>).
* @throws LionEngineException If invalid argument.
* @see Verbose#INFORMATION
*/ | Display an informative verbose message to standard output | info | {
"repo_name": "b3dgs/lionengine",
"path": "lionengine-core/src/main/java/com/b3dgs/lionengine/Verbose.java",
"license": "gpl-3.0",
"size": 7427
} | [
"java.util.logging.Level"
] | import java.util.logging.Level; | import java.util.logging.*; | [
"java.util"
] | java.util; | 539,440 |
public static boolean isFullscreen() {
if (instance != null) {
return fullscreen;
} else {
Gdx.app.error("Wurfel Engine", "There is no instance of the engine. You should call initGame first.");
return false;
}
} | static boolean function() { if (instance != null) { return fullscreen; } else { Gdx.app.error(STR, STR); return false; } } | /**
*Check if the game is running in fullscreen.
* @return true when running in fullscreen, false if in window mode
*/ | Check if the game is running in fullscreen | isFullscreen | {
"repo_name": "thtomate/W-E-f-a",
"path": "src/com/BombingGames/WurfelEngine/WE.java",
"license": "bsd-3-clause",
"size": 12285
} | [
"com.badlogic.gdx.Gdx"
] | import com.badlogic.gdx.Gdx; | import com.badlogic.gdx.*; | [
"com.badlogic.gdx"
] | com.badlogic.gdx; | 1,678,342 |
private static boolean isXLargeTablet(Context context) {
return (context.getResources().getConfiguration().screenLayout
& Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_XLARGE;
} | static boolean function(Context context) { return (context.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_XLARGE; } | /**
* Helper method to determine if the device has an extra-large screen. For
* example, 10" tablets are extra-large.
*/ | Helper method to determine if the device has an extra-large screen. For example, 10" tablets are extra-large | isXLargeTablet | {
"repo_name": "aclogar/chore_manager",
"path": "app/src/main/java/com/aclogar/choremanager/SettingsActivity.java",
"license": "apache-2.0",
"size": 10058
} | [
"android.content.Context",
"android.content.res.Configuration"
] | import android.content.Context; import android.content.res.Configuration; | import android.content.*; import android.content.res.*; | [
"android.content"
] | android.content; | 248,169 |
@Test(expected = NoResolvedResultException.class)
public void shouldHaveCentralMavenRepositoryDisabled() {
// This should NOT resolve from Maven Central
Maven.resolver().loadPomFromFile("pom.xml").resolve("junit:junit").withClassPathResolution(false)
.withMavenCentralRepo(false).withoutTransitivity().asSingle(File.class);
} | @Test(expected = NoResolvedResultException.class) void function() { Maven.resolver().loadPomFromFile(STR).resolve(STR).withClassPathResolution(false) .withMavenCentralRepo(false).withoutTransitivity().asSingle(File.class); } | /**
* Tests the disabling of the Maven central repository
*/ | Tests the disabling of the Maven central repository | shouldHaveCentralMavenRepositoryDisabled | {
"repo_name": "oliveti/resolver",
"path": "impl-maven/src/test/java/org/jboss/shrinkwrap/resolver/impl/maven/integration/DisabledCentralRepositoryTestCase.java",
"license": "apache-2.0",
"size": 3497
} | [
"java.io.File",
"org.jboss.shrinkwrap.resolver.api.NoResolvedResultException",
"org.jboss.shrinkwrap.resolver.api.maven.Maven",
"org.junit.Test"
] | import java.io.File; import org.jboss.shrinkwrap.resolver.api.NoResolvedResultException; import org.jboss.shrinkwrap.resolver.api.maven.Maven; import org.junit.Test; | import java.io.*; import org.jboss.shrinkwrap.resolver.api.*; import org.jboss.shrinkwrap.resolver.api.maven.*; import org.junit.*; | [
"java.io",
"org.jboss.shrinkwrap",
"org.junit"
] | java.io; org.jboss.shrinkwrap; org.junit; | 1,479,657 |
@Override
public IKnowledge parseKnowledge(File file) throws IOException {
FileReader reader = new FileReader(file);
Lineizer lineizer = new Lineizer(reader, commentMarker);
IKnowledge knowledge = parseKnowledge(lineizer, delimiterType.getPattern());
this.logger.reset();
return knowledge;
} | IKnowledge function(File file) throws IOException { FileReader reader = new FileReader(file); Lineizer lineizer = new Lineizer(reader, commentMarker); IKnowledge knowledge = parseKnowledge(lineizer, delimiterType.getPattern()); this.logger.reset(); return knowledge; } | /**
* Loads knowledge from a file. Assumes knowledge is the only thing in the
* file. No jokes please. :)
*/ | Loads knowledge from a file. Assumes knowledge is the only thing in the file. No jokes please. :) | parseKnowledge | {
"repo_name": "amurrayw/tetrad",
"path": "tetrad-lib/src/main/java/edu/cmu/tetrad/data/DataReader.java",
"license": "gpl-2.0",
"size": 45245
} | [
"java.io.File",
"java.io.FileReader",
"java.io.IOException"
] | import java.io.File; import java.io.FileReader; import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 115,798 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.