method: string, lengths 13 to 441k
clean_method: string, lengths 7 to 313k
doc: string, lengths 17 to 17.3k
comment: string, lengths 3 to 1.42k
method_name: string, lengths 1 to 273
extra: dict
imports: sequence
imports_info: string, lengths 19 to 34.8k
cluster_imports_info: string, lengths 15 to 3.66k
libraries: sequence
libraries_info: string, lengths 6 to 661
id: int64, values 0 to 2.92M
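The records below follow the schema above. As a rough orientation only, here is a minimal Java sketch of reading such records, assuming a JSON-lines export and the Jackson library; the file name records.jsonl and the class name ReadRecords are placeholders, not taken from this page.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.nio.file.Files;
import java.nio.file.Paths;

public class ReadRecords {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // records.jsonl is a placeholder path; this page does not name a distribution file.
        for (String line : Files.readAllLines(Paths.get("records.jsonl"))) {
            JsonNode record = mapper.readTree(line);
            // Field names follow the schema listed above.
            String methodName = record.get("method_name").asText();
            String comment = record.get("comment").asText();
            String repoName = record.get("extra").get("repo_name").asText();
            System.out.println(repoName + " :: " + methodName + " :: " + comment);
        }
    }
}

Jackson's tree model is used here only to avoid defining a record class for all twelve fields; the assumption that extra is a nested object matches the { "repo_name": ..., "path": ..., "license": ..., "size": ... } shape shown in the records below.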
public final T timeout(String timeout) { return timeout(TimeValue.parseTimeValue(timeout, null)); }
final T function(String timeout) { return timeout(TimeValue.parseTimeValue(timeout, null)); }
/** * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>. */
A timeout to wait if the index operation can't be performed immediately. Defaults to 1m
timeout
{ "repo_name": "Flipkart/elasticsearch", "path": "src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java", "license": "apache-2.0", "size": 6335 }
[ "org.elasticsearch.common.unit.TimeValue" ]
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.unit.*;
[ "org.elasticsearch.common" ]
org.elasticsearch.common;
1,584,227
@RequiredScope({modify}) @ResponseStatus(HttpStatus.OK) @RequestMapping(value = UrlHelpers.ADMIN_REDACT_USER, method = RequestMethod.POST) public @ResponseBody void clearUserProfile(@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId, @PathVariable Long principalId) throws NotFoundException, UnauthorizedException { serviceProvider.getPrincipalService().redactPrincipalInformation(userId, principalId); }
@RequiredScope({modify}) @ResponseStatus(HttpStatus.OK) @RequestMapping(value = UrlHelpers.ADMIN_REDACT_USER, method = RequestMethod.POST) @ResponseBody void function(@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId, @PathVariable Long principalId) throws NotFoundException, UnauthorizedException { serviceProvider.getPrincipalService().redactPrincipalInformation(userId, principalId); }
/** * Redacts all information about a user to comply with data removal requests. * @param userId Principal ID of the caller. Must be an administrator * @param principalId The principal ID of the user whose information should be cleared */
Redacts all information about a user to comply with data removal requests
clearUserProfile
{ "repo_name": "zimingd/Synapse-Repository-Services", "path": "services/repository/src/main/java/org/sagebionetworks/repo/web/controller/AdministrationController.java", "license": "apache-2.0", "size": 14480 }
[ "org.sagebionetworks.repo.model.AuthorizationConstants", "org.sagebionetworks.repo.model.UnauthorizedException", "org.sagebionetworks.repo.web.NotFoundException", "org.sagebionetworks.repo.web.RequiredScope", "org.sagebionetworks.repo.web.UrlHelpers", "org.springframework.http.HttpStatus", "org.springframework.web.bind.annotation.PathVariable", "org.springframework.web.bind.annotation.RequestMapping", "org.springframework.web.bind.annotation.RequestMethod", "org.springframework.web.bind.annotation.RequestParam", "org.springframework.web.bind.annotation.ResponseBody", "org.springframework.web.bind.annotation.ResponseStatus" ]
import org.sagebionetworks.repo.model.AuthorizationConstants; import org.sagebionetworks.repo.model.UnauthorizedException; import org.sagebionetworks.repo.web.NotFoundException; import org.sagebionetworks.repo.web.RequiredScope; import org.sagebionetworks.repo.web.UrlHelpers; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus;
import org.sagebionetworks.repo.model.*; import org.sagebionetworks.repo.web.*; import org.springframework.http.*; import org.springframework.web.bind.annotation.*;
[ "org.sagebionetworks.repo", "org.springframework.http", "org.springframework.web" ]
org.sagebionetworks.repo; org.springframework.http; org.springframework.web;
396,545
public ExecRow makeRow(TupleDescriptor td, TupleDescriptor parent) throws StandardException { DataValueDescriptor col; ExecIndexRow row; ReferencedColumns rcd = null; String checkDefinition = null; String constraintID = null; if (td != null) { CheckConstraintDescriptor cd = (CheckConstraintDescriptor)td; constraintID = cd.getUUID().toString(); checkDefinition = cd.getConstraintText(); rcd = cd.getReferencedColumnsDescriptor(); } row = getExecutionFactory().getIndexableRow(SYSCHECKS_COLUMN_COUNT); row.setColumn(SYSCHECKS_CONSTRAINTID, new SQLChar(constraintID)); row.setColumn(SYSCHECKS_CHECKDEFINITION, dvf.getLongvarcharDataValue(checkDefinition)); row.setColumn(SYSCHECKS_REFERENCEDCOLUMNS, new UserType(rcd)); return row; } /////////////////////////////////////////////////////////////////////////// // // ABSTRACT METHODS TO BE IMPLEMENTED BY CHILDREN OF CatalogRowFactory // ///////////////////////////////////////////////////////////////////////////
ExecRow function(TupleDescriptor td, TupleDescriptor parent) throws StandardException { DataValueDescriptor col; ExecIndexRow row; ReferencedColumns rcd = null; String checkDefinition = null; String constraintID = null; if (td != null) { CheckConstraintDescriptor cd = (CheckConstraintDescriptor)td; constraintID = cd.getUUID().toString(); checkDefinition = cd.getConstraintText(); rcd = cd.getReferencedColumnsDescriptor(); } row = getExecutionFactory().getIndexableRow(SYSCHECKS_COLUMN_COUNT); row.setColumn(SYSCHECKS_CONSTRAINTID, new SQLChar(constraintID)); row.setColumn(SYSCHECKS_CHECKDEFINITION, dvf.getLongvarcharDataValue(checkDefinition)); row.setColumn(SYSCHECKS_REFERENCEDCOLUMNS, new UserType(rcd)); return row; }
/** * Make a SYSCHECKS row * * @param td CheckConstraintDescriptorImpl * * @return Row suitable for inserting into SYSCHECKS. * * @exception StandardException thrown on failure */
Make a SYSCHECKS row
makeRow
{ "repo_name": "trejkaz/derby", "path": "java/engine/org/apache/derby/impl/sql/catalog/SYSCHECKSRowFactory.java", "license": "apache-2.0", "size": 7197 }
[ "org.apache.derby.catalog.ReferencedColumns", "org.apache.derby.iapi.error.StandardException", "org.apache.derby.iapi.sql.dictionary.CheckConstraintDescriptor", "org.apache.derby.iapi.sql.dictionary.TupleDescriptor", "org.apache.derby.iapi.sql.execute.ExecIndexRow", "org.apache.derby.iapi.sql.execute.ExecRow", "org.apache.derby.iapi.types.DataValueDescriptor", "org.apache.derby.iapi.types.SQLChar", "org.apache.derby.iapi.types.UserType" ]
import org.apache.derby.catalog.ReferencedColumns; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.sql.dictionary.CheckConstraintDescriptor; import org.apache.derby.iapi.sql.dictionary.TupleDescriptor; import org.apache.derby.iapi.sql.execute.ExecIndexRow; import org.apache.derby.iapi.sql.execute.ExecRow; import org.apache.derby.iapi.types.DataValueDescriptor; import org.apache.derby.iapi.types.SQLChar; import org.apache.derby.iapi.types.UserType;
import org.apache.derby.catalog.*; import org.apache.derby.iapi.error.*; import org.apache.derby.iapi.sql.dictionary.*; import org.apache.derby.iapi.sql.execute.*; import org.apache.derby.iapi.types.*;
[ "org.apache.derby" ]
org.apache.derby;
1,316,876
@Nullable public static Credentials newCredentials(@Nullable AuthAndTLSOptions options) throws IOException { if (options == null) { return null; } else if (options.googleCredentials != null) { // Credentials from file try (InputStream authFile = new FileInputStream(options.googleCredentials)) { return newCredentials(authFile, options.googleAuthScopes); } catch (FileNotFoundException e) { String message = String.format( "Could not open auth credentials file '%s': %s", options.googleCredentials, e.getMessage()); throw new IOException(message, e); } } else if (options.useGoogleDefaultCredentials) { return newCredentials( null , options.googleAuthScopes); } return null; }
static Credentials function(@Nullable AuthAndTLSOptions options) throws IOException { if (options == null) { return null; } else if (options.googleCredentials != null) { try (InputStream authFile = new FileInputStream(options.googleCredentials)) { return newCredentials(authFile, options.googleAuthScopes); } catch (FileNotFoundException e) { String message = String.format( STR, options.googleCredentials, e.getMessage()); throw new IOException(message, e); } } else if (options.useGoogleDefaultCredentials) { return newCredentials( null , options.googleAuthScopes); } return null; }
/** * Create a new {@link Credentials} object, or {@code null} if no options are provided. * * @throws IOException in case the credentials can't be constructed. */
Create a new <code>Credentials</code> object, or null if no options are provided
newCredentials
{ "repo_name": "davidzchen/bazel", "path": "src/main/java/com/google/devtools/build/lib/authandtls/GoogleAuthUtils.java", "license": "apache-2.0", "size": 9719 }
[ "com.google.auth.Credentials", "java.io.FileInputStream", "java.io.FileNotFoundException", "java.io.IOException", "java.io.InputStream", "javax.annotation.Nullable" ]
import com.google.auth.Credentials; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import javax.annotation.Nullable;
import com.google.auth.*; import java.io.*; import javax.annotation.*;
[ "com.google.auth", "java.io", "javax.annotation" ]
com.google.auth; java.io; javax.annotation;
2,437,228
private static boolean isNodeDeleted(final Connection dbc, final int nodeId) throws SQLException { boolean nodeDeleted = false; final DBUtils d = new DBUtils(RescanProcessor.class); try { final PreparedStatement stmt = dbc.prepareStatement(SQL_DB_RETRIEVE_NODE_TYPE); d.watch(stmt); stmt.setInt(1, nodeId); final ResultSet rs = stmt.executeQuery(); d.watch(rs); rs.next(); final String nodeTypeStr = rs.getString(1); if (!rs.wasNull()) { if (NodeType.DELETED.toString().equals(nodeTypeStr.charAt(0))) { nodeDeleted = true; } } } finally { d.cleanUp(); } return nodeDeleted; }
static boolean function(final Connection dbc, final int nodeId) throws SQLException { boolean nodeDeleted = false; final DBUtils d = new DBUtils(RescanProcessor.class); try { final PreparedStatement stmt = dbc.prepareStatement(SQL_DB_RETRIEVE_NODE_TYPE); d.watch(stmt); stmt.setInt(1, nodeId); final ResultSet rs = stmt.executeQuery(); d.watch(rs); rs.next(); final String nodeTypeStr = rs.getString(1); if (!rs.wasNull()) { if (NodeType.DELETED.toString().equals(nodeTypeStr.charAt(0))) { nodeDeleted = true; } } } finally { d.cleanUp(); } return nodeDeleted; }
/** * Utility method used to determine if the specified node has been marked as * deleted in the node table. * * @param dbc * Database connection. * @param nodeId * Node identifier to check * * @return TRUE if node has been marked as deleted, FALSE otherwise. */
Utility method used to determine if the specified node has been marked as deleted in the node table
isNodeDeleted
{ "repo_name": "roskens/opennms-pre-github", "path": "opennms-services/src/main/java/org/opennms/netmgt/capsd/RescanProcessor.java", "license": "agpl-3.0", "size": 151155 }
[ "java.sql.Connection", "java.sql.PreparedStatement", "java.sql.ResultSet", "java.sql.SQLException", "org.opennms.core.utils.DBUtils", "org.opennms.netmgt.model.OnmsNode" ]
import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import org.opennms.core.utils.DBUtils; import org.opennms.netmgt.model.OnmsNode;
import java.sql.*; import org.opennms.core.utils.*; import org.opennms.netmgt.model.*;
[ "java.sql", "org.opennms.core", "org.opennms.netmgt" ]
java.sql; org.opennms.core; org.opennms.netmgt;
1,166,151
protected void setDocument(Document doc) { m_text.setDocument(doc); } // getDocument private String m_columnName; protected int m_displayType; // Currency / UoM via Context private DecimalFormat m_format; private String m_title; private boolean m_setting; private String m_oldText; private String m_initialText; private boolean m_rangeSet = false; private Double m_minValue; private Double m_maxValue; private boolean m_modified = false; private CTextField m_text = new CTextField(SIZE); // Standard private CButton m_button = new CButton(); private GridField m_mField = null; private static CLogger log = CLogger.getCLogger(VNumber.class);
void function(Document doc) { m_text.setDocument(doc); } private String m_columnName; protected int m_displayType; private DecimalFormat m_format; private String m_title; private boolean m_setting; private String m_oldText; private String m_initialText; private boolean m_rangeSet = false; private Double m_minValue; private Double m_maxValue; private boolean m_modified = false; private CTextField m_text = new CTextField(SIZE); private CButton m_button = new CButton(); private GridField m_mField = null; private static CLogger log = CLogger.getCLogger(VNumber.class);
/** * Set Document * @param doc document */
Set Document
setDocument
{ "repo_name": "arthurmelo88/palmetalADP", "path": "adempiereTrunk/client/src/org/compiere/grid/ed/VNumber.java", "license": "gpl-2.0", "size": 21469 }
[ "java.text.DecimalFormat", "javax.swing.text.Document", "org.compiere.model.GridField", "org.compiere.swing.CButton", "org.compiere.swing.CTextField", "org.compiere.util.CLogger" ]
import java.text.DecimalFormat; import javax.swing.text.Document; import org.compiere.model.GridField; import org.compiere.swing.CButton; import org.compiere.swing.CTextField; import org.compiere.util.CLogger;
import java.text.*; import javax.swing.text.*; import org.compiere.model.*; import org.compiere.swing.*; import org.compiere.util.*;
[ "java.text", "javax.swing", "org.compiere.model", "org.compiere.swing", "org.compiere.util" ]
java.text; javax.swing; org.compiere.model; org.compiere.swing; org.compiere.util;
1,875,764
@Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof XYSeries)) { return false; } if (!super.equals(obj)) { return false; } XYSeries that = (XYSeries) obj; if (this.maximumItemCount != that.maximumItemCount) { return false; } if (this.autoSort != that.autoSort) { return false; } if (this.allowDuplicateXValues != that.allowDuplicateXValues) { return false; } if (!ObjectUtilities.equal(this.data, that.data)) { return false; } return true; }
boolean function(Object obj) { if (obj == this) { return true; } if (!(obj instanceof XYSeries)) { return false; } if (!super.equals(obj)) { return false; } XYSeries that = (XYSeries) obj; if (this.maximumItemCount != that.maximumItemCount) { return false; } if (this.autoSort != that.autoSort) { return false; } if (this.allowDuplicateXValues != that.allowDuplicateXValues) { return false; } if (!ObjectUtilities.equal(this.data, that.data)) { return false; } return true; }
/** * Tests this series for equality with an arbitrary object. * * @param obj the object to test against for equality * (<code>null</code> permitted). * * @return A boolean. */
Tests this series for equality with an arbitrary object
equals
{ "repo_name": "greearb/jfreechart-fse-ct", "path": "src/main/java/org/jfree/data/xy/XYSeries.java", "license": "lgpl-2.1", "size": 33902 }
[ "org.jfree.chart.util.ObjectUtilities" ]
import org.jfree.chart.util.ObjectUtilities;
import org.jfree.chart.util.*;
[ "org.jfree.chart" ]
org.jfree.chart;
1,460,589
public GetMediaInfoOfFileResponse getMediaInfoOfFile(String bucket, String key) { GetMediaInfoOfFileRequest request = new GetMediaInfoOfFileRequest(); request.setBucket(bucket); request.setKey(key); return getMediaInfoOfFile(request); }
GetMediaInfoOfFileResponse function(String bucket, String key) { GetMediaInfoOfFileRequest request = new GetMediaInfoOfFileRequest(); request.setBucket(bucket); request.setKey(key); return getMediaInfoOfFile(request); }
/** * Retrieve the media information of an object in Bos bucket. * * @param bucket The bucket name of Bos object which you want to read. * @param key The key name of Bos object which your want to read. * * @return The media information of an object in Bos bucket. */
Retrieve the media information of an object in Bos bucket
getMediaInfoOfFile
{ "repo_name": "baidubce/bce-sdk-java", "path": "src/main/java/com/baidubce/services/media/MediaClient.java", "license": "apache-2.0", "size": 91398 }
[ "com.baidubce.services.media.model.GetMediaInfoOfFileRequest", "com.baidubce.services.media.model.GetMediaInfoOfFileResponse" ]
import com.baidubce.services.media.model.GetMediaInfoOfFileRequest; import com.baidubce.services.media.model.GetMediaInfoOfFileResponse;
import com.baidubce.services.media.model.*;
[ "com.baidubce.services" ]
com.baidubce.services;
1,847,311
private static boolean hasLimitOrOrder(VoltXMLElement xmlElement) { String names[] = {"limit", "offset", "ordercolumns"}; for (String name : names) { List<VoltXMLElement> elements = xmlElement.findChildren(name); if (!elements.isEmpty()) { return true; } } return false; }
static boolean function(VoltXMLElement xmlElement) { String names[] = {"limit", STR, STR}; for (String name : names) { List<VoltXMLElement> elements = xmlElement.findChildren(name); if (!elements.isEmpty()) { return true; } } return false; }
/** * Return true if the input element itself contains one of the limit/offset/ordercolumns elements * @param xmlElement */
Return true if the input element itself contains one of the limit/offset/ordercolumns elements
hasLimitOrOrder
{ "repo_name": "deerwalk/voltdb", "path": "src/hsqldb19b3/org/hsqldb_voltpatches/StatementDMQL.java", "license": "agpl-3.0", "size": 50128 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
159,712
public JsonValueProcessorMatcher getJsonValueProcessorMatcher() { return jsonValueProcessorMatcher; }
JsonValueProcessorMatcher function() { return jsonValueProcessorMatcher; }
/** * Returns the configured JsonValueProcessorMatcher.<br> * Default value is JsonValueProcessorMatcher.DEFAULT<br> * [Java -&gt; JSON] */
Returns the configured JsonValueProcessorMatcher. Default value is JsonValueProcessorMatcher.DEFAULT [Java -&gt; JSON]
getJsonValueProcessorMatcher
{ "repo_name": "kohsuke/Json-lib", "path": "src/main/java/net/sf/json/JsonConfig.java", "license": "apache-2.0", "size": 49405 }
[ "net.sf.json.processors.JsonValueProcessorMatcher" ]
import net.sf.json.processors.JsonValueProcessorMatcher;
import net.sf.json.processors.*;
[ "net.sf.json" ]
net.sf.json;
2,130,860
protected static String[] convertACL(String[] acl, String authorityNameString, IOutputAddActivity activities) throws ManifoldCFException { if (acl != null) { String[] rval = new String[acl.length]; int i = 0; while (i < rval.length) { rval[i] = activities.qualifyAccessToken(authorityNameString,acl[i]); i++; } return rval; } return new String[0]; }
static String[] function(String[] acl, String authorityNameString, IOutputAddActivity activities) throws ManifoldCFException { if (acl != null) { String[] rval = new String[acl.length]; int i = 0; while (i < rval.length) { rval[i] = activities.qualifyAccessToken(authorityNameString,acl[i]); i++; } return rval; } return new String[0]; }
/** Convert an unqualified ACL to qualified form. * @param acl is the initial, unqualified ACL. * @param authorityNameString is the name of the governing authority for this document's acls, or null if none. * @param activities is the activities object, so we can report what's happening. * @return the modified ACL. */
Convert an unqualified ACL to qualified form
convertACL
{ "repo_name": "cogfor/mcf-cogfor", "path": "connectors/elasticsearch/connector/src/main/java/org/apache/manifoldcf/agents/output/elasticsearch/ElasticSearchConnector.java", "license": "apache-2.0", "size": 16398 }
[ "org.apache.manifoldcf.agents.interfaces.IOutputAddActivity", "org.apache.manifoldcf.core.interfaces.ManifoldCFException" ]
import org.apache.manifoldcf.agents.interfaces.IOutputAddActivity; import org.apache.manifoldcf.core.interfaces.ManifoldCFException;
import org.apache.manifoldcf.agents.interfaces.*; import org.apache.manifoldcf.core.interfaces.*;
[ "org.apache.manifoldcf" ]
org.apache.manifoldcf;
2,688,781
public final Change getChange() throws CoreException { // if (Util.isGtk()) { // // workaround for https://bugs.eclipse.org/bugs/show_bug.cgi?id=293995 : // // [Widgets] Deadlock while UI thread displaying/computing a change proposal and non-UI // thread creating image // // // Solution is to create the change outside a 'synchronized' block. // // Synchronization is achieved by polling fChange, using "fChange == COMPUTING_CHANGE" as // barrier. // // Timeout of 10s for safety reasons (should not be reached). // long end= System.currentTimeMillis() + 10000; // do { // boolean computing; // synchronized (this) { // computing= fChange == COMPUTING_CHANGE; // } // if (computing) { // try { // Display display= Display.getCurrent(); // if (display != null) { // while (! display.isDisposed() && display.readAndDispatch()) { // // empty the display loop // } // display.sleep(); // } else { // Thread.sleep(100); // } // } catch (InterruptedException e) { // //continue // } // } else { // synchronized (this) { // if (fChange == COMPUTING_CHANGE) { // continue; // } else if (fChange != null) { // return fChange; // } else { // fChange= COMPUTING_CHANGE; // } // } // Change change= createChange(); // synchronized (this) { // fChange= change; // } // return change; // } // } while (System.currentTimeMillis() < end); // // synchronized (this) { // if (fChange == COMPUTING_CHANGE) { // return null; //failed // } // } // // } else { synchronized (this) { if (fChange == null) { fChange = createChange(); } } // } return fChange; }
final Change function() throws CoreException { synchronized (this) { if (fChange == null) { fChange = createChange(); } } return fChange; }
/** * Returns the change that will be executed when the proposal is applied. This method calls {@link * #createChange()} to compute the change. * * @return the change for this proposal, can be <code>null</code> in rare cases if creation of the * change failed * @throws CoreException when the change could not be created */
Returns the change that will be executed when the proposal is applied. This method calls <code>#createChange()</code> to compute the change
getChange
{ "repo_name": "sleshchenko/che", "path": "plugins/plugin-java/che-plugin-java-ext-jdt/org-eclipse-jdt-ui/src/main/java/org/eclipse/jdt/ui/text/java/correction/ChangeCorrectionProposal.java", "license": "epl-1.0", "size": 15320 }
[ "org.eclipse.core.runtime.CoreException", "org.eclipse.ltk.core.refactoring.Change" ]
import org.eclipse.core.runtime.CoreException; import org.eclipse.ltk.core.refactoring.Change;
import org.eclipse.core.runtime.*; import org.eclipse.ltk.core.refactoring.*;
[ "org.eclipse.core", "org.eclipse.ltk" ]
org.eclipse.core; org.eclipse.ltk;
1,573,686
private static void setPort(final DebuggerTemplate debugger, final String port) { // TODO: Separate model from view try { if (NetHelpers.isValidPort(port)) { debugger.setPort(Integer.parseInt(port)); } else { CMessageBox.showError(null, "Not a valid port."); } } catch (final CouldntSaveDataException e) { CUtilityFunctions.logException(e); final String message = "E00163: " + "Could not change the debugger port"; final String description = CUtilityFunctions .createDescription( "The new debugger port could not be saved to the database.", new String[] {"There was a problem with the connection to the database while the debugger port was saved"}, new String[] {"The debugger port was not saved. Please try to find out what went wrong with the database connection and try to save the debugger port again."}); NaviErrorDialog.show(null, message, description, e); } }
static void function(final DebuggerTemplate debugger, final String port) { try { if (NetHelpers.isValidPort(port)) { debugger.setPort(Integer.parseInt(port)); } else { CMessageBox.showError(null, STR); } } catch (final CouldntSaveDataException e) { CUtilityFunctions.logException(e); final String message = STR + STR; final String description = CUtilityFunctions .createDescription( STR, new String[] {STR}, new String[] {STR}); NaviErrorDialog.show(null, message, description, e); } }
/** * Changes the port of a given debugger. * * @param debugger The debugger whose port is changed. * @param port The new host of the debugger. */
Changes the port of a given debugger
setPort
{ "repo_name": "chubbymaggie/binnavi", "path": "src/main/java/com/google/security/zynamics/binnavi/Gui/MainWindow/ProjectTree/Nodes/DebuggerContainer/Component/CDebuggersModel.java", "license": "apache-2.0", "size": 10594 }
[ "com.google.security.zynamics.binnavi.CUtilityFunctions", "com.google.security.zynamics.binnavi.Database", "com.google.security.zynamics.binnavi.Gui", "com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplate", "com.google.security.zynamics.zylib.gui.CMessageBox", "com.google.security.zynamics.zylib.net.NetHelpers" ]
import com.google.security.zynamics.binnavi.CUtilityFunctions; import com.google.security.zynamics.binnavi.Database; import com.google.security.zynamics.binnavi.Gui; import com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplate; import com.google.security.zynamics.zylib.gui.CMessageBox; import com.google.security.zynamics.zylib.net.NetHelpers;
import com.google.security.zynamics.binnavi.*; import com.google.security.zynamics.binnavi.debug.debugger.*; import com.google.security.zynamics.zylib.gui.*; import com.google.security.zynamics.zylib.net.*;
[ "com.google.security" ]
com.google.security;
832,604
public Object getValue2() { if (value == null) { return null; } switch (typeName) { case CHAR: return ((NlsString) value).getValue(); case DECIMAL: return ((BigDecimal) value).unscaledValue().longValue(); case DATE: return (int) (((Calendar) value).getTimeInMillis() / DateTimeUtils.MILLIS_PER_DAY); case TIME: return (int) (((Calendar) value).getTimeInMillis() % DateTimeUtils.MILLIS_PER_DAY); case TIMESTAMP: return ((Calendar) value).getTimeInMillis(); default: return value; } }
Object function() { if (value == null) { return null; } switch (typeName) { case CHAR: return ((NlsString) value).getValue(); case DECIMAL: return ((BigDecimal) value).unscaledValue().longValue(); case DATE: return (int) (((Calendar) value).getTimeInMillis() / DateTimeUtils.MILLIS_PER_DAY); case TIME: return (int) (((Calendar) value).getTimeInMillis() % DateTimeUtils.MILLIS_PER_DAY); case TIMESTAMP: return ((Calendar) value).getTimeInMillis(); default: return value; } }
/** * Returns the value of this literal, in the form that the calculator * program builder wants it. */
Returns the value of this literal, in the form that the calculator program builder wants it
getValue2
{ "repo_name": "sreev/incubator-calcite", "path": "core/src/main/java/org/apache/calcite/rex/RexLiteral.java", "license": "apache-2.0", "size": 21359 }
[ "java.math.BigDecimal", "java.util.Calendar", "org.apache.calcite.avatica.util.DateTimeUtils", "org.apache.calcite.util.NlsString" ]
import java.math.BigDecimal; import java.util.Calendar; import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.util.NlsString;
import java.math.*; import java.util.*; import org.apache.calcite.avatica.util.*; import org.apache.calcite.util.*;
[ "java.math", "java.util", "org.apache.calcite" ]
java.math; java.util; org.apache.calcite;
2,713,628
public void setTransactionArchiveDao(TransactionArchiveDao transactionArchiveDao) { this.transactionArchiveDao = transactionArchiveDao; }
void function(TransactionArchiveDao transactionArchiveDao) { this.transactionArchiveDao = transactionArchiveDao; }
/** * Sets the transactionArchiveDao attribute value. * * @param transactionArchiveDao The transactionArchiveDao to set. */
Sets the transactionArchiveDao attribute value
setTransactionArchiveDao
{ "repo_name": "ua-eas/ua-kfs-5.3", "path": "work/src/org/kuali/kfs/module/endow/batch/service/impl/ProcessFeeTransactionsServiceImpl.java", "license": "agpl-3.0", "size": 72907 }
[ "org.kuali.kfs.module.endow.dataaccess.TransactionArchiveDao" ]
import org.kuali.kfs.module.endow.dataaccess.TransactionArchiveDao;
import org.kuali.kfs.module.endow.dataaccess.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
340,780
public java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI> getSubterm_strings_GreaterThanOrEqualHLAPI(){ java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.strings.impl.GreaterThanOrEqualImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI( (fr.lip6.move.pnml.hlpn.strings.GreaterThanOrEqual)elemnt )); } } return retour; }
java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.strings.impl.GreaterThanOrEqualImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.strings.hlapi.GreaterThanOrEqualHLAPI( (fr.lip6.move.pnml.hlpn.strings.GreaterThanOrEqual)elemnt )); } } return retour; }
/** * This accessor return a list of encapsulated subelement, only of GreaterThanOrEqualHLAPI kind. * WARNING : this method can creates a lot of new object in memory. */
This accessor return a list of encapsulated subelement, only of GreaterThanOrEqualHLAPI kind. WARNING : this method can creates a lot of new object in memory
getSubterm_strings_GreaterThanOrEqualHLAPI
{ "repo_name": "lhillah/pnmlframework", "path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/strings/hlapi/LessThanOrEqualHLAPI.java", "license": "epl-1.0", "size": 108661 }
[ "fr.lip6.move.pnml.hlpn.terms.Term", "java.util.ArrayList", "java.util.List" ]
import fr.lip6.move.pnml.hlpn.terms.Term; import java.util.ArrayList; import java.util.List;
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
[ "fr.lip6.move", "java.util" ]
fr.lip6.move; java.util;
2,815,380
public AntXMLContext getAntXmlContext() { return antXmlContext; }
AntXMLContext function() { return antXmlContext; }
/** * Gets the xml context of Ant used while creating tasks * * @return the Ant xml context */
Gets the xml context of Ant used while creating tasks
getAntXmlContext
{ "repo_name": "apache/incubator-groovy", "path": "subprojects/groovy-ant/src/main/java/groovy/ant/AntBuilder.java", "license": "apache-2.0", "size": 20095 }
[ "org.apache.tools.ant.helper.AntXMLContext" ]
import org.apache.tools.ant.helper.AntXMLContext;
import org.apache.tools.ant.helper.*;
[ "org.apache.tools" ]
org.apache.tools;
1,251,290
public OCommandRequest command(final OCommandRequest iCommand) { makeActive(); return new OrientGraphCommand(this, getRawGraph().command(iCommand)); }
OCommandRequest function(final OCommandRequest iCommand) { makeActive(); return new OrientGraphCommand(this, getRawGraph().command(iCommand)); }
/** * Executes commands against the graph. Commands are executed outside transaction. * * @param iCommand * Command request between SQL, GREMLIN and SCRIPT commands */
Executes commands against the graph. Commands are executed outside transaction
command
{ "repo_name": "mmacfadden/orientdb", "path": "graphdb/src/main/java/com/tinkerpop/blueprints/impls/orient/OrientBaseGraph.java", "license": "apache-2.0", "size": 69036 }
[ "com.orientechnologies.orient.core.command.OCommandRequest" ]
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.command.*;
[ "com.orientechnologies.orient" ]
com.orientechnologies.orient;
2,357,530
@Test public void testGetBusinessObjectDataStatusMissingRequiredParameter() throws Exception { // Validate that activiti task fails when we do not pass a namespace value. List<FieldExtension> fieldExtensionList = new ArrayList<>(); List<Parameter> parameters = new ArrayList<>(); Map<String, Object> variableValuesToValidate = new HashMap<>(); variableValuesToValidate.put(ActivitiRuntimeHelper.VARIABLE_ERROR_MESSAGE, "A namespace must be specified."); executeWithoutLogging(ActivitiRuntimeHelper.class, () -> { testActivitiServiceTaskFailure(GetBusinessObjectDataStatus.class.getCanonicalName(), fieldExtensionList, parameters, variableValuesToValidate); }); }
void function() throws Exception { List<FieldExtension> fieldExtensionList = new ArrayList<>(); List<Parameter> parameters = new ArrayList<>(); Map<String, Object> variableValuesToValidate = new HashMap<>(); variableValuesToValidate.put(ActivitiRuntimeHelper.VARIABLE_ERROR_MESSAGE, STR); executeWithoutLogging(ActivitiRuntimeHelper.class, () -> { testActivitiServiceTaskFailure(GetBusinessObjectDataStatus.class.getCanonicalName(), fieldExtensionList, parameters, variableValuesToValidate); }); }
/** * This unit tests covers scenario when business object data status service fails due to a missing required parameter. */
This unit tests covers scenario when business object data status service fails due to a missing required parameter
testGetBusinessObjectDataStatusMissingRequiredParameter
{ "repo_name": "FINRAOS/herd", "path": "herd-code/herd-service/src/test/java/org/finra/herd/service/activiti/task/GetBusinessObjectDataStatusTest.java", "license": "apache-2.0", "size": 12592 }
[ "java.util.ArrayList", "java.util.HashMap", "java.util.List", "java.util.Map", "org.activiti.bpmn.model.FieldExtension", "org.finra.herd.model.api.xml.Parameter", "org.finra.herd.service.activiti.ActivitiRuntimeHelper" ]
import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.activiti.bpmn.model.FieldExtension; import org.finra.herd.model.api.xml.Parameter; import org.finra.herd.service.activiti.ActivitiRuntimeHelper;
import java.util.*; import org.activiti.bpmn.model.*; import org.finra.herd.model.api.xml.*; import org.finra.herd.service.activiti.*;
[ "java.util", "org.activiti.bpmn", "org.finra.herd" ]
java.util; org.activiti.bpmn; org.finra.herd;
2,283,723
public InstructionHandle compile(ClassGenerator classGen, MethodGenerator methodGen, InstructionHandle continuation) { // Returned cached value if already compiled if (_start != null) { return _start; } // If not patterns, then return handle for default template final int count = _patterns.size(); if (count == 0) { return (_start = getTemplateHandle(_default)); } // Init handle to jump when all patterns failed InstructionHandle fail = (_default == null) ? continuation : getTemplateHandle(_default); // Compile all patterns in reverse order for (int n = count - 1; n >= 0; n--) { final LocationPathPattern pattern = getPattern(n); final Template template = pattern.getTemplate(); final InstructionList il = new InstructionList(); // Patterns expect current node on top of stack il.append(methodGen.loadCurrentNode()); // Apply the test-code compiled for the pattern InstructionList ilist = methodGen.getInstructionList(pattern); if (ilist == null) { ilist = pattern.compile(classGen, methodGen); methodGen.addInstructionList(pattern, ilist); } // Make a copy of the instruction list for backpatching InstructionList copyOfilist = ilist.copy(); FlowList trueList = pattern.getTrueList(); if (trueList != null) { trueList = trueList.copyAndRedirect(ilist, copyOfilist); } FlowList falseList = pattern.getFalseList(); if (falseList != null) { falseList = falseList.copyAndRedirect(ilist, copyOfilist); } il.append(copyOfilist); // On success branch to the template code final InstructionHandle gtmpl = getTemplateHandle(template); final InstructionHandle success = il.append(new GOTO_W(gtmpl)); if (trueList != null) { trueList.backPatch(success); } if (falseList != null) { falseList.backPatch(fail); } // Next pattern's 'fail' target is this pattern's first instruction fail = il.getStart(); // Append existing instruction list to the end of this one if (_instructionList != null) { il.append(_instructionList); } // Set current instruction list to be this one _instructionList = il; } return (_start = fail); }
InstructionHandle function(ClassGenerator classGen, MethodGenerator methodGen, InstructionHandle continuation) { if (_start != null) { return _start; } final int count = _patterns.size(); if (count == 0) { return (_start = getTemplateHandle(_default)); } InstructionHandle fail = (_default == null) ? continuation : getTemplateHandle(_default); for (int n = count - 1; n >= 0; n--) { final LocationPathPattern pattern = getPattern(n); final Template template = pattern.getTemplate(); final InstructionList il = new InstructionList(); il.append(methodGen.loadCurrentNode()); InstructionList ilist = methodGen.getInstructionList(pattern); if (ilist == null) { ilist = pattern.compile(classGen, methodGen); methodGen.addInstructionList(pattern, ilist); } InstructionList copyOfilist = ilist.copy(); FlowList trueList = pattern.getTrueList(); if (trueList != null) { trueList = trueList.copyAndRedirect(ilist, copyOfilist); } FlowList falseList = pattern.getFalseList(); if (falseList != null) { falseList = falseList.copyAndRedirect(ilist, copyOfilist); } il.append(copyOfilist); final InstructionHandle gtmpl = getTemplateHandle(template); final InstructionHandle success = il.append(new GOTO_W(gtmpl)); if (trueList != null) { trueList.backPatch(success); } if (falseList != null) { falseList.backPatch(fail); } fail = il.getStart(); if (_instructionList != null) { il.append(_instructionList); } _instructionList = il; } return (_start = fail); }
/** * Compile the code for this test sequence. Compile patterns * from highest to lowest priority. Note that since patterns * can be share by multiple test sequences, instruction lists * must be copied before backpatching. */
Compile the code for this test sequence. Compile patterns from highest to lowest priority. Note that since patterns can be share by multiple test sequences, instruction lists must be copied before backpatching
compile
{ "repo_name": "srnsw/xena", "path": "xena/ext/src/xalan-j_2_7_1/src/org/apache/xalan/xsltc/compiler/TestSeq.java", "license": "gpl-3.0", "size": 8949 }
[ "org.apache.bcel.generic.InstructionHandle", "org.apache.bcel.generic.InstructionList", "org.apache.xalan.xsltc.compiler.util.ClassGenerator", "org.apache.xalan.xsltc.compiler.util.MethodGenerator" ]
import org.apache.bcel.generic.InstructionHandle; import org.apache.bcel.generic.InstructionList; import org.apache.xalan.xsltc.compiler.util.ClassGenerator; import org.apache.xalan.xsltc.compiler.util.MethodGenerator;
import org.apache.bcel.generic.*; import org.apache.xalan.xsltc.compiler.util.*;
[ "org.apache.bcel", "org.apache.xalan" ]
org.apache.bcel; org.apache.xalan;
775,230
EAttribute getProcess_Filename();
EAttribute getProcess_Filename();
/** * Returns the meta object for the attribute '{@link com.odcgroup.process.model.Process#getFilename <em>Filename</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the attribute '<em>Filename</em>'. * @see com.odcgroup.process.model.Process#getFilename() * @see #getProcess() * @generated */
Returns the meta object for the attribute '<code>com.odcgroup.process.model.Process#getFilename Filename</code>'.
getProcess_Filename
{ "repo_name": "debabratahazra/DS", "path": "designstudio/components/process/core/com.odcgroup.process.model/src/generated/java/com/odcgroup/process/model/ProcessPackage.java", "license": "epl-1.0", "size": 83413 }
[ "org.eclipse.emf.ecore.EAttribute" ]
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,759,364
public Vector3f getVec3(String key);
Vector3f function(String key);
/** * Queries the value of the given property key. * * @throws IllegalArgumentException if property has not the specified type * @param key used to find the value * @return the value or {@code null} if property doesn't exist */
Queries the value of the given property key
getVec3
{ "repo_name": "cmur2/ssim-ed", "path": "src/de/mycrobase/ssim/ed/weather/Weather.java", "license": "apache-2.0", "size": 1645 }
[ "com.jme3.math.Vector3f" ]
import com.jme3.math.Vector3f;
import com.jme3.math.*;
[ "com.jme3.math" ]
com.jme3.math;
838,823
public void setLayout(Layout layout) { this.layout = layout; }
void function(Layout layout) { this.layout = layout; }
/** * Set the layout in the context * @param layout */
Set the layout in the context
setLayout
{ "repo_name": "dotCMS/core", "path": "dotCMS/src/main/java/com/dotcms/rest/api/v1/menu/MenuContext.java", "license": "gpl-3.0", "size": 2026 }
[ "com.dotmarketing.business.Layout" ]
import com.dotmarketing.business.Layout;
import com.dotmarketing.business.*;
[ "com.dotmarketing.business" ]
com.dotmarketing.business;
1,113,145
void closedCxn(SocketChannel chan) { // Locate the channel that went down int link = cubeState.neighbors.indexOf(chan); if (-1 == link) { // This should never happen System.err.println("closedCxn() called on non-connected channel!"); return; } // Clean up state cubeState.neighbors.set(link, null); cubeState.links = cubeState.links.clearBit(link); neighborDisconnected(link); }
void closedCxn(SocketChannel chan) { int link = cubeState.neighbors.indexOf(chan); if (-1 == link) { System.err.println(STR); return; } cubeState.neighbors.set(link, null); cubeState.links = cubeState.links.clearBit(link); neighborDisconnected(link); }
/** * Clean up state when a peer closes its connection to us. The primary function is to update the neighbors and * links. * * @param chan */
Clean up state when a peer closes its connection to us. The primary function is to update the neighbors and links
closedCxn
{ "repo_name": "deblau/hyper", "path": "Hyper/src/hyper/CubeProtocol.java", "license": "gpl-2.0", "size": 70120 }
[ "java.nio.channels.SocketChannel" ]
import java.nio.channels.SocketChannel;
import java.nio.channels.*;
[ "java.nio" ]
java.nio;
1,824,782
public void reNestExpands() { List<Expand> newExpands = new ArrayList<>(); Map<EntityType, Expand> expandMap = new EnumMap<>(EntityType.class); for (Expand oldExpand : expand) { final NavigationProperty oldPath = oldExpand.getPath(); EntityType expandEntityType = oldPath.getType(); if (oldPath instanceof NavigationPropertyMain && expandMap.containsKey(expandEntityType)) { Expand existing = expandMap.get(expandEntityType); existing.getSubQuery().addExpand(oldExpand.getSubQuery().getExpand()); existing.getSubQuery().reNestExpands(); } else { newExpands.add(oldExpand); if (oldPath instanceof NavigationPropertyMain) { expandMap.put(expandEntityType, oldExpand); } } } expand.clear(); expand.addAll(newExpands); }
void function() { List<Expand> newExpands = new ArrayList<>(); Map<EntityType, Expand> expandMap = new EnumMap<>(EntityType.class); for (Expand oldExpand : expand) { final NavigationProperty oldPath = oldExpand.getPath(); EntityType expandEntityType = oldPath.getType(); if (oldPath instanceof NavigationPropertyMain && expandMap.containsKey(expandEntityType)) { Expand existing = expandMap.get(expandEntityType); existing.getSubQuery().addExpand(oldExpand.getSubQuery().getExpand()); existing.getSubQuery().reNestExpands(); } else { newExpands.add(oldExpand); if (oldPath instanceof NavigationPropertyMain) { expandMap.put(expandEntityType, oldExpand); } } } expand.clear(); expand.addAll(newExpands); }
/** * Properly nests the expands. Removes duplicates. */
Properly nests the expands. Removes duplicates
reNestExpands
{ "repo_name": "FraunhoferIOSB/SensorThingsServer", "path": "FROST-Server.Core.Model/src/main/java/de/fraunhofer/iosb/ilt/frostserver/query/Query.java", "license": "lgpl-3.0", "size": 16612 }
[ "de.fraunhofer.iosb.ilt.frostserver.model.EntityType", "de.fraunhofer.iosb.ilt.frostserver.property.NavigationProperty", "de.fraunhofer.iosb.ilt.frostserver.property.NavigationPropertyMain", "java.util.ArrayList", "java.util.EnumMap", "java.util.List", "java.util.Map" ]
import de.fraunhofer.iosb.ilt.frostserver.model.EntityType; import de.fraunhofer.iosb.ilt.frostserver.property.NavigationProperty; import de.fraunhofer.iosb.ilt.frostserver.property.NavigationPropertyMain; import java.util.ArrayList; import java.util.EnumMap; import java.util.List; import java.util.Map;
import de.fraunhofer.iosb.ilt.frostserver.model.*; import de.fraunhofer.iosb.ilt.frostserver.property.*; import java.util.*;
[ "de.fraunhofer.iosb", "java.util" ]
de.fraunhofer.iosb; java.util;
1,746,599
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository) throws KettleException { try { // The object that we're modifying here is a copy of the original! // So let's change the filename from relative to absolute by grabbing the file object... // if (!Const.isEmpty(fileName)) { FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(fileName), space); fileName = resourceNamingInterface.nameResource(fileObject, space, true); } if (!Const.isEmpty(templateFileName)) { FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(templateFileName), space); templateFileName = resourceNamingInterface.nameResource(fileObject, space, true); } return null; } catch (Exception e) { throw new KettleException(e); //$NON-NLS-1$ } }
String function(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository) throws KettleException { try { if (!Const.isEmpty(fileName)) { FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(fileName), space); fileName = resourceNamingInterface.nameResource(fileObject, space, true); } if (!Const.isEmpty(templateFileName)) { FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(templateFileName), space); templateFileName = resourceNamingInterface.nameResource(fileObject, space, true); } return null; } catch (Exception e) { throw new KettleException(e); } }
/** * Since the exported transformation that runs this will reside in a ZIP file, we can't reference files relatively. So what this does is turn the name of the base path into an absolute path. */
Since the exported transformation that runs this will reside in a ZIP file, we can't reference files relatively. So what this does is turn the name of the base path into an absolute path
exportResources
{ "repo_name": "jjeb/kettle-trunk", "path": "engine/src/org/pentaho/di/trans/steps/excelwriter/ExcelWriterStepMeta.java", "license": "apache-2.0", "size": 37623 }
[ "java.util.Map", "org.apache.commons.vfs.FileObject", "org.pentaho.di.core.Const", "org.pentaho.di.core.exception.KettleException", "org.pentaho.di.core.variables.VariableSpace", "org.pentaho.di.core.vfs.KettleVFS", "org.pentaho.di.repository.Repository", "org.pentaho.di.resource.ResourceDefinition", "org.pentaho.di.resource.ResourceNamingInterface" ]
import java.util.Map; import org.apache.commons.vfs.FileObject; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.repository.Repository; import org.pentaho.di.resource.ResourceDefinition; import org.pentaho.di.resource.ResourceNamingInterface;
import java.util.*; import org.apache.commons.vfs.*; import org.pentaho.di.core.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.variables.*; import org.pentaho.di.core.vfs.*; import org.pentaho.di.repository.*; import org.pentaho.di.resource.*;
[ "java.util", "org.apache.commons", "org.pentaho.di" ]
java.util; org.apache.commons; org.pentaho.di;
1,165,724
public void setTimes(String src, long mtime, long atime) throws IOException { checkOpen(); TraceScope scope = getPathTraceScope("setTimes", src); try { namenode.setTimes(src, mtime, atime); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, UnresolvedPathException.class, SnapshotAccessControlException.class); } finally { scope.close(); } } @Deprecated public static class DFSDataInputStream extends HdfsDataInputStream { public DFSDataInputStream(DFSInputStream in) throws IOException { super(in); } }
void function(String src, long mtime, long atime) throws IOException { checkOpen(); TraceScope scope = getPathTraceScope(STR, src); try { namenode.setTimes(src, mtime, atime); } catch(RemoteException re) { throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, UnresolvedPathException.class, SnapshotAccessControlException.class); } finally { scope.close(); } } public static class DFSDataInputStream extends HdfsDataInputStream { public DFSDataInputStream(DFSInputStream in) throws IOException { super(in); } }
/** * set the modification and access time of a file * * @see ClientProtocol#setTimes(String, long, long) */
set the modification and access time of a file
setTimes
{ "repo_name": "vesense/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java", "license": "apache-2.0", "size": 132560 }
[ "java.io.FileNotFoundException", "java.io.IOException", "org.apache.hadoop.hdfs.client.HdfsDataInputStream", "org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException", "org.apache.hadoop.hdfs.protocol.UnresolvedPathException", "org.apache.hadoop.ipc.RemoteException", "org.apache.hadoop.security.AccessControlException", "org.apache.htrace.TraceScope" ]
import java.io.FileNotFoundException; import java.io.IOException; import org.apache.hadoop.hdfs.client.HdfsDataInputStream; import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException; import org.apache.hadoop.hdfs.protocol.UnresolvedPathException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.AccessControlException; import org.apache.htrace.TraceScope;
import java.io.*; import org.apache.hadoop.hdfs.client.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.ipc.*; import org.apache.hadoop.security.*; import org.apache.htrace.*;
[ "java.io", "org.apache.hadoop", "org.apache.htrace" ]
java.io; org.apache.hadoop; org.apache.htrace;
1,111,307
public List<JmxEndpointOperationParameterInfo> getParameters() { return Collections.unmodifiableList(this.parameters); }
List<JmxEndpointOperationParameterInfo> function() { return Collections.unmodifiableList(this.parameters); }
/** * Returns the parameters of the operation. * @return the operation parameters */
Returns the parameters of the operation
getParameters
{ "repo_name": "linead/spring-boot", "path": "spring-boot/src/main/java/org/springframework/boot/endpoint/jmx/JmxEndpointOperation.java", "license": "apache-2.0", "size": 2808 }
[ "java.util.Collections", "java.util.List" ]
import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,794,450
private GeoLocation sendRequestToServiceProvider(String requestParameters) { if (!this.geoCodingEnabled) { return new GeoLocation(GeoCodingServiceProviderResponseStatus.DISABLED); } // *********************************************** // Immediately check cache if available. // // TODO - Have Geo Coding service Provider use Memcached as Internal cache for Queries. // TODO - Jeff Schenk // // Throttle Requests if (!canRequestBeSentToProvider()) { return new GeoLocation(GeoCodingServiceProviderResponseStatus.OVER_QUERY_LIMIT); } if (log.isDebugEnabled()) { log.debug("GeoLocation Request being Sent:[" + requestParameters + "]"); } if (StringUtils.isEmpty(requestParameters)) { return new GeoLocation(GeoCodingServiceProviderResponseStatus.INVALID_REQUEST); } // Prepare for sending request RestTemplate restTemplate = new RestTemplate(); // // must be CommonsClientHttpRequestFactory or else the location header // in an HTTP 302 won't be followed // restTemplate.setRequestFactory(new CommonsClientHttpRequestFactory()); // TODO .. repair the refactoring... // MappingJacksonHttpMessageConverter json = new MappingJacksonHttpMessageConverter(); json.setSupportedMediaTypes(Arrays.asList(new MediaType("text", "javascript"))); restTemplate.getMessageConverters().add(json); RestOperations restOperations = restTemplate; // geoCodingServiceProviderClientId if ((StringUtils.isEmpty(this.geoCodingServiceProviderClientId)) || (StringUtils.isEmpty(this.geoCodingServiceProviderClientSignature))) { // Perform the Call, without a specified ClientID GeoLocation geoLocation = restOperations.getForObject(this.geoCodingServiceProviderUrl + "{output}" + "?{requestParameters}" + "&sensor={sensorSetting}", GeoLocation.class, this.geoCodingServiceProviderOutput, requestParameters, this.getGeoCodingSensorForServer()); // TODO Place in Cache.... return geoLocation; } else { // Sign the Call and Specify the clientId as Well. try { String signedRequest = this.signGMapRequest(this.geoCodingServiceProviderUrl + this.geoCodingServiceProviderOutput + "?" + requestParameters + "&client=" + this.geoCodingServiceProviderClientId + "&sensor=" + this.getGeoCodingSensorForServer()); if (log.isDebugEnabled()) { log.debug("Signed URL:["+signedRequest+"]"); } GeoLocation geoLocation = restOperations.getForObject(signedRequest, GeoLocation.class); // TODO Place in Cache.... return geoLocation; } catch (Exception e) { log.error("Exception encountered while attempting to sign GMap Request, Sending request without Client and Signature:[" + e.getMessage() + "]"); // Try Request without ClientID and being signed... GeoLocation geoLocation = restOperations.getForObject(this.geoCodingServiceProviderUrl + "{output}" + "?{requestParameters}" + "&sensor={sensorSetting}", GeoLocation.class, this.geoCodingServiceProviderOutput, requestParameters, this.getGeoCodingSensorForServer()); // TODO Place in Cache.... return geoLocation; } } }
GeoLocation function(String requestParameters) { if (!this.geoCodingEnabled) { return new GeoLocation(GeoCodingServiceProviderResponseStatus.DISABLED); } if (!canRequestBeSentToProvider()) { return new GeoLocation(GeoCodingServiceProviderResponseStatus.OVER_QUERY_LIMIT); } if (log.isDebugEnabled()) { log.debug(STR + requestParameters + "]"); } if (StringUtils.isEmpty(requestParameters)) { return new GeoLocation(GeoCodingServiceProviderResponseStatus.INVALID_REQUEST); } RestTemplate restTemplate = new RestTemplate(); json.setSupportedMediaTypes(Arrays.asList(new MediaType("text", STR))); restTemplate.getMessageConverters().add(json); RestOperations restOperations = restTemplate; if ((StringUtils.isEmpty(this.geoCodingServiceProviderClientId)) (StringUtils.isEmpty(this.geoCodingServiceProviderClientSignature))) { GeoLocation geoLocation = restOperations.getForObject(this.geoCodingServiceProviderUrl + STR + STR + STR, GeoLocation.class, this.geoCodingServiceProviderOutput, requestParameters, this.getGeoCodingSensorForServer()); return geoLocation; } else { try { String signedRequest = this.signGMapRequest(this.geoCodingServiceProviderUrl + this.geoCodingServiceProviderOutput + "?" + requestParameters + STR + this.geoCodingServiceProviderClientId + STR + this.getGeoCodingSensorForServer()); if (log.isDebugEnabled()) { log.debug(STR+signedRequest+"]"); } GeoLocation geoLocation = restOperations.getForObject(signedRequest, GeoLocation.class); return geoLocation; } catch (Exception e) { log.error(STR + e.getMessage() + "]"); GeoLocation geoLocation = restOperations.getForObject(this.geoCodingServiceProviderUrl + STR + STR + STR, GeoLocation.class, this.geoCodingServiceProviderOutput, requestParameters, this.getGeoCodingSensorForServer()); return geoLocation; } } }
/** * Provide Request to Geocoding Service Provider to Obtain the Location based upon * the specified parameter information. * * @param requestParameters * @return GeoLocation */
Provide Request to Geocoding Service Provider to Obtain the Location based upon the specified parameter information
sendRequestToServiceProvider
{ "repo_name": "jaschenk/jeffaschenk-commons", "path": "src/main/java/jeffaschenk/commons/system/external/geocoding/GeoCodingServiceProviderImpl.java", "license": "apache-2.0", "size": 22587 }
[ "java.util.Arrays", "org.springframework.http.MediaType", "org.springframework.web.client.RestOperations", "org.springframework.web.client.RestTemplate" ]
import java.util.Arrays; import org.springframework.http.MediaType; import org.springframework.web.client.RestOperations; import org.springframework.web.client.RestTemplate;
import java.util.*; import org.springframework.http.*; import org.springframework.web.client.*;
[ "java.util", "org.springframework.http", "org.springframework.web" ]
java.util; org.springframework.http; org.springframework.web;
540,426
void markCompletedJob(JobInProgress job) { for (TaskInProgress tip : job.getTasks(TaskType.JOB_SETUP)) { for (TaskStatus taskStatus : tip.getTaskStatuses()) { if (taskStatus.getRunState() != TaskStatus.State.RUNNING && taskStatus.getRunState() != TaskStatus.State.COMMIT_PENDING && taskStatus.getRunState() != TaskStatus.State.UNASSIGNED) { markCompletedTaskAttempt(taskStatus.getTaskTracker(), taskStatus.getTaskID()); } } } for (TaskInProgress tip : job.getTasks(TaskType.MAP)) { for (TaskStatus taskStatus : tip.getTaskStatuses()) { if (taskStatus.getRunState() != TaskStatus.State.RUNNING && taskStatus.getRunState() != TaskStatus.State.COMMIT_PENDING && taskStatus.getRunState() != TaskStatus.State.FAILED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.KILLED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.UNASSIGNED) { markCompletedTaskAttempt(taskStatus.getTaskTracker(), taskStatus.getTaskID()); } } } for (TaskInProgress tip : job.getTasks(TaskType.REDUCE)) { for (TaskStatus taskStatus : tip.getTaskStatuses()) { if (taskStatus.getRunState() != TaskStatus.State.RUNNING && taskStatus.getRunState() != TaskStatus.State.COMMIT_PENDING && taskStatus.getRunState() != TaskStatus.State.FAILED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.KILLED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.UNASSIGNED) { markCompletedTaskAttempt(taskStatus.getTaskTracker(), taskStatus.getTaskID()); } } } } /** * Remove all 'marked' tasks running on a given {@link TaskTracker}
void markCompletedJob(JobInProgress job) { for (TaskInProgress tip : job.getTasks(TaskType.JOB_SETUP)) { for (TaskStatus taskStatus : tip.getTaskStatuses()) { if (taskStatus.getRunState() != TaskStatus.State.RUNNING && taskStatus.getRunState() != TaskStatus.State.COMMIT_PENDING && taskStatus.getRunState() != TaskStatus.State.UNASSIGNED) { markCompletedTaskAttempt(taskStatus.getTaskTracker(), taskStatus.getTaskID()); } } } for (TaskInProgress tip : job.getTasks(TaskType.MAP)) { for (TaskStatus taskStatus : tip.getTaskStatuses()) { if (taskStatus.getRunState() != TaskStatus.State.RUNNING && taskStatus.getRunState() != TaskStatus.State.COMMIT_PENDING && taskStatus.getRunState() != TaskStatus.State.FAILED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.KILLED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.UNASSIGNED) { markCompletedTaskAttempt(taskStatus.getTaskTracker(), taskStatus.getTaskID()); } } } for (TaskInProgress tip : job.getTasks(TaskType.REDUCE)) { for (TaskStatus taskStatus : tip.getTaskStatuses()) { if (taskStatus.getRunState() != TaskStatus.State.RUNNING && taskStatus.getRunState() != TaskStatus.State.COMMIT_PENDING && taskStatus.getRunState() != TaskStatus.State.FAILED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.KILLED_UNCLEAN && taskStatus.getRunState() != TaskStatus.State.UNASSIGNED) { markCompletedTaskAttempt(taskStatus.getTaskTracker(), taskStatus.getTaskID()); } } } } /** * Remove all 'marked' tasks running on a given {@link TaskTracker}
/** * Mark all 'non-running' jobs of the job for pruning. * This function assumes that the JobTracker is locked on entry. * * @param job the completed job */
Mark all 'non-running' jobs of the job for pruning. This function assumes that the JobTracker is locked on entry
markCompletedJob
{ "repo_name": "zxqt223/hadoop-ha.1.0.3", "path": "src/mapred/org/apache/hadoop/mapred/JobTracker.java", "license": "apache-2.0", "size": 199505 }
[ "org.apache.hadoop.mapreduce.TaskType", "org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker" ]
import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.server.jobtracker.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
1,906,865
@Nullable public Date getLastFinalizedBucketEnd() { return lastFinalizedBucketEnd; }
Date function() { return lastFinalizedBucketEnd; }
/** * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed. */
Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed
getLastFinalizedBucketEnd
{ "repo_name": "GlenRSmith/elasticsearch", "path": "client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java", "license": "apache-2.0", "size": 3500 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
1,237,685
public void writeEntityToNBT(NBTTagCompound tagCompound) { super.writeEntityToNBT(tagCompound); if (this.dataWatcher.getWatchableObjectByte(17) == 1) { tagCompound.setBoolean("powered", true); } tagCompound.setShort("Fuse", (short)this.fuseTime); tagCompound.setByte("ExplosionRadius", (byte)this.explosionRadius); tagCompound.setBoolean("ignited", this.hasIgnited()); }
void function(NBTTagCompound tagCompound) { super.writeEntityToNBT(tagCompound); if (this.dataWatcher.getWatchableObjectByte(17) == 1) { tagCompound.setBoolean(STR, true); } tagCompound.setShort("Fuse", (short)this.fuseTime); tagCompound.setByte(STR, (byte)this.explosionRadius); tagCompound.setBoolean(STR, this.hasIgnited()); }
/** * (abstract) Protected helper method to write subclass entity data to NBT. */
(abstract) Protected helper method to write subclass entity data to NBT
writeEntityToNBT
{ "repo_name": "SkidJava/BaseClient", "path": "new_1.8.8/net/minecraft/entity/monster/EntityCreeper.java", "license": "gpl-2.0", "size": 9648 }
[ "net.minecraft.nbt.NBTTagCompound" ]
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.*;
[ "net.minecraft.nbt" ]
net.minecraft.nbt;
2,407,708
public void write(int ch) throws IOException { }
void function(int ch) throws IOException { }
/** * Writes a byte. */
Writes a byte
write
{ "repo_name": "CleverCloud/Bianca", "path": "bianca/src/main/java/com/clevercloud/vfs/NullWriteStream.java", "license": "gpl-2.0", "size": 1759 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,383,773
@Test(timeout = 120000) public void testVertexPartiallyFinished_Broadcast() throws Exception { DAG dag = createDAG("VertexPartiallyFinished_Broadcast", ControlledImmediateStartVertexManager.class, DataMovementType.BROADCAST, true); TezCounters counters = runDAGAndVerify(dag, DAGStatus.State.SUCCEEDED); assertEquals(4, counters.findCounter(DAGCounter.NUM_SUCCEEDED_TASKS).getValue()); assertEquals(2, counters.findCounter(TestCounter.Counter_1).getValue()); List<HistoryEvent> historyEvents1 = readRecoveryLog(1); List<HistoryEvent> historyEvents2 = readRecoveryLog(2); printHistoryEvents(historyEvents1, 1); printHistoryEvents(historyEvents1, 2); // task_0 of v1 is finished in attempt 1, task_1 of v1 is not finished in // attempt 1 assertEquals(1, findTaskAttemptFinishedEvent(historyEvents1, 0, 0).size()); assertEquals(0, findTaskAttemptFinishedEvent(historyEvents1, 0, 1).size()); // task_0 of v1 is finished in attempt 1 and not rerun, task_1 of v1 is // finished in attempt 2 assertEquals(1, findTaskAttemptFinishedEvent(historyEvents2, 0, 0).size()); assertEquals(1, findTaskAttemptFinishedEvent(historyEvents2, 0, 1).size()); }
@Test(timeout = 120000) void function() throws Exception { DAG dag = createDAG(STR, ControlledImmediateStartVertexManager.class, DataMovementType.BROADCAST, true); TezCounters counters = runDAGAndVerify(dag, DAGStatus.State.SUCCEEDED); assertEquals(4, counters.findCounter(DAGCounter.NUM_SUCCEEDED_TASKS).getValue()); assertEquals(2, counters.findCounter(TestCounter.Counter_1).getValue()); List<HistoryEvent> historyEvents1 = readRecoveryLog(1); List<HistoryEvent> historyEvents2 = readRecoveryLog(2); printHistoryEvents(historyEvents1, 1); printHistoryEvents(historyEvents1, 2); assertEquals(1, findTaskAttemptFinishedEvent(historyEvents1, 0, 0).size()); assertEquals(0, findTaskAttemptFinishedEvent(historyEvents1, 0, 1).size()); assertEquals(1, findTaskAttemptFinishedEvent(historyEvents2, 0, 0).size()); assertEquals(1, findTaskAttemptFinishedEvent(historyEvents2, 0, 1).size()); }
/** * Fine-grained recovery task-level, In a vertex (v1), task 0 is done task 1 * is not started. History flush happens. AM dies. Once AM is recovered, task 0 is * not re-run. Task 1 is re-run. (Broadcast) * * @throws Exception */
Fine-grained recovery at the task level: in a vertex (v1), task 0 is done and task 1 is not started. History flush happens. AM dies. Once the AM is recovered, task 0 is not re-run. Task 1 is re-run. (Broadcast)
testVertexPartiallyFinished_Broadcast
{ "repo_name": "ueshin/apache-tez", "path": "tez-tests/src/test/java/org/apache/tez/test/TestAMRecovery.java", "license": "apache-2.0", "size": 29226 }
[ "java.util.List", "org.apache.tez.common.counters.DAGCounter", "org.apache.tez.common.counters.TezCounters", "org.apache.tez.dag.api.EdgeProperty", "org.apache.tez.dag.api.client.DAGStatus", "org.apache.tez.dag.history.HistoryEvent", "org.junit.Assert", "org.junit.Test" ]
import java.util.List; import org.apache.tez.common.counters.DAGCounter; import org.apache.tez.common.counters.TezCounters; import org.apache.tez.dag.api.EdgeProperty; import org.apache.tez.dag.api.client.DAGStatus; import org.apache.tez.dag.history.HistoryEvent; import org.junit.Assert; import org.junit.Test;
import java.util.*; import org.apache.tez.common.counters.*; import org.apache.tez.dag.api.*; import org.apache.tez.dag.api.client.*; import org.apache.tez.dag.history.*; import org.junit.*;
[ "java.util", "org.apache.tez", "org.junit" ]
java.util; org.apache.tez; org.junit;
1,902,513
public @NonNull Builder noDiskCache() { this.noDiskCache = true; return this; }
@NonNull Builder function() { this.noDiskCache = true; return this; }
/** * Dont cache the image in disk. * By default all images are cached * * @return Builder */
Don't cache the image on disk. By default all images are cached
noDiskCache
{ "repo_name": "uTeach/uteach-android", "path": "teach/src/main/java/com/u/teach/utils/FrescoImageController.java", "license": "mit", "size": 22434 }
[ "android.support.annotation.NonNull" ]
import android.support.annotation.NonNull;
import android.support.annotation.*;
[ "android.support" ]
android.support;
2,019,046
public void configure() throws IOException { // Read config file sConfig = new Properties(); URL url = WebDriverPool.class.getClassLoader().getResource(CONFIG_FILE); File conf = new File(url.getFile()); sConfig.load(new FileReader(conf)); // Prepare capabilities sCaps = new DesiredCapabilities(); sCaps.setJavascriptEnabled(true); sCaps.setCapability("takesScreenshot", false); String driver = sConfig.getProperty("driver", DRIVER_PHANTOMJS); // Fetch PhantomJS-specific configuration parameters if (driver.equals(DRIVER_PHANTOMJS)) { // "phantomjs_exec_path" if (sConfig.getProperty("phantomjs_exec_path") != null) { sCaps.setCapability( PhantomJSDriverService.PHANTOMJS_EXECUTABLE_PATH_PROPERTY, sConfig.getProperty("phantomjs_exec_path")); } else { throw new IOException( String.format( "Property '%s' not set!", PhantomJSDriverService.PHANTOMJS_EXECUTABLE_PATH_PROPERTY)); } // "phantomjs_driver_path" if (sConfig.getProperty("phantomjs_driver_path") != null) { System.out.println("Test will use an external GhostDriver"); sCaps.setCapability( PhantomJSDriverService.PHANTOMJS_GHOSTDRIVER_PATH_PROPERTY, sConfig.getProperty("phantomjs_driver_path")); } else { System.out .println("Test will use PhantomJS internal GhostDriver"); } } // Disable "web-security", enable all possible "ssl-protocols" and // "ignore-ssl-errors" for PhantomJSDriver // sCaps.setCapability(PhantomJSDriverService.PHANTOMJS_CLI_ARGS, new // String[] { // "--web-security=false", // "--ssl-protocol=any", // "--ignore-ssl-errors=true" // }); ArrayList<String> cliArgsCap = new ArrayList<String>(); cliArgsCap.add("--web-security=false"); cliArgsCap.add("--ssl-protocol=any"); cliArgsCap.add("--ignore-ssl-errors=true"); sCaps.setCapability(PhantomJSDriverService.PHANTOMJS_CLI_ARGS, cliArgsCap); // Control LogLevel for GhostDriver, via CLI arguments sCaps.setCapability( PhantomJSDriverService.PHANTOMJS_GHOSTDRIVER_CLI_ARGS, new String[] { "--logLevel=" + (sConfig.getProperty("phantomjs_driver_loglevel") != null ? sConfig .getProperty("phantomjs_driver_loglevel") : "INFO") }); // String driver = sConfig.getProperty("driver", DRIVER_PHANTOMJS); // Start appropriate Driver if (isUrl(driver)) { sCaps.setBrowserName("phantomjs"); mDriver = new RemoteWebDriver(new URL(driver), sCaps); } else if (driver.equals(DRIVER_FIREFOX)) { mDriver = new FirefoxDriver(sCaps); } else if (driver.equals(DRIVER_CHROME)) { mDriver = new ChromeDriver(sCaps); } else if (driver.equals(DRIVER_PHANTOMJS)) { mDriver = new PhantomJSDriver(sCaps); } }
void function() throws IOException { sConfig = new Properties(); URL url = WebDriverPool.class.getClassLoader().getResource(CONFIG_FILE); File conf = new File(url.getFile()); sConfig.load(new FileReader(conf)); sCaps = new DesiredCapabilities(); sCaps.setJavascriptEnabled(true); sCaps.setCapability(STR, false); String driver = sConfig.getProperty(STR, DRIVER_PHANTOMJS); if (driver.equals(DRIVER_PHANTOMJS)) { if (sConfig.getProperty(STR) != null) { sCaps.setCapability( PhantomJSDriverService.PHANTOMJS_EXECUTABLE_PATH_PROPERTY, sConfig.getProperty(STR)); } else { throw new IOException( String.format( STR, PhantomJSDriverService.PHANTOMJS_EXECUTABLE_PATH_PROPERTY)); } if (sConfig.getProperty(STR) != null) { System.out.println(STR); sCaps.setCapability( PhantomJSDriverService.PHANTOMJS_GHOSTDRIVER_PATH_PROPERTY, sConfig.getProperty(STR)); } else { System.out .println(STR); } } ArrayList<String> cliArgsCap = new ArrayList<String>(); cliArgsCap.add(STR); cliArgsCap.add(STR); cliArgsCap.add(STR); sCaps.setCapability(PhantomJSDriverService.PHANTOMJS_CLI_ARGS, cliArgsCap); sCaps.setCapability( PhantomJSDriverService.PHANTOMJS_GHOSTDRIVER_CLI_ARGS, new String[] { STR + (sConfig.getProperty(STR) != null ? sConfig .getProperty(STR) : "INFO") }); if (isUrl(driver)) { sCaps.setBrowserName(STR); mDriver = new RemoteWebDriver(new URL(driver), sCaps); } else if (driver.equals(DRIVER_FIREFOX)) { mDriver = new FirefoxDriver(sCaps); } else if (driver.equals(DRIVER_CHROME)) { mDriver = new ChromeDriver(sCaps); } else if (driver.equals(DRIVER_PHANTOMJS)) { mDriver = new PhantomJSDriver(sCaps); } }
/** * Configure the GhostDriver, and initialize a WebDriver instance. This part * of code comes from GhostDriver. * https://github.com/detro/ghostdriver/tree/master/test/java/src/test/java/ghostdriver * * @author [email protected] * @throws IOException */
Configure the GhostDriver, and initialize a WebDriver instance. This part of the code comes from GhostDriver. HREF
configure
{ "repo_name": "danielleeht/zhuoyue", "path": "zhuoyue-crawler/src/main/java/us/codecraft/webmagic/downloader/selenium/WebDriverPool.java", "license": "mit", "size": 6737 }
[ "java.io.File", "java.io.FileReader", "java.io.IOException", "java.util.ArrayList", "java.util.Properties", "org.openqa.selenium.chrome.ChromeDriver", "org.openqa.selenium.firefox.FirefoxDriver", "org.openqa.selenium.phantomjs.PhantomJSDriver", "org.openqa.selenium.phantomjs.PhantomJSDriverService", "org.openqa.selenium.remote.DesiredCapabilities", "org.openqa.selenium.remote.RemoteWebDriver" ]
import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import java.util.Properties; import org.openqa.selenium.chrome.ChromeDriver; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.phantomjs.PhantomJSDriver; import org.openqa.selenium.phantomjs.PhantomJSDriverService; import org.openqa.selenium.remote.DesiredCapabilities; import org.openqa.selenium.remote.RemoteWebDriver;
import java.io.*; import java.util.*; import org.openqa.selenium.chrome.*; import org.openqa.selenium.firefox.*; import org.openqa.selenium.phantomjs.*; import org.openqa.selenium.remote.*;
[ "java.io", "java.util", "org.openqa.selenium" ]
java.io; java.util; org.openqa.selenium;
2,701,096
private void refreshHead(boolean dispatchChanges) { SegmentNodeState state = store.getHead(); if (!state.getRecordId().equals(head.get().getRecordId())) { head.set(state); if (dispatchChanges) { changeDispatcher.contentChanged(state.getChildNode(ROOT), CommitInfo.EMPTY_EXTERNAL); } } }
void function(boolean dispatchChanges) { SegmentNodeState state = store.getHead(); if (!state.getRecordId().equals(head.get().getRecordId())) { head.set(state); if (dispatchChanges) { changeDispatcher.contentChanged(state.getChildNode(ROOT), CommitInfo.EMPTY_EXTERNAL); } } }
/** * Refreshes the head state. Should only be called while holding a * permit from the {@link #commitSemaphore}. */
Refreshes the head state. Should only be called while holding a permit from the <code>#commitSemaphore</code>
refreshHead
{ "repo_name": "yesil/jackrabbit-oak", "path": "oak-segment/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeStore.java", "license": "apache-2.0", "size": 24303 }
[ "org.apache.jackrabbit.oak.spi.commit.CommitInfo" ]
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.*;
[ "org.apache.jackrabbit" ]
org.apache.jackrabbit;
2,686,374
public WebRegistry addMountPage(final String mountPage, final Class< ? extends WebPage> pageClass) { this.mountPages.put(mountPage, pageClass); return this; }
WebRegistry function(final String mountPage, final Class< ? extends WebPage> pageClass) { this.mountPages.put(mountPage, pageClass); return this; }
/** * Adds the page class as mount page. * @param mountPage * @param pageClass * @return this for chaining. */
Adds the page class as mount page
addMountPage
{ "repo_name": "developerleo/ProjectForge-2nd", "path": "src/main/java/org/projectforge/web/registry/WebRegistry.java", "license": "gpl-3.0", "size": 16698 }
[ "org.apache.wicket.markup.html.WebPage" ]
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.markup.html.*;
[ "org.apache.wicket" ]
org.apache.wicket;
2,748,078
public boolean isSignaled() { return !Files.exists(Paths.get(stopJobMarkerFile)); } } // ************************************************************************* // USER FUNCTIONS // ************************************************************************* private static final class InfiniteSourceFunction extends RichParallelSourceFunction<Integer> { private static final long serialVersionUID = -8758033916372648233L; private boolean running; private final StopJobSignal stopJobSignal; InfiniteSourceFunction(final StopJobSignal stopJobSignal) { this.running = true; this.stopJobSignal = stopJobSignal; }
boolean function() { return !Files.exists(Paths.get(stopJobMarkerFile)); } } private static final class InfiniteSourceFunction extends RichParallelSourceFunction<Integer> { private static final long serialVersionUID = -8758033916372648233L; private boolean running; private final StopJobSignal stopJobSignal; InfiniteSourceFunction(final StopJobSignal stopJobSignal) { this.running = true; this.stopJobSignal = stopJobSignal; }
/** * True if job should stop. */
True if job should stop
isSignaled
{ "repo_name": "hequn8128/flink", "path": "flink-yarn-tests/src/test/java/org/apache/flink/yarn/testjob/YarnTestJob.java", "license": "apache-2.0", "size": 3526 }
[ "java.nio.file.Files", "java.nio.file.Paths", "org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction" ]
import java.nio.file.Files; import java.nio.file.Paths; import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import java.nio.file.*; import org.apache.flink.streaming.api.functions.source.*;
[ "java.nio", "org.apache.flink" ]
java.nio; org.apache.flink;
369,330
@Property(TAG_TYPE) public EnvironmentReferenceTagType getReferenceTagType();
@Property(TAG_TYPE) EnvironmentReferenceTagType function();
/** * The reference type */
The reference type
getReferenceTagType
{ "repo_name": "OndraZizka/windup", "path": "rules-java-ee/addon/src/main/java/org/jboss/windup/rules/apps/javaee/model/EnvironmentReferenceModel.java", "license": "epl-1.0", "size": 2359 }
[ "com.tinkerpop.frames.Property" ]
import com.tinkerpop.frames.Property;
import com.tinkerpop.frames.*;
[ "com.tinkerpop.frames" ]
com.tinkerpop.frames;
2,645,430
@Override public List<InputSplit> getSplits( final JobContext context ) throws IOException, InterruptedException { LOGGER.setLevel(getLogLevel(context)); validateOptions(context); final Integer minSplits = getMinimumSplitCount(context); final Integer maxSplits = getMaximumSplitCount(context); final TreeSet<IntermediateSplitInfo> splits = getIntermediateSplits( context, maxSplits); // this is an incremental algorithm, it may be better use the target // split count to drive it (ie. to get 3 splits this will split 1 large // range into two down the middle and then split one of those ranges // down the middle to get 3, rather than splitting one range into // thirds) if ((minSplits != null) && (splits.size() < minSplits)) { // set the ranges to at least min splits do { // remove the highest range, split it into 2 and add both back, // increasing the size by 1 final IntermediateSplitInfo highestSplit = splits.pollLast(); final IntermediateSplitInfo otherSplit = highestSplit.split(); splits.add(highestSplit); splits.add(otherSplit); } while (splits.size() < minSplits); } else if (((maxSplits != null) && (maxSplits > 0)) && (splits.size() > maxSplits)) { // merge splits to fit within max splits do { // this is the naive approach, remove the lowest two ranges and // merge them, decreasing the size by 1 // TODO Ideally merge takes into account locations (as well as // possibly the index as a secondary criteria) to limit the // number of locations/indices final IntermediateSplitInfo lowestSplit = splits.pollFirst(); final IntermediateSplitInfo nextLowestSplit = splits.pollFirst(); lowestSplit.merge(nextLowestSplit); splits.add(lowestSplit); } while (splits.size() > maxSplits); } final List<InputSplit> retVal = new ArrayList<InputSplit>(); for (final IntermediateSplitInfo split : splits) { retVal.add(split.toFinalSplit()); } return retVal; } private static final BigInteger ONE = new BigInteger( "1");
List<InputSplit> function( final JobContext context ) throws IOException, InterruptedException { LOGGER.setLevel(getLogLevel(context)); validateOptions(context); final Integer minSplits = getMinimumSplitCount(context); final Integer maxSplits = getMaximumSplitCount(context); final TreeSet<IntermediateSplitInfo> splits = getIntermediateSplits( context, maxSplits); if ((minSplits != null) && (splits.size() < minSplits)) { do { final IntermediateSplitInfo highestSplit = splits.pollLast(); final IntermediateSplitInfo otherSplit = highestSplit.split(); splits.add(highestSplit); splits.add(otherSplit); } while (splits.size() < minSplits); } else if (((maxSplits != null) && (maxSplits > 0)) && (splits.size() > maxSplits)) { do { final IntermediateSplitInfo lowestSplit = splits.pollFirst(); final IntermediateSplitInfo nextLowestSplit = splits.pollFirst(); lowestSplit.merge(nextLowestSplit); splits.add(lowestSplit); } while (splits.size() > maxSplits); } final List<InputSplit> retVal = new ArrayList<InputSplit>(); for (final IntermediateSplitInfo split : splits) { retVal.add(split.toFinalSplit()); } return retVal; } private static final BigInteger ONE = new BigInteger( "1");
/** * Read the metadata table to get tablets and match up ranges to them. */
Read the metadata table to get tablets and match up ranges to them
getSplits
{ "repo_name": "kcompher/geowave", "path": "extensions/datastores/accumulo/src/main/java/mil/nga/giat/geowave/datastore/accumulo/mapreduce/input/GeoWaveInputFormat.java", "license": "apache-2.0", "size": 38532 }
[ "java.io.IOException", "java.math.BigInteger", "java.util.ArrayList", "java.util.List", "java.util.TreeSet", "org.apache.hadoop.mapreduce.InputSplit", "org.apache.hadoop.mapreduce.JobContext" ]
import java.io.IOException; import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.TreeSet; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext;
import java.io.*; import java.math.*; import java.util.*; import org.apache.hadoop.mapreduce.*;
[ "java.io", "java.math", "java.util", "org.apache.hadoop" ]
java.io; java.math; java.util; org.apache.hadoop;
963,725
private static String encode(final String text) throws UnsupportedEncodingException { return URLEncoder.encode(text, "UTF-8"); }
static String function(final String text) throws UnsupportedEncodingException { return URLEncoder.encode(text, "UTF-8"); }
/** * Encode text as UTF-8 * * @param text * @return */
Encode text as UTF-8
encode
{ "repo_name": "Monstercraft/AreaAPI", "path": "src/main/java/org/monstercraft/area/metrics/Metrics.java", "license": "gpl-3.0", "size": 20382 }
[ "java.io.UnsupportedEncodingException", "java.net.URLEncoder" ]
import java.io.UnsupportedEncodingException; import java.net.URLEncoder;
import java.io.*; import java.net.*;
[ "java.io", "java.net" ]
java.io; java.net;
2,762,117
public static <E extends Enum<E>> E getOptionalEnumParam(JSONObject params, String paramName, Class<E> enumType) throws ApiException { String enumValS = params.optString(paramName, null); E enumVal = null; if (enumValS != null && !enumValS.isEmpty()) { try { enumVal = Enum.valueOf(enumType, enumValS); } catch (Exception ex) { throw new ApiException(ApiException.Type.ILLEGAL_PARAMETER, paramName + ": " + ex.getLocalizedMessage()); } } return enumVal; }
static <E extends Enum<E>> E function(JSONObject params, String paramName, Class<E> enumType) throws ApiException { String enumValS = params.optString(paramName, null); E enumVal = null; if (enumValS != null && !enumValS.isEmpty()) { try { enumVal = Enum.valueOf(enumType, enumValS); } catch (Exception ex) { throw new ApiException(ApiException.Type.ILLEGAL_PARAMETER, paramName + STR + ex.getLocalizedMessage()); } } return enumVal; }
/** * Gets an optional enum param, returning <code>null</code> if the parameter was not found. * * @param params the params * @param paramName the param name * @return the enum, or <code>null</code> * @throws ApiException if the param value does not match any of the possible enum values */
Gets an optional enum param, returning <code>null</code> if the parameter was not found
getOptionalEnumParam
{ "repo_name": "UjuE/zaproxy", "path": "src/org/zaproxy/zap/utils/ApiUtils.java", "license": "apache-2.0", "size": 7086 }
[ "net.sf.json.JSONObject", "org.zaproxy.zap.extension.api.ApiException" ]
import net.sf.json.JSONObject; import org.zaproxy.zap.extension.api.ApiException;
import net.sf.json.*; import org.zaproxy.zap.extension.api.*;
[ "net.sf.json", "org.zaproxy.zap" ]
net.sf.json; org.zaproxy.zap;
1,981,215
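A minimal usage sketch for the record above (editorial illustration, not part of the dataset): the ScanMode enum and the "mode" parameter name are invented for the example; only ApiUtils.getOptionalEnumParam and net.sf.json.JSONObject come from the record itself.

import net.sf.json.JSONObject;
import org.zaproxy.zap.extension.api.ApiException;
import org.zaproxy.zap.utils.ApiUtils;

public class OptionalEnumParamExample {

    // Hypothetical enum used only for this sketch.
    enum ScanMode { FAST, THOROUGH }

    static ScanMode readMode(JSONObject params) throws ApiException {
        // Returns null when "mode" is absent; throws an ILLEGAL_PARAMETER ApiException
        // when the value does not match a ScanMode constant.
        return ApiUtils.getOptionalEnumParam(params, "mode", ScanMode.class);
    }

    public static void main(String[] args) throws ApiException {
        JSONObject params = new JSONObject();
        params.put("mode", "FAST");
        System.out.println(readMode(params));            // FAST
        System.out.println(readMode(new JSONObject()));  // null
    }
}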
public Set<SubResourceDefinition> getSubResourceDefinitions( String viewName, String version) { viewName = ViewEntity.getViewName(viewName, version); return subResourceDefinitionsMap.get(viewName); }
Set<SubResourceDefinition> function( String viewName, String version) { viewName = ViewEntity.getViewName(viewName, version); return subResourceDefinitionsMap.get(viewName); }
/** * Get the sub-resource definitions for the given view name. * * @param viewName the instance name * @param version the version * * @return the set of sub-resource definitions */
Get the sub-resource definitions for the given view name
getSubResourceDefinitions
{ "repo_name": "zouzhberk/ambaridemo", "path": "demo-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java", "license": "apache-2.0", "size": 64017 }
[ "java.util.Set", "org.apache.ambari.server.api.resources.SubResourceDefinition", "org.apache.ambari.server.orm.entities.ViewEntity" ]
import java.util.Set; import org.apache.ambari.server.api.resources.SubResourceDefinition; import org.apache.ambari.server.orm.entities.ViewEntity;
import java.util.*; import org.apache.ambari.server.api.resources.*; import org.apache.ambari.server.orm.entities.*;
[ "java.util", "org.apache.ambari" ]
java.util; org.apache.ambari;
1,904,349
private List<String> getXmlMessages(Metadata metadata, PropertyList propertyList) throws WebServiceException { List<String> messages = new ArrayList<String>(); // First, check PropertyList from Action if (propertyList != null) { for (PropertyContainer container : propertyList.getPropertyList()) { Property property = container.getProperty(); if (property.getType() == PropertyType.XML) { messages.add(((XmlProperty) property).getValue().getValue()); } } } // Second, check PropertyList of Metadata PropertyList metadataProperties = metadata.getPropertyList(); if (metadataProperties != null) { for (PropertyContainer container : metadataProperties.getPropertyList()) { Property property = container.getProperty(); if (property.getType() == PropertyType.XML) { messages.add(((XmlProperty) property).getValue().getValue()); } } } if (messages.isEmpty()) { throw new WebServiceException("No XML-Message defined"); } return messages; }
List<String> function(Metadata metadata, PropertyList propertyList) throws WebServiceException { List<String> messages = new ArrayList<String>(); if (propertyList != null) { for (PropertyContainer container : propertyList.getPropertyList()) { Property property = container.getProperty(); if (property.getType() == PropertyType.XML) { messages.add(((XmlProperty) property).getValue().getValue()); } } } PropertyList metadataProperties = metadata.getPropertyList(); if (metadataProperties != null) { for (PropertyContainer container : metadataProperties.getPropertyList()) { Property property = container.getProperty(); if (property.getType() == PropertyType.XML) { messages.add(((XmlProperty) property).getValue().getValue()); } } } if (messages.isEmpty()) { throw new WebServiceException(STR); } return messages; }
/** * Tries to find the XML-Message for the request within the properties of the given metadata. If * the message is not found, an exception is thrown. * * @param metadata * the metadata to get the XML-message from * @throws WebServiceException * thrown, if no message was found */
Tries to find the XML-Message for the request within the properties of the given metadata. If the message is not found, an exception is thrown
getXmlMessages
{ "repo_name": "NABUCCO/org.nabucco.testautomation.engine.proxy.ws", "path": "org.nabucco.testautomation.engine.proxy.ws/src/main/org/nabucco/testautomation/engine/proxy/ws/command/soap/client/SoapCallCommand.java", "license": "epl-1.0", "size": 9889 }
[ "java.util.ArrayList", "java.util.List", "org.nabucco.testautomation.engine.proxy.ws.exception.WebServiceException", "org.nabucco.testautomation.property.facade.datatype.PropertyList", "org.nabucco.testautomation.property.facade.datatype.XmlProperty", "org.nabucco.testautomation.property.facade.datatype.base.Property", "org.nabucco.testautomation.property.facade.datatype.base.PropertyContainer", "org.nabucco.testautomation.property.facade.datatype.base.PropertyType", "org.nabucco.testautomation.script.facade.datatype.metadata.Metadata" ]
import java.util.ArrayList; import java.util.List; import org.nabucco.testautomation.engine.proxy.ws.exception.WebServiceException; import org.nabucco.testautomation.property.facade.datatype.PropertyList; import org.nabucco.testautomation.property.facade.datatype.XmlProperty; import org.nabucco.testautomation.property.facade.datatype.base.Property; import org.nabucco.testautomation.property.facade.datatype.base.PropertyContainer; import org.nabucco.testautomation.property.facade.datatype.base.PropertyType; import org.nabucco.testautomation.script.facade.datatype.metadata.Metadata;
import java.util.*; import org.nabucco.testautomation.engine.proxy.ws.exception.*; import org.nabucco.testautomation.property.facade.datatype.*; import org.nabucco.testautomation.property.facade.datatype.base.*; import org.nabucco.testautomation.script.facade.datatype.metadata.*;
[ "java.util", "org.nabucco.testautomation" ]
java.util; org.nabucco.testautomation;
2,149,513
public static String shortenedStackTrace(String stackTrace) { if (stackTrace == null) { return null; } List<String> results = new ArrayList<String>(); final Pattern exclude = Pattern.compile("(org.springframework.|java.lang.reflect.Method.invoke|sun.reflect.)"); boolean found = false; for (String line : stackTrace.split("\n")) { Matcher m = exclude.matcher(line); if (m.find()) { found = true; } else { if (found) { found = false; results.add("\tat [ignored] ..."); } results.add(line); } } return StringUtils.join(results, "\n"); }
static String function(String stackTrace) { if (stackTrace == null) { return null; } List<String> results = new ArrayList<String>(); final Pattern exclude = Pattern.compile(STR); boolean found = false; for (String line : stackTrace.split("\n")) { Matcher m = exclude.matcher(line); if (m.find()) { found = true; } else { if (found) { found = false; results.add(STR); } results.add(line); } } return StringUtils.join(results, "\n"); }
/** * Convert a stack trace into a shortened version for easier viewing and data storage, excluding * those lines we are least concerned with; should average about 60% reduction in stack trace * length * * @param stackTrace original stack trace from an error * @return shortened stack trace * @should return null if stackTrace is null * @should remove springframework and reflection related lines * @since 1.7 */
Convert a stack trace into a shortened version for easier viewing and data storage, excluding those lines we are least concerned with; should average about 60% reduction in stack trace length
shortenedStackTrace
{ "repo_name": "milankarunarathne/openmrs-core", "path": "api/src/main/java/org/openmrs/util/OpenmrsUtil.java", "license": "mpl-2.0", "size": 78790 }
[ "java.util.ArrayList", "java.util.List", "java.util.regex.Matcher", "java.util.regex.Pattern", "org.apache.commons.lang.StringUtils" ]
import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils;
import java.util.*; import java.util.regex.*; import org.apache.commons.lang.*;
[ "java.util", "org.apache.commons" ]
java.util; org.apache.commons;
2,170,853
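A hedged usage sketch for the record above (editorial addition, not from the source repo): riskyOperation is a placeholder for any failing call, and ExceptionUtils comes from the same commons-lang library the record already imports for StringUtils.

import org.apache.commons.lang.exception.ExceptionUtils;
import org.openmrs.util.OpenmrsUtil;

public class ShortenedStackTraceExample {
    public static void main(String[] args) {
        try {
            riskyOperation(); // placeholder for any failing call
        } catch (Exception e) {
            // Capture the full trace, then drop Spring/reflection frames before printing.
            String full = ExceptionUtils.getStackTrace(e);
            String shortened = OpenmrsUtil.shortenedStackTrace(full);
            System.err.println(shortened);
        }
    }

    private static void riskyOperation() {
        throw new IllegalStateException("boom");
    }
}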
@Nullable public static Range<Double> showRangeCalculationDialog() { ParameterSet myParameters = MZmineCore.getConfiguration().getModuleParameters(MzRangeFormulaCalculatorModule.class); if (myParameters == null) return null; ExitCode exitCode = myParameters.showSetupDialog(null, true); if (exitCode != ExitCode.OK) return null; return getMzRangeFromFormula(myParameters); }
static Range<Double> function() { ParameterSet myParameters = MZmineCore.getConfiguration().getModuleParameters(MzRangeFormulaCalculatorModule.class); if (myParameters == null) return null; ExitCode exitCode = myParameters.showSetupDialog(null, true); if (exitCode != ExitCode.OK) return null; return getMzRangeFromFormula(myParameters); }
/** * Shows the calculation dialog and returns the calculated m/z range. May return null in case user * clicked Cancel. */
Shows the calculation dialog and returns the calculated m/z range. May return null in case user clicked Cancel
showRangeCalculationDialog
{ "repo_name": "mzmine/mzmine2", "path": "src/main/java/net/sf/mzmine/modules/tools/mzrangecalculator/MzRangeFormulaCalculatorModule.java", "license": "gpl-2.0", "size": 3573 }
[ "com.google.common.collect.Range", "net.sf.mzmine.main.MZmineCore", "net.sf.mzmine.parameters.ParameterSet", "net.sf.mzmine.util.ExitCode" ]
import com.google.common.collect.Range; import net.sf.mzmine.main.MZmineCore; import net.sf.mzmine.parameters.ParameterSet; import net.sf.mzmine.util.ExitCode;
import com.google.common.collect.*; import net.sf.mzmine.main.*; import net.sf.mzmine.parameters.*; import net.sf.mzmine.util.*;
[ "com.google.common", "net.sf.mzmine" ]
com.google.common; net.sf.mzmine;
1,676,145
protected String getSqlFrom(WindowFunction windowFunctionField) { StringBuilder statement = new StringBuilder().append(getSqlFrom(windowFunctionField.getFunction())); statement.append(" OVER ("); if (windowFunctionField.getPartitionBys().size() > 0) { statement.append("PARTITION BY "); boolean firstField = true; for (AliasedField field : windowFunctionField.getPartitionBys()) { if (!firstField) { statement.append(", "); } statement.append(getSqlFrom(field)); firstField = false; } } if (windowFunctionField.getOrderBys().size() > 0) { statement.append(" ORDER BY "); boolean firstField = true; for (AliasedField field : windowFunctionField.getOrderBys()) { if (!firstField) { statement.append(", "); } statement.append(getSqlForOrderByField(field)); firstField = false; } } statement.append(")"); return statement.toString(); }
String function(WindowFunction windowFunctionField) { StringBuilder statement = new StringBuilder().append(getSqlFrom(windowFunctionField.getFunction())); statement.append(STR); if (windowFunctionField.getPartitionBys().size() > 0) { statement.append(STR); boolean firstField = true; for (AliasedField field : windowFunctionField.getPartitionBys()) { if (!firstField) { statement.append(STR); } statement.append(getSqlFrom(field)); firstField = false; } } if (windowFunctionField.getOrderBys().size() > 0) { statement.append(STR); boolean firstField = true; for (AliasedField field : windowFunctionField.getOrderBys()) { if (!firstField) { statement.append(STR); } statement.append(getSqlForOrderByField(field)); firstField = false; } } statement.append(")"); return statement.toString(); }
/** * Convert a {@link WindowFunction} into standards compliant SQL. * @param windowFunctionField The field to convert * @return The resulting SQL **/
Convert a <code>WindowFunction</code> into standards compliant SQL
getSqlFrom
{ "repo_name": "alfasoftware/morf", "path": "morf-core/src/main/java/org/alfasoftware/morf/jdbc/SqlDialect.java", "license": "apache-2.0", "size": 144949 }
[ "org.alfasoftware.morf.sql.element.AliasedField", "org.alfasoftware.morf.sql.element.WindowFunction" ]
import org.alfasoftware.morf.sql.element.AliasedField; import org.alfasoftware.morf.sql.element.WindowFunction;
import org.alfasoftware.morf.sql.element.*;
[ "org.alfasoftware.morf" ]
org.alfasoftware.morf;
1,483,934
List<PublishEvent> retrievePersistedPublishes(String clientID);
List<PublishEvent> retrievePersistedPublishes(String clientID);
/** * Return the list of persisted publishes for the given clientID. * For QoS1 and QoS2 with clean session flag, this method return the list of * missed publish events while the client was disconnected. */
Return the list of persisted publishes for the given clientID. For QoS1 and QoS2 with the clean session flag, this method returns the list of missed publish events while the client was disconnected
retrievePersistedPublishes
{ "repo_name": "kevoree/kevoree-telemetry", "path": "org.kevoree.telemetry.server/src/main/java/org/dna/mqtt/moquette/messaging/spi/IStorageService.java", "license": "lgpl-3.0", "size": 2383 }
[ "java.util.List", "org.dna.mqtt.moquette.messaging.spi.impl.events.PublishEvent" ]
import java.util.List; import org.dna.mqtt.moquette.messaging.spi.impl.events.PublishEvent;
import java.util.*; import org.dna.mqtt.moquette.messaging.spi.impl.events.*;
[ "java.util", "org.dna.mqtt" ]
java.util; org.dna.mqtt;
2,302,601
public Comparator<Point2D> distanceToOrder() { return new DistanceToOrder(); }
Comparator<Point2D> function() { return new DistanceToOrder(); }
/** * Compares two points by distance to this point. * * @return the comparator */
Compares two points by distance to this point
distanceToOrder
{ "repo_name": "nkhuyu/algs4", "path": "src/main/java/edu/princeton/cs/algs4/Point2D.java", "license": "gpl-3.0", "size": 12952 }
[ "java.util.Comparator" ]
import java.util.Comparator;
import java.util.*;
[ "java.util" ]
java.util;
2,689,802
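A small usage sketch for the record above (editorial illustration, not part of the dataset): it sorts an array of algs4 Point2D objects by distance to a reference point using the comparator returned by distanceToOrder().

import java.util.Arrays;
import edu.princeton.cs.algs4.Point2D;

public class DistanceToOrderExample {
    public static void main(String[] args) {
        Point2D reference = new Point2D(0.0, 0.0);
        Point2D[] points = {
            new Point2D(3.0, 4.0),   // distance 5.0
            new Point2D(1.0, 1.0),   // distance ~1.41
            new Point2D(0.0, 2.0)    // distance 2.0
        };
        // Sort nearest-to-farthest from the reference point.
        Arrays.sort(points, reference.distanceToOrder());
        for (Point2D p : points) {
            System.out.println(p);
        }
    }
}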
private boolean needsPassthroughWorkarounds() { return Util.SDK_INT < 23 && (targetEncoding == C.ENCODING_AC3 || targetEncoding == C.ENCODING_E_AC3); }
boolean function() { return Util.SDK_INT < 23 && (targetEncoding == C.ENCODING_AC3 || targetEncoding == C.ENCODING_E_AC3); }
/** * Returns whether to work around problems with passthrough audio tracks. * See [Internal: b/18899620, b/19187573, b/21145353]. */
Returns whether to work around problems with passthrough audio tracks. See [Internal: b/18899620, b/19187573, b/21145353]
needsPassthroughWorkarounds
{ "repo_name": "kj2648/ExoplayerMultitrackTry", "path": "library/src/main/java/com/google/android/exoplayer/audio/AudioTrack.java", "license": "apache-2.0", "size": 50926 }
[ "com.google.android.exoplayer.util.Util" ]
import com.google.android.exoplayer.util.Util;
import com.google.android.exoplayer.util.*;
[ "com.google.android" ]
com.google.android;
1,685,779
private static void setDoubleOrNull(final PreparedStatement statement, final int index, final Double number) throws SQLException { if (number == null || number.isInfinite() || number.isNaN()) statement.setNull(index, Types.DOUBLE); else statement.setDouble(index, number); }
static void function(final PreparedStatement statement, final int index, final Double number) throws SQLException { if (number == null || number.isInfinite() || number.isNaN()) statement.setNull(index, Types.DOUBLE); else statement.setDouble(index, number); }
/** Some dialects like MySQL cannot handle NaN or +-Inf. * Set those numbers as Null in the statement. * @param statement * @param index * @param number * @throws SQLException */
Some dialects like MySQL cannot handle NaN or +-Inf. Set those numbers as Null in the statement
setDoubleOrNull
{ "repo_name": "css-iter/cs-studio", "path": "applications/archive/archive-plugins/org.csstudio.archive.writer.rdb/src/org/csstudio/archive/writer/rdb/NumericMetaDataHelper.java", "license": "epl-1.0", "size": 3755 }
[ "java.sql.PreparedStatement", "java.sql.SQLException", "java.sql.Types" ]
import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Types;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,131,690
protected KEMIDService getkemidService() { return kemidService; }
KEMIDService function() { return kemidService; }
/** * gets the kemidService * * @param kemidService The kemidService to get. */
gets the kemidService
getkemidService
{ "repo_name": "ua-eas/ua-kfs-5.3", "path": "work/src/org/kuali/kfs/module/endow/batch/service/impl/AvailableCashUpdateServiceImpl.java", "license": "agpl-3.0", "size": 11810 }
[ "org.kuali.kfs.module.endow.document.service.KEMIDService" ]
import org.kuali.kfs.module.endow.document.service.KEMIDService;
import org.kuali.kfs.module.endow.document.service.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
2,584,502
@Test public void testFencingInteractionWithBookieRecovery() throws Exception { System.setProperty("digestType", digestType.toString()); System.setProperty("passwd", "testPasswd"); BookKeeperAdmin admin = new BookKeeperAdmin(zkUtil.getZooKeeperConnectString()); LedgerHandle writelh = bkc.createLedger(digestType, "testPasswd".getBytes()); String tmp = "Foobar"; final int numEntries = 10; for (int i = 0; i < numEntries; i++) { writelh.addEntry(tmp.getBytes()); } BookieId bookieToKill = writelh.getLedgerMetadata().getEnsembleAt(numEntries).get(0); killBookie(bookieToKill); // write entries to change ensemble for (int i = 0; i < numEntries; i++) { writelh.addEntry(tmp.getBytes()); } admin.recoverBookieData(bookieToKill); for (int i = 0; i < numEntries; i++) { writelh.addEntry(tmp.getBytes()); } LedgerHandle readlh = bkc.openLedger(writelh.getId(), digestType, "testPasswd".getBytes()); try { writelh.addEntry(tmp.getBytes()); LOG.error("Should have thrown an exception"); fail("Should have thrown an exception when trying to write"); } catch (BKException.BKLedgerFencedException e) { // correct behaviour } readlh.close(); writelh.close(); }
void function() throws Exception { System.setProperty(STR, digestType.toString()); System.setProperty(STR, STR); BookKeeperAdmin admin = new BookKeeperAdmin(zkUtil.getZooKeeperConnectString()); LedgerHandle writelh = bkc.createLedger(digestType, STR.getBytes()); String tmp = STR; final int numEntries = 10; for (int i = 0; i < numEntries; i++) { writelh.addEntry(tmp.getBytes()); } BookieId bookieToKill = writelh.getLedgerMetadata().getEnsembleAt(numEntries).get(0); killBookie(bookieToKill); for (int i = 0; i < numEntries; i++) { writelh.addEntry(tmp.getBytes()); } admin.recoverBookieData(bookieToKill); for (int i = 0; i < numEntries; i++) { writelh.addEntry(tmp.getBytes()); } LedgerHandle readlh = bkc.openLedger(writelh.getId(), digestType, STR.getBytes()); try { writelh.addEntry(tmp.getBytes()); LOG.error(STR); fail(STR); } catch (BKException.BKLedgerFencedException e) { } readlh.close(); writelh.close(); }
/** * create a ledger and write entries. * kill a bookie in the ensemble. Recover. * Fence the ledger. Kill another bookie. Recover. */
create a ledger and write entries. kill a bookie in the ensemble. Recover. Fence the ledger. Kill another bookie. Recover
testFencingInteractionWithBookieRecovery
{ "repo_name": "apache/bookkeeper", "path": "bookkeeper-server/src/test/java/org/apache/bookkeeper/client/TestFencing.java", "license": "apache-2.0", "size": 14433 }
[ "org.apache.bookkeeper.net.BookieId", "org.junit.Assert" ]
import org.apache.bookkeeper.net.BookieId; import org.junit.Assert;
import org.apache.bookkeeper.net.*; import org.junit.*;
[ "org.apache.bookkeeper", "org.junit" ]
org.apache.bookkeeper; org.junit;
1,111,806
@Test public void testCacheEntryCleanup() throws Exception { final Time timeout = Time.milliseconds(100L); final Time timeToLive = Time.milliseconds(1L); final JobID expectedJobId2 = new JobID(); final ArchivedExecutionGraph expectedExecutionGraph2 = new ArchivedExecutionGraphBuilder().build(); final AtomicInteger requestJobCalls = new AtomicInteger(0); final TestingRestfulGateway restfulGateway = new TestingRestfulGateway.Builder() .setRequestJobFunction( jobId -> { requestJobCalls.incrementAndGet(); if (jobId.equals(expectedJobId)) { return CompletableFuture.completedFuture(expectedExecutionGraph); } else if (jobId.equals(expectedJobId2)) { return CompletableFuture.completedFuture(expectedExecutionGraph2); } else { throw new AssertionError("Invalid job id received."); } } ) .build(); try (ExecutionGraphCache executionGraphCache = new DefaultExecutionGraphCache(timeout, timeToLive)) { CompletableFuture<AccessExecutionGraph> executionGraph1Future = executionGraphCache.getExecutionGraph(expectedJobId, restfulGateway); CompletableFuture<AccessExecutionGraph> executionGraph2Future = executionGraphCache.getExecutionGraph(expectedJobId2, restfulGateway); assertEquals(expectedExecutionGraph, executionGraph1Future.get()); assertEquals(expectedExecutionGraph2, executionGraph2Future.get()); assertThat(requestJobCalls.get(), Matchers.equalTo(2)); Thread.sleep(timeToLive.toMilliseconds()); executionGraphCache.cleanup(); assertTrue(executionGraphCache.size() == 0); } }
void function() throws Exception { final Time timeout = Time.milliseconds(100L); final Time timeToLive = Time.milliseconds(1L); final JobID expectedJobId2 = new JobID(); final ArchivedExecutionGraph expectedExecutionGraph2 = new ArchivedExecutionGraphBuilder().build(); final AtomicInteger requestJobCalls = new AtomicInteger(0); final TestingRestfulGateway restfulGateway = new TestingRestfulGateway.Builder() .setRequestJobFunction( jobId -> { requestJobCalls.incrementAndGet(); if (jobId.equals(expectedJobId)) { return CompletableFuture.completedFuture(expectedExecutionGraph); } else if (jobId.equals(expectedJobId2)) { return CompletableFuture.completedFuture(expectedExecutionGraph2); } else { throw new AssertionError(STR); } } ) .build(); try (ExecutionGraphCache executionGraphCache = new DefaultExecutionGraphCache(timeout, timeToLive)) { CompletableFuture<AccessExecutionGraph> executionGraph1Future = executionGraphCache.getExecutionGraph(expectedJobId, restfulGateway); CompletableFuture<AccessExecutionGraph> executionGraph2Future = executionGraphCache.getExecutionGraph(expectedJobId2, restfulGateway); assertEquals(expectedExecutionGraph, executionGraph1Future.get()); assertEquals(expectedExecutionGraph2, executionGraph2Future.get()); assertThat(requestJobCalls.get(), Matchers.equalTo(2)); Thread.sleep(timeToLive.toMilliseconds()); executionGraphCache.cleanup(); assertTrue(executionGraphCache.size() == 0); } }
/** * Tests that cache entries are cleaned up when their TTL has expired upon * calling {@link DefaultExecutionGraphCache#cleanup()}. */
Tests that cache entries are cleaned up when their TTL has expired upon calling <code>DefaultExecutionGraphCache#cleanup()</code>
testCacheEntryCleanup
{ "repo_name": "GJL/flink", "path": "flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/legacy/DefaultExecutionGraphCacheTest.java", "license": "apache-2.0", "size": 12014 }
[ "java.util.concurrent.CompletableFuture", "java.util.concurrent.atomic.AtomicInteger", "org.apache.flink.api.common.JobID", "org.apache.flink.api.common.time.Time", "org.apache.flink.runtime.executiongraph.AccessExecutionGraph", "org.apache.flink.runtime.executiongraph.ArchivedExecutionGraph", "org.apache.flink.runtime.rest.handler.legacy.utils.ArchivedExecutionGraphBuilder", "org.apache.flink.runtime.webmonitor.TestingRestfulGateway", "org.hamcrest.Matchers", "org.junit.Assert" ]
import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.time.Time; import org.apache.flink.runtime.executiongraph.AccessExecutionGraph; import org.apache.flink.runtime.executiongraph.ArchivedExecutionGraph; import org.apache.flink.runtime.rest.handler.legacy.utils.ArchivedExecutionGraphBuilder; import org.apache.flink.runtime.webmonitor.TestingRestfulGateway; import org.hamcrest.Matchers; import org.junit.Assert;
import java.util.concurrent.*; import java.util.concurrent.atomic.*; import org.apache.flink.api.common.*; import org.apache.flink.api.common.time.*; import org.apache.flink.runtime.executiongraph.*; import org.apache.flink.runtime.rest.handler.legacy.utils.*; import org.apache.flink.runtime.webmonitor.*; import org.hamcrest.*; import org.junit.*;
[ "java.util", "org.apache.flink", "org.hamcrest", "org.junit" ]
java.util; org.apache.flink; org.hamcrest; org.junit;
2,522,800
@Override public ICacheStats getStatistics() { return this.getCacheControl().getStatistics(); }
ICacheStats function() { return this.getCacheControl().getStatistics(); }
/** * This returns the ICacheStats object with information on this region and its auxiliaries. * <p> * This data can be formatted as needed. * <p> * @return ICacheStats */
This returns the ICacheStats object with information on this region and its auxiliaries. This data can be formatted as needed.
getStatistics
{ "repo_name": "apache/commons-jcs", "path": "commons-jcs-core/src/main/java/org/apache/commons/jcs3/access/AbstractCacheAccess.java", "license": "apache-2.0", "size": 6542 }
[ "org.apache.commons.jcs3.engine.stats.behavior.ICacheStats" ]
import org.apache.commons.jcs3.engine.stats.behavior.ICacheStats;
import org.apache.commons.jcs3.engine.stats.behavior.*;
[ "org.apache.commons" ]
org.apache.commons;
760,051
public ImmutableOpenMap<String, List<AliasMetadata>> findAllAliases(final String[] concreteIndices) { return findAliases(Strings.EMPTY_ARRAY, concreteIndices); }
ImmutableOpenMap<String, List<AliasMetadata>> function(final String[] concreteIndices) { return findAliases(Strings.EMPTY_ARRAY, concreteIndices); }
/** * Finds the specific index aliases that point to the requested concrete indices directly * or that match with the indices via wildcards. * * @param concreteIndices The concrete indices that the aliases must point to in order to be returned. * @return A map of index name to the list of aliases metadata. If a concrete index does not have matching * aliases then the result will <b>not</b> include the index's key. */
Finds the specific index aliases that point to the requested concrete indices directly or that match with the indices via wildcards
findAllAliases
{ "repo_name": "gingerwizard/elasticsearch", "path": "server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java", "license": "apache-2.0", "size": 74441 }
[ "java.util.List", "org.elasticsearch.common.Strings", "org.elasticsearch.common.collect.ImmutableOpenMap" ]
import java.util.List; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap;
import java.util.*; import org.elasticsearch.common.*; import org.elasticsearch.common.collect.*;
[ "java.util", "org.elasticsearch.common" ]
java.util; org.elasticsearch.common;
1,009,782
public static boolean showGuidelines() { if ((Math.abs(Edge.LEFT.getCoordinate() - Edge.RIGHT.getCoordinate()) < DEFAULT_SHOW_GUIDELINES_LIMIT) || (Math.abs(Edge.TOP.getCoordinate() - Edge.BOTTOM.getCoordinate()) < DEFAULT_SHOW_GUIDELINES_LIMIT)) return false; else return true; }
static boolean function() { if ((Math.abs(Edge.LEFT.getCoordinate() - Edge.RIGHT.getCoordinate()) < DEFAULT_SHOW_GUIDELINES_LIMIT) || (Math.abs(Edge.TOP.getCoordinate() - Edge.BOTTOM.getCoordinate()) < DEFAULT_SHOW_GUIDELINES_LIMIT)) return false; else return true; }
/** * Indicates whether the crop window is small enough that the guidelines * should be shown. Public because this function is also used to determine * if the center handle should be focused. * * @return boolean Whether the guidelines should be shown or not */
Indicates whether the crop window is small enough that the guidelines should be shown. Public because this function is also used to determine if the center handle should be focused
showGuidelines
{ "repo_name": "vfishv/cropper", "path": "cropper/src/com/edmodo/cropper/cropwindow/CropOverlayView.java", "license": "apache-2.0", "size": 26069 }
[ "com.edmodo.cropper.cropwindow.edge.Edge" ]
import com.edmodo.cropper.cropwindow.edge.Edge;
import com.edmodo.cropper.cropwindow.edge.*;
[ "com.edmodo.cropper" ]
com.edmodo.cropper;
2,541,288
public static byte[] getStreamBytesRaw(PRStream stream, RandomAccessFileOrArray file) throws IOException { PdfReader reader = stream.getReader(); byte b[]; if (stream.getOffset() < 0) b = stream.getBytes(); else { b = new byte[stream.getLength()]; file.seek(stream.getOffset()); file.readFully(b); PdfEncryption decrypt = reader.getDecrypt(); if (decrypt != null) { PdfObject filter = getPdfObjectRelease(stream.get(PdfName.FILTER)); ArrayList filters = new ArrayList(); if (filter != null) { if (filter.isName()) filters.add(filter); else if (filter.isArray()) filters = ((PdfArray)filter).getArrayList(); } boolean skip = false; for (int k = 0; k < filters.size(); ++k) { PdfObject obj = getPdfObjectRelease((PdfObject)filters.get(k)); if (obj != null && obj.toString().equals("/Crypt")) { skip = true; break; } } if (!skip) { decrypt.setHashKey(stream.getObjNum(), stream.getObjGen()); b = decrypt.decryptByteArray(b); } } } return b; }
static byte[] function(PRStream stream, RandomAccessFileOrArray file) throws IOException { PdfReader reader = stream.getReader(); byte b[]; if (stream.getOffset() < 0) b = stream.getBytes(); else { b = new byte[stream.getLength()]; file.seek(stream.getOffset()); file.readFully(b); PdfEncryption decrypt = reader.getDecrypt(); if (decrypt != null) { PdfObject filter = getPdfObjectRelease(stream.get(PdfName.FILTER)); ArrayList filters = new ArrayList(); if (filter != null) { if (filter.isName()) filters.add(filter); else if (filter.isArray()) filters = ((PdfArray)filter).getArrayList(); } boolean skip = false; for (int k = 0; k < filters.size(); ++k) { PdfObject obj = getPdfObjectRelease((PdfObject)filters.get(k)); if (obj != null && obj.toString().equals(STR)) { skip = true; break; } } if (!skip) { decrypt.setHashKey(stream.getObjNum(), stream.getObjGen()); b = decrypt.decryptByteArray(b); } } } return b; }
/** Get the content from a stream as it is without applying any filter. * @param stream the stream * @param file the location where the stream is * @throws IOException on error * @return the stream content */
Get the content from a stream as it is without applying any filter
getStreamBytesRaw
{ "repo_name": "yogthos/itext", "path": "src/com/lowagie/text/pdf/PdfReader.java", "license": "lgpl-3.0", "size": 134229 }
[ "java.io.IOException", "java.util.ArrayList" ]
import java.io.IOException; import java.util.ArrayList;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
1,665,830
return get(key).map(Optional::of).orElse(Optional.absent()); }
return get(key).map(Optional::of).orElse(Optional.absent()); }
/** * This method requires Guava's Optional which will be deprecated in favor of Java's Optional * It is being kept for backwards compatibility only * * @deprecated Override {@link #get(String)} instead. */
This method requires Guava's Optional which will be deprecated in favor of Java's Optional It is being kept for backwards compatibility only
getConfig
{ "repo_name": "zalando/baigan-config", "path": "src/main/java/org/zalando/baigan/service/ConfigurationRepository.java", "license": "apache-2.0", "size": 803 }
[ "com.google.common.base.Optional" ]
import com.google.common.base.Optional;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
930,678
public ServiceFuture<AuthorizationServerContractInner> getAsync(String resourceGroupName, String serviceName, String authsid, final ServiceCallback<AuthorizationServerContractInner> serviceCallback) { return ServiceFuture.fromHeaderResponse(getWithServiceResponseAsync(resourceGroupName, serviceName, authsid), serviceCallback); }
ServiceFuture<AuthorizationServerContractInner> function(String resourceGroupName, String serviceName, String authsid, final ServiceCallback<AuthorizationServerContractInner> serviceCallback) { return ServiceFuture.fromHeaderResponse(getWithServiceResponseAsync(resourceGroupName, serviceName, authsid), serviceCallback); }
/** * Gets the details of the authorization server specified by its identifier. * * @param resourceGroupName The name of the resource group. * @param serviceName The name of the API Management service. * @param authsid Identifier of the authorization server. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Gets the details of the authorization server specified by its identifier
getAsync
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/apimanagement/mgmt-v2018_06_01_preview/src/main/java/com/microsoft/azure/management/apimanagement/v2018_06_01_preview/implementation/AuthorizationServersInner.java", "license": "mit", "size": 68711 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
2,031,480
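A sketch of how the callback-based overload above is typically invoked, assuming an already-built AuthorizationServersInner (client); the resource group, service name and authsid values are placeholders.
    client.getAsync("myResourceGroup", "myApimService", "myAuthServer",
            new ServiceCallback<AuthorizationServerContractInner>() {
                @Override
                public void failure(Throwable t) {
                    System.err.println("lookup failed: " + t.getMessage());
                }

                @Override
                public void success(AuthorizationServerContractInner result) {
                    System.out.println("authorization server retrieved: " + result);
                }
            });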
private void override(TypeDescriptor other) throws BinaryObjectException { assert clsName.equals(other.clsName); if (canOverride) { mapper = other.mapper; serializer = other.serializer; identity = other.identity; affKeyFieldName = other.affKeyFieldName; isEnum = other.isEnum; enumMap = other.enumMap; canOverride = other.canOverride; } else if (!other.canOverride) throw new BinaryObjectException("Duplicate explicit class definition in configuration: " + clsName); } } static class Type { private final int id; private final boolean registered; public Type(int id, boolean registered) { this.id = id; this.registered = registered; }
void function(TypeDescriptor other) throws BinaryObjectException { assert clsName.equals(other.clsName); if (canOverride) { mapper = other.mapper; serializer = other.serializer; identity = other.identity; affKeyFieldName = other.affKeyFieldName; isEnum = other.isEnum; enumMap = other.enumMap; canOverride = other.canOverride; } else if (!other.canOverride) throw new BinaryObjectException(STR + clsName); } } static class Type { private final int id; private final boolean registered; public Type(int id, boolean registered) { this.id = id; this.registered = registered; }
/** * Override binary class descriptor. * * @param other Other descriptor. * @throws BinaryObjectException If failed. */
Override binary class descriptor
override
{ "repo_name": "samaitra/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryContext.java", "license": "apache-2.0", "size": 54646 }
[ "org.apache.ignite.binary.BinaryObjectException" ]
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.binary.*;
[ "org.apache.ignite" ]
org.apache.ignite;
2,407,408
@Test(expected = DBException.class) public void testAggregate_NoAggregateFunctionDefined() throws Exception { new Query(getCtx(), "C_InvoiceLine", null, getTrxName()) .setOnlyActiveRecords(true) .setClient_ID() .aggregate("*", null); }
@Test(expected = DBException.class) void function() throws Exception { new Query(getCtx(), STR, null, getTrxName()) .setOnlyActiveRecords(true) .setClient_ID() .aggregate("*", null); }
/** * Test Exception : No Aggregate Function defined * * @throws Exception */
Test Exception : No Aggregate Function defined
testAggregate_NoAggregateFunctionDefined
{ "repo_name": "klst-com/metasfresh", "path": "de.metas.swat.ait/src/main/java/org/adempiere/ad/dao/impl/QueryTest.java", "license": "gpl-2.0", "size": 16955 }
[ "org.adempiere.exceptions.DBException", "org.compiere.model.Query", "org.junit.Test" ]
import org.adempiere.exceptions.DBException; import org.compiere.model.Query; import org.junit.Test;
import org.adempiere.exceptions.*; import org.compiere.model.*; import org.junit.*;
[ "org.adempiere.exceptions", "org.compiere.model", "org.junit" ]
org.adempiere.exceptions; org.compiere.model; org.junit;
1,688,834
void forEachOrdered(DoubleConsumer action);
void forEachOrdered(DoubleConsumer action);
/** * Performs an action for each element of this stream, guaranteeing that * each element is processed in encounter order for streams that have a * defined encounter order. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @param action a <a href="package-summary.html#NonInterference"> * non-interfering</a> action to perform on the elements * @see #forEach(DoubleConsumer) */
Performs an action for each element of this stream, guaranteeing that each element is processed in encounter order for streams that have a defined encounter order. This is a terminal operation
forEachOrdered
{ "repo_name": "mirkosertic/Bytecoder", "path": "classlib/java.base/src/main/resources/META-INF/modules/java.base/classes/java/util/stream/DoubleStream.java", "license": "apache-2.0", "size": 54716 }
[ "java.util.function.DoubleConsumer" ]
import java.util.function.DoubleConsumer;
import java.util.function.*;
[ "java.util" ]
java.util;
2,587,977
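Self-contained JDK example of the contract described above: on a parallel stream, forEach may interleave output, while forEachOrdered preserves the encounter order.
    import java.util.stream.DoubleStream;

    public class ForEachOrderedDemo {
        public static void main(String[] args) {
            // Always prints 1.0, 2.0, 3.0, 4.0 in this order, even though the stream is parallel.
            DoubleStream.of(1.0, 2.0, 3.0, 4.0)
                    .parallel()
                    .forEachOrdered(d -> System.out.println(d));
        }
    }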
public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException { Hive db = hiveDB.get(); if (db == null || needsRefresh || !db.isCurrentUserOwner()) { if (db != null) { LOG.debug("Creating new db. db = " + db + ", needsRefresh = " + needsRefresh + ", db.isCurrentUserOwner = " + db.isCurrentUserOwner()); } closeCurrent(); c.set("fs.scheme.class", "dfs"); Hive newdb = new Hive(c); hiveDB.set(newdb); return newdb; } db.conf = c; return db; }
static Hive function(HiveConf c, boolean needsRefresh) throws HiveException { Hive db = hiveDB.get(); if (db == null needsRefresh !db.isCurrentUserOwner()) { if (db != null) { LOG.debug(STR + db + STR + needsRefresh + STR + db.isCurrentUserOwner()); } closeCurrent(); c.set(STR, "dfs"); Hive newdb = new Hive(c); hiveDB.set(newdb); return newdb; } db.conf = c; return db; }
/** * get a connection to metastore. see get(HiveConf) function for comments * * @param c * new conf * @param needsRefresh * if true then creates a new one * @return The connection to the metastore * @throws HiveException */
get a connection to metastore. see get(HiveConf) function for comments
get
{ "repo_name": "wangbin83-gmail-com/hive-1.1.0-cdh5.4.8", "path": "ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java", "license": "apache-2.0", "size": 117422 }
[ "org.apache.hadoop.hive.conf.HiveConf" ]
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
1,549,314
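Hedged usage sketch for the record above, assuming a HiveConf can be built from the classpath configuration; passing needsRefresh = true forces a fresh thread-local metastore connection.
    HiveConf conf = new HiveConf();
    Hive db = Hive.get(conf, true); // true: discard any cached connection and create a new one
    try {
        // ... use the metastore client, e.g. db.getAllDatabases() ...
    } finally {
        Hive.closeCurrent(); // release the thread-local connection
    }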
public long getBlockPoolUsed(String bpid) throws IOException;
long function(String bpid) throws IOException;
/** * Returns the total space (in bytes) used by a block pool * @return the total space used by a block pool * @throws IOException */
Returns the total space (in bytes) used by a block pool
getBlockPoolUsed
{ "repo_name": "bysslord/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java", "license": "apache-2.0", "size": 3776 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,953,370
public void post(String url, String json, BaseResponseHandler responseHandler) { RequestBody requestBody = RequestBody.create(JSON, json); post(url, requestBody, responseHandler); }
void function(String url, String json, BaseResponseHandler responseHandler) { RequestBody requestBody = RequestBody.create(JSON, json); post(url, requestBody, responseHandler); }
/** * Perform HTTP POST request with a JSON string. * * @param url * the URL of HTTP request. * @param json * the parameter of Request Body. * @param responseHandler * the callback of the response. */
Perform HTTP POST request with a JSON string
post
{ "repo_name": "hhxcode/okhttp", "path": "Anony_Okhttp_library/src/com/anony/okhttp/AsyncOkHttp.java", "license": "gpl-2.0", "size": 12953 }
[ "com.squareup.okhttp.RequestBody" ]
import com.squareup.okhttp.RequestBody;
import com.squareup.okhttp.*;
[ "com.squareup.okhttp" ]
com.squareup.okhttp;
2,023,653
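The JSON constant used above is not shown in the record; presumably it is the usual OkHttp 2.x media type, something like the following (MediaType is the companion com.squareup.okhttp class to the RequestBody already imported by the record).
    // Presumed definition of the JSON constant referenced by the post(...) overload above.
    public static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");

    // The overload then simply wraps the string body:
    RequestBody requestBody = RequestBody.create(JSON, "{\"name\":\"demo\"}");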
public Font getFont(String family, int style, int size) { Font f = null; if (f == null) { f = new Font(family, style, size); } return f; }
Font function(String family, int style, int size) { Font f = null; if (f == null) { f = new Font(family, style, size); } return f; }
/** * Gets a new font. This returns a Font from a cache if a cached font * exists. If not, a Font is added to the cache. This is basically a * low-level cache for 1.1 font features. * * @param family * the font family (such as "Monospaced") * @param style * the style of the font (such as Font.PLAIN) * @param size * the point size >= 1 * @return the new font */
Gets a new font. This returns a Font from a cache if a cached font exists. If not, a Font is added to the cache. This is basically a low-level cache for 1.1 font features
getFont
{ "repo_name": "javalovercn/j2se_for_android", "path": "src/javax/swing/text/StyleContext.java", "license": "gpl-2.0", "size": 45062 }
[ "java.awt.Font" ]
import java.awt.Font;
import java.awt.*;
[ "java.awt" ]
java.awt;
2,425,558
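For comparison, the standard JDK StyleContext exposes the same getFont(family, style, size) lookup; a minimal example of the caching behaviour that the javadoc above describes, as the stock JDK implementation provides it.
    import java.awt.Font;
    import javax.swing.text.StyleContext;

    public class FontCacheDemo {
        public static void main(String[] args) {
            StyleContext sc = StyleContext.getDefaultStyleContext();
            Font a = sc.getFont("Monospaced", Font.PLAIN, 12);
            Font b = sc.getFont("Monospaced", Font.PLAIN, 12);
            // With the stock JDK StyleContext the second lookup is served from the cache,
            // so both calls normally return the very same Font instance.
            System.out.println(a == b);
        }
    }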
private void waitForFlush() throws IgniteInterruptedCheckedException { U.await(flushCond); }
void function() throws IgniteInterruptedCheckedException { U.await(flushCond); }
/** * Awaits a signal on flush condition. * * @throws IgniteInterruptedCheckedException If thread was interrupted. */
Awaits a signal on flush condition
waitForFlush
{ "repo_name": "irudyak/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/store/GridCacheWriteBehindStore.java", "license": "apache-2.0", "size": 48641 }
[ "org.apache.ignite.internal.IgniteInterruptedCheckedException", "org.apache.ignite.internal.util.typedef.internal.U" ]
import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.*; import org.apache.ignite.internal.util.typedef.internal.*;
[ "org.apache.ignite" ]
org.apache.ignite;
2,524,858
private void initializeTab(Tab tab) { sCachedCVCList.clear(); if (mLayoutManager != null) { mLayoutManager.getActiveLayout().getAllContentViewCores(sCachedCVCList); } for (int i = 0; i < sCachedCVCList.size(); i++) { initializeContentViewCore(sCachedCVCList.get(i)); } sCachedCVCList.clear(); sCachedViewList.clear(); tab.getAllViews(sCachedViewList); for (int i = 0; i < sCachedViewList.size(); i++) { View view = sCachedViewList.get(i); // Calling View#measure() and View#layout() on a View before adding it to the view // hierarchy seems to cause issues with compound drawables on some versions of Android. // We don't need to proactively size the NTP as we don't need the Android view to render // if it's not actually attached to the view hierarchy (http://crbug.com/462114). if (view == tab.getView() && tab.isNativePage()) continue; setSizeOfUnattachedView(view); } sCachedViewList.clear(); }
void function(Tab tab) { sCachedCVCList.clear(); if (mLayoutManager != null) { mLayoutManager.getActiveLayout().getAllContentViewCores(sCachedCVCList); } for (int i = 0; i < sCachedCVCList.size(); i++) { initializeContentViewCore(sCachedCVCList.get(i)); } sCachedCVCList.clear(); sCachedViewList.clear(); tab.getAllViews(sCachedViewList); for (int i = 0; i < sCachedViewList.size(); i++) { View view = sCachedViewList.get(i); if (view == tab.getView() && tab.isNativePage()) continue; setSizeOfUnattachedView(view); } sCachedViewList.clear(); }
/** * Sets the correct size for all {@link View}s on {@code tab} and sets the correct rendering * parameters on all {@link ContentViewCore}s on {@code tab}. * @param tab The {@link Tab} to initialize. */
Sets the correct size for all <code>View</code>s on tab and sets the correct rendering parameters on all <code>ContentViewCore</code>s on tab
initializeTab
{ "repo_name": "SaschaMester/delicium", "path": "chrome/android/java/src/org/chromium/chrome/browser/compositor/CompositorViewHolder.java", "license": "bsd-3-clause", "size": 42054 }
[ "android.view.View", "org.chromium.chrome.browser.Tab" ]
import android.view.View; import org.chromium.chrome.browser.Tab;
import android.view.*; import org.chromium.chrome.browser.*;
[ "android.view", "org.chromium.chrome" ]
android.view; org.chromium.chrome;
1,295,706
@Override public void invert( DMatrixRMaj A_inv ) { blockB.reshape(A_inv.numRows, A_inv.numCols, false); alg.invert(blockB); MatrixOps_DDRB.convert(blockB, A_inv); }
void function( DMatrixRMaj A_inv ) { blockB.reshape(A_inv.numRows, A_inv.numCols, false); alg.invert(blockB); MatrixOps_DDRB.convert(blockB, A_inv); }
/** * Creates a block matrix the same size as A_inv, inverts the matrix and copies the results back * onto A_inv. * * @param A_inv Where the inverted matrix is saved. Modified. */
Creates a block matrix the same size as A_inv, inverts the matrix and copies the results back onto A_inv
invert
{ "repo_name": "lessthanoptimal/ejml", "path": "main/ejml-ddense/src/org/ejml/dense/row/linsol/LinearSolver_DDRB_to_DDRM.java", "license": "apache-2.0", "size": 3799 }
[ "org.ejml.data.DMatrixRMaj" ]
import org.ejml.data.DMatrixRMaj;
import org.ejml.data.*;
[ "org.ejml.data" ]
org.ejml.data;
1,578,888
public Timestamp getActiveToDate();
Timestamp function();
/** * Gets the date for which the record becomes inactive * * @return Timestamp */
Gets the date for which the record becomes inactive
getActiveToDate
{ "repo_name": "ricepanda/rice-git3", "path": "rice-framework/krad-app-framework/src/main/java/org/kuali/rice/krad/bo/InactivatableFromTo.java", "license": "apache-2.0", "size": 2155 }
[ "java.sql.Timestamp" ]
import java.sql.Timestamp;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,471,911
public void profileClick(View view) { controller.onProfileClick(); } /** * This method is called if the specified {@code Observable} object's * {@code notifyObservers} method is called (because the {@code Observable}
void function(View view) { controller.onProfileClick(); } /** * This method is called if the specified {@code Observable} object's * {@code notifyObservers} method is called (because the {@code Observable}
/** * Directs controller to handle click on profile button. * * @param view */
Directs controller to handle click on profile button
profileClick
{ "repo_name": "CMPUT301F15T01/YesWeCandroid", "path": "app/src/main/java/ca/ualberta/trinkettrader/HomePageActivity.java", "license": "apache-2.0", "size": 4692 }
[ "android.view.View", "java.util.Observable" ]
import android.view.View; import java.util.Observable;
import android.view.*; import java.util.*;
[ "android.view", "java.util" ]
android.view; java.util;
462,055
@Test public void canGetJobSubmitterServiceBean() { final JobPersistenceService jobPersistenceService = Mockito.mock(JobPersistenceService.class); final ApplicationEventPublisher eventPublisher = Mockito.mock(ApplicationEventPublisher.class); final ApplicationEventMulticaster eventMulticaster = Mockito.mock(ApplicationEventMulticaster.class); final Resource resource = Mockito.mock(Resource.class); final List<WorkflowTask> workflowTasks = new ArrayList<>(); Assert.assertNotNull( this.servicesConfig.jobSubmitterService( jobPersistenceService, eventPublisher, eventMulticaster, workflowTasks, resource, Mockito.mock(Registry.class) ) ); }
void function() { final JobPersistenceService jobPersistenceService = Mockito.mock(JobPersistenceService.class); final ApplicationEventPublisher eventPublisher = Mockito.mock(ApplicationEventPublisher.class); final ApplicationEventMulticaster eventMulticaster = Mockito.mock(ApplicationEventMulticaster.class); final Resource resource = Mockito.mock(Resource.class); final List<WorkflowTask> workflowTasks = new ArrayList<>(); Assert.assertNotNull( this.servicesConfig.jobSubmitterService( jobPersistenceService, eventPublisher, eventMulticaster, workflowTasks, resource, Mockito.mock(Registry.class) ) ); }
/** * Can get a bean for Job Submitter Service. */
Can get a bean for Job Submitter Service
canGetJobSubmitterServiceBean
{ "repo_name": "irontable/genie", "path": "genie-web/src/test/java/com/netflix/genie/web/configs/ServicesConfigUnitTests.java", "license": "apache-2.0", "size": 10002 }
[ "com.netflix.genie.core.jobs.workflow.WorkflowTask", "com.netflix.genie.core.services.JobPersistenceService", "com.netflix.spectator.api.Registry", "java.util.ArrayList", "java.util.List", "org.junit.Assert", "org.mockito.Mockito", "org.springframework.context.ApplicationEventPublisher", "org.springframework.context.event.ApplicationEventMulticaster", "org.springframework.core.io.Resource" ]
import com.netflix.genie.core.jobs.workflow.WorkflowTask; import com.netflix.genie.core.services.JobPersistenceService; import com.netflix.spectator.api.Registry; import java.util.ArrayList; import java.util.List; import org.junit.Assert; import org.mockito.Mockito; import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.event.ApplicationEventMulticaster; import org.springframework.core.io.Resource;
import com.netflix.genie.core.jobs.workflow.*; import com.netflix.genie.core.services.*; import com.netflix.spectator.api.*; import java.util.*; import org.junit.*; import org.mockito.*; import org.springframework.context.*; import org.springframework.context.event.*; import org.springframework.core.io.*;
[ "com.netflix.genie", "com.netflix.spectator", "java.util", "org.junit", "org.mockito", "org.springframework.context", "org.springframework.core" ]
com.netflix.genie; com.netflix.spectator; java.util; org.junit; org.mockito; org.springframework.context; org.springframework.core;
906,048
@ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<MonitoringTagRulesInner>> listNextSinglePageAsync(String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .listNext(nextLink, this.client.getEndpoint(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<PagedResponse<MonitoringTagRulesInner>> function(String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException(STR)); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } final String accept = STR; context = this.client.mergeContext(context); return service .listNext(nextLink, this.client.getEndpoint(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); }
/** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return response of a list operation. */
Get the next page of items
listNextSinglePageAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/logz/azure-resourcemanager-logz/src/main/java/com/azure/resourcemanager/logz/implementation/SubAccountTagRulesClientImpl.java", "license": "mit", "size": 48179 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.PagedResponse", "com.azure.core.http.rest.PagedResponseBase", "com.azure.core.util.Context", "com.azure.resourcemanager.logz.fluent.models.MonitoringTagRulesInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.util.Context; import com.azure.resourcemanager.logz.fluent.models.MonitoringTagRulesInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.logz.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
2,301,104
@Override public void paint(Graphics g) { g.setColor(Color.black); g.fillRect(0, 0, getWidth(), getHeight()); g.setColor(Color.white); FontMetrics fm = g.getFontMetrics(); int width=0; for(char c : title.toCharArray()) { width+=fm.charWidth(c); } int height=Math.round(fm.getHeight()/1.75f); g.drawString(title, getWidth()/2-width/2, height); }
void function(Graphics g) { g.setColor(Color.black); g.fillRect(0, 0, getWidth(), getHeight()); g.setColor(Color.white); FontMetrics fm = g.getFontMetrics(); int width=0; for(char c : title.toCharArray()) { width+=fm.charWidth(c); } int height=Math.round(fm.getHeight()/1.75f); g.drawString(title, getWidth()/2-width/2, height); }
/** * Paints the TitleBar */
Paints the TitleBar
paint
{ "repo_name": "erwin8086/quadro", "path": "Game/src/levelEdit/LevelEditor.java", "license": "gpl-2.0", "size": 12259 }
[ "java.awt.Color", "java.awt.FontMetrics", "java.awt.Graphics" ]
import java.awt.Color; import java.awt.FontMetrics; import java.awt.Graphics;
import java.awt.*;
[ "java.awt" ]
java.awt;
1,578,339
public void setStatisticByAgencyPersistence( StatisticByAgencyPersistence statisticByAgencyPersistence) { this.statisticByAgencyPersistence = statisticByAgencyPersistence; }
void function( StatisticByAgencyPersistence statisticByAgencyPersistence) { this.statisticByAgencyPersistence = statisticByAgencyPersistence; }
/** * Sets the statistic by agency persistence. * * @param statisticByAgencyPersistence the statistic by agency persistence */
Sets the statistic by agency persistence
setStatisticByAgencyPersistence
{ "repo_name": "openegovplatform/OEPv2", "path": "oep-dossier-portlet/docroot/WEB-INF/src/org/oep/dossiermgt/service/base/DocFileVersionServiceBaseImpl.java", "license": "apache-2.0", "size": 51698 }
[ "org.oep.dossiermgt.service.persistence.StatisticByAgencyPersistence" ]
import org.oep.dossiermgt.service.persistence.StatisticByAgencyPersistence;
import org.oep.dossiermgt.service.persistence.*;
[ "org.oep.dossiermgt" ]
org.oep.dossiermgt;
1,454,800
public static ArrayList<Mount> getMounts() throws Exception { return getInternals().getMounts(); }
static ArrayList<Mount> function() throws Exception { return getInternals().getMounts(); }
/** * This will return an ArrayList of the class Mount. The class Mount contains the following * properties: device mountPoint type flags * <p/> * These will provide you with any information you need to work with the mount points. * * @return <code>ArrayList<Mount></code> an ArrayList of the class Mount. * @throws Exception if we cannot return the mount points. */
This will return an ArrayList of the class Mount. The class Mount contains the following properties: device mountPoint type flags These will provide you with any information you need to work with the mount points
getMounts
{ "repo_name": "TeamBliss-LP/android_packages_apps_DeviceControl", "path": "app/src/main/java/com/stericson/roottools/RootTools.java", "license": "gpl-3.0", "size": 20069 }
[ "com.stericson.roottools.containers.Mount", "java.util.ArrayList" ]
import com.stericson.roottools.containers.Mount; import java.util.ArrayList;
import com.stericson.roottools.containers.*; import java.util.*;
[ "com.stericson.roottools", "java.util" ]
com.stericson.roottools; java.util;
2,784,551
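Hedged usage sketch for RootTools.getMounts(); the getter names on Mount are assumed from the properties listed in the javadoc (device, mountPoint, type, flags) and may differ slightly between RootTools versions.
    try {
        ArrayList<Mount> mounts = RootTools.getMounts();
        for (Mount m : mounts) {
            // getMountPoint()/getType() are assumed accessors for the listed properties.
            System.out.println(m.getMountPoint() + " (" + m.getType() + ")");
        }
    } catch (Exception e) {
        // getMounts() declares a broad Exception when the mount table cannot be read.
        e.printStackTrace();
    }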
private static void close(Closeable closeable) { if (closeable != null) try { closeable.close(); } catch (IOException e) { } }
static void function(Closeable closeable) { if (closeable != null) try { closeable.close(); } catch (IOException e) { } }
/** * Close closeable object. * @param closeable closeable object */
Close closeable object
close
{ "repo_name": "andy-udp-ip/fstop-util-web", "path": "src/main/java/tw/com/fstop/util/ESAPIUtil.java", "license": "apache-2.0", "size": 5929 }
[ "java.io.Closeable", "java.io.IOException" ]
import java.io.Closeable; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
571,897
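Typical call site for the quiet-close helper above (it is private, so this would live inside the same utility class); plain JDK I/O with java.io.FileInputStream and a placeholder file name.
    InputStream in = null;
    try {
        in = new FileInputStream("data.bin"); // placeholder path
        // ... read from the stream ...
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        close(in); // swallows any IOException from close(), as in the helper above
    }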
public final AccessControlManager getAccessControlManager() { if ( m_securityConfig != null) return m_securityConfig.getAccessControlManager(); return null; }
final AccessControlManager function() { if ( m_securityConfig != null) return m_securityConfig.getAccessControlManager(); return null; }
/** * Return the access control manager * * @return AccessControlManager */
Return the access control manager
getAccessControlManager
{ "repo_name": "loftuxab/community-edition-old", "path": "projects/alfresco-jlan/source/java/org/alfresco/jlan/server/NetworkServer.java", "license": "lgpl-3.0", "size": 12278 }
[ "org.alfresco.jlan.server.auth.acl.AccessControlManager" ]
import org.alfresco.jlan.server.auth.acl.AccessControlManager;
import org.alfresco.jlan.server.auth.acl.*;
[ "org.alfresco.jlan" ]
org.alfresco.jlan;
861,902
public void defineEntity(String type, String name) { try { Class<? extends Entity> klass = Input.parseEntityType(this, type); InputAgent.defineEntityWithUniqueName(this, klass, name, "_", true); } catch (InputErrorException e) { return; } }
void function(String type, String name) { try { Class<? extends Entity> klass = Input.parseEntityType(this, type); InputAgent.defineEntityWithUniqueName(this, klass, name, "_", true); } catch (InputErrorException e) { return; } }
/** * Creates a new entity for the specified type and name. * If the name is already used, "_1", "_2", etc. will be appended to the name until an unused * name is found. * @param type - type of entity to be created * @param name - absolute name for the created entity */
Creates a new entity for the specified type and name. If the name is already used, "_1", "_2", etc. will be appended to the name until an unused name is found
defineEntity
{ "repo_name": "jaamsim/jaamsim", "path": "src/main/java/com/jaamsim/basicsim/JaamSimModel.java", "license": "apache-2.0", "size": 49643 }
[ "com.jaamsim.input.Input", "com.jaamsim.input.InputAgent", "com.jaamsim.input.InputErrorException" ]
import com.jaamsim.input.Input; import com.jaamsim.input.InputAgent; import com.jaamsim.input.InputErrorException;
import com.jaamsim.input.*;
[ "com.jaamsim.input" ]
com.jaamsim.input;
341,103
public static void showActivityResultError(Activity activity, int requestCode, int actResp, int errorDescription) { if (activity == null) { Log.e("BaseGameUtils", "*** No Activity. Can't show failure dialog!"); return; } Dialog errorDialog; switch (actResp) { case GamesActivityResultCodes.RESULT_APP_MISCONFIGURED: errorDialog = makeSimpleDialog(activity, activity.getString(R.string.app_misconfigured)); break; case GamesActivityResultCodes.RESULT_SIGN_IN_FAILED: errorDialog = makeSimpleDialog(activity, activity.getString(R.string.sign_in_failed)); break; case GamesActivityResultCodes.RESULT_LICENSE_FAILED: errorDialog = makeSimpleDialog(activity, activity.getString(R.string.license_failed)); break; default: // No meaningful Activity response code, so generate default Google // Play services dialog final int errorCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(activity); errorDialog = GooglePlayServicesUtil.getErrorDialog(errorCode, activity, requestCode, null); if (errorDialog == null) { // get fallback dialog Log.e("BaseGamesUtils", "No standard error dialog available. Making fallback dialog."); errorDialog = makeSimpleDialog(activity, activity.getString(errorDescription)); } } errorDialog.show(); } /** * Create a simple {@link Dialog} with an 'OK' button and a message. * * @param activity the Activity in which the Dialog should be displayed. * @param text the message to display on the Dialog. * @return an instance of {@link android.app.AlertDialog}
static void function(Activity activity, int requestCode, int actResp, int errorDescription) { if (activity == null) { Log.e(STR, STR); return; } Dialog errorDialog; switch (actResp) { case GamesActivityResultCodes.RESULT_APP_MISCONFIGURED: errorDialog = makeSimpleDialog(activity, activity.getString(R.string.app_misconfigured)); break; case GamesActivityResultCodes.RESULT_SIGN_IN_FAILED: errorDialog = makeSimpleDialog(activity, activity.getString(R.string.sign_in_failed)); break; case GamesActivityResultCodes.RESULT_LICENSE_FAILED: errorDialog = makeSimpleDialog(activity, activity.getString(R.string.license_failed)); break; default: final int errorCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(activity); errorDialog = GooglePlayServicesUtil.getErrorDialog(errorCode, activity, requestCode, null); if (errorDialog == null) { Log.e(STR, STR); errorDialog = makeSimpleDialog(activity, activity.getString(errorDescription)); } } errorDialog.show(); } /** * Create a simple {@link Dialog} with an 'OK' button and a message. * * @param activity the Activity in which the Dialog should be displayed. * @param text the message to display on the Dialog. * @return an instance of {@link android.app.AlertDialog}
/** * Show a {@link android.app.Dialog} with the correct message for a connection error. * @param activity the Activity in which the Dialog should be displayed. * @param requestCode the request code from onActivityResult. * @param actResp the response code from onActivityResult. * @param errorDescription the resource id of a String for a generic error message. */
Show a <code>android.app.Dialog</code> with the correct message for a connection error
showActivityResultError
{ "repo_name": "FauDroids/TeamBlocks", "path": "app/src/main/java/com/google/example/games/basegameutils/BaseGameUtils.java", "license": "apache-2.0", "size": 7992 }
[ "android.app.Activity", "android.app.AlertDialog", "android.app.Dialog", "android.util.Log", "com.google.android.gms.common.GooglePlayServicesUtil", "com.google.android.gms.games.GamesActivityResultCodes" ]
import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.util.Log; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.games.GamesActivityResultCodes;
import android.app.*; import android.util.*; import com.google.android.gms.common.*; import com.google.android.gms.games.*;
[ "android.app", "android.util", "com.google.android" ]
android.app; android.util; com.google.android;
202,628
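Typical call site for showActivityResultError, from an Activity's onActivityResult; RC_SIGN_IN and R.string.signin_other_error are placeholders taken from the calling app.
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == RC_SIGN_IN && resultCode != RESULT_OK) {
            // Shows the most specific dialog available for the failed sign-in attempt.
            BaseGameUtils.showActivityResultError(this, requestCode, resultCode,
                    R.string.signin_other_error);
        }
    }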
int L = 1; int BSL = (int) Math.pow(N, L - 1.0); char currentBitType; BSCTreeNode temporaryNode = new BSCTreeNode(); Stack<BSCTreeNode> stack = new Stack<>(); int pos = 0; //for(char bit : bitString.toCharArray()){ while (pos < bitString.length()) { currentBitType = bitString.charAt(pos); if (temporaryNode.numBits == 0) { temporaryNode.setBitType(currentBitType); } if (temporaryNode.numBits < BSL) { if (temporaryNode.getBitType() != currentBitType) { // Break the node down decreasing the level of L correspondingly ArrayList<BSCTreeNode> breakDown = temporaryNode.breakDown(L, currentBitType); for (BSCTreeNode node : breakDown) { stack.push(node); } stack.push(new BSCTreeNode(1, currentBitType, currentBitType == '1' ? 1 : 0, 1)); temporaryNode.clean(); } else if (currentBitType == '1') { temporaryNode.setOneBitCount(temporaryNode.getOneBitCount() + 1); } temporaryNode.numBits++; pos++; } else { // The num of bits is equal to BSL BSCTreeNode newNode = new BSCTreeNode(temporaryNode); newNode.setNodeLevel(L); stack.push(newNode); temporaryNode.clean(); while (checkSameVirtualLevelNodes(stack)) { BSCTreeNode node1 = stack.pop(); BSCTreeNode node2 = stack.pop(); BSCTreeNode merged = null; if (node1.getBitType() == node2.getBitType() && node1.getBitType() != 'm' && node2.getBitType() != 'm') { // Merge the nodes merged = new BSCTreeNode(node1.getNodeLevel() + 1, node1.getBitType(), node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); // Upgrade the level of L //L++; } else { // Merge the nodes merged = new BSCTreeNode(node1.getNodeLevel() + 1, 'm', node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); // keep the poped nodes as children. merged.setLeft(node2); merged.setRight(node1); } stack.push(merged); } } } // CATCH THE LAST TEMPORARY NODE BIT AND ADD IT INTO THE STACK BSCTreeNode newNode = new BSCTreeNode(temporaryNode); newNode.setNodeLevel(L); stack.push(newNode); temporaryNode.clean(); while (checkSameVirtualLevelNodes(stack)) { BSCTreeNode node1 = stack.pop(); BSCTreeNode node2 = stack.pop(); BSCTreeNode merged = null; if (node1.getBitType() == node2.getBitType() && node1.getBitType() != 'm' && node2.getBitType() != 'm') { // Merge the nodes merged = new BSCTreeNode(node1.getNodeLevel() + 1, node1.getBitType(), node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); // Upgrade the level of L //L++; } else { // Merge the nodes merged = new BSCTreeNode(node1.getNodeLevel() + 1, 'm', node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); // keep the poped nodes as children. merged.setLeft(node2); merged.setRight(node1); } stack.push(merged); } // JOIN THE NODES AND CREATE THE TREE while (!stack.empty()) { BSCTreeNode node1 = stack.pop(); if (stack.empty()) { // Set node1 as root node root = node1; } else { // more than one tree left BSCTreeNode node2 = stack.pop(); if (node1.getNodeLevel() == node2.getNodeLevel()) { BSCTreeNode merged = new BSCTreeNode(node1.getNodeLevel() + 1, node1.getBitType() == node2.getBitType() ? node1.getBitType() : 'm', node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); // keep nodes as chidren if (merged.getBitType() == 'm') { merged.setLeft(node2); merged.setRight(node1); } stack.push(merged); } else { if (node1.getNodeLevel() < node2.getNodeLevel()) { node1.setNodeLevel(node1.getNodeLevel() + 1); } else { node2.setNodeLevel(node2.getNodeLevel() + 1); } stack.push(node2); stack.push(node1); } } } }
int L = 1; int BSL = (int) Math.pow(N, L - 1.0); char currentBitType; BSCTreeNode temporaryNode = new BSCTreeNode(); Stack<BSCTreeNode> stack = new Stack<>(); int pos = 0; while (pos < bitString.length()) { currentBitType = bitString.charAt(pos); if (temporaryNode.numBits == 0) { temporaryNode.setBitType(currentBitType); } if (temporaryNode.numBits < BSL) { if (temporaryNode.getBitType() != currentBitType) { ArrayList<BSCTreeNode> breakDown = temporaryNode.breakDown(L, currentBitType); for (BSCTreeNode node : breakDown) { stack.push(node); } stack.push(new BSCTreeNode(1, currentBitType, currentBitType == '1' ? 1 : 0, 1)); temporaryNode.clean(); } else if (currentBitType == '1') { temporaryNode.setOneBitCount(temporaryNode.getOneBitCount() + 1); } temporaryNode.numBits++; pos++; } else { BSCTreeNode newNode = new BSCTreeNode(temporaryNode); newNode.setNodeLevel(L); stack.push(newNode); temporaryNode.clean(); while (checkSameVirtualLevelNodes(stack)) { BSCTreeNode node1 = stack.pop(); BSCTreeNode node2 = stack.pop(); BSCTreeNode merged = null; if (node1.getBitType() == node2.getBitType() && node1.getBitType() != 'm' && node2.getBitType() != 'm') { merged = new BSCTreeNode(node1.getNodeLevel() + 1, node1.getBitType(), node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); } else { merged = new BSCTreeNode(node1.getNodeLevel() + 1, 'm', node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); merged.setLeft(node2); merged.setRight(node1); } stack.push(merged); } } } BSCTreeNode newNode = new BSCTreeNode(temporaryNode); newNode.setNodeLevel(L); stack.push(newNode); temporaryNode.clean(); while (checkSameVirtualLevelNodes(stack)) { BSCTreeNode node1 = stack.pop(); BSCTreeNode node2 = stack.pop(); BSCTreeNode merged = null; if (node1.getBitType() == node2.getBitType() && node1.getBitType() != 'm' && node2.getBitType() != 'm') { merged = new BSCTreeNode(node1.getNodeLevel() + 1, node1.getBitType(), node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); } else { merged = new BSCTreeNode(node1.getNodeLevel() + 1, 'm', node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); merged.setLeft(node2); merged.setRight(node1); } stack.push(merged); } while (!stack.empty()) { BSCTreeNode node1 = stack.pop(); if (stack.empty()) { root = node1; } else { BSCTreeNode node2 = stack.pop(); if (node1.getNodeLevel() == node2.getNodeLevel()) { BSCTreeNode merged = new BSCTreeNode(node1.getNodeLevel() + 1, node1.getBitType() == node2.getBitType() ? node1.getBitType() : 'm', node1.getOneBitCount() + node2.getOneBitCount(), node1.numBits + node2.numBits); if (merged.getBitType() == 'm') { merged.setLeft(node2); merged.setRight(node1); } stack.push(merged); } else { if (node1.getNodeLevel() < node2.getNodeLevel()) { node1.setNodeLevel(node1.getNodeLevel() + 1); } else { node2.setNodeLevel(node2.getNodeLevel() + 1); } stack.push(node2); stack.push(node1); } } } }
/** * Generates the BSC-Tree * * @param bitString * @param N */
Generates the BSC-Tree
generateTree
{ "repo_name": "SIMIDAT/epm-framework", "path": "src/framework/utils/bsc_tree/BSCTree.java", "license": "mit", "size": 12486 }
[ "java.util.ArrayList", "java.util.Stack" ]
import java.util.ArrayList; import java.util.Stack;
import java.util.*;
[ "java.util" ]
java.util;
1,186,385
public TreeSet<Cycle> getCycleSet(){ TreeSet<Cycle> cp = new TreeSet<Cycle> (); for (Cycle cycle : cycleMap) cp.add(new Cycle(cycle)); return cp; } public int getMaxArg(){return maxArg;}
TreeSet<Cycle> function(){ TreeSet<Cycle> cp = new TreeSet<Cycle> (); for (Cycle cycle : cycleMap) cp.add(new Cycle(cycle)); return cp; } public int getMaxArg(){return maxArg;}
/** * Returns an ordered set of all disjoint cycles * of this permutation * @return a cycle set */
Returns an ordered set of all disjoint cycles of this permutation
getCycleSet
{ "repo_name": "gabrielmueller/aljebra-topo", "path": "Algebra/src/group/SymmetricGroup.java", "license": "lgpl-3.0", "size": 27639 }
[ "java.util.TreeSet" ]
import java.util.TreeSet;
import java.util.*;
[ "java.util" ]
java.util;
1,869,490
public void testReceive_UnconnectedReadonly() throws Exception { assertFalse(this.channel1.isConnected()); ByteBuffer dst = ByteBuffer.allocateDirect(CAPACITY_NORMAL) .asReadOnlyBuffer(); assertTrue(dst.isReadOnly()); try { this.channel1.receive(dst); fail("Should throw an IllegalArgumentException here."); //$NON-NLS-1$ } catch (IllegalArgumentException e) { // OK. } }
void function() throws Exception { assertFalse(this.channel1.isConnected()); ByteBuffer dst = ByteBuffer.allocateDirect(CAPACITY_NORMAL) .asReadOnlyBuffer(); assertTrue(dst.isReadOnly()); try { this.channel1.receive(dst); fail(STR); } catch (IllegalArgumentException e) { } }
/** * Test method for 'DatagramChannelImpl.receive(ByteBuffer)' */
Test method for 'DatagramChannelImpl.receive(ByteBuffer)'
testReceive_UnconnectedReadonly
{ "repo_name": "AdmireTheDistance/android_libcore", "path": "harmony-tests/src/test/java/org/apache/harmony/tests/java/nio/channels/DatagramChannelTest.java", "license": "gpl-2.0", "size": 88836 }
[ "java.nio.ByteBuffer" ]
import java.nio.ByteBuffer;
import java.nio.*;
[ "java.nio" ]
java.nio;
2,454,836
@SuppressWarnings("unchecked") // safe because we only read, not write public IterableSubject valuesForKey(@Nullable Object key) { return check("valuesForKey(%s)", key).that(((Multimap<Object, Object>) actual).get(key)); }
@SuppressWarnings(STR) IterableSubject function(@Nullable Object key) { return check(STR, key).that(((Multimap<Object, Object>) actual).get(key)); }
/** * Returns a context-aware {@link Subject} for making assertions about the values for the given * key within the {@link Multimap}. * * <p>This method performs no checks on its own and cannot cause test failures. Subsequent * assertions must be chained onto this method call to test properties of the {@link Multimap}. */
Returns a context-aware <code>Subject</code> for making assertions about the values for the given key within the <code>Multimap</code>. This method performs no checks on its own and cannot cause test failures. Subsequent assertions must be chained onto this method call to test properties of the <code>Multimap</code>
valuesForKey
{ "repo_name": "google/truth", "path": "core/src/main/java/com/google/common/truth/MultimapSubject.java", "license": "apache-2.0", "size": 36196 }
[ "com.google.common.collect.Multimap", "org.checkerframework.checker.nullness.qual.Nullable" ]
import com.google.common.collect.Multimap; import org.checkerframework.checker.nullness.qual.Nullable;
import com.google.common.collect.*; import org.checkerframework.checker.nullness.qual.*;
[ "com.google.common", "org.checkerframework.checker" ]
com.google.common; org.checkerframework.checker;
80,128
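Self-contained Truth example of chaining an assertion onto valuesForKey(), as the javadoc above requires (the call by itself checks nothing).
    import static com.google.common.truth.Truth.assertThat;

    import com.google.common.collect.ImmutableMultimap;
    import com.google.common.collect.Multimap;
    import org.junit.Test;

    public class FruitMultimapTest {
        @Test
        public void fruitValues() {
            Multimap<String, String> multimap =
                    ImmutableMultimap.of("fruit", "apple", "fruit", "pear", "veg", "kale");
            // The chained containsExactly(...) is what actually performs the check.
            assertThat(multimap).valuesForKey("fruit").containsExactly("apple", "pear");
        }
    }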
default String obtainCookieValue(final Cookie cookie, final HttpServletRequest request) { return obtainCookieValue(cookie.getValue(), request); }
default String obtainCookieValue(final Cookie cookie, final HttpServletRequest request) { return obtainCookieValue(cookie.getValue(), request); }
/** * Obtain cookie value. * * @param cookie the cookie * @param request the request * @return the string */
Obtain cookie value
obtainCookieValue
{ "repo_name": "rrenomeron/cas", "path": "api/cas-server-core-api-cookie/src/main/java/org/apereo/cas/web/cookie/CookieValueManager.java", "license": "apache-2.0", "size": 1316 }
[ "javax.servlet.http.Cookie", "javax.servlet.http.HttpServletRequest" ]
import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.*;
[ "javax.servlet" ]
javax.servlet;
1,116,934
private int getQuantile(final float p) { return mBrightnesses[(int) (mBrightnesses.length * p)]; } } private static final class IrisBoundary { private Bitmap mImage; private int mXCenter; private int mYCenter; private int mRadius; private Map<Integer, Integer> mLeftPoints = new HashMap<>(); private Map<Integer, Integer> mRightPoints = new HashMap<>(); private IrisBoundary(final Bitmap image, final int xCenter, final int yCenter, final int radius) { mImage = image; mXCenter = xCenter; mYCenter = yCenter; mRadius = radius; }
int function(final float p) { return mBrightnesses[(int) (mBrightnesses.length * p)]; } } private static final class IrisBoundary { private Bitmap mImage; private int mXCenter; private int mYCenter; private int mRadius; private Map<Integer, Integer> mLeftPoints = new HashMap<>(); private Map<Integer, Integer> mRightPoints = new HashMap<>(); private IrisBoundary(final Bitmap image, final int xCenter, final int yCenter, final int radius) { mImage = image; mXCenter = xCenter; mYCenter = yCenter; mRadius = radius; }
/** * Get the p-quantile of the brightnesses. Prerequisite: calculateStatistics must have been run before. * * @param p the quantile parameter. * @return the p-quantile of the brightnesses (not considering equality). */
Get the p-quantile of the brightnesses. Prerequisite: calculateStatistics must have been run before
getQuantile
{ "repo_name": "jeisfeld/Augendiagnose", "path": "AugendiagnoseIdea/augendiagnoseLib/src/main/java/de/jeisfeld/augendiagnoselib/util/imagefile/PupilAndIrisDetector.java", "license": "gpl-2.0", "size": 48563 }
[ "android.graphics.Bitmap", "java.util.HashMap", "java.util.Map" ]
import android.graphics.Bitmap; import java.util.HashMap; import java.util.Map;
import android.graphics.*; import java.util.*;
[ "android.graphics", "java.util" ]
android.graphics; java.util;
2,773,517
@Test public void shouldNotNotFailOnNotFoundUsername() throws Exception { ForgotPasswordFormController controller = new ForgotPasswordFormController(); MockHttpServletRequest request = new MockHttpServletRequest(); request.setMethod("POST"); request.addParameter("uname", "validuser"); HttpServletResponse response = new MockHttpServletResponse(); controller.handleRequest(request, response); Assert.assertEquals("validuser", request.getAttribute("uname")); Assert.assertEquals("valid secret question", request.getAttribute("secretQuestion")); }
void function() throws Exception { ForgotPasswordFormController controller = new ForgotPasswordFormController(); MockHttpServletRequest request = new MockHttpServletRequest(); request.setMethod("POST"); request.addParameter("uname", STR); HttpServletResponse response = new MockHttpServletResponse(); controller.handleRequest(request, response); Assert.assertEquals(STR, request.getAttribute("uname")); Assert.assertEquals(STR, request.getAttribute(STR)); }
/** * Check to see if the admin's secret question comes back * * @throws Exception */
Check to see if the admin's secret question comes back
shouldNotNotFailOnNotFoundUsername
{ "repo_name": "sintjuri/openmrs-core", "path": "web/src/test/java/org/openmrs/web/ForgotPasswordFormControllerTest.java", "license": "mpl-2.0", "size": 10165 }
[ "javax.servlet.http.HttpServletResponse", "org.junit.Assert", "org.openmrs.web.controller.ForgotPasswordFormController", "org.springframework.mock.web.MockHttpServletRequest", "org.springframework.mock.web.MockHttpServletResponse" ]
import javax.servlet.http.HttpServletResponse; import org.junit.Assert; import org.openmrs.web.controller.ForgotPasswordFormController; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse;
import javax.servlet.http.*; import org.junit.*; import org.openmrs.web.controller.*; import org.springframework.mock.web.*;
[ "javax.servlet", "org.junit", "org.openmrs.web", "org.springframework.mock" ]
javax.servlet; org.junit; org.openmrs.web; org.springframework.mock;
568,578
Map<String, OAuthConsumerToken> loadRememberedTokens(HttpServletRequest request, HttpServletResponse response);
Map<String, OAuthConsumerToken> loadRememberedTokens(HttpServletRequest request, HttpServletResponse response);
/** * Load any remembered tokens for the given request. * * @param request The request. * @param response The response. * @return The tokens (mapped by resource id), or null if none are remembered. */
Load any remembered tokens for the given request
loadRememberedTokens
{ "repo_name": "xiangzhuyuan/spring-security-oauth", "path": "spring-security-oauth/src/main/java/org/springframework/security/oauth/consumer/rememberme/OAuthRememberMeServices.java", "license": "apache-2.0", "size": 1102 }
[ "java.util.Map", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse", "org.springframework.security.oauth.consumer.OAuthConsumerToken" ]
import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.security.oauth.consumer.OAuthConsumerToken;
import java.util.*; import javax.servlet.http.*; import org.springframework.security.oauth.consumer.*;
[ "java.util", "javax.servlet", "org.springframework.security" ]
java.util; javax.servlet; org.springframework.security;
822,870
void enterFieldAccess(@NotNull Java8Parser.FieldAccessContext ctx); void exitFieldAccess(@NotNull Java8Parser.FieldAccessContext ctx);
void enterFieldAccess(@NotNull Java8Parser.FieldAccessContext ctx); void exitFieldAccess(@NotNull Java8Parser.FieldAccessContext ctx);
/** * Exit a parse tree produced by {@link Java8Parser#fieldAccess}. * @param ctx the parse tree */
Exit a parse tree produced by <code>Java8Parser#fieldAccess</code>
exitFieldAccess
{ "repo_name": "IsThisThePayneResidence/intellidots", "path": "src/main/java/ua/edu/hneu/ast/parsers/Java8Listener.java", "license": "gpl-3.0", "size": 95845 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
608,471
public StoreRef convertStoreRef(StoreRef storeRef) { return new StoreRef(StoreRef.PROTOCOL_WORKSPACE, storeRef.getIdentifier()); }
StoreRef function(StoreRef storeRef) { return new StoreRef(StoreRef.PROTOCOL_WORKSPACE, storeRef.getIdentifier()); }
/** * Create Version Store Ref * * @param store ref * @return store ref for version store */
Create Version Store Ref
convertStoreRef
{ "repo_name": "dnacreative/records-management", "path": "rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/version/RecordableVersionServiceImpl.java", "license": "lgpl-3.0", "size": 33420 }
[ "org.alfresco.service.cmr.repository.StoreRef" ]
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.*;
[ "org.alfresco.service" ]
org.alfresco.service;
2,643,763
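A small sketch of the conversion above, assuming an instance of the service is available (e.g. injected); the store identifier is a placeholder and is preserved, while the protocol is forced to workspace.
    StoreRef versionRef = new StoreRef("versionStore", "version2Store"); // placeholder identifier
    StoreRef workspaceRef = recordableVersionService.convertStoreRef(versionRef);
    // workspaceRef now refers to workspace://version2Store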
public void renameJobEntryIfNameCollides( JobEntryCopy je ) { // First see if the name changed. // If so, we need to verify that the name is not already used in the // job. // String newname = je.getName(); // See if this name exists in the other job entries // boolean found; int nr = 1; do { found = false; for ( JobEntryCopy copy : jobcopies ) { if ( copy != je && copy.getName().equalsIgnoreCase( newname ) && copy.getNr() == 0 ) { found = true; } } if ( found ) { nr++; newname = je.getName() + " (" + nr + ")"; } } while ( found ); // Rename if required. // je.setName( newname ); }
void function( JobEntryCopy je ) { int nr = 1; do { found = false; for ( JobEntryCopy copy : jobcopies ) { if ( copy != je && copy.getName().equalsIgnoreCase( newname ) && copy.getNr() == 0 ) { found = true; } } if ( found ) { nr++; newname = je.getName() + STR + nr + ")"; } } while ( found ); }
/** * See if the name of the supplied job entry copy doesn't collide with any other job entry copy in the job. * * @param je * The job entry copy to verify the name for. */
See if the name of the supplied job entry copy doesn't collide with any other job entry copy in the job
renameJobEntryIfNameCollides
{ "repo_name": "ma459006574/pentaho-kettle", "path": "engine/src/org/pentaho/di/job/JobMeta.java", "license": "apache-2.0", "size": 85011 }
[ "org.pentaho.di.job.entry.JobEntryCopy" ]
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.job.entry.*;
[ "org.pentaho.di" ]
org.pentaho.di;
6,069
void validateCheckpointUpload(CheckpointSignature sig) throws IOException { if (ckptState != CheckpointStates.ROLLED_EDITS) { throw new IOException("Namenode is not expecting an new image " + ckptState); } // verify token long modtime = getEditLog().getFsEditTime(); if (sig.editsTime != modtime) { throw new IOException("Namenode has an edit log with timestamp of " + DATE_FORM.format(new Date(modtime)) + " but new checkpoint was created using editlog " + " with timestamp " + DATE_FORM.format(new Date(sig.editsTime)) + ". Checkpoint Aborted."); } sig.validateStorageInfo(this); ckptState = FSImage.CheckpointStates.UPLOAD_START; }
void validateCheckpointUpload(CheckpointSignature sig) throws IOException { if (ckptState != CheckpointStates.ROLLED_EDITS) { throw new IOException(STR + ckptState); } long modtime = getEditLog().getFsEditTime(); if (sig.editsTime != modtime) { throw new IOException(STR + DATE_FORM.format(new Date(modtime)) + STR + STR + DATE_FORM.format(new Date(sig.editsTime)) + STR); } sig.validateStorageInfo(this); ckptState = FSImage.CheckpointStates.UPLOAD_START; }
/** * This is called just before a new checkpoint is uploaded to the * namenode. */
This is called just before a new checkpoint is uploaded to the namenode
validateCheckpointUpload
{ "repo_name": "wzhuo918/release-1.1.2-MDP", "path": "src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java", "license": "apache-2.0", "size": 66702 }
[ "java.io.IOException", "java.util.Date" ]
import java.io.IOException; import java.util.Date;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
904,887
Page<ReceivedCaseFile> getPageResultFromSearchInput(FormClass formClass, int pageNumber);
Page<ReceivedCaseFile> getPageResultFromSearchInput(FormClass formClass, int pageNumber);
/** * Returns page matching receivedCaseFile held in formClass and pageNumber * @param formClass holds receivedCaseFile in it. ReceivedCaseFile was created by submitting the form * @param pageNumber indicates which matching page should be returned * @return matching page */
Returns page matching receivedCaseFile held in formClass and pageNumber
getPageResultFromSearchInput
{ "repo_name": "bugielmarek/rep_one", "path": "crudone/src/main/java/com/bugielmarek/crudone/services/ReceivedCaseFileService.java", "license": "apache-2.0", "size": 1483 }
[ "com.bugielmarek.crudone.models.FormClass", "com.bugielmarek.crudone.models.ReceivedCaseFile", "org.springframework.data.domain.Page" ]
import com.bugielmarek.crudone.models.FormClass; import com.bugielmarek.crudone.models.ReceivedCaseFile; import org.springframework.data.domain.Page;
import com.bugielmarek.crudone.models.*; import org.springframework.data.domain.*;
[ "com.bugielmarek.crudone", "org.springframework.data" ]
com.bugielmarek.crudone; org.springframework.data;
2,487,715
Sell selectSell(int height, int width);
Sell selectSell(int height, int width);
/** * Method for selecting a Sell. * * @param height height coordinate. * @param width width coordinate. * @return the found Sell. */
Method for selecting a Sell
selectSell
{ "repo_name": "AlexanderZf44/APermyakov", "path": "chapter_004/src/main/java/ru/apermyakov/testtask/board/BoardSelect.java", "license": "apache-2.0", "size": 427 }
[ "ru.apermyakov.testtask.cell.Sell" ]
import ru.apermyakov.testtask.cell.Sell;
import ru.apermyakov.testtask.cell.*;
[ "ru.apermyakov.testtask" ]
ru.apermyakov.testtask;
1,153,696
public static boolean isDownwardAxisOfMany(int axis) { return ((Axis.DESCENDANTORSELF == axis) || (Axis.DESCENDANT == axis) || (Axis.FOLLOWING == axis) // || (Axis.FOLLOWINGSIBLING == axis) || (Axis.PRECEDING == axis) // || (Axis.PRECEDINGSIBLING == axis) ); }
static boolean function(int axis) { return ((Axis.DESCENDANTORSELF == axis) (Axis.DESCENDANT == axis) (Axis.FOLLOWING == axis) (Axis.PRECEDING == axis) ); }
/** * Tell if the given axis goes downward. Bogus name, if you can think of * a better one, please do tell. This really has to do with inverting * attribute axis. * @param axis One of Axis.XXX. * @return true if the axis is not a child axis and does not go up from * the axis root. */
Tell if the given axis goes downward. Bogus name, if you can think of a better one, please do tell. This really has to do with inverting attribute axis
isDownwardAxisOfMany
{ "repo_name": "YouDiSN/OpenJDK-Research", "path": "jdk9/jaxp/src/java.xml/share/classes/com/sun/org/apache/xpath/internal/axes/WalkerFactory.java", "license": "gpl-2.0", "size": 60051 }
[ "com.sun.org.apache.xml.internal.dtm.Axis" ]
import com.sun.org.apache.xml.internal.dtm.Axis;
import com.sun.org.apache.xml.internal.dtm.*;
[ "com.sun.org" ]
com.sun.org;
616,929
protected final Object mapRow(ResultSet rs, int rowNum, Object[] parameters, Map context) throws SQLException { return mapRow(rs, rowNum); }
final Object function(ResultSet rs, int rowNum, Object[] parameters, Map context) throws SQLException { return mapRow(rs, rowNum); }
/** * This method is implemented to invoke the simpler mapRow * template method, ignoring parameters. * @see #mapRow(ResultSet, int) */
This method is implemented to invoke the simpler mapRow template method, ignoring parameters
mapRow
{ "repo_name": "cbeams-archive/spring-framework-2.5.x", "path": "src/org/springframework/jdbc/object/MappingSqlQuery.java", "license": "apache-2.0", "size": 2706 }
[ "java.sql.ResultSet", "java.sql.SQLException", "java.util.Map" ]
import java.sql.ResultSet; import java.sql.SQLException; import java.util.Map;
import java.sql.*; import java.util.*;
[ "java.sql", "java.util" ]
java.sql; java.util;
1,617,282
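MappingSqlQuery is normally used by subclassing it and overriding the simpler two-argument mapRow, exactly as the comment above says; a hedged sketch against a hypothetical USERS(ID, NAME) table, with the DataSource assumed to be configured elsewhere.
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Types;
    import javax.sql.DataSource;
    import org.springframework.jdbc.core.SqlParameter;
    import org.springframework.jdbc.object.MappingSqlQuery;

    public class UserByIdQuery extends MappingSqlQuery {
        public UserByIdQuery(DataSource ds) {
            super(ds, "SELECT id, name FROM users WHERE id = ?"); // hypothetical table
            declareParameter(new SqlParameter("id", Types.INTEGER));
            compile();
        }

        protected Object mapRow(ResultSet rs, int rowNum) throws SQLException {
            // Only this simpler template method is implemented; the parameter-aware
            // overload in the record above delegates to it.
            return rs.getInt("id") + ":" + rs.getString("name");
        }
    }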
public void removeTab(AbstractTabPanel content) { tabPanel.remove(content); content.setVisible(false); content.onTabPanelRemoved(); }
void function(AbstractTabPanel content) { tabPanel.remove(content); content.setVisible(false); content.onTabPanelRemoved(); }
/** * Removes the tab containing the panel passed as a parameter. Right after hiding the panel, * its onTabPanelRemoved method is executed * @param content the panel to be removed */
Removes the tab containing the panel passed as a parameter. Right after hiding the panel, its onTabPanelRemoved method is executed
removeTab
{ "repo_name": "RickieES/localizethat", "path": "src/net/localizethat/gui/MainWindow.java", "license": "mpl-2.0", "size": 19281 }
[ "net.localizethat.gui.tabpanels.AbstractTabPanel" ]
import net.localizethat.gui.tabpanels.AbstractTabPanel;
import net.localizethat.gui.tabpanels.*;
[ "net.localizethat.gui" ]
net.localizethat.gui;
506,797
private ArrayList<Segment10> tableSegments(TableEntry10 table) { ArrayList<Segment10> tableSegments = new ArrayList<>(); for (Segment10 segment : _segments) { if (Arrays.equals(segment.key(), table.key())) { tableSegments.add(segment); } } Collections.sort(tableSegments, (x,y)->Long.signum(y.sequence() - x.sequence())); return tableSegments; }
ArrayList<Segment10> function(TableEntry10 table) { ArrayList<Segment10> tableSegments = new ArrayList<>(); for (Segment10 segment : _segments) { if (Arrays.equals(segment.key(), table.key())) { tableSegments.add(segment); } } Collections.sort(tableSegments, (x,y)->Long.signum(y.sequence() - x.sequence())); return tableSegments; }
/** * Returns segments for a table in reverse sequence order. * * The reverse order minimizes extra page reads, because older pages * don't need to be read. */
Returns segments for a table in reverse sequence order. The reverse order minimizes extra page reads, because older pages don't need to be read
tableSegments
{ "repo_name": "baratine/baratine", "path": "kraken/src/main/java/com/caucho/v5/kelp/upgrade/UpgradeScanner10.java", "license": "gpl-2.0", "size": 31066 }
[ "java.util.ArrayList", "java.util.Arrays", "java.util.Collections" ]
import java.util.ArrayList; import java.util.Arrays; import java.util.Collections;
import java.util.*;
[ "java.util" ]
java.util;
1,350,886