Column                 Type       Min     Max
method                 string     13      441k
clean_method           string     7       313k
doc                    string     17      17.3k
comment                string     3       1.42k
method_name            string     1       273
extra                  dict       -       -
imports                sequence   -       -
imports_info           string     19      34.8k
cluster_imports_info   string     15      3.66k
libraries              sequence   -       -
libraries_info         string     6       661
id                     int64      0       2.92M

For string columns, Min/Max are text lengths in characters; for id they are value bounds.
@Override public boolean write(Page page, OutputTarget target) throws UnsupportedFormatVersionException { return run(page, target, false); }
boolean function(Page page, OutputTarget target) throws UnsupportedFormatVersionException { return run(page, target, false); }
/** * Writes the given Page object to an XML file. * * @param page Page object * @param target FileTarget representing an XML file * @return Returns true if written successfully, false otherwise. */
Writes the given Page object to an XML file
write
{ "repo_name": "PRImA-Research-Lab/prima-core-libs", "path": "java/PrimaDla/src/org/primaresearch/dla/page/io/xml/XmlPageWriter_2019_07_15.java", "license": "apache-2.0", "size": 34412 }
[ "org.primaresearch.dla.page.Page", "org.primaresearch.dla.page.io.OutputTarget", "org.primaresearch.io.UnsupportedFormatVersionException" ]
import org.primaresearch.dla.page.Page; import org.primaresearch.dla.page.io.OutputTarget; import org.primaresearch.io.UnsupportedFormatVersionException;
import org.primaresearch.dla.page.*; import org.primaresearch.dla.page.io.*; import org.primaresearch.io.*;
[ "org.primaresearch.dla", "org.primaresearch.io" ]
org.primaresearch.dla; org.primaresearch.io;
2,657,637
private Map<String,PortletPreference> clonePreferences( Map <String,PortletPreference> preferences) { if (preferences == null) { return null; } Map <String,PortletPreference> copy = new HashMap<String,PortletPreference>(preferences.size()); for (PortletPreference p : preferences.values()) { copy.put(p.getName(), p.clone()); } return copy; }
Map<String,PortletPreference> function( Map <String,PortletPreference> preferences) { if (preferences == null) { return null; } Map <String,PortletPreference> copy = new HashMap<String,PortletPreference>(preferences.size()); for (PortletPreference p : preferences.values()) { copy.put(p.getName(), p.clone()); } return copy; }
/** * Clones a PortletPreference map. This method performs a deep clone on * the passed-in portlet preferences map. Every PortletPreference object * in the map are cloned (via the <code>PortletPreference.clone()</code> * method) and injected into the new map. * * @param preferences the portlet preferences map to clone. * @return a deep-cloned copy of the portlet preferences map. */
Clones a PortletPreference map. This method performs a deep clone on the passed-in portlet preferences map. Every PortletPreference object in the map are cloned (via the <code>PortletPreference.clone()</code> method) and injected into the new map
clonePreferences
{ "repo_name": "apache/portals-pluto", "path": "pluto-portal-driver/src/main/java/org/apache/pluto/driver/container/DefaultPortletPreferencesService.java", "license": "apache-2.0", "size": 10648 }
[ "java.util.HashMap", "java.util.Map", "org.apache.pluto.container.PortletPreference" ]
import java.util.HashMap; import java.util.Map; import org.apache.pluto.container.PortletPreference;
import java.util.*; import org.apache.pluto.container.*;
[ "java.util", "org.apache.pluto" ]
java.util; org.apache.pluto;
368,490
@Override public int tightMarshal1(OpenWireFormat wireFormat, Object o, BooleanStream bs) throws IOException { MessageId info = (MessageId) o; int rc = super.tightMarshal1(wireFormat, o, bs); rc += tightMarshalString1(info.getTextView(), bs); rc += tightMarshalCachedObject1(wireFormat, info.getProducerId(), bs); rc += tightMarshalLong1(wireFormat, info.getProducerSequenceId(), bs); rc += tightMarshalLong1(wireFormat, info.getBrokerSequenceId(), bs); return rc + 0; }
int function(OpenWireFormat wireFormat, Object o, BooleanStream bs) throws IOException { MessageId info = (MessageId) o; int rc = super.tightMarshal1(wireFormat, o, bs); rc += tightMarshalString1(info.getTextView(), bs); rc += tightMarshalCachedObject1(wireFormat, info.getProducerId(), bs); rc += tightMarshalLong1(wireFormat, info.getProducerSequenceId(), bs); rc += tightMarshalLong1(wireFormat, info.getBrokerSequenceId(), bs); return rc + 0; }
/** * Write the booleans that this object uses to a BooleanStream */
Write the booleans that this object uses to a BooleanStream
tightMarshal1
{ "repo_name": "apache/activemq-openwire", "path": "openwire-legacy/src/main/java/org/apache/activemq/openwire/codec/v10/MessageIdMarshaller.java", "license": "apache-2.0", "size": 5346 }
[ "java.io.IOException", "org.apache.activemq.openwire.codec.BooleanStream", "org.apache.activemq.openwire.codec.OpenWireFormat", "org.apache.activemq.openwire.commands.MessageId" ]
import java.io.IOException; import org.apache.activemq.openwire.codec.BooleanStream; import org.apache.activemq.openwire.codec.OpenWireFormat; import org.apache.activemq.openwire.commands.MessageId;
import java.io.*; import org.apache.activemq.openwire.codec.*; import org.apache.activemq.openwire.commands.*;
[ "java.io", "org.apache.activemq" ]
java.io; org.apache.activemq;
521,685
public void openArchive(String archiveName) { try { losDataEditor = new GUILOSDataEditor( archiveName, LOSEditorProperties.getBoardDirectory(), sharedBoardMetadata); } catch (IOException e) { System.err.println("Cannot open the board archive: " + archiveName); e.printStackTrace(); } // load the terrain images loadTerrainGraphics(); // try to open LOS data frame.setStatusBarText("Reading or creating the LOS data..."); losDataEditor.readLOSData(); // create an empty geo board if no LOS data - for now if(losDataEditor.getMap() == null) { createNewMap(); } openMap(); }
void function(String archiveName) { try { losDataEditor = new GUILOSDataEditor( archiveName, LOSEditorProperties.getBoardDirectory(), sharedBoardMetadata); } catch (IOException e) { System.err.println(STR + archiveName); e.printStackTrace(); } loadTerrainGraphics(); frame.setStatusBarText(STR); losDataEditor.readLOSData(); if(losDataEditor.getMap() == null) { createNewMap(); } openMap(); }
/** * Open a VASL archive for editing LOS data * @param archiveName fully qualified name of the board archive */
Open a VASL archive for editing LOS data
openArchive
{ "repo_name": "vasl-developers/los-gui", "path": "src/VASL/LOSGUI/LOSEditorJComponent.java", "license": "lgpl-2.1", "size": 70394 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,529,450
void setIndexVersionAndSchemaMD5Hex(IdAndVersion tableId, Long viewCRC, String schemaMD5Hex);
void setIndexVersionAndSchemaMD5Hex(IdAndVersion tableId, Long viewCRC, String schemaMD5Hex);
/** * Set the current version of the index and the schema MD5, both of which are used * to determine if the index is up-to-date. * * @param viewCRC * @param schemaMD5Hex */
Set the current version of the index and the schema MD5, both of which are used to determine if the index is up-to-date
setIndexVersionAndSchemaMD5Hex
{ "repo_name": "xschildw/Synapse-Repository-Services", "path": "services/repository-managers/src/main/java/org/sagebionetworks/repo/manager/table/TableIndexManager.java", "license": "apache-2.0", "size": 9590 }
[ "org.sagebionetworks.repo.model.entity.IdAndVersion" ]
import org.sagebionetworks.repo.model.entity.IdAndVersion;
import org.sagebionetworks.repo.model.entity.*;
[ "org.sagebionetworks.repo" ]
org.sagebionetworks.repo;
639,132
public PutCalendarResponse deleteCalendarJob(DeleteCalendarJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, MLRequestConverters::deleteCalendarJob, options, PutCalendarResponse::fromXContent, Collections.emptySet()); }
PutCalendarResponse function(DeleteCalendarJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, MLRequestConverters::deleteCalendarJob, options, PutCalendarResponse::fromXContent, Collections.emptySet()); }
/** * Removes Machine Learning Job(s) from a calendar * <p> * For additional info * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-job.html"> * ML Delete calendar job documentation</a> * * @param request The request * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return The {@link PutCalendarResponse} containing the updated calendar * @throws IOException when there is a serialization issue sending the request or receiving the response */
Removes Machine Learning Job(s) from a calendar For additional info see ML Delete calendar job documentation
deleteCalendarJob
{ "repo_name": "nknize/elasticsearch", "path": "client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java", "license": "apache-2.0", "size": 133260 }
[ "java.io.IOException", "java.util.Collections", "org.elasticsearch.client.ml.DeleteCalendarJobRequest", "org.elasticsearch.client.ml.PutCalendarResponse" ]
import java.io.IOException; import java.util.Collections; import org.elasticsearch.client.ml.DeleteCalendarJobRequest; import org.elasticsearch.client.ml.PutCalendarResponse;
import java.io.*; import java.util.*; import org.elasticsearch.client.ml.*;
[ "java.io", "java.util", "org.elasticsearch.client" ]
java.io; java.util; org.elasticsearch.client;
1,179,318
DatabaseInner innerModel(); interface Definition extends DefinitionStages.Blank, DefinitionStages.WithParentResource, DefinitionStages.WithCreate { } interface DefinitionStages { interface Blank extends WithParentResource { }
DatabaseInner innerModel(); interface Definition extends DefinitionStages.Blank, DefinitionStages.WithParentResource, DefinitionStages.WithCreate { } interface DefinitionStages { interface Blank extends WithParentResource { }
/** * Gets the inner com.azure.resourcemanager.postgresql.fluent.models.DatabaseInner object. * * @return the inner object. */
Gets the inner com.azure.resourcemanager.postgresql.fluent.models.DatabaseInner object
innerModel
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/postgresql/azure-resourcemanager-postgresql/src/main/java/com/azure/resourcemanager/postgresql/models/Database.java", "license": "mit", "size": 5729 }
[ "com.azure.resourcemanager.postgresql.fluent.models.DatabaseInner" ]
import com.azure.resourcemanager.postgresql.fluent.models.DatabaseInner;
import com.azure.resourcemanager.postgresql.fluent.models.*;
[ "com.azure.resourcemanager" ]
com.azure.resourcemanager;
2,897,458
public void sendFile(final HttpResponseStatus status, final String path, final int max_age) throws IOException { if (max_age < 0) { throw new IllegalArgumentException("Negative max_age=" + max_age + " for path=" + path); } if (!chan.isConnected()) { done(); return; } RandomAccessFile file; try { file = new RandomAccessFile(path, "r"); } catch (FileNotFoundException e) { logWarn("File not found: " + e.getMessage()); if (querystring != null) { querystring.remove("png"); // Avoid potential recursion. } notFound(); return; } final long length = file.length(); { final DefaultHttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status); final String mimetype = guessMimeTypeFromUri(path); response.setHeader(HttpHeaders.Names.CONTENT_TYPE, mimetype == null ? "text/plain" : mimetype); final long mtime = new File(path).lastModified(); if (mtime > 0) { response.setHeader(HttpHeaders.Names.AGE, (System.currentTimeMillis() - mtime) / 1000); } else { logWarn("Found a file with mtime=" + mtime + ": " + path); } response.setHeader(HttpHeaders.Names.CACHE_CONTROL, max_age == 0 ? "no-cache" : "max-age=" + max_age); HttpHeaders.setContentLength(response, length); chan.write(response); }
void function(final HttpResponseStatus status, final String path, final int max_age) throws IOException { if (max_age < 0) { throw new IllegalArgumentException(STR + max_age + STR + path); } if (!chan.isConnected()) { done(); return; } RandomAccessFile file; try { file = new RandomAccessFile(path, "r"); } catch (FileNotFoundException e) { logWarn(STR + e.getMessage()); if (querystring != null) { querystring.remove("png"); } notFound(); return; } final long length = file.length(); { final DefaultHttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status); final String mimetype = guessMimeTypeFromUri(path); response.setHeader(HttpHeaders.Names.CONTENT_TYPE, mimetype == null ? STR : mimetype); final long mtime = new File(path).lastModified(); if (mtime > 0) { response.setHeader(HttpHeaders.Names.AGE, (System.currentTimeMillis() - mtime) / 1000); } else { logWarn(STR + mtime + STR + path); } response.setHeader(HttpHeaders.Names.CACHE_CONTROL, max_age == 0 ? STR : STR + max_age); HttpHeaders.setContentLength(response, length); chan.write(response); }
/** * Send a file (with zero-copy) to the client. * This method doesn't provide any security guarantee. The caller is * responsible for the argument they pass in. * @param status The status of the request (e.g. 200 OK or 404 Not Found). * @param path The path to the file to send to the client. * @param max_age The expiration time of this entity, in seconds. This is * not a timestamp, it's how old the resource is allowed to be in the client * cache. See RFC 2616 section 14.9 for more information. Use 0 to disable * caching. */
Send a file (with zero-copy) to the client. This method doesn't provide any security guarantee. The caller is responsible for the argument they pass in
sendFile
{ "repo_name": "rmarshasatx/opentsdb", "path": "src/tsd/HttpQuery.java", "license": "gpl-3.0", "size": 27430 }
[ "java.io.File", "java.io.FileNotFoundException", "java.io.IOException", "java.io.RandomAccessFile", "org.jboss.netty.handler.codec.http.DefaultHttpResponse", "org.jboss.netty.handler.codec.http.HttpHeaders", "org.jboss.netty.handler.codec.http.HttpResponseStatus", "org.jboss.netty.handler.codec.http.HttpVersion" ]
import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import org.jboss.netty.handler.codec.http.DefaultHttpResponse; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.codec.http.HttpVersion;
import java.io.*; import org.jboss.netty.handler.codec.http.*;
[ "java.io", "org.jboss.netty" ]
java.io; org.jboss.netty;
2,134,934
@NotNull PsiType createTypeFromText(@NotNull String text, @Nullable PsiElement context) throws IncorrectOperationException;
PsiType createTypeFromText(@NotNull String text, @Nullable PsiElement context) throws IncorrectOperationException;
/** * Creates a Java type from the specified text. * * @param text the text of the type to create (for example, a primitive type keyword, an array * declaration or the name of a class). * @param context the PSI element used as context for resolving the reference. * @return the created type instance. * @throws IncorrectOperationException if the text does not specify a valid type. */
Creates a Java type from the specified text
createTypeFromText
{ "repo_name": "ThiagoGarciaAlves/intellij-community", "path": "java/java-psi-api/src/com/intellij/psi/PsiJavaParserFacade.java", "license": "apache-2.0", "size": 11036 }
[ "com.intellij.util.IncorrectOperationException", "org.jetbrains.annotations.NotNull", "org.jetbrains.annotations.Nullable" ]
import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable;
import com.intellij.util.*; import org.jetbrains.annotations.*;
[ "com.intellij.util", "org.jetbrains.annotations" ]
com.intellij.util; org.jetbrains.annotations;
957,090
private static Rule fromResourceToRule(Resource rrule, int tipo) { String uri = rrule.isAnon() ? "" : rrule.getURI(); Rule rule = new Rule(uri); if (tipo==Rule.RULE_PERMISSION) rule = new Permission(uri); if (tipo==Rule.RULE_PROHIBITION) rule = new Prohibition(uri); if (tipo==Rule.RULE_DUTY) rule = new Duty(uri); rule.setKindOfRule(tipo); //está directamente expresado if (RDFUtils.isOfKind(rrule, ODRLRDF.RACTION.getURI())) { Action a =new Action(rrule.getURI()); a = RDFUtils.enrichAction(a); rule.addAction(a); } String sassignee=RDFUtils.getFirstPropertyValue(rrule, ODRLRDF.PASSIGNEE); if (!sassignee.isEmpty()) rule.setAssignee(new Party(sassignee)); String sassigner=RDFUtils.getFirstPropertyValue(rrule, ODRLRDF.PASSIGNER); if (!sassigner.isEmpty()) rule.setAssignee(new Party(sassigner)); String target1 = RDFUtils.getFirstPropertyValue(rrule, ODRLRDF.PTARGET); if (!target1.isEmpty()) rule.setTarget(target1); List<String> sactions1 = RDFUtils.getAllPropertyStrings(rrule, ODRLRDF.PACTION); for (String saction : sactions1) { Action a = new Action(saction); a = RDFUtils.enrichAction(a); rule.addAction(a); } List<Resource> sconstraints = RDFUtils.getAllPropertyResources(rrule, ODRLRDF.PCONSTRAINT); for (Resource sconstraint : sconstraints) { Constraint c = new Constraint(sconstraint.getURI()); c=RDFUtils.enrichConstraint(c); rule.addConstraint(c); } List<Resource> sduties = RDFUtils.getAllPropertyResources(rrule, ODRLRDF.PDUTY); for (Resource rduty : sduties) { Constraint c = new Constraint(rduty.getURI()); c=RDFUtils.enrichConstraint(c); String action = RDFUtils.getFirstPropertyValue(rduty, ODRLRDF.PACTION); // c.label = LD.getFirstPropertyValue(rrule, RDFS.label); Action a = new Action(action); a = RDFUtils.enrichAction(a); c.setLabel(a.getLabel("en")); if (a.getURI().equals("")) { } } return rule; }
static Rule function(Resource rrule, int tipo) { String uri = rrule.isAnon() ? STRenSTR")) { } } return rule; }
/** * Creates an ODRL Rule from a Jena Resource * @param rrule Resource * @param tipo ? * @return an ODRL Rule or null. */
Creates an ODRL Rule from a Jena Resource
fromResourceToRule
{ "repo_name": "oanc/odrlapi", "path": "src/main/java/odrlmodel/ODRLRDF.java", "license": "mit", "size": 24080 }
[ "com.hp.hpl.jena.rdf.model.Resource" ]
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.*;
[ "com.hp.hpl" ]
com.hp.hpl;
1,350,725
static void encode(Encoder e, Schema s, JsonNode n) throws IOException { switch (s.getType()) { case RECORD: for (Field f : s.getFields()) { String name = f.name(); JsonNode v = n.get(name); if (v == null) { v = f.defaultValue(); } if (v == null) { throw new AvroTypeException("No default value for: " + name); } encode(e, f.schema(), v); } break; case ENUM: e.writeEnum(s.getEnumOrdinal(n.getTextValue())); break; case ARRAY: e.writeArrayStart(); e.setItemCount(n.size()); Schema i = s.getElementType(); for (JsonNode node : n) { e.startItem(); encode(e, i, node); } e.writeArrayEnd(); break; case MAP: e.writeMapStart(); e.setItemCount(n.size()); Schema v = s.getValueType(); for (Iterator<String> it = n.getFieldNames(); it.hasNext();) { e.startItem(); String key = it.next(); e.writeString(key); encode(e, v, n.get(key)); } e.writeMapEnd(); break; case UNION: e.writeIndex(0); encode(e, s.getTypes().get(0), n); break; case FIXED: byte[] bb = n.getTextValue().getBytes("ISO-8859-1"); if (bb.length != s.getFixedSize()) { bb = Arrays.copyOf(bb, s.getFixedSize()); } e.writeFixed(bb); break; case STRING: e.writeString(n.getTextValue()); break; case BYTES: e.writeBytes(n.getTextValue().getBytes("ISO-8859-1")); break; case INT: e.writeInt(n.getIntValue()); break; case LONG: e.writeLong(n.getLongValue()); break; case FLOAT: e.writeFloat((float) n.getDoubleValue()); break; case DOUBLE: e.writeDouble(n.getDoubleValue()); break; case BOOLEAN: e.writeBoolean(n.getBooleanValue()); break; case NULL: e.writeNull(); break; } }
static void encode(Encoder e, Schema s, JsonNode n) throws IOException { switch (s.getType()) { case RECORD: for (Field f : s.getFields()) { String name = f.name(); JsonNode v = n.get(name); if (v == null) { v = f.defaultValue(); } if (v == null) { throw new AvroTypeException(STR + name); } encode(e, f.schema(), v); } break; case ENUM: e.writeEnum(s.getEnumOrdinal(n.getTextValue())); break; case ARRAY: e.writeArrayStart(); e.setItemCount(n.size()); Schema i = s.getElementType(); for (JsonNode node : n) { e.startItem(); encode(e, i, node); } e.writeArrayEnd(); break; case MAP: e.writeMapStart(); e.setItemCount(n.size()); Schema v = s.getValueType(); for (Iterator<String> it = n.getFieldNames(); it.hasNext();) { e.startItem(); String key = it.next(); e.writeString(key); encode(e, v, n.get(key)); } e.writeMapEnd(); break; case UNION: e.writeIndex(0); encode(e, s.getTypes().get(0), n); break; case FIXED: byte[] bb = n.getTextValue().getBytes(STR); if (bb.length != s.getFixedSize()) { bb = Arrays.copyOf(bb, s.getFixedSize()); } e.writeFixed(bb); break; case STRING: e.writeString(n.getTextValue()); break; case BYTES: e.writeBytes(n.getTextValue().getBytes(STR)); break; case INT: e.writeInt(n.getIntValue()); break; case LONG: e.writeLong(n.getLongValue()); break; case FLOAT: e.writeFloat((float) n.getDoubleValue()); break; case DOUBLE: e.writeDouble(n.getDoubleValue()); break; case BOOLEAN: e.writeBoolean(n.getBooleanValue()); break; case NULL: e.writeNull(); break; } }
/** * Encodes the given Json node <tt>n</tt> on to the encoder <tt>e</tt> * according to the schema <tt>s</tt>. * @param e The encoder to encode into. * @param s The schema for the object being encoded. * @param n The Json node to encode. * @throws IOException */
Encodes the given Json node n on to the encoder e according to the schema s
encode
{ "repo_name": "peterholc/avro", "path": "lang/java/src/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java", "license": "apache-2.0", "size": 13851 }
[ "java.io.IOException", "java.util.Arrays", "java.util.Iterator", "org.apache.avro.AvroTypeException", "org.apache.avro.Schema", "org.apache.avro.io.Encoder", "org.codehaus.jackson.JsonNode" ]
import java.io.IOException; import java.util.Arrays; import java.util.Iterator; import org.apache.avro.AvroTypeException; import org.apache.avro.Schema; import org.apache.avro.io.Encoder; import org.codehaus.jackson.JsonNode;
import java.io.*; import java.util.*; import org.apache.avro.*; import org.apache.avro.io.*; import org.codehaus.jackson.*;
[ "java.io", "java.util", "org.apache.avro", "org.codehaus.jackson" ]
java.io; java.util; org.apache.avro; org.codehaus.jackson;
648,382
@SuppressWarnings("unchecked") public H setImageDrawable(int viewId, Drawable drawable) { ImageView view = retrieveView(viewId); view.setImageDrawable(drawable); return (H) this; }
@SuppressWarnings(STR) H function(int viewId, Drawable drawable) { ImageView view = retrieveView(viewId); view.setImageDrawable(drawable); return (H) this; }
/** * Will set the image of an ImageView from a drawable. * @param viewId The view id. * @param drawable The image drawable. * @return The BaseAdapterHelper for chaining. */
Will set the image of an ImageView from a drawable
setImageDrawable
{ "repo_name": "alafighting/QuickAndroid", "path": "quickandroid/src/main/java/cn/jeesoft/qa/ui/adapter/BaseAdapterHelper.java", "license": "gpl-3.0", "size": 15569 }
[ "android.graphics.drawable.Drawable", "android.widget.ImageView" ]
import android.graphics.drawable.Drawable; import android.widget.ImageView;
import android.graphics.drawable.*; import android.widget.*;
[ "android.graphics", "android.widget" ]
android.graphics; android.widget;
2,094,902
@Override public void send( String text ) throws WebsocketNotConnectedException { if( text == null ) throw new IllegalArgumentException( "Cannot send 'null' data to a WebSocketImpl." ); send( draft.createFrames( text, role == Role.CLIENT ) ); }
void function( String text ) throws WebsocketNotConnectedException { if( text == null ) throw new IllegalArgumentException( STR ); send( draft.createFrames( text, role == Role.CLIENT ) ); }
/** * Send Text data to the other end. * * @throws IllegalArgumentException * @throws NotYetConnectedException */
Send Text data to the other end
send
{ "repo_name": "Chyrain/V5ClientSDK-Android", "path": "V5SDKDemo/src/org/java_websocket/WebSocketImpl.java", "license": "gpl-3.0", "size": 24115 }
[ "org.java_websocket.exceptions.WebsocketNotConnectedException" ]
import org.java_websocket.exceptions.WebsocketNotConnectedException;
import org.java_websocket.exceptions.*;
[ "org.java_websocket.exceptions" ]
org.java_websocket.exceptions;
708,975
@SuppressWarnings("ThrowableResultOfMethodCallIgnored") public void testJdbcConnectionDisabled() throws Exception { IgniteConfiguration cfg = baseConfiguration(); cfg.setClientConnectorConfiguration(new ClientConnectorConfiguration() .setJdbcEnabled(false) .setOdbcEnabled(true) .setThinClientEnabled(true)); Ignition.start(cfg);
@SuppressWarnings(STR) void function() throws Exception { IgniteConfiguration cfg = baseConfiguration(); cfg.setClientConnectorConfiguration(new ClientConnectorConfiguration() .setJdbcEnabled(false) .setOdbcEnabled(true) .setThinClientEnabled(true)); Ignition.start(cfg);
/** * Checks if JDBC connection disabled and others are enabled, JDBC doesn't work. * * @throws Exception If failed. */
Checks if JDBC connection disabled and others are enabled, JDBC doesn't work
testJdbcConnectionDisabled
{ "repo_name": "psadusumilli/ignite", "path": "modules/indexing/src/test/java/org/apache/ignite/internal/processors/client/ClientConnectorConfigurationValidationSelfTest.java", "license": "apache-2.0", "size": 14505 }
[ "org.apache.ignite.Ignition", "org.apache.ignite.configuration.ClientConnectorConfiguration", "org.apache.ignite.configuration.IgniteConfiguration" ]
import org.apache.ignite.Ignition; import org.apache.ignite.configuration.ClientConnectorConfiguration; import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.*; import org.apache.ignite.configuration.*;
[ "org.apache.ignite" ]
org.apache.ignite;
2,405,273
static public <C extends JComponent & MenuElement> JMenuItem addMenuItem(final Action action, final C topLevelMenu, final List<String> path) throws IllegalArgumentException { return addMenuItem(new JMenuItem(action), topLevelMenu, path); }
static <C extends JComponent & MenuElement> JMenuItem function(final Action action, final C topLevelMenu, final List<String> path) throws IllegalArgumentException { return addMenuItem(new JMenuItem(action), topLevelMenu, path); }
/** * Adds a menu item to the passed menu. The path should be an alternation of group and menu * within that menu. All items within the same group will be grouped together inside separators. * Menus will be created as needed and groups' names can be <code>null</code>. NOTE: items added * with this method (even with null groups) will always be separated from items added directly. * * @param action the action to perform. * @param topLevelMenu where to add the menu item. * @param path where to add the menu item. * @return the newly created item. * @throws IllegalArgumentException if path length is not odd. */
Adds a menu item to the passed menu. The path should be an alternation of group and menu within that menu. All items within the same group will be grouped together inside separators. with this method (even with null groups) will always be separated from items added directly
addMenuItem
{ "repo_name": "mbshopM/openconcerto", "path": "OpenConcerto/src/org/openconcerto/ui/MenuUtils.java", "license": "gpl-3.0", "size": 8329 }
[ "java.util.List", "javax.swing.Action", "javax.swing.JComponent", "javax.swing.JMenuItem", "javax.swing.MenuElement" ]
import java.util.List; import javax.swing.Action; import javax.swing.JComponent; import javax.swing.JMenuItem; import javax.swing.MenuElement;
import java.util.*; import javax.swing.*;
[ "java.util", "javax.swing" ]
java.util; javax.swing;
438,193
boolean iterateContent(@NotNull ContentIterator iterator);
boolean iterateContent(@NotNull ContentIterator iterator);
/** * Iterates all files and directories under content roots skipping excluded and ignored files and directories. * * @param iterator the iterator receiving the files. * @return false if files processing was stopped ({@link ContentIterator#processFile(VirtualFile)} returned false) */
Iterates all files and directories under content roots skipping excluded and ignored files and directories
iterateContent
{ "repo_name": "idea4bsd/idea4bsd", "path": "platform/projectModel-api/src/com/intellij/openapi/roots/FileIndex.java", "license": "apache-2.0", "size": 4233 }
[ "org.jetbrains.annotations.NotNull" ]
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.*;
[ "org.jetbrains.annotations" ]
org.jetbrains.annotations;
332,466
private static <T> void addDependencies(final Vertex<T> vertex, final List<Vertex<T>> vertices) { if (!vertices.contains(vertex)) { vertices.add(vertex); for (Vertex<T> v : vertex.getDependencies()) { addDependencies(v, vertices); } } }
static <T> void function(final Vertex<T> vertex, final List<Vertex<T>> vertices) { if (!vertices.contains(vertex)) { vertices.add(vertex); for (Vertex<T> v : vertex.getDependencies()) { addDependencies(v, vertices); } } }
/** * Recursively add a vertex and all of its dependencies to a list of * vertices * * @param vertex Vertex to be added. * @param vertices Existing list of vertices. */
Recursively add a vertex and all of its dependencies to a list of vertices
addDependencies
{ "repo_name": "kulinski/myfaces", "path": "impl/src/main/java/org/apache/myfaces/config/util/DirectedAcyclicGraphVerifier.java", "license": "apache-2.0", "size": 6151 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,938,903
public void addEntityLocation(final Entity e) { if (this.clip.entites.contains(e)) { // this.clip.entites. } }
void function(final Entity e) { if (this.clip.entites.contains(e)) { } }
/** * Prida zaznam o pozicii entity. * * @param e */
Prida zaznam o pozicii entity
addEntityLocation
{ "repo_name": "dobrakmato/pexel-platform", "path": "slave/src/main/java/eu/matejkormuth/pexel/slave/bukkit/cinematics/V3CameraFrame.java", "license": "gpl-3.0", "size": 4599 }
[ "org.bukkit.entity.Entity" ]
import org.bukkit.entity.Entity;
import org.bukkit.entity.*;
[ "org.bukkit.entity" ]
org.bukkit.entity;
2,555,151
private void enqueueInfoBarAnimation(InfoBar infoBar, View toShow, int animationType) { InfoBarTransitionInfo info = new InfoBarTransitionInfo(infoBar, toShow, animationType); mInfoBarTransitions.add(info); processPendingInfoBars(); }
void function(InfoBar infoBar, View toShow, int animationType) { InfoBarTransitionInfo info = new InfoBarTransitionInfo(infoBar, toShow, animationType); mInfoBarTransitions.add(info); processPendingInfoBars(); }
/** * Enqueue a new animation to run and kicks off the animation sequence. */
Enqueue a new animation to run and kicks off the animation sequence
enqueueInfoBarAnimation
{ "repo_name": "patrickm/chromium.src", "path": "chrome/android/java/src/org/chromium/chrome/browser/infobar/InfoBarContainer.java", "license": "bsd-3-clause", "size": 18592 }
[ "android.view.View" ]
import android.view.View;
import android.view.*;
[ "android.view" ]
android.view;
1,784,349
private void search() { // change the cursor to "waiting cursor" for this operation this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try { if (this.anyFiltersEnabled()) { String title = NbBundle.getMessage(this.getClass(), "FileSearchPanel.search.results.title", ++resultWindowCount); String pathText = NbBundle.getMessage(this.getClass(), "FileSearchPanel.search.results.pathText"); // try to get the number of matches first Case currentCase = Case.getCurrentCase(); // get the most updated case long totalMatches = 0; List<AbstractFile> contentList = null; try { SleuthkitCase tskDb = currentCase.getSleuthkitCase(); //ResultSet rs = tempDb.runQuery(this.getQuery("count(*) as TotalMatches")); contentList = tskDb.findAllFilesWhere(this.getQuery()); } catch (TskCoreException ex) { Logger logger = Logger.getLogger(this.getClass().getName()); logger.log(Level.WARNING, "Error while trying to get the number of matches.", ex); //NON-NLS } if (contentList == null) { contentList = Collections.<AbstractFile>emptyList(); } final TopComponent searchResultWin = DataResultTopComponent.createInstance(title, pathText, new TableFilterNode(new SearchNode(contentList), true), contentList.size()); searchResultWin.requestActive(); // make it the active top component if (totalMatches > 10000) { // show info String msg = NbBundle.getMessage(this.getClass(), "FileSearchPanel.search.results.msg", totalMatches); String details = NbBundle.getMessage(this.getClass(), "FileSearchPanel.search.results.details"); MessageNotifyUtil.Notify.info(msg, details); } } else { throw new FilterValidationException( NbBundle.getMessage(this.getClass(), "FileSearchPanel.search.exception.noFilterSelected.msg")); } } catch (FilterValidationException ex) { NotifyDescriptor d = new NotifyDescriptor.Message( NbBundle.getMessage(this.getClass(), "FileSearchPanel.search.validationErr.msg", ex.getMessage())); DialogDisplayer.getDefault().notify(d); } finally { this.setCursor(null); } }
void function() { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try { if (this.anyFiltersEnabled()) { String title = NbBundle.getMessage(this.getClass(), STR, ++resultWindowCount); String pathText = NbBundle.getMessage(this.getClass(), STR); Case currentCase = Case.getCurrentCase(); long totalMatches = 0; List<AbstractFile> contentList = null; try { SleuthkitCase tskDb = currentCase.getSleuthkitCase(); contentList = tskDb.findAllFilesWhere(this.getQuery()); } catch (TskCoreException ex) { Logger logger = Logger.getLogger(this.getClass().getName()); logger.log(Level.WARNING, STR, ex); } if (contentList == null) { contentList = Collections.<AbstractFile>emptyList(); } final TopComponent searchResultWin = DataResultTopComponent.createInstance(title, pathText, new TableFilterNode(new SearchNode(contentList), true), contentList.size()); searchResultWin.requestActive(); if (totalMatches > 10000) { String msg = NbBundle.getMessage(this.getClass(), STR, totalMatches); String details = NbBundle.getMessage(this.getClass(), STR); MessageNotifyUtil.Notify.info(msg, details); } } else { throw new FilterValidationException( NbBundle.getMessage(this.getClass(), STR)); } } catch (FilterValidationException ex) { NotifyDescriptor d = new NotifyDescriptor.Message( NbBundle.getMessage(this.getClass(), STR, ex.getMessage())); DialogDisplayer.getDefault().notify(d); } finally { this.setCursor(null); } }
/** * Action when the "Search" button is pressed. * */
Action when the "Search" button is pressed
search
{ "repo_name": "sidheshenator/autopsy", "path": "Core/src/org/sleuthkit/autopsy/filesearch/FileSearchPanel.java", "license": "apache-2.0", "size": 10316 }
[ "java.awt.Cursor", "java.util.Collections", "java.util.List", "java.util.logging.Level", "org.openide.DialogDisplayer", "org.openide.NotifyDescriptor", "org.openide.util.NbBundle", "org.openide.windows.TopComponent", "org.sleuthkit.autopsy.casemodule.Case", "org.sleuthkit.autopsy.corecomponents.DataResultTopComponent", "org.sleuthkit.autopsy.corecomponents.TableFilterNode", "org.sleuthkit.autopsy.coreutils.Logger", "org.sleuthkit.autopsy.coreutils.MessageNotifyUtil", "org.sleuthkit.autopsy.filesearch.FileSearchFilter", "org.sleuthkit.datamodel.AbstractFile", "org.sleuthkit.datamodel.SleuthkitCase", "org.sleuthkit.datamodel.TskCoreException" ]
import java.awt.Cursor; import java.util.Collections; import java.util.List; import java.util.logging.Level; import org.openide.DialogDisplayer; import org.openide.NotifyDescriptor; import org.openide.util.NbBundle; import org.openide.windows.TopComponent; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.corecomponents.TableFilterNode; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.filesearch.FileSearchFilter; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException;
import java.awt.*; import java.util.*; import java.util.logging.*; import org.openide.*; import org.openide.util.*; import org.openide.windows.*; import org.sleuthkit.autopsy.casemodule.*; import org.sleuthkit.autopsy.corecomponents.*; import org.sleuthkit.autopsy.coreutils.*; import org.sleuthkit.autopsy.filesearch.*; import org.sleuthkit.datamodel.*;
[ "java.awt", "java.util", "org.openide", "org.openide.util", "org.openide.windows", "org.sleuthkit.autopsy", "org.sleuthkit.datamodel" ]
java.awt; java.util; org.openide; org.openide.util; org.openide.windows; org.sleuthkit.autopsy; org.sleuthkit.datamodel;
1,650,146
void purgeQueue(ActiveMQDestination destination) throws Exception; QueueViewFacade getQueue(String name) throws Exception; TopicViewFacade getTopic(String name) throws Exception;
void purgeQueue(ActiveMQDestination destination) throws Exception; QueueViewFacade getQueue(String name) throws Exception; TopicViewFacade getTopic(String name) throws Exception;
/** * Get the view of the topic with the specified name. * * @param name * not <code>null</code> * @return <code>null</code> if no topic with this name exists * @throws Exception */
Get the view of the topic with the specified name
getTopic
{ "repo_name": "alexeev/jboss-fuse-mirror", "path": "tooling/tooling-activemq-facade/src/main/java/io/fabric8/activemq/facade/BrokerFacade.java", "license": "apache-2.0", "size": 6377 }
[ "org.apache.activemq.command.ActiveMQDestination" ]
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.*;
[ "org.apache.activemq" ]
org.apache.activemq;
2,453,424
public String getText() { Object text = getComponentModel().text; if (text == null) { Object value = getData(); if (value != null) { text = value.toString(); } } return I18nUtilities.format(null, text); }
String function() { Object text = getComponentModel().text; if (text == null) { Object value = getData(); if (value != null) { text = value.toString(); } } return I18nUtilities.format(null, text); }
/** * Return the button text. Returns: * <ul> * <li>user text if set; otherwise</li> * <li>shared text if set;</li> * <li>user value if set;</li> * <li>bean value if present; or</li> * <li>shared value.</li> * </ul> * * @return the button text */
Return the button text. Returns: user text if set; otherwise shared text if set; user value if set; bean value if present; or shared value.
getText
{ "repo_name": "marksreeves/wcomponents", "path": "wcomponents-core/src/main/java/com/github/bordertech/wcomponents/WButton.java", "license": "gpl-3.0", "size": 25564 }
[ "com.github.bordertech.wcomponents.util.I18nUtilities" ]
import com.github.bordertech.wcomponents.util.I18nUtilities;
import com.github.bordertech.wcomponents.util.*;
[ "com.github.bordertech" ]
com.github.bordertech;
189,302
@Path("/vtns/{name}.json") @DELETE public Response deleteVTN(@PathParam("name") String name);
@Path(STR) Response function(@PathParam("name") String name);
/** * Delete a VTN * @param name * @return */
Delete a VTN
deleteVTN
{ "repo_name": "dana-i2cat/opennaas-routing-nfv", "path": "extensions/bundles/opendaylight.vtn/src/main/java/org/opennaas/extensions/opendaylight/vtn/protocol/client/IOpenDaylightvtnAPIClient.java", "license": "lgpl-3.0", "size": 5733 }
[ "javax.ws.rs.Path", "javax.ws.rs.PathParam", "javax.ws.rs.core.Response" ]
import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.core.Response;
import javax.ws.rs.*; import javax.ws.rs.core.*;
[ "javax.ws" ]
javax.ws;
1,608,984
OperationCompletionRS finishTestItem(String testItemId, FinishTestItemRQ finishExecutionRQ, String username);
OperationCompletionRS finishTestItem(String testItemId, FinishTestItemRQ finishExecutionRQ, String username);
/** * Updates {@link TestItem} instance * * @param testItemId * Test item ID * @param finishExecutionRQ * Request with finish Test Item data * @param username * RQ principal * @return OperationCompletionRS */
Updates <code>TestItem</code> instance
finishTestItem
{ "repo_name": "talisman1234/service-api", "path": "src/main/java/com/epam/ta/reportportal/core/item/FinishTestItemHandler.java", "license": "gpl-3.0", "size": 1557 }
[ "com.epam.ta.reportportal.ws.model.FinishTestItemRQ", "com.epam.ta.reportportal.ws.model.OperationCompletionRS" ]
import com.epam.ta.reportportal.ws.model.FinishTestItemRQ; import com.epam.ta.reportportal.ws.model.OperationCompletionRS;
import com.epam.ta.reportportal.ws.model.*;
[ "com.epam.ta" ]
com.epam.ta;
2,310,929
public void setTableDetails(TableDescriptor td) throws StandardException { if (td == null) { return; } this.columnDescriptorList = td.getColumnDescriptorList(); this.numberOfColumns = this.columnDescriptorList.size(); this.pkIndexes = GemFireXDUtils.getPrimaryKeyColumns(td); }
void function(TableDescriptor td) throws StandardException { if (td == null) { return; } this.columnDescriptorList = td.getColumnDescriptorList(); this.numberOfColumns = this.columnDescriptorList.size(); this.pkIndexes = GemFireXDUtils.getPrimaryKeyColumns(td); }
/** * Keeping this method as public as the ALTER TABLE thread can directly call * this function on this object and all the details pertaining to the table * can be reset. * * @throws StandardException */
Keeping this method as public as the ALTER TABLE thread can directly call this function on this object and all the details pertaining to the table can be reset
setTableDetails
{ "repo_name": "SnappyDataInc/snappy-store", "path": "gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/engine/ddl/GfxdCacheLoader.java", "license": "apache-2.0", "size": 10211 }
[ "com.pivotal.gemfirexd.internal.engine.distributed.utils.GemFireXDUtils", "com.pivotal.gemfirexd.internal.iapi.error.StandardException", "com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TableDescriptor" ]
import com.pivotal.gemfirexd.internal.engine.distributed.utils.GemFireXDUtils; import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TableDescriptor;
import com.pivotal.gemfirexd.internal.engine.distributed.utils.*; import com.pivotal.gemfirexd.internal.iapi.error.*; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.*;
[ "com.pivotal.gemfirexd" ]
com.pivotal.gemfirexd;
2,281,586
private void sendData(CommsByteBuffer request, short jfapPriority, boolean requireReply, SITransaction tran, int outboundSegmentType, int outboundNoReplySegmentType, int replySegmentType) throws SIResourceException, SISessionUnavailableException, SINotPossibleInCurrentConfigurationException, SIIncorrectCallException, SIConnectionUnavailableException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "sendData", new Object[] { request, jfapPriority, requireReply, tran, outboundSegmentType, outboundNoReplySegmentType, replySegmentType }); if (requireReply) { // Pass on call to server CommsByteBuffer reply = jfapExchange(request, outboundSegmentType, jfapPriority, false); try { short err = reply.getCommandCompletionCode(replySegmentType); if (err != CommsConstants.SI_NO_EXCEPTION) { checkFor_SISessionUnavailableException(reply, err); checkFor_SISessionDroppedException(reply, err); checkFor_SIConnectionUnavailableException(reply, err); checkFor_SIConnectionDroppedException(reply, err); checkFor_SIResourceException(reply, err); checkFor_SIConnectionLostException(reply, err); checkFor_SILimitExceededException(reply, err); checkFor_SINotAuthorizedException(reply, err); checkFor_SIIncorrectCallException(reply, err); checkFor_SINotPossibleInCurrentConfigurationException(reply, err); checkFor_SIErrorException(reply, err); defaultChecker(reply, err); } } finally { if (reply != null) reply.release(); } } else { jfapSend(request, outboundNoReplySegmentType, jfapPriority, false, ThrottlingPolicy.BLOCK_THREAD); // Update the lowest priority if (tran != null) { ((Transaction) tran).updateLowestMessagePriority(jfapPriority); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "sendData"); }
void function(CommsByteBuffer request, short jfapPriority, boolean requireReply, SITransaction tran, int outboundSegmentType, int outboundNoReplySegmentType, int replySegmentType) throws SIResourceException, SISessionUnavailableException, SINotPossibleInCurrentConfigurationException, SIIncorrectCallException, SIConnectionUnavailableException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, STR, new Object[] { request, jfapPriority, requireReply, tran, outboundSegmentType, outboundNoReplySegmentType, replySegmentType }); if (requireReply) { CommsByteBuffer reply = jfapExchange(request, outboundSegmentType, jfapPriority, false); try { short err = reply.getCommandCompletionCode(replySegmentType); if (err != CommsConstants.SI_NO_EXCEPTION) { checkFor_SISessionUnavailableException(reply, err); checkFor_SISessionDroppedException(reply, err); checkFor_SIConnectionUnavailableException(reply, err); checkFor_SIConnectionDroppedException(reply, err); checkFor_SIResourceException(reply, err); checkFor_SIConnectionLostException(reply, err); checkFor_SILimitExceededException(reply, err); checkFor_SINotAuthorizedException(reply, err); checkFor_SIIncorrectCallException(reply, err); checkFor_SINotPossibleInCurrentConfigurationException(reply, err); checkFor_SIErrorException(reply, err); defaultChecker(reply, err); } } finally { if (reply != null) reply.release(); } } else { jfapSend(request, outboundNoReplySegmentType, jfapPriority, false, ThrottlingPolicy.BLOCK_THREAD); if (tran != null) { ((Transaction) tran).updateLowestMessagePriority(jfapPriority); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, STR); }
/** * This helper method is used to send the final or only part of a message to our peer. It takes * care of whether we should be exchanging the message and deals with the exceptions returned. * * @param request The request buffer * @param jfapPriority The JFap priority to send the message * @param requireReply Whether we require a reply (or fire-and-forget it) * @param tran The transaction being used to send the message (may be null) * @param outboundSegmentType The segment type to exchange with * @param outboundNoReplySegmentType The segment type to fire-and-forget with * @param replySegmentType The segment type to expect on replies * * @throws SIResourceException * @throws SISessionUnavailableException * @throws SINotPossibleInCurrentConfigurationException * @throws SIIncorrectCallException * @throws SIConnectionUnavailableException */
This helper method is used to send the final or only part of a message to our peer. It takes care of whether we should be exchanging the message and deals with the exceptions returned
sendData
{ "repo_name": "kgibm/open-liberty", "path": "dev/com.ibm.ws.messaging.comms.client/src/com/ibm/ws/sib/comms/client/ProducerSessionProxy.java", "license": "epl-1.0", "size": 30105 }
[ "com.ibm.websphere.ras.TraceComponent", "com.ibm.websphere.sib.exception.SIErrorException", "com.ibm.websphere.sib.exception.SIIncorrectCallException", "com.ibm.websphere.sib.exception.SINotPossibleInCurrentConfigurationException", "com.ibm.websphere.sib.exception.SIResourceException", "com.ibm.ws.sib.comms.CommsConstants", "com.ibm.ws.sib.comms.common.CommsByteBuffer", "com.ibm.ws.sib.jfapchannel.Conversation", "com.ibm.ws.sib.utils.ras.SibTr", "com.ibm.wsspi.sib.core.SITransaction", "com.ibm.wsspi.sib.core.exception.SIConnectionDroppedException", "com.ibm.wsspi.sib.core.exception.SIConnectionLostException", "com.ibm.wsspi.sib.core.exception.SIConnectionUnavailableException", "com.ibm.wsspi.sib.core.exception.SILimitExceededException", "com.ibm.wsspi.sib.core.exception.SINotAuthorizedException", "com.ibm.wsspi.sib.core.exception.SISessionDroppedException", "com.ibm.wsspi.sib.core.exception.SISessionUnavailableException" ]
import com.ibm.websphere.ras.TraceComponent; import com.ibm.websphere.sib.exception.SIErrorException; import com.ibm.websphere.sib.exception.SIIncorrectCallException; import com.ibm.websphere.sib.exception.SINotPossibleInCurrentConfigurationException; import com.ibm.websphere.sib.exception.SIResourceException; import com.ibm.ws.sib.comms.CommsConstants; import com.ibm.ws.sib.comms.common.CommsByteBuffer; import com.ibm.ws.sib.jfapchannel.Conversation; import com.ibm.ws.sib.utils.ras.SibTr; import com.ibm.wsspi.sib.core.SITransaction; import com.ibm.wsspi.sib.core.exception.SIConnectionDroppedException; import com.ibm.wsspi.sib.core.exception.SIConnectionLostException; import com.ibm.wsspi.sib.core.exception.SIConnectionUnavailableException; import com.ibm.wsspi.sib.core.exception.SILimitExceededException; import com.ibm.wsspi.sib.core.exception.SINotAuthorizedException; import com.ibm.wsspi.sib.core.exception.SISessionDroppedException; import com.ibm.wsspi.sib.core.exception.SISessionUnavailableException;
import com.ibm.websphere.ras.*; import com.ibm.websphere.sib.exception.*; import com.ibm.ws.sib.comms.*; import com.ibm.ws.sib.comms.common.*; import com.ibm.ws.sib.jfapchannel.*; import com.ibm.ws.sib.utils.ras.*; import com.ibm.wsspi.sib.core.*; import com.ibm.wsspi.sib.core.exception.*;
[ "com.ibm.websphere", "com.ibm.ws", "com.ibm.wsspi" ]
com.ibm.websphere; com.ibm.ws; com.ibm.wsspi;
1,388,817
private void simulateSlowResolveFinished() throws InterruptedException, TimeoutException { // Allow the slow Resolution to finish, waiting for it to complete. mLatestSlowResolveSearch.finishResolve(); assertLoadedSearchTermMatches(mLatestSlowResolveSearch.getSearchTerm()); }
void function() throws InterruptedException, TimeoutException { mLatestSlowResolveSearch.finishResolve(); assertLoadedSearchTermMatches(mLatestSlowResolveSearch.getSearchTerm()); }
/** * Simulates a slow response for the most recent {@link FakeSlowResolveSearch} set up * by calling simulateSlowResolveSearch. * @throws TimeoutException * @throws InterruptedException */
Simulates a slow response for the most recent <code>FakeSlowResolveSearch</code> set up by calling simulateSlowResolveSearch
simulateSlowResolveFinished
{ "repo_name": "was4444/chromium.src", "path": "chrome/android/javatests/src/org/chromium/chrome/browser/contextualsearch/ContextualSearchManagerTest.java", "license": "bsd-3-clause", "size": 103579 }
[ "java.util.concurrent.TimeoutException" ]
import java.util.concurrent.TimeoutException;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
1,448,164
static protected void addAppenderFile(String patternLayout, String fileName, boolean appendOnFile) { FileAppender a1 = new FileAppender(); if (currentLogger != null) { try { currentLogger.removeAppender(getAppenderName(APPENDER_FILE)); a1.setName(getAppenderName(APPENDER_FILE)); a1.setLayout(getLayout(patternLayout)); a1.setAppend(appendOnFile); a1.setFile(fileName); a1.activateOptions(); currentLogger.addAppender(a1); } catch (Exception e) { SilverTrace.error("silvertrace", "SilverLog.addAppenderFile()", "silvertrace.ERR_CANT_ADD_APPENDER", "File " + patternLayout + "," + fileName, e); } } }
static void function(String patternLayout, String fileName, boolean appendOnFile) { FileAppender a1 = new FileAppender(); if (currentLogger != null) { try { currentLogger.removeAppender(getAppenderName(APPENDER_FILE)); a1.setName(getAppenderName(APPENDER_FILE)); a1.setLayout(getLayout(patternLayout)); a1.setAppend(appendOnFile); a1.setFile(fileName); a1.activateOptions(); currentLogger.addAppender(a1); } catch (Exception e) { SilverTrace.error(STR, STR, STR, STR + patternLayout + "," + fileName, e); } } }
/** * Add a new file appender. If an appender with the same type have been previously set, delete it * and replace it with the new created one. * @param patternLayout the things displayed in this appender, could be one of the LAYOUT_... * constants * @param fileName full-path name of the file where the trace are written * @param appendOnFile true to append at the end of the existing file (if ther is one), false to * remove old file before writting */
Add a new file appender. If an appender with the same type have been previously set, delete it and replace it with the new created one
addAppenderFile
{ "repo_name": "NicolasEYSSERIC/Silverpeas-Core", "path": "lib-core/src/main/java/com/stratelia/silverpeas/silvertrace/SilverLog.java", "license": "agpl-3.0", "size": 20798 }
[ "org.apache.log4j.FileAppender" ]
import org.apache.log4j.FileAppender;
import org.apache.log4j.*;
[ "org.apache.log4j" ]
org.apache.log4j;
1,334,749
SOfficeFactory SOF = SOfficeFactory.getFactory( (XMultiServiceFactory)tParam.getMSF() ); try { log.println( "creating a Spreadsheet document" ); xSheetDoc = SOF.createCalcDoc(null); } catch ( com.sun.star.uno.Exception e ) { // Some exception occurs.FAILED e.printStackTrace( log ); throw new StatusException( "Couldn't create document", e ); } }
SOfficeFactory SOF = SOfficeFactory.getFactory( (XMultiServiceFactory)tParam.getMSF() ); try { log.println( STR ); xSheetDoc = SOF.createCalcDoc(null); } catch ( com.sun.star.uno.Exception e ) { e.printStackTrace( log ); throw new StatusException( STR, e ); } }
/** * Creates Spreadsheet document. */
Creates Spreadsheet document
initialize
{ "repo_name": "qt-haiku/LibreOffice", "path": "qadevOOo/tests/java/mod/_sc/ScIndexEnumeration_TextFieldEnumeration.java", "license": "gpl-3.0", "size": 5865 }
[ "com.sun.star.lang.XMultiServiceFactory" ]
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.lang.*;
[ "com.sun.star" ]
com.sun.star;
290,724
public Patient getPatientByUuid(String uuid);
Patient function(String uuid);
/** * Auto generated method comment * * @param uuid * @return */
Auto generated method comment
getPatientByUuid
{ "repo_name": "Bhamni/openmrs-core", "path": "api/src/main/java/org/openmrs/api/db/PatientDAO.java", "license": "mpl-2.0", "size": 6340 }
[ "org.openmrs.Patient" ]
import org.openmrs.Patient;
import org.openmrs.*;
[ "org.openmrs" ]
org.openmrs;
1,077,161
@Test public void testGetDifferentSystemAfterClose() { Properties config = createLonerConfig(); DistributedSystem system1 = DistributedSystem.connect(config); system1.disconnect(); int time = DEFAULT_ACK_WAIT_THRESHOLD + 17; config.put(ACK_WAIT_THRESHOLD, String.valueOf(time)); DistributedSystem system2 = DistributedSystem.connect(config); system2.disconnect(); }
void function() { Properties config = createLonerConfig(); DistributedSystem system1 = DistributedSystem.connect(config); system1.disconnect(); int time = DEFAULT_ACK_WAIT_THRESHOLD + 17; config.put(ACK_WAIT_THRESHOLD, String.valueOf(time)); DistributedSystem system2 = DistributedSystem.connect(config); system2.disconnect(); }
/** * Tests getting a system with a different configuration after another system has been closed. */
Tests getting a system with a different configuration after another system has been closed
testGetDifferentSystemAfterClose
{ "repo_name": "jdeppe-pivotal/geode", "path": "geode-core/src/distributedTest/java/org/apache/geode/distributed/DistributedSystemDUnitTest.java", "license": "apache-2.0", "size": 17601 }
[ "java.util.Properties" ]
import java.util.Properties;
import java.util.*;
[ "java.util" ]
java.util;
2,293,194
@Test public void multiThreadTestFifoDistributedBiFunction() { startTest("multiThreadTestFifoDistributedBiFunction", TEST_DATA_LIST); checkProcessingCountCacheSize(0); System.out.println(" processing..."); ExecutorService exec = Executors.newFixedThreadPool(NUM_THREAD); Map<Integer, String> execThreadNameMap = new ConcurrentHashMap<>(); FifoDistributedBiFunction<Integer, Integer> fifoDistributedBiFunction = i -> { processingCountCache.invoke(i, countProcessor); processingThreadCache.put(i, Thread.currentThread().getName()); return i + PROCESSING_COUNT_UP_VALUE; }; List<Future<List<Integer>>> futureList = IntStream.range(0, NUM_THREAD).mapToObj(n -> { return exec.submit(() -> { execThreadNameMap.put(n, Thread.currentThread().getName()); return TEST_DATA_LIST.stream() .collect(Collectors.toConcurrentMap(Function.identity(), i -> "DistributedProcessingCache")) .entrySet().stream().map(e -> { return fifoDistributedBiFunction.apply(e.getKey(), e.getValue()); }).collect(Collectors.toList()); }); }).collect(Collectors.toList()); IntStream.range(0, NUM_THREAD).forEach(i -> { try { List<Integer> execResultDataList = futureList.get(i).get(); System.out.println( " processed result data list (" + execThreadNameMap.get(i) + "): " + execResultDataList); } catch (Exception e) { e.printStackTrace(); } }); // Combine results List<Integer> resultDataList = new ArrayList<>(); IntStream.range(0, NUM_TEST_DATA).forEach(i -> { futureList.forEach(f -> { try { Integer result = f.get().get(i); if (result != null) { resultDataList.add(result); } } catch (Exception e) { e.printStackTrace(); } }); }); System.out.println(" processed result data list (all): " + resultDataList); assertEquals(EXPECTED_RESULT_DATA_LIST, resultDataList); checkProcessingCount(TEST_DATA_LIST.size(), 1); finishTest(TEST_DATA_LIST.size(), true); }
void function() { startTest(STR, TEST_DATA_LIST); checkProcessingCountCacheSize(0); System.out.println(STR); ExecutorService exec = Executors.newFixedThreadPool(NUM_THREAD); Map<Integer, String> execThreadNameMap = new ConcurrentHashMap<>(); FifoDistributedBiFunction<Integer, Integer> fifoDistributedBiFunction = i -> { processingCountCache.invoke(i, countProcessor); processingThreadCache.put(i, Thread.currentThread().getName()); return i + PROCESSING_COUNT_UP_VALUE; }; List<Future<List<Integer>>> futureList = IntStream.range(0, NUM_THREAD).mapToObj(n -> { return exec.submit(() -> { execThreadNameMap.put(n, Thread.currentThread().getName()); return TEST_DATA_LIST.stream() .collect(Collectors.toConcurrentMap(Function.identity(), i -> STR)) .entrySet().stream().map(e -> { return fifoDistributedBiFunction.apply(e.getKey(), e.getValue()); }).collect(Collectors.toList()); }); }).collect(Collectors.toList()); IntStream.range(0, NUM_THREAD).forEach(i -> { try { List<Integer> execResultDataList = futureList.get(i).get(); System.out.println( STR + execThreadNameMap.get(i) + STR + execResultDataList); } catch (Exception e) { e.printStackTrace(); } }); List<Integer> resultDataList = new ArrayList<>(); IntStream.range(0, NUM_TEST_DATA).forEach(i -> { futureList.forEach(f -> { try { Integer result = f.get().get(i); if (result != null) { resultDataList.add(result); } } catch (Exception e) { e.printStackTrace(); } }); }); System.out.println(STR + resultDataList); assertEquals(EXPECTED_RESULT_DATA_LIST, resultDataList); checkProcessingCount(TEST_DATA_LIST.size(), 1); finishTest(TEST_DATA_LIST.size(), true); }
/** * Test method for FifoDistributedBiFunction as distributed processing in * multiple processes.<br> * This method calls FifoDistributedBiFunction for the whole test data set from * several threads and processes the entries independently. */
Test method for FifoDistributedBiFunction as distributed processing in multiple processes. This method calls FifoDistributedBiFunction for the whole test data set from several threads and processes the entries independently
multiThreadTestFifoDistributedBiFunction
{ "repo_name": "simukappu/Coherence-tools", "path": "distributed-processing-tools/src/test/java/test/com/simukappu/coherence/distributed/fifo/TestFifoDistributedProcessor.java", "license": "apache-2.0", "size": 24412 }
[ "com.simukappu.coherence.distributed.fifo.FifoDistributedBiFunction", "java.util.ArrayList", "java.util.List", "java.util.Map", "java.util.concurrent.ConcurrentHashMap", "java.util.concurrent.ExecutorService", "java.util.concurrent.Executors", "java.util.concurrent.Future", "java.util.function.Function", "java.util.stream.Collectors", "java.util.stream.IntStream", "org.junit.Assert" ]
import com.simukappu.coherence.distributed.fifo.FifoDistributedBiFunction; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.junit.Assert;
import com.simukappu.coherence.distributed.fifo.*; import java.util.*; import java.util.concurrent.*; import java.util.function.*; import java.util.stream.*; import org.junit.*;
[ "com.simukappu.coherence", "java.util", "org.junit" ]
com.simukappu.coherence; java.util; org.junit;
1,318,398
protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.emptyList(); }
Collection<Class<? extends Plugin>> function() { return Collections.emptyList(); }
/** * Returns a collection of plugins that should be loaded on each node. */
Returns a collection of plugins that should be loaded on each node
nodePlugins
{ "repo_name": "JervyShi/elasticsearch", "path": "test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java", "license": "apache-2.0", "size": 100008 }
[ "java.util.Collection", "java.util.Collections", "org.elasticsearch.plugins.Plugin" ]
import java.util.Collection; import java.util.Collections; import org.elasticsearch.plugins.Plugin;
import java.util.*; import org.elasticsearch.plugins.*;
[ "java.util", "org.elasticsearch.plugins" ]
java.util; org.elasticsearch.plugins;
1,494
protected boolean arePathsContiguous(TreePath[] paths) { if (rowMapper == null || paths.length < 2) return true; int length = paths.length; TreePath[] tmp = new TreePath[1]; tmp[0] = paths[0]; int min = rowMapper.getRowsForPaths(tmp)[0]; BitSet selected = new BitSet(); int valid = 0; for (int i = 0; i < length; i++) { if (paths[i] != null) { tmp[0] = paths[i]; int[] rows = rowMapper.getRowsForPaths(tmp); if (rows == null) return false; // No row mapping yet, can't be selected. int row = rows[0]; if (row == -1 || row < (min - length) || row > (min + length)) return false; // Not contiguous. min = Math.min(min, row); if (! selected.get(row)) { selected.set(row); valid++; } } } int max = valid + min; for (int i = min; i < max; i++) if (! selected.get(i)) return false; // Not contiguous. return true; }
boolean function(TreePath[] paths) { if (rowMapper == null paths.length < 2) return true; int length = paths.length; TreePath[] tmp = new TreePath[1]; tmp[0] = paths[0]; int min = rowMapper.getRowsForPaths(tmp)[0]; BitSet selected = new BitSet(); int valid = 0; for (int i = 0; i < length; i++) { if (paths[i] != null) { tmp[0] = paths[i]; int[] rows = rowMapper.getRowsForPaths(tmp); if (rows == null) return false; int row = rows[0]; if (row == -1 row < (min - length) row > (min + length)) return false; min = Math.min(min, row); if (! selected.get(row)) { selected.set(row); valid++; } } } int max = valid + min; for (int i = min; i < max; i++) if (! selected.get(i)) return false; return true; }
/** * Returns <code>true</code> if the paths are contiguous (take subsequent * rows in the displayed tree view). The method returns <code>true</code> if * we have no RowMapper assigned. * * @param paths the paths to check for continuity * @return <code>true</code> if the paths are contiguous or we have no * RowMapper assigned */
Returns <code>true</code> if the paths are contiguous (take subsequent rows in the displayed tree view). The method returns <code>true</code> if we have no RowMapper assigned
arePathsContiguous
{ "repo_name": "taciano-perez/JamVM-PH", "path": "src/classpath/javax/swing/tree/DefaultTreeSelectionModel.java", "license": "gpl-2.0", "size": 36293 }
[ "java.util.BitSet" ]
import java.util.BitSet;
import java.util.*;
[ "java.util" ]
java.util;
2,114,743
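A minimal, self-contained sketch of the same contiguity test, stripped of the TreePath/RowMapper machinery used above: mark each row in a BitSet and verify that no bit is missing between the smallest row and the count of distinct rows. The class name is illustrative.

import java.util.BitSet;

public class ContiguityCheck {

    static boolean areRowsContiguous(int[] rows) {
        if (rows.length < 2) {
            return true;
        }
        BitSet selected = new BitSet();
        int min = Integer.MAX_VALUE;
        for (int row : rows) {
            min = Math.min(min, row);
            selected.set(row);
        }
        // Every index from min up to min + number of distinct rows must be set.
        int max = min + selected.cardinality();
        for (int i = min; i < max; i++) {
            if (!selected.get(i)) {
                return false; // found a gap, so the rows are not contiguous
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(areRowsContiguous(new int[] {4, 2, 3})); // true
        System.out.println(areRowsContiguous(new int[] {1, 2, 5})); // false
    }
}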
@Override protected Point getInitialSize() { return new Point(450, 504); }
Point function() { return new Point(450, 504); }
/** * Return the initial size of the dialog. */
Return the initial size of the dialog
getInitialSize
{ "repo_name": "chanakaudaya/developer-studio", "path": "esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/custom/configure/ui/ConfigureConditionalRouterMediatorDialog.java", "license": "apache-2.0", "size": 17491 }
[ "org.eclipse.swt.graphics.Point" ]
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
40,986
@Test(expected = NullPointerException.class) public void test_actualizar_nombreNull() { short id = 1; Texto nombre = null; Usuario usuario = new Usuario(); usuario.setId(id); usuario.setNombre(nombre); usuario.actualizarEnBD(); }
@Test(expected = NullPointerException.class) void function() { short id = 1; Texto nombre = null; Usuario usuario = new Usuario(); usuario.setId(id); usuario.setNombre(nombre); usuario.actualizarEnBD(); }
/** * Updates a user whose name is null. */
Updates a user whose name is null
test_actualizar_nombreNull
{ "repo_name": "NullPointer-Chile/farmacia-popular", "path": "farmacia-popular/src/test/java/cl/nullpointer/farmaciapopular/dominio/UsuarioTest.java", "license": "gpl-3.0", "size": 7585 }
[ "org.junit.Test" ]
import org.junit.Test;
import org.junit.*;
[ "org.junit" ]
org.junit;
230,076
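The test above relies on JUnit 4's expected-exception idiom; a minimal standalone sketch of the same pattern follows (class and method names are illustrative).

import org.junit.Test;

public class ExpectedExceptionExampleTest {

    @Test(expected = NullPointerException.class)
    public void failsWhenValueIsNull() {
        String value = null;
        // Dereferencing the null reference throws the exception the annotation expects.
        value.length();
    }
}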
public void sendEnvelope(String contents, Message response);
void function(String contents, Message response);
/** * Send ENVELOPE to the SIM, after processing a proactive command sent by * the SIM. * * @param contents String containing SAT/USAT response in hexadecimal * format starting with command tag. See TS 102 223 for * details. * @param response Callback message */
Send ENVELOPE to the SIM, after processing a proactive command sent by the SIM
sendEnvelope
{ "repo_name": "rex-xxx/mt6572_x201", "path": "frameworks/opt/telephony/src/java/com/android/internal/telephony/CommandsInterface.java", "license": "gpl-2.0", "size": 86787 }
[ "android.os.Message" ]
import android.os.Message;
import android.os.*;
[ "android.os" ]
android.os;
2,014,754
IWizardCategory getChildWithID(final IWizardCategory inParent, final String inID) { final IWizardCategory[] lChildren = inParent.getCategories(); for (int i = 0; i < lChildren.length; ++i) { final IWizardCategory lCurrentChild = lChildren[i]; if (lCurrentChild.getId().equals(inID)) { return lCurrentChild; } } return null; }
IWizardCategory getChildWithID(final IWizardCategory inParent, final String inID) { final IWizardCategory[] lChildren = inParent.getCategories(); for (int i = 0; i < lChildren.length; ++i) { final IWizardCategory lCurrentChild = lChildren[i]; if (lCurrentChild.getId().equals(inID)) { return lCurrentChild; } } return null; }
/** * Returns the child collection element for the given id */
Returns the child collection element for the given id
getChildWithID
{ "repo_name": "aktion-hip/relations", "path": "org.elbe.relations/src/org/elbe/relations/internal/e4/wizards/AbstractExtensionWizard.java", "license": "gpl-3.0", "size": 3968 }
[ "org.elbe.relations.internal.e4.wizards.util.IWizardCategory" ]
import org.elbe.relations.internal.e4.wizards.util.IWizardCategory;
import org.elbe.relations.internal.e4.wizards.util.*;
[ "org.elbe.relations" ]
org.elbe.relations;
644,562
protected void parseDatapackFile(String path) { parseFile(new File(VotingRewardInterface.getInstance().getDocumentRoot(), path)); }
void function(String path) { parseFile(new File(VotingRewardInterface.getInstance().getDocumentRoot(), path)); }
/** * Wrapper for {@link #parseFile(File)} method. * @param path the relative path to the datapack root of the XML file to parse. */
Wrapper for <code>#parseFile(File)</code> method
parseDatapackFile
{ "repo_name": "UnAfraid/topzone", "path": "VotingRewardAPI/src/main/java/com/github/unafraid/votingreward/util/DocumentParser.java", "license": "lgpl-3.0", "size": 20481 }
[ "com.github.unafraid.votingreward.VotingRewardInterface", "java.io.File" ]
import com.github.unafraid.votingreward.VotingRewardInterface; import java.io.File;
import com.github.unafraid.votingreward.*; import java.io.*;
[ "com.github.unafraid", "java.io" ]
com.github.unafraid; java.io;
2,595,078
public static Service create(QName serviceName, WebServiceFeature ... features) { return new Service(null, serviceName, features); }
static Service function(QName serviceName, WebServiceFeature ... features) { return new Service(null, serviceName, features); }
/** * Creates a <code>Service</code> instance. The created instance is * configured with the web service features. * * @param serviceName <code>QName</code> for the service * @param features Web Service features that must be configured on * the service. If the provider doesn't understand a feature, * it must throw a WebServiceException. * @throws WebServiceException If any error in creation of the * specified service * * @since JAX-WS 2.2 */
Creates a <code>Service</code> instance. The created instance is configured with the web service features
create
{ "repo_name": "shun634501730/java_source_cn", "path": "src_en/javax/xml/ws/Service.java", "license": "apache-2.0", "size": 33423 }
[ "javax.xml.namespace.QName" ]
import javax.xml.namespace.QName;
import javax.xml.namespace.*;
[ "javax.xml" ]
javax.xml;
2,562,966
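A hedged usage sketch of the factory documented above; the QName values are placeholders and AddressingFeature is used only as one example of a standard WebServiceFeature implementation.

import javax.xml.namespace.QName;
import javax.xml.ws.Service;
import javax.xml.ws.soap.AddressingFeature;

public class ServiceCreateExample {
    public static void main(String[] args) {
        // Illustrative service QName; no WSDL is attached in this variant.
        QName serviceName = new QName("http://example.com/demo", "DemoService");
        Service service = Service.create(serviceName, new AddressingFeature(true));
        System.out.println(service.getServiceName());
    }
}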
ServiceResponse<Void> putUuidValid(List<UUID> arrayBody) throws ErrorException, IOException, IllegalArgumentException;
ServiceResponse<Void> putUuidValid(List<UUID> arrayBody) throws ErrorException, IOException, IllegalArgumentException;
/** * Set array value ['6dcc7237-45fe-45c4-8a6b-3a8a3f625652', 'd1399005-30f7-40d6-8da6-dd7c89ad34db', 'f42f6aa1-a5bc-4ddf-907e-5f915de43205']. * * @param arrayBody the List&lt;UUID&gt; value * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */
Set array value ['6dcc7237-45fe-45c4-8a6b-3a8a3f625652', 'd1399005-30f7-40d6-8da6-dd7c89ad34db', 'f42f6aa1-a5bc-4ddf-907e-5f915de43205']
putUuidValid
{ "repo_name": "haocs/autorest", "path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodyarray/Arrays.java", "license": "mit", "size": 72234 }
[ "com.microsoft.rest.ServiceResponse", "java.io.IOException", "java.util.List" ]
import com.microsoft.rest.ServiceResponse; import java.io.IOException; import java.util.List;
import com.microsoft.rest.*; import java.io.*; import java.util.*;
[ "com.microsoft.rest", "java.io", "java.util" ]
com.microsoft.rest; java.io; java.util;
2,501,189
@Nullable public Vec3d getTargetLocation() { return this.targetLocation; }
Vec3d function() { return this.targetLocation; }
/** * Returns the location the dragon is flying toward */
Returns the location the dragon is flying toward
getTargetLocation
{ "repo_name": "TheGreatAndPowerfulWeegee/wipunknown", "path": "build/tmp/recompileMc/sources/net/minecraft/entity/boss/dragon/phase/PhaseDying.java", "license": "gpl-3.0", "size": 2647 }
[ "net.minecraft.util.math.Vec3d" ]
import net.minecraft.util.math.Vec3d;
import net.minecraft.util.math.*;
[ "net.minecraft.util" ]
net.minecraft.util;
86,397
public void changeStream(){ try { if(outputStream != null){ outputStream.close(); } } catch (IOException e) { } outputStream = null; }
void function(){ try { if(outputStream != null){ outputStream.close(); } } catch (IOException e) { } outputStream = null; }
/** * Allows changing the stream; sometimes needed if a stream is too full or when changing color. */
Allows changing the stream; sometimes needed if a stream is too full or when changing color
changeStream
{ "repo_name": "diverse-project/tools", "path": "commons-eclipse/fr.inria.diverse.commons.eclipse.messagingsystem.ui/src/main/java/fr/inria/diverse/commons/eclipse/messagingsystem/ui/internal/console/EclipseConsoleIO.java", "license": "epl-1.0", "size": 8841 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,898,775
ExamplesUtils.checkMinMemory(MIN_MEMORY); try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) { System.out.println(); System.out.println(">>> Cache data streamer example started."); try (IgniteCache<Integer, String> cache = ignite.getOrCreateCache(CACHE_NAME)) { long start = System.currentTimeMillis(); try (IgniteDataStreamer<Integer, String> stmr = ignite.dataStreamer(CACHE_NAME)) { // Configure loader. stmr.perNodeBufferSize(1024); stmr.perNodeParallelOperations(8); for (int i = 0; i < ENTRY_COUNT; i++) { stmr.addData(i, Integer.toString(i)); // Print out progress while loading cache. if (i > 0 && i % 10000 == 0) System.out.println("Loaded " + i + " keys."); } } long end = System.currentTimeMillis(); System.out.println(">>> Loaded " + ENTRY_COUNT + " keys in " + (end - start) + "ms."); } } }
ExamplesUtils.checkMinMemory(MIN_MEMORY); try (Ignite ignite = Ignition.start(STR)) { System.out.println(); System.out.println(STR); try (IgniteCache<Integer, String> cache = ignite.getOrCreateCache(CACHE_NAME)) { long start = System.currentTimeMillis(); try (IgniteDataStreamer<Integer, String> stmr = ignite.dataStreamer(CACHE_NAME)) { stmr.perNodeBufferSize(1024); stmr.perNodeParallelOperations(8); for (int i = 0; i < ENTRY_COUNT; i++) { stmr.addData(i, Integer.toString(i)); if (i > 0 && i % 10000 == 0) System.out.println(STR + i + STR); } } long end = System.currentTimeMillis(); System.out.println(STR + ENTRY_COUNT + STR + (end - start) + "ms."); } } }
/** * Executes example. * * @param args Command line arguments, none required. * @throws IgniteException If example execution failed. */
Executes example
main
{ "repo_name": "dlnufox/ignite", "path": "examples/src/main/java/org/apache/ignite/examples/datagrid/CacheDataStreamerExample.java", "license": "apache-2.0", "size": 3577 }
[ "org.apache.ignite.Ignite", "org.apache.ignite.IgniteCache", "org.apache.ignite.IgniteDataStreamer", "org.apache.ignite.Ignition", "org.apache.ignite.examples.ExamplesUtils" ]
import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.Ignition; import org.apache.ignite.examples.ExamplesUtils;
import org.apache.ignite.*; import org.apache.ignite.examples.*;
[ "org.apache.ignite" ]
org.apache.ignite;
522,296
public static SyncResponse syncContacts(Account account, String authtoken, SyncAnchor serverSyncState, List<RawContact> dirtyContacts, List<ContactGroup> dirtyGroups, SecretKey key, AccountManager accountManager, Context context, SyncResult syncResult, String pwdSaltHexStr, Map<Long, String> newIdMap, Restrictions restr, boolean explizitPhotoSave) throws AuthenticationException, OperationCanceledException, AuthenticatorException, ServerException, NetworkErrorException, HeaderParseException, HeaderCreateException { String clientId = getClientId(accountManager, account); SyncPrepErrorStatistic prepError = new SyncPrepErrorStatistic(); byte[] totBuffer = RequestGenerator.prepareServerRequest( serverSyncState, dirtyContacts, dirtyGroups, key, SystemHelper.getPkgVersion(context), clientId, pwdSaltHexStr, newIdMap, prepError, restr, explizitPhotoSave); syncResult.stats.numSkippedEntries += prepError.getIgnoredRows(); String currAuthtoken = authtoken; SyncResponse syncResponse = null; boolean retry; int retrycount = 0; do { retry = false; HttpEntity entity = new ByteArrayEntity(totBuffer); // Send the updated friends data to the server final HttpPost post = new HttpPost(SYNC_URI); post.setHeader("Content-Encoding", "application/octect-stream"); post.setEntity(entity); HttpEntity respEntity = null; try { final HttpResponse resp = getHttpClient(context).execute(post, createHttpContext(account.name, currAuthtoken)); respEntity = resp.getEntity(); if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { final byte[] response = EntityUtils.toByteArray(respEntity); syncResponse = processServerResponse(account, key, accountManager, clientId, response, syncResult); if (Log.isLoggable(TAG, Log.INFO)) { Log.i(TAG, "Response-Length: " + response.length); } } else { if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_UNAUTHORIZED) { currAuthtoken = retryAuthentification(retrycount, accountManager, currAuthtoken, account.name, resp); retry = true; } else { throw new ServerException( "Server error in sending dirty contacts: " + resp.getStatusLine()); } } } catch (IOException ex) { throw new NetworkErrorException(ex); } finally { consumeContent(respEntity); } retrycount++; } while (retry); return syncResponse; }
static SyncResponse function(Account account, String authtoken, SyncAnchor serverSyncState, List<RawContact> dirtyContacts, List<ContactGroup> dirtyGroups, SecretKey key, AccountManager accountManager, Context context, SyncResult syncResult, String pwdSaltHexStr, Map<Long, String> newIdMap, Restrictions restr, boolean explizitPhotoSave) throws AuthenticationException, OperationCanceledException, AuthenticatorException, ServerException, NetworkErrorException, HeaderParseException, HeaderCreateException { String clientId = getClientId(accountManager, account); SyncPrepErrorStatistic prepError = new SyncPrepErrorStatistic(); byte[] totBuffer = RequestGenerator.prepareServerRequest( serverSyncState, dirtyContacts, dirtyGroups, key, SystemHelper.getPkgVersion(context), clientId, pwdSaltHexStr, newIdMap, prepError, restr, explizitPhotoSave); syncResult.stats.numSkippedEntries += prepError.getIgnoredRows(); String currAuthtoken = authtoken; SyncResponse syncResponse = null; boolean retry; int retrycount = 0; do { retry = false; HttpEntity entity = new ByteArrayEntity(totBuffer); final HttpPost post = new HttpPost(SYNC_URI); post.setHeader(STR, STR); post.setEntity(entity); HttpEntity respEntity = null; try { final HttpResponse resp = getHttpClient(context).execute(post, createHttpContext(account.name, currAuthtoken)); respEntity = resp.getEntity(); if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { final byte[] response = EntityUtils.toByteArray(respEntity); syncResponse = processServerResponse(account, key, accountManager, clientId, response, syncResult); if (Log.isLoggable(TAG, Log.INFO)) { Log.i(TAG, STR + response.length); } } else { if (resp.getStatusLine().getStatusCode() == HttpStatus.SC_UNAUTHORIZED) { currAuthtoken = retryAuthentification(retrycount, accountManager, currAuthtoken, account.name, resp); retry = true; } else { throw new ServerException( STR + resp.getStatusLine()); } } } catch (IOException ex) { throw new NetworkErrorException(ex); } finally { consumeContent(respEntity); } retrycount++; } while (retry); return syncResponse; }
/** * Perform 2-way sync with the server-side contacts. We send a request that * includes all the locally-dirty contacts so that the server can process * those changes, and we receive (and return) a list of contacts that were * updated on the server-side that need to be updated locally. * * @param account * The account being synced * @param authtoken * The authtoken stored in the AccountManager for this account * @param serverSyncState * A token returned from the server on the last sync * @param dirtyContacts * A list of the contacts to send to the server * @param newIdMap * Map of RawId to ServerId * @param explizitPhotoSave * @return A list of contacts that we need to update locally. Null if * processing of server-results failed. * @throws ParserConfigurationException * @throws TransformerException * @throws AuthenticatorException * @throws OperationCanceledException * when Authentication was canceled from user * @throws SAXException * @throws ServerException * @throws NetworkErrorException * @throws HeaderParseException * @throws HeaderCreateException */
Perform 2-way sync with the server-side contacts. We send a request that includes all the locally-dirty contacts so that the server can process those changes, and we receive (and return) a list of contacts that were updated on the server-side that need to be updated locally
syncContacts
{ "repo_name": "mgrieder/ntsync-android", "path": "ntsync-android-client/src/com/ntsync/android/sync/client/NetworkUtilities.java", "license": "gpl-3.0", "size": 36031 }
[ "android.accounts.Account", "android.accounts.AccountManager", "android.accounts.AuthenticatorException", "android.accounts.NetworkErrorException", "android.accounts.OperationCanceledException", "android.content.Context", "android.content.SyncResult", "android.util.Log", "com.ntsync.android.sync.platform.SystemHelper", "com.ntsync.shared.ContactGroup", "com.ntsync.shared.HeaderCreateException", "com.ntsync.shared.HeaderParseException", "com.ntsync.shared.RawContact", "com.ntsync.shared.RequestGenerator", "com.ntsync.shared.Restrictions", "com.ntsync.shared.SyncAnchor", "java.io.IOException", "java.util.List", "java.util.Map", "javax.crypto.SecretKey", "org.apache.http.HttpEntity", "org.apache.http.HttpResponse", "org.apache.http.HttpStatus", "org.apache.http.auth.AuthenticationException", "org.apache.http.client.methods.HttpPost", "org.apache.http.entity.ByteArrayEntity", "org.apache.http.util.EntityUtils" ]
import android.accounts.Account; import android.accounts.AccountManager; import android.accounts.AuthenticatorException; import android.accounts.NetworkErrorException; import android.accounts.OperationCanceledException; import android.content.Context; import android.content.SyncResult; import android.util.Log; import com.ntsync.android.sync.platform.SystemHelper; import com.ntsync.shared.ContactGroup; import com.ntsync.shared.HeaderCreateException; import com.ntsync.shared.HeaderParseException; import com.ntsync.shared.RawContact; import com.ntsync.shared.RequestGenerator; import com.ntsync.shared.Restrictions; import com.ntsync.shared.SyncAnchor; import java.io.IOException; import java.util.List; import java.util.Map; import javax.crypto.SecretKey; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.auth.AuthenticationException; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.util.EntityUtils;
import android.accounts.*; import android.content.*; import android.util.*; import com.ntsync.android.sync.platform.*; import com.ntsync.shared.*; import java.io.*; import java.util.*; import javax.crypto.*; import org.apache.http.*; import org.apache.http.auth.*; import org.apache.http.client.methods.*; import org.apache.http.entity.*; import org.apache.http.util.*;
[ "android.accounts", "android.content", "android.util", "com.ntsync.android", "com.ntsync.shared", "java.io", "java.util", "javax.crypto", "org.apache.http" ]
android.accounts; android.content; android.util; com.ntsync.android; com.ntsync.shared; java.io; java.util; javax.crypto; org.apache.http;
2,862,793
ListAdapter mAdapter = listView.getAdapter(); int totalHeight = 0; for (int i = 0; i < mAdapter.getCount(); i++) { View mView = mAdapter.getView(i, null, listView); mView.measure( View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED), View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED)); totalHeight += mView.getMeasuredHeight(); Log.w("HEIGHT" + i, String.valueOf(totalHeight)); } ViewGroup.LayoutParams params = listView.getLayoutParams(); params.height = totalHeight + (listView.getDividerHeight() * (mAdapter.getCount() - 1)); listView.setLayoutParams(params); listView.requestLayout(); }
ListAdapter mAdapter = listView.getAdapter(); int totalHeight = 0; for (int i = 0; i < mAdapter.getCount(); i++) { View mView = mAdapter.getView(i, null, listView); mView.measure( View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED), View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED)); totalHeight += mView.getMeasuredHeight(); Log.w(STR + i, String.valueOf(totalHeight)); } ViewGroup.LayoutParams params = listView.getLayoutParams(); params.height = totalHeight + (listView.getDividerHeight() * (mAdapter.getCount() - 1)); listView.setLayoutParams(params); listView.requestLayout(); }
/**** Method for Setting the Height of the ListView dynamically. **** Hack to fix the issue of not showing all the items of the ListView **** when placed inside a ScrollView ****/
Method for setting the height of the ListView dynamically. Hack to fix the issue of not showing all the items of the ListView when placed inside a ScrollView
setListViewHeightBasedOnChildren
{ "repo_name": "zhuliliang/SalesPlatform-for-Android", "path": "commons/src/main/java/com/ebaotech/salesplatform/commons/util/ListViewUtil.java", "license": "apache-2.0", "size": 1358 }
[ "android.util.Log", "android.view.View", "android.view.ViewGroup", "android.widget.ListAdapter" ]
import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.widget.ListAdapter;
import android.util.*; import android.view.*; import android.widget.*;
[ "android.util", "android.view", "android.widget" ]
android.util; android.view; android.widget;
2,490,245
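A hypothetical call site for the utility above: the height fix is applied right after the adapter is attached so the measured height covers every row. The Activity, layout id, and view id are illustrative; ListViewUtil refers to the class named in the record's path.

import android.app.Activity;
import android.os.Bundle;
import android.widget.ArrayAdapter;
import android.widget.ListView;

public class ExampleActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_example);              // illustrative layout id
        ListView listView = (ListView) findViewById(R.id.list); // illustrative view id
        listView.setAdapter(new ArrayAdapter<>(this,
                android.R.layout.simple_list_item_1,
                new String[] {"one", "two", "three"}));
        // Recompute the ListView height so all rows are visible inside a ScrollView.
        ListViewUtil.setListViewHeightBasedOnChildren(listView);
    }
}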
boolean dfsPruning(PrefixVMSP prefix, Bitmap prefixBitmap, List<Integer> sn, List<Integer> in, int hasToBeGreaterThanForIStep, int m, Integer lastAppendedItem) throws IOException { boolean atLeastOneFrequentExtension = false; // System.out.println(prefix.toString()); // ====== S-STEPS ====== // Temporary variables (as described in the paper) List<Integer> sTemp = new ArrayList<Integer>(); List<Bitmap> sTempBitmaps = new ArrayList<Bitmap>(); // for CMAP pruning, we will only check against the last appended item Map<Integer, Integer> mapSupportItemsAfter = coocMapAfter.get(lastAppendedItem); // for each item in sn loopi: for (Integer i : sn) { // LAST POSITION PRUNING // CMAP PRUNING // we only check with the last appended item if (useCMAPPruning) { if (mapSupportItemsAfter == null) { continue loopi; } Integer support = mapSupportItemsAfter.get(i); if (support == null || support < minsup) { // System.out.println("PRUNE"); continue loopi; } } // perform the S-STEP with that item to get a new bitmap Bitmap.INTERSECTION_COUNT++; Bitmap newBitmap = prefixBitmap.createNewBitmapSStep(verticalDB.get(i), sequencesSize, lastBitIndex, maxGap); // if the support is higher than minsup if (newBitmap.getSupportWithoutGapTotal() >= minsup) { // record that item and pattern in temporary variables sTemp.add(i); sTempBitmaps.add(newBitmap); } } // for each pattern recorded for the s-step for (int k = 0; k < sTemp.size(); k++) { // STRATEGY: NEWWW atLeastOneFrequentExtension = true; int item = sTemp.get(k); // create the new prefix PrefixVMSP prefixSStep = prefix.cloneSequence(); prefixSStep.addItemset(new Itemset(item)); if(item % 2 == 0) { prefixSStep.sumOfEvenItems = item + prefix.sumOfEvenItems; prefixSStep.sumOfOddItems = prefix.sumOfOddItems; }else { prefixSStep.sumOfEvenItems = prefix.sumOfEvenItems; prefixSStep.sumOfOddItems = item + prefix.sumOfOddItems; } // prefixSStep.sumOfItems = item + prefix.sumOfItems; // create the new bitmap Bitmap newBitmap = sTempBitmaps.get(k); // save the pattern to the file if(newBitmap.getSupport() >= minsup) { boolean hasFrequentExtension = false; // recursively try to extend that pattern if (maximumPatternLength > m) { hasFrequentExtension = dfsPruning(prefixSStep, newBitmap, sTemp, sTemp, item, m + 1, item); } if(hasFrequentExtension == false) { savePatternMultipleItems(prefixSStep, newBitmap, m); } } } Map<Integer, Integer> mapSupportItemsEquals = coocMapEquals.get(lastAppendedItem); // ======== I STEPS ======= // Temporary variables List<Integer> iTemp = new ArrayList<Integer>(); List<Bitmap> iTempBitmaps = new ArrayList<Bitmap>(); // for each item in in loop2: for (Integer i : in) { // the item has to be greater than the largest item // already in the last itemset of prefix. if (i > hasToBeGreaterThanForIStep) { // LAST POSITION PRUNING // CMAP PRUNING if (useCMAPPruning) { if (mapSupportItemsEquals == null) { continue loop2; } Integer support = mapSupportItemsEquals.get(i); if (support == null || support < minsup) { continue loop2; } } // Perform an i-step with this item and the current prefix. 
// This creates a new bitmap Bitmap.INTERSECTION_COUNT++; Bitmap newBitmap = prefixBitmap.createNewBitmapIStep(verticalDB.get(i), sequencesSize, lastBitIndex); // If the support is no less than minsup if (newBitmap.getSupport() >= minsup) { // record that item and pattern in temporary variables iTemp.add(i); iTempBitmaps.add(newBitmap); } } } // for each pattern recorded for the i-step for (int k = 0; k < iTemp.size(); k++) {// STRATEGY: NEWWW atLeastOneFrequentExtension = true; int item = iTemp.get(k); // create the new prefix PrefixVMSP prefixIStep = prefix.cloneSequence(); prefixIStep.getItemsets().get(prefixIStep.size() - 1).addItem(item); if(item % 2 == 0) { prefixIStep.sumOfEvenItems = item + prefix.sumOfEvenItems; prefixIStep.sumOfOddItems = prefix.sumOfOddItems; }else { prefixIStep.sumOfEvenItems = prefix.sumOfEvenItems; prefixIStep.sumOfOddItems = item + prefix.sumOfOddItems; } // create the new bitmap Bitmap newBitmap = iTempBitmaps.get(k); // recursively try to extend that pattern boolean hasFrequentExtension = false; if (maximumPatternLength > m) { hasFrequentExtension = dfsPruning(prefixIStep, newBitmap, sTemp, iTemp, item, m + 1, item); } if(hasFrequentExtension == false) { // save the pattern savePatternMultipleItems(prefixIStep, newBitmap, m); } } // check the memory usage MemoryLogger.getInstance().checkMemory(); return atLeastOneFrequentExtension || useStrategyForwardExtensionChecking == false; }
boolean dfsPruning(PrefixVMSP prefix, Bitmap prefixBitmap, List<Integer> sn, List<Integer> in, int hasToBeGreaterThanForIStep, int m, Integer lastAppendedItem) throws IOException { boolean atLeastOneFrequentExtension = false; List<Integer> sTemp = new ArrayList<Integer>(); List<Bitmap> sTempBitmaps = new ArrayList<Bitmap>(); Map<Integer, Integer> mapSupportItemsAfter = coocMapAfter.get(lastAppendedItem); loopi: for (Integer i : sn) { if (useCMAPPruning) { if (mapSupportItemsAfter == null) { continue loopi; } Integer support = mapSupportItemsAfter.get(i); if (support == null support < minsup) { continue loopi; } } Bitmap.INTERSECTION_COUNT++; Bitmap newBitmap = prefixBitmap.createNewBitmapSStep(verticalDB.get(i), sequencesSize, lastBitIndex, maxGap); if (newBitmap.getSupportWithoutGapTotal() >= minsup) { sTemp.add(i); sTempBitmaps.add(newBitmap); } } for (int k = 0; k < sTemp.size(); k++) { atLeastOneFrequentExtension = true; int item = sTemp.get(k); PrefixVMSP prefixSStep = prefix.cloneSequence(); prefixSStep.addItemset(new Itemset(item)); if(item % 2 == 0) { prefixSStep.sumOfEvenItems = item + prefix.sumOfEvenItems; prefixSStep.sumOfOddItems = prefix.sumOfOddItems; }else { prefixSStep.sumOfEvenItems = prefix.sumOfEvenItems; prefixSStep.sumOfOddItems = item + prefix.sumOfOddItems; } Bitmap newBitmap = sTempBitmaps.get(k); if(newBitmap.getSupport() >= minsup) { boolean hasFrequentExtension = false; if (maximumPatternLength > m) { hasFrequentExtension = dfsPruning(prefixSStep, newBitmap, sTemp, sTemp, item, m + 1, item); } if(hasFrequentExtension == false) { savePatternMultipleItems(prefixSStep, newBitmap, m); } } } Map<Integer, Integer> mapSupportItemsEquals = coocMapEquals.get(lastAppendedItem); List<Integer> iTemp = new ArrayList<Integer>(); List<Bitmap> iTempBitmaps = new ArrayList<Bitmap>(); loop2: for (Integer i : in) { if (i > hasToBeGreaterThanForIStep) { if (useCMAPPruning) { if (mapSupportItemsEquals == null) { continue loop2; } Integer support = mapSupportItemsEquals.get(i); if (support == null support < minsup) { continue loop2; } } Bitmap.INTERSECTION_COUNT++; Bitmap newBitmap = prefixBitmap.createNewBitmapIStep(verticalDB.get(i), sequencesSize, lastBitIndex); if (newBitmap.getSupport() >= minsup) { iTemp.add(i); iTempBitmaps.add(newBitmap); } } } for (int k = 0; k < iTemp.size(); k++) { atLeastOneFrequentExtension = true; int item = iTemp.get(k); PrefixVMSP prefixIStep = prefix.cloneSequence(); prefixIStep.getItemsets().get(prefixIStep.size() - 1).addItem(item); if(item % 2 == 0) { prefixIStep.sumOfEvenItems = item + prefix.sumOfEvenItems; prefixIStep.sumOfOddItems = prefix.sumOfOddItems; }else { prefixIStep.sumOfEvenItems = prefix.sumOfEvenItems; prefixIStep.sumOfOddItems = item + prefix.sumOfOddItems; } Bitmap newBitmap = iTempBitmaps.get(k); boolean hasFrequentExtension = false; if (maximumPatternLength > m) { hasFrequentExtension = dfsPruning(prefixIStep, newBitmap, sTemp, iTemp, item, m + 1, item); } if(hasFrequentExtension == false) { savePatternMultipleItems(prefixIStep, newBitmap, m); } } MemoryLogger.getInstance().checkMemory(); return atLeastOneFrequentExtension useStrategyForwardExtensionChecking == false; }
/** * This is the dfsPruning method as described in the SPAM paper. * * @param prefix the current prefix * @param prefixBitmap the bitmap corresponding to the current prefix * @param sn a list of items to be considered for i-steps * @param in a list of items to be considered for s-steps * @param hasToBeGreaterThanForIStep * @param m size of the current prefix in terms of items * @param lastAppendedItem the last appended item to the prefix * @throws IOException if there is an error writing a pattern to the output * file * @return TRUE IF A FREQUENT PATTERN WAS CREATED USING THE PREFIX. */
This is the dfsPruning method as described in the SPAM paper
dfsPruning
{ "repo_name": "ArneBinder/LanguageAnalyzer", "path": "src/main/java/ca/pfv/spmf/algorithms/sequentialpatterns/spam/AlgoVMSP.java", "license": "gpl-3.0", "size": 35983 }
[ "ca.pfv.spmf.patterns.itemset_list_integers_without_support.Itemset", "ca.pfv.spmf.tools.MemoryLogger", "java.io.IOException", "java.util.ArrayList", "java.util.List", "java.util.Map" ]
import ca.pfv.spmf.patterns.itemset_list_integers_without_support.Itemset; import ca.pfv.spmf.tools.MemoryLogger; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map;
import ca.pfv.spmf.patterns.itemset_list_integers_without_support.*; import ca.pfv.spmf.tools.*; import java.io.*; import java.util.*;
[ "ca.pfv.spmf", "java.io", "java.util" ]
ca.pfv.spmf; java.io; java.util;
544,239
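A conceptual illustration of the bitmap support counting that dfsPruning builds on, written with java.util.BitSet instead of the SPMF Bitmap class: the support of an extended prefix comes from intersecting the bit vector of the prefix with that of the appended item. Real SPAM/VMSP bitmaps reserve a range of bits per sequence; this sketch simplifies to one bit per sequence.

import java.util.BitSet;

public class BitmapIntersectionSketch {
    public static void main(String[] args) {
        BitSet prefixBitmap = new BitSet();
        prefixBitmap.set(0); // sequences that contain the current prefix
        prefixBitmap.set(2);
        prefixBitmap.set(3);

        BitSet itemBitmap = new BitSet();
        itemBitmap.set(2);   // sequences that contain the candidate item
        itemBitmap.set(3);
        itemBitmap.set(4);

        BitSet extended = (BitSet) prefixBitmap.clone();
        extended.and(itemBitmap);              // i-step style intersection
        int support = extended.cardinality();  // 2 sequences support the extension

        int minsup = 2;
        System.out.println("support = " + support + ", frequent = " + (support >= minsup));
    }
}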
public long position() throws IOException { return input.position() - origin; }
long function() throws IOException { return input.position() - origin; }
/** * Returns the position of the underlying stream relative to the origin. * * @return the relative position * @throws IOException if an IO error occurs */
Returns the position of the underlying stream relative to the origin
position
{ "repo_name": "gaowangyizu/myHeritrix", "path": "myHeritrix/src/org/archive/io/OriginSeekInputStream.java", "license": "apache-2.0", "size": 3410 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,742,570
public void setTarget(final NormalizationStorage target) { this.storage = target; }
void function(final NormalizationStorage target) { this.storage = target; }
/** * Determines where the normalized data will be sent. * * @param target * The target. */
Determines where the normalized data will be sent
setTarget
{ "repo_name": "Crespo911/encog-java-core", "path": "src/main/java/org/encog/util/normalize/DataNormalization.java", "license": "apache-2.0", "size": 23726 }
[ "org.encog.util.normalize.target.NormalizationStorage" ]
import org.encog.util.normalize.target.NormalizationStorage;
import org.encog.util.normalize.target.*;
[ "org.encog.util" ]
org.encog.util;
29,964
public KeyInfo getKeyInfo() { return keyInfo; }
KeyInfo function() { return keyInfo; }
/** * Get the static KeyInfo object held by this generator. * * @return the currently held KeyInfo object */
Get the static KeyInfo object held by this generator
getKeyInfo
{ "repo_name": "Safewhere/kombit-service-java", "path": "XmlTooling/src/org/opensaml/xml/security/keyinfo/StaticKeyInfoGenerator.java", "license": "mit", "size": 6363 }
[ "org.opensaml.xml.signature.KeyInfo" ]
import org.opensaml.xml.signature.KeyInfo;
import org.opensaml.xml.signature.*;
[ "org.opensaml.xml" ]
org.opensaml.xml;
2,636,679
private void addCommandLineArgFile( Commandline cmd, File javadocOutputDirectory, List<String> files ) throws MavenReportException { File argfileFile; if ( JAVA_VERSION.compareTo( SINCE_JAVADOC_1_4 ) >= 0 ) { argfileFile = new File( javadocOutputDirectory, ARGFILE_FILE_NAME ); cmd.createArg().setValue( "@" + ARGFILE_FILE_NAME ); } else { argfileFile = new File( javadocOutputDirectory, FILES_FILE_NAME ); cmd.createArg().setValue( "@" + FILES_FILE_NAME ); } try { FileUtils.fileWrite( argfileFile.getAbsolutePath(), null , StringUtils.join( files.iterator(), SystemUtils.LINE_SEPARATOR ) ); } catch ( IOException e ) { throw new MavenReportException( "Unable to write '" + argfileFile.getName() + "' temporary file for command execution", e ); } }
void function( Commandline cmd, File javadocOutputDirectory, List<String> files ) throws MavenReportException { File argfileFile; if ( JAVA_VERSION.compareTo( SINCE_JAVADOC_1_4 ) >= 0 ) { argfileFile = new File( javadocOutputDirectory, ARGFILE_FILE_NAME ); cmd.createArg().setValue( "@" + ARGFILE_FILE_NAME ); } else { argfileFile = new File( javadocOutputDirectory, FILES_FILE_NAME ); cmd.createArg().setValue( "@" + FILES_FILE_NAME ); } try { FileUtils.fileWrite( argfileFile.getAbsolutePath(), null , StringUtils.join( files.iterator(), SystemUtils.LINE_SEPARATOR ) ); } catch ( IOException e ) { throw new MavenReportException( STR + argfileFile.getName() + STR, e ); } }
/** * Generate a file called <code>argfile</code> (or <code>files</code>, depending on the JDK) to hold files and add * the <code>@argfile</code> (or <code>@file</code>, depending on the JDK) in the command line. * * @param cmd not null * @param javadocOutputDirectory not null * @param files not null * @throws MavenReportException if any * @see <a href="http://docs.oracle.com/javase/7/docs/technotes/tools/windows/javadoc.html#argumentfiles"> * Reference Guide, Command line argument files * </a> * @see <a href="http://docs.oracle.com/javase/7/docs/technotes/guides/javadoc/whatsnew-1.4.html#runningjavadoc"> * What's New in Javadoc 1.4 * </a> * @see #isJavaDocVersionAtLeast(float) * @see #ARGFILE_FILE_NAME * @see #FILES_FILE_NAME */
Generate a file called <code>argfile</code> (or <code>files</code>, depending on the JDK) to hold files and add the <code>@argfile</code> (or <code>@file</code>, depending on the JDK) in the command line
addCommandLineArgFile
{ "repo_name": "mcculls/maven-plugins", "path": "maven-javadoc-plugin/src/main/java/org/apache/maven/plugins/javadoc/AbstractJavadocMojo.java", "license": "apache-2.0", "size": 237779 }
[ "java.io.File", "java.io.IOException", "java.util.List", "org.apache.commons.lang3.SystemUtils", "org.apache.maven.reporting.MavenReportException", "org.codehaus.plexus.util.FileUtils", "org.codehaus.plexus.util.StringUtils", "org.codehaus.plexus.util.cli.Commandline" ]
import java.io.File; import java.io.IOException; import java.util.List; import org.apache.commons.lang3.SystemUtils; import org.apache.maven.reporting.MavenReportException; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.StringUtils; import org.codehaus.plexus.util.cli.Commandline;
import java.io.*; import java.util.*; import org.apache.commons.lang3.*; import org.apache.maven.reporting.*; import org.codehaus.plexus.util.*; import org.codehaus.plexus.util.cli.*;
[ "java.io", "java.util", "org.apache.commons", "org.apache.maven", "org.codehaus.plexus" ]
java.io; java.util; org.apache.commons; org.apache.maven; org.codehaus.plexus;
2,413,322
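An illustrative sketch of the @argfile mechanism itself, independent of the Maven plugin: source file names are written to a plain text file and the javadoc tool is then invoked as "javadoc @argfile" so the command line stays short. All paths are placeholders.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

public class ArgfileSketch {
    public static void main(String[] args) throws IOException {
        List<String> sourceFiles = Arrays.asList(
                "src/main/java/com/example/A.java",
                "src/main/java/com/example/B.java");
        Path argfile = Paths.get("target", "site", "apidocs", "argfile");
        Files.createDirectories(argfile.getParent());
        Files.write(argfile, sourceFiles); // one file name per line
        // The resulting call would look like: javadoc -d target/site/apidocs @target/site/apidocs/argfile
        System.out.println("javadoc @" + argfile);
    }
}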
public static int getNOW(String listpt) throws java.text.ParseException{//SDA=Some Days Ago String listPT = getListPT(listpt); Date date = sim01.parse(listPT); Calendar calendar = Calendar.getInstance();//calendar object calendar.setTime(date);//set the current date System.out.println(calendar); int month = calendar.get(Calendar.MONTH)+1; return month; }
static int function(String listpt) throws java.text.ParseException{ String listPT = getListPT(listpt); Date date = sim01.parse(listPT); Calendar calendar = Calendar.getInstance(); calendar.setTime(date); System.out.println(calendar); int month = calendar.get(Calendar.MONTH)+1; return month; }
/** * ODA=One Day Ago * @return the date object from three days ago * @throws java.text.ParseException */
ODA=One Day Ago
getNOW
{ "repo_name": "thomasloto/migang-crawler", "path": "src/project/java/com/ouyeel/crawler/util/CrawlUtils.java", "license": "mit", "size": 19291 }
[ "java.util.Calendar", "java.util.Date", "org.apache.http.ParseException" ]
import java.util.Calendar; import java.util.Date; import org.apache.http.ParseException;
import java.util.*; import org.apache.http.*;
[ "java.util", "org.apache.http" ]
java.util; org.apache.http;
465,445
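A minimal sketch of the month extraction performed above; the +1 is needed because Calendar.MONTH is zero-based (January is 0). The date string and class name are placeholders.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

public class MonthOfDate {
    public static void main(String[] args) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        Date date = fmt.parse("2015-07-20");
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        int month = calendar.get(Calendar.MONTH) + 1; // 7 for July
        System.out.println(month);
    }
}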
private RelFieldCollation.Direction reverseDirection(RelFieldCollation.Direction direction) { switch(direction) { case ASCENDING: case STRICTLY_ASCENDING: return RelFieldCollation.Direction.DESCENDING; case DESCENDING: case STRICTLY_DESCENDING: return RelFieldCollation.Direction.ASCENDING; default: return null; } }
RelFieldCollation.Direction function(RelFieldCollation.Direction direction) { switch(direction) { case ASCENDING: case STRICTLY_ASCENDING: return RelFieldCollation.Direction.DESCENDING; case DESCENDING: case STRICTLY_DESCENDING: return RelFieldCollation.Direction.ASCENDING; default: return null; } }
/** Find the reverse of a given collation direction. * * @return Reverse of the input direction */
Find the reverse of a given collation direction
reverseDirection
{ "repo_name": "wanglan/calcite", "path": "cassandra/src/main/java/org/apache/calcite/adapter/cassandra/CassandraRules.java", "license": "apache-2.0", "size": 14136 }
[ "org.apache.calcite.rel.RelFieldCollation" ]
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.*;
[ "org.apache.calcite" ]
org.apache.calcite;
1,113,782
public LogEventRequestType logAdhocQuery(AdhocQueryMessageType message, String direction, String _interface, String responseCommunityId) { LOG.debug("Entering AuditRepositoryLogger.logAdhocQuery(...)"); LogEventRequestType auditMsg = null; LogAdhocQueryRequestType logReqMsg = new LogAdhocQueryRequestType(); logReqMsg.setDirection(direction); logReqMsg.setInterface(_interface); logReqMsg.setMessage(message); auditMsg = dqAuditTransforms.transformDocQueryReq2AuditMsg(logReqMsg, responseCommunityId); LOG.debug("Exiting AuditRepositoryLogger.logAdhocQuery(...)"); return auditMsg; }
LogEventRequestType function(AdhocQueryMessageType message, String direction, String _interface, String responseCommunityId) { LOG.debug(STR); LogEventRequestType auditMsg = null; LogAdhocQueryRequestType logReqMsg = new LogAdhocQueryRequestType(); logReqMsg.setDirection(direction); logReqMsg.setInterface(_interface); logReqMsg.setMessage(message); auditMsg = dqAuditTransforms.transformDocQueryReq2AuditMsg(logReqMsg, responseCommunityId); LOG.debug(STR); return auditMsg; }
/** * This method will create the generic Audit Log Message from a document query request. * * @param message The Document Query Request message to be audit logged. * @param direction The direction this message is going (Inbound or Outbound) * @param _interface The interface this message is being received/sent on (Entity, Adapter, or Nhin) * @param responseCommunityId * @return A generic audit log message that can be passed to the Audit Repository */
This method will create the generic Audit Log Message from a document query request
logAdhocQuery
{ "repo_name": "sailajaa/CONNECT", "path": "Product/Production/Services/AuditRepositoryCore/src/main/java/gov/hhs/fha/nhinc/auditrepository/AuditRepositoryLogger.java", "license": "bsd-3-clause", "size": 53244 }
[ "gov.hhs.fha.nhinc.common.auditlog.AdhocQueryMessageType", "gov.hhs.fha.nhinc.common.auditlog.LogAdhocQueryRequestType", "gov.hhs.fha.nhinc.common.auditlog.LogEventRequestType" ]
import gov.hhs.fha.nhinc.common.auditlog.AdhocQueryMessageType; import gov.hhs.fha.nhinc.common.auditlog.LogAdhocQueryRequestType; import gov.hhs.fha.nhinc.common.auditlog.LogEventRequestType;
import gov.hhs.fha.nhinc.common.auditlog.*;
[ "gov.hhs.fha" ]
gov.hhs.fha;
1,494,311
public synchronized void add(String topic) { Objects.requireNonNull(topic, "topic cannot be null"); if (topics.put(topic, TOPIC_EXPIRY_NEEDS_UPDATE) == null) { requestUpdateForNewTopics(); } }
synchronized void function(String topic) { Objects.requireNonNull(topic, STR); if (topics.put(topic, TOPIC_EXPIRY_NEEDS_UPDATE) == null) { requestUpdateForNewTopics(); } }
/** * Add the topic to maintain in the metadata. If topic expiry is enabled, expiry time * will be reset on the next update. */
Add the topic to maintain in the metadata. If topic expiry is enabled, expiry time will be reset on the next update
add
{ "repo_name": "MyPureCloud/kafka", "path": "clients/src/main/java/org/apache/kafka/clients/Metadata.java", "license": "apache-2.0", "size": 15205 }
[ "java.util.Objects" ]
import java.util.Objects;
import java.util.*;
[ "java.util" ]
java.util;
1,845,978
public static boolean start(String classname) { // Determine the default agencyId String agencyId = AgencyConfig.getAgencyId(); // start the module return start(classname, agencyId); }
static boolean function(String classname) { String agencyId = AgencyConfig.getAgencyId(); return start(classname, agencyId); }
/** * Runs the named module in a separate thread using the default agencyId. * * @param classname * @return */
Runs the named module in a separate thread using the default agencyId
start
{ "repo_name": "TheTransitClock/transitime", "path": "transitclock/src/main/java/org/transitclock/modules/Module.java", "license": "gpl-3.0", "size": 3675 }
[ "org.transitclock.configData.AgencyConfig" ]
import org.transitclock.configData.AgencyConfig;
import org.transitclock.*;
[ "org.transitclock" ]
org.transitclock;
1,006,648
public PDColor getBorderColour() { return getColor(COSName.BC); }
PDColor function() { return getColor(COSName.BC); }
/** * This will retrieve the border color. * * @return the border color. */
This will retrieve the border color
getBorderColour
{ "repo_name": "kalaspuffar/pdfbox", "path": "pdfbox/src/main/java/org/apache/pdfbox/pdmodel/interactive/annotation/PDAppearanceCharacteristicsDictionary.java", "license": "apache-2.0", "size": 6218 }
[ "org.apache.pdfbox.cos.COSName", "org.apache.pdfbox.pdmodel.graphics.color.PDColor" ]
import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.pdmodel.graphics.color.PDColor;
import org.apache.pdfbox.cos.*; import org.apache.pdfbox.pdmodel.graphics.color.*;
[ "org.apache.pdfbox" ]
org.apache.pdfbox;
725,878
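A hedged round-trip sketch for the getter above: it assumes the PDFBox 2.x companion setter setBorderColour(PDColor) and the PDAppearanceCharacteristicsDictionary(COSDictionary) constructor, which should be verified against the PDFBox version in use.

import java.util.Arrays;

import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.pdmodel.graphics.color.PDColor;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceRGB;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceCharacteristicsDictionary;

public class BorderColourExample {
    public static void main(String[] args) {
        PDAppearanceCharacteristicsDictionary appearance =
                new PDAppearanceCharacteristicsDictionary(new COSDictionary());
        // Red border colour in the device RGB colour space.
        appearance.setBorderColour(new PDColor(new float[] {1f, 0f, 0f}, PDDeviceRGB.INSTANCE));
        PDColor border = appearance.getBorderColour();
        System.out.println(Arrays.toString(border.getComponents()));
    }
}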
// TODO (stephshi): rename to "traverse" when the old traverse method is no longer used public static void traverseEs6(AbstractCompiler compiler, Node root, Callback cb) { NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler)); t.traverse(root); }
static void function(AbstractCompiler compiler, Node root, Callback cb) { NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler)); t.traverse(root); }
/** * Traverses using the ES6SyntacticScopeCreator */
Traverses using the ES6SyntacticScopeCreator
traverseEs6
{ "repo_name": "pr4v33n/closure-compiler", "path": "src/com/google/javascript/jscomp/NodeTraversal.java", "license": "apache-2.0", "size": 25324 }
[ "com.google.javascript.rhino.Node" ]
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.*;
[ "com.google.javascript" ]
com.google.javascript;
466,401
public ArrayDeque<String> getHits() { return hitlist ; }
ArrayDeque<String> function() { return hitlist ; }
/** * This method returns the current list of probe results. * @return List of probe results */
This method returns the current list of probe results
getHits
{ "repo_name": "reddipped/csProbe", "path": "src/main/java/com/reddipped/csprobe/CSProbeHistory.java", "license": "mit", "size": 1392 }
[ "java.util.ArrayDeque" ]
import java.util.ArrayDeque;
import java.util.*;
[ "java.util" ]
java.util;
2,117,643
private void initGUI() { setLayout(new GridBagLayout()); GridBagConstraints gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 0; gbc.gridwidth = 2; gbc.weightx = 1.0; gbc.fill = GridBagConstraints.HORIZONTAL; gbc.insets = new Insets(5, 15, 0, 15); // add name label (including ID and dependencies) String threadLabel = createDisplayLabel(); JLabel nameLabel = new JLabel(threadLabel); add(nameLabel, gbc); gbc.gridy += 1; gbc.gridwidth = 1; gbc.fill = GridBagConstraints.HORIZONTAL; gbc.insets = new Insets(5, 15, 0, 5); if (pg.isIndeterminate()) { progressBar = new JProgressBar(); progressBar.setStringPainted(true); progressBar.setIndeterminate(true); } else { progressBar = new JProgressBar(0, pg.getDisplay().getTotal()); progressBar.setValue(pg.getDisplay().getCompleted()); } add(progressBar, gbc); gbc.gridx += 1; gbc.weightx = 0.0; gbc.weighty = 0.0; gbc.fill = GridBagConstraints.NONE; gbc.insets = new Insets(5, 5, 0, 15); JButton cancelButton = new JButton(new ResourceAction(true, "stop_progress") { private static final long serialVersionUID = 1L;
void function() { setLayout(new GridBagLayout()); GridBagConstraints gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 0; gbc.gridwidth = 2; gbc.weightx = 1.0; gbc.fill = GridBagConstraints.HORIZONTAL; gbc.insets = new Insets(5, 15, 0, 15); String threadLabel = createDisplayLabel(); JLabel nameLabel = new JLabel(threadLabel); add(nameLabel, gbc); gbc.gridy += 1; gbc.gridwidth = 1; gbc.fill = GridBagConstraints.HORIZONTAL; gbc.insets = new Insets(5, 15, 0, 5); if (pg.isIndeterminate()) { progressBar = new JProgressBar(); progressBar.setStringPainted(true); progressBar.setIndeterminate(true); } else { progressBar = new JProgressBar(0, pg.getDisplay().getTotal()); progressBar.setValue(pg.getDisplay().getCompleted()); } add(progressBar, gbc); gbc.gridx += 1; gbc.weightx = 0.0; gbc.weighty = 0.0; gbc.fill = GridBagConstraints.NONE; gbc.insets = new Insets(5, 5, 0, 15); JButton cancelButton = new JButton(new ResourceAction(true, STR) { private static final long serialVersionUID = 1L;
/** * Inits the GUI. */
Inits the GUI
initGUI
{ "repo_name": "brtonnies/rapidminer-studio", "path": "src/main/java/com/rapidminer/gui/tools/ProgressThreadDisplay.java", "license": "agpl-3.0", "size": 5903 }
[ "java.awt.GridBagConstraints", "java.awt.GridBagLayout", "java.awt.Insets", "javax.swing.JButton", "javax.swing.JLabel", "javax.swing.JProgressBar" ]
import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import javax.swing.JButton; import javax.swing.JLabel; import javax.swing.JProgressBar;
import java.awt.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
2,860,033
public ListIterator<AbstractInsnNode> iterator() { return iterator(0); }
ListIterator<AbstractInsnNode> function() { return iterator(0); }
/** * Returns an iterator over the instructions in this list. * * @return an iterator over the instructions in this list. */
Returns an iterator over the instructions in this list
iterator
{ "repo_name": "ClubObsidian/ObsidianEngine", "path": "src/com/clubobsidian/obsidianengine/asm/tree/InsnList.java", "license": "mit", "size": 19300 }
[ "java.util.ListIterator" ]
import java.util.ListIterator;
import java.util.*;
[ "java.util" ]
java.util;
2,659,521
private void cleanupPreviousContainerFiles(Path containerWorkDir) { // delete ContainerScriptPath deleteAsUser(new Path(containerWorkDir, CONTAINER_SCRIPT)); // delete TokensPath deleteAsUser(new Path(containerWorkDir, FINAL_CONTAINER_TOKENS_FILE)); }
void function(Path containerWorkDir) { deleteAsUser(new Path(containerWorkDir, CONTAINER_SCRIPT)); deleteAsUser(new Path(containerWorkDir, FINAL_CONTAINER_TOKENS_FILE)); }
/** * Clean up container's previous files for container relaunch. */
Clean up container's previous files for container relaunch
cleanupPreviousContainerFiles
{ "repo_name": "ChetnaChaudhari/hadoop", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java", "license": "apache-2.0", "size": 8573 }
[ "org.apache.hadoop.fs.Path" ]
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
1,880,909
private void drawValue(Canvas canvas) { valuePaint.setColor(VALUE_TEXT_COLOR); valuePaint.drawableState = getDrawableState(); Rect bounds = new Rect(); itemsLayout.getLineBounds(visibleItems / 2, bounds); // draw label if (labelLayout != null) { canvas.save(); canvas.translate(itemsLayout.getWidth() + LABEL_OFFSET, bounds.top); labelLayout.draw(canvas); canvas.restore(); } // draw current value if (valueLayout != null) { canvas.save(); canvas.translate(0, bounds.top + scrollingOffset); valueLayout.draw(canvas); canvas.restore(); } }
void function(Canvas canvas) { valuePaint.setColor(VALUE_TEXT_COLOR); valuePaint.drawableState = getDrawableState(); Rect bounds = new Rect(); itemsLayout.getLineBounds(visibleItems / 2, bounds); if (labelLayout != null) { canvas.save(); canvas.translate(itemsLayout.getWidth() + LABEL_OFFSET, bounds.top); labelLayout.draw(canvas); canvas.restore(); } if (valueLayout != null) { canvas.save(); canvas.translate(0, bounds.top + scrollingOffset); valueLayout.draw(canvas); canvas.restore(); } }
/** * Draws value and label layout. * * @param canvas the canvas for drawing */
Draws value and label layout
drawValue
{ "repo_name": "gizwits/Gizwits-SmartLights_Android", "path": "src/com/gizwits/framework/widget/WheelView.java", "license": "mit", "size": 31428 }
[ "android.graphics.Canvas", "android.graphics.Rect" ]
import android.graphics.Canvas; import android.graphics.Rect;
import android.graphics.*;
[ "android.graphics" ]
android.graphics;
2,331,704
int getNumStorageDirs(NameNodeDirType dirType) { if(dirType == null) { return getNumStorageDirs(); } Iterator<StorageDirectory> it = dirIterator(dirType); int numDirs = 0; for(; it.hasNext(); it.next()) { numDirs++; } return numDirs; }
int getNumStorageDirs(NameNodeDirType dirType) { if(dirType == null) { return getNumStorageDirs(); } Iterator<StorageDirectory> it = dirIterator(dirType); int numDirs = 0; for(; it.hasNext(); it.next()) { numDirs++; } return numDirs; }
/** * Return number of storage directories of the given type. * @param dirType directory type * @return number of storage directories of type dirType */
Return number of storage directories of the given type
getNumStorageDirs
{ "repo_name": "jaypatil/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java", "license": "gpl-3.0", "size": 39022 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
1,484,583
private void gobble(Iterator iter) { if (eatTheRest) { while (iter.hasNext()) { tokens.add(iter.next()); } } }
void function(Iterator iter) { if (eatTheRest) { while (iter.hasNext()) { tokens.add(iter.next()); } } }
/** * Adds the remaining tokens to the processed tokens list. * * @param iter An iterator over the remaining tokens */
Adds the remaining tokens to the processed tokens list
gobble
{ "repo_name": "Selventa/model-builder", "path": "tools/groovy/src/src/main/org/apache/commons/cli/GroovyInternalPosixParser.java", "license": "apache-2.0", "size": 9877 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
1,718,887
Bundle bundle = getBundle(bundleClass); return bundle != null ? bundle.getBundleContext() : null; }
Bundle bundle = getBundle(bundleClass); return bundle != null ? bundle.getBundleContext() : null; }
/** * Computes bundle context for the given bundle class. * Use with caution and only if componentContext is not available * * @param bundleClass bundle resource * @return bundle context or null if passed object reference is null */
Computes bundle context for the given bundle class. Use with caution and only if componentContext is not available
getBundleContext
{ "repo_name": "igor-sfdc/aura", "path": "aura-osgi-api/src/main/java/org/auraframework/ds/util/BundleUtil.java", "license": "apache-2.0", "size": 3457 }
[ "org.osgi.framework.Bundle" ]
import org.osgi.framework.Bundle;
import org.osgi.framework.*;
[ "org.osgi.framework" ]
org.osgi.framework;
1,760,699
protected int checkIncrement(final int firstOp, final ResultList results) throws OtpErlangException, UnknownException { results.processAddOnNrAt(firstOp); return 1; }
int function(final int firstOp, final ResultList results) throws OtpErlangException, UnknownException { results.processAddOnNrAt(firstOp); return 1; }
/** * Verifies the increment operation. * * @param firstOp the first operation to process inside the result list * @param results the result list * * @return <tt>1</tt> operation processed (the write) */
Verifies the increment operation
checkIncrement
{ "repo_name": "tectronics/scalaris", "path": "java-api/src/de/zib/scalaris/executor/ScalarisIncrementOp2.java", "license": "apache-2.0", "size": 3346 }
[ "com.ericsson.otp.erlang.OtpErlangException", "de.zib.scalaris.ResultList", "de.zib.scalaris.UnknownException" ]
import com.ericsson.otp.erlang.OtpErlangException; import de.zib.scalaris.ResultList; import de.zib.scalaris.UnknownException;
import com.ericsson.otp.erlang.*; import de.zib.scalaris.*;
[ "com.ericsson.otp", "de.zib.scalaris" ]
com.ericsson.otp; de.zib.scalaris;
600,686
protected boolean reloadScriptBody(final String scriptBody) { final Collection<ValidationResult> results = new HashSet<>(); try { return reloadScript(scriptBody); } catch (final Exception e) { final ComponentLog logger = getLogger(); final String message = "Unable to load script: " + e; logger.error(message, e); results.add(new ValidationResult.Builder() .subject("ScriptValidation") .valid(false) .explanation("Unable to load script due to " + e) .input(scriptingComponentHelper.getScriptPath()) .build()); } // store the updated validation results validationResults.set(results); // return whether there was any issues loading the configured script return results.isEmpty(); }
boolean function(final String scriptBody) { final Collection<ValidationResult> results = new HashSet<>(); try { return reloadScript(scriptBody); } catch (final Exception e) { final ComponentLog logger = getLogger(); final String message = STR + e; logger.error(message, e); results.add(new ValidationResult.Builder() .subject(STR) .valid(false) .explanation(STR + e) .input(scriptingComponentHelper.getScriptPath()) .build()); } validationResults.set(results); return results.isEmpty(); }
/** * Reloads the script defined by the given string * * @param scriptBody the contents of the script to be loaded * @return true if the script was loaded successfully; false otherwise */
Reloads the script defined by the given string
reloadScriptBody
{ "repo_name": "MikeThomsen/nifi", "path": "nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/script/AbstractScriptedControllerService.java", "license": "apache-2.0", "size": 8934 }
[ "java.util.Collection", "java.util.HashSet", "org.apache.nifi.components.ValidationResult", "org.apache.nifi.logging.ComponentLog" ]
import java.util.Collection; import java.util.HashSet; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.logging.ComponentLog;
import java.util.*; import org.apache.nifi.components.*; import org.apache.nifi.logging.*;
[ "java.util", "org.apache.nifi" ]
java.util; org.apache.nifi;
1,158,940
@Test public void testAccessParialUnauthorizedAddress() { ModelNode address = PathAddress.pathAddress(PathElement.pathElement(DEPLOYMENT,"test.war"), PathElement.pathElement(SUBSYSTEM, "Undertow")).toModelNode(); ModelNode authorizedAddress = PathAddress.pathAddress(PathElement.pathElement(DEPLOYMENT,"test.war")).toModelNode(); OperationContext context = new AuthorizationOperationContext(authorizedAddress.asString()); ModelNode operation = new ModelNode(); operation.get(OP).set(READ_RESOURCE_OPERATION); operation.get(OP_ADDR).set(address); AuthorizedAddress expResult = new AuthorizedAddress(PathAddress.pathAddress(PathElement.pathElement(DEPLOYMENT,"test.war"), PathElement.pathElement(SUBSYSTEM,"<hidden>")).toModelNode(), true); AuthorizedAddress result = AuthorizedAddress.authorizeAddress(context, operation); assertEquals(expResult, result); } private static class AuthorizationOperationContext implements OperationContext { private final String authorizedAddress; private AuthorizationOperationContext(String authorizedAddress) { this.authorizedAddress = authorizedAddress; }
void function() { ModelNode address = PathAddress.pathAddress(PathElement.pathElement(DEPLOYMENT,STR), PathElement.pathElement(SUBSYSTEM, STR)).toModelNode(); ModelNode authorizedAddress = PathAddress.pathAddress(PathElement.pathElement(DEPLOYMENT,STR)).toModelNode(); OperationContext context = new AuthorizationOperationContext(authorizedAddress.asString()); ModelNode operation = new ModelNode(); operation.get(OP).set(READ_RESOURCE_OPERATION); operation.get(OP_ADDR).set(address); AuthorizedAddress expResult = new AuthorizedAddress(PathAddress.pathAddress(PathElement.pathElement(DEPLOYMENT,STR), PathElement.pathElement(SUBSYSTEM,STR)).toModelNode(), true); AuthorizedAddress result = AuthorizedAddress.authorizeAddress(context, operation); assertEquals(expResult, result); } private static class AuthorizationOperationContext implements OperationContext { private final String authorizedAddress; private AuthorizationOperationContext(String authorizedAddress) { this.authorizedAddress = authorizedAddress; }
/** * Test of authorizeAddress method, of class AuthorizedAddress. */
Test of authorizeAddress method, of class AuthorizedAddress
testAccessParialUnauthorizedAddress
{ "repo_name": "yersan/wildfly-core", "path": "controller/src/test/java/org/jboss/as/controller/access/management/AuthorizedAddressTest.java", "license": "lgpl-2.1", "size": 24146 }
[ "org.jboss.as.controller.OperationContext", "org.jboss.as.controller.PathAddress", "org.jboss.as.controller.PathElement", "org.jboss.dmr.ModelNode", "org.junit.Assert" ]
import org.jboss.as.controller.OperationContext; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.PathElement; import org.jboss.dmr.ModelNode; import org.junit.Assert;
import org.jboss.as.controller.*; import org.jboss.dmr.*; import org.junit.*;
[ "org.jboss.as", "org.jboss.dmr", "org.junit" ]
org.jboss.as; org.jboss.dmr; org.junit;
203,378
public AccountDataBean getAccountData(String userID) throws Exception, RemoteException;
AccountDataBean function(String userID) throws Exception, RemoteException;
/** * Return an AccountDataBean object for userID describing the account * * @param userID the account userID to lookup * @return User account data in AccountDataBean */
Return an AccountDataBean object for userID describing the account
getAccountData
{ "repo_name": "meetdestiny/daytrader", "path": "javaee6/modules/web/src/main/java/org/apache/geronimo/daytrader/javaee6/core/api/TradeServices.java", "license": "apache-2.0", "size": 11268 }
[ "java.rmi.RemoteException", "org.apache.geronimo.daytrader.javaee6.entities.AccountDataBean" ]
import java.rmi.RemoteException; import org.apache.geronimo.daytrader.javaee6.entities.AccountDataBean;
import java.rmi.*; import org.apache.geronimo.daytrader.javaee6.entities.*;
[ "java.rmi", "org.apache.geronimo" ]
java.rmi; org.apache.geronimo;
300,391
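A hedged caller sketch for the TradeServices interface method above; the helper class, the tradeServices reference, and the userID value are assumptions for illustration, and only the declared signature from the record is relied on.

import java.rmi.RemoteException;

import org.apache.geronimo.daytrader.javaee6.core.api.TradeServices; // package taken from the record path
import org.apache.geronimo.daytrader.javaee6.entities.AccountDataBean;

public class AccountLookupSketch {
    // Looks up the account data for a user id; exceptions are propagated as declared by the interface.
    public static AccountDataBean lookup(TradeServices tradeServices, String userID)
            throws Exception, RemoteException {
        return tradeServices.getAccountData(userID);
    }
}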
protected AsynchronousCall unwrapPayload(PersistedMessageBO message) { if (message == null || message.getPayload() == null) { return null; } String encodedPayload = message.getPayload().getPayload(); if (StringUtils.isBlank(encodedPayload)) { return null; } Object decodedPayload = null; if (encodedPayload != null) { decodedPayload = SerializationUtils.deserializeFromBase64(encodedPayload); } // fail fast if its not the expected type of AsynchronousCall if ((decodedPayload != null) && !(decodedPayload instanceof AsynchronousCall)) { throw new IllegalArgumentException("PersistedMessageBO payload was not of the expected class. " + "Expected was [" + AsynchronousCall.class.getName() + "], actual was: [" + decodedPayload.getClass().getName() + "]"); } return (AsynchronousCall) decodedPayload; }
AsynchronousCall function(PersistedMessageBO message) { if (message == null || message.getPayload() == null) { return null; } String encodedPayload = message.getPayload().getPayload(); if (StringUtils.isBlank(encodedPayload)) { return null; } Object decodedPayload = null; if (encodedPayload != null) { decodedPayload = SerializationUtils.deserializeFromBase64(encodedPayload); } if ((decodedPayload != null) && !(decodedPayload instanceof AsynchronousCall)) { throw new IllegalArgumentException(STR + STR + AsynchronousCall.class.getName() + STR + decodedPayload.getClass().getName() + "]"); } return (AsynchronousCall) decodedPayload; }
/** * Extracts the payload from a PersistedMessageBO, attempts to convert it to the expected AsynchronousCall type, and * returns it. * * Throws an IllegalArgumentException if the decoded payload isn't of the expected type. * * @param message * The populated PersistedMessageBO object to extract the payload from. * @return Returns the payload if one is present and it can be deserialized, otherwise returns null. */
Extracts the payload from a PersistedMessageBO, attempts to convert it to the expected AsynchronousCall type, and returns it. Throws an IllegalArgumentException if the decoded payload isn't of the expected type
unwrapPayload
{ "repo_name": "ricepanda/rice-git3", "path": "rice-middleware/ksb/web/src/main/java/org/kuali/rice/ksb/messaging/web/MessageQueueAction.java", "license": "apache-2.0", "size": 19661 }
[ "org.apache.commons.lang.StringUtils", "org.kuali.rice.core.api.util.io.SerializationUtils", "org.kuali.rice.ksb.api.messaging.AsynchronousCall", "org.kuali.rice.ksb.messaging.PersistedMessageBO" ]
import org.apache.commons.lang.StringUtils; import org.kuali.rice.core.api.util.io.SerializationUtils; import org.kuali.rice.ksb.api.messaging.AsynchronousCall; import org.kuali.rice.ksb.messaging.PersistedMessageBO;
import org.apache.commons.lang.*; import org.kuali.rice.core.api.util.io.*; import org.kuali.rice.ksb.api.messaging.*; import org.kuali.rice.ksb.messaging.*;
[ "org.apache.commons", "org.kuali.rice" ]
org.apache.commons; org.kuali.rice;
2,614,074
public Number getValue(UserDataContainer udc) { return (Number) udc.getUserDatum(getKey()); }
Number function(UserDataContainer udc) { return (Number) udc.getUserDatum(getKey()); }
/** * Returns the decorated value as Number * @param udc the graph/vertex/edge * @return the value */
Returns the decorated value as Number
getValue
{ "repo_name": "markus1978/clickwatch", "path": "external/edu.uci.ics.jung/src/edu/uci/ics/jung/graph/decorators/NumericDecorator.java", "license": "apache-2.0", "size": 1567 }
[ "edu.uci.ics.jung.utils.UserDataContainer" ]
import edu.uci.ics.jung.utils.UserDataContainer;
import edu.uci.ics.jung.utils.*;
[ "edu.uci.ics" ]
edu.uci.ics;
1,121,894
protected boolean extractDates(CalendarEntry entry, JSONObject json) throws JSONException { boolean isAllDay = false; if (json.containsKey("allday")) { isAllDay = true; } if (json.containsKey(PARAM_START_AT) && json.containsKey(PARAM_END_AT)) { // New style ISO8601 based dates and times Object startAtO = json.get(PARAM_START_AT); Object endAtO = json.get(PARAM_END_AT); // Grab the details String startAt; String endAt; String timezoneName = null; if(startAtO instanceof JSONObject) { // "startAt": { "iso8601":"2011-...." } JSONObject startAtJSON = (JSONObject)startAtO; JSONObject endAtJSON = (JSONObject)endAtO; startAt = (String)startAtJSON.get(PARAM_ISO8601); endAt = (String)endAtJSON.get(PARAM_ISO8601); if(startAtJSON.containsKey(PARAM_TIMEZONE)) { timezoneName = (String)startAtJSON.get(PARAM_TIMEZONE); if(endAtJSON.containsKey(PARAM_TIMEZONE)) { String endTZ = (String)endAtJSON.get(PARAM_TIMEZONE); if(! endTZ.equals(timezoneName)) { throw new WebScriptException(Status.STATUS_BAD_REQUEST, "Timezones must match"); } } } } else { // "startAt": "2011-...." startAt = (String)json.get(PARAM_START_AT); endAt = (String)json.get(PARAM_END_AT); } if(json.containsKey(PARAM_TIMEZONE)) { timezoneName = (String)json.get(PARAM_TIMEZONE); } // Is this an all day event? if (json.containsKey("allday")) { // Store it as UTC midnight to midnight // Reset the time part to ensure that String utcMidnight = "T00:00:00Z"; startAt = startAt.substring(0, 10) + utcMidnight; endAt = endAt.substring(0, 10) + utcMidnight; entry.setStart(ISO8601DateFormat.parse(startAt)); entry.setEnd(ISO8601DateFormat.parse(endAt)); } else { // Regular event start and end rules // Do we have explicit timezone information? if (timezoneName != null) { // Get the specified timezone TimeZone tz = TimeZone.getTimeZone(timezoneName); // Grab the dates and times in the specified timezone entry.setStart(ISO8601DateFormat.parse(startAt, tz)); entry.setEnd(ISO8601DateFormat.parse(endAt, tz)); } else { // Offset info is either in the date, or we just have to guess entry.setStart(parseDate(startAt)); entry.setEnd(parseDate(endAt)); } } } else if (json.containsKey("allday")) { // Old style all-day event Date start = parseDate(getOrNull(json, "from")); Date end = parseDate(getOrNull(json, "to")); // Store it as UTC midnight to midnight // Reset the time part to ensure that String isoStartAt = ISO8601DateFormat.format(start); String isoEndAt = ISO8601DateFormat.format(end); String utcMidnight = "T00:00:00Z"; isoStartAt = isoStartAt.substring(0, 10) + utcMidnight; isoEndAt = isoEndAt.substring(0, 10) + utcMidnight; entry.setStart(ISO8601DateFormat.parse(isoStartAt)); entry.setEnd(ISO8601DateFormat.parse(isoEndAt)); } else { // Old style regular event entry.setStart(parseDate((String)json.get("from") + " " + (String)json.get("start"))); entry.setEnd(parseDate((String)json.get("to") + " " + (String)json.get("end"))); } return isAllDay; }
boolean function(CalendarEntry entry, JSONObject json) throws JSONException { boolean isAllDay = false; if (json.containsKey(STR)) { isAllDay = true; } if (json.containsKey(PARAM_START_AT) && json.containsKey(PARAM_END_AT)) { Object startAtO = json.get(PARAM_START_AT); Object endAtO = json.get(PARAM_END_AT); String startAt; String endAt; String timezoneName = null; if(startAtO instanceof JSONObject) { JSONObject startAtJSON = (JSONObject)startAtO; JSONObject endAtJSON = (JSONObject)endAtO; startAt = (String)startAtJSON.get(PARAM_ISO8601); endAt = (String)endAtJSON.get(PARAM_ISO8601); if(startAtJSON.containsKey(PARAM_TIMEZONE)) { timezoneName = (String)startAtJSON.get(PARAM_TIMEZONE); if(endAtJSON.containsKey(PARAM_TIMEZONE)) { String endTZ = (String)endAtJSON.get(PARAM_TIMEZONE); if(! endTZ.equals(timezoneName)) { throw new WebScriptException(Status.STATUS_BAD_REQUEST, STR); } } } } else { startAt = (String)json.get(PARAM_START_AT); endAt = (String)json.get(PARAM_END_AT); } if(json.containsKey(PARAM_TIMEZONE)) { timezoneName = (String)json.get(PARAM_TIMEZONE); } if (json.containsKey(STR)) { String utcMidnight = STR; startAt = startAt.substring(0, 10) + utcMidnight; endAt = endAt.substring(0, 10) + utcMidnight; entry.setStart(ISO8601DateFormat.parse(startAt)); entry.setEnd(ISO8601DateFormat.parse(endAt)); } else { if (timezoneName != null) { TimeZone tz = TimeZone.getTimeZone(timezoneName); entry.setStart(ISO8601DateFormat.parse(startAt, tz)); entry.setEnd(ISO8601DateFormat.parse(endAt, tz)); } else { entry.setStart(parseDate(startAt)); entry.setEnd(parseDate(endAt)); } } } else if (json.containsKey(STR)) { Date start = parseDate(getOrNull(json, "from")); Date end = parseDate(getOrNull(json, "to")); String isoStartAt = ISO8601DateFormat.format(start); String isoEndAt = ISO8601DateFormat.format(end); String utcMidnight = STR; isoStartAt = isoStartAt.substring(0, 10) + utcMidnight; isoEndAt = isoEndAt.substring(0, 10) + utcMidnight; entry.setStart(ISO8601DateFormat.parse(isoStartAt)); entry.setEnd(ISO8601DateFormat.parse(isoEndAt)); } else { entry.setStart(parseDate((String)json.get("from") + " " + (String)json.get("start"))); entry.setEnd(parseDate((String)json.get("to") + " " + (String)json.get("end"))); } return isAllDay; }
/** * Extracts the Start and End details, along with the All Day flag * from the JSON, and returns if the event is all day or not */
Extracts the Start and End details, along with the All Day flag from the JSON, and returns if the event is all day or not
extractDates
{ "repo_name": "deas/alfresco-community-edition", "path": "projects/remote-api/source/java/org/alfresco/repo/web/scripts/calendar/AbstractCalendarWebScript.java", "license": "lgpl-3.0", "size": 19360 }
[ "java.text.DateFormat", "java.util.Date", "java.util.TimeZone", "org.alfresco.service.cmr.calendar.CalendarEntry", "org.alfresco.util.ISO8601DateFormat", "org.json.JSONException", "org.json.simple.JSONObject", "org.springframework.extensions.webscripts.Status", "org.springframework.extensions.webscripts.WebScriptException" ]
import java.text.DateFormat; import java.util.Date; import java.util.TimeZone; import org.alfresco.service.cmr.calendar.CalendarEntry; import org.alfresco.util.ISO8601DateFormat; import org.json.JSONException; import org.json.simple.JSONObject; import org.springframework.extensions.webscripts.Status; import org.springframework.extensions.webscripts.WebScriptException;
import java.text.*; import java.util.*; import org.alfresco.service.cmr.calendar.*; import org.alfresco.util.*; import org.json.*; import org.json.simple.*; import org.springframework.extensions.webscripts.*;
[ "java.text", "java.util", "org.alfresco.service", "org.alfresco.util", "org.json", "org.json.simple", "org.springframework.extensions" ]
java.text; java.util; org.alfresco.service; org.alfresco.util; org.json; org.json.simple; org.springframework.extensions;
2,416,244
public boolean hasSetupWizardFilter() { return PluginServletFilter.hasFilter(FORCE_SETUP_WIZARD_FILTER); }
boolean function() { return PluginServletFilter.hasFilter(FORCE_SETUP_WIZARD_FILTER); }
/** * Returns whether the setup wizard filter is currently registered. * @since 2.94 */
Returns whether the setup wizard filter is currently registered
hasSetupWizardFilter
{ "repo_name": "aldaris/jenkins", "path": "core/src/main/java/jenkins/install/SetupWizard.java", "license": "mit", "size": 26693 }
[ "hudson.util.PluginServletFilter" ]
import hudson.util.PluginServletFilter;
import hudson.util.*;
[ "hudson.util" ]
hudson.util;
2,424,895
public boolean clearMetaKeyStates(int states) { final Editable content = getEditable(); if (content == null) return false; MetaKeyKeyListener.clearMetaKeyState(content, states); return true; }
boolean function(int states) { final Editable content = getEditable(); if (content == null) return false; MetaKeyKeyListener.clearMetaKeyState(content, states); return true; }
/** * Default implementation uses * {@link MetaKeyKeyListener#clearMetaKeyState(long, int) * MetaKeyKeyListener.clearMetaKeyState(long, int)} to clear the state. */
Default implementation uses <code>MetaKeyKeyListener#clearMetaKeyState(long, int) MetaKeyKeyListener.clearMetaKeyState(long, int)</code> to clear the state
clearMetaKeyStates
{ "repo_name": "syslover33/ctank", "path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/android/view/inputmethod/BaseInputConnection.java", "license": "gpl-3.0", "size": 22688 }
[ "android.text.Editable", "android.text.method.MetaKeyKeyListener" ]
import android.text.Editable; import android.text.method.MetaKeyKeyListener;
import android.text.*; import android.text.method.*;
[ "android.text" ]
android.text;
2,133,249
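An illustrative sketch of invoking the documented method through the android.view.inputmethod.InputConnection interface (which BaseInputConnection implements); the helper and the chosen meta-key mask are assumptions.

import android.view.KeyEvent;
import android.view.inputmethod.InputConnection;

public class ClearMetaStatesSketch {
    // Asks the connection (e.g. a BaseInputConnection) to clear latched shift and alt state.
    public static boolean clearShiftAndAlt(InputConnection ic) {
        return ic.clearMetaKeyStates(KeyEvent.META_SHIFT_ON | KeyEvent.META_ALT_ON);
    }
}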
public static <T extends CalculusFieldElement<T>> T log10(final T x) { return x.log10(); }
static <T extends CalculusFieldElement<T>> T function(final T x) { return x.log10(); }
/** Compute the base 10 logarithm. * @param x a number * @param <T> the type of the field element * @return log10(x) * @since 1.3 */
Compute the base 10 logarithm
log10
{ "repo_name": "sdinot/hipparchus", "path": "hipparchus-core/src/main/java/org/hipparchus/util/FastMath.java", "license": "apache-2.0", "size": 173587 }
[ "org.hipparchus.CalculusFieldElement" ]
import org.hipparchus.CalculusFieldElement;
import org.hipparchus.*;
[ "org.hipparchus" ]
org.hipparchus;
1,221,307
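A small sketch of the generic signature above: a pass-through helper that works for any CalculusFieldElement implementation the caller supplies. The helper class and method name are assumptions; the call itself matches the record.

import org.hipparchus.CalculusFieldElement;
import org.hipparchus.util.FastMath;

public class FieldLog10Sketch {
    // Delegates to FastMath.log10, which in turn calls x.log10() as shown in the record.
    public static <T extends CalculusFieldElement<T>> T base10Log(T x) {
        return FastMath.log10(x);
    }
}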
@Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Seq<T1> s1, Seq<T2> s2, Seq<T3> s3, Seq<T4> s4, Seq<T5> s5, Seq<T6> s6, Seq<T7> s7, Seq<T8> s8, Seq<T9> s9, Seq<T10> s10, Seq<T11> s11, Seq<T12> s12, Seq<T13> s13) { List<Tuple12<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12))); }
@Generated(STR) static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Seq<T1> s1, Seq<T2> s2, Seq<T3> s3, Seq<T4> s4, Seq<T5> s5, Seq<T6> s6, Seq<T7> s7, Seq<T8> s8, Seq<T9> s9, Seq<T10> s10, Seq<T11> s11, Seq<T12> s12, Seq<T13> s13) { List<Tuple12<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12))); }
/** * Cross join 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */
Cross join 13 streams into one. <code><code> (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) Seq.of(1, 2).crossJoin(Seq.of("a", "b")) </code></code>
crossJoin
{ "repo_name": "stephenh/jOOL", "path": "src/main/java/org/jooq/lambda/Seq.java", "license": "apache-2.0", "size": 198501 }
[ "java.util.List", "javax.annotation.Generated", "org.jooq.lambda.tuple.Tuple", "org.jooq.lambda.tuple.Tuple12", "org.jooq.lambda.tuple.Tuple13" ]
import java.util.List; import javax.annotation.Generated; import org.jooq.lambda.tuple.Tuple; import org.jooq.lambda.tuple.Tuple12; import org.jooq.lambda.tuple.Tuple13;
import java.util.*; import javax.annotation.*; import org.jooq.lambda.tuple.*;
[ "java.util", "javax.annotation", "org.jooq.lambda" ]
java.util; javax.annotation; org.jooq.lambda;
424,315
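A hedged sketch of the static cross join at a smaller arity. It assumes a three-stream overload exists alongside the generated 13-stream version shown above, and that Tuple3 lives in org.jooq.lambda.tuple; both are assumptions by analogy with the record.

import org.jooq.lambda.Seq;
import org.jooq.lambda.tuple.Tuple3;

public class CrossJoinSketch {
    // Produces every combination of the three inputs, e.g. (1, "a", true), (1, "a", false), ...
    public static Seq<Tuple3<Integer, String, Boolean>> combos() {
        return Seq.crossJoin(Seq.of(1, 2), Seq.of("a", "b"), Seq.of(true, false));
    }
}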
@Nullable public State waitToFinish( long timeToWait, TimeUnit timeUnit, MonitoringUtil.JobMessagesHandler messageHandler) throws IOException, InterruptedException { return waitToFinish(timeToWait, timeUnit, messageHandler, Sleeper.DEFAULT, NanoClock.SYSTEM); }
State function( long timeToWait, TimeUnit timeUnit, MonitoringUtil.JobMessagesHandler messageHandler) throws IOException, InterruptedException { return waitToFinish(timeToWait, timeUnit, messageHandler, Sleeper.DEFAULT, NanoClock.SYSTEM); }
/** * Waits for the job to finish and return the final status. * * @param timeToWait The time to wait in units timeUnit for the job to finish. * Provide a value less than 1 ms for an infinite wait. * @param timeUnit The unit of time for timeToWait. * @param messageHandler If non null this handler will be invoked for each * batch of messages received. * @return The final state of the job or null on timeout or if the * thread is interrupted. * @throws IOException If there is a persistent problem getting job * information. * @throws InterruptedException */
Waits for the job to finish and return the final status
waitToFinish
{ "repo_name": "elibixby/DataflowJavaSDK", "path": "sdk/src/main/java/com/google/cloud/dataflow/sdk/runners/DataflowPipelineJob.java", "license": "apache-2.0", "size": 13591 }
[ "com.google.api.client.util.NanoClock", "com.google.api.client.util.Sleeper", "com.google.cloud.dataflow.sdk.util.MonitoringUtil", "java.io.IOException", "java.util.concurrent.TimeUnit" ]
import com.google.api.client.util.NanoClock; import com.google.api.client.util.Sleeper; import com.google.cloud.dataflow.sdk.util.MonitoringUtil; import java.io.IOException; import java.util.concurrent.TimeUnit;
import com.google.api.client.util.*; import com.google.cloud.dataflow.sdk.util.*; import java.io.*; import java.util.concurrent.*;
[ "com.google.api", "com.google.cloud", "java.io", "java.util" ]
com.google.api; com.google.cloud; java.io; java.util;
341,297
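A usage sketch for the blocking wait documented above; the job and message handler instances come from the caller, and the 30-minute bound is an arbitrary illustration. Only the signature shown in the record is assumed.

import java.io.IOException;
import java.util.concurrent.TimeUnit;

import com.google.cloud.dataflow.sdk.runners.DataflowPipelineJob;
import com.google.cloud.dataflow.sdk.util.MonitoringUtil;

public class WaitToFinishSketch {
    // Returns true if the job reached a final state within 30 minutes; a null result means timeout or interrupt.
    public static boolean finishedWithinHalfHour(DataflowPipelineJob job,
                                                 MonitoringUtil.JobMessagesHandler handler)
            throws IOException, InterruptedException {
        return job.waitToFinish(30, TimeUnit.MINUTES, handler) != null;
    }
}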
public void startBrokerStartedDetection(int port, ScheduledExecutorService scheduler) { this.port = port; this.scheduler = scheduler; this.startTime = System.currentTimeMillis(); this.schedule = null; servicePing(); }
void function(int port, ScheduledExecutorService scheduler) { this.port = port; this.scheduler = scheduler; this.startTime = System.currentTimeMillis(); this.schedule = null; servicePing(); }
/** * Starts detection of whether the broker server is reachable * * @param port The MQTT server port * @param scheduler A scheduler */
Starts detection of whether the broker server is reachable
startBrokerStartedDetection
{ "repo_name": "clinique/openhab2", "path": "bundles/org.openhab.io.mqttembeddedbroker/src/main/java/org/openhab/io/mqttembeddedbroker/internal/MqttEmbeddedBrokerDetectStart.java", "license": "epl-1.0", "size": 3703 }
[ "java.util.concurrent.ScheduledExecutorService" ]
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
605,532
@Test public void testUnsuccessfulInstantiation_ByKey() { when(runtimeServiceMock.startProcessInstanceById(eq(MockProvider.EXAMPLE_PROCESS_DEFINITION_ID), anyString(), anyString(), Matchers.<Map<String, Object>>any())) .thenThrow(new ProcessEngineException("expected exception")); given().pathParam("key", MockProvider.EXAMPLE_PROCESS_DEFINITION_KEY) .contentType(POST_JSON_CONTENT_TYPE).body(EMPTY_JSON_OBJECT) .then().expect() .statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()).contentType(ContentType.JSON) .body("type", equalTo(RestException.class.getSimpleName())) .body("message", containsString("Cannot instantiate process definition")) .when().post(START_PROCESS_INSTANCE_BY_KEY_URL); }
void function() { when(runtimeServiceMock.startProcessInstanceById(eq(MockProvider.EXAMPLE_PROCESS_DEFINITION_ID), anyString(), anyString(), Matchers.<Map<String, Object>>any())) .thenThrow(new ProcessEngineException(STR)); given().pathParam("key", MockProvider.EXAMPLE_PROCESS_DEFINITION_KEY) .contentType(POST_JSON_CONTENT_TYPE).body(EMPTY_JSON_OBJECT) .then().expect() .statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()).contentType(ContentType.JSON) .body("type", equalTo(RestException.class.getSimpleName())) .body(STR, containsString(STR)) .when().post(START_PROCESS_INSTANCE_BY_KEY_URL); }
/** * {@link RuntimeService#startProcessInstanceById(String, Map)} throws a {@link ProcessEngineException} if a definition with the given id does not exist. */
<code>RuntimeService#startProcessInstanceById(String, Map)</code> throws a <code>ProcessEngineException</code> if a definition with the given id does not exist
testUnsuccessfulInstantiation_ByKey
{ "repo_name": "skjolber/camunda-bpm-platform", "path": "engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/ProcessDefinitionRestServiceInteractionTest.java", "license": "apache-2.0", "size": 142952 }
[ "com.jayway.restassured.RestAssured", "com.jayway.restassured.http.ContentType", "java.util.Map", "javax.ws.rs.core.Response", "org.camunda.bpm.engine.ProcessEngineException", "org.camunda.bpm.engine.rest.exception.RestException", "org.camunda.bpm.engine.rest.helper.MockProvider", "org.hamcrest.Matchers", "org.mockito.Matchers", "org.mockito.Mockito" ]
import com.jayway.restassured.RestAssured; import com.jayway.restassured.http.ContentType; import java.util.Map; import javax.ws.rs.core.Response; import org.camunda.bpm.engine.ProcessEngineException; import org.camunda.bpm.engine.rest.exception.RestException; import org.camunda.bpm.engine.rest.helper.MockProvider; import org.hamcrest.Matchers; import org.mockito.Matchers; import org.mockito.Mockito;
import com.jayway.restassured.*; import com.jayway.restassured.http.*; import java.util.*; import javax.ws.rs.core.*; import org.camunda.bpm.engine.*; import org.camunda.bpm.engine.rest.exception.*; import org.camunda.bpm.engine.rest.helper.*; import org.hamcrest.*; import org.mockito.*;
[ "com.jayway.restassured", "java.util", "javax.ws", "org.camunda.bpm", "org.hamcrest", "org.mockito" ]
com.jayway.restassured; java.util; javax.ws; org.camunda.bpm; org.hamcrest; org.mockito;
1,675,600
@Test public void testLongToShortArray() { assertArrayEquals( new short[]{}, Conversion.longToShortArray(0x0000000000000000L, 0, new short[]{}, 0, 0)); assertArrayEquals( new short[]{}, Conversion.longToShortArray(0x0000000000000000L, 100, new short[]{}, 0, 0)); assertArrayEquals( new short[]{}, Conversion.longToShortArray(0x0000000000000000L, 0, new short[]{}, 100, 0)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xFFFF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 0)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0xFFFF, (short)0xFFFF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 1)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0x90AB, (short)0xFFFF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 2)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0x90AB, (short)0x5678, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 3)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0x90AB, (short)0x5678, (short)0x1234}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 4)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xCDEF, (short)0x90AB, (short)0x5678}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 1, 3)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xCDEF, (short)0x90AB}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 2, 2)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xCDEF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xFFFF, (short)0xCDEF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 3, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xE6F7, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 1, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xF37B, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 2, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0x79BD, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 3, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xBCDE, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 4, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xE6F7, (short)0x4855, (short)0x2B3C, (short)0x091A}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 1, new short[]{-1, -1, -1, -1}, 0, 4)); assertArrayEquals( new short[]{(short)0x2B3C}, Conversion.longToShortArray(0x1234567890ABCDEFL, 33, new short[]{0}, 0, 1)); }
void function() { assertArrayEquals( new short[]{}, Conversion.longToShortArray(0x0000000000000000L, 0, new short[]{}, 0, 0)); assertArrayEquals( new short[]{}, Conversion.longToShortArray(0x0000000000000000L, 100, new short[]{}, 0, 0)); assertArrayEquals( new short[]{}, Conversion.longToShortArray(0x0000000000000000L, 0, new short[]{}, 100, 0)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xFFFF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 0)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0xFFFF, (short)0xFFFF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 1)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0x90AB, (short)0xFFFF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 2)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0x90AB, (short)0x5678, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 3)); assertArrayEquals( new short[]{(short)0xCDEF, (short)0x90AB, (short)0x5678, (short)0x1234}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 0, 4)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xCDEF, (short)0x90AB, (short)0x5678}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 1, 3)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xCDEF, (short)0x90AB}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 2, 2)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xCDEF, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xFFFF, (short)0xCDEF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 0, new short[]{-1, -1, -1, -1}, 3, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xE6F7, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 1, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xF37B, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 2, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0x79BD, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 3, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xFFFF, (short)0xFFFF, (short)0xBCDE, (short)0xFFFF}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 4, new short[]{-1, -1, -1, -1}, 2, 1)); assertArrayEquals( new short[]{(short)0xE6F7, (short)0x4855, (short)0x2B3C, (short)0x091A}, Conversion.longToShortArray( 0x1234567890ABCDEFL, 1, new short[]{-1, -1, -1, -1}, 0, 4)); assertArrayEquals( new short[]{(short)0x2B3C}, Conversion.longToShortArray(0x1234567890ABCDEFL, 33, new short[]{0}, 0, 1)); }
/** * Tests {@link Conversion#longToShortArray(long, int, short[], int, int)}. */
Tests <code>Conversion#longToShortArray(long, int, short[], int, int)</code>
testLongToShortArray
{ "repo_name": "MuShiiii/commons-lang", "path": "src/test/java/org/apache/commons/lang3/ConversionTest.java", "license": "apache-2.0", "size": 100581 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
1,676,852
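A standalone sketch mirroring one assertion from the test above (commons-lang3's Conversion); the class name and the output formatting are illustrative.

import org.apache.commons.lang3.Conversion;

public class LongToShortArraySketch {
    public static void main(String[] args) {
        short[] dst = new short[4];
        // Decompose the long starting at bit 0 into four shorts, least significant short first.
        Conversion.longToShortArray(0x1234567890ABCDEFL, 0, dst, 0, 4);
        // Expected, per the test: {0xCDEF, 0x90AB, 0x5678, 0x1234}
        for (short s : dst) {
            System.out.printf("0x%04X%n", s & 0xFFFF); // mask to avoid sign extension in the printout
        }
    }
}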
@NotNull default TfvcCheckoutResult checkoutForEdit( @Nullable Project project, @NotNull ServerContext serverContext, @NotNull List<Path> filePaths, boolean recursive) { try { return checkoutForEditAsync(project, serverContext, filePaths, recursive).toCompletableFuture().get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } }
default TfvcCheckoutResult checkoutForEdit( @Nullable Project project, @NotNull ServerContext serverContext, @NotNull List<Path> filePaths, boolean recursive) { try { return checkoutForEditAsync(project, serverContext, filePaths, recursive).toCompletableFuture().get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } }
/** * Performs asynchronous checkout of passed files for edit. * * @param serverContext a server context to extract the authorization information from. * @param filePaths list of file paths to checkout. * @param recursive whether the operation should be recursive. * @return the checkout result. */
Performs asynchronous checkout of passed files for edit
checkoutForEdit
{ "repo_name": "Microsoft/vso-intellij", "path": "plugin/src/com/microsoft/alm/plugin/idea/tfvc/core/TfvcClient.java", "license": "mit", "size": 16454 }
[ "com.intellij.openapi.project.Project", "com.microsoft.alm.plugin.context.ServerContext", "com.microsoft.tfs.model.connector.TfvcCheckoutResult", "java.nio.file.Path", "java.util.List", "java.util.concurrent.ExecutionException", "org.jetbrains.annotations.NotNull", "org.jetbrains.annotations.Nullable" ]
import com.intellij.openapi.project.Project; import com.microsoft.alm.plugin.context.ServerContext; import com.microsoft.tfs.model.connector.TfvcCheckoutResult; import java.nio.file.Path; import java.util.List; import java.util.concurrent.ExecutionException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable;
import com.intellij.openapi.project.*; import com.microsoft.alm.plugin.context.*; import com.microsoft.tfs.model.connector.*; import java.nio.file.*; import java.util.*; import java.util.concurrent.*; import org.jetbrains.annotations.*;
[ "com.intellij.openapi", "com.microsoft.alm", "com.microsoft.tfs", "java.nio", "java.util", "org.jetbrains.annotations" ]
com.intellij.openapi; com.microsoft.alm; com.microsoft.tfs; java.nio; java.util; org.jetbrains.annotations;
287,835
public CQLSSTableWriter addRow(Object... values) throws InvalidRequestException, IOException { return addRow(Arrays.asList(values)); }
CQLSSTableWriter function(Object... values) throws InvalidRequestException, IOException { return addRow(Arrays.asList(values)); }
/** * Adds a new row to the writer. * <p> * This is a shortcut for {@code addRow(Arrays.asList(values))}. * * @param values the row values (corresponding to the bind variables of the * insertion statement used when creating by this writer). * @return this writer. */
Adds a new row to the writer. This is a shortcut for addRow(Arrays.asList(values))
addRow
{ "repo_name": "thobbs/cassandra", "path": "src/java/org/apache/cassandra/io/sstable/CQLSSTableWriter.java", "license": "apache-2.0", "size": 24744 }
[ "java.io.IOException", "java.util.Arrays", "org.apache.cassandra.exceptions.InvalidRequestException" ]
import java.io.IOException; import java.util.Arrays; import org.apache.cassandra.exceptions.InvalidRequestException;
import java.io.*; import java.util.*; import org.apache.cassandra.exceptions.*;
[ "java.io", "java.util", "org.apache.cassandra" ]
java.io; java.util; org.apache.cassandra;
1,956,373
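A hedged sketch of building a writer and adding a row via the varargs shortcut documented above. The schema, output directory, and insert statement are illustrative assumptions; the builder chain follows the commonly documented CQLSSTableWriter pattern rather than anything in the record.

import java.io.IOException;

import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.io.sstable.CQLSSTableWriter;

public class AddRowSketch {
    // Builds a writer for a toy table and adds one row with addRow(Object...).
    public static void writeOneRow(String outputDir) throws IOException, InvalidRequestException {
        String schema = "CREATE TABLE ks.users (id int PRIMARY KEY, name text)";
        String insert = "INSERT INTO ks.users (id, name) VALUES (?, ?)";
        CQLSSTableWriter writer = CQLSSTableWriter.builder()
                .inDirectory(outputDir)   // target directory for the generated SSTables
                .forTable(schema)
                .using(insert)
                .build();
        writer.addRow(1, "alice");        // equivalent to addRow(Arrays.asList(1, "alice"))
        writer.close();
    }
}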
public void cleanup(LifecycleBean lifecycleBean) throws IgniteCheckedException { cleanupGeneric(lifecycleBean); }
void function(LifecycleBean lifecycleBean) throws IgniteCheckedException { cleanupGeneric(lifecycleBean); }
/** * Cleans up resources from given lifecycle beans. Essentially, this * method injects {@code null}s into lifecycle bean. * * @param lifecycleBean Lifecycle bean. * @throws IgniteCheckedException Thrown in case of any errors. */
Cleans up resources from given lifecycle beans. Essentially, this method injects nulls into lifecycle bean
cleanup
{ "repo_name": "samaitra/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/resource/GridResourceProcessor.java", "license": "apache-2.0", "size": 20556 }
[ "org.apache.ignite.IgniteCheckedException", "org.apache.ignite.lifecycle.LifecycleBean" ]
import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.lifecycle.LifecycleBean;
import org.apache.ignite.*; import org.apache.ignite.lifecycle.*;
[ "org.apache.ignite" ]
org.apache.ignite;
1,898,341
public ValidatorBuilder withJava(Class<? extends Validator> clazz) { resetType(); this.clazz = clazz; return this; } /** * Set the Java Bean name to be used for custom {@code Validator}. * @see CustomValidatorDefinition * * @param ref bean name for the custom {@code Validator}
ValidatorBuilder function(Class<? extends Validator> clazz) { resetType(); this.clazz = clazz; return this; } /** * Set the Java Bean name to be used for custom {@code Validator}. * @see CustomValidatorDefinition * * @param ref bean name for the custom {@code Validator}
/** * Set the Java {@code Class} that represents a custom {@code Validator} implementation class. * @see CustomValidatorDefinition * * @param clazz {@code Class} object that represents the custom validator implementation */
Set the Java Class that represents a custom Validator implementation class
withJava
{ "repo_name": "Fabryprog/camel", "path": "core/camel-core/src/main/java/org/apache/camel/builder/ValidatorBuilder.java", "license": "apache-2.0", "size": 5914 }
[ "org.apache.camel.model.validator.CustomValidatorDefinition", "org.apache.camel.spi.Validator" ]
import org.apache.camel.model.validator.CustomValidatorDefinition; import org.apache.camel.spi.Validator;
import org.apache.camel.model.validator.*; import org.apache.camel.spi.*;
[ "org.apache.camel" ]
org.apache.camel;
1,823,950
public void setPremiumCurrency(Currency premiumCurrency) { this._premiumCurrency = premiumCurrency; }
void function(Currency premiumCurrency) { this._premiumCurrency = premiumCurrency; }
/** * Sets currency of payment at time of purchase, null if not known. * @param premiumCurrency the new value of the property */
Sets currency of payment at time of purchase, null if not known
setPremiumCurrency
{ "repo_name": "McLeodMoores/starling", "path": "projects/core/src/main/java/com/opengamma/core/position/impl/SimpleTrade.java", "license": "apache-2.0", "size": 28757 }
[ "com.opengamma.util.money.Currency" ]
import com.opengamma.util.money.Currency;
import com.opengamma.util.money.*;
[ "com.opengamma.util" ]
com.opengamma.util;
1,257,378
public boolean containsDs(String dsName) throws IOException { for (Datasource datasource : datasources) { if (datasource.getDsName().equals(dsName)) { return true; } } return false; }
boolean function(String dsName) throws IOException { for (Datasource datasource : datasources) { if (datasource.getDsName().equals(dsName)) { return true; } } return false; }
/** * Checks presence of a specific datasource. * * @param dsName Datasource name to check * @return <code>true</code> if datasource is present in this RRD, <code>false</code> otherwise * @throws IOException Thrown in case of I/O error. */
Checks presence of a specific datasource
containsDs
{ "repo_name": "OpenNMS/jrobin", "path": "src/main/java/org/jrobin/core/RrdDb.java", "license": "lgpl-2.1", "size": 41918 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
16,535
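A hedged sketch of probing an RRD file with the method above; the read-only constructor form and the helper are assumptions based on the JRobin API, while containsDs is taken from the record.

import java.io.IOException;

import org.jrobin.core.RrdDb;
import org.jrobin.core.RrdException;

public class ContainsDsSketch {
    // Opens an existing RRD read-only and checks whether a named datasource is present.
    public static boolean hasDatasource(String rrdPath, String dsName) throws IOException, RrdException {
        RrdDb rrd = new RrdDb(rrdPath, true); // read-only open; this constructor form is an assumption
        try {
            return rrd.containsDs(dsName);
        } finally {
            rrd.close();
        }
    }
}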
public String[] getTableNames(Pattern pattern) throws KeeperException, IOException { List<String> results = Collections.emptyList(); List<String> tableNames = ZKUtil.listChildrenNoWatch(zkw, this.tablesZNode); if (tableNames != null && !tableNames.isEmpty()) { results = new ArrayList<String>(); for (String tableName : tableNames) { if (pattern.matcher(tableName).matches()) { byte[] data = ZKUtil.getData(this.zkw, getTableDescZNode(tableName)); if (data != null) { results.add(tableName); } } } } return results.toArray(new String[0]); }
String[] function(Pattern pattern) throws KeeperException, IOException { List<String> results = Collections.emptyList(); List<String> tableNames = ZKUtil.listChildrenNoWatch(zkw, this.tablesZNode); if (tableNames != null && !tableNames.isEmpty()) { results = new ArrayList<String>(); for (String tableName : tableNames) { if (pattern.matcher(tableName).matches()) { byte[] data = ZKUtil.getData(this.zkw, getTableDescZNode(tableName)); if (data != null) { results.add(tableName); } } } } return results.toArray(new String[0]); }
/** * Gets the table names of all the cross site tables with the pattern. * * @param pattern * @return * @throws KeeperException * @throws IOException */
Gets the table names of all the cross site tables with the pattern
getTableNames
{ "repo_name": "intel-hadoop/CSBT", "path": "csbt-client/src/main/java/org/apache/hadoop/hbase/crosssite/CrossSiteZNodes.java", "license": "apache-2.0", "size": 33331 }
[ "java.io.IOException", "java.util.ArrayList", "java.util.Collections", "java.util.List", "java.util.regex.Pattern", "org.apache.hadoop.hbase.zookeeper.ZKUtil", "org.apache.zookeeper.KeeperException" ]
import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.regex.Pattern; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.zookeeper.KeeperException;
import java.io.*; import java.util.*; import java.util.regex.*; import org.apache.hadoop.hbase.zookeeper.*; import org.apache.zookeeper.*;
[ "java.io", "java.util", "org.apache.hadoop", "org.apache.zookeeper" ]
java.io; java.util; org.apache.hadoop; org.apache.zookeeper;
144,313
@Test public void testRestore() throws Exception { final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet()); final DummyFlinkKafkaConsumer<String> consumerFunction = new DummyFlinkKafkaConsumer<>(TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED); StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator = new StreamSource<>(consumerFunction); final AbstractStreamOperatorTestHarness<String> testHarness = new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0); testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime); testHarness.setup(); // restore state from binary snapshot file testHarness.initializeState( OperatorSnapshotUtil.getResourceFilename( "kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot")); testHarness.open(); // assert that there are partitions and is identical to expected list assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets() != null); assertTrue(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty()); // on restore, subscribedPartitionsToStartOffsets should be identical to the restored state assertEquals(PARTITION_STATE, consumerFunction.getSubscribedPartitionsToStartOffsets()); // assert that state is correctly restored from legacy checkpoint assertTrue(consumerFunction.getRestoredState() != null); assertEquals(PARTITION_STATE, consumerFunction.getRestoredState()); consumerOperator.close(); consumerOperator.cancel(); }
void function() throws Exception { final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet()); final DummyFlinkKafkaConsumer<String> consumerFunction = new DummyFlinkKafkaConsumer<>(TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED); StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator = new StreamSource<>(consumerFunction); final AbstractStreamOperatorTestHarness<String> testHarness = new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0); testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime); testHarness.setup(); testHarness.initializeState( OperatorSnapshotUtil.getResourceFilename( STR + testMigrateVersion + STR)); testHarness.open(); assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets() != null); assertTrue(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty()); assertEquals(PARTITION_STATE, consumerFunction.getSubscribedPartitionsToStartOffsets()); assertTrue(consumerFunction.getRestoredState() != null); assertEquals(PARTITION_STATE, consumerFunction.getRestoredState()); consumerOperator.close(); consumerOperator.cancel(); }
/** * Test restoring from a non-empty state taken using a previous Flink version, when some partitions could be * found for topics. */
Test restoring from a non-empty state taken using a previous Flink version, when some partitions could be found for topics
testRestore
{ "repo_name": "bowenli86/flink", "path": "flink-connectors/flink-connector-kafka-base/src/test/java/org/apache/flink/streaming/connectors/kafka/FlinkKafkaConsumerBaseMigrationTest.java", "license": "apache-2.0", "size": 16777 }
[ "java.util.ArrayList", "java.util.List", "org.apache.flink.streaming.api.TimeCharacteristic", "org.apache.flink.streaming.api.operators.StreamSource", "org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition", "org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness", "org.apache.flink.streaming.util.OperatorSnapshotUtil", "org.junit.Assert" ]
import java.util.ArrayList; import java.util.List; import org.apache.flink.streaming.api.TimeCharacteristic; import org.apache.flink.streaming.api.operators.StreamSource; import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; import org.apache.flink.streaming.util.OperatorSnapshotUtil; import org.junit.Assert;
import java.util.*; import org.apache.flink.streaming.api.*; import org.apache.flink.streaming.api.operators.*; import org.apache.flink.streaming.connectors.kafka.internals.*; import org.apache.flink.streaming.util.*; import org.junit.*;
[ "java.util", "org.apache.flink", "org.junit" ]
java.util; org.apache.flink; org.junit;
2,038,281
public SchemaGrammar parseSchema(XMLInputSource is, XSDDescription desc, Map<String, XMLSchemaLoader.LocationArray> locationPairs) throws IOException { fLocationPairs = locationPairs; fSchemaParser.resetNodePool(); SchemaGrammar grammar = null; String schemaNamespace = null; short referType = desc.getContextType(); // if loading using JAXP schemaSource property, or using grammar caching loadGrammar // the desc.targetNamespace is always null. // Therefore we should not attempt to find out if // the schema is already in the bucket, since in the case we have // no namespace schema in the bucket, findGrammar will always return the // no namespace schema. if (referType != XSDDescription.CONTEXT_PREPARSE){ // first try to find it in the bucket/pool, return if one is found if (fHonourAllSchemaLocations && referType == XSDDescription.CONTEXT_IMPORT && isExistingGrammar(desc, fNamespaceGrowth)) { grammar = fGrammarBucket.getGrammar(desc.getTargetNamespace()); } else { grammar = findGrammar(desc, fNamespaceGrowth); } if (grammar != null) { if (!fNamespaceGrowth) { return grammar; } else { try { if (grammar.getDocumentLocations().contains(XMLEntityManager.expandSystemId(is.getSystemId(), is.getBaseSystemId(), false))) { return grammar; } } catch (MalformedURIException e) { //REVISIT: return the grammar? } } } schemaNamespace = desc.getTargetNamespace(); // handle empty string URI as null if (schemaNamespace != null) { schemaNamespace = fSymbolTable.addSymbol(schemaNamespace); } } // before parsing a schema, need to clear registries associated with // parsing schemas prepareForParse(); Element schemaRoot = null; // first phase: construct trees. if (is instanceof DOMInputSource) { schemaRoot = getSchemaDocument(schemaNamespace, (DOMInputSource) is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } // DOMInputSource else if (is instanceof SAXInputSource) { schemaRoot = getSchemaDocument(schemaNamespace, (SAXInputSource) is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } // SAXInputSource else if (is instanceof StAXInputSource) { schemaRoot = getSchemaDocument(schemaNamespace, (StAXInputSource) is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } // StAXInputSource else if (is instanceof XSInputSource) { schemaRoot = getSchemaDocument((XSInputSource) is, desc); } // XSInputSource else { schemaRoot = getSchemaDocument(schemaNamespace, is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } //is instanceof XMLInputSource if (schemaRoot == null) { // something went wrong right off the hop if (is instanceof XSInputSource) { return fGrammarBucket.getGrammar(desc.getTargetNamespace()); } return grammar; } if (referType == XSDDescription.CONTEXT_PREPARSE) { Element schemaElem = schemaRoot; schemaNamespace = DOMUtil.getAttrValue(schemaElem, SchemaSymbols.ATT_TARGETNAMESPACE); if(schemaNamespace != null && schemaNamespace.length() > 0) { // Since now we've discovered a namespace, we need to update xsd key // and store this schema in traversed schemas bucket schemaNamespace = fSymbolTable.addSymbol(schemaNamespace); desc.setTargetNamespace(schemaNamespace); } else { schemaNamespace = null; } grammar = findGrammar(desc, fNamespaceGrowth); String schemaId = XMLEntityManager.expandSystemId(is.getSystemId(), is.getBaseSystemId(), false); if (grammar != null) { // When namespace growth is enabled and a null location is provided we cannot tell // whether we've loaded this schema document before so we must assume that we haven't. 
if (!fNamespaceGrowth || (schemaId != null && grammar.getDocumentLocations().contains(schemaId))) { return grammar; } } XSDKey key = new XSDKey(schemaId, referType, schemaNamespace); fTraversed.put(key, schemaRoot); if (schemaId != null) { fDoc2SystemId.put(schemaRoot, schemaId); } } // before constructing trees and traversing a schema, need to reset // all traversers and clear all registries prepareForTraverse(); fRoot = constructTrees(schemaRoot, is.getSystemId(), desc, grammar != null); if (fRoot == null) { return null; } // second phase: fill global registries. buildGlobalNameRegistries(); // third phase: call traversers ArrayList annotationInfo = fValidateAnnotations ? new ArrayList() : null; traverseSchemas(annotationInfo); // fourth phase: handle local element decls traverseLocalElements(); // fifth phase: handle Keyrefs resolveKeyRefs(); // sixth phase: validate attribute of non-schema namespaces // REVISIT: skip this for now. we really don't want to do it. //fAttributeChecker.checkNonSchemaAttributes(fGrammarBucket); // seventh phase: store imported grammars // for all grammars with <import>s for (int i = fAllTNSs.size() - 1; i >= 0; i--) { // get its target namespace String tns = fAllTNSs.elementAt(i); // get all namespaces it imports Vector ins = (Vector)fImportMap.get(tns); // get the grammar SchemaGrammar sg = fGrammarBucket.getGrammar(emptyString2Null(tns)); if (sg == null) continue; SchemaGrammar isg; // for imported namespace int count = 0; for (int j = 0; j < ins.size(); j++) { // get imported grammar isg = fGrammarBucket.getGrammar((String)ins.elementAt(j)); // reuse the same vector if (isg != null) ins.setElementAt(isg, count++); } ins.setSize(count); // set the imported grammars sg.setImportedGrammars(ins); } if (fValidateAnnotations && annotationInfo.size() > 0) { validateAnnotations(annotationInfo); } // and return. return fGrammarBucket.getGrammar(fRoot.fTargetNamespace); } // end parseSchema
SchemaGrammar function(XMLInputSource is, XSDDescription desc, Map<String, XMLSchemaLoader.LocationArray> locationPairs) throws IOException { fLocationPairs = locationPairs; fSchemaParser.resetNodePool(); SchemaGrammar grammar = null; String schemaNamespace = null; short referType = desc.getContextType(); if (referType != XSDDescription.CONTEXT_PREPARSE){ if (fHonourAllSchemaLocations && referType == XSDDescription.CONTEXT_IMPORT && isExistingGrammar(desc, fNamespaceGrowth)) { grammar = fGrammarBucket.getGrammar(desc.getTargetNamespace()); } else { grammar = findGrammar(desc, fNamespaceGrowth); } if (grammar != null) { if (!fNamespaceGrowth) { return grammar; } else { try { if (grammar.getDocumentLocations().contains(XMLEntityManager.expandSystemId(is.getSystemId(), is.getBaseSystemId(), false))) { return grammar; } } catch (MalformedURIException e) { } } } schemaNamespace = desc.getTargetNamespace(); if (schemaNamespace != null) { schemaNamespace = fSymbolTable.addSymbol(schemaNamespace); } } prepareForParse(); Element schemaRoot = null; if (is instanceof DOMInputSource) { schemaRoot = getSchemaDocument(schemaNamespace, (DOMInputSource) is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } else if (is instanceof SAXInputSource) { schemaRoot = getSchemaDocument(schemaNamespace, (SAXInputSource) is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } else if (is instanceof StAXInputSource) { schemaRoot = getSchemaDocument(schemaNamespace, (StAXInputSource) is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } else if (is instanceof XSInputSource) { schemaRoot = getSchemaDocument((XSInputSource) is, desc); } else { schemaRoot = getSchemaDocument(schemaNamespace, is, referType == XSDDescription.CONTEXT_PREPARSE, referType, null); } if (schemaRoot == null) { if (is instanceof XSInputSource) { return fGrammarBucket.getGrammar(desc.getTargetNamespace()); } return grammar; } if (referType == XSDDescription.CONTEXT_PREPARSE) { Element schemaElem = schemaRoot; schemaNamespace = DOMUtil.getAttrValue(schemaElem, SchemaSymbols.ATT_TARGETNAMESPACE); if(schemaNamespace != null && schemaNamespace.length() > 0) { schemaNamespace = fSymbolTable.addSymbol(schemaNamespace); desc.setTargetNamespace(schemaNamespace); } else { schemaNamespace = null; } grammar = findGrammar(desc, fNamespaceGrowth); String schemaId = XMLEntityManager.expandSystemId(is.getSystemId(), is.getBaseSystemId(), false); if (grammar != null) { if (!fNamespaceGrowth (schemaId != null && grammar.getDocumentLocations().contains(schemaId))) { return grammar; } } XSDKey key = new XSDKey(schemaId, referType, schemaNamespace); fTraversed.put(key, schemaRoot); if (schemaId != null) { fDoc2SystemId.put(schemaRoot, schemaId); } } prepareForTraverse(); fRoot = constructTrees(schemaRoot, is.getSystemId(), desc, grammar != null); if (fRoot == null) { return null; } buildGlobalNameRegistries(); ArrayList annotationInfo = fValidateAnnotations ? new ArrayList() : null; traverseSchemas(annotationInfo); traverseLocalElements(); resolveKeyRefs(); for (int i = fAllTNSs.size() - 1; i >= 0; i--) { String tns = fAllTNSs.elementAt(i); Vector ins = (Vector)fImportMap.get(tns); SchemaGrammar sg = fGrammarBucket.getGrammar(emptyString2Null(tns)); if (sg == null) continue; SchemaGrammar isg; int count = 0; for (int j = 0; j < ins.size(); j++) { isg = fGrammarBucket.getGrammar((String)ins.elementAt(j)); if (isg != null) ins.setElementAt(isg, count++); } ins.setSize(count); sg.setImportedGrammars(ins); } if (fValidateAnnotations && annotationInfo.size() > 0) { validateAnnotations(annotationInfo); } return fGrammarBucket.getGrammar(fRoot.fTargetNamespace); }
/** * This method initiates the parse of a schema. It will likely be * called from the Validator and it will make the * resulting grammar available; it returns a reference to this object just * in case. A reset(XMLComponentManager) must be called before this method is called. * @param is * @param desc * @param locationPairs * @return the SchemaGrammar * @throws IOException */
This method initiates the parse of a schema. It will likely be called from the Validator and it will make the resulting grammar available; it returns a reference to this object just in case. A reset(XMLComponentManager) must be called before this method is called
parseSchema
{ "repo_name": "YouDiSN/OpenJDK-Research", "path": "jdk9/jaxp/src/java.xml/share/classes/com/sun/org/apache/xerces/internal/impl/xs/traversers/XSDHandler.java", "license": "gpl-2.0", "size": 203897 }
[ "com.sun.org.apache.xerces.internal.impl.XMLEntityManager", "com.sun.org.apache.xerces.internal.impl.xs.SchemaGrammar", "com.sun.org.apache.xerces.internal.impl.xs.SchemaSymbols", "com.sun.org.apache.xerces.internal.impl.xs.XMLSchemaLoader", "com.sun.org.apache.xerces.internal.impl.xs.XSDDescription", "com.sun.org.apache.xerces.internal.impl.xs.util.XSInputSource", "com.sun.org.apache.xerces.internal.util.DOMInputSource", "com.sun.org.apache.xerces.internal.util.DOMUtil", "com.sun.org.apache.xerces.internal.util.SAXInputSource", "com.sun.org.apache.xerces.internal.util.StAXInputSource", "com.sun.org.apache.xerces.internal.util.URI", "com.sun.org.apache.xerces.internal.xni.parser.XMLInputSource", "java.io.IOException", "java.util.ArrayList", "java.util.Map", "java.util.Vector", "org.w3c.dom.Element" ]
import com.sun.org.apache.xerces.internal.impl.XMLEntityManager; import com.sun.org.apache.xerces.internal.impl.xs.SchemaGrammar; import com.sun.org.apache.xerces.internal.impl.xs.SchemaSymbols; import com.sun.org.apache.xerces.internal.impl.xs.XMLSchemaLoader; import com.sun.org.apache.xerces.internal.impl.xs.XSDDescription; import com.sun.org.apache.xerces.internal.impl.xs.util.XSInputSource; import com.sun.org.apache.xerces.internal.util.DOMInputSource; import com.sun.org.apache.xerces.internal.util.DOMUtil; import com.sun.org.apache.xerces.internal.util.SAXInputSource; import com.sun.org.apache.xerces.internal.util.StAXInputSource; import com.sun.org.apache.xerces.internal.util.URI; import com.sun.org.apache.xerces.internal.xni.parser.XMLInputSource; import java.io.IOException; import java.util.ArrayList; import java.util.Map; import java.util.Vector; import org.w3c.dom.Element;
import com.sun.org.apache.xerces.internal.impl.*; import com.sun.org.apache.xerces.internal.impl.xs.*; import com.sun.org.apache.xerces.internal.impl.xs.util.*; import com.sun.org.apache.xerces.internal.util.*; import com.sun.org.apache.xerces.internal.xni.parser.*; import java.io.*; import java.util.*; import org.w3c.dom.*;
[ "com.sun.org", "java.io", "java.util", "org.w3c.dom" ]
com.sun.org; java.io; java.util; org.w3c.dom;
2,387,697
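The parseSchema record above documents a call-ordering contract: reset(...) must run before each parse, and the handler returns the grammar it registered. A minimal sketch of enforcing such a reset-before-parse contract; the SchemaHandler and Grammar types here are hypothetical stand-ins, not the Xerces XSDHandler API.

public class ResetBeforeParseSketch {
    static final class Grammar { }

    static final class SchemaHandler {
        private boolean ready;

        // Must be called before every parse; clears per-run state in a real handler.
        void reset() { ready = true; }

        // Parses one schema; fails fast if reset() was skipped.
        Grammar parseSchema(String systemId) {
            if (!ready) {
                throw new IllegalStateException("reset() must be called before parseSchema()");
            }
            ready = false;        // force a fresh reset before the next run
            return new Grammar(); // stand-in for the real grammar construction
        }
    }

    public static void main(String[] args) {
        SchemaHandler handler = new SchemaHandler();
        handler.reset();
        Grammar grammar = handler.parseSchema("example.xsd");
        System.out.println(grammar != null); // true
    }
}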
default boolean isFree(IPatternMatcher predicate, boolean heads) { return !predicate.test(this); }
default boolean isFree(IPatternMatcher predicate, boolean heads) { return !predicate.test(this); }
/** * Returns <code>true</code>, if <b>all of the elements</b> in the subexpressions or the * expression itself, did not satisfy the given unary predicate. * * @param predicate a unary predicate * @param heads if set to <code>false</code>, only the arguments of an IAST should be tested and * not the <code>Head[]</code> element. * @return */
Returns <code>true</code>, if all of the elements in the subexpressions or the expression itself, did not satisfy the given unary predicate
isFree
{ "repo_name": "axkr/symja_android_library", "path": "symja_android_library/matheclipse-core/src/main/java/org/matheclipse/core/interfaces/IExpr.java", "license": "gpl-3.0", "size": 152649 }
[ "org.matheclipse.core.patternmatching.IPatternMatcher" ]
import org.matheclipse.core.patternmatching.IPatternMatcher;
import org.matheclipse.core.patternmatching.*;
[ "org.matheclipse.core" ]
org.matheclipse.core;
72,092
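The isFree record above describes a check that succeeds only when no subexpression (nor the expression itself) satisfies a predicate. A sketch of that recursive idea over a hypothetical Expr tree with java.util.function.Predicate; it does not use the real IExpr/IPatternMatcher types, and the heads handling is omitted for brevity.

import java.util.List;
import java.util.function.Predicate;

final class Expr {
    final String head;
    final List<Expr> args;
    Expr(String head, Expr... args) { this.head = head; this.args = List.of(args); }

    // True only if neither this node nor any subexpression satisfies the predicate.
    boolean isFree(Predicate<Expr> p) {
        if (p.test(this)) return false;
        for (Expr a : args) if (!a.isFree(p)) return false;
        return true;
    }

    public static void main(String[] args) {
        Expr x = new Expr("x"), y = new Expr("y");
        Expr plus = new Expr("Plus", x, y);                        // represents x + y
        System.out.println(plus.isFree(e -> e.head.equals("z")));  // true: no z anywhere
        System.out.println(plus.isFree(e -> e.head.equals("x")));  // false: x occurs
    }
}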
public void testUpdateSql2() { final String modelXml = "<?xml version='1.0' encoding='ISO-8859-1'?>\n"+ "<database xmlns='" + DatabaseIO.DDLUTILS_NAMESPACE + "' name='ddlutils'>\n"+ " <table name='TestTable'>\n"+ " <column name='id' autoIncrement='true' type='INTEGER' primaryKey='true'/>\n"+ " <column name='name' type='VARCHAR' size='15'/>\n"+ " </table>\n"+ "</database>"; TestPlatform platform = new TestPlatform(); SqlBuilder sqlBuilder = platform.getSqlBuilder(); Database database = parseDatabaseFromString(modelXml); Map oldMap = new HashMap(); Map newMap = new HashMap(); oldMap.put("id", new Integer(0)); newMap.put("name", "ddlutils"); newMap.put("id", new Integer(1)); platform.setDelimitedIdentifierModeOn(true); String sql = sqlBuilder.getUpdateSql(database.getTable(0), oldMap, newMap, false); assertEquals("UPDATE \"TestTable\" SET \"id\" = '1', \"name\" = 'ddlutils' WHERE \"id\" = '0'", sql); }
void function() { final String modelXml = STR+ STR + DatabaseIO.DDLUTILS_NAMESPACE + STR+ STR+ STR+ STR+ STR+ STR; TestPlatform platform = new TestPlatform(); SqlBuilder sqlBuilder = platform.getSqlBuilder(); Database database = parseDatabaseFromString(modelXml); Map oldMap = new HashMap(); Map newMap = new HashMap(); oldMap.put("id", new Integer(0)); newMap.put("name", STR); newMap.put("id", new Integer(1)); platform.setDelimitedIdentifierModeOn(true); String sql = sqlBuilder.getUpdateSql(database.getTable(0), oldMap, newMap, false); assertEquals(STRTestTable\STRid\STRname\STRid\STR, sql); }
/** * Tests the {@link SqlBuilder#getUpdateSql(Table, Map, Map, boolean)} method. */
Tests the <code>SqlBuilder#getUpdateSql(Table, Map, Map, boolean)</code> method
testUpdateSql2
{ "repo_name": "qxo/ddlutils", "path": "src/test/java/org/apache/ddlutils/platform/TestSqlBuilder.java", "license": "apache-2.0", "size": 3620 }
[ "java.util.HashMap", "java.util.Map", "org.apache.ddlutils.io.DatabaseIO", "org.apache.ddlutils.model.Database" ]
import java.util.HashMap; import java.util.Map; import org.apache.ddlutils.io.DatabaseIO; import org.apache.ddlutils.model.Database;
import java.util.*; import org.apache.ddlutils.io.*; import org.apache.ddlutils.model.*;
[ "java.util", "org.apache.ddlutils" ]
java.util; org.apache.ddlutils;
2,474,615
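The test above expects identifiers to be wrapped in double quotes when delimited-identifier mode is on. A rough sketch of building such an UPDATE statement from old/new value maps, written against the plain JDK with made-up names; it is not DdlUtils' actual SqlBuilder, and the naive single-quoted values are for illustration only (a real builder escapes or parameterizes them).

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringJoiner;

public class UpdateSqlSketch {
    // Wrap identifiers in double quotes when delimited mode is on, as the test asserts.
    static String id(String name, boolean delimited) {
        return delimited ? "\"" + name + "\"" : name;
    }

    // Naive value rendering; not injection-safe, illustration only.
    static String val(Object v) { return "'" + v + "'"; }

    static String buildUpdate(String table, Map<String, Object> newValues,
                              Map<String, Object> oldKey, boolean delimited) {
        StringJoiner set = new StringJoiner(", ");
        newValues.forEach((c, v) -> set.add(id(c, delimited) + " = " + val(v)));
        StringJoiner where = new StringJoiner(" AND ");
        oldKey.forEach((c, v) -> where.add(id(c, delimited) + " = " + val(v)));
        return "UPDATE " + id(table, delimited) + " SET " + set + " WHERE " + where;
    }

    public static void main(String[] args) {
        Map<String, Object> newValues = new LinkedHashMap<>();
        newValues.put("id", 1);
        newValues.put("name", "ddlutils");
        Map<String, Object> oldKey = new LinkedHashMap<>();
        oldKey.put("id", 0);
        System.out.println(buildUpdate("TestTable", newValues, oldKey, true));
        // UPDATE "TestTable" SET "id" = '1', "name" = 'ddlutils' WHERE "id" = '0'
    }
}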
@Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
/** * Handles the HTTP <code>POST</code> method. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */
Handles the HTTP <code>POST</code> method
doPost
{ "repo_name": "bd-king/E-notice", "path": "src/main/java/Servlet/SearchServlet.java", "license": "mit", "size": 4809 }
[ "java.io.IOException", "javax.servlet.ServletException", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse" ]
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
import java.io.*; import javax.servlet.*; import javax.servlet.http.*;
[ "java.io", "javax.servlet" ]
java.io; javax.servlet;
2,797,501
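The servlet record above routes POST to a shared processRequest method; the same delegation is typically applied to GET so both verbs share one code path. A minimal sketch of that pattern using the standard javax.servlet API (the class name and response body are placeholders, and the servlet API must be on the classpath).

import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class ExampleServlet extends HttpServlet {
    // Shared handler so GET and POST behave identically.
    private void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("text/plain;charset=UTF-8");
        response.getWriter().println("handled " + request.getMethod());
    }

    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }
}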
public byte[] getFileIds() throws IOException { return sendBytes(new byte[]{0x6f}).data; }
byte[] function() throws IOException { return sendBytes(new byte[]{0x6f}).data; }
/** * Get a list of all the files in the current application ("directory") */
Get a list of all the files in the current application ("directory")
getFileIds
{ "repo_name": "ldematte/Nfc", "path": "nfcbase/src/main/java/org/dematte/nfc/common/MifareDesfire.java", "license": "mit", "size": 11062 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,322,759
TestBase.createCaller().init().test(); }
TestBase.createCaller().init().test(); }
/** * Run just this test. * * @param a ignored */
Run just this test
main
{ "repo_name": "vdr007/ThriftyPaxos", "path": "src/applications/h2/src/test/org/h2/test/unit/TestConnectionInfo.java", "license": "apache-2.0", "size": 3136 }
[ "org.h2.test.TestBase" ]
import org.h2.test.TestBase;
import org.h2.test.*;
[ "org.h2.test" ]
org.h2.test;
2,737,390
private void initJMeterKeyStore() throws IOException, GeneralSecurityException { if (storePassword != null) { // Assume we have already created the store try { keyStore = getKeyStore(storePassword.toCharArray()); X509Certificate caCert = (X509Certificate) keyStore.getCertificate(JMETER_SERVER_ALIAS); caCert.checkValidity(new Date(System.currentTimeMillis()+DateUtils.MILLIS_PER_DAY)); } catch (Exception e) { // store is faulty, we need to recreate it keyStore = null; // if cert is not valid, flag up to recreate it log.warn( "Could not open expected file or certificate is not valid {} {}", CERT_PATH_ABS, e.getMessage(), e); } } if (keyStore == null) { // no existing file or not valid storePassword = RandomStringUtils.randomAlphanumeric(20); // Alphanum to avoid issues with command-line quoting keyPassword = storePassword; // we use same password for both setPassword(storePassword); log.info("Generating standard keypair in {}", CERT_PATH_ABS); if(!CERT_PATH.delete()){ // safer to start afresh log.warn( "Could not delete {}, this could create issues, stop jmeter, ensure file is deleted and restart again", CERT_PATH.getAbsolutePath()); } KeyToolUtils.genkeypair(CERT_PATH, JMETER_SERVER_ALIAS, storePassword, CERT_VALIDITY, null, null); keyStore = getKeyStore(storePassword.toCharArray()); // This should now work } }
void function() throws IOException, GeneralSecurityException { if (storePassword != null) { try { keyStore = getKeyStore(storePassword.toCharArray()); X509Certificate caCert = (X509Certificate) keyStore.getCertificate(JMETER_SERVER_ALIAS); caCert.checkValidity(new Date(System.currentTimeMillis()+DateUtils.MILLIS_PER_DAY)); } catch (Exception e) { keyStore = null; log.warn( STR, CERT_PATH_ABS, e.getMessage(), e); } } if (keyStore == null) { storePassword = RandomStringUtils.randomAlphanumeric(20); keyPassword = storePassword; setPassword(storePassword); log.info(STR, CERT_PATH_ABS); if(!CERT_PATH.delete()){ log.warn( STR, CERT_PATH.getAbsolutePath()); } KeyToolUtils.genkeypair(CERT_PATH, JMETER_SERVER_ALIAS, storePassword, CERT_VALIDITY, null, null); keyStore = getKeyStore(storePassword.toCharArray()); } }
/** * Initialise the single key JMeter keystore (original behaviour) */
Initialise the single key JMeter keystore (original behaviour)
initJMeterKeyStore
{ "repo_name": "ufctester/apache-jmeter", "path": "src/protocol/http/org/apache/jmeter/protocol/http/proxy/ProxyControl.java", "license": "apache-2.0", "size": 70486 }
[ "java.io.IOException", "java.security.GeneralSecurityException", "java.security.cert.X509Certificate", "java.util.Date", "org.apache.commons.lang3.RandomStringUtils", "org.apache.commons.lang3.time.DateUtils", "org.apache.jorphan.exec.KeyToolUtils" ]
import java.io.IOException; import java.security.GeneralSecurityException; import java.security.cert.X509Certificate; import java.util.Date; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.time.DateUtils; import org.apache.jorphan.exec.KeyToolUtils;
import java.io.*; import java.security.*; import java.security.cert.*; import java.util.*; import org.apache.commons.lang3.*; import org.apache.commons.lang3.time.*; import org.apache.jorphan.exec.*;
[ "java.io", "java.security", "java.util", "org.apache.commons", "org.apache.jorphan" ]
java.io; java.security; java.util; org.apache.commons; org.apache.jorphan;
1,774,716
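The keystore initializer above opens an existing store, checks that its certificate is still valid one day ahead, and regenerates everything when the check fails. A reduced sketch of just that validity probe with the JDK's java.security API; the file path, password, alias, and the "JKS" store type are assumptions, not JMeter's actual configuration.

import java.io.FileInputStream;
import java.io.InputStream;
import java.security.KeyStore;
import java.security.cert.X509Certificate;
import java.util.Date;

public class KeyStoreCheck {
    // True if the alias holds an X.509 certificate that is still valid one day from now.
    static boolean certificateUsable(String path, char[] password, String alias) {
        try (InputStream in = new FileInputStream(path)) {
            KeyStore ks = KeyStore.getInstance("JKS"); // store type is an assumption
            ks.load(in, password);
            X509Certificate cert = (X509Certificate) ks.getCertificate(alias);
            if (cert == null) return false;
            cert.checkValidity(new Date(System.currentTimeMillis() + 24L * 60 * 60 * 1000));
            return true;
        } catch (Exception e) {
            return false; // missing, unreadable or expired: caller should regenerate the store
        }
    }

    public static void main(String[] args) {
        System.out.println(certificateUsable("proxyserver.jks", "changeit".toCharArray(), "jmeter"));
    }
}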
@ExceptionHandler public ResponseEntity<KnetminerExceptionResponse> handleStatusBasedException ( HttpStatusCodeException ex, WebRequest request, HttpServletResponse response ) { return handleExceptionInternal ( ex, ex.getResponseHeaders (), ex.getStatusCode (), request, response ); }
ResponseEntity<KnetminerExceptionResponse> function ( HttpStatusCodeException ex, WebRequest request, HttpServletResponse response ) { return handleExceptionInternal ( ex, ex.getResponseHeaders (), ex.getStatusCode (), request, response ); }
/** * Takes the status code from {@link HttpStatusCodeException#getStatus() code}. */
Takes the status code from <code>HttpStatusCodeException#getStatus() code</code>
handleStatusBasedException
{ "repo_name": "AjitPS/KnetMiner", "path": "server-base/src/main/java/rres/knetminer/datasource/server/KnetminerExceptionHandler.java", "license": "mit", "size": 6919 }
[ "javax.servlet.http.HttpServletResponse", "org.springframework.http.ResponseEntity", "org.springframework.web.client.HttpStatusCodeException", "org.springframework.web.context.request.WebRequest" ]
import javax.servlet.http.HttpServletResponse; import org.springframework.http.ResponseEntity; import org.springframework.web.client.HttpStatusCodeException; import org.springframework.web.context.request.WebRequest;
import javax.servlet.http.*; import org.springframework.http.*; import org.springframework.web.client.*; import org.springframework.web.context.request.*;
[ "javax.servlet", "org.springframework.http", "org.springframework.web" ]
javax.servlet; org.springframework.http; org.springframework.web;
2,562,569
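The handler above propagates the status and headers carried by the thrown HttpStatusCodeException into the response instead of mapping everything to 500. A simplified sketch of that pattern with Spring's ResponseEntity builder, assuming Spring Web MVC 5.x on the classpath; the advice class name and the choice to echo the upstream response body are illustrative, not the project's KnetminerExceptionResponse handling.

import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.client.HttpStatusCodeException;

@RestControllerAdvice
public class StatusPropagatingAdvice {
    // Reuse the status code and headers carried by the exception itself.
    @ExceptionHandler(HttpStatusCodeException.class)
    public ResponseEntity<String> handle(HttpStatusCodeException ex) {
        return ResponseEntity.status(ex.getStatusCode())
                .headers(ex.getResponseHeaders())
                .body(ex.getResponseBodyAsString());
    }
}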
public boolean initialize(WorkMode mode, Duration interval) throws PortInUseException, TooManyListenersException, IOException, UnsupportedCommOperationException { boolean initSuccessful = true; SerialPort localSerialPort = portId.open(thingHandler.getThing().getUID().toString(), 2000); localSerialPort.setSerialPortParams(9600, 8, 1, 0); outputStream = localSerialPort.getOutputStream(); inputStream = localSerialPort.getInputStream(); if (inputStream == null || outputStream == null) { throw new IOException("Could not create input or outputstream for the port"); } // wake up the device initSuccessful &= sendSleep(false); initSuccessful &= getFirmware(); if (mode == WorkMode.POLLING) { initSuccessful &= setMode(WorkMode.POLLING); initSuccessful &= setWorkingPeriod((byte) 0); } else { // reporting initSuccessful &= setWorkingPeriod((byte) interval.toMinutes()); initSuccessful &= setMode(WorkMode.REPORTING); } // enable listeners only after we have configured the sensor above because for configuring we send and read data // sequentially localSerialPort.notifyOnDataAvailable(true); localSerialPort.addEventListener(this); this.serialPort = localSerialPort; return initSuccessful; }
boolean function(WorkMode mode, Duration interval) throws PortInUseException, TooManyListenersException, IOException, UnsupportedCommOperationException { boolean initSuccessful = true; SerialPort localSerialPort = portId.open(thingHandler.getThing().getUID().toString(), 2000); localSerialPort.setSerialPortParams(9600, 8, 1, 0); outputStream = localSerialPort.getOutputStream(); inputStream = localSerialPort.getInputStream(); if (inputStream == null outputStream == null) { throw new IOException(STR); } initSuccessful &= sendSleep(false); initSuccessful &= getFirmware(); if (mode == WorkMode.POLLING) { initSuccessful &= setMode(WorkMode.POLLING); initSuccessful &= setWorkingPeriod((byte) 0); } else { initSuccessful &= setWorkingPeriod((byte) interval.toMinutes()); initSuccessful &= setMode(WorkMode.REPORTING); } localSerialPort.notifyOnDataAvailable(true); localSerialPort.addEventListener(this); this.serialPort = localSerialPort; return initSuccessful; }
/** * Initialize the communication with the device, i.e. open the serial port etc. * * @param mode the {@link WorkMode} if we want to use polling or reporting * @param interval the time between polling or reportings * @return {@code true} if we can communicate with the device * @throws PortInUseException * @throws TooManyListenersException * @throws IOException * @throws UnsupportedCommOperationException */
Initialize the communication with the device, i.e. open the serial port etc
initialize
{ "repo_name": "openhab/openhab2", "path": "bundles/org.openhab.binding.novafinedust/src/main/java/org/openhab/binding/novafinedust/internal/sds011protocol/SDS011Communicator.java", "license": "epl-1.0", "size": 12463 }
[ "java.io.IOException", "java.time.Duration", "java.util.TooManyListenersException", "org.openhab.core.io.transport.serial.PortInUseException", "org.openhab.core.io.transport.serial.SerialPort", "org.openhab.core.io.transport.serial.UnsupportedCommOperationException" ]
import java.io.IOException; import java.time.Duration; import java.util.TooManyListenersException; import org.openhab.core.io.transport.serial.PortInUseException; import org.openhab.core.io.transport.serial.SerialPort; import org.openhab.core.io.transport.serial.UnsupportedCommOperationException;
import java.io.*; import java.time.*; import java.util.*; import org.openhab.core.io.transport.serial.*;
[ "java.io", "java.time", "java.util", "org.openhab.core" ]
java.io; java.time; java.util; org.openhab.core;
2,039,144
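The initializer above chooses between polling (the sensor answers only when asked, working period 0) and reporting (the sensor pushes a sample every N minutes), and only then attaches the serial listener. A small sketch of just that mode/interval decision; the Sensor interface is a hypothetical stand-in for the real serial communicator, which sends these settings as SDS011 frames.

import java.time.Duration;

public class WorkModeSketch {
    enum WorkMode { POLLING, REPORTING }

    // Hypothetical command surface; each call maps to one configuration frame on the wire.
    interface Sensor {
        boolean setMode(WorkMode mode);
        boolean setWorkingPeriodMinutes(int minutes);
    }

    // Polling uses a zero working period; reporting pushes every `interval`, set before switching mode.
    static boolean configure(Sensor sensor, WorkMode mode, Duration interval) {
        boolean ok = true;
        if (mode == WorkMode.POLLING) {
            ok &= sensor.setMode(WorkMode.POLLING);
            ok &= sensor.setWorkingPeriodMinutes(0);
        } else {
            ok &= sensor.setWorkingPeriodMinutes((int) interval.toMinutes());
            ok &= sensor.setMode(WorkMode.REPORTING);
        }
        return ok;
    }
}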
private static boolean adjustForSideEffects(List<Parameter> parameters) { // If a parameter is moved, that has side-effect no parameters that // can be effected by side-effects can be left. // A parameter can be moved if it can't be side-effected (immutable), // or there are no following side-effects, that aren't moved. boolean anyMovable = false; boolean seenUnmovableSideEffects = false; boolean seenUnmoveableSideEfffected = false; for (int i = parameters.size() - 1; i >= 0; i--) { Parameter current = parameters.get(i); // Preserve side-effect ordering, don't move this parameter if: // * the current parameter has side-effects and a following // parameters that will not be move can be effected. // * the current parameter can be effected and a following // parameter that will not be moved has side-effects if (current.shouldRemove && ((seenUnmovableSideEffects && current.canBeSideEffected()) || (seenUnmoveableSideEfffected && current.hasSideEffects()))) { current.shouldRemove = false; } if (current.shouldRemove) { anyMovable = true; } else { if (current.canBeSideEffected) { seenUnmoveableSideEfffected = true; } if (current.hasSideEffects) { seenUnmovableSideEffects = true; } } } return anyMovable; }
static boolean function(List<Parameter> parameters) { boolean anyMovable = false; boolean seenUnmovableSideEffects = false; boolean seenUnmoveableSideEfffected = false; for (int i = parameters.size() - 1; i >= 0; i--) { Parameter current = parameters.get(i); if (current.shouldRemove && ((seenUnmovableSideEffects && current.canBeSideEffected()) (seenUnmoveableSideEfffected && current.hasSideEffects()))) { current.shouldRemove = false; } if (current.shouldRemove) { anyMovable = true; } else { if (current.canBeSideEffected) { seenUnmoveableSideEfffected = true; } if (current.hasSideEffects) { seenUnmovableSideEffects = true; } } } return anyMovable; }
/** * Adjust the parameters to move based on the side-effects seen. * @return Whether there are any movable parameters. */
Adjust the parameters to move based on the side-effects seen
adjustForSideEffects
{ "repo_name": "visokio/closure-compiler", "path": "src/com/google/javascript/jscomp/OptimizeParameters.java", "license": "apache-2.0", "size": 17698 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,355,041
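The pass above scans parameters right-to-left and cancels a removal whenever moving that parameter could reorder side effects relative to a parameter that stays behind. A compact sketch of the same backward scan over a hypothetical Param class (not the compiler's internal Parameter type), so the interaction rule is easy to see in isolation.

import java.util.List;

public class SideEffectScanSketch {
    static final class Param {
        boolean shouldRemove;      // candidate for being moved/removed
        boolean hasSideEffects;    // evaluating it does something observable
        boolean canBeSideEffected; // its value could change if other code runs first
        Param(boolean remove, boolean effects, boolean effected) {
            shouldRemove = remove; hasSideEffects = effects; canBeSideEffected = effected;
        }
    }

    // Returns true if at least one parameter is still safe to move after the scan.
    static boolean adjust(List<Param> params) {
        boolean anyMovable = false;
        boolean laterKeptHasSideEffects = false;
        boolean laterKeptCanBeSideEffected = false;
        for (int i = params.size() - 1; i >= 0; i--) {
            Param p = params.get(i);
            // Moving p would slide it past a kept parameter it interacts with: keep it in place.
            if (p.shouldRemove && ((laterKeptHasSideEffects && p.canBeSideEffected)
                    || (laterKeptCanBeSideEffected && p.hasSideEffects))) {
                p.shouldRemove = false;
            }
            if (p.shouldRemove) {
                anyMovable = true;
            } else {
                laterKeptHasSideEffects |= p.hasSideEffects;
                laterKeptCanBeSideEffected |= p.canBeSideEffected;
            }
        }
        return anyMovable;
    }
}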
public Class getType() { return Date.class; }
Class function() { return Date.class; }
/** * Returns java.sql.Date. * * @see prefux.data.parser.DataParser#getType() */
Returns java.sql.Date
getType
{ "repo_name": "effrafax/Prefux", "path": "src/main/java/prefux/data/parser/DateParser.java", "license": "bsd-3-clause", "size": 5283 }
[ "java.sql.Date" ]
import java.sql.Date;
import java.sql.*;
[ "java.sql" ]
java.sql;
737,201