method stringlengths 13–441k | clean_method stringlengths 7–313k | doc stringlengths 17–17.3k | comment stringlengths 3–1.42k | method_name stringlengths 1–273 | extra dict | imports sequence | imports_info stringlengths 19–34.8k | cluster_imports_info stringlengths 15–3.66k | libraries sequence | libraries_info stringlengths 6–661 | id int64 0–2.92M |
---|---|---|---|---|---|---|---|---|---|---|---|
void modify(AbstractEpollChannel ch) throws IOException {
assert inEventLoop();
Native.epollCtlMod(epollFd.intValue(), ch.socket.intValue(), ch.flags);
} | void modify(AbstractEpollChannel ch) throws IOException { assert inEventLoop(); Native.epollCtlMod(epollFd.intValue(), ch.socket.intValue(), ch.flags); } | /**
* The flags of the given epoll was modified so update the registration
*/ | The flags of the given epoll was modified so update the registration | modify | {
"repo_name": "Apache9/netty",
"path": "transport-native-epoll/src/main/java/io/netty/channel/epoll/EpollEventLoop.java",
"license": "apache-2.0",
"size": 19100
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,265,927 |
protected void initListPreference(final ListPreference prefView, final String value,
final CharSequence[] entries, final CharSequence[] entryValues) {
prefView.setEntries(entries);
prefView.setEntryValues(entryValues);
prefView.setValue(value);
prefView.setSummary(prefView.getEntry());
prefView.setOnPreferenceChangeListener(new PreferenceChangeListener(prefView));
}
private static class PreferenceChangeListener implements Preference.OnPreferenceChangeListener {
private ListPreference mPrefView;
private PreferenceChangeListener(final ListPreference prefView) {
this.mPrefView = prefView;
} | void function(final ListPreference prefView, final String value, final CharSequence[] entries, final CharSequence[] entryValues) { prefView.setEntries(entries); prefView.setEntryValues(entryValues); prefView.setValue(value); prefView.setSummary(prefView.getEntry()); prefView.setOnPreferenceChangeListener(new PreferenceChangeListener(prefView)); } private static class PreferenceChangeListener implements Preference.OnPreferenceChangeListener { private ListPreference mPrefView; private PreferenceChangeListener(final ListPreference prefView) { this.mPrefView = prefView; } | /**
* Initialize a given {@link ListPreference} instance.
*
* @param prefView
* The {@link ListPreference} instance to initialize.
* @param value
* Initial value for the {@link ListPreference} object.
* @param entries
* Sets the human-readable entries to be shown in the list.
* @param entryValues
* The array to find the value to save for a preference when an
* entry from entries is selected.
*/ | Initialize a given <code>ListPreference</code> instance | initListPreference | {
"repo_name": "1037704496/ZywxEmail",
"path": "src/com/fsck/zywxMailk9/activity/K9PreferenceActivity.java",
"license": "bsd-3-clause",
"size": 3206
} | [
"android.preference.ListPreference",
"android.preference.Preference"
] | import android.preference.ListPreference; import android.preference.Preference; | import android.preference.*; | [
"android.preference"
] | android.preference; | 1,403,191 |
public void setBirthdateFromAge(int age, Date ageOnDate) {
Calendar c = Calendar.getInstance();
c.setTime(ageOnDate == null ? new Date() : ageOnDate);
c.set(Calendar.DATE, 1);
c.set(Calendar.MONTH, Calendar.JANUARY);
c.add(Calendar.YEAR, -1 * age);
setBirthdate(c.getTime());
setBirthdateEstimated(true);
}
| void function(int age, Date ageOnDate) { Calendar c = Calendar.getInstance(); c.setTime(ageOnDate == null ? new Date() : ageOnDate); c.set(Calendar.DATE, 1); c.set(Calendar.MONTH, Calendar.JANUARY); c.add(Calendar.YEAR, -1 * age); setBirthdate(c.getTime()); setBirthdateEstimated(true); } | /**
* Convenience method: sets a person's birth date from an age as of the given date Also sets
* flag indicating that the birth date is inexact. This sets the person's birth date to January
* 1 of the year that matches this age and date
*
* @param age (the age to set)
* @param ageOnDate (null defaults to today)
*/ | Convenience method: sets a person's birth date from an age as of the given date Also sets flag indicating that the birth date is inexact. This sets the person's birth date to January 1 of the year that matches this age and date | setBirthdateFromAge | {
"repo_name": "vinayvenu/openmrs-core",
"path": "api/src/main/java/org/openmrs/Person.java",
"license": "mpl-2.0",
"size": 29580
} | [
"java.util.Calendar",
"java.util.Date"
] | import java.util.Calendar; import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 1,161,694 |
public long writeResult(PlatformMemory mem, Object obj) {
return writeResult(mem, obj, null);
} | long function(PlatformMemory mem, Object obj) { return writeResult(mem, obj, null); } | /**
* Writes the result to reused stream, if any.
*/ | Writes the result to reused stream, if any | writeResult | {
"repo_name": "irudyak/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/platform/cache/PlatformCache.java",
"license": "apache-2.0",
"size": 52051
} | [
"org.apache.ignite.internal.processors.platform.memory.PlatformMemory"
] | import org.apache.ignite.internal.processors.platform.memory.PlatformMemory; | import org.apache.ignite.internal.processors.platform.memory.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,995,283 |
if (!isInitialized) {
if(processEngines == null) {
// Create new map to store process-engines if current map is null
processEngines = new HashMap<String, ProcessEngine>();
}
ClassLoader classLoader = ReflectUtil.getClassLoader();
Enumeration<URL> resources = null;
try {
resources = classLoader.getResources("activiti.cfg.xml");
} catch (IOException e) {
throw new ActivitiIllegalArgumentException("problem retrieving activiti.cfg.xml resources on the classpath: "+System.getProperty("java.class.path"), e);
}
// Remove duplicated configuration URL's using set. Some classloaders may return identical URL's twice, causing duplicate startups
Set<URL> configUrls = new HashSet<URL>();
while (resources.hasMoreElements()) {
configUrls.add( resources.nextElement() );
}
for (Iterator<URL> iterator = configUrls.iterator(); iterator.hasNext();) {
URL resource = iterator.next();
log.info("Initializing process engine using configuration '{}'", resource.toString());
initProcessEnginFromResource(resource);
}
try {
resources = classLoader.getResources("activiti-context.xml");
} catch (IOException e) {
throw new ActivitiIllegalArgumentException("problem retrieving activiti-context.xml resources on the classpath: "+System.getProperty("java.class.path"), e);
}
while (resources.hasMoreElements()) {
URL resource = resources.nextElement();
log.info("Initializing process engine using Spring configuration '{}'", resource.toString());
initProcessEngineFromSpringResource(resource);
}
isInitialized = true;
} else {
log.info("Process engines already initialized");
}
} | if (!isInitialized) { if(processEngines == null) { processEngines = new HashMap<String, ProcessEngine>(); } ClassLoader classLoader = ReflectUtil.getClassLoader(); Enumeration<URL> resources = null; try { resources = classLoader.getResources(STR); } catch (IOException e) { throw new ActivitiIllegalArgumentException(STR+System.getProperty(STR), e); } Set<URL> configUrls = new HashSet<URL>(); while (resources.hasMoreElements()) { configUrls.add( resources.nextElement() ); } for (Iterator<URL> iterator = configUrls.iterator(); iterator.hasNext();) { URL resource = iterator.next(); log.info(STR, resource.toString()); initProcessEnginFromResource(resource); } try { resources = classLoader.getResources(STR); } catch (IOException e) { throw new ActivitiIllegalArgumentException(STR+System.getProperty(STR), e); } while (resources.hasMoreElements()) { URL resource = resources.nextElement(); log.info(STR, resource.toString()); initProcessEngineFromSpringResource(resource); } isInitialized = true; } else { log.info(STR); } } | /** Initializes all process engines that can be found on the classpath for
* resources <code>activiti.cfg.xml</code> (plain Activiti style configuration)
* and for resources <code>activiti-context.xml</code> (Spring style configuration). */ | Initializes all process engines that can be found on the classpath for resources <code>activiti.cfg.xml</code> (plain Activiti style configuration) | init | {
"repo_name": "springvelocity/xbpm5",
"path": "activiti-engine/src/main/java/org/activiti/engine/ProcessEngines.java",
"license": "apache-2.0",
"size": 11554
} | [
"java.io.IOException",
"java.util.Enumeration",
"java.util.HashMap",
"java.util.HashSet",
"java.util.Iterator",
"java.util.Set",
"org.activiti.engine.impl.util.ReflectUtil"
] | import java.io.IOException; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.activiti.engine.impl.util.ReflectUtil; | import java.io.*; import java.util.*; import org.activiti.engine.impl.util.*; | [
"java.io",
"java.util",
"org.activiti.engine"
] | java.io; java.util; org.activiti.engine; | 2,628,158 |
public static <A extends Analyzer<?>> AnalyzerDescriptor<A> ofAnalyzer(Class<A> analyzerClass) {
return new AnnotationBasedAnalyzerComponentDescriptor<A>(analyzerClass);
} | static <A extends Analyzer<?>> AnalyzerDescriptor<A> function(Class<A> analyzerClass) { return new AnnotationBasedAnalyzerComponentDescriptor<A>(analyzerClass); } | /**
* Creates an {@link AnalyzerDescriptor} for an analyzer class.
*
* @param <A>
* @param analyzerClass
* @return
*/ | Creates an <code>AnalyzerDescriptor</code> for an analyzer class | ofAnalyzer | {
"repo_name": "anandswarupv/DataCleaner",
"path": "engine/core/src/main/java/org/datacleaner/descriptors/Descriptors.java",
"license": "lgpl-3.0",
"size": 4721
} | [
"org.datacleaner.api.Analyzer"
] | import org.datacleaner.api.Analyzer; | import org.datacleaner.api.*; | [
"org.datacleaner.api"
] | org.datacleaner.api; | 1,715,346 |
private VerticalPanel createTypePanel()
{
typePanel = new VerticalPanel();
| VerticalPanel function() { typePanel = new VerticalPanel(); | /**
* Create panel to specify the type of confidence intervals
*
* @return type panel
*/ | Create panel to specify the type of confidence intervals | createTypePanel | {
"repo_name": "SampleSizeShop/GlimmpseWeb",
"path": "src/edu/ucdenver/bios/glimmpseweb/client/shared/OptionsConfidenceIntervalsPanel.java",
"license": "gpl-2.0",
"size": 19931
} | [
"com.google.gwt.user.client.ui.VerticalPanel"
] | import com.google.gwt.user.client.ui.VerticalPanel; | import com.google.gwt.user.client.ui.*; | [
"com.google.gwt"
] | com.google.gwt; | 783,888 |
public Property get(String category, String key, String defaultValue, String comment, Pattern validationPattern)
{
Property prop = get(category, key, defaultValue, comment, STRING);
prop.setValidationPattern(validationPattern);
return prop;
} | Property function(String category, String key, String defaultValue, String comment, Pattern validationPattern) { Property prop = get(category, key, defaultValue, comment, STRING); prop.setValidationPattern(validationPattern); return prop; } | /**
* Gets a string Property with a comment using the defined validationPattern and otherwise default settings.
*
* @param category the config category
* @param key the Property key value
* @param defaultValue the default value
* @param comment a String comment
* @param validationPattern a Pattern object for input validation
* @return a string Property with the defined validationPattern, validValues = null
*/ | Gets a string Property with a comment using the defined validationPattern and otherwise default settings | get | {
"repo_name": "seblund/Dissolvable",
"path": "build/tmp/recompileMc/sources/net/minecraftforge/common/config/Configuration.java",
"license": "gpl-3.0",
"size": 65608
} | [
"java.util.regex.Pattern"
] | import java.util.regex.Pattern; | import java.util.regex.*; | [
"java.util"
] | java.util; | 455,594 |
public static Record findById(String tableName, String primaryKey, Object idValue, String columns) {
String sql = DbKit.dialect.forDbFindById(tableName, primaryKey, columns);
List<Record> result = find(sql, idValue);
return result.size() > 0 ? result.get(0) : null;
}
| static Record function(String tableName, String primaryKey, Object idValue, String columns) { String sql = DbKit.dialect.forDbFindById(tableName, primaryKey, columns); List<Record> result = find(sql, idValue); return result.size() > 0 ? result.get(0) : null; } | /**
* Find record by id. Fetch the specific columns only.
* Example: Record user = Db.findById("user", "user_id", 15, "name, age");
* @param tableName the table name of the table
* @param primaryKey the primary key of the table
* @param idValue the id value of the record
* @param columns the specific columns separate with comma character ==> ","
*/ | Find record by id. Fetch the specific columns only. Example: Record user = Db.findById("user", "user_id", 15, "name, age") | findById | {
"repo_name": "handong106324/sqLogWeb",
"path": "src/com/jfinal/plugin/activerecord/Db.java",
"license": "lgpl-2.1",
"size": 34758
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,557,177 |
@Test
public void testParallism() throws Exception {
List<WriterThread> threads = new ArrayList<WriterThread>();
ThreadGroup group = new ThreadGroup("threads");
for (int i = 0; i < 10; i++) {
WriterThread thread = new WriterThread(group, i, this.writer);
threads.add(thread);
thread.start();
}
while (group.activeCount() > 0) {
Thread.sleep(1000);
}
for (WriterThread thread : threads) {
assertFalse("expected thread caused unexpected exception", thread.isFailed());
}
}
public static class WriterThread extends Thread {
private int index;
private boolean failed;
private DropwizardMetricServices writer;
public WriterThread(ThreadGroup group, int index,
DropwizardMetricServices writer) {
super(group, "Writer-" + index);
this.index = index;
this.writer = writer;
} | void function() throws Exception { List<WriterThread> threads = new ArrayList<WriterThread>(); ThreadGroup group = new ThreadGroup(STR); for (int i = 0; i < 10; i++) { WriterThread thread = new WriterThread(group, i, this.writer); threads.add(thread); thread.start(); } while (group.activeCount() > 0) { Thread.sleep(1000); } for (WriterThread thread : threads) { assertFalse(STR, thread.isFailed()); } } public static class WriterThread extends Thread { private int index; private boolean failed; private DropwizardMetricServices writer; public WriterThread(ThreadGroup group, int index, DropwizardMetricServices writer) { super(group, STR + index); this.index = index; this.writer = writer; } | /**
* Test the case where a given writer is used amongst several threads where each
* thread is updating the same set of metrics. This would be an example case of the
* writer being used with the MetricsFilter handling several requests/sec to the same
* URL.
*
* @throws Exception if an error occurs
*/ | Test the case where a given writer is used amongst several threads where each thread is updating the same set of metrics. This would be an example case of the writer being used with the MetricsFilter handling several requests/sec to the same URL | testParallism | {
"repo_name": "Nephilim84/contestparser",
"path": "spring-boot-actuator/src/test/java/org/springframework/boot/actuate/metrics/dropwizard/DropwizardMetricServicesTests.java",
"license": "apache-2.0",
"size": 4201
} | [
"java.util.ArrayList",
"java.util.List",
"org.junit.Assert"
] | import java.util.ArrayList; import java.util.List; import org.junit.Assert; | import java.util.*; import org.junit.*; | [
"java.util",
"org.junit"
] | java.util; org.junit; | 267,433 |
String getCharset();
/**
* Sets the connect timeout for the underlying {@link HttpURLConnection} | String getCharset(); /** * Sets the connect timeout for the underlying {@link HttpURLConnection} | /**
* Returns the connection charset. Defaults to {@link Charset} defaultCharset if not set
*
* @return charset
*/ | Returns the connection charset. Defaults to <code>Charset</code> defaultCharset if not set | getCharset | {
"repo_name": "brettwooldridge/scribe-java",
"path": "src/main/java/org/scribe/model/Request.java",
"license": "mit",
"size": 3977
} | [
"java.net.HttpURLConnection"
] | import java.net.HttpURLConnection; | import java.net.*; | [
"java.net"
] | java.net; | 939,779 |
protected void addPendingEntryToDocumentType(PendingEntrySummary pendingEntrySummary, Map<String, EntryReportDocumentTypeTotalLine> docTypeTotals) {
EntryReportDocumentTypeTotalLine docTypeTotal = docTypeTotals.get(pendingEntrySummary.getConstantDocumentTypeCode());
if (docTypeTotal == null) {
docTypeTotal = new EntryReportDocumentTypeTotalLine(pendingEntrySummary.getConstantDocumentTypeCode());
docTypeTotals.put(pendingEntrySummary.getConstantDocumentTypeCode(), docTypeTotal);
}
addSummaryToTotal(pendingEntrySummary, docTypeTotal);
}
| void function(PendingEntrySummary pendingEntrySummary, Map<String, EntryReportDocumentTypeTotalLine> docTypeTotals) { EntryReportDocumentTypeTotalLine docTypeTotal = docTypeTotals.get(pendingEntrySummary.getConstantDocumentTypeCode()); if (docTypeTotal == null) { docTypeTotal = new EntryReportDocumentTypeTotalLine(pendingEntrySummary.getConstantDocumentTypeCode()); docTypeTotals.put(pendingEntrySummary.getConstantDocumentTypeCode(), docTypeTotal); } addSummaryToTotal(pendingEntrySummary, docTypeTotal); } | /**
* Adds the given pending entry summary to the appropriate doc type's line total
* @param pendingEntrySummary the pending entry summary to add
* @param docTypeTotals the Map of doc type line total helpers to add the summary to
*/ | Adds the given pending entry summary to the appropriate doc type's line total | addPendingEntryToDocumentType | {
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/gl/batch/service/impl/NightlyOutServiceImpl.java",
"license": "apache-2.0",
"size": 22776
} | [
"java.util.Map",
"org.kuali.kfs.gl.businessobject.PendingEntrySummary"
] | import java.util.Map; import org.kuali.kfs.gl.businessobject.PendingEntrySummary; | import java.util.*; import org.kuali.kfs.gl.businessobject.*; | [
"java.util",
"org.kuali.kfs"
] | java.util; org.kuali.kfs; | 1,595,524 |
public void add( Module module, DesignElement content, String propName,
int posn )
{
ElementPropertyDefn defn = getPropertyDefn( propName );
if ( defn != null )
{
if ( defn.isList( ) )
{
List<DesignElement> values = (List<DesignElement>) getLocalProperty(
module, propName );
if ( values == null )
values = new ArrayList<DesignElement>( );
if ( !values.contains( content ) )
values.add( posn, content );
setProperty( propName, values );
content.setContainer( this, propName );
}
else
{
assert posn == 0;
setProperty( defn, content );
content.setContainer( this, propName );
}
}
} | void function( Module module, DesignElement content, String propName, int posn ) { ElementPropertyDefn defn = getPropertyDefn( propName ); if ( defn != null ) { if ( defn.isList( ) ) { List<DesignElement> values = (List<DesignElement>) getLocalProperty( module, propName ); if ( values == null ) values = new ArrayList<DesignElement>( ); if ( !values.contains( content ) ) values.add( posn, content ); setProperty( propName, values ); content.setContainer( this, propName ); } else { assert posn == 0; setProperty( defn, content ); content.setContainer( this, propName ); } } } | /**
* Adds a content to this element.
*
* @param module
* @param content
* @param propName
* @param posn
*/ | Adds a content to this element | add | {
"repo_name": "sguan-actuate/birt",
"path": "model/org.eclipse.birt.report.model/src/org/eclipse/birt/report/model/core/DesignElement.java",
"license": "epl-1.0",
"size": 113258
} | [
"java.util.ArrayList",
"java.util.List",
"org.eclipse.birt.report.model.metadata.ElementPropertyDefn"
] | import java.util.ArrayList; import java.util.List; import org.eclipse.birt.report.model.metadata.ElementPropertyDefn; | import java.util.*; import org.eclipse.birt.report.model.metadata.*; | [
"java.util",
"org.eclipse.birt"
] | java.util; org.eclipse.birt; | 132,706 |
@Test(expected = GenieNotFoundException.class)
public void testRemoveTagForApplicationNoApp() throws GenieException {
this.service.removeTagForApplication(
UUID.randomUUID().toString(),
"something"
);
} | @Test(expected = GenieNotFoundException.class) void function() throws GenieException { this.service.removeTagForApplication( UUID.randomUUID().toString(), STR ); } | /**
* Test remove configuration for application.
*
* @throws GenieException
*/ | Test remove configuration for application | testRemoveTagForApplicationNoApp | {
"repo_name": "gorcz/genie",
"path": "genie-server/src/test/java/com/netflix/genie/server/services/impl/jpa/TestApplicationConfigServiceJPAImpl.java",
"license": "apache-2.0",
"size": 43910
} | [
"com.netflix.genie.common.exceptions.GenieException",
"com.netflix.genie.common.exceptions.GenieNotFoundException",
"java.util.UUID",
"org.junit.Test"
] | import com.netflix.genie.common.exceptions.GenieException; import com.netflix.genie.common.exceptions.GenieNotFoundException; import java.util.UUID; import org.junit.Test; | import com.netflix.genie.common.exceptions.*; import java.util.*; import org.junit.*; | [
"com.netflix.genie",
"java.util",
"org.junit"
] | com.netflix.genie; java.util; org.junit; | 2,040,396 |
void onContactsSelected(ContactsListFragment fragment, List<Contact> contacts); | void onContactsSelected(ContactsListFragment fragment, List<Contact> contacts); | /**
* Called when one or more contacts have been selected from a
* {@link org.kontalk.ui.ContactsListFragment}.
*/ | Called when one or more contacts have been selected from a <code>org.kontalk.ui.ContactsListFragment</code> | onContactsSelected | {
"repo_name": "115ek/androidclient",
"path": "app/src/main/java/org/kontalk/ui/view/ContactPickerListener.java",
"license": "gpl-3.0",
"size": 1441
} | [
"java.util.List",
"org.kontalk.data.Contact",
"org.kontalk.ui.ContactsListFragment"
] | import java.util.List; import org.kontalk.data.Contact; import org.kontalk.ui.ContactsListFragment; | import java.util.*; import org.kontalk.data.*; import org.kontalk.ui.*; | [
"java.util",
"org.kontalk.data",
"org.kontalk.ui"
] | java.util; org.kontalk.data; org.kontalk.ui; | 1,155,438 |
protected void addIsSubsettedByPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_EndPoint_isSubsettedBy_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EndPoint_isSubsettedBy_feature", "_UI_EndPoint_type"),
OntoumlPackage.Literals.END_POINT__IS_SUBSETTED_BY,
true,
false,
true,
null,
null,
null));
}
| void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), OntoumlPackage.Literals.END_POINT__IS_SUBSETTED_BY, true, false, true, null, null, null)); } | /**
* This adds a property descriptor for the Is Subsetted By feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This adds a property descriptor for the Is Subsetted By feature. | addIsSubsettedByPropertyDescriptor | {
"repo_name": "MenthorTools/ontouml-metamodel",
"path": "net.menthor.onto2.ontouml.edit/src-gen/net/menthor/onto2/ontouml/provider/EndPointItemProvider.java",
"license": "mit",
"size": 6879
} | [
"net.menthor.onto2.ontouml.OntoumlPackage",
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory"
] | import net.menthor.onto2.ontouml.OntoumlPackage; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; | import net.menthor.onto2.ontouml.*; import org.eclipse.emf.edit.provider.*; | [
"net.menthor.onto2",
"org.eclipse.emf"
] | net.menthor.onto2; org.eclipse.emf; | 1,396,089 |
private NestedSet<Artifact> plugins() {
NestedSetBuilder<Artifact> pluginArtifacts = NestedSetBuilder.stableOrder();
pluginArtifacts.addTransitive(
PrerequisiteArtifacts.nestedSet(ruleContext, "plugins", Mode.TARGET));
if (ruleContext.getFragment(ObjcConfiguration.class).runMemleaks()) {
pluginArtifacts.addTransitive(
PrerequisiteArtifacts.nestedSet(ruleContext, IosTest.MEMLEAKS_PLUGIN, Mode.TARGET));
}
return pluginArtifacts.build();
} | NestedSet<Artifact> function() { NestedSetBuilder<Artifact> pluginArtifacts = NestedSetBuilder.stableOrder(); pluginArtifacts.addTransitive( PrerequisiteArtifacts.nestedSet(ruleContext, STR, Mode.TARGET)); if (ruleContext.getFragment(ObjcConfiguration.class).runMemleaks()) { pluginArtifacts.addTransitive( PrerequisiteArtifacts.nestedSet(ruleContext, IosTest.MEMLEAKS_PLUGIN, Mode.TARGET)); } return pluginArtifacts.build(); } | /**
* Jar files for plugins to the test runner. May be empty.
*/ | Jar files for plugins to the test runner. May be empty | plugins | {
"repo_name": "Digas29/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/objc/TestSupport.java",
"license": "apache-2.0",
"size": 11360
} | [
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.analysis.PrerequisiteArtifacts",
"com.google.devtools.build.lib.analysis.RuleConfiguredTarget",
"com.google.devtools.build.lib.collect.nestedset.NestedSet",
"com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder"
] | import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.PrerequisiteArtifacts; import com.google.devtools.build.lib.analysis.RuleConfiguredTarget; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; | import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.analysis.*; import com.google.devtools.build.lib.collect.nestedset.*; | [
"com.google.devtools"
] | com.google.devtools; | 662,403 |
public void initializeMap() {
if (mappingArgs == null) {
mappingArgs = new HashMap<String, Object>();
}
} | void function() { if (mappingArgs == null) { mappingArgs = new HashMap<String, Object>(); } } | /**
* Initialize the Hashmap of mapping arguments if not done so already.
*/ | Initialize the Hashmap of mapping arguments if not done so already | initializeMap | {
"repo_name": "tsiq/magic-beanstalk",
"path": "src/main/java/com/twosigmaiq/magic/beanstalk/model/DockerrunTemplate.java",
"license": "apache-2.0",
"size": 1168
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 1,200,157 |
public void testDecomposed2() throws Exception {
TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(
"\u0049\u0316\u0307STANBUL \u0049\u0307ZM\u0049\u0307R I\u0316SPARTA"));
TurkishLowerCaseFilter filter = new TurkishLowerCaseFilter(stream);
assertTokenStreamContents(filter, new String[] {"i\u0316stanbul", "izmir",
"\u0131\u0316sparta",});
} | void function() throws Exception { TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader( STR)); TurkishLowerCaseFilter filter = new TurkishLowerCaseFilter(stream); assertTokenStreamContents(filter, new String[] {STR, "izmir", STR,}); } | /**
* Test decomposed forms with additional accents
* In this example, U+0049 + U+0316 + U+0307 is canonically equivalent
* to U+0130 + U+0316, and is lowercased the same way.
*/ | Test decomposed forms with additional accents In this example, U+0049 + U+0316 + U+0307 is canonically equivalent to U+0130 + U+0316, and is lowercased the same way | testDecomposed2 | {
"repo_name": "tokee/lucene",
"path": "contrib/analyzers/common/src/test/org/apache/lucene/analysis/tr/TestTurkishLowerCaseFilter.java",
"license": "apache-2.0",
"size": 2607
} | [
"java.io.StringReader",
"org.apache.lucene.analysis.TokenStream",
"org.apache.lucene.analysis.WhitespaceTokenizer"
] | import java.io.StringReader; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.WhitespaceTokenizer; | import java.io.*; import org.apache.lucene.analysis.*; | [
"java.io",
"org.apache.lucene"
] | java.io; org.apache.lucene; | 1,779,086 |
protected void processXMLLangAttributes(XMLAttributes attributes) {
String language = attributes.getValue(NamespaceContext.XML_URI, "lang");
if (language != null) {
fCurrentLanguage = language;
saveLanguage(fCurrentLanguage);
}
} | void function(XMLAttributes attributes) { String language = attributes.getValue(NamespaceContext.XML_URI, "lang"); if (language != null) { fCurrentLanguage = language; saveLanguage(fCurrentLanguage); } } | /**
* Search for a xml:lang attribute, and if one is found, put the new
* [language] into effect.
*/ | Search for a xml:lang attribute, and if one is found, put the new [language] into effect | processXMLLangAttributes | {
"repo_name": "alexkasko/openjdk-icedtea7",
"path": "jaxp/src/com/sun/org/apache/xerces/internal/xinclude/XIncludeHandler.java",
"license": "gpl-2.0",
"size": 118804
} | [
"com.sun.org.apache.xerces.internal.xni.NamespaceContext",
"com.sun.org.apache.xerces.internal.xni.XMLAttributes"
] | import com.sun.org.apache.xerces.internal.xni.NamespaceContext; import com.sun.org.apache.xerces.internal.xni.XMLAttributes; | import com.sun.org.apache.xerces.internal.xni.*; | [
"com.sun.org"
] | com.sun.org; | 875,752 |
public void executeUptimeCommand(AgentCommand commandForExecution) {
try {
validateAndVerifyCommand(AgentConsoleCommands.AGENT_UPTIME, commandForExecution);
Date agentStartDate = agent.getStartDate();
if (agentStartDate != null) {
String formattedTime = DateClockUtil.formatDateAndTime(agentStartDate);
agentConsole.writeLine("Agent was started on " + formattedTime);
String formattedTimeInterval = DateClockUtil.getTimeInterval(agentStartDate);
agentConsole.writeLine("Uptime: " + formattedTimeInterval);
}
} catch (IllegalArgumentException e) {
LOGGER.error("Could not execute command.", e);
agentConsole.writeLine(ILLEGAL_COMMAND_MESSAGE);
}
}
/**
* Tests the hardware possibilities of the agent for supporting emulators.
*
* @param commandForExecution
* - should be {@link AgentConsoleCommands#AGENT_PERFORMANCE} | void function(AgentCommand commandForExecution) { try { validateAndVerifyCommand(AgentConsoleCommands.AGENT_UPTIME, commandForExecution); Date agentStartDate = agent.getStartDate(); if (agentStartDate != null) { String formattedTime = DateClockUtil.formatDateAndTime(agentStartDate); agentConsole.writeLine(STR + formattedTime); String formattedTimeInterval = DateClockUtil.getTimeInterval(agentStartDate); agentConsole.writeLine(STR + formattedTimeInterval); } } catch (IllegalArgumentException e) { LOGGER.error(STR, e); agentConsole.writeLine(ILLEGAL_COMMAND_MESSAGE); } } /** * Tests the hardware possibilities of the agent for supporting emulators. * * @param commandForExecution * - should be {@link AgentConsoleCommands#AGENT_PERFORMANCE} | /**
* Prints the time for which the {@link Agent agent} has been running.
*
* @param commandForExecution
* it should be an {@link AgentConsoleCommands#AGENT_EXIT exit } command.
*/ | Prints the time for which the <code>Agent agent</code> has been running | executeUptimeCommand | {
"repo_name": "MusalaSoft/atmosphere-agent",
"path": "src/main/java/com/musala/atmosphere/agent/state/AgentState.java",
"license": "gpl-3.0",
"size": 12222
} | [
"com.musala.atmosphere.agent.command.AgentCommand",
"com.musala.atmosphere.agent.command.AgentConsoleCommands",
"com.musala.atmosphere.agent.util.date.DateClockUtil",
"java.util.Date"
] | import com.musala.atmosphere.agent.command.AgentCommand; import com.musala.atmosphere.agent.command.AgentConsoleCommands; import com.musala.atmosphere.agent.util.date.DateClockUtil; import java.util.Date; | import com.musala.atmosphere.agent.command.*; import com.musala.atmosphere.agent.util.date.*; import java.util.*; | [
"com.musala.atmosphere",
"java.util"
] | com.musala.atmosphere; java.util; | 547,068 |
private static <K, V> boolean mapsHaveCompatibleOverlap(Map<K, V> a, Map<K, V> b) {
if (b.size() < a.size()) {
Map<K, V> t = a;
a = b;
b = t;
}
boolean overlap = false;
for (Map.Entry<K, V> e : a.entrySet()) {
V value = b.get(e.getKey());
if (value != null) {
if (!value.equals(e.getValue())) {
return false;
}
overlap = true;
} else if (b.containsKey(e.getKey())) {
if (e.getValue() != null) {
return false;
}
overlap = true;
}
}
return overlap;
} | static <K, V> boolean function(Map<K, V> a, Map<K, V> b) { if (b.size() < a.size()) { Map<K, V> t = a; a = b; b = t; } boolean overlap = false; for (Map.Entry<K, V> e : a.entrySet()) { V value = b.get(e.getKey()); if (value != null) { if (!value.equals(e.getValue())) { return false; } overlap = true; } else if (b.containsKey(e.getKey())) { if (e.getValue() != null) { return false; } overlap = true; } } return overlap; } | /**
* True if the two maps have at least one (key, value) pair in common, and no pairs with the same
* key but different values according to {@link Object#equals}.
*/ | True if the two maps have at least one (key, value) pair in common, and no pairs with the same key but different values according to <code>Object#equals</code> | mapsHaveCompatibleOverlap | {
"repo_name": "Medium/closure-templates",
"path": "java/src/com/google/template/soy/shared/internal/AbstractGenerateSoyEscapingDirectiveCode.java",
"license": "apache-2.0",
"size": 24081
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 398,791 |
public void releaseBackends(String clusterName, boolean isReplay) {
ImmutableMap<Long, Backend> idToBackend = idToBackendRef.get();
final List<Long> backendIds = getClusterBackendIds(clusterName);
final Iterator<Long> iterator = backendIds.iterator();
while (iterator.hasNext()) {
final Long id = iterator.next();
if (!idToBackend.containsKey(id)) {
LOG.warn("cluster {} contain backend {} that does't exist", clusterName, id);
} else {
final Backend backend = idToBackend.get(id);
backend.setBackendState(BackendState.free);
backend.clearClusterName();
if (!isReplay) {
Catalog.getInstance().getEditLog().logBackendStateChange(backend);
}
}
}
lastBackendIdForCreationMap.remove(clusterName);
lastBackendIdForOtherMap.remove(clusterName);
} | void function(String clusterName, boolean isReplay) { ImmutableMap<Long, Backend> idToBackend = idToBackendRef.get(); final List<Long> backendIds = getClusterBackendIds(clusterName); final Iterator<Long> iterator = backendIds.iterator(); while (iterator.hasNext()) { final Long id = iterator.next(); if (!idToBackend.containsKey(id)) { LOG.warn(STR, clusterName, id); } else { final Backend backend = idToBackend.get(id); backend.setBackendState(BackendState.free); backend.clearClusterName(); if (!isReplay) { Catalog.getInstance().getEditLog().logBackendStateChange(backend); } } } lastBackendIdForCreationMap.remove(clusterName); lastBackendIdForOtherMap.remove(clusterName); } | /**
* remove backends in cluster
*
* @throws DdlException
*/ | remove backends in cluster | releaseBackends | {
"repo_name": "lingbin/palo",
"path": "fe/src/com/baidu/palo/system/SystemInfoService.java",
"license": "apache-2.0",
"size": 46739
} | [
"com.baidu.palo.catalog.Catalog",
"com.baidu.palo.system.Backend",
"com.google.common.collect.ImmutableMap",
"java.util.Iterator",
"java.util.List"
] | import com.baidu.palo.catalog.Catalog; import com.baidu.palo.system.Backend; import com.google.common.collect.ImmutableMap; import java.util.Iterator; import java.util.List; | import com.baidu.palo.catalog.*; import com.baidu.palo.system.*; import com.google.common.collect.*; import java.util.*; | [
"com.baidu.palo",
"com.google.common",
"java.util"
] | com.baidu.palo; com.google.common; java.util; | 2,534,744 |
@Test
public void testGetFrequency() {
assertEquals("test frequency", -1, entry1.getFrequency());
assertEquals("test frequency", -1, entry2.getFrequency());
SAXTrieHitEntry entry3 = new SAXTrieHitEntry(ENTRY_SIZE, POS1);
entry3.setFrequency(15);
assertEquals("test frequency", 15, entry3.getFrequency());
}
| void function() { assertEquals(STR, -1, entry1.getFrequency()); assertEquals(STR, -1, entry2.getFrequency()); SAXTrieHitEntry entry3 = new SAXTrieHitEntry(ENTRY_SIZE, POS1); entry3.setFrequency(15); assertEquals(STR, 15, entry3.getFrequency()); } | /**
* Frequency getter test.
*/ | Frequency getter test | testGetFrequency | {
"repo_name": "RaynorJim/SAX",
"path": "src/test/java/net/seninp/jmotif/sax/trie/TestSAXTrieHitEntry.java",
"license": "gpl-2.0",
"size": 3717
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 2,762,006 |
protected Intent isAllowed(Intent data) {
try {
if (isCallerAllowed(false)) {
return null;
} else {
String packageName = getCurrentCallingPackage();
Log.d(Constants.TAG, "isAllowed packageName: " + packageName);
byte[] packageSignature;
try {
packageSignature = getPackageSignature(packageName);
} catch (NameNotFoundException e) {
Log.e(Constants.TAG, "Should not happen, returning!", e);
// return error
Intent result = new Intent();
result.putExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_ERROR);
result.putExtra(OpenPgpApi.RESULT_ERROR,
new OpenPgpError(OpenPgpError.GENERIC_ERROR, e.getMessage()));
return result;
}
Log.e(Constants.TAG, "Not allowed to use service! return PendingIntent for registration!");
Intent intent = new Intent(getBaseContext(), RemoteServiceActivity.class);
intent.setAction(RemoteServiceActivity.ACTION_REGISTER);
intent.putExtra(RemoteServiceActivity.EXTRA_PACKAGE_NAME, packageName);
intent.putExtra(RemoteServiceActivity.EXTRA_PACKAGE_SIGNATURE, packageSignature);
intent.putExtra(RemoteServiceActivity.EXTRA_DATA, data);
PendingIntent pi = PendingIntent.getActivity(getBaseContext(), 0,
intent,
PendingIntent.FLAG_CANCEL_CURRENT | PendingIntent.FLAG_ONE_SHOT);
// return PendingIntent to be executed by client
Intent result = new Intent();
result.putExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_USER_INTERACTION_REQUIRED);
result.putExtra(OpenPgpApi.RESULT_INTENT, pi);
return result;
}
} catch (WrongPackageSignatureException e) {
Log.e(Constants.TAG, "wrong signature!", e);
Intent intent = new Intent(getBaseContext(), RemoteServiceActivity.class);
intent.setAction(RemoteServiceActivity.ACTION_ERROR_MESSAGE);
intent.putExtra(RemoteServiceActivity.EXTRA_ERROR_MESSAGE,
getString(R.string.api_error_wrong_signature));
intent.putExtra(RemoteServiceActivity.EXTRA_DATA, data);
PendingIntent pi = PendingIntent.getActivity(getBaseContext(), 0,
intent,
PendingIntent.FLAG_CANCEL_CURRENT);
// return PendingIntent to be executed by client
Intent result = new Intent();
result.putExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_USER_INTERACTION_REQUIRED);
result.putExtra(OpenPgpApi.RESULT_INTENT, pi);
return result;
}
} | Intent function(Intent data) { try { if (isCallerAllowed(false)) { return null; } else { String packageName = getCurrentCallingPackage(); Log.d(Constants.TAG, STR + packageName); byte[] packageSignature; try { packageSignature = getPackageSignature(packageName); } catch (NameNotFoundException e) { Log.e(Constants.TAG, STR, e); Intent result = new Intent(); result.putExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_ERROR); result.putExtra(OpenPgpApi.RESULT_ERROR, new OpenPgpError(OpenPgpError.GENERIC_ERROR, e.getMessage())); return result; } Log.e(Constants.TAG, STR); Intent intent = new Intent(getBaseContext(), RemoteServiceActivity.class); intent.setAction(RemoteServiceActivity.ACTION_REGISTER); intent.putExtra(RemoteServiceActivity.EXTRA_PACKAGE_NAME, packageName); intent.putExtra(RemoteServiceActivity.EXTRA_PACKAGE_SIGNATURE, packageSignature); intent.putExtra(RemoteServiceActivity.EXTRA_DATA, data); PendingIntent pi = PendingIntent.getActivity(getBaseContext(), 0, intent, PendingIntent.FLAG_CANCEL_CURRENT PendingIntent.FLAG_ONE_SHOT); Intent result = new Intent(); result.putExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_USER_INTERACTION_REQUIRED); result.putExtra(OpenPgpApi.RESULT_INTENT, pi); return result; } } catch (WrongPackageSignatureException e) { Log.e(Constants.TAG, STR, e); Intent intent = new Intent(getBaseContext(), RemoteServiceActivity.class); intent.setAction(RemoteServiceActivity.ACTION_ERROR_MESSAGE); intent.putExtra(RemoteServiceActivity.EXTRA_ERROR_MESSAGE, getString(R.string.api_error_wrong_signature)); intent.putExtra(RemoteServiceActivity.EXTRA_DATA, data); PendingIntent pi = PendingIntent.getActivity(getBaseContext(), 0, intent, PendingIntent.FLAG_CANCEL_CURRENT); Intent result = new Intent(); result.putExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_USER_INTERACTION_REQUIRED); result.putExtra(OpenPgpApi.RESULT_INTENT, pi); return result; } } | /**
* Checks if caller is allowed to access the API
*
* @param data
* @return null if caller is allowed, or a Bundle with a PendingIntent
*/ | Checks if caller is allowed to access the API | isAllowed | {
"repo_name": "eric-stanley/apg",
"path": "OpenKeychain/src/main/java/org/sufficientlysecure/keychain/remote/RemoteService.java",
"license": "gpl-3.0",
"size": 10881
} | [
"android.app.PendingIntent",
"android.content.Intent",
"android.content.pm.PackageManager",
"org.openintents.openpgp.OpenPgpError",
"org.openintents.openpgp.util.OpenPgpApi",
"org.thialfihar.android.apg.Constants",
"org.thialfihar.android.apg.remote.ui.RemoteServiceActivity",
"org.thialfihar.android.apg.util.Log"
] | import android.app.PendingIntent; import android.content.Intent; import android.content.pm.PackageManager; import org.openintents.openpgp.OpenPgpError; import org.openintents.openpgp.util.OpenPgpApi; import org.thialfihar.android.apg.Constants; import org.thialfihar.android.apg.remote.ui.RemoteServiceActivity; import org.thialfihar.android.apg.util.Log; | import android.app.*; import android.content.*; import android.content.pm.*; import org.openintents.openpgp.*; import org.openintents.openpgp.util.*; import org.thialfihar.android.apg.*; import org.thialfihar.android.apg.remote.ui.*; import org.thialfihar.android.apg.util.*; | [
"android.app",
"android.content",
"org.openintents.openpgp",
"org.thialfihar.android"
] | android.app; android.content; org.openintents.openpgp; org.thialfihar.android; | 1,481,665 |
public static void rcvXAStart(CommsByteBuffer request, Conversation conversation,
int requestNumber, boolean allocatedFromBufferPool,
boolean partOfExchange)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "rcvXAStart",
new Object[]
{
request,
conversation,
""+requestNumber,
""+allocatedFromBufferPool,
""+partOfExchange
});
ConversationState convState = (ConversationState) conversation.getAttachment();
try
{
int clientTransactionId = request.getInt();
CATConnection catConn = (CATConnection) convState.getObject(convState.getConnectionObjectId());
SICoreConnection connection = catConn.getSICoreConnection();
ServerLinkLevelState linkState = (ServerLinkLevelState) conversation.getLinkLevelAttachment();
SIXAResource xaResource = (SIXAResource)linkState.getTransactionTable().get(clientTransactionId, true);
if (xaResource == null)
{
try {
xaResource = connection.getSIXAResource();
}
catch (SIConnectionUnavailableException e)
{
//No FFDC code needed
//Only FFDC if we haven't received a meTerminated event.
if(!convState.hasMETerminated())
{
FFDCFilter.processException(e, CLASS_NAME + ".rcvXAStart",
CommsConstants.STATICCATXATRANSACTION_XASTART_04);
}
// This should never happen. We are running inside the
// application server - so we should always be able to
// contact a messaging engine. Get very upset about this
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(tc, e);
throw new SIErrorException(e);
}
catch (SIResourceException e)
{
FFDCFilter.processException(e,
CLASS_NAME + ".rcvXAStart",
CommsConstants.STATICCATXATRANSACTION_XASTART_05);
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(tc, e);
throw new XAException(XAException.XAER_RMERR);
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
{
SibTr.debug(tc, "XAResource Object ID", clientTransactionId);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "Getting Xid");
Xid xid = request.getXid();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "Completed:", xid);
int flags = request.getInt();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "Flags: ", flags);
// Now call the method on the XA resource
xaResource.start(xid, flags);
linkState.getTransactionTable().addGlobalTransactionBranch(clientTransactionId,
conversation.getId(),
xaResource,
(XidProxy) xid,
false);
try
{
conversation.send(poolManager.allocate(),
JFapChannelConstants.SEG_XASTART_R,
requestNumber,
JFapChannelConstants.PRIORITY_MEDIUM,
true,
ThrottlingPolicy.BLOCK_THREAD,
null);
}
catch (SIException e)
{
FFDCFilter.processException(e,
CLASS_NAME + ".rcvXAStart",
CommsConstants.STATICCATXATRANSACTION_XASTART_01);
SibTr.error(tc, "COMMUNICATION_ERROR_SICO2027", e);
}
}
catch (XAException e)
{
FFDCFilter.processException(e,
CLASS_NAME + ".rcvXAStart",
CommsConstants.STATICCATXATRANSACTION_XASTART_02);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "XAException - RC: " + e.errorCode, e);
StaticCATHelper.sendExceptionToClient(e,
CommsConstants.STATICCATXATRANSACTION_XASTART_02,
conversation, requestNumber);
}
request.release(allocatedFromBufferPool);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "rcvXAStart");
} | static void function(CommsByteBuffer request, Conversation conversation, int requestNumber, boolean allocatedFromBufferPool, boolean partOfExchange) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, STR, new Object[] { request, conversation, STRSTRSTR.rcvXAStartSTR.rcvXAStartSTRXAResource Object IDSTRGetting XidSTRCompleted:STRFlags: STR.rcvXAStartSTRCOMMUNICATION_ERROR_SICO2027STR.rcvXAStartSTRXAException - RC: " + e.errorCode, e); StaticCATHelper.sendExceptionToClient(e, CommsConstants.STATICCATXATRANSACTION_XASTART_02, conversation, requestNumber); } request.release(allocatedFromBufferPool); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, STR); } | /**
* Calls start() on the SIXAResource.
*
* Fields:
*
* BIT16 XAResourceId
* The XID Structure
* BIT32 Flags
*
* @param request
* @param conversation
* @param requestNumber
* @param allocatedFromBufferPool
* @param partOfExchange
*/ | Calls start() on the SIXAResource. Fields: BIT16 XAResourceId The XID Structure BIT32 Flags | rcvXAStart | {
"repo_name": "kgibm/open-liberty",
"path": "dev/com.ibm.ws.messaging.comms.server/src/com/ibm/ws/sib/comms/server/clientsupport/StaticCATXATransaction.java",
"license": "epl-1.0",
"size": 59313
} | [
"com.ibm.websphere.ras.TraceComponent",
"com.ibm.ws.sib.comms.CommsConstants",
"com.ibm.ws.sib.comms.common.CommsByteBuffer",
"com.ibm.ws.sib.jfapchannel.Conversation",
"com.ibm.ws.sib.utils.ras.SibTr"
] | import com.ibm.websphere.ras.TraceComponent; import com.ibm.ws.sib.comms.CommsConstants; import com.ibm.ws.sib.comms.common.CommsByteBuffer; import com.ibm.ws.sib.jfapchannel.Conversation; import com.ibm.ws.sib.utils.ras.SibTr; | import com.ibm.websphere.ras.*; import com.ibm.ws.sib.comms.*; import com.ibm.ws.sib.comms.common.*; import com.ibm.ws.sib.jfapchannel.*; import com.ibm.ws.sib.utils.ras.*; | [
"com.ibm.websphere",
"com.ibm.ws"
] | com.ibm.websphere; com.ibm.ws; | 1,182,399 |
public Date getBirthdayAsDate() {
if (isBlank(getBirthday()) || getBirthday().split("/").length < 2) {
return null;
}
return toDateFromShortFormat(birthday);
} | Date function() { if (isBlank(getBirthday()) getBirthday().split("/").length < 2) { return null; } return toDateFromShortFormat(birthday); } | /**
* The user's birthday, typed to {@code java.util.Date} if possible.
*
* @return The user's birthday, or {@code null} if unavailable or only available in month/year format.
*/ | The user's birthday, typed to java.util.Date if possible | getBirthdayAsDate | {
"repo_name": "restfb/restfb",
"path": "src/main/lombok/com/restfb/types/User.java",
"license": "mit",
"size": 23512
} | [
"com.restfb.util.DateUtils",
"java.util.Date"
] | import com.restfb.util.DateUtils; import java.util.Date; | import com.restfb.util.*; import java.util.*; | [
"com.restfb.util",
"java.util"
] | com.restfb.util; java.util; | 1,587,494 |
public JProgressBar getProgress() {
if (this.progress == null) {
this.progress = new JProgressBar();
this.progress.setStringPainted(true);
this.progress.setFont(new java.awt.Font("Dialog", java.awt.Font.BOLD, 12));
this.progress.setForeground(new java.awt.Color(153, 153, 255));
this.progress.setString("");
}
return this.progress;
}
| JProgressBar function() { if (this.progress == null) { this.progress = new JProgressBar(); this.progress.setStringPainted(true); this.progress.setFont(new java.awt.Font(STR, java.awt.Font.BOLD, 12)); this.progress.setForeground(new java.awt.Color(153, 153, 255)); this.progress.setString(""); } return this.progress; } | /**
* This method initializes progress
*
* @return javax.swing.JProgressBar
*/ | This method initializes progress | getProgress | {
"repo_name": "NCIP/cagrid-core",
"path": "caGrid/projects/grape/src/org/cagrid/grape/utils/BusyDialog.java",
"license": "bsd-3-clause",
"size": 4150
} | [
"java.awt.Font",
"javax.swing.JProgressBar"
] | import java.awt.Font; import javax.swing.JProgressBar; | import java.awt.*; import javax.swing.*; | [
"java.awt",
"javax.swing"
] | java.awt; javax.swing; | 1,328,799 |
private static List<DependencyInfo> slowSort(
final Collection<DependencyInfo> inputs,
final Collection<DependencyInfo> deps) {
HashMap<String, DependencyInfo> searchSet = buildSearchList(deps);
HashSet<File> seenList = new HashSet<File>();
ArrayList<DependencyInfo> resultList = new ArrayList<DependencyInfo>();
for (DependencyInfo input : inputs) {
if (!seenList.contains(input.getFile())) {
seenList.add(input.getFile());
for (String require : input.getRequires()) {
orderDependenciesForNamespace(input.getFile(), require,
searchSet, seenList, resultList);
}
resultList.add(input);
}
}
return resultList;
}
| static List<DependencyInfo> function( final Collection<DependencyInfo> inputs, final Collection<DependencyInfo> deps) { HashMap<String, DependencyInfo> searchSet = buildSearchList(deps); HashSet<File> seenList = new HashSet<File>(); ArrayList<DependencyInfo> resultList = new ArrayList<DependencyInfo>(); for (DependencyInfo input : inputs) { if (!seenList.contains(input.getFile())) { seenList.add(input.getFile()); for (String require : input.getRequires()) { orderDependenciesForNamespace(input.getFile(), require, searchSet, seenList, resultList); } resultList.add(input); } } return resultList; } | /**
* Compare every element to one another. This is significantly slower than a
* merge sort, but guarantees that deps end up in the right order
*
* @param inputs
* the inputs to scan
* @param deps
* the external dependencies
* @return the list of dependencyInfo objects
*/ | Compare every element to one another. This is significantly slower than a merge sort, but guarantees that deps end up in the right order | slowSort | {
"repo_name": "jlgrock/ClosureJavascriptFramework",
"path": "closure-compiler-maven-plugin/src/main/java/com/github/jlgrock/javascriptframework/closurecompiler/CalcDeps.java",
"license": "mit",
"size": 10708
} | [
"java.io.File",
"java.util.ArrayList",
"java.util.Collection",
"java.util.HashMap",
"java.util.HashSet",
"java.util.List"
] | import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 203,718 |
void layout(Graph pGraph, Graphics2D pGraphics2D, Grid pGrid);
| void layout(Graph pGraph, Graphics2D pGraphics2D, Grid pGrid); | /**
* Lays out the node and its children.
* @param pGraph the ambient graph
* @param pGraphics2D the graphics context
* @param pGrid the grid to snap to
*/ | Lays out the node and its children | layout | {
"repo_name": "henray/JetUML",
"path": "src/ca/mcgill/cs/stg/jetuml/graph/Node.java",
"license": "gpl-3.0",
"size": 2582
} | [
"ca.mcgill.cs.stg.jetuml.framework.Grid",
"java.awt.Graphics2D"
] | import ca.mcgill.cs.stg.jetuml.framework.Grid; import java.awt.Graphics2D; | import ca.mcgill.cs.stg.jetuml.framework.*; import java.awt.*; | [
"ca.mcgill.cs",
"java.awt"
] | ca.mcgill.cs; java.awt; | 1,145,665 |
public List<D> divideByStride(int stride)
{
List<E> data = getData();
List<E> strideList = new ArrayList<E>();
List<E> remainderList = new ArrayList<E>();
// Walk data and divide by on-stride and off-stride
for(int i=0; i < data.size(); i++) {
if(i%stride == 0) {
strideList.add(data.get(i));
} else {
remainderList.add(data.get(i));
}
}
// Collect data in to DataSets
D strideSubset =
spawnSubset(strideList, getLabels(), getFeatValues());
D remainderSubset =
spawnSubset(remainderList, getLabels(), getFeatValues());
// Collect DataSets into List<T>
List<D> returnList = new ArrayList<D>();
returnList.add(strideSubset);
returnList.add(remainderSubset);
return returnList;
}
| List<D> function(int stride) { List<E> data = getData(); List<E> strideList = new ArrayList<E>(); List<E> remainderList = new ArrayList<E>(); for(int i=0; i < data.size(); i++) { if(i%stride == 0) { strideList.add(data.get(i)); } else { remainderList.add(data.get(i)); } } D strideSubset = spawnSubset(strideList, getLabels(), getFeatValues()); D remainderSubset = spawnSubset(remainderList, getLabels(), getFeatValues()); List<D> returnList = new ArrayList<D>(); returnList.add(strideSubset); returnList.add(remainderSubset); return returnList; } | /**
* Returns a list of DataSet subsets of data on the stride (mod stride)
* and off the stride. The on-stride subset is placed first in the list.
* @param stride - int the stride on which to divide
* @return List<D>, 0th element is stride subset, 1st is remainder
*/ | Returns a list of DataSet subsets of data on the stride (mod stride) and off the stride. The on-stride subset is placed first in the list | divideByStride | {
"repo_name": "mxdubois/decision-tree-java",
"path": "src/com/michaelxdubois/decisiontree/DataSet.java",
"license": "mit",
"size": 12383
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 374,367 |
private String getContainerPid(Path pidFilePath) throws Exception {
String containerIdStr =
ConverterUtils.toString(container.getContainerId());
String processId = null;
LOG.debug("Accessing pid for container " + containerIdStr
+ " from pid file " + pidFilePath);
int sleepCounter = 0;
final int sleepInterval = 100;
// loop waiting for pid file to show up
// until our timer expires in which case we admit defeat
while (true) {
processId = ProcessIdFileReader.getProcessId(pidFilePath);
if (processId != null) {
LOG.debug("Got pid " + processId + " for container "
+ containerIdStr);
break;
}
else if ((sleepCounter*sleepInterval) > maxKillWaitTime) {
LOG.info("Could not get pid for " + containerIdStr
+ ". Waited for " + maxKillWaitTime + " ms.");
break;
}
else {
++sleepCounter;
Thread.sleep(sleepInterval);
}
}
return processId;
} | String function(Path pidFilePath) throws Exception { String containerIdStr = ConverterUtils.toString(container.getContainerId()); String processId = null; LOG.debug(STR + containerIdStr + STR + pidFilePath); int sleepCounter = 0; final int sleepInterval = 100; while (true) { processId = ProcessIdFileReader.getProcessId(pidFilePath); if (processId != null) { LOG.debug(STR + processId + STR + containerIdStr); break; } else if ((sleepCounter*sleepInterval) > maxKillWaitTime) { LOG.info(STR + containerIdStr + STR + maxKillWaitTime + STR); break; } else { ++sleepCounter; Thread.sleep(sleepInterval); } } return processId; } | /**
* Loop through for a time-bounded interval waiting to
* read the process id from a file generated by a running process.
* @param pidFilePath File from which to read the process id
* @return Process ID
* @throws Exception
*/ | Loop through for a time-bounded interval waiting to read the process id from a file generated by a running process | getContainerPid | {
"repo_name": "zjshen/hadoop-in-docker",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java",
"license": "apache-2.0",
"size": 32178
} | [
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.yarn.server.nodemanager.util.ProcessIdFileReader",
"org.apache.hadoop.yarn.util.ConverterUtils"
] | import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.server.nodemanager.util.ProcessIdFileReader; import org.apache.hadoop.yarn.util.ConverterUtils; | import org.apache.hadoop.fs.*; import org.apache.hadoop.yarn.server.nodemanager.util.*; import org.apache.hadoop.yarn.util.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 179,570 |
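The method above is a bounded polling loop. A stripped-down sketch of the same pattern, with a hypothetical readValue supplier standing in for ProcessIdFileReader.getProcessId:

import java.util.function.Supplier;

public final class BoundedPoll {
    private BoundedPoll() {}

    /** Polls the supplier every intervalMs until it yields a non-null value or maxWaitMs elapses. */
    public static String poll(Supplier<String> readValue, long intervalMs, long maxWaitMs)
            throws InterruptedException {
        long waited = 0;
        while (true) {
            String value = readValue.get();
            if (value != null) {
                return value;        // the value showed up
            }
            if (waited >= maxWaitMs) {
                return null;         // admit defeat, as the pid-file loop above does
            }
            Thread.sleep(intervalMs);
            waited += intervalMs;
        }
    }
}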
public void parse(JSONObject jsonPoiObject) throws JSONException
{
if (jsonPoiObject != null)
{
mTextureUrl = jsonPoiObject.getString("texture");
mName = jsonPoiObject.has("name") ? jsonPoiObject.getString("name") : ("\" " + mId);
mBillboardHandling = jsonPoiObject.has("billboardHandling") ? jsonPoiObject.getString("billboardHandling") : null;
if (mBillboardHandling != null //
&& !ArvosObject.BillboardHandlingNone.equals(mBillboardHandling) //
&& !ArvosObject.BillboardHandlingCylinder.equals(mBillboardHandling) //
&& !ArvosObject.BillboardHandlingSphere.equals(mBillboardHandling))
{
throw new JSONException("Illegal value for billboardHandling: " + mBillboardHandling);
}
mStartTime = jsonPoiObject.has("startTime") ? jsonPoiObject.getInt("startTime") : 0;
mAnimationDuration = jsonPoiObject.has("duration") ? jsonPoiObject.getInt("duration") : 0;
mLoop = jsonPoiObject.has("loop") ? jsonPoiObject.getBoolean("loop") : true;
mIsActive = jsonPoiObject.has("isActive") ? jsonPoiObject.getBoolean("isActive") : true;
mStartPosition = parseVec3f(jsonPoiObject, "startPosition");
if (mStartPosition == null)
{
mStartPosition = new float[] { 0f, 0f, 0f };
}
mEndPosition = parseVec3f(jsonPoiObject, "endPosition");
mStartScale = parseVec3f(jsonPoiObject, "startScale");
mEndScale = parseVec3f(jsonPoiObject, "endScale");
mStartRotation = parseVec4f(jsonPoiObject, "startRotation");
mEndRotation = parseVec4f(jsonPoiObject, "endRotation");
if (jsonPoiObject.has("onClick"))
{
JSONArray jsonArray = new JSONArray(jsonPoiObject.getString("onClick"));
if (jsonArray != null && jsonArray.length() > 0)
{
for (int i = 0; i < jsonArray.length(); i++)
{
JSONObject jsonOnClick = jsonArray.getJSONObject(i);
if (jsonOnClick.has("url"))
{
if (mOnClickUrls == null)
{
mOnClickUrls = new LinkedList<String>();
}
mOnClickUrls.add(jsonOnClick.getString("url"));
}
if (jsonOnClick.has("activate"))
{
if (mOnClickActivates == null)
{
mOnClickActivates = new LinkedList<String>();
}
mOnClickActivates.add(jsonOnClick.getString("activate"));
}
if (jsonOnClick.has("deactivate"))
{
if (mOnClickDeactivates == null)
{
mOnClickDeactivates = new LinkedList<String>();
}
mOnClickDeactivates.add(jsonOnClick.getString("deactivate"));
}
}
}
}
if (jsonPoiObject.has("onDurationEnd"))
{
JSONArray jsonArray = new JSONArray(jsonPoiObject.getString("onDurationEnd"));
if (jsonArray != null && jsonArray.length() > 0)
{
for (int i = 0; i < jsonArray.length(); i++)
{
JSONObject jsonOnDurationEnd = jsonArray.getJSONObject(i);
if (jsonOnDurationEnd.has("url"))
{
if (mOnDurationEndUrls == null)
{
mOnDurationEndUrls = new LinkedList<String>();
}
mOnDurationEndUrls.add(jsonOnDurationEnd.getString("url"));
}
if (jsonOnDurationEnd.has("activate"))
{
if (mOnDurationEndActivates == null)
{
mOnDurationEndActivates = new LinkedList<String>();
}
mOnDurationEndActivates.add(jsonOnDurationEnd.getString("activate"));
}
if (jsonOnDurationEnd.has("deactivate"))
{
if (mOnDurationEndDeactivates == null)
{
mOnDurationEndDeactivates = new LinkedList<String>();
}
mOnDurationEndDeactivates.add(jsonOnDurationEnd.getString("deactivate"));
}
}
}
}
}
} | void function(JSONObject jsonPoiObject) throws JSONException { if (jsonPoiObject != null) { mTextureUrl = jsonPoiObject.getString(STR); mName = jsonPoiObject.has("nameSTRname") : ("\" STRbillboardHandlingSTRbillboardHandlingSTRIllegal value for billboardHandling: STRstartTimeSTRstartTimeSTRdurationSTRdurationSTRloopSTRloopSTRisActiveSTRisActiveSTRstartPositionSTRendPositionSTRstartScaleSTRendScaleSTRstartRotationSTRendRotationSTRonClickSTRonClickSTRurlSTRurlSTRactivateSTRactivateSTRdeactivateSTRdeactivateSTRonDurationEndSTRonDurationEndSTRurlSTRurlSTRactivateSTRactivateSTRdeactivateSTRdeactivate")); } } } } } } | /**
* Parses one poi object.
*
* @param jsonPoiObject
* The JSON input to parse.
* @throws JSONException
* JSON parse exception.
*/ | Parses one poi object | parse | {
"repo_name": "ARVOS-APP/ArViewerGameplay",
"path": "ArViewer/src/com/arvos/arviewer/viewer/ArvosPoiObject.java",
"license": "lgpl-3.0",
"size": 12625
} | [
"org.json.JSONException",
"org.json.JSONObject"
] | import org.json.JSONException; import org.json.JSONObject; | import org.json.*; | [
"org.json"
] | org.json; | 2,720,350 |
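Based only on the keys read in parse(...) above, a minimal JSON document it should accept might look like the following; the field values are invented for illustration, and optional keys (billboardHandling, positions, onClick, onDurationEnd) are omitted because their exact value formats are not shown in this record.

import org.json.JSONException;
import org.json.JSONObject;

public class PoiJsonExample {
    public static void main(String[] args) throws JSONException {
        String json = "{"
                + "\"texture\": \"http://example.com/texture.png\","  // the only key parse() always reads
                + "\"name\": \"demo poi\","
                + "\"startTime\": 0,"
                + "\"duration\": 1000,"
                + "\"loop\": true,"
                + "\"isActive\": true"
                + "}";
        JSONObject obj = new JSONObject(json);
        System.out.println(obj.getString("texture"));
        System.out.println(obj.optInt("duration", 0));
    }
}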
LayoutInflater inflater = context.getLayoutInflater();
View rowView = inflater.inflate(R.layout.list_main, null, true);
TextView txtTitle = (TextView) rowView.findViewById(R.id.txt);
txtTitle.setText(text[position]);
Drawable colorImg = null;
if (SharedPreferencesUtils.getSelectedTheme(context).equals("AppTheme")) {
colorImg = changeColorOfIcons(imageId[position], R.color.iconLight);
} else {
colorImg = changeColorOfIcons(imageId[position], R.color.iconDark);
}
ImageView imageView = (ImageView) rowView.findViewById(R.id.img);
imageView.setImageDrawable(colorImg);
return rowView;
} | LayoutInflater inflater = context.getLayoutInflater(); View rowView = inflater.inflate(R.layout.list_main, null, true); TextView txtTitle = (TextView) rowView.findViewById(R.id.txt); txtTitle.setText(text[position]); Drawable colorImg = null; if (SharedPreferencesUtils.getSelectedTheme(context).equals(STR)) { colorImg = changeColorOfIcons(imageId[position], R.color.iconLight); } else { colorImg = changeColorOfIcons(imageId[position], R.color.iconDark); } ImageView imageView = (ImageView) rowView.findViewById(R.id.img); imageView.setImageDrawable(colorImg); return rowView; } | /**
 * Adapter used to display the list with the given parameters:
 * 1. text
 * 2. image
*
* @param position
* @param view
* @param parent
* @return
 */ | Adapter used to display the list with the given parameters: 1. text 2. image | getView | {
"repo_name": "simone98dm/TheBusAppFinal",
"path": "app/src/main/java/com/android/projectz/teamrocket/thebusapp/adapters/CustomListSettingMain.java",
"license": "apache-2.0",
"size": 3357
} | [
"android.graphics.drawable.Drawable",
"android.view.LayoutInflater",
"android.view.View",
"android.widget.ImageView",
"android.widget.TextView",
"com.android.projectz.teamrocket.thebusapp.util.SharedPreferencesUtils"
] | import android.graphics.drawable.Drawable; import android.view.LayoutInflater; import android.view.View; import android.widget.ImageView; import android.widget.TextView; import com.android.projectz.teamrocket.thebusapp.util.SharedPreferencesUtils; | import android.graphics.drawable.*; import android.view.*; import android.widget.*; import com.android.projectz.teamrocket.thebusapp.util.*; | [
"android.graphics",
"android.view",
"android.widget",
"com.android.projectz"
] | android.graphics; android.view; android.widget; com.android.projectz; | 2,148,146 |
public Object[] getMethods() {
Method[] methods = getClassUnderInspection().getMethods();
Constructor[] ctors = getClassUnderInspection().getConstructors();
Object[] result = new Object[methods.length + ctors.length];
int resultIndex = 0;
for (; resultIndex < methods.length; resultIndex++) {
Method method = methods[resultIndex];
result[resultIndex] = methodInfo(method);
}
for (int i = 0; i < ctors.length; i++, resultIndex++) {
Constructor ctor = ctors[i];
result[resultIndex] = methodInfo(ctor);
}
return result;
} | Object[] function() { Method[] methods = getClassUnderInspection().getMethods(); Constructor[] ctors = getClassUnderInspection().getConstructors(); Object[] result = new Object[methods.length + ctors.length]; int resultIndex = 0; for (; resultIndex < methods.length; resultIndex++) { Method method = methods[resultIndex]; result[resultIndex] = methodInfo(method); } for (int i = 0; i < ctors.length; i++, resultIndex++) { Constructor ctor = ctors[i]; result[resultIndex] = methodInfo(ctor); } return result; } | /**
* Get info about usual Java instance and class Methods as well as Constructors.
*
* @return Array of StringArrays that can be indexed with the MEMBER_xxx_IDX constants
*/ | Get info about usual Java instance and class Methods as well as Constructors | getMethods | {
"repo_name": "paulk-asert/groovy",
"path": "src/main/java/groovy/inspect/Inspector.java",
"license": "apache-2.0",
"size": 13291
} | [
"java.lang.reflect.Constructor",
"java.lang.reflect.Method"
] | import java.lang.reflect.Constructor; import java.lang.reflect.Method; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 2,242,184 |
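A hedged usage sketch: assuming groovy.inspect.Inspector exposes a constructor taking the object under inspection, each element returned by getMethods() is a String[] that can be indexed with the MEMBER_*_IDX constants mentioned in the Javadoc.

import groovy.inspect.Inspector;
import java.util.Arrays;

public class InspectorDemo {
    public static void main(String[] args) {
        Inspector inspector = new Inspector("hello");   // assumed Inspector(Object) constructor
        for (Object member : inspector.getMethods()) {
            // Each entry describes one method or constructor of java.lang.String.
            System.out.println(Arrays.toString((String[]) member));
        }
    }
}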
private Image getHeapDumpImage() {
if (heapDumpImage == null || heapDumpImage.isDisposed()) {
heapDumpImage = Activator.getImageDescriptor(HEAP_DUMP_IMG_PATH)
.createImage();
}
return heapDumpImage;
} | Image function() { if (heapDumpImage == null heapDumpImage.isDisposed()) { heapDumpImage = Activator.getImageDescriptor(HEAP_DUMP_IMG_PATH) .createImage(); } return heapDumpImage; } | /**
* Gets the heap dump image.
*
* @return The image
*/ | Gets the heap dump image | getHeapDumpImage | {
"repo_name": "TANGO-Project/code-optimiser-plugin",
"path": "bundles/org.jvmmonitor.ui/src/org/jvmmonitor/internal/ui/views/JvmTreeLabelProvider.java",
"license": "apache-2.0",
"size": 9589
} | [
"org.eclipse.swt.graphics.Image",
"org.jvmmonitor.ui.Activator"
] | import org.eclipse.swt.graphics.Image; import org.jvmmonitor.ui.Activator; | import org.eclipse.swt.graphics.*; import org.jvmmonitor.ui.*; | [
"org.eclipse.swt",
"org.jvmmonitor.ui"
] | org.eclipse.swt; org.jvmmonitor.ui; | 1,291,073 |
protected IndexingAction createIndexingAction() {
return new IndexingAction(configuration.getSearchableFields(), configuration.getSearchableAttributes());
} | IndexingAction function() { return new IndexingAction(configuration.getSearchableFields(), configuration.getSearchableAttributes()); } | /**
* This method is protected and exists for testing purposes. This allows
* unit tests to extend this class and override the createIndexingAction so
* that they can mock out the Indexing Action to throw Exceptions, count
* events indexed, etc.
*/ | This method is protected and exists for testing purposes. This allows unit tests to extend this class and override the createIndexingAction so that they can mock out the Indexing Action to throw Exceptions, count events indexed, etc | createIndexingAction | {
"repo_name": "Xsixteen/nifi",
"path": "nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java",
"license": "apache-2.0",
"size": 126035
} | [
"org.apache.nifi.provenance.lucene.IndexingAction"
] | import org.apache.nifi.provenance.lucene.IndexingAction; | import org.apache.nifi.provenance.lucene.*; | [
"org.apache.nifi"
] | org.apache.nifi; | 769,650 |
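The method is a factory-method test seam. A generic sketch of the idea with hypothetical class names (deliberately not the real NiFi types or constructors):

// Hypothetical production class exposing an overridable factory method.
class Repository {
    protected Indexer createIndexer() {
        return new Indexer();
    }
}

class Indexer {
    void index(String event) {
        // real indexing work would happen here
    }
}

// A test subclass overrides the factory to count creations and substitute a no-op collaborator.
class CountingRepository extends Repository {
    int created;

    @Override
    protected Indexer createIndexer() {
        created++;
        return new Indexer() {
            @Override
            void index(String event) {
                // no-op for the test
            }
        };
    }
}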
@Test
public void testReplicantThrottle() throws Exception
{
mockCoordinator();
mockPeon.loadSegment(EasyMock.<DataSegment>anyObject(), EasyMock.<LoadPeonCallback>anyObject());
EasyMock.expectLastCall().atLeastOnce();
EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.<DataSegment>newHashSet()).atLeastOnce();
EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).atLeastOnce();
EasyMock.replay(mockPeon);
EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.<String>anyObject())).andReturn(
Lists.<Rule>newArrayList(
new IntervalLoadRule(new Interval("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), ImmutableMap.<String, Integer>of("hot", 2))
)
).atLeastOnce();
EasyMock.replay(databaseRuleManager);
DruidCluster druidCluster = new DruidCluster(
ImmutableMap.of(
"hot",
MinMaxPriorityQueue.orderedBy(Ordering.natural().reverse()).create(
Arrays.asList(
new ServerHolder(
new DruidServer(
"serverHot",
"hostHot",
1000,
"historical",
"hot",
0
).toImmutableDruidServer(),
mockPeon
),
new ServerHolder(
new DruidServer(
"serverHot2",
"hostHot2",
1000,
"historical",
"hot",
0
).toImmutableDruidServer(),
mockPeon
)
)
)
)
);
DruidCoordinatorRuntimeParams params =
new DruidCoordinatorRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster()))
.withBalancerStrategyFactory(new CostBalancerStrategyFactory(1))
.withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.build();
DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params);
CoordinatorStats stats = afterParams.getCoordinatorStats();
Assert.assertTrue(stats.getPerTierStats().get("assignedCount").get("hot").get() == 48);
Assert.assertTrue(stats.getPerTierStats().get("unassignedCount") == null);
Assert.assertTrue(stats.getPerTierStats().get("unassignedSize") == null);
DataSegment overFlowSegment = new DataSegment(
"test",
new Interval("2012-02-01/2012-02-02"),
new DateTime().toString(),
Maps.<String, Object>newHashMap(),
Lists.<String>newArrayList(),
Lists.<String>newArrayList(),
NoneShardSpec.instance(),
1,
0
);
afterParams.getBalancerStrategyFactory().close();
afterParams = ruleRunner.run(
new DruidCoordinatorRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withEmitter(emitter)
.withAvailableSegments(Arrays.asList(overFlowSegment))
.withDatabaseRuleManager(databaseRuleManager)
.withBalancerStrategyFactory(new CostBalancerStrategyFactory(1))
.withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster()))
.build()
);
stats = afterParams.getCoordinatorStats();
Assert.assertTrue(stats.getPerTierStats().get("assignedCount").get("hot").get() == 1);
Assert.assertTrue(stats.getPerTierStats().get("unassignedCount") == null);
Assert.assertTrue(stats.getPerTierStats().get("unassignedSize") == null);
EasyMock.verify(mockPeon);
afterParams.getBalancerStrategyFactory().close();
} | void function() throws Exception { mockCoordinator(); mockPeon.loadSegment(EasyMock.<DataSegment>anyObject(), EasyMock.<LoadPeonCallback>anyObject()); EasyMock.expectLastCall().atLeastOnce(); EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.<DataSegment>newHashSet()).atLeastOnce(); EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).atLeastOnce(); EasyMock.replay(mockPeon); EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.<String>anyObject())).andReturn( Lists.<Rule>newArrayList( new IntervalLoadRule(new Interval(STR), ImmutableMap.<String, Integer>of("hot", 2)) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); DruidCluster druidCluster = new DruidCluster( ImmutableMap.of( "hot", MinMaxPriorityQueue.orderedBy(Ordering.natural().reverse()).create( Arrays.asList( new ServerHolder( new DruidServer( STR, STR, 1000, STR, "hot", 0 ).toImmutableDruidServer(), mockPeon ), new ServerHolder( new DruidServer( STR, STR, 1000, STR, "hot", 0 ).toImmutableDruidServer(), mockPeon ) ) ) ) ); DruidCoordinatorRuntimeParams params = new DruidCoordinatorRuntimeParams.Builder() .withDruidCluster(druidCluster) .withAvailableSegments(availableSegments) .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategyFactory(new CostBalancerStrategyFactory(1)) .withBalancerReferenceTimestamp(new DateTime(STR)) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); CoordinatorStats stats = afterParams.getCoordinatorStats(); Assert.assertTrue(stats.getPerTierStats().get(STR).get("hot").get() == 48); Assert.assertTrue(stats.getPerTierStats().get(STR) == null); Assert.assertTrue(stats.getPerTierStats().get(STR) == null); DataSegment overFlowSegment = new DataSegment( "test", new Interval(STR), new DateTime().toString(), Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), NoneShardSpec.instance(), 1, 0 ); afterParams.getBalancerStrategyFactory().close(); afterParams = ruleRunner.run( new DruidCoordinatorRuntimeParams.Builder() .withDruidCluster(druidCluster) .withEmitter(emitter) .withAvailableSegments(Arrays.asList(overFlowSegment)) .withDatabaseRuleManager(databaseRuleManager) .withBalancerStrategyFactory(new CostBalancerStrategyFactory(1)) .withBalancerReferenceTimestamp(new DateTime(STR)) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .build() ); stats = afterParams.getCoordinatorStats(); Assert.assertTrue(stats.getPerTierStats().get(STR).get("hot").get() == 1); Assert.assertTrue(stats.getPerTierStats().get(STR) == null); Assert.assertTrue(stats.getPerTierStats().get(STR) == null); EasyMock.verify(mockPeon); afterParams.getBalancerStrategyFactory().close(); } | /**
* Nodes:
* hot - 2 replicants
*
* @throws Exception
*/ | Nodes: hot - 2 replicants | testReplicantThrottle | {
"repo_name": "mrijke/druid",
"path": "server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java",
"license": "apache-2.0",
"size": 47160
} | [
"com.google.common.collect.ImmutableMap",
"com.google.common.collect.Lists",
"com.google.common.collect.Maps",
"com.google.common.collect.MinMaxPriorityQueue",
"com.google.common.collect.Ordering",
"com.google.common.collect.Sets",
"io.druid.client.DruidServer",
"io.druid.server.coordinator.rules.IntervalLoadRule",
"io.druid.server.coordinator.rules.Rule",
"io.druid.timeline.DataSegment",
"io.druid.timeline.partition.NoneShardSpec",
"java.util.Arrays",
"org.easymock.EasyMock",
"org.joda.time.DateTime",
"org.joda.time.Interval",
"org.junit.Assert"
] | import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import io.druid.client.DruidServer; import io.druid.server.coordinator.rules.IntervalLoadRule; import io.druid.server.coordinator.rules.Rule; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import java.util.Arrays; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; | import com.google.common.collect.*; import io.druid.client.*; import io.druid.server.coordinator.rules.*; import io.druid.timeline.*; import io.druid.timeline.partition.*; import java.util.*; import org.easymock.*; import org.joda.time.*; import org.junit.*; | [
"com.google.common",
"io.druid.client",
"io.druid.server",
"io.druid.timeline",
"java.util",
"org.easymock",
"org.joda.time",
"org.junit"
] | com.google.common; io.druid.client; io.druid.server; io.druid.timeline; java.util; org.easymock; org.joda.time; org.junit; | 2,763,726 |
protected Bitmap loadTopSongs(Cursor sortedCursor) {
if (sortedCursor == null || !sortedCursor.moveToFirst()) {
return null;
}
ArrayList<Bitmap> loadedBitmaps = new ArrayList<Bitmap>(MAX_NUM_BITMAPS_TO_LOAD);
final int artistIdx = sortedCursor.getColumnIndex(MediaStore.Audio.AudioColumns.ARTIST);
final int albumIdIdx = sortedCursor.getColumnIndex(MediaStore.Audio.AudioColumns.ALBUM_ID);
final int albumIdx = sortedCursor.getColumnIndex(MediaStore.Audio.AudioColumns.ALBUM);
Bitmap bitmap = null;
String artistName = null;
String albumName = null;
long albumId = -1;
// create a hashset of the keys so we don't load images from the same album multiple times
HashSet<String> keys = new HashSet<String>(sortedCursor.getCount());
do {
if (isCancelled()) {
return null;
}
artistName = sortedCursor.getString(artistIdx);
albumName = sortedCursor.getString(albumIdx);
albumId = sortedCursor.getLong(albumIdIdx);
String key = ImageFetcher.generateAlbumCacheKey(albumName, artistName);
// if we successfully added the key (ie the key didn't previously exist)
if (keys.add(key)) {
// try to load the bitmap
bitmap = ImageWorker.getBitmapInBackground(mContext, mImageCache,
key, albumName, artistName, albumId, ImageType.ALBUM);
// if we got the bitmap, add it to the list
if (bitmap != null) {
loadedBitmaps.add(bitmap);
bitmap = null;
}
}
} while (sortedCursor.moveToNext() && loadedBitmaps.size() < MAX_NUM_BITMAPS_TO_LOAD);
// if we found at least 1 bitmap
if (loadedBitmaps.size() > 0) {
// get the first bitmap
bitmap = loadedBitmaps.get(0);
// if we have many bitmaps
if (loadedBitmaps.size() == MAX_NUM_BITMAPS_TO_LOAD) {
// create a combined bitmap of the 4 images
final int width = bitmap.getWidth();
final int height = bitmap.getHeight();
Bitmap combinedBitmap = Bitmap.createBitmap(width, height,
bitmap.getConfig());
Canvas combinedCanvas = new Canvas(combinedBitmap);
// top left
combinedCanvas.drawBitmap(loadedBitmaps.get(0), null,
new Rect(0, 0, width / 2, height / 2), null);
// top right
combinedCanvas.drawBitmap(loadedBitmaps.get(1), null,
new Rect(width / 2, 0, width, height / 2), null);
// bottom left
combinedCanvas.drawBitmap(loadedBitmaps.get(2), null,
new Rect(0, height / 2, width / 2, height), null);
// bottom right
combinedCanvas.drawBitmap(loadedBitmaps.get(3), null,
new Rect(width / 2, height / 2, width, height), null);
// combinedCanvas.release();
combinedCanvas = null;
bitmap = combinedBitmap;
}
}
// store the fact that we ran this code into the db to prevent multiple re-runs
mPlaylistStore.updateCoverArt(mPlaylistId);
if (bitmap != null) {
// add the image to the cache
mImageCache.addBitmapToCache(mKey, bitmap, true);
} else {
mImageCache.removeFromCache(mKey);
mFallbackToDefaultImage = true;
}
return bitmap;
}
/**
* {@inheritDoc} | Bitmap function(Cursor sortedCursor) { if (sortedCursor == null !sortedCursor.moveToFirst()) { return null; } ArrayList<Bitmap> loadedBitmaps = new ArrayList<Bitmap>(MAX_NUM_BITMAPS_TO_LOAD); final int artistIdx = sortedCursor.getColumnIndex(MediaStore.Audio.AudioColumns.ARTIST); final int albumIdIdx = sortedCursor.getColumnIndex(MediaStore.Audio.AudioColumns.ALBUM_ID); final int albumIdx = sortedCursor.getColumnIndex(MediaStore.Audio.AudioColumns.ALBUM); Bitmap bitmap = null; String artistName = null; String albumName = null; long albumId = -1; HashSet<String> keys = new HashSet<String>(sortedCursor.getCount()); do { if (isCancelled()) { return null; } artistName = sortedCursor.getString(artistIdx); albumName = sortedCursor.getString(albumIdx); albumId = sortedCursor.getLong(albumIdIdx); String key = ImageFetcher.generateAlbumCacheKey(albumName, artistName); if (keys.add(key)) { bitmap = ImageWorker.getBitmapInBackground(mContext, mImageCache, key, albumName, artistName, albumId, ImageType.ALBUM); if (bitmap != null) { loadedBitmaps.add(bitmap); bitmap = null; } } } while (sortedCursor.moveToNext() && loadedBitmaps.size() < MAX_NUM_BITMAPS_TO_LOAD); if (loadedBitmaps.size() > 0) { bitmap = loadedBitmaps.get(0); if (loadedBitmaps.size() == MAX_NUM_BITMAPS_TO_LOAD) { final int width = bitmap.getWidth(); final int height = bitmap.getHeight(); Bitmap combinedBitmap = Bitmap.createBitmap(width, height, bitmap.getConfig()); Canvas combinedCanvas = new Canvas(combinedBitmap); combinedCanvas.drawBitmap(loadedBitmaps.get(0), null, new Rect(0, 0, width / 2, height / 2), null); combinedCanvas.drawBitmap(loadedBitmaps.get(1), null, new Rect(width / 2, 0, width, height / 2), null); combinedCanvas.drawBitmap(loadedBitmaps.get(2), null, new Rect(0, height / 2, width / 2, height), null); combinedCanvas.drawBitmap(loadedBitmaps.get(3), null, new Rect(width / 2, height / 2, width, height), null); combinedCanvas = null; bitmap = combinedBitmap; } } mPlaylistStore.updateCoverArt(mPlaylistId); if (bitmap != null) { mImageCache.addBitmapToCache(mKey, bitmap, true); } else { mImageCache.removeFromCache(mKey); mFallbackToDefaultImage = true; } return bitmap; } /** * {@inheritDoc} | /**
* Gets the Cover Art of the playlist, which is a combination of the top song's album image
*
* @param sortedCursor the sorted playlist song cursor
* @return Bitmap of the artist
*/ | Gets the Cover Art of the playlist, which is a combination of the top song's album image | loadTopSongs | {
"repo_name": "YouKim/ExoPlayer",
"path": "twelve/src/main/java/com/dolzzo/twelve/cache/PlaylistWorkerTask.java",
"license": "apache-2.0",
"size": 14561
} | [
"android.database.Cursor",
"android.graphics.Bitmap",
"android.graphics.Canvas",
"android.graphics.Rect",
"android.provider.MediaStore",
"com.dolzzo.twelve.cache.ImageWorker",
"java.util.ArrayList",
"java.util.HashSet"
] | import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Rect; import android.provider.MediaStore; import com.dolzzo.twelve.cache.ImageWorker; import java.util.ArrayList; import java.util.HashSet; | import android.database.*; import android.graphics.*; import android.provider.*; import com.dolzzo.twelve.cache.*; import java.util.*; | [
"android.database",
"android.graphics",
"android.provider",
"com.dolzzo.twelve",
"java.util"
] | android.database; android.graphics; android.provider; com.dolzzo.twelve; java.util; | 1,849,256 |
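The heart of loadTopSongs is the 2x2 composition step. An isolated sketch of just that step, using the same Android Bitmap/Canvas/Rect calls as the code above; the four inputs are assumed non-null and equally sized.

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;

public final class CoverGrid {
    private CoverGrid() {}

    /** Draws four equally sized bitmaps into a single 2x2 grid of the same dimensions. */
    public static Bitmap compose2x2(Bitmap a, Bitmap b, Bitmap c, Bitmap d) {
        int w = a.getWidth();
        int h = a.getHeight();
        Bitmap out = Bitmap.createBitmap(w, h, a.getConfig());
        Canvas canvas = new Canvas(out);
        canvas.drawBitmap(a, null, new Rect(0, 0, w / 2, h / 2), null);      // top left
        canvas.drawBitmap(b, null, new Rect(w / 2, 0, w, h / 2), null);      // top right
        canvas.drawBitmap(c, null, new Rect(0, h / 2, w / 2, h), null);      // bottom left
        canvas.drawBitmap(d, null, new Rect(w / 2, h / 2, w, h), null);      // bottom right
        return out;
    }
}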
public String[] registerOAuthConsumer() throws IdentityOAuthAdminException {
String loggedInUser = CarbonContext.getThreadLocalCarbonContext().getUsername();
if (LOG.isDebugEnabled()) {
LOG.debug("Adding a consumer secret for the logged in user:" + loggedInUser);
}
String tenantUser = MultitenantUtils.getTenantAwareUsername(loggedInUser);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
String userDomain = IdentityUtil.extractDomainFromName(loggedInUser);
OAuthAppDAO dao = new OAuthAppDAO();
return dao.addOAuthConsumer(UserCoreUtil.removeDomainFromName(tenantUser), tenantId, userDomain);
} | String[] function() throws IdentityOAuthAdminException { String loggedInUser = CarbonContext.getThreadLocalCarbonContext().getUsername(); if (LOG.isDebugEnabled()) { LOG.debug(STR + loggedInUser); } String tenantUser = MultitenantUtils.getTenantAwareUsername(loggedInUser); int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId(); String userDomain = IdentityUtil.extractDomainFromName(loggedInUser); OAuthAppDAO dao = new OAuthAppDAO(); return dao.addOAuthConsumer(UserCoreUtil.removeDomainFromName(tenantUser), tenantId, userDomain); } | /**
 * Registers a consumer secret against the logged-in user. A given user can only have a single
* consumer secret at a time. Calling this method again and again will update the existing
* consumer secret key.
*
* @return An array containing the consumer key and the consumer secret correspondingly.
* @throws IdentityOAuthAdminException Error when persisting the data in the persistence store.
 */ | Registers a consumer secret against the logged-in user. A given user can only have a single consumer secret at a time. Calling this method again and again will update the existing consumer secret key | registerOAuthConsumer | {
"repo_name": "IsuraD/identity-inbound-auth-oauth",
"path": "components/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth/OAuthAdminServiceImpl.java",
"license": "apache-2.0",
"size": 73520
} | [
"org.wso2.carbon.context.CarbonContext",
"org.wso2.carbon.identity.core.util.IdentityUtil",
"org.wso2.carbon.identity.oauth.dao.OAuthAppDAO",
"org.wso2.carbon.user.core.util.UserCoreUtil",
"org.wso2.carbon.utils.multitenancy.MultitenantUtils"
] | import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.oauth.dao.OAuthAppDAO; import org.wso2.carbon.user.core.util.UserCoreUtil; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; | import org.wso2.carbon.context.*; import org.wso2.carbon.identity.core.util.*; import org.wso2.carbon.identity.oauth.dao.*; import org.wso2.carbon.user.core.util.*; import org.wso2.carbon.utils.multitenancy.*; | [
"org.wso2.carbon"
] | org.wso2.carbon; | 567,140 |
@FIXVersion(introduced="4.4")
@TagNumRef(tagNum=TagNum.TradeReportType)
public void setTradeReportType(TradeReportType tradeReportType) {
this.tradeReportType = tradeReportType;
} | @FIXVersion(introduced="4.4") @TagNumRef(tagNum=TagNum.TradeReportType) void function(TradeReportType tradeReportType) { this.tradeReportType = tradeReportType; } | /**
* Message field setter.
* @param tradeReportType field value
*/ | Message field setter | setTradeReportType | {
"repo_name": "marvisan/HadesFIX",
"path": "Model/src/main/java/net/hades/fix/message/TradeCaptureReportAckMsg.java",
"license": "gpl-3.0",
"size": 118717
} | [
"net.hades.fix.message.anno.FIXVersion",
"net.hades.fix.message.anno.TagNumRef",
"net.hades.fix.message.type.TagNum",
"net.hades.fix.message.type.TradeReportType"
] | import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.TagNum; import net.hades.fix.message.type.TradeReportType; | import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*; | [
"net.hades.fix"
] | net.hades.fix; | 544,461 |
void scanDiskComponents(ILSMIndexOperationContext ctx, IIndexCursor cursor) throws HyracksDataException; | void scanDiskComponents(ILSMIndexOperationContext ctx, IIndexCursor cursor) throws HyracksDataException; | /**
* Scan all disk components of the index
*
* @param ctx
* the search operation context
* @param cursor
* the index cursor
* @throws HyracksDataException
*/ | Scan all disk components of the index | scanDiskComponents | {
"repo_name": "heriram/incubator-asterixdb",
"path": "hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java",
"license": "apache-2.0",
"size": 7830
} | [
"org.apache.hyracks.api.exceptions.HyracksDataException",
"org.apache.hyracks.storage.common.IIndexCursor"
] | import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.storage.common.IIndexCursor; | import org.apache.hyracks.api.exceptions.*; import org.apache.hyracks.storage.common.*; | [
"org.apache.hyracks"
] | org.apache.hyracks; | 119,262 |
@SuppressWarnings("unchecked")
public void recycle() {
if (buffer.length > INITIAL_CAPACITY) {
Entry[] e = new Entry[INITIAL_CAPACITY];
System.arraycopy(buffer, 0, e, 0, INITIAL_CAPACITY);
buffer = e;
}
// reset flushed, unflushed and tail
// See https://github.com/netty/netty/issues/1772
flushed = 0;
unflushed = 0;
tail = 0;
// Set the channel to null so it can be GC'ed ASAP
channel = null;
totalPendingSize = 0;
writable = 1;
RECYCLER.recycle(this, (Handle<ChannelOutboundBuffer>) handle);
} | @SuppressWarnings(STR) void function() { if (buffer.length > INITIAL_CAPACITY) { Entry[] e = new Entry[INITIAL_CAPACITY]; System.arraycopy(buffer, 0, e, 0, INITIAL_CAPACITY); buffer = e; } flushed = 0; unflushed = 0; tail = 0; channel = null; totalPendingSize = 0; writable = 1; RECYCLER.recycle(this, (Handle<ChannelOutboundBuffer>) handle); } | /**
 * Recycle this {@link ChannelOutboundBuffer}. After this has been called it must no longer be used with the
 * previously assigned {@link AbstractChannel}.
 */ | Recycle this <code>ChannelOutboundBuffer</code>. After this has been called it must no longer be used with the previously assigned <code>AbstractChannel</code> | recycle | {
"repo_name": "daschl/netty",
"path": "transport/src/main/java/io/netty/channel/ChannelOutboundBuffer.java",
"license": "apache-2.0",
"size": 19772
} | [
"io.netty.util.Recycler"
] | import io.netty.util.Recycler; | import io.netty.util.*; | [
"io.netty.util"
] | io.netty.util; | 2,119,360 |
public PutIndexTemplateRequestBuilder setSource(BytesReference templateSource) {
request.source(templateSource);
return this;
} | PutIndexTemplateRequestBuilder function(BytesReference templateSource) { request.source(templateSource); return this; } | /**
* The template source definition.
*/ | The template source definition | setSource | {
"repo_name": "Flipkart/elasticsearch",
"path": "src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java",
"license": "apache-2.0",
"size": 7494
} | [
"org.elasticsearch.common.bytes.BytesReference"
] | import org.elasticsearch.common.bytes.BytesReference; | import org.elasticsearch.common.bytes.*; | [
"org.elasticsearch.common"
] | org.elasticsearch.common; | 2,529,050 |
public ArrayList<String> getSingletonFeatures(Dictionaries dict){
ArrayList<String> features = new ArrayList<>();
features.add(mentionType.toString());
features.add(nerString);
features.add(animacy.toString());
int personNum = 3;
if(person.equals(Person.I) || person.equals(Person.WE)) personNum = 1;
if(person.equals(Person.YOU)) personNum = 2;
if(person.equals(Person.UNKNOWN)) personNum = 0;
features.add(String.valueOf(personNum));
features.add(number.toString());
features.add(getPosition());
// TODO(kevin): make these compatible with universal dependencies
try {
features.add(getRelation());
features.add(getQuantification(dict));
features.add(String.valueOf(getModifiers(dict)));
features.add(String.valueOf(getNegation(dict)));
features.add(String.valueOf(getModal(dict)));
features.add(String.valueOf(getReportEmbedding(dict)));
features.add(String.valueOf(getCoordination()));
} catch(IllegalArgumentException e) {}
return features;
} | ArrayList<String> function(Dictionaries dict){ ArrayList<String> features = new ArrayList<>(); features.add(mentionType.toString()); features.add(nerString); features.add(animacy.toString()); int personNum = 3; if(person.equals(Person.I) person.equals(Person.WE)) personNum = 1; if(person.equals(Person.YOU)) personNum = 2; if(person.equals(Person.UNKNOWN)) personNum = 0; features.add(String.valueOf(personNum)); features.add(number.toString()); features.add(getPosition()); try { features.add(getRelation()); features.add(getQuantification(dict)); features.add(String.valueOf(getModifiers(dict))); features.add(String.valueOf(getNegation(dict))); features.add(String.valueOf(getModal(dict))); features.add(String.valueOf(getReportEmbedding(dict))); features.add(String.valueOf(getCoordination())); } catch(IllegalArgumentException e) {} return features; } | /**
* Returns the features used by the singleton predictor (logistic
* classifier) to decide whether the mention belongs to a singleton entity
*/ | Returns the features used by the singleton predictor (logistic classifier) to decide whether the mention belongs to a singleton entity | getSingletonFeatures | {
"repo_name": "automenta/corenlp",
"path": "src/edu/stanford/nlp/hcoref/data/Mention.java",
"license": "gpl-2.0",
"size": 58107
} | [
"edu.stanford.nlp.hcoref.data.Dictionaries",
"java.util.ArrayList"
] | import edu.stanford.nlp.hcoref.data.Dictionaries; import java.util.ArrayList; | import edu.stanford.nlp.hcoref.data.*; import java.util.*; | [
"edu.stanford.nlp",
"java.util"
] | edu.stanford.nlp; java.util; | 465,651 |
@Override
public MapOp clone()
{
try
{
final MapOp result = (MapOp) super.clone();
if (_inputs != null)
{
result._inputs = new ArrayList<MapOp>();
for (final MapOp mo : _inputs)
{
result._inputs.add(mo.clone());
}
}
else
{
result._inputs = null;
}
return result;
}
catch (final CloneNotSupportedException e)
{
log.error(e.getMessage());
throw new Error("Error cloning MapOp, shouldn't be here.");
}
} | MapOp function() { try { final MapOp result = (MapOp) super.clone(); if (_inputs != null) { result._inputs = new ArrayList<MapOp>(); for (final MapOp mo : _inputs) { result._inputs.add(mo.clone()); } } else { result._inputs = null; } return result; } catch (final CloneNotSupportedException e) { log.error(e.getMessage()); throw new Error(STR); } } | /**
* Returns a clone of this MapOp and all its children. This will not include any intermediate
* results or temporary variables involved in computation.
*/ | Returns a clone of this MapOp and all its children. This will not include any intermediate results or temporary variables involved in computation | clone | {
"repo_name": "bradh/mrgeo",
"path": "mrgeo-core/src/main/java/org/mrgeo/mapalgebra/MapOp.java",
"license": "apache-2.0",
"size": 20082
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 562,760 |
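The same clone-the-children pattern in a self-contained form, using a hypothetical Node class instead of the MapOp hierarchy:

import java.util.ArrayList;
import java.util.List;

class Node implements Cloneable {
    private List<Node> children = new ArrayList<>();

    @Override
    public Node clone() {
        try {
            Node copy = (Node) super.clone();
            // Deep-copy the child list so the clone shares no mutable state with the original.
            copy.children = new ArrayList<>();
            for (Node child : children) {
                copy.children.add(child.clone());
            }
            return copy;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError("Cloneable is implemented, so this cannot happen", e);
        }
    }
}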
public void setBody(String body, ContentType contentType) {
try {
// Attempt to cast it to an EntityEnclosingMethod which supports
// body elements (i.e. POST, PUT methods) and set the body
((HttpEntityEnclosingRequestBase) this.request).setEntity(new StringEntity(body, contentType));
} catch (Exception e) {
logger.error("Cannot add http body to request: " + e.getMessage());
}
} | void function(String body, ContentType contentType) { try { ((HttpEntityEnclosingRequestBase) this.request).setEntity(new StringEntity(body, contentType)); } catch (Exception e) { logger.error(STR + e.getMessage()); } } | /**
* Sets a body parameter
*
* @param body
* message body to add
* @param contentType
* Content type
*/ | Sets a body parameter | setBody | {
"repo_name": "BlueberryMoss/ucsd-oa-training-spark",
"path": "Spark-Training/src/uk/co/blueberrymoss/http/UcsdHttpConnection.java",
"license": "apache-2.0",
"size": 11409
} | [
"org.apache.http.client.methods.HttpEntityEnclosingRequestBase",
"org.apache.http.entity.ContentType",
"org.apache.http.entity.StringEntity"
] | import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; | import org.apache.http.client.methods.*; import org.apache.http.entity.*; | [
"org.apache.http"
] | org.apache.http; | 257,287 |
Effort saveEffortForTiTATask(Effort effort, String description, TiTAUser user, TiTAProject project); | Effort saveEffortForTiTATask(Effort effort, String description, TiTAUser user, TiTAProject project); | /**
 * Saves an effort for a TiTA task generated by some TiTA user.
 *
 * @param effort Effort
 * @param description effort description
 * @param user user
 * @param project TiTAProject
 * @return saved effort
 */ | Saves an effort for a TiTA task generated by some TiTA user | saveEffortForTiTATask | {
"repo_name": "tita/tita",
"path": "tita-business/src/main/java/at/ac/tuwien/ifs/tita/business/service/time/IEffortService.java",
"license": "apache-2.0",
"size": 6641
} | [
"at.ac.tuwien.ifs.tita.entity.Effort",
"at.ac.tuwien.ifs.tita.entity.TiTAProject",
"at.ac.tuwien.ifs.tita.entity.TiTAUser"
] | import at.ac.tuwien.ifs.tita.entity.Effort; import at.ac.tuwien.ifs.tita.entity.TiTAProject; import at.ac.tuwien.ifs.tita.entity.TiTAUser; | import at.ac.tuwien.ifs.tita.entity.*; | [
"at.ac.tuwien"
] | at.ac.tuwien; | 673,419 |
@SuppressWarnings("rawtypes")
public static Comparable convertToComparable(IPropertyType<?> propertyType, String value) throws ConstraintValueDoNotMatchPropertyTypeException {
if (!(propertyType instanceof IComparablePropertyType)) {
throw new ConstraintValueDoNotMatchPropertyTypeException(
"Constraint is invalid for property type <" + propertyType.getTypeName() + ">, as it's not comparable");
}
IComparablePropertyType<?> comparablePropertyType = (IComparablePropertyType) propertyType;
try {
return comparablePropertyType.parse(value);
} catch (InvalidPropertyValueException e) {
throw new ConstraintValueDoNotMatchPropertyTypeException("Unable to parse value <" + value + "> of type <" + propertyType.getTypeName() + ">", e);
}
} | @SuppressWarnings(STR) static Comparable function(IPropertyType<?> propertyType, String value) throws ConstraintValueDoNotMatchPropertyTypeException { if (!(propertyType instanceof IComparablePropertyType)) { throw new ConstraintValueDoNotMatchPropertyTypeException( STR + propertyType.getTypeName() + STR); } IComparablePropertyType<?> comparablePropertyType = (IComparablePropertyType) propertyType; try { return comparablePropertyType.parse(value); } catch (InvalidPropertyValueException e) { throw new ConstraintValueDoNotMatchPropertyTypeException(STR + value + STR + propertyType.getTypeName() + ">", e); } } | /**
 * Convert a string value following its type; throw an exception if it cannot be converted to a comparable.
*
* @param propertyType the type of the property
* @param value the value to convert
* @return the converted comparable
* @throws ConstraintValueDoNotMatchPropertyTypeException if the converted value is not a comparable
 */ | Convert a string value following its type; throw an exception if it cannot be converted to a comparable | convertToComparable | {
"repo_name": "alien4cloud/alien4cloud",
"path": "alien4cloud-tosca/src/main/java/alien4cloud/tosca/properties/constraints/ConstraintUtil.java",
"license": "apache-2.0",
"size": 5740
} | [
"org.alien4cloud.tosca.exceptions.ConstraintValueDoNotMatchPropertyTypeException",
"org.alien4cloud.tosca.exceptions.InvalidPropertyValueException",
"org.alien4cloud.tosca.normative.types.IComparablePropertyType",
"org.alien4cloud.tosca.normative.types.IPropertyType"
] | import org.alien4cloud.tosca.exceptions.ConstraintValueDoNotMatchPropertyTypeException; import org.alien4cloud.tosca.exceptions.InvalidPropertyValueException; import org.alien4cloud.tosca.normative.types.IComparablePropertyType; import org.alien4cloud.tosca.normative.types.IPropertyType; | import org.alien4cloud.tosca.exceptions.*; import org.alien4cloud.tosca.normative.types.*; | [
"org.alien4cloud.tosca"
] | org.alien4cloud.tosca; | 929,062 |
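A generic sketch of the parse-and-translate-exception pattern used above, with hypothetical exception and helper types rather than the alien4cloud classes:

class ValueFormatException extends Exception {
    ValueFormatException(String message, Throwable cause) {
        super(message, cause);
    }
}

final class Conversions {
    private Conversions() {}

    /** Parses the value as a Long and rethrows any failure as the caller-facing exception type. */
    static Comparable<Long> toComparableLong(String value) throws ValueFormatException {
        try {
            return Long.valueOf(value);
        } catch (NumberFormatException e) {
            throw new ValueFormatException("Unable to parse value <" + value + "> as a long", e);
        }
    }
}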
public FeatureCursor queryFeaturesForChunkIdOrder(String[] columns,
BoundingBox boundingBox, String where, int limit, long offset) {
return queryFeaturesForChunk(columns, boundingBox, where,
getPkColumnName(), limit, offset);
} | FeatureCursor function(String[] columns, BoundingBox boundingBox, String where, int limit, long offset) { return queryFeaturesForChunk(columns, boundingBox, where, getPkColumnName(), limit, offset); } | /**
* Query for features within the bounding box ordered by id, starting at the
* offset and returning no more than the limit
*
* @param columns columns
* @param boundingBox bounding box
* @param where where clause
* @param limit chunk limit
* @param offset chunk query offset
* @return feature cursor
* @since 6.2.0
*/ | Query for features within the bounding box ordered by id, starting at the offset and returning no more than the limit | queryFeaturesForChunkIdOrder | {
"repo_name": "ngageoint/geopackage-android",
"path": "geopackage-sdk/src/main/java/mil/nga/geopackage/extension/nga/index/FeatureTableIndex.java",
"license": "mit",
"size": 276322
} | [
"mil.nga.geopackage.BoundingBox",
"mil.nga.geopackage.features.user.FeatureCursor"
] | import mil.nga.geopackage.BoundingBox; import mil.nga.geopackage.features.user.FeatureCursor; | import mil.nga.geopackage.*; import mil.nga.geopackage.features.user.*; | [
"mil.nga.geopackage"
] | mil.nga.geopackage; | 347,237 |
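A hedged usage sketch of the chunked query: page with a fixed limit and a growing offset until a chunk comes back smaller than the limit. The FeatureTableIndex/FeatureCursor types come from the imports above; cursor iteration is assumed to follow the usual Android Cursor API.

import mil.nga.geopackage.BoundingBox;
import mil.nga.geopackage.extension.nga.index.FeatureTableIndex;
import mil.nga.geopackage.features.user.FeatureCursor;

public final class ChunkedQueryExample {
    private ChunkedQueryExample() {}

    /** Counts all matching features chunk by chunk instead of loading one huge cursor. */
    public static long countFeatures(FeatureTableIndex index, String[] columns,
                                     BoundingBox boundingBox, String where, int limit) {
        long total = 0;
        long offset = 0;
        while (true) {
            FeatureCursor cursor = index.queryFeaturesForChunkIdOrder(
                    columns, boundingBox, where, limit, offset);
            int rows = 0;
            try {
                while (cursor.moveToNext()) {
                    rows++;
                }
            } finally {
                cursor.close();
            }
            total += rows;
            if (rows < limit) {
                return total;    // last, short chunk
            }
            offset += limit;
        }
    }
}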
@ServiceMethod(returns = ReturnType.SINGLE)
ExpressRouteCircuitsRoutesTableListResultInner listRoutesTable(
String resourceGroupName, String crossConnectionName, String peeringName, String devicePath, Context context); | @ServiceMethod(returns = ReturnType.SINGLE) ExpressRouteCircuitsRoutesTableListResultInner listRoutesTable( String resourceGroupName, String crossConnectionName, String peeringName, String devicePath, Context context); | /**
* Gets the currently advertised routes table associated with the express route cross connection in a resource
* group.
*
* @param resourceGroupName The name of the resource group.
* @param crossConnectionName The name of the ExpressRouteCrossConnection.
* @param peeringName The name of the peering.
* @param devicePath The path of the device.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the currently advertised routes table associated with the express route cross connection in a resource
* group.
*/ | Gets the currently advertised routes table associated with the express route cross connection in a resource group | listRoutesTable | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/ExpressRouteCrossConnectionsClient.java",
"license": "mit",
"size": 44449
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.util.Context",
"com.azure.resourcemanager.network.fluent.models.ExpressRouteCircuitsRoutesTableListResultInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context; import com.azure.resourcemanager.network.fluent.models.ExpressRouteCircuitsRoutesTableListResultInner; | import com.azure.core.annotation.*; import com.azure.core.util.*; import com.azure.resourcemanager.network.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 1,994,195 |
public void setSubAwardAmountInfoList(
List<SubAwardAmountInfo> subAwardAmountInfoList) {
this.subAwardAmountInfoList = subAwardAmountInfoList;
} | void function( List<SubAwardAmountInfo> subAwardAmountInfoList) { this.subAwardAmountInfoList = subAwardAmountInfoList; } | /**.
* This is the Setter Method for subAwardAmountInfoList
* @param subAwardAmountInfoList The subAwardAmountInfoList to set.
 */ | This is the Setter Method for subAwardAmountInfoList | setSubAwardAmountInfoList | {
"repo_name": "blackcathacker/kc.preclean",
"path": "coeus-code/src/main/java/org/kuali/kra/subaward/bo/SubAward.java",
"license": "apache-2.0",
"size": 44135
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,781,023 |
public AxisState draw(Graphics2D g2, double cursor, Rectangle2D plotArea,
Rectangle2D dataArea, RectangleEdge edge,
PlotRenderingInfo plotState) {
AxisState state = null;
// if the axis is not visible, don't draw it...
if (!isVisible()) {
state = new AxisState(cursor);
// even though the axis is not visible, we need ticks for the
// gridlines...
List ticks = refreshTicks(g2, state, dataArea, edge);
state.setTicks(ticks);
return state;
}
// draw the tick marks and labels...
state = drawTickMarksAndLabels(g2, cursor, plotArea, dataArea, edge);
// // draw the marker band (if there is one)...
// if (getMarkerBand() != null) {
// if (edge == RectangleEdge.BOTTOM) {
// cursor = cursor - getMarkerBand().getHeight(g2);
// }
// getMarkerBand().draw(g2, plotArea, dataArea, 0, cursor);
// }
// draw the axis label...
state = drawLabel(getLabel(), g2, plotArea, dataArea, edge, state);
createAndAddEntity(cursor, state, dataArea, edge, plotState);
return state;
}
| AxisState function(Graphics2D g2, double cursor, Rectangle2D plotArea, Rectangle2D dataArea, RectangleEdge edge, PlotRenderingInfo plotState) { AxisState state = null; if (!isVisible()) { state = new AxisState(cursor); List ticks = refreshTicks(g2, state, dataArea, edge); state.setTicks(ticks); return state; } state = drawTickMarksAndLabels(g2, cursor, plotArea, dataArea, edge); state = drawLabel(getLabel(), g2, plotArea, dataArea, edge, state); createAndAddEntity(cursor, state, dataArea, edge, plotState); return state; } | /**
* Draws the axis on a Java 2D graphics device (such as the screen or a
* printer).
*
* @param g2 the graphics device (<code>null</code> not permitted).
* @param cursor the cursor location.
* @param plotArea the area within which the axes and data should be drawn
* (<code>null</code> not permitted).
* @param dataArea the area within which the data should be drawn
* (<code>null</code> not permitted).
* @param edge the location of the axis (<code>null</code> not permitted).
* @param plotState collects information about the plot
* (<code>null</code> permitted).
*
* @return The axis state (never <code>null</code>).
*/ | Draws the axis on a Java 2D graphics device (such as the screen or a printer) | draw | {
"repo_name": "apetresc/JFreeChart",
"path": "src/main/java/org/jfree/chart/axis/NumberAxis.java",
"license": "lgpl-2.1",
"size": 56557
} | [
"java.awt.Graphics2D",
"java.awt.geom.Rectangle2D",
"java.util.List",
"org.jfree.chart.plot.PlotRenderingInfo",
"org.jfree.ui.RectangleEdge"
] | import java.awt.Graphics2D; import java.awt.geom.Rectangle2D; import java.util.List; import org.jfree.chart.plot.PlotRenderingInfo; import org.jfree.ui.RectangleEdge; | import java.awt.*; import java.awt.geom.*; import java.util.*; import org.jfree.chart.plot.*; import org.jfree.ui.*; | [
"java.awt",
"java.util",
"org.jfree.chart",
"org.jfree.ui"
] | java.awt; java.util; org.jfree.chart; org.jfree.ui; | 472,278 |
private static IPentahoResultSet convertToMemoryResultSet( IPentahoResultSet resultSet ) throws SQLException {
MemoryResultSet cachedResultSet = null;
try {
IPentahoMetaData meta = resultSet.getMetaData();
Object[][] columnHeaders = meta.getColumnHeaders();
MemoryMetaData cachedMetaData = new MemoryMetaData( columnHeaders, null );
String[] colTypesAsString;
// If the IPentahoMetaData is an instanceof SQLMetaData then get the column types from the metadata
if ( meta instanceof SQLMetaData ) {
SQLMetaData sqlMeta = (SQLMetaData) meta;
// Column Types in SQLMetaData are int. MemoryMetaData stores column types as string. So we will store them
// as string in MemoryMetaData
int[] colTypes = sqlMeta.getJDBCColumnTypes();
colTypesAsString = new String[ colTypes.length ];
for ( int i = 0; i < colTypes.length; i++ ) {
colTypesAsString[ i ] = Integer.toString( colTypes[ i ] );
}
cachedMetaData.setColumnTypes( colTypesAsString );
}
cachedResultSet = new MemoryResultSet( cachedMetaData );
Object[] rowObjects = resultSet.next();
while ( rowObjects != null ) {
cachedResultSet.addRow( rowObjects );
rowObjects = resultSet.next();
}
} finally {
resultSet.close();
}
return cachedResultSet;
} | static IPentahoResultSet function( IPentahoResultSet resultSet ) throws SQLException { MemoryResultSet cachedResultSet = null; try { IPentahoMetaData meta = resultSet.getMetaData(); Object[][] columnHeaders = meta.getColumnHeaders(); MemoryMetaData cachedMetaData = new MemoryMetaData( columnHeaders, null ); String[] colTypesAsString; if ( meta instanceof SQLMetaData ) { SQLMetaData sqlMeta = (SQLMetaData) meta; int[] colTypes = sqlMeta.getJDBCColumnTypes(); colTypesAsString = new String[ colTypes.length ]; for ( int i = 0; i < colTypes.length; i++ ) { colTypesAsString[ i ] = Integer.toString( colTypes[ i ] ); } cachedMetaData.setColumnTypes( colTypesAsString ); } cachedResultSet = new MemoryResultSet( cachedMetaData ); Object[] rowObjects = resultSet.next(); while ( rowObjects != null ) { cachedResultSet.addRow( rowObjects ); rowObjects = resultSet.next(); } } finally { resultSet.close(); } return cachedResultSet; } | /**
* Convert the live result set to memory result set.
*
* @param resultSet
* @return
*/ | Convert the live result set to memory result set | convertToMemoryResultSet | {
"repo_name": "SergeyTravin/data-access",
"path": "core/src/main/java/org/pentaho/platform/dataaccess/datasource/wizard/service/impl/utils/DatasourceServiceHelper.java",
"license": "apache-2.0",
"size": 8240
} | [
"java.sql.SQLException",
"org.pentaho.commons.connection.IPentahoMetaData",
"org.pentaho.commons.connection.IPentahoResultSet",
"org.pentaho.commons.connection.memory.MemoryMetaData",
"org.pentaho.commons.connection.memory.MemoryResultSet",
"org.pentaho.platform.plugin.services.connections.sql.SQLMetaData"
] | import java.sql.SQLException; import org.pentaho.commons.connection.IPentahoMetaData; import org.pentaho.commons.connection.IPentahoResultSet; import org.pentaho.commons.connection.memory.MemoryMetaData; import org.pentaho.commons.connection.memory.MemoryResultSet; import org.pentaho.platform.plugin.services.connections.sql.SQLMetaData; | import java.sql.*; import org.pentaho.commons.connection.*; import org.pentaho.commons.connection.memory.*; import org.pentaho.platform.plugin.services.connections.sql.*; | [
"java.sql",
"org.pentaho.commons",
"org.pentaho.platform"
] | java.sql; org.pentaho.commons; org.pentaho.platform; | 508,160 |
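The same drain-to-memory idea in plain JDBC terms (a generic sketch, not the Pentaho types): read the column count once, then pull every row into a list before the live result set is closed.

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public final class ResultSetSnapshot {
    private ResultSetSnapshot() {}

    /** Copies all rows of a live ResultSet into memory, closing the result set when done. */
    public static List<Object[]> snapshot(ResultSet rs) throws SQLException {
        ResultSetMetaData meta = rs.getMetaData();
        int columns = meta.getColumnCount();
        List<Object[]> rows = new ArrayList<>();
        try {
            while (rs.next()) {
                Object[] row = new Object[columns];
                for (int i = 0; i < columns; i++) {
                    row[i] = rs.getObject(i + 1);   // JDBC columns are 1-based
                }
                rows.add(row);
            }
        } finally {
            rs.close();                             // mirror the finally-close in the method above
        }
        return rows;
    }
}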
private String generateQuery(String qry, String tableAlias, H2TableDescriptor tbl) throws IgniteCheckedException {
assert tbl != null;
final String qry0 = qry;
String t = tbl.fullTableName();
String from = " ";
qry = qry.trim();
String upper = qry.toUpperCase();
if (upper.startsWith("SELECT")) {
qry = qry.substring(6).trim();
final int star = qry.indexOf('*');
if (star == 0)
qry = qry.substring(1).trim();
else if (star > 0) {
if (F.eq('.', qry.charAt(star - 1))) {
t = qry.substring(0, star - 1);
qry = qry.substring(star + 1).trim();
}
else
throw new IgniteCheckedException("Invalid query (missing alias before asterisk): " + qry0);
}
else
throw new IgniteCheckedException("Only queries starting with 'SELECT *' and 'SELECT alias.*' " +
"are supported (rewrite your query or use SqlFieldsQuery instead): " + qry0);
upper = qry.toUpperCase();
}
if (!upper.startsWith("FROM"))
from = " FROM " + t + (tableAlias != null ? " as " + tableAlias : "") +
(upper.startsWith("WHERE") || upper.startsWith("ORDER") || upper.startsWith("LIMIT") ?
" " : " WHERE ");
if(tableAlias != null)
t = tableAlias;
qry = "SELECT " + t + "." + KEY_FIELD_NAME + ", " + t + "." + VAL_FIELD_NAME + from + qry;
return qry;
} | String function(String qry, String tableAlias, H2TableDescriptor tbl) throws IgniteCheckedException { assert tbl != null; final String qry0 = qry; String t = tbl.fullTableName(); String from = " "; qry = qry.trim(); String upper = qry.toUpperCase(); if (upper.startsWith(STR)) { qry = qry.substring(6).trim(); final int star = qry.indexOf('*'); if (star == 0) qry = qry.substring(1).trim(); else if (star > 0) { if (F.eq('.', qry.charAt(star - 1))) { t = qry.substring(0, star - 1); qry = qry.substring(star + 1).trim(); } else throw new IgniteCheckedException(STR + qry0); } else throw new IgniteCheckedException(STR + STR + qry0); upper = qry.toUpperCase(); } if (!upper.startsWith("FROM")) from = STR + t + (tableAlias != null ? STR + tableAlias : STRWHERESTRORDERSTRLIMITSTR STR WHERE STRSELECT STR.STR, STR." + VAL_FIELD_NAME + from + qry; return qry; } | /**
* Prepares statement for query.
*
* @param qry Query string.
* @param tableAlias table alias.
* @param tbl Table to use.
* @return Prepared statement.
* @throws IgniteCheckedException In case of error.
*/ | Prepares statement for query | generateQuery | {
"repo_name": "sk0x50/ignite",
"path": "modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/IgniteH2Indexing.java",
"license": "apache-2.0",
"size": 114428
} | [
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.internal.util.typedef.F"
] | import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.util.typedef.F; | import org.apache.ignite.*; import org.apache.ignite.internal.util.typedef.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 2,681,994 |
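A worked example of the rewrite, expressed as comments only; <KEY> and <VAL> stand for KEY_FIELD_NAME and VAL_FIELD_NAME (not shown in this record), and "Person" stands for tbl.fullTableName().

// Hypothetical inputs and outputs for generateQuery(qry, null, tbl):
//
//   qry = "name = ?"                                   (no SELECT, no FROM)
//     -> "SELECT Person.<KEY>, Person.<VAL> FROM Person WHERE name = ?"
//
//   qry = "SELECT * FROM Person WHERE age > 10"
//     -> "SELECT Person.<KEY>, Person.<VAL> FROM Person WHERE age > 10"
//
//   qry = "SELECT p.* FROM Person p WHERE p.age > 10"  (key/value columns take the alias "p")
//     -> "SELECT p.<KEY>, p.<VAL> FROM Person p WHERE p.age > 10"
//
// Anything else after SELECT (for example an explicit column list) is rejected with the
// "Only queries starting with 'SELECT *' and 'SELECT alias.*' are supported" error above.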
public List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>> getRelationshipWasProvidedBy(List<String> ids, List<String> fromFields, List<String> relFields, List<String> toFields) throws IOException, JsonClientException {
List<Object> args = new ArrayList<Object>();
args.add(ids);
args.add(fromFields);
args.add(relFields);
args.add(toFields);
TypeReference<List<List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>>>> retType = new TypeReference<List<List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>>>>() {};
List<List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>>> res = caller.jsonrpcCall("CDMI_EntityAPI.get_relationship_WasProvidedBy", args, retType, true, false);
return res.get(0);
} | List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>> function(List<String> ids, List<String> fromFields, List<String> relFields, List<String> toFields) throws IOException, JsonClientException { List<Object> args = new ArrayList<Object>(); args.add(ids); args.add(fromFields); args.add(relFields); args.add(toFields); TypeReference<List<List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>>>> retType = new TypeReference<List<List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>>>>() {}; List<List<Tuple3<FieldsSubsystem, FieldsProvided, FieldsSource>>> res = caller.jsonrpcCall(STR, args, retType, true, false); return res.get(0); } | /**
* <p>Original spec-file function name: get_relationship_WasProvidedBy</p>
* <pre>
* </pre>
* @param ids instance of list of String
* @param fromFields instance of list of String
* @param relFields instance of list of String
* @param toFields instance of list of String
* @return instance of list of tuple of size 3: type {@link us.kbase.cdmientityapi.FieldsSubsystem FieldsSubsystem} (original type "fields_Subsystem"), type {@link us.kbase.cdmientityapi.FieldsProvided FieldsProvided} (original type "fields_Provided"), type {@link us.kbase.cdmientityapi.FieldsSource FieldsSource} (original type "fields_Source")
* @throws IOException if an IO exception occurs
* @throws JsonClientException if a JSON RPC exception occurs
*/ | Original spec-file function name: get_relationship_WasProvidedBy <code> </code> | getRelationshipWasProvidedBy | {
"repo_name": "kbase/trees",
"path": "src/us/kbase/cdmientityapi/CDMIEntityAPIClient.java",
"license": "mit",
"size": 869221
} | [
"com.fasterxml.jackson.core.type.TypeReference",
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"us.kbase.common.service.JsonClientException",
"us.kbase.common.service.Tuple3"
] | import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.util.ArrayList; import java.util.List; import us.kbase.common.service.JsonClientException; import us.kbase.common.service.Tuple3; | import com.fasterxml.jackson.core.type.*; import java.io.*; import java.util.*; import us.kbase.common.service.*; | [
"com.fasterxml.jackson",
"java.io",
"java.util",
"us.kbase.common"
] | com.fasterxml.jackson; java.io; java.util; us.kbase.common; | 1,967,741 |
public static String getRuleText(ObjectElement objEle) {
Integer mult = (Integer) objEle.getValue();
Object attributeOwner = objEle.getParent();
Integer cond = 0;
if (attributeOwner instanceof ClassAsAssociatedOneSide_c) {
cond = ((ClassAsAssociatedOneSide_c) attributeOwner).getCond();
} else if (attributeOwner instanceof ClassAsAssociatedOtherSide_c) {
cond = ((ClassAsAssociatedOtherSide_c) attributeOwner).getCond();
} else if (attributeOwner instanceof ClassAsLink_c) {
if (mult == 0) {
return "1";
} else {
return "*";
}
} else if (attributeOwner instanceof ClassAsSimpleFormalizer_c) {
cond = ((ClassAsSimpleFormalizer_c) attributeOwner).getCond();
} else if (attributeOwner instanceof ClassAsSimpleParticipant_c) {
cond = ((ClassAsSimpleParticipant_c) attributeOwner).getCond();
}
return getRuleForMultCond(mult, cond);
}
static String[][] rules = new String[][] { new String[] { "1", "0..1" }, new String[] { "1..*", "*" } }; | static String function(ObjectElement objEle) { Integer mult = (Integer) objEle.getValue(); Object attributeOwner = objEle.getParent(); Integer cond = 0; if (attributeOwner instanceof ClassAsAssociatedOneSide_c) { cond = ((ClassAsAssociatedOneSide_c) attributeOwner).getCond(); } else if (attributeOwner instanceof ClassAsAssociatedOtherSide_c) { cond = ((ClassAsAssociatedOtherSide_c) attributeOwner).getCond(); } else if (attributeOwner instanceof ClassAsLink_c) { if (mult == 0) { return "1"; } else { return "*"; } } else if (attributeOwner instanceof ClassAsSimpleFormalizer_c) { cond = ((ClassAsSimpleFormalizer_c) attributeOwner).getCond(); } else if (attributeOwner instanceof ClassAsSimpleParticipant_c) { cond = ((ClassAsSimpleParticipant_c) attributeOwner).getCond(); } return getRuleForMultCond(mult, cond); } static String[][] rules = new String[][] { new String[] { "1", "0..1" }, new String[] { "1..*", "*" } }; | /**
* We have the following rules:
*
* 1 (unconditional one) mult = 0 cond = 0
* 0..1 (conditional one) mult = 0 cond = 1
* 1..* (unconditional many) mult = 1, cond = 0
* * (conditional many) mult = 1, cond = 1
*
*/ | We have the following rules: 1 (unconditional one) mult = 0 cond = 0 0..1 (conditional one) mult = 0 cond = 1 1..* (unconditional many) mult = 1, cond = 0 * (conditional many) mult = 1, cond = 1 | getRuleText | {
"repo_name": "lwriemen/bridgepoint",
"path": "src/org.xtuml.bp.core.editors/src/org/xtuml/bp/core/editors/association/AssociationEditorTab.java",
"license": "apache-2.0",
"size": 22963
} | [
"org.xtuml.bp.core.inspector.ObjectElement"
] | import org.xtuml.bp.core.inspector.ObjectElement; | import org.xtuml.bp.core.inspector.*; | [
"org.xtuml.bp"
] | org.xtuml.bp; | 376,590 |
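The multiplicity/conditionality table documented above maps directly onto the two-by-two rules array shown in the entry. Below is a minimal, self-contained sketch of that mapping; the helper name getRuleForMultCond is taken from the method body, everything else is illustrative.

    // Stand-alone illustration of the mult/cond -> association rule-text mapping.
    public final class MultiplicityRules {
        // rows: mult (0 = one, 1 = many); columns: cond (0 = unconditional, 1 = conditional)
        private static final String[][] RULES = { { "1", "0..1" }, { "1..*", "*" } };

        static String getRuleForMultCond(int mult, int cond) {
            return RULES[mult][cond];
        }

        public static void main(String[] args) {
            System.out.println(getRuleForMultCond(0, 0)); // 1    (unconditional one)
            System.out.println(getRuleForMultCond(0, 1)); // 0..1 (conditional one)
            System.out.println(getRuleForMultCond(1, 0)); // 1..* (unconditional many)
            System.out.println(getRuleForMultCond(1, 1)); // *    (conditional many)
        }
    }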
public static WritableRaster createPackedRaster(DataBuffer dataBuffer, int w, int h,
int bitsPerPixel, Point location) {
if (w <= 0 || h <= 0) {
// awt.22E=w or h is less than or equal to zero
throw new RasterFormatException(Messages.getString("awt.22E")); //$NON-NLS-1$
}
if (location == null) {
location = new Point(0, 0);
}
if ((long)location.x + w > Integer.MAX_VALUE || (long)location.y + h > Integer.MAX_VALUE) {
// awt.276=location.x + w or location.y + h results in integer
// overflow
throw new RasterFormatException(Messages.getString("awt.276")); //$NON-NLS-1$
}
if (dataBuffer == null) {
// awt.278=dataBuffer is null
throw new NullPointerException(Messages.getString("awt.278")); //$NON-NLS-1$
}
if (dataBuffer.getNumBanks() > 1) {
// awt.27A=dataBuffer has more than one bank
throw new RasterFormatException(Messages.getString("awt.27A")); //$NON-NLS-1$
}
int dataType = dataBuffer.getDataType();
if (dataType != DataBuffer.TYPE_BYTE && dataType != DataBuffer.TYPE_USHORT
&& dataType != DataBuffer.TYPE_INT) {
// awt.230=dataType is not one of the supported data types
throw new IllegalArgumentException(Messages.getString("awt.230")); //$NON-NLS-1$
}
MultiPixelPackedSampleModel sampleModel = new MultiPixelPackedSampleModel(dataType, w, h,
bitsPerPixel);
return new OrdinaryWritableRaster(sampleModel, dataBuffer, location);
} | static WritableRaster function(DataBuffer dataBuffer, int w, int h, int bitsPerPixel, Point location) { if (w <= 0 h <= 0) { throw new RasterFormatException(Messages.getString(STR)); } if (location == null) { location = new Point(0, 0); } if ((long)location.x + w > Integer.MAX_VALUE (long)location.y + h > Integer.MAX_VALUE) { throw new RasterFormatException(Messages.getString(STR)); } if (dataBuffer == null) { throw new NullPointerException(Messages.getString(STR)); } if (dataBuffer.getNumBanks() > 1) { throw new RasterFormatException(Messages.getString(STR)); } int dataType = dataBuffer.getDataType(); if (dataType != DataBuffer.TYPE_BYTE && dataType != DataBuffer.TYPE_USHORT && dataType != DataBuffer.TYPE_INT) { throw new IllegalArgumentException(Messages.getString(STR)); } MultiPixelPackedSampleModel sampleModel = new MultiPixelPackedSampleModel(dataType, w, h, bitsPerPixel); return new OrdinaryWritableRaster(sampleModel, dataBuffer, location); } | /**
* Creates a Raster object with a MultiPixelPackedSampleModel and the
* specified DataBuffer.
*
* @param dataBuffer
* the DataBuffer.
* @param w
* the width of the image data.
* @param h
* the height of the image data.
* @param bitsPerPixel
* the number of bits per pixel.
* @param location
* the location which defines the upper left corner of the
* Raster.
* @return the WritableRaster.
*/ | Creates a Raster object with a MultiPixelPackedSampleModel and the specified DataBuffer | createPackedRaster | {
"repo_name": "dritanlatifi/AndroidPrefuse",
"path": "src/awt/java/awt/image/Raster.java",
"license": "bsd-3-clause",
"size": 54179
} | [
"org.apache.harmony.awt.gl.image.OrdinaryWritableRaster",
"org.apache.harmony.awt.internal.nls.Messages"
] | import org.apache.harmony.awt.gl.image.OrdinaryWritableRaster; import org.apache.harmony.awt.internal.nls.Messages; | import org.apache.harmony.awt.gl.image.*; import org.apache.harmony.awt.internal.nls.*; | [
"org.apache.harmony"
] | org.apache.harmony; | 2,216,039 |
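A brief usage sketch for the packed-raster factory documented above, written against the standard java.awt.image API that this Harmony port mirrors; the 1-bit monochrome buffer sizing is an illustrative assumption.

    import java.awt.Point;
    import java.awt.image.DataBuffer;
    import java.awt.image.DataBufferByte;
    import java.awt.image.Raster;
    import java.awt.image.WritableRaster;

    public class PackedRasterDemo {
        public static void main(String[] args) {
            int w = 8, h = 8, bitsPerPixel = 1;           // 1-bit monochrome image
            int bytesPerRow = (w * bitsPerPixel + 7) / 8; // rows are byte aligned
            DataBuffer buffer = new DataBufferByte(bytesPerRow * h);
            WritableRaster raster =
                    Raster.createPackedRaster(buffer, w, h, bitsPerPixel, new Point(0, 0));
            raster.setSample(3, 4, 0, 1);                  // set one pixel in band 0
            System.out.println(raster.getSample(3, 4, 0)); // prints 1
        }
    }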
@Generated
@Selector("upperBound")
public native double upperBound(); | @Selector(STR) native double function(); | /**
* The maximum displayable value for the axis.
*/ | The maximum displayable value for the axis | upperBound | {
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/accessibility/AXNumericDataAxisDescriptor.java",
"license": "apache-2.0",
"size": 9678
} | [
"org.moe.natj.objc.ann.Selector"
] | import org.moe.natj.objc.ann.Selector; | import org.moe.natj.objc.ann.*; | [
"org.moe.natj"
] | org.moe.natj; | 384,427 |
private static PDFObject getDestFromString(PDFObject str, PDFObject root)
throws IOException {
// find the names object in the root node
PDFObject names = root.getDictRef("Names");
if (names != null) {
// find the dests entry in the names dictionary
PDFObject dests = names.getDictRef("Dests");
if (dests != null) {
// create a name tree object
NameTree tree = new NameTree(dests);
// find the value we're looking for
PDFObject obj = tree.find(str.getStringValue());
// if we get back a dictionary, look for the /D value
if (obj != null && obj.getType() == PDFObject.DICTIONARY) {
obj = obj.getDictRef("D");
}
// found it
return obj;
}
}
// not found
return null;
} | static PDFObject function(PDFObject str, PDFObject root) throws IOException { PDFObject names = root.getDictRef("Names"); if (names != null) { PDFObject dests = names.getDictRef("Dests"); if (dests != null) { NameTree tree = new NameTree(dests); PDFObject obj = tree.find(str.getStringValue()); if (obj != null && obj.getType() == PDFObject.DICTIONARY) { obj = obj.getDictRef("D"); } return obj; } } return null; } | /**
* Get a destination, given a string. This means the destination is in
* the root node's names dictionary.
*/ | Get a destination, given a string. This means the destination is in the root node's names dictionary | getDestFromString | {
"repo_name": "erpragatisingh/androidTraining",
"path": "Android_6_weekTraning/AndroidPdfViewer/src/com/sun/pdfview/PDFDestination.java",
"license": "gpl-2.0",
"size": 8967
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,684,062 |
@Test
public void testGeneralCleaning() throws Exception {
final File testPom = new File("src/test/resources/net/trajano/mojo/cleanpom/default-pom.xml");
final File dirtyXml = new File("src/test/resources/net/trajano/mojo/cleanpom/dirty.xml");
final File temp = File.createTempFile("dirty", ".xml");
FileUtils.copyFile(dirtyXml, temp);
assertTrue(temp.exists());
final CleanMojo mojo = (CleanMojo) rule.lookupMojo("clean", testPom);
rule.setVariableValueToObject(mojo, "pomFile", temp);
rule.setVariableValueToObject(mojo, "xsltFileList", "/META-INF/clean.xslt");
assertNotNull(mojo);
mojo.execute();
final FileInputStream fileInputStream = new FileInputStream(temp);
final String data = IOUtils.toString(fileInputStream);
fileInputStream.close();
assertTrue(data.contains("<notapom>"));
assertTrue(data.contains("<!-- prolog-->"));
} | void function() throws Exception { final File testPom = new File(STR); final File dirtyXml = new File(STR); final File temp = File.createTempFile("dirty", ".xml"); FileUtils.copyFile(dirtyXml, temp); assertTrue(temp.exists()); final CleanMojo mojo = (CleanMojo) rule.lookupMojo("clean", testPom); rule.setVariableValueToObject(mojo, STR, temp); rule.setVariableValueToObject(mojo, STR, STR); assertNotNull(mojo); mojo.execute(); final FileInputStream fileInputStream = new FileInputStream(temp); final String data = IOUtils.toString(fileInputStream); fileInputStream.close(); assertTrue(data.contains(STR)); assertTrue(data.contains(STR)); } | /**
* Tests general cleaning.
*
* @throws Exception
*/ | Tests general cleaning | testGeneralCleaning | {
"repo_name": "trajano/cleanpom-maven-plugin",
"path": "src/test/java/net/trajano/mojo/cleanpom/test/CleanMojoTest.java",
"license": "epl-1.0",
"size": 14521
} | [
"java.io.File",
"java.io.FileInputStream",
"net.trajano.mojo.cleanpom.CleanMojo",
"org.apache.commons.io.IOUtils",
"org.codehaus.plexus.util.FileUtils",
"org.junit.Assert"
] | import java.io.File; import java.io.FileInputStream; import net.trajano.mojo.cleanpom.CleanMojo; import org.apache.commons.io.IOUtils; import org.codehaus.plexus.util.FileUtils; import org.junit.Assert; | import java.io.*; import net.trajano.mojo.cleanpom.*; import org.apache.commons.io.*; import org.codehaus.plexus.util.*; import org.junit.*; | [
"java.io",
"net.trajano.mojo",
"org.apache.commons",
"org.codehaus.plexus",
"org.junit"
] | java.io; net.trajano.mojo; org.apache.commons; org.codehaus.plexus; org.junit; | 1,341,523 |
private void installEditing()
{
// Install a mouse listener which will select/unselect when clicking on
// the check icon
MouseHandler mouseHandler = new MouseHandler();
this.listBox.addMouseListener(mouseHandler);
this.listBox.addMouseMotionListener(mouseHandler);
// Map the space bar to the key in the ActionMap
InputMap inputMap = (InputMap) UIManager.get("List.focusInputMap");
inputMap.put(KeyStroke.getKeyStroke("SPACE"), "pressed");
inputMap.put(KeyStroke.getKeyStroke("released SPACE"), "released");
// Map the actions
ActionMap actionMap = this.listBox.getActionMap();
actionMap.put("pressed", new PressedAction());
actionMap.put("released", new ReleasedAction());
}
| void function() { MouseHandler mouseHandler = new MouseHandler(); this.listBox.addMouseListener(mouseHandler); this.listBox.addMouseMotionListener(mouseHandler); InputMap inputMap = (InputMap) UIManager.get(STR); inputMap.put(KeyStroke.getKeyStroke("SPACE"), STR); inputMap.put(KeyStroke.getKeyStroke(STR), STR); ActionMap actionMap = this.listBox.getActionMap(); actionMap.put(STR, new PressedAction()); actionMap.put(STR, new ReleasedAction()); } | /**
* Installs the necessary handlers on the given list to mimic a real cell
* editor.
*/ | Installs the necessary handlers on the given list to mimic a real cell editor | installEditing | {
"repo_name": "bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs",
"path": "utils/eclipselink.utils.workbench/framework/source/org/eclipse/persistence/tools/workbench/framework/uitools/CheckList.java",
"license": "epl-1.0",
"size": 29290
} | [
"javax.swing.ActionMap",
"javax.swing.InputMap",
"javax.swing.KeyStroke",
"javax.swing.UIManager"
] | import javax.swing.ActionMap; import javax.swing.InputMap; import javax.swing.KeyStroke; import javax.swing.UIManager; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 197,138 |
public static @Nonnull Matcher<ConvertConfigurationAnswerElement> hasNumReferrers(
@Nonnull String filename,
@Nonnull StructureType type,
@Nonnull String structureName,
int numReferrers) {
return new HasNumReferrers(filename, type, structureName, numReferrers);
} | static @Nonnull Matcher<ConvertConfigurationAnswerElement> function( @Nonnull String filename, @Nonnull StructureType type, @Nonnull String structureName, int numReferrers) { return new HasNumReferrers(filename, type, structureName, numReferrers); } | /**
* Provides a matcher that matches if the provided {@link ConvertConfigurationAnswerElement} has a
* structure for {@code filename} of type {@code type} named {@code structureName} with {@code
* numReferrers} referrers.
*/ | Provides a matcher that matches if the provided <code>ConvertConfigurationAnswerElement</code> has a structure for filename of type type named structureName with numReferrers referrers | hasNumReferrers | {
"repo_name": "arifogel/batfish",
"path": "projects/batfish-common-protocol/src/test/java/org/batfish/datamodel/matchers/DataModelMatchers.java",
"license": "apache-2.0",
"size": 17325
} | [
"javax.annotation.Nonnull",
"org.batfish.datamodel.answers.ConvertConfigurationAnswerElement",
"org.batfish.datamodel.matchers.ConvertConfigurationAnswerElementMatchers",
"org.batfish.vendor.StructureType",
"org.hamcrest.Matcher"
] | import javax.annotation.Nonnull; import org.batfish.datamodel.answers.ConvertConfigurationAnswerElement; import org.batfish.datamodel.matchers.ConvertConfigurationAnswerElementMatchers; import org.batfish.vendor.StructureType; import org.hamcrest.Matcher; | import javax.annotation.*; import org.batfish.datamodel.answers.*; import org.batfish.datamodel.matchers.*; import org.batfish.vendor.*; import org.hamcrest.*; | [
"javax.annotation",
"org.batfish.datamodel",
"org.batfish.vendor",
"org.hamcrest"
] | javax.annotation; org.batfish.datamodel; org.batfish.vendor; org.hamcrest; | 2,696,154 |
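A hedged sketch of how the matcher factory documented above is typically used inside a Batfish conversion test; the answer element, filename, structure type and structure name below are placeholders, not values taken from the entry.

    import static org.batfish.datamodel.matchers.DataModelMatchers.hasNumReferrers;
    import static org.hamcrest.MatcherAssert.assertThat;

    import org.batfish.datamodel.answers.ConvertConfigurationAnswerElement;
    import org.batfish.vendor.StructureType;

    public class ReferrerAssertionSketch {
        // 'ccae' would come from converting a vendor configuration elsewhere in the test.
        static void checkAclIsReferencedOnce(ConvertConfigurationAnswerElement ccae,
                                             StructureType aclType) {
            assertThat(ccae, hasNumReferrers("configs/router1.cfg", aclType, "ACL_INSIDE", 1));
        }
    }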
interface WithPeerings {
Update withPeerings(List<ExpressRouteCircuitPeeringInner> peerings);
} | interface WithPeerings { Update withPeerings(List<ExpressRouteCircuitPeeringInner> peerings); } | /**
* Specifies peerings.
* @param peerings The list of peerings
* @return the next update stage
*/ | Specifies peerings | withPeerings | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2018_04_01/src/main/java/com/microsoft/azure/management/network/v2018_04_01/ExpressRouteCircuit.java",
"license": "mit",
"size": 15551
} | [
"com.microsoft.azure.management.network.v2018_04_01.implementation.ExpressRouteCircuitPeeringInner",
"java.util.List"
] | import com.microsoft.azure.management.network.v2018_04_01.implementation.ExpressRouteCircuitPeeringInner; import java.util.List; | import com.microsoft.azure.management.network.v2018_04_01.implementation.*; import java.util.*; | [
"com.microsoft.azure",
"java.util"
] | com.microsoft.azure; java.util; | 2,812,052 |
public static Set<String> getKeys(JSONObject jsonObject) {
Set<String> keys = new TreeSet<String>();
Iterator<?> iter = jsonObject.keys();
while (iter.hasNext()) {
keys.add((String) iter.next());
}
return keys;
} | static Set<String> function(JSONObject jsonObject) { Set<String> keys = new TreeSet<String>(); Iterator<?> iter = jsonObject.keys(); while (iter.hasNext()) { keys.add((String) iter.next()); } return keys; } | /**
* Collects all keys in {@code jsonObject}.
*
* @param jsonObject the {@link JSONObject} to get the keys of
* @return the set of keys
*/ | Collects all keys in jsonObject | getKeys | {
"repo_name": "grtlinux/KIEA_JAVA7",
"path": "KIEA_JAVA7/src/tain/kr/com/github/json/JSONassert/v01/skyscreamer/jsonassert/comparator/JSONCompareUtil.java",
"license": "gpl-3.0",
"size": 8321
} | [
"java.util.Iterator",
"java.util.Set",
"java.util.TreeSet"
] | import java.util.Iterator; import java.util.Set; import java.util.TreeSet; | import java.util.*; | [
"java.util"
] | java.util; | 1,008,819 |
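A minimal usage sketch for the key-collection helper documented above. It assumes the upstream JSONassert coordinates (org.skyscreamer.jsonassert) and the org.json JSONObject; this repository's fork keeps the same class and method names under a different package.

    import java.util.Set;
    import org.json.JSONObject;
    import org.skyscreamer.jsonassert.comparator.JSONCompareUtil;

    public class JsonKeysDemo {
        public static void main(String[] args) throws Exception {
            JSONObject obj = new JSONObject("{\"b\": 1, \"a\": 2, \"c\": {\"nested\": true}}");
            // The TreeSet inside getKeys yields the keys in sorted order: [a, b, c]
            Set<String> keys = JSONCompareUtil.getKeys(obj);
            System.out.println(keys);
        }
    }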
@Test(expected = NullPointerException.class)
public void getAreaWeightedCenterArrayNullElements() {
Geometry.getAreaWeightedCenter(new Vector2[] {
new Vector2(1.0, 0.0),
null,
new Vector2(4.0, 3.0),
new Vector2(-2.0, -1.0),
null
});
}
| @Test(expected = NullPointerException.class) void function() { Geometry.getAreaWeightedCenter(new Vector2[] { new Vector2(1.0, 0.0), null, new Vector2(4.0, 3.0), new Vector2(-2.0, -1.0), null }); } | /**
* Tests the getAreaWeightedCenter method passing an array with null elements.
* @since 3.1.0
*/ | Tests the getAreaWeightedCenter method passing an array with null elements | getAreaWeightedCenterArrayNullElements | {
"repo_name": "satishbabusee/dyn4j",
"path": "junit/org/dyn4j/geometry/GeometryTest.java",
"license": "bsd-3-clause",
"size": 54121
} | [
"org.dyn4j.geometry.Geometry",
"org.dyn4j.geometry.Vector2",
"org.junit.Test"
] | import org.dyn4j.geometry.Geometry; import org.dyn4j.geometry.Vector2; import org.junit.Test; | import org.dyn4j.geometry.*; import org.junit.*; | [
"org.dyn4j.geometry",
"org.junit"
] | org.dyn4j.geometry; org.junit; | 2,796,584 |
public boolean load(@Nullable InputStream in, @NotNull String systemId) throws IOException, ConfigurationException {
if (systemId.endsWith("/" + Constants.PACKAGE_DEFINITION_XML)) {
setHasDefinition(true);
log.trace("Contains package definition {}.", systemId);
return true;
}
if (in == null) {
return false;
}
final String name = Text.getName(systemId);
if (Constants.FILTER_XML.equals(name)) {
// load filter
loadFilter(in, systemId);
return true;
} else if (Constants.CONFIG_XML.equals(name)) {
// load config
loadConfig(in, systemId);
return true;
} else if (Constants.SETTINGS_XML.equals(name)) {
// load settings
loadSettings(in, systemId);
return true;
} else if (Constants.PROPERTIES_XML.equals(name)) {
// load properties
loadProperties(in, systemId);
return true;
} else if (Constants.PRIVILEGES_XML.equals(name)) {
// load privileges
loadPrivileges(in, systemId);
return true;
} else if (name.endsWith(".cnd")) {
Reader r = new InputStreamReader(in, "utf8");
CNDReader reader = ServiceProviderFactory.getProvider().getCNDReader();
reader.read(r, systemId, null);
getNodeTypes().add(reader);
log.trace("Loaded nodetypes from {}.", systemId);
return true;
}
return false;
} | boolean function(@Nullable InputStream in, @NotNull String systemId) throws IOException, ConfigurationException { if (systemId.endsWith("/" + Constants.PACKAGE_DEFINITION_XML)) { setHasDefinition(true); log.trace(STR, systemId); return true; } if (in == null) { return false; } final String name = Text.getName(systemId); if (Constants.FILTER_XML.equals(name)) { loadFilter(in, systemId); return true; } else if (Constants.CONFIG_XML.equals(name)) { loadConfig(in, systemId); return true; } else if (Constants.SETTINGS_XML.equals(name)) { loadSettings(in, systemId); return true; } else if (Constants.PROPERTIES_XML.equals(name)) { loadProperties(in, systemId); return true; } else if (Constants.PRIVILEGES_XML.equals(name)) { loadPrivileges(in, systemId); return true; } else if (name.endsWith(".cnd")) { Reader r = new InputStreamReader(in, "utf8"); CNDReader reader = ServiceProviderFactory.getProvider().getCNDReader(); reader.read(r, systemId, null); getNodeTypes().add(reader); log.trace(STR, systemId); return true; } return false; } | /**
* Loads a setting based on the name of the system id.
* <p>The specified stream remains open after this method returns.
*
* @param systemId the system id of the setting to load
* @param in the input stream
* @return {@code true} if the setting was loaded.
* @throws IOException if an I/O error occurs
* @throws ConfigurationException if a configuration error occurs
*
* @since 3.1.32
*/ | Loads a setting based on the name of the system id. The specified stream remains open after this method returns | load | {
"repo_name": "tripodsan/jackrabbit-filevault",
"path": "vault-core/src/main/java/org/apache/jackrabbit/vault/fs/config/DefaultMetaInf.java",
"license": "apache-2.0",
"size": 14597
} | [
"java.io.IOException",
"java.io.InputStream",
"java.io.InputStreamReader",
"java.io.Reader",
"org.apache.jackrabbit.vault.fs.spi.CNDReader",
"org.apache.jackrabbit.vault.fs.spi.ServiceProviderFactory",
"org.apache.jackrabbit.vault.util.Constants",
"org.apache.jackrabbit.vault.util.Text",
"org.jetbrains.annotations.NotNull",
"org.jetbrains.annotations.Nullable"
] | import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import org.apache.jackrabbit.vault.fs.spi.CNDReader; import org.apache.jackrabbit.vault.fs.spi.ServiceProviderFactory; import org.apache.jackrabbit.vault.util.Constants; import org.apache.jackrabbit.vault.util.Text; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; | import java.io.*; import org.apache.jackrabbit.vault.fs.spi.*; import org.apache.jackrabbit.vault.util.*; import org.jetbrains.annotations.*; | [
"java.io",
"org.apache.jackrabbit",
"org.jetbrains.annotations"
] | java.io; org.apache.jackrabbit; org.jetbrains.annotations; | 611,556 |
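A hedged usage sketch for the load method documented above, dispatching a few well-known vault entries by their system id; the file names are illustrative and the DefaultMetaInf no-argument constructor is assumed.

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import org.apache.jackrabbit.vault.fs.config.DefaultMetaInf;

    public class MetaInfLoader {
        static DefaultMetaInf readVaultMetaInf(Path vaultDir) throws Exception {
            DefaultMetaInf inf = new DefaultMetaInf();
            // Each entry is dispatched by its system id: filter.xml, config.xml, *.cnd, ...
            for (String name : new String[] { "filter.xml", "config.xml", "nodetypes.cnd" }) {
                Path file = vaultDir.resolve(name);
                if (!Files.exists(file)) {
                    continue;
                }
                try (InputStream in = Files.newInputStream(file)) {
                    boolean consumed = inf.load(in, file.toString());
                    System.out.println(name + (consumed ? " loaded" : " ignored"));
                }
            }
            return inf;
        }
    }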
public long executeInsert() {
if (!mDatabase.isOpen()) {
throw new IllegalStateException("database " + mDatabase.getPath() + " already closed");
}
long timeStart = SystemClock.uptimeMillis();
mDatabase.lock();
acquireReference();
try {
native_execute();
mDatabase.logTimeStat(mSql, timeStart);
return (mDatabase.lastChangeCount() > 0) ? mDatabase.lastInsertRow() : -1;
} finally {
releaseReference();
mDatabase.unlock();
}
} | long function() { if (!mDatabase.isOpen()) { throw new IllegalStateException(STR + mDatabase.getPath() + STR); } long timeStart = SystemClock.uptimeMillis(); mDatabase.lock(); acquireReference(); try { native_execute(); mDatabase.logTimeStat(mSql, timeStart); return (mDatabase.lastChangeCount() > 0) ? mDatabase.lastInsertRow() : -1; } finally { releaseReference(); mDatabase.unlock(); } } | /**
* Execute this SQL statement and return the ID of the row inserted due to this call.
* The SQL statement should be an INSERT for this to be a useful call.
*
* @return the row ID of the last row inserted, if this insert is successful. -1 otherwise.
*
* @throws android.database.SQLException If the SQL string is invalid for
* some reason
*/ | Execute this SQL statement and return the ID of the row inserted due to this call. The SQL statement should be an INSERT for this to be a useful call | executeInsert | {
"repo_name": "jayal/android-database-sqlcipher",
"path": "src/net/sqlcipher/database/SQLiteStatement.java",
"license": "apache-2.0",
"size": 5489
} | [
"android.os.SystemClock"
] | import android.os.SystemClock; | import android.os.*; | [
"android.os"
] | android.os; | 381,164 |
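A usage sketch for the insert helper documented above, following the compile/bind/execute pattern that SQLCipher mirrors from the Android SQLite API; the notes table and the DAO wrapper are assumptions.

    import android.util.Log;
    import net.sqlcipher.database.SQLiteDatabase;
    import net.sqlcipher.database.SQLiteStatement;

    public class NotesDao {
        // Table assumed: CREATE TABLE notes (_id INTEGER PRIMARY KEY, body TEXT)
        static long insertNote(SQLiteDatabase db, String body) {
            SQLiteStatement stmt = db.compileStatement("INSERT INTO notes (body) VALUES (?)");
            try {
                stmt.bindString(1, body);
                long rowId = stmt.executeInsert(); // -1 means the insert did not take effect
                Log.d("NotesDao", "inserted row " + rowId);
                return rowId;
            } finally {
                stmt.close();
            }
        }
    }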
Range<Double> doubleRange(@Nullable Double min, @Nullable Double max);
} | Range<Double> doubleRange(@Nullable Double min, @Nullable Double max); } | /**
* Builds a {@link Range} based on doubles.
*
* @param min The minimum, or {@code null} for no minimum
* @param max The maximum, or {@code null} for no maximum
* @return The range.
*/ | Builds a <code>Range</code> based on doubles | doubleRange | {
"repo_name": "SpongePowered/SpongeAPI",
"path": "src/main/java/org/spongepowered/api/util/Range.java",
"license": "mit",
"size": 4212
} | [
"org.checkerframework.checker.nullness.qual.Nullable"
] | import org.checkerframework.checker.nullness.qual.Nullable; | import org.checkerframework.checker.nullness.qual.*; | [
"org.checkerframework.checker"
] | org.checkerframework.checker; | 347,434 |
public TermDocs termDocs(final Term t) throws IOException {
if (t == null || t.field() != field) {
return reader.termDocs(t);
}
String text = t.text();
if (unknownValues.get(text) != null) {
log.debug("EmptyTermDocs({},{})", field, text);
return EmptyTermDocs.INSTANCE;
}
// maintain cache
CacheEntry entry;
synchronized (cache) {
entry = cache.get(text);
if (entry == null) {
// check space
if (cache.size() >= CACHE_SIZE) {
// prune half of them and adjust the rest
CacheEntry[] entries = cache.values().toArray(
new CacheEntry[cache.size()]);
Arrays.sort(entries);
int threshold = entries[CACHE_SIZE / 2].numAccessed;
for (Iterator<Map.Entry<String, CacheEntry>> it = cache.entrySet().iterator(); it.hasNext(); ) {
Map.Entry<String, CacheEntry> e = it.next();
if (e.getValue().numAccessed <= threshold) {
// prune
it.remove();
} else {
// adjust
CacheEntry ce = e.getValue();
ce.numAccessed = (int) Math.sqrt(ce.numAccessed);
}
}
}
entry = new CacheEntry();
cache.put(text, entry);
} else {
entry.numAccessed++;
}
}
// this is a threshold to prevent caching of TermDocs
// that are read only irregularly.
if (entry.numAccessed < 10) {
if (log.isDebugEnabled()) {
log.debug("#{} TermDocs({},{})",
new Object[]{entry.numAccessed, field, text});
}
return reader.termDocs(t);
}
if (entry.bits == null) {
// collect bits
BitSet bits = null;
TermDocs tDocs = reader.termDocs(t);
try {
while (tDocs.next()) {
if (bits == null) {
bits = new BitSet(reader.maxDoc());
}
bits.set(tDocs.doc());
}
} finally {
tDocs.close();
}
if (bits != null) {
entry.bits = bits;
}
}
if (entry.bits == null) {
// none collected
unknownValues.put(text, text);
return EmptyTermDocs.INSTANCE;
} else {
if (log.isDebugEnabled()) {
log.debug("CachedTermDocs({},{},{}/{})", new Object[]{
field, text, entry.bits.cardinality(), reader.maxDoc()});
}
return new CachedTermDocs(entry.bits);
}
}
private static final class CachedTermDocs implements TermDocs {
private final BitSet docs;
private int position = -1;
private boolean moreDocs = true;
public CachedTermDocs(BitSet docs) {
this.docs = docs;
} | TermDocs function(final Term t) throws IOException { if (t == null t.field() != field) { return reader.termDocs(t); } String text = t.text(); if (unknownValues.get(text) != null) { log.debug(STR, field, text); return EmptyTermDocs.INSTANCE; } CacheEntry entry; synchronized (cache) { entry = cache.get(text); if (entry == null) { if (cache.size() >= CACHE_SIZE) { CacheEntry[] entries = cache.values().toArray( new CacheEntry[cache.size()]); Arrays.sort(entries); int threshold = entries[CACHE_SIZE / 2].numAccessed; for (Iterator<Map.Entry<String, CacheEntry>> it = cache.entrySet().iterator(); it.hasNext(); ) { Map.Entry<String, CacheEntry> e = it.next(); if (e.getValue().numAccessed <= threshold) { it.remove(); } else { CacheEntry ce = e.getValue(); ce.numAccessed = (int) Math.sqrt(ce.numAccessed); } } } entry = new CacheEntry(); cache.put(text, entry); } else { entry.numAccessed++; } } if (entry.numAccessed < 10) { if (log.isDebugEnabled()) { log.debug(STR, new Object[]{entry.numAccessed, field, text}); } return reader.termDocs(t); } if (entry.bits == null) { BitSet bits = null; TermDocs tDocs = reader.termDocs(t); try { while (tDocs.next()) { if (bits == null) { bits = new BitSet(reader.maxDoc()); } bits.set(tDocs.doc()); } } finally { tDocs.close(); } if (bits != null) { entry.bits = bits; } } if (entry.bits == null) { unknownValues.put(text, text); return EmptyTermDocs.INSTANCE; } else { if (log.isDebugEnabled()) { log.debug(STR, new Object[]{ field, text, entry.bits.cardinality(), reader.maxDoc()}); } return new CachedTermDocs(entry.bits); } } private static final class CachedTermDocs implements TermDocs { private final BitSet docs; private int position = -1; private boolean moreDocs = true; public CachedTermDocs(BitSet docs) { this.docs = docs; } | /**
* Returns the {@link TermDocs} for the given term.
*
* @param t the term.
* @return the term docs for the given term.
* @throws IOException if an error occurs while reading from the index.
*/ | Returns the <code>TermDocs</code> for the given term | termDocs | {
"repo_name": "sdmcraft/jackrabbit",
"path": "jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TermDocsCache.java",
"license": "apache-2.0",
"size": 8686
} | [
"java.io.IOException",
"java.util.Arrays",
"java.util.BitSet",
"java.util.Iterator",
"java.util.Map",
"org.apache.lucene.index.Term",
"org.apache.lucene.index.TermDocs"
] | import java.io.IOException; import java.util.Arrays; import java.util.BitSet; import java.util.Iterator; import java.util.Map; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermDocs; | import java.io.*; import java.util.*; import org.apache.lucene.index.*; | [
"java.io",
"java.util",
"org.apache.lucene"
] | java.io; java.util; org.apache.lucene; | 677,346 |
public void close() throws KOAException
{
try
{
xWriter.write("</wrongids>\n");
xWriter.close();
}
catch (IOException ioe)
{
String[] params = { "writer" };
KOALogHelper.logErrorCode(
"WrongIdWriter.close",
ErrorConstants.ERR_IO,
params,
ioe);
throw new KOADataBeheerException(
KOADataBeheerException.IO_EXCETION,
ioe);
}
}
| void function() throws KOAException { try { xWriter.write(STR); xWriter.close(); } catch (IOException ioe) { String[] params = { STR }; KOALogHelper.logErrorCode( STR, ErrorConstants.ERR_IO, params, ioe); throw new KOADataBeheerException( KOADataBeheerException.IO_EXCETION, ioe); } } | /**
* writes a close tag to the writer
* and closes the writer.
*
*/ | writes a close tag to the writer and closes the writer | close | {
"repo_name": "GaloisInc/KOA",
"path": "infrastructure/source/WebVotingSystem/src/ie/ucd/srg/koa/databeheer/xml/WrongIdWriter.java",
"license": "gpl-2.0",
"size": 7270
} | [
"ie.ucd.srg.koa.constants.ErrorConstants",
"ie.ucd.srg.koa.exception.KOADataBeheerException",
"ie.ucd.srg.koa.exception.KOAException",
"ie.ucd.srg.koa.utils.KOALogHelper",
"java.io.IOException"
] | import ie.ucd.srg.koa.constants.ErrorConstants; import ie.ucd.srg.koa.exception.KOADataBeheerException; import ie.ucd.srg.koa.exception.KOAException; import ie.ucd.srg.koa.utils.KOALogHelper; import java.io.IOException; | import ie.ucd.srg.koa.constants.*; import ie.ucd.srg.koa.exception.*; import ie.ucd.srg.koa.utils.*; import java.io.*; | [
"ie.ucd.srg",
"java.io"
] | ie.ucd.srg; java.io; | 2,059,322 |
List<PersonalMessageDbObj> selectByExample(
PersonalMessageDbObjExample example); | List<PersonalMessageDbObj> selectByExample( PersonalMessageDbObjExample example); | /**
* This method was generated by MyBatis Generator. This method corresponds to the database table PERSONAL_MESSAGE
* @mbggenerated Wed Sep 06 19:27:46 EDT 2017
*/ | This method was generated by MyBatis Generator. This method corresponds to the database table PERSONAL_MESSAGE | selectByExample | {
"repo_name": "ZFGCCP/ZFGC3",
"path": "src/main/java/com/zfgc/mappers/PersonalMessageDbObjMapper.java",
"license": "mit",
"size": 3894
} | [
"com.zfgc.dbobj.PersonalMessageDbObj",
"com.zfgc.dbobj.PersonalMessageDbObjExample",
"java.util.List"
] | import com.zfgc.dbobj.PersonalMessageDbObj; import com.zfgc.dbobj.PersonalMessageDbObjExample; import java.util.List; | import com.zfgc.dbobj.*; import java.util.*; | [
"com.zfgc.dbobj",
"java.util"
] | com.zfgc.dbobj; java.util; | 849,432 |
public RequestPost<Channel> createChannel(Channel channel) {
String url = String.format(Constants.CHANNELS_LIST_URL, getNotEmptyInstanceName());
return new RequestPost<>(Channel.class, url, this, channel);
} | RequestPost<Channel> function(Channel channel) { String url = String.format(Constants.CHANNELS_LIST_URL, getNotEmptyInstanceName()); return new RequestPost<>(Channel.class, url, this, channel); } | /**
* Create a new Channel
*
* @param channel Channel to create.
* @return New Channel.
*/ | Create a new Channel | createChannel | {
"repo_name": "Syncano/syncano-android",
"path": "library/src/main/java/com/syncano/library/SyncanoDashboard.java",
"license": "mit",
"size": 24946
} | [
"com.syncano.library.api.RequestPost",
"com.syncano.library.data.Channel"
] | import com.syncano.library.api.RequestPost; import com.syncano.library.data.Channel; | import com.syncano.library.api.*; import com.syncano.library.data.*; | [
"com.syncano.library"
] | com.syncano.library; | 1,590,282 |
void setFilter(TreeModelFilter filter)
{
filteredTree.setFilter(filter);
}
| void setFilter(TreeModelFilter filter) { filteredTree.setFilter(filter); } | /**
* Set the {@link TreeModelFilter} to use
*
* @param filter The {@link TreeModelFilter} to apply. If this is
* <code>null</code>, then the unfiltered tree will be shown.
*/ | Set the <code>TreeModelFilter</code> to use | setFilter | {
"repo_name": "javagl/Flow",
"path": "flow-gui/src/main/java/de/javagl/flow/gui/CategoryTree.java",
"license": "mit",
"size": 10173
} | [
"de.javagl.common.ui.tree.filtered.TreeModelFilter"
] | import de.javagl.common.ui.tree.filtered.TreeModelFilter; | import de.javagl.common.ui.tree.filtered.*; | [
"de.javagl.common"
] | de.javagl.common; | 477,974 |
void onContributorAdded(ParticipantId contributor); | void onContributorAdded(ParticipantId contributor); | /**
* Notifies this listener that a contributor was added to the blip.
*/ | Notifies this listener that a contributor was added to the blip | onContributorAdded | {
"repo_name": "gburd/wave",
"path": "src/org/waveprotocol/wave/model/conversation/WaveletBasedConversationBlip.java",
"license": "apache-2.0",
"size": 19140
} | [
"org.waveprotocol.wave.model.wave.ParticipantId"
] | import org.waveprotocol.wave.model.wave.ParticipantId; | import org.waveprotocol.wave.model.wave.*; | [
"org.waveprotocol.wave"
] | org.waveprotocol.wave; | 2,899,566 |
private void checkLoadBalancingStrategy() {
try
{
String loadBalancingStrategyClassName = System.getProperty(NAME_LOAD_BALANCING_STRATEGY);
if (loadBalancingStrategyClassName != null)
{
// load class
Class strategyClass = Class.forName(loadBalancingStrategyClassName);
// create component implementation
Constructor constructor = strategyClass.getConstructor((Class[])null);
if (constructor == null)
throw new IllegalArgumentException("Class '" + strategyClass.getName() + "' does have required default constructor.");
Object strategyObject = constructor.newInstance((Object[])null);
if (!(strategyObject instanceof LoadBalancingStrategy))
throw new IllegalArgumentException("Class '" + strategyClass.getName() + "' does not implement '" + LoadBalancingStrategy.class.getName() + "' interface.");
loadBalancingStrategy = (LoadBalancingStrategy)strategyObject;
logger.log(Level.INFO,"Using load balancing strategy: '" + strategyClass.getName() + "'.");
}
}
catch (Throwable t)
{
logger.log(Level.WARNING, "Failed to register '" + NAME_LOAD_BALANCING_STRATEGY + "' load balancing strategy: " + t.getMessage(), t);
}
} | void function() { try { String loadBalancingStrategyClassName = System.getProperty(NAME_LOAD_BALANCING_STRATEGY); if (loadBalancingStrategyClassName != null) { Class strategyClass = Class.forName(loadBalancingStrategyClassName); Constructor constructor = strategyClass.getConstructor((Class[])null); if (constructor == null) throw new IllegalArgumentException(STR + strategyClass.getName() + STR); Object strategyObject = constructor.newInstance((Object[])null); if (!(strategyObject instanceof LoadBalancingStrategy)) throw new IllegalArgumentException(STR + strategyClass.getName() + STR + LoadBalancingStrategy.class.getName() + STR); loadBalancingStrategy = (LoadBalancingStrategy)strategyObject; logger.log(Level.INFO,STR + strategyClass.getName() + "'."); } } catch (Throwable t) { logger.log(Level.WARNING, STR + NAME_LOAD_BALANCING_STRATEGY + STR + t.getMessage(), t); } } | /**
* Checks and registers the load balancing strategy.
* The load balancing strategy is defined as a Java JVM system property named
* <code>NAME_LOAD_BALANCING_STRATEGY</code> containing the class name of the
* <code>LoadBalancingStrategy</code> implementation.
*/ | Checks and registers the load balancing strategy. The load balancing strategy is defined as a Java JVM system property named <code>NAME_LOAD_BALANCING_STRATEGY</code> containing the class name of the <code>LoadBalancingStrategy</code> implementation | checkLoadBalancingStrategy | {
"repo_name": "csrg-utfsm/acscb",
"path": "LGPL/CommonSoftware/jmanager/src/com/cosylab/acs/maci/manager/ManagerImpl.java",
"license": "mit",
"size": 309850
} | [
"com.cosylab.acs.maci.loadbalancing.LoadBalancingStrategy",
"java.lang.reflect.Constructor",
"java.util.logging.Level"
] | import com.cosylab.acs.maci.loadbalancing.LoadBalancingStrategy; import java.lang.reflect.Constructor; import java.util.logging.Level; | import com.cosylab.acs.maci.loadbalancing.*; import java.lang.reflect.*; import java.util.logging.*; | [
"com.cosylab.acs",
"java.lang",
"java.util"
] | com.cosylab.acs; java.lang; java.util; | 2,046,836 |
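The registration above is an instance of the property-driven plugin pattern: read a class name from a system property, load it reflectively, require a no-argument constructor, and type-check the result. Below is a generic, self-contained sketch of that pattern; the GreetingPlugin interface is invented purely for illustration and is not part of the ACS code.

    public final class PluginLoader {
        public interface GreetingPlugin { String greet(String name); }

        static GreetingPlugin loadFromSystemProperty(String propertyName) throws Exception {
            String className = System.getProperty(propertyName);
            if (className == null) {
                return null;                                        // nothing configured, keep the default
            }
            Class<?> clazz = Class.forName(className);              // load the configured class
            Object instance = clazz.getConstructor().newInstance(); // requires a public no-arg constructor
            if (!(instance instanceof GreetingPlugin)) {
                throw new IllegalArgumentException(className + " does not implement GreetingPlugin");
            }
            return (GreetingPlugin) instance;
        }
    }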
public Employees getEmployees(
String accessToken, String xeroTenantId, String firstName, String lastName, Integer page)
throws IOException {
try {
TypeReference<Employees> typeRef = new TypeReference<Employees>() {};
HttpResponse response =
getEmployeesForHttpResponse(accessToken, xeroTenantId, firstName, lastName, page);
return apiClient.getObjectMapper().readValue(response.getContent(), typeRef);
} catch (HttpResponseException e) {
if (logger.isDebugEnabled()) {
logger.debug(
"------------------ HttpResponseException "
+ e.getStatusCode()
+ " : getEmployees -------------------");
logger.debug(e.toString());
}
XeroApiExceptionHandler handler = new XeroApiExceptionHandler();
if (e.getStatusCode() == 400 || e.getStatusCode() == 405) {
TypeReference<Employees> errorTypeRef = new TypeReference<Employees>() {};
Employees object = apiClient.getObjectMapper().readValue(e.getContent(), errorTypeRef);
handler.validationError(e.getStatusCode(), "Employees", object.getProblem());
} else {
handler.execute(e);
}
} catch (IOException ioe) {
throw ioe;
}
return null;
} | Employees function( String accessToken, String xeroTenantId, String firstName, String lastName, Integer page) throws IOException { try { TypeReference<Employees> typeRef = new TypeReference<Employees>() {}; HttpResponse response = getEmployeesForHttpResponse(accessToken, xeroTenantId, firstName, lastName, page); return apiClient.getObjectMapper().readValue(response.getContent(), typeRef); } catch (HttpResponseException e) { if (logger.isDebugEnabled()) { logger.debug( STR + e.getStatusCode() + STR); logger.debug(e.toString()); } XeroApiExceptionHandler handler = new XeroApiExceptionHandler(); if (e.getStatusCode() == 400 e.getStatusCode() == 405) { TypeReference<Employees> errorTypeRef = new TypeReference<Employees>() {}; Employees object = apiClient.getObjectMapper().readValue(e.getContent(), errorTypeRef); handler.validationError(e.getStatusCode(), STR, object.getProblem()); } else { handler.execute(e); } } catch (IOException ioe) { throw ioe; } return null; } | /**
* searches employees
*
* <p><b>200</b> - search results matching criteria
*
* <p><b>400</b> - validation error for a bad request
*
* @param xeroTenantId Xero identifier for Tenant
* @param firstName Filter by first name
* @param lastName Filter by last name
* @param page Page number which specifies the set of records to retrieve. By default the number
* of the records per set is 100.
* @param accessToken Authorization token for user set in header of each request
* @return Employees
* @throws IOException if an error occurs while attempting to invoke the API
*/ | searches employees 200 - search results matching criteria 400 - validation error for a bad request | getEmployees | {
"repo_name": "SidneyAllen/Xero-Java",
"path": "src/main/java/com/xero/api/client/PayrollUkApi.java",
"license": "mit",
"size": 285506
} | [
"com.fasterxml.jackson.core.type.TypeReference",
"com.google.api.client.http.HttpResponse",
"com.google.api.client.http.HttpResponseException",
"com.xero.api.XeroApiExceptionHandler",
"com.xero.models.payrolluk.Employees",
"java.io.IOException"
] | import com.fasterxml.jackson.core.type.TypeReference; import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpResponseException; import com.xero.api.XeroApiExceptionHandler; import com.xero.models.payrolluk.Employees; import java.io.IOException; | import com.fasterxml.jackson.core.type.*; import com.google.api.client.http.*; import com.xero.api.*; import com.xero.models.payrolluk.*; import java.io.*; | [
"com.fasterxml.jackson",
"com.google.api",
"com.xero.api",
"com.xero.models",
"java.io"
] | com.fasterxml.jackson; com.google.api; com.xero.api; com.xero.models; java.io; | 1,843,968 |
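A hedged usage sketch for the employee search documented above. The access token and tenant id come from the OAuth2 flow elsewhere, and the Employee accessor names are assumed from the generated payrolluk models rather than shown in this entry.

    import com.xero.api.client.PayrollUkApi;
    import com.xero.models.payrolluk.Employee;
    import com.xero.models.payrolluk.Employees;

    public class EmployeeLookup {
        static void printSmiths(PayrollUkApi api, String accessToken, String xeroTenantId)
                throws Exception {
            // firstName filter left null, lastName filter set, first page of results
            Employees result = api.getEmployees(accessToken, xeroTenantId, null, "Smith", 1);
            for (Employee e : result.getEmployees()) {
                System.out.println(e.getFirstName() + " " + e.getLastName());
            }
        }
    }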
public static int createPreSplitLoadTestTable(Configuration conf,
HTableDescriptor desc, HColumnDescriptor hcd, int numRegionsPerServer) throws IOException {
if (!desc.hasFamily(hcd.getName())) {
desc.addFamily(hcd);
}
int totalNumberOfRegions = 0;
Connection unmanagedConnection = ConnectionFactory.createConnection(conf);
Admin admin = unmanagedConnection.getAdmin();
try {
// create a table with pre-split regions.
// The number of splits is set as:
// (number of region servers * regions per region server).
int numberOfServers = admin.getClusterStatus().getServers().size();
if (numberOfServers == 0) {
throw new IllegalStateException("No live regionservers");
}
totalNumberOfRegions = numberOfServers * numRegionsPerServer;
LOG.info("Number of live regionservers: " + numberOfServers + ", " +
"pre-splitting table into " + totalNumberOfRegions + " regions " +
"(regions per server: " + numRegionsPerServer + ")");
byte[][] splits = new RegionSplitter.HexStringSplit().split(
totalNumberOfRegions);
admin.createTable(desc, splits);
} catch (MasterNotRunningException e) {
LOG.error("Master not running", e);
throw new IOException(e);
} catch (TableExistsException e) {
LOG.warn("Table " + desc.getTableName() +
" already exists, continuing");
} finally {
admin.close();
unmanagedConnection.close();
}
return totalNumberOfRegions;
} | static int function(Configuration conf, HTableDescriptor desc, HColumnDescriptor hcd, int numRegionsPerServer) throws IOException { if (!desc.hasFamily(hcd.getName())) { desc.addFamily(hcd); } int totalNumberOfRegions = 0; Connection unmanagedConnection = ConnectionFactory.createConnection(conf); Admin admin = unmanagedConnection.getAdmin(); try { int numberOfServers = admin.getClusterStatus().getServers().size(); if (numberOfServers == 0) { throw new IllegalStateException(STR); } totalNumberOfRegions = numberOfServers * numRegionsPerServer; LOG.info(STR + numberOfServers + STR + STR + totalNumberOfRegions + STR + STR + numRegionsPerServer + ")"); byte[][] splits = new RegionSplitter.HexStringSplit().split( totalNumberOfRegions); admin.createTable(desc, splits); } catch (MasterNotRunningException e) { LOG.error(STR, e); throw new IOException(e); } catch (TableExistsException e) { LOG.warn(STR + desc.getTableName() + STR); } finally { admin.close(); unmanagedConnection.close(); } return totalNumberOfRegions; } | /**
* Creates a pre-split table for load testing. If the table already exists,
* logs a warning and continues.
* @return the number of regions the table was split into
*/ | Creates a pre-split table for load testing. If the table already exists, logs a warning and continues | createPreSplitLoadTestTable | {
"repo_name": "grokcoder/pbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 132664
} | [
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.client.Admin",
"org.apache.hadoop.hbase.client.Connection",
"org.apache.hadoop.hbase.client.ConnectionFactory",
"org.apache.hadoop.hbase.util.RegionSplitter"
] | import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.util.RegionSplitter; | import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.util.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 559,726 |
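A usage sketch for the pre-split helper documented above, using the HBase 1.x-era descriptor classes that appear in this entry; table and family names are placeholders and a reachable cluster is assumed.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;

    public class PreSplitDemo {
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("load_test"));
            HColumnDescriptor family = new HColumnDescriptor("cf");
            // 5 regions per live region server
            int regions = HBaseTestingUtility.createPreSplitLoadTestTable(conf, desc, family, 5);
            System.out.println("table created with " + regions + " regions");
        }
    }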
private NestedSet<Artifact> filterDiscoveredHeaders(
ActionExecutionContext actionExecutionContext,
NestedSet<Artifact> headers,
List<CcCompilationContext.HeaderInfo> headerInfo) {
Set<Artifact> undeclaredHeaders = Sets.newHashSet(headers.toList());
// Remove all declared headers and find out which modules were used while at it.
CcCompilationContext.HeadersAndModules headersAndModules =
ccCompilationContext.computeDeclaredHeadersAndUsedModules(
usePic, undeclaredHeaders, headerInfo);
usedModules = ImmutableList.copyOf(headersAndModules.modules);
undeclaredHeaders.removeAll(headersAndModules.headers);
// Note that this (compared to validateInclusions) does not take mandatoryInputs into account.
// The reason is that these by definition get added to the action input and thus are available
// anyway. Not having to look at them here saves us from requiring and ArtifactExpander, which
// actionExecutionContext doesn't have at this point. This only works as long as mandatory
// inputs do not contain headers that are built into a module.
for (Artifact source : getIncludeScannerSources()) {
undeclaredHeaders.remove(source);
}
for (Artifact header : additionalPrunableHeaders.toList()) {
undeclaredHeaders.remove(header);
}
if (undeclaredHeaders.isEmpty()) {
return headers;
}
Iterable<PathFragment> ignoreDirs =
cppConfiguration.isStrictSystemIncludes()
? getBuiltInIncludeDirectories()
: getValidationIgnoredDirs();
Set<Artifact> missing = Sets.newHashSet();
// Lazily initialize, so that compiles that properly declare all their files profit.
Set<PathFragment> declaredIncludeDirs = null;
for (Artifact header : undeclaredHeaders) {
if (FileSystemUtils.startsWithAny(header.getExecPath(), ignoreDirs)) {
continue;
}
if (declaredIncludeDirs == null) {
declaredIncludeDirs = ccCompilationContext.getLooseHdrsDirs().toSet();
}
if (!isDeclaredIn(cppConfiguration, actionExecutionContext, header, declaredIncludeDirs)) {
missing.add(header);
}
}
if (missing.isEmpty()) {
return headers;
}
return NestedSetBuilder.wrap(
Order.STABLE_ORDER,
Iterables.filter(headers.toList(), header -> !missing.contains(header)));
} | NestedSet<Artifact> function( ActionExecutionContext actionExecutionContext, NestedSet<Artifact> headers, List<CcCompilationContext.HeaderInfo> headerInfo) { Set<Artifact> undeclaredHeaders = Sets.newHashSet(headers.toList()); CcCompilationContext.HeadersAndModules headersAndModules = ccCompilationContext.computeDeclaredHeadersAndUsedModules( usePic, undeclaredHeaders, headerInfo); usedModules = ImmutableList.copyOf(headersAndModules.modules); undeclaredHeaders.removeAll(headersAndModules.headers); for (Artifact source : getIncludeScannerSources()) { undeclaredHeaders.remove(source); } for (Artifact header : additionalPrunableHeaders.toList()) { undeclaredHeaders.remove(header); } if (undeclaredHeaders.isEmpty()) { return headers; } Iterable<PathFragment> ignoreDirs = cppConfiguration.isStrictSystemIncludes() ? getBuiltInIncludeDirectories() : getValidationIgnoredDirs(); Set<Artifact> missing = Sets.newHashSet(); Set<PathFragment> declaredIncludeDirs = null; for (Artifact header : undeclaredHeaders) { if (FileSystemUtils.startsWithAny(header.getExecPath(), ignoreDirs)) { continue; } if (declaredIncludeDirs == null) { declaredIncludeDirs = ccCompilationContext.getLooseHdrsDirs().toSet(); } if (!isDeclaredIn(cppConfiguration, actionExecutionContext, header, declaredIncludeDirs)) { missing.add(header); } } if (missing.isEmpty()) { return headers; } return NestedSetBuilder.wrap( Order.STABLE_ORDER, Iterables.filter(headers.toList(), header -> !missing.contains(header))); } | /**
* Filters discovered headers according to declared rule inputs. This fundamentally mirrors the
* behavior of {@link #validateInclusions} and just removes inputs that would be considered
* invalid from {@code headers}. That way, the compiler does not get access to them (assuming a
* sand-boxed environment) and can diagnose the missing headers.
*/ | Filters discovered headers according to declared rule inputs. This fundamentally mirrors the behavior of <code>#validateInclusions</code> and just removes inputs that would be considered invalid from headers. That way, the compiler does not get access to them (assuming a sand-boxed environment) and can diagnose the missing headers | filterDiscoveredHeaders | {
"repo_name": "davidzchen/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java",
"license": "apache-2.0",
"size": 83322
} | [
"com.google.common.collect.ImmutableList",
"com.google.common.collect.Iterables",
"com.google.common.collect.Sets",
"com.google.devtools.build.lib.actions.ActionExecutionContext",
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.collect.nestedset.NestedSet",
"com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder",
"com.google.devtools.build.lib.collect.nestedset.Order",
"com.google.devtools.build.lib.vfs.FileSystemUtils",
"com.google.devtools.build.lib.vfs.PathFragment",
"java.util.List",
"java.util.Set"
] | import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.List; import java.util.Set; | import com.google.common.collect.*; import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.collect.nestedset.*; import com.google.devtools.build.lib.vfs.*; import java.util.*; | [
"com.google.common",
"com.google.devtools",
"java.util"
] | com.google.common; com.google.devtools; java.util; | 2,043,846 |
EventExecutor executor(); | EventExecutor executor(); | /**
* Returns the wrapped {@link EventExecutor}.
*/ | Returns the wrapped <code>EventExecutor</code> | executor | {
"repo_name": "youprofit/netty",
"path": "common/src/main/java/io/netty/util/internal/CallableEventExecutorAdapter.java",
"license": "apache-2.0",
"size": 1427
} | [
"io.netty.util.concurrent.EventExecutor"
] | import io.netty.util.concurrent.EventExecutor; | import io.netty.util.concurrent.*; | [
"io.netty.util"
] | io.netty.util; | 2,333,579 |
public static boolean equals(Object[] left, List right) {
return coercedEquals(left, right);
} | static boolean function(Object[] left, List right) { return coercedEquals(left, right); } | /**
* Determines if the contents of this array are equal to the
* contents of the given list, in the same order. This returns
* <code>false</code> if either collection is <code>null</code>.
*
* @param left this array
* @param right the list being compared
* @return true if the contents of both collections are equal
* @since 1.5.0
*/ | Determines if the contents of this array are equal to the contents of the given list, in the same order. This returns <code>false</code> if either collection is <code>null</code> | equals | {
"repo_name": "mv2a/yajsw",
"path": "src/groovy-patch/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyMethods.java",
"license": "apache-2.0",
"size": 704164
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,565,571 |
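A tiny demonstration of the array/list comparison documented above; it is self-contained apart from the Groovy runtime on the classpath.

    import java.util.Arrays;
    import java.util.List;
    import org.codehaus.groovy.runtime.DefaultGroovyMethods;

    public class ArrayListEqualityDemo {
        public static void main(String[] args) {
            Integer[] array = { 1, 2, 3 };
            List<Integer> list = Arrays.asList(1, 2, 3);
            // true: same elements in the same order
            System.out.println(DefaultGroovyMethods.equals(array, list));
            // false: order differs
            System.out.println(DefaultGroovyMethods.equals(array, Arrays.asList(3, 2, 1)));
        }
    }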
public List<BirdList> retrieve() throws DatabaseDownException, SQLException {
List<BirdList> list = super.retrieveObjects(SELECT_ALL_PUBLIC, true);
return list;
}
| List<BirdList> function() throws DatabaseDownException, SQLException { List<BirdList> list = super.retrieveObjects(SELECT_ALL_PUBLIC, true); return list; } | /**
* This method retrieves all public lists from the database
*
* @return an ArrayList object with BirdList objects, not fully loaded
*
* @throws DatabaseDownException If the database is down
* @throws SQLException If an SQL exception occurs
*/ | This method retrieves all public lists from the database | retrieve | {
"repo_name": "BackupTheBerlios/arara-svn",
"path": "core/trunk/src/main/java/net/indrix/arara/dao/BirdListDAO.java",
"license": "gpl-2.0",
"size": 17139
} | [
"java.sql.SQLException",
"java.util.List",
"net.indrix.arara.vo.BirdList"
] | import java.sql.SQLException; import java.util.List; import net.indrix.arara.vo.BirdList; | import java.sql.*; import java.util.*; import net.indrix.arara.vo.*; | [
"java.sql",
"java.util",
"net.indrix.arara"
] | java.sql; java.util; net.indrix.arara; | 1,937,272 |
public void printNewDocumentTopics(File f, double threshold, int max) {
try {
PrintWriter pw;
pw = new PrintWriter(new FileWriter(f));
pw.println("#doc source topic proportion ...");
int docLen;
double topicDist[] = new double[topicAssignment.length];
for (int di = oldlist.size(); di < newlist.size() + oldlist.size(); di++) {
pw.print(di);
pw.print(' ');
if (newlist.get(di - oldlist.size()).getSource() != null) {
pw.print(newlist.get(di - oldlist.size()).getSource()
.toString());
} else {
pw.print("null-source");
}
pw.print(' ');
docLen = topicAssignment[di].length;
for (int ti = 0; ti < numTopics; ti++)
topicDist[ti] = (((float) docTopicCounts[di][ti]) / docLen);
if (max < 0)
max = numTopics;
for (int tp = 0; tp < max; tp++) {
double maxvalue = 0;
int maxindex = -1;
for (int ti = 0; ti < numTopics; ti++)
if (topicDist[ti] > maxvalue) {
maxvalue = topicDist[ti];
maxindex = ti;
}
if (maxindex == -1 || topicDist[maxindex] < threshold)
break;
pw.print(maxindex + " " + topicDist[maxindex] + " ");
topicDist[maxindex] = 0;
}
pw.println(' ');
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} | void function(File f, double threshold, int max) { try { PrintWriter pw; pw = new PrintWriter(new FileWriter(f)); pw.println(STR); int docLen; double topicDist[] = new double[topicAssignment.length]; for (int di = oldlist.size(); di < newlist.size() + oldlist.size(); di++) { pw.print(di); pw.print(' '); if (newlist.get(di - oldlist.size()).getSource() != null) { pw.print(newlist.get(di - oldlist.size()).getSource() .toString()); } else { pw.print(STR); } pw.print(' '); docLen = topicAssignment[di].length; for (int ti = 0; ti < numTopics; ti++) topicDist[ti] = (((float) docTopicCounts[di][ti]) / docLen); if (max < 0) max = numTopics; for (int tp = 0; tp < max; tp++) { double maxvalue = 0; int maxindex = -1; for (int ti = 0; ti < numTopics; ti++) if (topicDist[ti] > maxvalue) { maxvalue = topicDist[ti]; maxindex = ti; } if (maxindex == -1 topicDist[maxindex] < threshold) break; pw.print(maxindex + " " + topicDist[maxindex] + " "); topicDist[maxindex] = 0; } pw.println(' '); } } catch (IOException e) { e.printStackTrace(); } } | /**
* This prints the topics in the order of their proportion.
*
* @author Shockley Xiang Li
* @param f the output file to write the topic proportions to
* @param threshold minimum topic proportion to be printed
* @param max maximum number of topics to print per document
*/ | This prints the topics in the order of their proportion | printNewDocumentTopics | {
"repo_name": "shockley/mymallet",
"path": "src/edu/nudt/influx/Utility/LdaRecorder.java",
"license": "epl-1.0",
"size": 27208
} | [
"java.io.File",
"java.io.FileWriter",
"java.io.IOException",
"java.io.PrintWriter"
] | import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; | import java.io.*; | [
"java.io"
] | java.io; | 2,047,732 |
public @Nonnull String[] getResources() {
if( resources == null ) {
resources = new String[0];
}
return resources;
} | @Nonnull String[] function() { if( resources == null ) { resources = new String[0]; } return resources; } | /**
* Resource(s) to which the cloud police rule applies
* @return resource identifier to which the policy rule applies, empty for any resource
*/ | Resource(s) to which the cloud police rule applies | getResources | {
"repo_name": "JanewzhWang/dasein-cloud-core",
"path": "src/main/java/org/dasein/cloud/identity/CloudPolicyRule.java",
"license": "apache-2.0",
"size": 4323
} | [
"javax.annotation.Nonnull"
] | import javax.annotation.Nonnull; | import javax.annotation.*; | [
"javax.annotation"
] | javax.annotation; | 1,323,462 |
@Override
public int doEndTag() throws JspException {
return EVAL_PAGE;
} | int function() throws JspException { return EVAL_PAGE; } | /**
* Default processing of the end tag returning EVAL_PAGE.
*
* @return EVAL_PAGE
* @throws JspException if an error occurs while processing this tag
*
* @see Tag#doEndTag()
*/ | Default processing of the end tag returning EVAL_PAGE | doEndTag | {
"repo_name": "apache/tomcat",
"path": "java/jakarta/servlet/jsp/tagext/TagSupport.java",
"license": "apache-2.0",
"size": 8026
} | [
"jakarta.servlet.jsp.JspException"
] | import jakarta.servlet.jsp.JspException; | import jakarta.servlet.jsp.*; | [
"jakarta.servlet.jsp"
] | jakarta.servlet.jsp; | 808,541 |
public CachedSensorValue getCachedSensorValue(SensorEnum sensorType) {
if (sensorType != null && sensorValues != null) {
for (CachedSensorValue cSensorValue : sensorValues) {
if (cSensorValue.getSensorType().equals(sensorType)) {
return cSensorValue;
}
}
}
return null;
} | CachedSensorValue function(SensorEnum sensorType) { if (sensorType != null && sensorValues != null) { for (CachedSensorValue cSensorValue : sensorValues) { if (cSensorValue.getSensorType().equals(sensorType)) { return cSensorValue; } } } return null; } | /**
* Returns the {@link CachedSensorValue} of the given sensor type or null, if no {@link CachedSensorValue} for the
* given sensor type exists.
*
* @param sensorType can be null
* @return the {@link CachedSensorValue} of the given sensorType or null
*/ | Returns the <code>CachedSensorValue</code> of the given sensor type or null, if no <code>CachedSensorValue</code> for the given sensor type exists | getCachedSensorValue | {
"repo_name": "MikeJMajor/openhab2-addons-dlinksmarthome",
"path": "bundles/org.openhab.binding.digitalstrom/src/main/java/org/openhab/binding/digitalstrom/internal/lib/climate/jsonresponsecontainer/BaseSensorValues.java",
"license": "epl-1.0",
"size": 5975
} | [
"org.openhab.binding.digitalstrom.internal.lib.climate.datatypes.CachedSensorValue",
"org.openhab.binding.digitalstrom.internal.lib.structure.devices.deviceparameters.constants.SensorEnum"
] | import org.openhab.binding.digitalstrom.internal.lib.climate.datatypes.CachedSensorValue; import org.openhab.binding.digitalstrom.internal.lib.structure.devices.deviceparameters.constants.SensorEnum; | import org.openhab.binding.digitalstrom.internal.lib.climate.datatypes.*; import org.openhab.binding.digitalstrom.internal.lib.structure.devices.deviceparameters.constants.*; | [
"org.openhab.binding"
] | org.openhab.binding; | 2,336,752 |
public void setHighlightStrength(float _highlightStrength) {
mHighlightStrength = _highlightStrength;
for (PieModel model : mPieData) {
highlightSlice(model);
}
invalidate();
} | void function(float _highlightStrength) { mHighlightStrength = _highlightStrength; for (PieModel model : mPieData) { highlightSlice(model); } invalidate(); } | /**
* Sets the highlight strength for the InnerPaddingOutline.
*
* @param _highlightStrength The highlighting value for the outline.
*/ | Sets the highlight strength for the InnerPaddingOutline | setHighlightStrength | {
"repo_name": "j4velin/EazeGraph",
"path": "src/main/java/org/eazegraph/lib/charts/PieChart.java",
"license": "apache-2.0",
"size": 46830
} | [
"org.eazegraph.lib.models.PieModel"
] | import org.eazegraph.lib.models.PieModel; | import org.eazegraph.lib.models.*; | [
"org.eazegraph.lib"
] | org.eazegraph.lib; | 753,467 |
public Grammar getGrammar(XMLGrammarDescription desc) {
synchronized (fGrammars) {
int hash = hashCode(desc);
int index = (hash & 0x7FFFFFFF) % fGrammars.length;
for (Entry entry = fGrammars[index] ; entry != null ; entry = entry.next) {
if ((entry.hash == hash) && equals(entry.desc, desc)) {
return entry.grammar;
}
}
return null;
}
} // getGrammar(XMLGrammarDescription):Grammar | Grammar function(XMLGrammarDescription desc) { synchronized (fGrammars) { int hash = hashCode(desc); int index = (hash & 0x7FFFFFFF) % fGrammars.length; for (Entry entry = fGrammars[index] ; entry != null ; entry = entry.next) { if ((entry.hash == hash) && equals(entry.desc, desc)) { return entry.grammar; } } return null; } } | /**
* Returns the grammar associated to the specified grammar description.
* Currently, the root element name is used as the key for DTD grammars
* and the target namespace is used as the key for Schema grammars.
*
* @param desc The Grammar Description.
*/ | Returns the grammar associated to the specified grammar description. Currently, the root element name is used as the key for DTD grammars and the target namespace is used as the key for Schema grammars | getGrammar | {
"repo_name": "haikuowuya/android_system_code",
"path": "src/com/sun/org/apache/xerces/internal/util/XMLGrammarPoolImpl.java",
"license": "apache-2.0",
"size": 13512
} | [
"com.sun.org.apache.xerces.internal.xni.grammars.Grammar",
"com.sun.org.apache.xerces.internal.xni.grammars.XMLGrammarDescription"
] | import com.sun.org.apache.xerces.internal.xni.grammars.Grammar; import com.sun.org.apache.xerces.internal.xni.grammars.XMLGrammarDescription; | import com.sun.org.apache.xerces.internal.xni.grammars.*; | [
"com.sun.org"
] | com.sun.org; | 1,115,719 |
protected boolean save(List<FileBasedDatasetContainer> data, String filename) {
int i;
StringBuilder str;
str = new StringBuilder();
for (i = 0; i < data.size(); i++) {
if (i > 0)
str.append("\n");
str.append(data.get(i).toString());
}
return FileUtils.writeToFile(m_TestHelper.getTmpDirectory() + File.separator + filename, str, false);
} | boolean function(List<FileBasedDatasetContainer> data, String filename) { int i; StringBuilder str; str = new StringBuilder(); for (i = 0; i < data.size(); i++) { if (i > 0) str.append("\n"); str.append(data.get(i).toString()); } return FileUtils.writeToFile(m_TestHelper.getTmpDirectory() + File.separator + filename, str, false); } | /**
* Saves the data in the tmp directory.
*
* @param data the data to save
* @param filename the filename to save to (without path)
* @return true if successfully saved
*/ | Saves the data in the tmp directory | save | {
"repo_name": "waikato-datamining/adams-base",
"path": "adams-ml/src/test/java/adams/flow/transformer/preparefilebaseddataset/AbstractFileBasedDatasetPreparationTestCase.java",
"license": "gpl-3.0",
"size": 4912
} | [
"java.io.File",
"java.util.List"
] | import java.io.File; import java.util.List; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 1,752,320 |
private static boolean areEqualIgnoringSeconds(OffsetDateTime actual, OffsetDateTime other) {
return areEqualIgnoringMinutes(actual, other) && actual.getMinute() == other.getMinute();
} | static boolean function(OffsetDateTime actual, OffsetDateTime other) { return areEqualIgnoringMinutes(actual, other) && actual.getMinute() == other.getMinute(); } | /**
* Returns true if both OffsetDateTime are in the same year, month, day of month, hour and minute, false otherwise.
*
* @param actual the actual OffsetDateTime. expected not be null
* @param other the other OffsetDateTime. expected not be null
* @return true if both OffsetDateTime are in the same year, month, day of month, hour and minute, false otherwise.
*/ | Returns true if both OffsetDateTime are in the same year, month, day of month, hour and minute, false otherwise | areEqualIgnoringSeconds | {
"repo_name": "bric3/assertj-core",
"path": "src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java",
"license": "apache-2.0",
"size": 33264
} | [
"java.time.OffsetDateTime"
] | import java.time.OffsetDateTime; | import java.time.*; | [
"java.time"
] | java.time; | 1,226,116 |
public void setCpuUsage(Map<Long, Float> cpuUsage) {
this.cpuUsage = cpuUsage;
} | void function(Map<Long, Float> cpuUsage) { this.cpuUsage = cpuUsage; } | /**
* Sets the cpu usage.
*
* @param cpuUsage the cpuUsage to set
*/ | Sets the cpu usage | setCpuUsage | {
"repo_name": "impetus-opensource/jumbune",
"path": "common/src/main/java/org/jumbune/common/beans/NodeSystemStats.java",
"license": "lgpl-3.0",
"size": 5466
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,633,798 |
private OCSPCertificateStatusWrapper getOCSPCertificateStatus(CertificateSummary summary) {
CertificateStatus status;
switch (summary.getStatus()) {
case VALID:
status = CertificateStatus.GOOD;
break;
case REVOKED:
status = new RevokedStatus(summary.getRevocationTime().toDate(), summary.getRevocationReason().getCode());
break;
case EXPIRED:
status = new RevokedStatus(summary.getExpirationTime().toDate(), SUPERSEDED.getCode());
break;
case UNKNOWN:
status = getUnknownStatus();
break;
default:
throw new IllegalArgumentException("Unknown status! " + summary.getStatus().name());
}
DateTime updateTime = summary.getThisUpdateTime();
return new OCSPCertificateStatusWrapper(status,
updateTime,
updateTime.plusSeconds(certificateManager.getRefreshSeconds())
);
}
/**
* Gets the unknown CertificateStatus to return depending on the value of {@code rejectUnknown} | OCSPCertificateStatusWrapper function(CertificateSummary summary) { CertificateStatus status; switch (summary.getStatus()) { case VALID: status = CertificateStatus.GOOD; break; case REVOKED: status = new RevokedStatus(summary.getRevocationTime().toDate(), summary.getRevocationReason().getCode()); break; case EXPIRED: status = new RevokedStatus(summary.getExpirationTime().toDate(), SUPERSEDED.getCode()); break; case UNKNOWN: status = getUnknownStatus(); break; default: throw new IllegalArgumentException(STR + summary.getStatus().name()); } DateTime updateTime = summary.getThisUpdateTime(); return new OCSPCertificateStatusWrapper(status, updateTime, updateTime.plusSeconds(certificateManager.getRefreshSeconds()) ); } /** * Gets the unknown CertificateStatus to return depending on the value of {@code rejectUnknown} | /**
* Gets the OCSP Certificate Status Wrapper with the Certificate Status (good, revoked, unknown),
* the updated date, and the next update date.
*
* @param summary The certificate summary
* @return The status wrapper
*/ | Gets the OCSP Certificate Status Wrapper with the Certificate Status (good, revoked, unknown), the updated date, and the next update date | getOCSPCertificateStatus | {
"repo_name": "wdawson/revoker",
"path": "src/main/java/wdawson/samples/revoker/resources/OCSPResponderResource.java",
"license": "apache-2.0",
"size": 16988
} | [
"org.bouncycastle.cert.ocsp.CertificateStatus",
"org.bouncycastle.cert.ocsp.RevokedStatus",
"org.joda.time.DateTime"
] | import org.bouncycastle.cert.ocsp.CertificateStatus; import org.bouncycastle.cert.ocsp.RevokedStatus; import org.joda.time.DateTime; | import org.bouncycastle.cert.ocsp.*; import org.joda.time.*; | [
"org.bouncycastle.cert",
"org.joda.time"
] | org.bouncycastle.cert; org.joda.time; | 1,353,833 |
@Test
public void testOldFormatIgnoresPrevVersionsMostRecent() throws Exception {
storage.graalInfo.put(CommonConstants.CAP_GRAALVM_VERSION, "1.0.0");
setupCatalogFormat1("catalogFormat1.properties");
// copied from CatalogIterable, this is what interprets user input for install
Version gv = getLocalRegistry().getGraalVersion();
Version.Match selector = gv.match(Version.Match.Type.MOSTRECENT);
List<ComponentInfo> infos;
// check that versions 1.0.0-rcX are ignored for version 1.0.0
infos = new ArrayList<>(registry.loadComponents("ruby", selector, verbose));
assertEquals(1, infos.size());
infos = new ArrayList<>(registry.loadComponents("python", selector, verbose));
assertEquals(1, infos.size());
infos = new ArrayList<>(registry.loadComponents("r", selector, verbose));
assertEquals(1, infos.size());
} | void function() throws Exception { storage.graalInfo.put(CommonConstants.CAP_GRAALVM_VERSION, "1.0.0"); setupCatalogFormat1(STR); Version gv = getLocalRegistry().getGraalVersion(); Version.Match selector = gv.match(Version.Match.Type.MOSTRECENT); List<ComponentInfo> infos; infos = new ArrayList<>(registry.loadComponents("ruby", selector, verbose)); assertEquals(1, infos.size()); infos = new ArrayList<>(registry.loadComponents(STR, selector, verbose)); assertEquals(1, infos.size()); infos = new ArrayList<>(registry.loadComponents("r", selector, verbose)); assertEquals(1, infos.size()); } | /**
* Checks that previous versions (RCs) are ignored with the old format.
*
* @throws Exception
*/ | Checks that previous versions (RCs) are ignored with the old format | testOldFormatIgnoresPrevVersionsMostRecent | {
"repo_name": "smarr/Truffle",
"path": "vm/src/org.graalvm.component.installer.test/src/org/graalvm/component/installer/remote/CatalogCompatTest.java",
"license": "gpl-2.0",
"size": 10492
} | [
"java.util.ArrayList",
"java.util.List",
"org.graalvm.component.installer.CommonConstants",
"org.graalvm.component.installer.Version",
"org.graalvm.component.installer.model.ComponentInfo",
"org.junit.Assert"
] | import java.util.ArrayList; import java.util.List; import org.graalvm.component.installer.CommonConstants; import org.graalvm.component.installer.Version; import org.graalvm.component.installer.model.ComponentInfo; import org.junit.Assert; | import java.util.*; import org.graalvm.component.installer.*; import org.graalvm.component.installer.model.*; import org.junit.*; | [
"java.util",
"org.graalvm.component",
"org.junit"
] | java.util; org.graalvm.component; org.junit; | 2,184,890 |
public long append(String journalId, String producerId, InputStream in, int length)
throws IOException {
OutputStream out = new FileOutputStream(logFile, true);
try {
DataBuffer buffer = new DataBuffer();
buffer.writeUTF(journalId);
buffer.writeUTF(producerId);
buffer.writeInt(length);
buffer.copy(out);
IOUtils.copy(in, out);
out.flush();
lastRevision +=
2 + utfLength(journalId) + 2 + utfLength(producerId)
+ 4 + length;
return lastRevision;
} finally {
close(out);
}
} | long function(String journalId, String producerId, InputStream in, int length) throws IOException { OutputStream out = new FileOutputStream(logFile, true); try { DataBuffer buffer = new DataBuffer(); buffer.writeUTF(journalId); buffer.writeUTF(producerId); buffer.writeInt(length); buffer.copy(out); IOUtils.copy(in, out); out.flush(); lastRevision += 2 + utfLength(journalId) + 2 + utfLength(producerId) + 4 + length; return lastRevision; } finally { close(out); } } | /**
* Append a record to this log. Returns the revision following this record.
*
* @param journalId journal identifier
* @param producerId producer identifier
* @param in record to add
* @param length record length
* @throws java.io.IOException if an I/O error occurs
*/ | Append a record to this log. Returns the revision following this record | append | {
"repo_name": "sdmcraft/jackrabbit",
"path": "jackrabbit-core/src/main/java/org/apache/jackrabbit/core/journal/FileRecordLog.java",
"license": "apache-2.0",
"size": 13763
} | [
"java.io.FileOutputStream",
"java.io.IOException",
"java.io.InputStream",
"java.io.OutputStream",
"org.apache.commons.io.IOUtils"
] | import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.apache.commons.io.IOUtils; | import java.io.*; import org.apache.commons.io.*; | [
"java.io",
"org.apache.commons"
] | java.io; org.apache.commons; | 1,136,202 |
private static synchronized void getNewInstance() {
logger.debug("Get new instance of CrossrefClient");
instance = new CrossrefClient();
}
protected CrossrefClient() {
this.executorService = Executors.newCachedThreadPool(r -> {
Thread t = Executors.defaultThreadFactory().newThread(r);
t.setDaemon(true);
return t;
});
this.timedSemaphore = null;
this.futures = new HashMap<>();
setLimits(1, 1000);
} | static synchronized void function() { logger.debug(STR); instance = new CrossrefClient(); } protected CrossrefClient() { this.executorService = Executors.newCachedThreadPool(r -> { Thread t = Executors.defaultThreadFactory().newThread(r); t.setDaemon(true); return t; }); this.timedSemaphore = null; this.futures = new HashMap<>(); setLimits(1, 1000); } | /**
* Creates a new instance.
*/ | Creates a new instance | getNewInstance | {
"repo_name": "alexduch/grobid",
"path": "grobid-core/src/main/java/org/grobid/core/utilities/crossref/CrossrefClient.java",
"license": "apache-2.0",
"size": 6487
} | [
"java.util.HashMap",
"java.util.concurrent.Executors"
] | import java.util.HashMap; import java.util.concurrent.Executors; | import java.util.*; import java.util.concurrent.*; | [
"java.util"
] | java.util; | 1,342,436 |
public Region<String, HttpSession> getOperatingRegion(); | Region<String, HttpSession> function(); | /**
* Get the {@code Region} being used by client code to put attributes.
*
* @return a {@code Region<String, HttpSession>} reference
*/ | Get the Region being used by client code to put attributes | getOperatingRegion | {
"repo_name": "prasi-in/geode",
"path": "extensions/geode-modules-session-internal/src/main/java/org/apache/geode/modules/session/internal/common/SessionCache.java",
"license": "apache-2.0",
"size": 2011
} | [
"javax.servlet.http.HttpSession",
"org.apache.geode.cache.Region"
] | import javax.servlet.http.HttpSession; import org.apache.geode.cache.Region; | import javax.servlet.http.*; import org.apache.geode.cache.*; | [
"javax.servlet",
"org.apache.geode"
] | javax.servlet; org.apache.geode; | 310,638 |
public PolicyDTO readPolicyDTO(Resource resource) throws EntitlementException {
String policy = null;
String policyId = null;
AbstractPolicy absPolicy = null;
PolicyDTO dto = null;
try {
policy = new String((byte[]) resource.getContent(), Charset.forName("UTF-8"));
absPolicy = PAPPolicyReader.getInstance(null).getPolicy(policy);
policyId = absPolicy.getId().toASCIIString();
dto = new PolicyDTO();
dto.setPolicyId(policyId);
dto.setPolicy(policy);
dto.setActive(Boolean.parseBoolean(resource.getProperty(PDPConstants.ACTIVE_POLICY)));
String policyOrder = resource.getProperty(PDPConstants.POLICY_ORDER);
if (policyOrder != null) {
dto.setPolicyOrder(Integer.parseInt(policyOrder));
} else {
dto.setPolicyOrder(0);
}
String version = resource.getProperty(PDPConstants.POLICY_VERSION);
if (version != null) {
dto.setVersion(version);
}
String lastModifiedTime = resource.getProperty(PDPConstants.LAST_MODIFIED_TIME);
if (lastModifiedTime != null) {
dto.setLastModifiedTime(lastModifiedTime);
}
String lastModifiedUser = resource.getProperty(PDPConstants.LAST_MODIFIED_USER);
if (lastModifiedUser != null) {
dto.setLastModifiedUser(lastModifiedUser);
}
dto.setPolicyType(resource.getProperty(PDPConstants.POLICY_TYPE));
String policyReferences = resource.getProperty(PDPConstants.POLICY_REFERENCE);
if (policyReferences != null && policyReferences.trim().length() > 0) {
dto.setPolicyIdReferences(policyReferences.split(PDPConstants.ATTRIBUTE_SEPARATOR));
}
String policySetReferences = resource.getProperty(PDPConstants.POLICY_SET_REFERENCE);
if (policySetReferences != null && policySetReferences.trim().length() > 0) {
dto.setPolicySetIdReferences(policySetReferences.split(PDPConstants.ATTRIBUTE_SEPARATOR));
}
//read policy meta data that is used for basic policy editor
dto.setPolicyEditor(resource.getProperty(PDPConstants.POLICY_EDITOR_TYPE));
String basicPolicyEditorMetaDataAmount = resource.getProperty(PDPConstants.
BASIC_POLICY_EDITOR_META_DATA_AMOUNT);
if (basicPolicyEditorMetaDataAmount != null) {
int amount = Integer.parseInt(basicPolicyEditorMetaDataAmount);
String[] basicPolicyEditorMetaData = new String[amount];
for (int i = 0; i < amount; i++) {
basicPolicyEditorMetaData[i] = resource.
getProperty(PDPConstants.BASIC_POLICY_EDITOR_META_DATA + i);
}
dto.setPolicyEditorData(basicPolicyEditorMetaData);
}
PolicyAttributeBuilder policyAttributeBuilder = new PolicyAttributeBuilder();
dto.setAttributeDTOs(policyAttributeBuilder.
getPolicyMetaDataFromRegistryProperties(resource.getProperties()));
return dto;
} catch (RegistryException e) {
log.error("Error while loading entitlement policy " + policyId + " from PAP policy store", e);
throw new EntitlementException("Error while loading entitlement policy " + policyId +
" from PAP policy store");
}
}
| PolicyDTO function(Resource resource) throws EntitlementException { String policy = null; String policyId = null; AbstractPolicy absPolicy = null; PolicyDTO dto = null; try { policy = new String((byte[]) resource.getContent(), Charset.forName("UTF-8")); absPolicy = PAPPolicyReader.getInstance(null).getPolicy(policy); policyId = absPolicy.getId().toASCIIString(); dto = new PolicyDTO(); dto.setPolicyId(policyId); dto.setPolicy(policy); dto.setActive(Boolean.parseBoolean(resource.getProperty(PDPConstants.ACTIVE_POLICY))); String policyOrder = resource.getProperty(PDPConstants.POLICY_ORDER); if (policyOrder != null) { dto.setPolicyOrder(Integer.parseInt(policyOrder)); } else { dto.setPolicyOrder(0); } String version = resource.getProperty(PDPConstants.POLICY_VERSION); if (version != null) { dto.setVersion(version); } String lastModifiedTime = resource.getProperty(PDPConstants.LAST_MODIFIED_TIME); if (lastModifiedTime != null) { dto.setLastModifiedTime(lastModifiedTime); } String lastModifiedUser = resource.getProperty(PDPConstants.LAST_MODIFIED_USER); if (lastModifiedUser != null) { dto.setLastModifiedUser(lastModifiedUser); } dto.setPolicyType(resource.getProperty(PDPConstants.POLICY_TYPE)); String policyReferences = resource.getProperty(PDPConstants.POLICY_REFERENCE); if (policyReferences != null && policyReferences.trim().length() > 0) { dto.setPolicyIdReferences(policyReferences.split(PDPConstants.ATTRIBUTE_SEPARATOR)); } String policySetReferences = resource.getProperty(PDPConstants.POLICY_SET_REFERENCE); if (policySetReferences != null && policySetReferences.trim().length() > 0) { dto.setPolicySetIdReferences(policySetReferences.split(PDPConstants.ATTRIBUTE_SEPARATOR)); } dto.setPolicyEditor(resource.getProperty(PDPConstants.POLICY_EDITOR_TYPE)); String basicPolicyEditorMetaDataAmount = resource.getProperty(PDPConstants. BASIC_POLICY_EDITOR_META_DATA_AMOUNT); if (basicPolicyEditorMetaDataAmount != null) { int amount = Integer.parseInt(basicPolicyEditorMetaDataAmount); String[] basicPolicyEditorMetaData = new String[amount]; for (int i = 0; i < amount; i++) { basicPolicyEditorMetaData[i] = resource. getProperty(PDPConstants.BASIC_POLICY_EDITOR_META_DATA + i); } dto.setPolicyEditorData(basicPolicyEditorMetaData); } PolicyAttributeBuilder policyAttributeBuilder = new PolicyAttributeBuilder(); dto.setAttributeDTOs(policyAttributeBuilder. getPolicyMetaDataFromRegistryProperties(resource.getProperties())); return dto; } catch (RegistryException e) { log.error(STR + policyId + STR, e); throw new EntitlementException(STR + policyId + STR); } } | /**
* Reads PolicyDTO for given registry resource
*
* @param resource Registry resource
* @return PolicyDTO
* @throws EntitlementException throws, if fails
*/ | Reads PolicyDTO for given registry resource | readPolicyDTO | {
"repo_name": "wattale/carbon-identity",
"path": "components/identity/org.wso2.carbon.identity.entitlement/src/main/java/org/wso2/carbon/identity/entitlement/pap/store/PAPPolicyStoreReader.java",
"license": "apache-2.0",
"size": 17213
} | [
"java.nio.charset.Charset",
"org.wso2.balana.AbstractPolicy",
"org.wso2.carbon.identity.entitlement.EntitlementException",
"org.wso2.carbon.identity.entitlement.PDPConstants",
"org.wso2.carbon.identity.entitlement.dto.PolicyDTO",
"org.wso2.carbon.identity.entitlement.pap.PAPPolicyReader",
"org.wso2.carbon.identity.entitlement.policy.PolicyAttributeBuilder",
"org.wso2.carbon.registry.core.Resource",
"org.wso2.carbon.registry.core.exceptions.RegistryException"
] | import java.nio.charset.Charset; import org.wso2.balana.AbstractPolicy; import org.wso2.carbon.identity.entitlement.EntitlementException; import org.wso2.carbon.identity.entitlement.PDPConstants; import org.wso2.carbon.identity.entitlement.dto.PolicyDTO; import org.wso2.carbon.identity.entitlement.pap.PAPPolicyReader; import org.wso2.carbon.identity.entitlement.policy.PolicyAttributeBuilder; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.exceptions.RegistryException; | import java.nio.charset.*; import org.wso2.balana.*; import org.wso2.carbon.identity.entitlement.*; import org.wso2.carbon.identity.entitlement.dto.*; import org.wso2.carbon.identity.entitlement.pap.*; import org.wso2.carbon.identity.entitlement.policy.*; import org.wso2.carbon.registry.core.*; import org.wso2.carbon.registry.core.exceptions.*; | [
"java.nio",
"org.wso2.balana",
"org.wso2.carbon"
] | java.nio; org.wso2.balana; org.wso2.carbon; | 2,632,396 |
public void setCapacity(int value) {
Requires.argument(value >= count, "value", "Capacity must be greater than or equal to count");
if (value != elements.length) {
if (value > 0) {
elements = Arrays.copyOf(elements, value);
} else {
elements = ImmutableArrayList.<T>empty().array;
}
}
} | void function(int value) { Requires.argument(value >= count, "value", STR); if (value != elements.length) { if (value > 0) { elements = Arrays.copyOf(elements, value); } else { elements = ImmutableArrayList.<T>empty().array; } } } | /**
* Sets the length of the internal array.
*
* The internal array is reallocated to the given capacity if it is not already the specified length.
*
* @param value The length for the internal array.
* @throws IllegalArgumentException if {@code value} is less than {@link #size()}.
*/ | Sets the length of the internal array. The internal array is reallocated to the given capacity if it is not already the specified length | setCapacity | {
"repo_name": "sharwell/java-immutable",
"path": "src/com/tvl/util/ImmutableArrayList.java",
"license": "mit",
"size": 67574
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 1,846,526 |
@Test
public void testAutoSkipCheckInserted() throws Exception {
Cluster cluster = createNiceMock(Cluster.class);
RepositoryVersionEntity repoVersionEntity = createNiceMock(RepositoryVersionEntity.class);
EasyMock.expect(repoVersionEntity.getStackId()).andReturn(HDP_21).anyTimes();
RepositoryVersionDAO repoVersionDAO = createNiceMock(RepositoryVersionDAO.class);
EasyMock.expect(repoVersionDAO.findByStackNameAndVersion(EasyMock.anyString(),
EasyMock.anyString())).andReturn(repoVersionEntity).anyTimes();
UpgradeContext upgradeContext = createNiceMock(UpgradeContext.class);
EasyMock.expect(upgradeContext.getCluster()).andReturn(cluster).anyTimes();
EasyMock.expect(upgradeContext.getType()).andReturn(UpgradeType.ROLLING).anyTimes();
EasyMock.expect(upgradeContext.getDirection()).andReturn(Direction.UPGRADE).anyTimes();
EasyMock.expect(upgradeContext.getRepositoryVersion()).andReturn(repoVersionEntity).anyTimes();
EasyMock.expect(upgradeContext.isComponentFailureAutoSkipped()).andReturn(true).anyTimes();
EasyMock.expect(upgradeContext.isServiceCheckFailureAutoSkipped()).andReturn(true).anyTimes();
replayAll();
Grouping grouping = new Grouping();
grouping.skippable = true;
MockStageWrapperBuilder builder = new MockStageWrapperBuilder(grouping);
List<StageWrapper> mockStageWrappers = new ArrayList<>();
StageWrapper mockStageWrapper = EasyMock.createNiceMock(StageWrapper.class);
mockStageWrappers.add(mockStageWrapper);
builder.setMockStageWrappers(mockStageWrappers);
List<StageWrapper> stageWrappers = builder.build(upgradeContext);
Assert.assertEquals(2, stageWrappers.size());
StageWrapper skipSummaryWrapper = stageWrappers.get(1);
Assert.assertEquals(StageWrapper.Type.SERVER_SIDE_ACTION, skipSummaryWrapper.getType());
ServerActionTask task = (ServerActionTask)(skipSummaryWrapper.getTasks().get(0).getTasks().get(0));
Assert.assertEquals(AutoSkipFailedSummaryAction.class.getName(), task.implClass);
Assert.assertEquals(1, task.messages.size());
Assert.assertTrue(task.messages.get(0).contains("There are failures that were automatically skipped"));
verifyAll();
}
private final class MockStageWrapperBuilder extends StageWrapperBuilder {
private List<Integer> m_invocationOrder = new ArrayList<>();
private List<StageWrapper> m_stageWrappers = Collections.emptyList();
protected MockStageWrapperBuilder(Grouping grouping) {
super(grouping);
} | void function() throws Exception { Cluster cluster = createNiceMock(Cluster.class); RepositoryVersionEntity repoVersionEntity = createNiceMock(RepositoryVersionEntity.class); EasyMock.expect(repoVersionEntity.getStackId()).andReturn(HDP_21).anyTimes(); RepositoryVersionDAO repoVersionDAO = createNiceMock(RepositoryVersionDAO.class); EasyMock.expect(repoVersionDAO.findByStackNameAndVersion(EasyMock.anyString(), EasyMock.anyString())).andReturn(repoVersionEntity).anyTimes(); UpgradeContext upgradeContext = createNiceMock(UpgradeContext.class); EasyMock.expect(upgradeContext.getCluster()).andReturn(cluster).anyTimes(); EasyMock.expect(upgradeContext.getType()).andReturn(UpgradeType.ROLLING).anyTimes(); EasyMock.expect(upgradeContext.getDirection()).andReturn(Direction.UPGRADE).anyTimes(); EasyMock.expect(upgradeContext.getRepositoryVersion()).andReturn(repoVersionEntity).anyTimes(); EasyMock.expect(upgradeContext.isComponentFailureAutoSkipped()).andReturn(true).anyTimes(); EasyMock.expect(upgradeContext.isServiceCheckFailureAutoSkipped()).andReturn(true).anyTimes(); replayAll(); Grouping grouping = new Grouping(); grouping.skippable = true; MockStageWrapperBuilder builder = new MockStageWrapperBuilder(grouping); List<StageWrapper> mockStageWrappers = new ArrayList<>(); StageWrapper mockStageWrapper = EasyMock.createNiceMock(StageWrapper.class); mockStageWrappers.add(mockStageWrapper); builder.setMockStageWrappers(mockStageWrappers); List<StageWrapper> stageWrappers = builder.build(upgradeContext); Assert.assertEquals(2, stageWrappers.size()); StageWrapper skipSummaryWrapper = stageWrappers.get(1); Assert.assertEquals(StageWrapper.Type.SERVER_SIDE_ACTION, skipSummaryWrapper.getType()); ServerActionTask task = (ServerActionTask)(skipSummaryWrapper.getTasks().get(0).getTasks().get(0)); Assert.assertEquals(AutoSkipFailedSummaryAction.class.getName(), task.implClass); Assert.assertEquals(1, task.messages.size()); Assert.assertTrue(task.messages.get(0).contains(STR)); verifyAll(); } private final class MockStageWrapperBuilder extends StageWrapperBuilder { private List<Integer> m_invocationOrder = new ArrayList<>(); private List<StageWrapper> m_stageWrappers = Collections.emptyList(); protected MockStageWrapperBuilder(Grouping grouping) { super(grouping); } | /**
* Tests that a new task was inserted into the upgrade which will check for
* skipped failures and display a summary.
*
* @throws Exception
*/ | Tests that a new task was inserted into the upgrade which will check for skipped failures and display a summary | testAutoSkipCheckInserted | {
"repo_name": "sekikn/ambari",
"path": "ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/StageWrapperBuilderTest.java",
"license": "apache-2.0",
"size": 8158
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"org.apache.ambari.server.orm.dao.RepositoryVersionDAO",
"org.apache.ambari.server.orm.entities.RepositoryVersionEntity",
"org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryAction",
"org.apache.ambari.server.stack.upgrade.Direction",
"org.apache.ambari.server.stack.upgrade.Grouping",
"org.apache.ambari.server.stack.upgrade.ServerActionTask",
"org.apache.ambari.server.stack.upgrade.orchestrate.StageWrapper",
"org.apache.ambari.server.stack.upgrade.orchestrate.StageWrapperBuilder",
"org.apache.ambari.server.stack.upgrade.orchestrate.UpgradeContext",
"org.apache.ambari.server.state.Cluster",
"org.apache.ambari.spi.upgrade.UpgradeType",
"org.easymock.EasyMock",
"org.junit.Assert"
] | import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.ambari.server.orm.dao.RepositoryVersionDAO; import org.apache.ambari.server.orm.entities.RepositoryVersionEntity; import org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryAction; import org.apache.ambari.server.stack.upgrade.Direction; import org.apache.ambari.server.stack.upgrade.Grouping; import org.apache.ambari.server.stack.upgrade.ServerActionTask; import org.apache.ambari.server.stack.upgrade.orchestrate.StageWrapper; import org.apache.ambari.server.stack.upgrade.orchestrate.StageWrapperBuilder; import org.apache.ambari.server.stack.upgrade.orchestrate.UpgradeContext; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.spi.upgrade.UpgradeType; import org.easymock.EasyMock; import org.junit.Assert; | import java.util.*; import org.apache.ambari.server.orm.dao.*; import org.apache.ambari.server.orm.entities.*; import org.apache.ambari.server.serveraction.upgrades.*; import org.apache.ambari.server.stack.upgrade.*; import org.apache.ambari.server.stack.upgrade.orchestrate.*; import org.apache.ambari.server.state.*; import org.apache.ambari.spi.upgrade.*; import org.easymock.*; import org.junit.*; | [
"java.util",
"org.apache.ambari",
"org.easymock",
"org.junit"
] | java.util; org.apache.ambari; org.easymock; org.junit; | 2,231,945 |
public List<String> getMember(String path); | List<String> function(String path); | /**
* Retrieves the all members of a group.
*
* @param path HDF path to the group.
* @return List<String> of members.
*/ | Retrieves the all members of a group | getMember | {
"repo_name": "bramalingam/bioformats",
"path": "components/formats-gpl/src/loci/formats/services/JHDFService.java",
"license": "gpl-2.0",
"size": 7084
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,839,261 |