method (string, 13-441k) | clean_method (string, 7-313k) | doc (string, 17-17.3k) | comment (string, 3-1.42k) | method_name (string, 1-273) | extra (dict) | imports (sequence) | imports_info (string, 19-34.8k) | cluster_imports_info (string, 15-3.66k) | libraries (sequence) | libraries_info (string, 6-661) | id (int64, 0-2.92M)
---|---|---|---|---|---|---|---|---|---|---|---|
public void print(String string) throws IOException {
if (isBufferStreamBased()) {
String encoding = getContentTypeEncoding();
byte[] bytes;
if (encoding == null) {
bytes = string.getBytes();
} else {
bytes = string.getBytes(encoding);
}
buffer.getOutputStream().write(bytes);
return;
}
// make sure at least writer is initialized
buffer.getWriter().write(string);
}
| void function(String string) throws IOException { if (isBufferStreamBased()) { String encoding = getContentTypeEncoding(); byte[] bytes; if (encoding == null) { bytes = string.getBytes(); } else { bytes = string.getBytes(encoding); } buffer.getOutputStream().write(bytes); return; } buffer.getWriter().write(string); } | /**
* Appends string to the buffer.
*/ | Appends string to the buffer | print | {
"repo_name": "007slm/jodd",
"path": "jodd-servlet/src/main/java/jodd/servlet/wrapper/BufferResponseWrapper.java",
"license": "bsd-3-clause",
"size": 12471
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,288,381 |
@Deprecated
public void setEndpoint(String endpoint) throws JMSException {
try {
amazonSQSClient.setEndpoint(endpoint);
} catch (IllegalArgumentException ase) {
JMSException jmsException = new JMSException(ase.getMessage());
throw (JMSException) jmsException.initCause(ase);
}
} | void function(String endpoint) throws JMSException { try { amazonSQSClient.setEndpoint(endpoint); } catch (IllegalArgumentException ase) { JMSException jmsException = new JMSException(ase.getMessage()); throw (JMSException) jmsException.initCause(ase); } } | /**
* Sets SQS endpoint and wraps IllegalArgumentException.
* Deprecated. Instead of manipulating settings of existing AmazonSQS client, provide correct configuration when creating it through SQSConnectionFactory constructors.
*
* @param endpoint
* The endpoint (ex: "sqs.us-east-1.amazonaws.com") of the region
* specific AWS endpoint this client will communicate with.
* @throws JMSException
*/ | Sets SQS endpoint and wraps IllegalArgumentException. Deprecated. Instead of manipulating settings of existing AmazonSQS client, provide correct configuration when creating it through SQSConnectionFactory constructors | setEndpoint | {
"repo_name": "awslabs/amazon-sqs-java-messaging-lib",
"path": "src/main/java/com/amazon/sqs/javamessaging/AmazonSQSMessagingClientWrapper.java",
"license": "apache-2.0",
"size": 19435
} | [
"javax.jms.JMSException"
] | import javax.jms.JMSException; | import javax.jms.*; | [
"javax.jms"
] | javax.jms; | 713,632 |
public com.mozu.api.contracts.core.User getUser(String userId, String responseFields) throws Exception
{
MozuClient<com.mozu.api.contracts.core.User> client = com.mozu.api.clients.platform.adminuser.AdminUserClient.getUserClient( userId, responseFields);
client.setContext(_apiContext);
client.executeRequest();
return client.getResult();
} | com.mozu.api.contracts.core.User function(String userId, String responseFields) throws Exception { MozuClient<com.mozu.api.contracts.core.User> client = com.mozu.api.clients.platform.adminuser.AdminUserClient.getUserClient( userId, responseFields); client.setContext(_apiContext); client.executeRequest(); return client.getResult(); } | /**
* Retrieves the details of the specified administrator user account.
* <p><pre><code>
* AdminUser adminuser = new AdminUser();
* User user = adminuser.getUser( userId, responseFields);
* </code></pre></p>
* @param responseFields Use this field to include those fields which are not included by default.
* @param userId Unique identifier of the user whose tenant scopes you want to retrieve.
* @return com.mozu.api.contracts.core.User
* @see com.mozu.api.contracts.core.User
*/ | Retrieves the details of the specified administrator user account. <code><code> AdminUser adminuser = new AdminUser(); User user = adminuser.getUser( userId, responseFields); </code></code> | getUser | {
"repo_name": "bhewett/mozu-java",
"path": "mozu-javaasync-core/src/main/java/com/mozu/api/resources/platform/adminuser/AdminUserResource.java",
"license": "mit",
"size": 7781
} | [
"com.mozu.api.MozuClient"
] | import com.mozu.api.MozuClient; | import com.mozu.api.*; | [
"com.mozu.api"
] | com.mozu.api; | 1,870,959 |
@SuppressWarnings("unchecked")
public BinaryVector copy() {
BinaryVector copy = new BinaryVector(dimension);
copy.bitSet = (FixedBitSet) bitSet.clone();
if (!isSparse)
copy.votingRecord = (ArrayList<FixedBitSet>) votingRecord.clone();
return copy;
} | @SuppressWarnings(STR) BinaryVector function() { BinaryVector copy = new BinaryVector(dimension); copy.bitSet = (FixedBitSet) bitSet.clone(); if (!isSparse) copy.votingRecord = (ArrayList<FixedBitSet>) votingRecord.clone(); return copy; } | /**
* Returns a new copy of this vector, in dense format.
*/ | Returns a new copy of this vector, in dense format | copy | {
"repo_name": "Lucky-Dhakad/semanticvectors",
"path": "src/main/java/pitt/search/semanticvectors/vectors/BinaryVector.java",
"license": "bsd-3-clause",
"size": 31043
} | [
"java.util.ArrayList",
"org.apache.lucene.util.FixedBitSet"
] | import java.util.ArrayList; import org.apache.lucene.util.FixedBitSet; | import java.util.*; import org.apache.lucene.util.*; | [
"java.util",
"org.apache.lucene"
] | java.util; org.apache.lucene; | 2,835,613 |
@Override()
public java.lang.Class getJavaClass(
) {
return org.chocolate_milk.model.FixedAssetSalesInfo.class;
} | @Override() java.lang.Class function( ) { return org.chocolate_milk.model.FixedAssetSalesInfo.class; } | /**
* Method getJavaClass.
*
* @return the Java class represented by this descriptor.
*/ | Method getJavaClass | getJavaClass | {
"repo_name": "galleon1/chocolate-milk",
"path": "src/org/chocolate_milk/model/descriptors/FixedAssetSalesInfoDescriptor.java",
"license": "lgpl-3.0",
"size": 11863
} | [
"org.chocolate_milk.model.FixedAssetSalesInfo"
] | import org.chocolate_milk.model.FixedAssetSalesInfo; | import org.chocolate_milk.model.*; | [
"org.chocolate_milk.model"
] | org.chocolate_milk.model; | 2,748,229 |
public Observable<ServiceResponse<TroubleshootingResultInner>> beginGetTroubleshootingWithServiceResponseAsync(String resourceGroupName, String networkWatcherName, TroubleshootingParameters parameters) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (networkWatcherName == null) {
throw new IllegalArgumentException("Parameter networkWatcherName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
} | Observable<ServiceResponse<TroubleshootingResultInner>> function(String resourceGroupName, String networkWatcherName, TroubleshootingParameters parameters) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (networkWatcherName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (parameters == null) { throw new IllegalArgumentException(STR); } | /**
* Initiate troubleshooting on a specified resource.
*
* @param resourceGroupName The name of the resource group.
* @param networkWatcherName The name of the network watcher resource.
* @param parameters Parameters that define the resource to troubleshoot.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the TroubleshootingResultInner object
*/ | Initiate troubleshooting on a specified resource | beginGetTroubleshootingWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2020_03_01/src/main/java/com/microsoft/azure/management/network/v2020_03_01/implementation/NetworkWatchersInner.java",
"license": "mit",
"size": 190989
} | [
"com.microsoft.azure.management.network.v2020_03_01.TroubleshootingParameters",
"com.microsoft.rest.ServiceResponse"
] | import com.microsoft.azure.management.network.v2020_03_01.TroubleshootingParameters; import com.microsoft.rest.ServiceResponse; | import com.microsoft.azure.management.network.v2020_03_01.*; import com.microsoft.rest.*; | [
"com.microsoft.azure",
"com.microsoft.rest"
] | com.microsoft.azure; com.microsoft.rest; | 2,227,879 |
public void incrementCount(List<K> l) {
incrementCount(l, 1.0);
} | void function(List<K> l) { incrementCount(l, 1.0); } | /**
* Equivalent to incrementCount(l, 1.0).
*/ | Equivalent to incrementCount(l, 1.0) | incrementCount | {
"repo_name": "heeyounglee/hcoref",
"path": "src/edu/stanford/nlp/stats/GeneralizedCounter.java",
"license": "gpl-2.0",
"size": 29230
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,145,269 |
public double computeEffectiveParticles(
DataDistribution<ParameterType> particles );
public static interface Updater<ObservationType,ParameterType>
extends CloneableSerializable
{ | double function( DataDistribution<ParameterType> particles ); public static interface Updater<ObservationType,ParameterType> extends CloneableSerializable { | /**
* Computes the effective number of particles.
* @param particles
* Current state of the Particle filter.
* @return
* Effective number of particles.
*/ | Computes the effective number of particles | computeEffectiveParticles | {
"repo_name": "codeaudit/Foundry",
"path": "Components/LearningCore/Source/gov/sandia/cognition/statistics/bayesian/ParticleFilter.java",
"license": "bsd-3-clause",
"size": 4514
} | [
"gov.sandia.cognition.statistics.DataDistribution",
"gov.sandia.cognition.util.CloneableSerializable"
] | import gov.sandia.cognition.statistics.DataDistribution; import gov.sandia.cognition.util.CloneableSerializable; | import gov.sandia.cognition.statistics.*; import gov.sandia.cognition.util.*; | [
"gov.sandia.cognition"
] | gov.sandia.cognition; | 1,418,762 |
if (t.isClass()) {
return JInterfaceType.createUnresolvableType();
}
Preconditions.checkArgument(t.isInterface());
String typeName = NameConverter.convertClassOrInterfaceToFullName(t);
if (scope.containsInterfaceType(typeName)) {
return scope.getInterfaceType(typeName);
} else {
return scope.createNewInterfaceType(t);
}
} | if (t.isClass()) { return JInterfaceType.createUnresolvableType(); } Preconditions.checkArgument(t.isInterface()); String typeName = NameConverter.convertClassOrInterfaceToFullName(t); if (scope.containsInterfaceType(typeName)) { return scope.getInterfaceType(typeName); } else { return scope.createNewInterfaceType(t); } } | /**
* Searches for a type within the Type Hierarchy.
* If found, returns it.
*
* @param t binding representing the sought after type.
* @return Returns a type within the TypeHierachie or a Unspecified Type.
*/ | Searches for a type within the Type Hierarchy. If found, returns it | convertInterfaceType | {
"repo_name": "nishanttotla/predator",
"path": "cpachecker/src/org/sosy_lab/cpachecker/cfa/parser/eclipse/java/ASTTypeConverter.java",
"license": "gpl-3.0",
"size": 2647
} | [
"com.google.common.base.Preconditions",
"org.sosy_lab.cpachecker.cfa.types.java.JInterfaceType"
] | import com.google.common.base.Preconditions; import org.sosy_lab.cpachecker.cfa.types.java.JInterfaceType; | import com.google.common.base.*; import org.sosy_lab.cpachecker.cfa.types.java.*; | [
"com.google.common",
"org.sosy_lab.cpachecker"
] | com.google.common; org.sosy_lab.cpachecker; | 2,636,734 |
public static <T> Iterator<T> iterator( Enumeration<T> e )
{
return new Iterator<T>()
{ | static <T> Iterator<T> function( Enumeration<T> e ) { return new Iterator<T>() { | /**
* Wrap a legacy {@link Enumeration} into an Iterator
* <p>
* @param <T>
* @param e
* <p>
* @return
*/ | Wrap a legacy <code>Enumeration</code> into an Iterator | iterator | {
"repo_name": "peter-mount/opendata-common",
"path": "core/src/main/java/uk/trainwatch/util/CollectionUtils.java",
"license": "apache-2.0",
"size": 15855
} | [
"java.util.Enumeration",
"java.util.Iterator"
] | import java.util.Enumeration; import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 2,658,354 |
@SuppressWarnings("unchecked")
public List<String> getObservationIdentifier(Session session) {
Criteria criteria =
session.createCriteria(getObservationFactory().contextualReferencedClass())
.add(Restrictions.eq(ContextualReferencedObservation.DELETED, false))
.add(Restrictions.isNotNull(ContextualReferencedObservation.IDENTIFIER))
.setProjection(Projections.distinct(Projections.property(ContextualReferencedObservation.IDENTIFIER)));
LOGGER.debug("QUERY getObservationIdentifiers(): {}", HibernateHelper.getSqlString(criteria));
return criteria.list();
} | @SuppressWarnings(STR) List<String> function(Session session) { Criteria criteria = session.createCriteria(getObservationFactory().contextualReferencedClass()) .add(Restrictions.eq(ContextualReferencedObservation.DELETED, false)) .add(Restrictions.isNotNull(ContextualReferencedObservation.IDENTIFIER)) .setProjection(Projections.distinct(Projections.property(ContextualReferencedObservation.IDENTIFIER))); LOGGER.debug(STR, HibernateHelper.getSqlString(criteria)); return criteria.list(); } | /**
* Get all observation identifiers
*
* @param session
* Hibernate session
* @return Observation identifiers
*/ | Get all observation identifiers | getObservationIdentifier | {
"repo_name": "ahuarte47/SOS",
"path": "hibernate/common/src/main/java/org/n52/sos/ds/hibernate/dao/observation/AbstractObservationDAO.java",
"license": "gpl-2.0",
"size": 82385
} | [
"java.util.List",
"org.hibernate.Criteria",
"org.hibernate.Session",
"org.hibernate.criterion.Projections",
"org.hibernate.criterion.Restrictions",
"org.n52.sos.ds.hibernate.entities.observation.ContextualReferencedObservation",
"org.n52.sos.ds.hibernate.util.HibernateHelper"
] | import java.util.List; import org.hibernate.Criteria; import org.hibernate.Session; import org.hibernate.criterion.Projections; import org.hibernate.criterion.Restrictions; import org.n52.sos.ds.hibernate.entities.observation.ContextualReferencedObservation; import org.n52.sos.ds.hibernate.util.HibernateHelper; | import java.util.*; import org.hibernate.*; import org.hibernate.criterion.*; import org.n52.sos.ds.hibernate.entities.observation.*; import org.n52.sos.ds.hibernate.util.*; | [
"java.util",
"org.hibernate",
"org.hibernate.criterion",
"org.n52.sos"
] | java.util; org.hibernate; org.hibernate.criterion; org.n52.sos; | 1,773,914 |
public byte[] getBytesFromTextChild() throws XMLSecurityException {
return Base64.decode(XMLUtils.getFullTextChildrenFromElement(this.constructionElement));
} | byte[] function() throws XMLSecurityException { return Base64.decode(XMLUtils.getFullTextChildrenFromElement(this.constructionElement)); } | /**
* Method getBytesFromTextChild
*
* @return The base64 bytes from the text children of this element
* @throws XMLSecurityException
*/ | Method getBytesFromTextChild | getBytesFromTextChild | {
"repo_name": "rokn/Count_Words_2015",
"path": "testing/openjdk2/jdk/src/share/classes/com/sun/org/apache/xml/internal/security/utils/ElementProxy.java",
"license": "mit",
"size": 16335
} | [
"com.sun.org.apache.xml.internal.security.exceptions.XMLSecurityException"
] | import com.sun.org.apache.xml.internal.security.exceptions.XMLSecurityException; | import com.sun.org.apache.xml.internal.security.exceptions.*; | [
"com.sun.org"
] | com.sun.org; | 130,348 |
@Override
protected Instances processDatasetWithClusterer(Instances data, Clusterer clusterer) {
Instances result;
Map<Integer,Integer> counts;
int i;
int cluster;
List<Integer> clusters;
DenseInstance inst;
result = createOutputFormat(data);
counts = new HashMap<>();
// cluster instances
for (i = 0; i < data.numInstances(); i++) {
try {
cluster = clusterer.clusterInstance(data.instance(i));
if (!counts.containsKey(cluster))
counts.put(cluster, 0);
counts.put(cluster, counts.get(cluster) + 1);
}
catch (Exception e) {
getLogger().log(Level.SEVERE, "Failed to cluster instance #" + (i+1) + "!", e);
}
}
// generate overview
clusters = new ArrayList<>(counts.keySet());
Collections.sort(clusters);
for (i = 0; i < clusters.size(); i++) {
cluster = clusters.get(i);
inst = new DenseInstance(1.0, new double[]{cluster, counts.get(cluster)});
result.add(inst);
}
return result;
} | Instances function(Instances data, Clusterer clusterer) { Instances result; Map<Integer,Integer> counts; int i; int cluster; List<Integer> clusters; DenseInstance inst; result = createOutputFormat(data); counts = new HashMap<>(); for (i = 0; i < data.numInstances(); i++) { try { cluster = clusterer.clusterInstance(data.instance(i)); if (!counts.containsKey(cluster)) counts.put(cluster, 0); counts.put(cluster, counts.get(cluster) + 1); } catch (Exception e) { getLogger().log(Level.SEVERE, STR + (i+1) + "!", e); } } clusters = new ArrayList<>(counts.keySet()); Collections.sort(clusters); for (i = 0; i < clusters.size(); i++) { cluster = clusters.get(i); inst = new DenseInstance(1.0, new double[]{cluster, counts.get(cluster)}); result.add(inst); } return result; } | /**
* Performs some form of processing on the full dataset.
*/ | Performs some form of processing on the full dataset | processDatasetWithClusterer | {
"repo_name": "waikato-datamining/adams-base",
"path": "adams-weka/src/main/java/adams/flow/transformer/wekaclusterer/ClusterCounts.java",
"license": "gpl-3.0",
"size": 3770
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"java.util.logging.Level"
] | import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; | import java.util.*; import java.util.logging.*; | [
"java.util"
] | java.util; | 2,366,466 |
public CmsSimplePropertyEditor getPropertyEditor() {
return m_propertyEditor;
}
| CmsSimplePropertyEditor function() { return m_propertyEditor; } | /**
* Gets the property editor instance.<p>
*
* @return the property editor instance
*/ | Gets the property editor instance | getPropertyEditor | {
"repo_name": "it-tavis/opencms-core",
"path": "src-gwt/org/opencms/ade/postupload/client/ui/CmsUploadPropertyPanel.java",
"license": "lgpl-2.1",
"size": 6498
} | [
"org.opencms.gwt.client.property.CmsSimplePropertyEditor"
] | import org.opencms.gwt.client.property.CmsSimplePropertyEditor; | import org.opencms.gwt.client.property.*; | [
"org.opencms.gwt"
] | org.opencms.gwt; | 1,324,365 |
@Test
public void testServerRestartWithNewTypes() throws Exception {
IgniteEx node1 = start(1, KeyClass.class, ValueClass.class);
assertTypes(node1, ValueClass.class);
IgniteEx node2 = startClientNoCache(2);
GridCacheContext<Object, Object> context0 = node2.context().cache().context().cacheContext(CU.cacheId(CACHE_NAME));
node2.cache(CACHE_NAME);
GridCacheContext<Object, Object> context = node2.context().cache().context().cacheContext(CU.cacheId(CACHE_NAME));
GridCacheAdapter<Object, Object> entries = node2.context().cache().internalCache(CACHE_NAME);
assertTrue(entries.active());
node2.cache(CACHE_NAME);
assertTypes(node2, ValueClass.class);
stopGrid(1); | void function() throws Exception { IgniteEx node1 = start(1, KeyClass.class, ValueClass.class); assertTypes(node1, ValueClass.class); IgniteEx node2 = startClientNoCache(2); GridCacheContext<Object, Object> context0 = node2.context().cache().context().cacheContext(CU.cacheId(CACHE_NAME)); node2.cache(CACHE_NAME); GridCacheContext<Object, Object> context = node2.context().cache().context().cacheContext(CU.cacheId(CACHE_NAME)); GridCacheAdapter<Object, Object> entries = node2.context().cache().internalCache(CACHE_NAME); assertTrue(entries.active()); node2.cache(CACHE_NAME); assertTypes(node2, ValueClass.class); stopGrid(1); | /**
* Test client reconnect after server restart accompanied by schema change.
*
* @throws Exception If failed.
*/ | Test client reconnect after server restart accompanied by schema change | testServerRestartWithNewTypes | {
"repo_name": "NSAmelchev/ignite",
"path": "modules/indexing/src/test/java/org/apache/ignite/internal/processors/cache/index/SchemaExchangeSelfTest.java",
"license": "apache-2.0",
"size": 21252
} | [
"org.apache.ignite.internal.IgniteEx",
"org.apache.ignite.internal.processors.cache.GridCacheAdapter",
"org.apache.ignite.internal.processors.cache.GridCacheContext",
"org.apache.ignite.internal.util.typedef.internal.CU"
] | import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.processors.cache.GridCacheAdapter; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.util.typedef.internal.CU; | import org.apache.ignite.internal.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.util.typedef.internal.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,235,059 |
public void setValueAnimated(float _valueTo, long _animationDuration) {
mAnimationDuration = _animationDuration;
Message msg = new Message();
msg.what = AnimationMsg.SET_VALUE_ANIMATED.ordinal();
msg.obj = new float[]{mCurrentValue, _valueTo};
mAnimationHandler.sendMessage(msg);
} | void function(float _valueTo, long _animationDuration) { mAnimationDuration = _animationDuration; Message msg = new Message(); msg.what = AnimationMsg.SET_VALUE_ANIMATED.ordinal(); msg.obj = new float[]{mCurrentValue, _valueTo}; mAnimationHandler.sendMessage(msg); } | /**
* Sets the value of the circle view with an animation.
* The current value is used as the start value of the animation
*
* @param _valueTo value after animation
* @param _animationDuration the duration of the animation in milliseconds.
*/ | Sets the value of the circle view with an animation. The current value is used as the start value of the animation | setValueAnimated | {
"repo_name": "captainbupt/Circle-Progress-View",
"path": "CircleProgressView/src/main/java/at/grabner/circleprogress/CircleProgressView.java",
"license": "mit",
"size": 52578
} | [
"android.os.Message"
] | import android.os.Message; | import android.os.*; | [
"android.os"
] | android.os; | 1,427,121 |
@Test
public void testT1RV6D5_T1LV2D3() {
test_id = getTestId("T1RV6D5", "T1LV2D3", "225");
String src = selectTRVD("T1RV6D5");
String dest = selectTLVD("T1LV2D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
| void function() { test_id = getTestId(STR, STR, "225"); String src = selectTRVD(STR); String dest = selectTLVD(STR); String result = "."; try { result = TRVD_TLVD_Action(src, dest); } catch (RecognitionException e) { e.printStackTrace(); } catch (TokenStreamException e) { e.printStackTrace(); } assertTrue(Failure2, checkResult_Failure2(src, dest, result)); GraphicalEditor editor = getActiveEditor(); if (editor != null) { validateOrGenerateResults(editor, generateResults); } } | /**
* Perform the test for the given matrix column (T1RV6D5) and row (T1LV2D3).
*
*/ | Perform the test for the given matrix column (T1RV6D5) and row (T1LV2D3) | testT1RV6D5_T1LV2D3 | {
"repo_name": "jason-rhodes/bridgepoint",
"path": "src/org.xtuml.bp.als.oal.test/src/org/xtuml/bp/als/oal/test/SingleDimensionFixedArrayAssigmentTest_12_Generics.java",
"license": "apache-2.0",
"size": 155634
} | [
"org.xtuml.bp.ui.graphics.editor.GraphicalEditor"
] | import org.xtuml.bp.ui.graphics.editor.GraphicalEditor; | import org.xtuml.bp.ui.graphics.editor.*; | [
"org.xtuml.bp"
] | org.xtuml.bp; | 1,489,505 |
public ActivityPolicy withAdditionalProperties(Map<String, Object> additionalProperties) {
this.additionalProperties = additionalProperties;
return this;
} | ActivityPolicy function(Map<String, Object> additionalProperties) { this.additionalProperties = additionalProperties; return this; } | /**
* Set the additionalProperties property: Execution policy for an activity.
*
* @param additionalProperties the additionalProperties value to set.
* @return the ActivityPolicy object itself.
*/ | Set the additionalProperties property: Execution policy for an activity | withAdditionalProperties | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java",
"license": "mit",
"size": 6654
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,125,768 |
public Pair<Double, INDArray> nn(INDArray point) {
return nn(root, point, rect, Double.POSITIVE_INFINITY, null, 0);
} | Pair<Double, INDArray> function(INDArray point) { return nn(root, point, rect, Double.POSITIVE_INFINITY, null, 0); } | /**
* Query for nearest neighbor. Returns the distance and point
* @param point the point to query for
* @return
*/ | Query for nearest neighbor. Returns the distance and point | nn | {
"repo_name": "RobAltena/deeplearning4j",
"path": "deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/main/java/org/deeplearning4j/clustering/kdtree/KDTree.java",
"license": "apache-2.0",
"size": 12115
} | [
"org.nd4j.linalg.api.ndarray.INDArray",
"org.nd4j.linalg.primitives.Pair"
] | import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.primitives.Pair; | import org.nd4j.linalg.api.ndarray.*; import org.nd4j.linalg.primitives.*; | [
"org.nd4j.linalg"
] | org.nd4j.linalg; | 2,492,444 |
@Test
public void getFixListDefault() throws P4JavaException {
when(server.execMapCmdList(eq(FIXES.toString()),
argThat(FIX_FS_PARAMS_UNKNOWN_MATCHER), eq(null)))
.thenReturn(buildValidResultMap());
List<IFileSpec> specs = FileSpecBuilder.makeFileSpecList(DEPOT_DEV_PATH);
List<IFix> fixes = fixesDelegator.getFixList(specs, Changelist.DEFAULT,
TEST_JOB_123, true, 0);
verify(server).execMapCmdList(eq(FIXES.toString()), argThat(FIX_FS_PARAMS_UNKNOWN_MATCHER),
eq(null));
assertFixes(fixes);
} | void function() throws P4JavaException { when(server.execMapCmdList(eq(FIXES.toString()), argThat(FIX_FS_PARAMS_UNKNOWN_MATCHER), eq(null))) .thenReturn(buildValidResultMap()); List<IFileSpec> specs = FileSpecBuilder.makeFileSpecList(DEPOT_DEV_PATH); List<IFix> fixes = fixesDelegator.getFixList(specs, Changelist.DEFAULT, TEST_JOB_123, true, 0); verify(server).execMapCmdList(eq(FIXES.toString()), argThat(FIX_FS_PARAMS_UNKNOWN_MATCHER), eq(null)); assertFixes(fixes); } | /**
* Gets the fix list for a default.
*
* @throws P4JavaException
* the p4 java exception
*/ | Gets the fix list for a default | getFixListDefault | {
"repo_name": "groboclown/p4ic4idea",
"path": "p4java/r18-1/src/test/java/com/perforce/p4java/impl/mapbased/server/cmd/FixesDelegatorTest.java",
"license": "apache-2.0",
"size": 14139
} | [
"com.perforce.p4java.core.IFix",
"com.perforce.p4java.core.file.FileSpecBuilder",
"com.perforce.p4java.core.file.IFileSpec",
"com.perforce.p4java.exception.P4JavaException",
"com.perforce.p4java.impl.generic.core.Changelist",
"com.perforce.p4java.server.CmdSpec",
"java.util.List",
"org.mockito.ArgumentMatchers",
"org.mockito.Mockito"
] | import com.perforce.p4java.core.IFix; import com.perforce.p4java.core.file.FileSpecBuilder; import com.perforce.p4java.core.file.IFileSpec; import com.perforce.p4java.exception.P4JavaException; import com.perforce.p4java.impl.generic.core.Changelist; import com.perforce.p4java.server.CmdSpec; import java.util.List; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; | import com.perforce.p4java.core.*; import com.perforce.p4java.core.file.*; import com.perforce.p4java.exception.*; import com.perforce.p4java.impl.generic.core.*; import com.perforce.p4java.server.*; import java.util.*; import org.mockito.*; | [
"com.perforce.p4java",
"java.util",
"org.mockito"
] | com.perforce.p4java; java.util; org.mockito; | 320,981 |
private void logSlowRequests(long requestLifetime, Request<?> request,
byte[] responseContents, StatusLine statusLine) {
if (DEBUG || requestLifetime > SLOW_REQUEST_THRESHOLD_MS) {
VolleyLog.d("HTTP response for request=<%s> [lifetime=%d], [size=%s], " +
"[rc=%d], [retryCount=%s]", request, requestLifetime,
responseContents != null ? responseContents.length : "null",
statusLine.getStatusCode(), request.getRetryPolicy().getCurrentRetryCount());
}
} | void function(long requestLifetime, Request<?> request, byte[] responseContents, StatusLine statusLine) { if (DEBUG requestLifetime > SLOW_REQUEST_THRESHOLD_MS) { VolleyLog.d(STR + STR, request, requestLifetime, responseContents != null ? responseContents.length : "null", statusLine.getStatusCode(), request.getRetryPolicy().getCurrentRetryCount()); } } | /**
* Logs requests that took over SLOW_REQUEST_THRESHOLD_MS to complete.
*/ | Logs requests that took over SLOW_REQUEST_THRESHOLD_MS to complete | logSlowRequests | {
"repo_name": "shazangroup/Mobograph",
"path": "TMessagesProj/src/main/java/org/telegram/messenger/volley/toolbox/BasicNetwork.java",
"license": "gpl-2.0",
"size": 11099
} | [
"org.apache.http.StatusLine",
"org.telegram.messenger.volley.Request",
"org.telegram.messenger.volley.VolleyLog"
] | import org.apache.http.StatusLine; import org.telegram.messenger.volley.Request; import org.telegram.messenger.volley.VolleyLog; | import org.apache.http.*; import org.telegram.messenger.volley.*; | [
"org.apache.http",
"org.telegram.messenger"
] | org.apache.http; org.telegram.messenger; | 909,494 |
@Override
public void getFeature(Point request, StreamObserver<Feature> responseObserver) {
responseObserver.onNext(checkFeature(request));
responseObserver.onCompleted();
} | void function(Point request, StreamObserver<Feature> responseObserver) { responseObserver.onNext(checkFeature(request)); responseObserver.onCompleted(); } | /**
* Gets the {@link Feature} at the requested {@link Point}. If no feature at that location
* exists, an unnamed feature is returned at the provided location.
*
* @param request the requested location for the feature.
* @param responseObserver the observer that will receive the feature at the requested point.
*/ | Gets the <code>Feature</code> at the requested <code>Point</code>. If no feature at that location exists, an unnamed feature is returned at the provided location | getFeature | {
"repo_name": "robinbakkerus/workshop-grpc",
"path": "src/main/java/io/grpc/examples/routeguide/RouteGuideServer.java",
"license": "apache-2.0",
"size": 11416
} | [
"io.grpc.stub.StreamObserver"
] | import io.grpc.stub.StreamObserver; | import io.grpc.stub.*; | [
"io.grpc.stub"
] | io.grpc.stub; | 1,484,894 |
@Override
public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
if (childrenFeatures == null) {
super.getChildrenFeatures(object);
childrenFeatures.add(TurtlebotmissionPackage.Literals.MISSION__TASK);
}
return childrenFeatures;
}
| Collection<? extends EStructuralFeature> function(Object object) { if (childrenFeatures == null) { super.getChildrenFeatures(object); childrenFeatures.add(TurtlebotmissionPackage.Literals.MISSION__TASK); } return childrenFeatures; } | /**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
* {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This specifies how to implement <code>#getChildren</code> and is used to deduce an appropriate feature for an <code>org.eclipse.emf.edit.command.AddCommand</code>, <code>org.eclipse.emf.edit.command.RemoveCommand</code> or <code>org.eclipse.emf.edit.command.MoveCommand</code> in <code>#createCommand</code>. | getChildrenFeatures | {
"repo_name": "kribe48/wasp-mbse",
"path": "WASP-turtlebot-DSL/CodeFrame/se.chalmers.turtlebotmission.edit/src/turtlebotmission/provider/MissionItemProvider.java",
"license": "mit",
"size": 4644
} | [
"java.util.Collection",
"org.eclipse.emf.ecore.EStructuralFeature"
] | import java.util.Collection; import org.eclipse.emf.ecore.EStructuralFeature; | import java.util.*; import org.eclipse.emf.ecore.*; | [
"java.util",
"org.eclipse.emf"
] | java.util; org.eclipse.emf; | 2,770,749 |
public static MozuClient<com.mozu.api.contracts.commerceruntime.payments.PaymentCollection> getPaymentsClient(String orderId, String responseFields) throws Exception
{
MozuUrl url = com.mozu.api.urls.commerce.orders.PaymentUrl.getPaymentsUrl(orderId, responseFields);
String verb = "GET";
Class<?> clz = com.mozu.api.contracts.commerceruntime.payments.PaymentCollection.class;
MozuClient<com.mozu.api.contracts.commerceruntime.payments.PaymentCollection> mozuClient = (MozuClient<com.mozu.api.contracts.commerceruntime.payments.PaymentCollection>) MozuClientFactory.getInstance(clz);
mozuClient.setVerb(verb);
mozuClient.setResourceUrl(url);
return mozuClient;
}
| static MozuClient<com.mozu.api.contracts.commerceruntime.payments.PaymentCollection> function(String orderId, String responseFields) throws Exception { MozuUrl url = com.mozu.api.urls.commerce.orders.PaymentUrl.getPaymentsUrl(orderId, responseFields); String verb = "GET"; Class<?> clz = com.mozu.api.contracts.commerceruntime.payments.PaymentCollection.class; MozuClient<com.mozu.api.contracts.commerceruntime.payments.PaymentCollection> mozuClient = (MozuClient<com.mozu.api.contracts.commerceruntime.payments.PaymentCollection>) MozuClientFactory.getInstance(clz); mozuClient.setVerb(verb); mozuClient.setResourceUrl(url); return mozuClient; } | /**
* Retrieves information about all payment transactions submitted for the specified order.
* <p><pre><code>
* MozuClient<com.mozu.api.contracts.commerceruntime.payments.PaymentCollection> mozuClient=GetPaymentsClient( orderId, responseFields);
* client.setBaseAddress(url);
* client.executeRequest();
* PaymentCollection paymentCollection = client.Result();
* </code></pre></p>
* @param orderId Unique identifier of the order.
* @param responseFields Use this field to include those fields which are not included by default.
* @return Mozu.Api.MozuClient <com.mozu.api.contracts.commerceruntime.payments.PaymentCollection>
* @see com.mozu.api.contracts.commerceruntime.payments.PaymentCollection
*/ | Retrieves information about all payment transactions submitted for the specified order. <code><code> MozuClient mozuClient=GetPaymentsClient( orderId, responseFields); client.setBaseAddress(url); client.executeRequest(); PaymentCollection paymentCollection = client.Result(); </code></code> | getPaymentsClient | {
"repo_name": "johngatti/mozu-java",
"path": "mozu-java-core/src/main/java/com/mozu/api/clients/commerce/orders/PaymentClient.java",
"license": "mit",
"size": 12133
} | [
"com.mozu.api.MozuClient",
"com.mozu.api.MozuClientFactory",
"com.mozu.api.MozuUrl"
] | import com.mozu.api.MozuClient; import com.mozu.api.MozuClientFactory; import com.mozu.api.MozuUrl; | import com.mozu.api.*; | [
"com.mozu.api"
] | com.mozu.api; | 625,904 |
@Test
public void checkFailedPingToEntityError() throws Exception {
ThreadedDummyConnection threadedCon = getAuthentiactedDummyConnection();
//@formatter:off
String reply =
"<iq type='error' id='qrzSp-16' to='[email protected]'>" +
"<ping xmlns='urn:xmpp:ping'/>" +
"<error type='cancel'>" +
"<service-unavailable xmlns='urn:ietf:params:xml:ns:xmpp-stanzas'/>" +
"</error>" +
"</iq>";
//@formatter:on
IQ serviceUnavailable = (IQ) PacketParserUtils.parseStanza(reply);
threadedCon.addIQReply(serviceUnavailable);
PingManager pinger = PingManager.getInstanceFor(threadedCon);
boolean pingSuccess = pinger.ping(DUMMY_AT_EXAMPLE_ORG);
assertFalse(pingSuccess);
} | void function() throws Exception { ThreadedDummyConnection threadedCon = getAuthentiactedDummyConnection(); String reply = STR + STR + STR + STR + STR + "</iq>"; IQ serviceUnavailable = (IQ) PacketParserUtils.parseStanza(reply); threadedCon.addIQReply(serviceUnavailable); PingManager pinger = PingManager.getInstanceFor(threadedCon); boolean pingSuccess = pinger.ping(DUMMY_AT_EXAMPLE_ORG); assertFalse(pingSuccess); } | /**
* Server returns an exception for entity.
* @throws Exception
*/ | Server returns an exception for entity | checkFailedPingToEntityError | {
"repo_name": "esl/Smack",
"path": "smack-extensions/src/test/java/org/jivesoftware/smackx/ping/PingTest.java",
"license": "apache-2.0",
"size": 9386
} | [
"org.jivesoftware.smack.ThreadedDummyConnection",
"org.jivesoftware.smack.util.PacketParserUtils",
"org.junit.Assert"
] | import org.jivesoftware.smack.ThreadedDummyConnection; import org.jivesoftware.smack.util.PacketParserUtils; import org.junit.Assert; | import org.jivesoftware.smack.*; import org.jivesoftware.smack.util.*; import org.junit.*; | [
"org.jivesoftware.smack",
"org.junit"
] | org.jivesoftware.smack; org.junit; | 1,881,640 |
public void setColor(Color color); | void function(Color color); | /**
* Postavlja boju valnog oblika
*
* @param color zeljena boja
*/ | Postavlja boju valnog oblika | setColor | {
"repo_name": "mbezjak/vhdllab",
"path": "vhdllab-client/src/main/java/hr/fer/zemris/vhdllab/applets/simulations/Shape.java",
"license": "apache-2.0",
"size": 1978
} | [
"java.awt.Color"
] | import java.awt.Color; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,078,795 |
@Bean
AutoPilot autoPilot() {
return new AutoPilot(timeService(),
imuSensor(),
positionSensor(),
new DefaultPidControl(2,
0.0,
0.0,
PI / 4,
PI / 4),
servoController(),
bankingAngleCalculator());
} | AutoPilot autoPilot() { return new AutoPilot(timeService(), imuSensor(), positionSensor(), new DefaultPidControl(2, 0.0, 0.0, PI / 4, PI / 4), servoController(), bankingAngleCalculator()); } | /**
* Creates the {@link AutoPilot} bean.
*
* @return The {@link AutoPilot} bean.
*/ | Creates the <code>AutoPilot</code> bean | autoPilot | {
"repo_name": "harmenweber/space-project",
"path": "src/main/java/ch/harmen/spaceproject/SpaceProjectApplicationContext.java",
"license": "mit",
"size": 3001
} | [
"ch.harmen.iot.DefaultPidControl"
] | import ch.harmen.iot.DefaultPidControl; | import ch.harmen.iot.*; | [
"ch.harmen.iot"
] | ch.harmen.iot; | 135,104 |
private void initTilesUtil() throws ServletException {
if (TilesUtil.isTilesUtilImplSet()) {
log.debug("Skipping re-init of Tiles Plugin. Values defined in the " +
"first initialized plugin take precedence.");
return;
}
// Check if user has specified a TilesUtil implementation classname or not.
// If no implementation is specified, check if user has specified one
// shared single factory for all module, or one factory for each module.
if (this.getTilesUtilImplClassname() == null) {
if (isModuleAware()) {
TilesUtil.setTilesUtil(new TilesUtilStrutsModulesImpl());
} else {
TilesUtil.setTilesUtil(new TilesUtilStrutsImpl());
}
} else { // A classname is specified for the tilesUtilImp, use it.
try {
TilesUtilStrutsImpl impl =
(TilesUtilStrutsImpl) RequestUtils
.applicationClass(getTilesUtilImplClassname())
.newInstance();
TilesUtil.setTilesUtil(impl);
} catch (ClassCastException ex) {
throw new ServletException(
"Can't set TilesUtil implementation to '"
+ getTilesUtilImplClassname()
+ "'. TilesUtil implementation should be a subclass of '"
+ TilesUtilStrutsImpl.class.getName()
+ "'", ex);
} catch (Exception ex) {
throw new ServletException(
"Can't set TilesUtil implementation.",
ex);
}
}
} | void function() throws ServletException { if (TilesUtil.isTilesUtilImplSet()) { log.debug(STR + STR); return; } if (this.getTilesUtilImplClassname() == null) { if (isModuleAware()) { TilesUtil.setTilesUtil(new TilesUtilStrutsModulesImpl()); } else { TilesUtil.setTilesUtil(new TilesUtilStrutsImpl()); } } else { try { TilesUtilStrutsImpl impl = (TilesUtilStrutsImpl) RequestUtils .applicationClass(getTilesUtilImplClassname()) .newInstance(); TilesUtil.setTilesUtil(impl); } catch (ClassCastException ex) { throw new ServletException( STR + getTilesUtilImplClassname() + STR + TilesUtilStrutsImpl.class.getName() + "'", ex); } catch (Exception ex) { throw new ServletException( STR, ex); } } } | /**
* Set TilesUtil implementation according to properties 'tilesUtilImplClassname'
* and 'moduleAware'. These properties are taken into account only once. A
* side effect is that only the values set in the first initialized plugin are
* effectively taken into account.
* @throws ServletException
*/ | Set TilesUtil implementation according to properties 'tilesUtilImplClassname' and 'moduleAware'. These properties are taken into account only once. A side effect is that only the values set in the first initialized plugin are effectively taken into account | initTilesUtil | {
"repo_name": "davcamer/clients",
"path": "projects-for-testing/struts/tiles/src/main/java/org/apache/struts/tiles/TilesPlugin.java",
"license": "apache-2.0",
"size": 15064
} | [
"javax.servlet.ServletException",
"org.apache.struts.util.RequestUtils"
] | import javax.servlet.ServletException; import org.apache.struts.util.RequestUtils; | import javax.servlet.*; import org.apache.struts.util.*; | [
"javax.servlet",
"org.apache.struts"
] | javax.servlet; org.apache.struts; | 1,271,633 |
public FXMatrix getFXMatrix() {
return _fxMatrix;
} | FXMatrix function() { return _fxMatrix; } | /**
* Gets the fxMatrix field.
* @return the fxMatrix
*/ | Gets the fxMatrix field | getFXMatrix | {
"repo_name": "McLeodMoores/starling",
"path": "projects/analytics/src/main/java/com/opengamma/analytics/financial/provider/method/SuccessiveRootFinderCalibrationObjective.java",
"license": "apache-2.0",
"size": 4621
} | [
"com.opengamma.analytics.financial.forex.method.FXMatrix"
] | import com.opengamma.analytics.financial.forex.method.FXMatrix; | import com.opengamma.analytics.financial.forex.method.*; | [
"com.opengamma.analytics"
] | com.opengamma.analytics; | 92,936 |
public static XContentBuilder contentBuilder(XContentType type) throws IOException {
if (type == XContentType.JSON) {
return JsonXContent.contentBuilder();
} else if (type == XContentType.SMILE) {
return SmileXContent.contentBuilder();
} else if (type == XContentType.YAML) {
return YamlXContent.contentBuilder();
} else if (type == XContentType.CBOR) {
return CborXContent.contentBuilder();
}
throw new IllegalArgumentException("No matching content type for " + type);
} | static XContentBuilder function(XContentType type) throws IOException { if (type == XContentType.JSON) { return JsonXContent.contentBuilder(); } else if (type == XContentType.SMILE) { return SmileXContent.contentBuilder(); } else if (type == XContentType.YAML) { return YamlXContent.contentBuilder(); } else if (type == XContentType.CBOR) { return CborXContent.contentBuilder(); } throw new IllegalArgumentException(STR + type); } | /**
* Returns a binary content builder for the provided content type.
*/ | Returns a binary content builder for the provided content type | contentBuilder | {
"repo_name": "weipinghe/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java",
"license": "apache-2.0",
"size": 12991
} | [
"java.io.IOException",
"org.elasticsearch.common.xcontent.cbor.CborXContent",
"org.elasticsearch.common.xcontent.json.JsonXContent",
"org.elasticsearch.common.xcontent.smile.SmileXContent",
"org.elasticsearch.common.xcontent.yaml.YamlXContent"
] | import java.io.IOException; import org.elasticsearch.common.xcontent.cbor.CborXContent; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.smile.SmileXContent; import org.elasticsearch.common.xcontent.yaml.YamlXContent; | import java.io.*; import org.elasticsearch.common.xcontent.cbor.*; import org.elasticsearch.common.xcontent.json.*; import org.elasticsearch.common.xcontent.smile.*; import org.elasticsearch.common.xcontent.yaml.*; | [
"java.io",
"org.elasticsearch.common"
] | java.io; org.elasticsearch.common; | 247,498 |
Property createProperty(String name, Reader value) throws SearchEngineException; | Property createProperty(String name, Reader value) throws SearchEngineException; | /**
* Creates a property (TEXT type) for the specified reader.
*/ | Creates a property (TEXT type) for the specified reader | createProperty | {
"repo_name": "unkascrack/compass-fork",
"path": "compass-core/src/main/java/org/compass/core/ResourceFactory.java",
"license": "apache-2.0",
"size": 3818
} | [
"java.io.Reader",
"org.compass.core.engine.SearchEngineException"
] | import java.io.Reader; import org.compass.core.engine.SearchEngineException; | import java.io.*; import org.compass.core.engine.*; | [
"java.io",
"org.compass.core"
] | java.io; org.compass.core; | 2,134,285 |
public static String[] getNames(JSONObject jo) {
int length = jo.length();
if (length == 0) {
return null;
}
Iterator iterator = jo.keys();
String[] names = new String[length];
int i = 0;
while (iterator.hasNext()) {
names[i] = (String) iterator.next();
i += 1;
}
return names;
} | static String[] function(JSONObject jo) { int length = jo.length(); if (length == 0) { return null; } Iterator iterator = jo.keys(); String[] names = new String[length]; int i = 0; while (iterator.hasNext()) { names[i] = (String) iterator.next(); i += 1; } return names; } | /**
* Get an array of field names from a JSONObject.
*
* @return An array of field names, or null if there are no names.
*/ | Get an array of field names from a JSONObject | getNames | {
"repo_name": "FreeSchoolHackers/RiTa",
"path": "java/rita/json/JSONObject.java",
"license": "gpl-3.0",
"size": 56711
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 1,423,593 |
@Override
public KeyParameter deriveKey(CharSequence password) throws KeyCrypterException {
byte[] passwordBytes = null;
try {
passwordBytes = convertToByteArray(password);
byte[] salt = new byte[0];
if ( scryptParameters.getSalt() != null) {
salt = scryptParameters.getSalt().toByteArray();
} else {
// Warn the user that they are not using a salt.
// (Some early MultiBit wallets had a blank salt).
log.warn("You are using a ScryptParameters with no salt. Your encryption may be vulnerable to a dictionary attack.");
}
final Stopwatch watch = Stopwatch.createStarted();
byte[] keyBytes = SCrypt.scrypt(passwordBytes, salt, (int) scryptParameters.getN(), scryptParameters.getR(), scryptParameters.getP(), KEY_LENGTH);
watch.stop();
log.info("Deriving key took {} for {} scrypt iterations.", watch, scryptParameters.getN());
return new KeyParameter(keyBytes);
} catch (Exception e) {
throw new KeyCrypterException("Could not generate key from password and salt.", e);
} finally {
// Zero the password bytes.
if (passwordBytes != null) {
java.util.Arrays.fill(passwordBytes, (byte) 0);
}
}
} | KeyParameter function(CharSequence password) throws KeyCrypterException { byte[] passwordBytes = null; try { passwordBytes = convertToByteArray(password); byte[] salt = new byte[0]; if ( scryptParameters.getSalt() != null) { salt = scryptParameters.getSalt().toByteArray(); } else { log.warn(STR); } final Stopwatch watch = Stopwatch.createStarted(); byte[] keyBytes = SCrypt.scrypt(passwordBytes, salt, (int) scryptParameters.getN(), scryptParameters.getR(), scryptParameters.getP(), KEY_LENGTH); watch.stop(); log.info(STR, watch, scryptParameters.getN()); return new KeyParameter(keyBytes); } catch (Exception e) { throw new KeyCrypterException(STR, e); } finally { if (passwordBytes != null) { java.util.Arrays.fill(passwordBytes, (byte) 0); } } } | /**
* Generate AES key.
*
* This is a very slow operation compared to encrypt/ decrypt so it is normally worth caching the result.
*
* @param password The password to use in key generation
* @return The KeyParameter containing the created AES key
* @throws KeyCrypterException
*/ | Generate AES key. This is a very slow operation compared to encrypt/ decrypt so it is normally worth caching the result | deriveKey | {
"repo_name": "kmels/bitcoinj",
"path": "core/src/main/java/org/bitcoinj/crypto/KeyCrypterScrypt.java",
"license": "apache-2.0",
"size": 11123
} | [
"com.google.common.base.Stopwatch",
"com.lambdaworks.crypto.SCrypt",
"java.util.Arrays",
"org.bouncycastle.crypto.params.KeyParameter"
] | import com.google.common.base.Stopwatch; import com.lambdaworks.crypto.SCrypt; import java.util.Arrays; import org.bouncycastle.crypto.params.KeyParameter; | import com.google.common.base.*; import com.lambdaworks.crypto.*; import java.util.*; import org.bouncycastle.crypto.params.*; | [
"com.google.common",
"com.lambdaworks.crypto",
"java.util",
"org.bouncycastle.crypto"
] | com.google.common; com.lambdaworks.crypto; java.util; org.bouncycastle.crypto; | 403,726 |
public static void logSuccess(String user, String operation, String target,
ApplicationId appId, ContainerId containerId) {
if (LOG.isInfoEnabled()) {
LOG.info(createSuccessLog(user, operation, target, appId, null,
containerId));
}
} | static void function(String user, String operation, String target, ApplicationId appId, ContainerId containerId) { if (LOG.isInfoEnabled()) { LOG.info(createSuccessLog(user, operation, target, appId, null, containerId)); } } | /**
* Create a readable and parseable audit log string for a successful event.
*
* @param user User who made the service request to the ResourceManager
* @param operation Operation requested by the user.
* @param target The target on which the operation is being performed.
* @param appId Application Id in which operation was performed.
* @param containerId Container Id in which operation was performed.
*
* <br><br>
* Note that the {@link RMAuditLogger} uses tabs ('\t') as a key-val delimiter
* and hence the value fields should not contains tabs ('\t').
*/ | Create a readable and parseable audit log string for a successful event | logSuccess | {
"repo_name": "laxman-ch/hadoop",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java",
"license": "apache-2.0",
"size": 12567
} | [
"org.apache.hadoop.yarn.api.records.ApplicationId",
"org.apache.hadoop.yarn.api.records.ContainerId"
] | import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; | import org.apache.hadoop.yarn.api.records.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 335,870 |
public Object getValueAt(int rowIndex, int columnIndex) {
if (null == sanpv) {
sanpv = project.getSaNpvTable();
}
if (0 >= columnIndex) {
Mmf mmf = project.get(rowIndex);
return mmf.getId() + ": " + mmf.getName();
} else {
return sanpv[rowIndex][columnIndex - 1];
}
} | Object function(int rowIndex, int columnIndex) { if (null == sanpv) { sanpv = project.getSaNpvTable(); } if (0 >= columnIndex) { Mmf mmf = project.get(rowIndex); return mmf.getId() + STR + mmf.getName(); } else { return sanpv[rowIndex][columnIndex - 1]; } } | /**
* Method for getting the SaNpv value for a mmf at a period
*
* If column is 0 returns id and name for the mmf.
*
* @see no.ntnu.mmfplanner.model.Project#getSaNpvTable()
* @param rowIndex the index of the row (mmf)
* @param columnIndex the index of the column (period)
*/ | Method for getting the SaNpv value for a mmf at a period If column is 0 returns id and name for the mmf | getValueAt | {
"repo_name": "jodal/mmfplanner",
"path": "src/main/java/no/ntnu/mmfplanner/ui/model/SaNpvTableModel.java",
"license": "gpl-2.0",
"size": 3150
} | [
"no.ntnu.mmfplanner.model.Mmf"
] | import no.ntnu.mmfplanner.model.Mmf; | import no.ntnu.mmfplanner.model.*; | [
"no.ntnu.mmfplanner"
] | no.ntnu.mmfplanner; | 342,917 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<Flux<ByteBuffer>>> updateSegmentsWithResponseAsync(
String resourceGroupName,
String privateCloudName,
String segmentId,
WorkloadNetworkSegmentInner workloadNetworkSegment,
Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (privateCloudName == null) {
return Mono
.error(new IllegalArgumentException("Parameter privateCloudName is required and cannot be null."));
}
if (segmentId == null) {
return Mono.error(new IllegalArgumentException("Parameter segmentId is required and cannot be null."));
}
if (workloadNetworkSegment == null) {
return Mono
.error(
new IllegalArgumentException("Parameter workloadNetworkSegment is required and cannot be null."));
} else {
workloadNetworkSegment.validate();
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.updateSegments(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
privateCloudName,
segmentId,
workloadNetworkSegment,
accept,
context);
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> function( String resourceGroupName, String privateCloudName, String segmentId, WorkloadNetworkSegmentInner workloadNetworkSegment, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (privateCloudName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (segmentId == null) { return Mono.error(new IllegalArgumentException(STR)); } if (workloadNetworkSegment == null) { return Mono .error( new IllegalArgumentException(STR)); } else { workloadNetworkSegment.validate(); } final String accept = STR; context = this.client.mergeContext(context); return service .updateSegments( this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, this.client.getApiVersion(), privateCloudName, segmentId, workloadNetworkSegment, accept, context); } | /**
* Create or update a segment by id in a private cloud workload network.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param privateCloudName Name of the private cloud.
* @param segmentId NSX Segment identifier. Generally the same as the Segment's display name.
* @param workloadNetworkSegment NSX Segment.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return nSX Segment.
*/ | Create or update a segment by id in a private cloud workload network | updateSegmentsWithResponseAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/avs/azure-resourcemanager-avs/src/main/java/com/azure/resourcemanager/avs/implementation/WorkloadNetworksClientImpl.java",
"license": "mit",
"size": 538828
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context",
"com.azure.resourcemanager.avs.fluent.models.WorkloadNetworkSegmentInner",
"java.nio.ByteBuffer"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.resourcemanager.avs.fluent.models.WorkloadNetworkSegmentInner; import java.nio.ByteBuffer; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.avs.fluent.models.*; import java.nio.*; | [
"com.azure.core",
"com.azure.resourcemanager",
"java.nio"
] | com.azure.core; com.azure.resourcemanager; java.nio; | 1,416,338 |
@Test
public void testTC11() throws Exception {
final Path p = new Path("/TC11/foo");
System.out.println("p=" + p);
//a. Create file and write one block of data. Close file.
final int len1 = (int)BLOCK_SIZE;
{
FSDataOutputStream out = fs.create(p, false, buffersize, REPLICATION, BLOCK_SIZE);
AppendTestUtil.write(out, 0, len1);
out.close();
}
//b. Reopen file in "append" mode. Append half block of data.
FSDataOutputStream out = fs.append(p);
final int len2 = (int)BLOCK_SIZE/2;
AppendTestUtil.write(out, len1, len2);
out.hflush();
//c. Rename file to file.new.
final Path pnew = new Path(p + ".new");
assertTrue(fs.rename(p, pnew));
//d. Close file handle that was opened in (b).
out.close();
//check block sizes
final long len = fs.getFileStatus(pnew).getLen();
final LocatedBlocks locatedblocks = fs.dfs.getNamenode().getBlockLocations(pnew.toString(), 0L, len);
final int numblock = locatedblocks.locatedBlockCount();
for(int i = 0; i < numblock; i++) {
final LocatedBlock lb = locatedblocks.get(i);
final ExtendedBlock blk = lb.getBlock();
final long size = lb.getBlockSize();
if (i < numblock - 1) {
assertEquals(BLOCK_SIZE, size);
}
for(DatanodeInfo datanodeinfo : lb.getLocations()) {
final DataNode dn = cluster.getDataNode(datanodeinfo.getIpcPort());
final Block metainfo = DataNodeTestUtils.getFSDataset(dn).getStoredBlock(
blk.getBlockPoolId(), blk.getBlockId());
assertEquals(size, metainfo.getNumBytes());
}
}
} | void function() throws Exception { final Path p = new Path(STR); System.out.println("p=" + p); final int len1 = (int)BLOCK_SIZE; { FSDataOutputStream out = fs.create(p, false, buffersize, REPLICATION, BLOCK_SIZE); AppendTestUtil.write(out, 0, len1); out.close(); } FSDataOutputStream out = fs.append(p); final int len2 = (int)BLOCK_SIZE/2; AppendTestUtil.write(out, len1, len2); out.hflush(); final Path pnew = new Path(p + ".new"); assertTrue(fs.rename(p, pnew)); out.close(); final long len = fs.getFileStatus(pnew).getLen(); final LocatedBlocks locatedblocks = fs.dfs.getNamenode().getBlockLocations(pnew.toString(), 0L, len); final int numblock = locatedblocks.locatedBlockCount(); for(int i = 0; i < numblock; i++) { final LocatedBlock lb = locatedblocks.get(i); final ExtendedBlock blk = lb.getBlock(); final long size = lb.getBlockSize(); if (i < numblock - 1) { assertEquals(BLOCK_SIZE, size); } for(DatanodeInfo datanodeinfo : lb.getLocations()) { final DataNode dn = cluster.getDataNode(datanodeinfo.getIpcPort()); final Block metainfo = DataNodeTestUtils.getFSDataset(dn).getStoredBlock( blk.getBlockPoolId(), blk.getBlockId()); assertEquals(size, metainfo.getNumBytes()); } } } | /**
* TC11: Racing rename
* @throws IOException an exception might be thrown
*/ | TC11: Racing rename | testTC11 | {
"repo_name": "ZhangXFeng/hadoop",
"path": "src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java",
"license": "apache-2.0",
"size": 14601
} | [
"org.apache.hadoop.fs.FSDataOutputStream",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hdfs.protocol.Block",
"org.apache.hadoop.hdfs.protocol.DatanodeInfo",
"org.apache.hadoop.hdfs.protocol.ExtendedBlock",
"org.apache.hadoop.hdfs.protocol.LocatedBlock",
"org.apache.hadoop.hdfs.protocol.LocatedBlocks",
"org.apache.hadoop.hdfs.server.datanode.DataNode",
"org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils",
"org.junit.Assert"
] | import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.junit.Assert; | import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.datanode.*; import org.junit.*; | [
"org.apache.hadoop",
"org.junit"
] | org.apache.hadoop; org.junit; | 596,163 |
public static java.util.List extractCleansingandDressingComponentList(ims.domain.ILightweightDomainFactory domainFactory, ims.spinalinjuries.vo.NurAssessmentCleansingDressingVoCollection voCollection)
{
return extractCleansingandDressingComponentList(domainFactory, voCollection, null, new HashMap());
}
| static java.util.List function(ims.domain.ILightweightDomainFactory domainFactory, ims.spinalinjuries.vo.NurAssessmentCleansingDressingVoCollection voCollection) { return extractCleansingandDressingComponentList(domainFactory, voCollection, null, new HashMap()); } | /**
* Create the ims.nursing.assessment.domain.objects.CleansingandDressingComponent list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/ | Create the ims.nursing.assessment.domain.objects.CleansingandDressingComponent list from the value object collection | extractCleansingandDressingComponentList | {
"repo_name": "FreudianNM/openMAXIMS",
"path": "Source Library/openmaxims_workspace/ValueObjects/src/ims/spinalinjuries/vo/domain/NurAssessmentCleansingDressingVoAssembler.java",
"license": "agpl-3.0",
"size": 25021
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 622,678 |
public static void getBrokerSchemaFolderName(JET2Context context, String select, String variable) {
XPathContextExtender extender = XPathContextExtender.getInstance(context);
Object currentContext = extender.currentXPathContextObject();
Object element = extender.resolveSingle(currentContext, select);
String brokerSchema = extender.getContent(element);
if (brokerSchema != null) {
brokerSchema = brokerSchema.replace(".", "/");
}
context.setVariable(variable, brokerSchema);
} | static void function(JET2Context context, String select, String variable) { XPathContextExtender extender = XPathContextExtender.getInstance(context); Object currentContext = extender.currentXPathContextObject(); Object element = extender.resolveSingle(currentContext, select); String brokerSchema = extender.getContent(element); if (brokerSchema != null) { brokerSchema = brokerSchema.replace(".", "/"); } context.setVariable(variable, brokerSchema); } | /**
* Transform broker schema to schema folder name
*
* @param context
* <code>JET2Context</code>.
* @param brokerSchema
* The broker schema.
* @param variable
* Where to put the resulting value.
*/ | Transform broker schema to schema folder name | getBrokerSchemaFolderName | {
"repo_name": "ot4i/service-facade-mq-request-response-pattern",
"path": "src/com.ibm.etools.mft.pattern.sen/src/com/ibm/etools/mft/pattern/sen/plugin/PatternUtility.java",
"license": "epl-1.0",
"size": 9252
} | [
"org.eclipse.jet.JET2Context",
"org.eclipse.jet.XPathContextExtender"
] | import org.eclipse.jet.JET2Context; import org.eclipse.jet.XPathContextExtender; | import org.eclipse.jet.*; | [
"org.eclipse.jet"
] | org.eclipse.jet; | 2,681,424 |
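The helper above only swaps dots for slashes before handing the value back to the JET template; a minimal standalone sketch of that core transformation, outside any JET context (the class name and sample schema are illustrative, not part of the pattern plugin):

public class BrokerSchemaPaths {
    // "com.example.flows" becomes "com/example/flows"; null stays null, as in the helper
    static String toFolderName(String brokerSchema) {
        return brokerSchema == null ? null : brokerSchema.replace(".", "/");
    }

    public static void main(String[] args) {
        System.out.println(toFolderName("com.example.flows")); // prints com/example/flows
    }
}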
public Choice getChoice() {
return new Choice(this);
}
public List<Unavailability> getUnavailabilities() { return iUnavailabilities; } | Choice function() { return new Choice(this); } public List<Unavailability> getUnavailabilities() { return iUnavailabilities; } | /**
* Choice matching this section
* @return choice matching this section
*/ | Choice matching this section | getChoice | {
"repo_name": "UniTime/cpsolver",
"path": "src/org/cpsolver/studentsct/model/Section.java",
"license": "lgpl-3.0",
"size": 38061
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,288,542 |
private JPanel getJStrainColorsPanel() {
if (jStrainColorsPanel == null) {
jStrainColorsPanel = new JPanel();
SpringLayout layout = new SpringLayout();
jStrainColorsPanel.setLayout(layout);
jStrainColorsPanel.setBorder(BorderFactory.createRaisedBevelBorder());
jStrainColorsPanel.add(getJStrainConstantCheckPanel(), null);
jStrainColorsPanel.add(getJStrainConstantSettingsPanel(), null);
jStrainColorsPanel.add(getJStrainRandomCheckPanel(), null);
jStrainColorsPanel.add(getJStrainRandomSettingsPanel(), null);
jStrainColorsPanel.add(getJStrainTintCheckPanel(), null);
jStrainColorsPanel.add(getJStrainTintSettingsPanel(), null);
jStrainColorsPanel.add(getJStrainTwoToneCheckPanel(), null);
jStrainColorsPanel.add(getJStrainTwoToneSettingsPanel(), null);
Util.makeCompactGrid(jStrainColorsPanel,layout,4,2,5,5,5,5);
}
return jStrainColorsPanel;
} | JPanel function() { if (jStrainColorsPanel == null) { jStrainColorsPanel = new JPanel(); SpringLayout layout = new SpringLayout(); jStrainColorsPanel.setLayout(layout); jStrainColorsPanel.setBorder(BorderFactory.createRaisedBevelBorder()); jStrainColorsPanel.add(getJStrainConstantCheckPanel(), null); jStrainColorsPanel.add(getJStrainConstantSettingsPanel(), null); jStrainColorsPanel.add(getJStrainRandomCheckPanel(), null); jStrainColorsPanel.add(getJStrainRandomSettingsPanel(), null); jStrainColorsPanel.add(getJStrainTintCheckPanel(), null); jStrainColorsPanel.add(getJStrainTintSettingsPanel(), null); jStrainColorsPanel.add(getJStrainTwoToneCheckPanel(), null); jStrainColorsPanel.add(getJStrainTwoToneSettingsPanel(), null); Util.makeCompactGrid(jStrainColorsPanel,layout,4,2,5,5,5,5); } return jStrainColorsPanel; } | /**
* This method initializes jStrainColorsPanel
*
* @return javax.swing.JPanel
*/ | This method initializes jStrainColorsPanel | getJStrainColorsPanel | {
"repo_name": "jmeppley/strainer",
"path": "src/amd/strainer/display/actions/DisplayOptionsDialog.java",
"license": "lgpl-3.0",
"size": 56827
} | [
"javax.swing.BorderFactory",
"javax.swing.JPanel",
"javax.swing.SpringLayout"
] | import javax.swing.BorderFactory; import javax.swing.JPanel; import javax.swing.SpringLayout; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 1,987,878 |
public PointF hDGetPoint(Q Dcon) {
return (hDGetHex(Dcon).getLoc());
}; | PointF function(Q Dcon) { return (hDGetHex(Dcon).getLoc()); }; | /**
* Gets the point associated with the HD port.
*/ | Gets the point associated with the HD port | hDGetPoint | {
"repo_name": "viridian1138/VectorVictor",
"path": "VectorVictor/VectorVictor/src/geomdir/depictors/Vect2Base.java",
"license": "gpl-3.0",
"size": 61111
} | [
"android.graphics.PointF"
] | import android.graphics.PointF; | import android.graphics.*; | [
"android.graphics"
] | android.graphics; | 1,474,878 |
public static final Parcelable.Creator<CharSequence> CHAR_SEQUENCE_CREATOR
= new Parcelable.Creator<CharSequence>() {
public CharSequence createFromParcel(Parcel p) {
int kind = p.readInt();
String string = p.readString();
if (string == null) {
return null;
}
if (kind == 1) {
return string;
}
SpannableString sp = new SpannableString(string);
while (true) {
kind = p.readInt();
if (kind == 0)
break;
switch (kind) {
case ALIGNMENT_SPAN:
readSpan(p, sp, new AlignmentSpan.Standard(p));
break;
case FOREGROUND_COLOR_SPAN:
readSpan(p, sp, new ForegroundColorSpan(p));
break;
case RELATIVE_SIZE_SPAN:
readSpan(p, sp, new RelativeSizeSpan(p));
break;
case SCALE_X_SPAN:
readSpan(p, sp, new ScaleXSpan(p));
break;
case STRIKETHROUGH_SPAN:
readSpan(p, sp, new StrikethroughSpan(p));
break;
case UNDERLINE_SPAN:
readSpan(p, sp, new UnderlineSpan(p));
break;
case STYLE_SPAN:
readSpan(p, sp, new StyleSpan(p));
break;
case BULLET_SPAN:
readSpan(p, sp, new BulletSpan(p));
break;
case QUOTE_SPAN:
readSpan(p, sp, new QuoteSpan(p));
break;
case LEADING_MARGIN_SPAN:
readSpan(p, sp, new LeadingMarginSpan.Standard(p));
break;
case URL_SPAN:
readSpan(p, sp, new URLSpan(p));
break;
case BACKGROUND_COLOR_SPAN:
readSpan(p, sp, new BackgroundColorSpan(p));
break;
case TYPEFACE_SPAN:
readSpan(p, sp, new TypefaceSpan(p));
break;
case SUPERSCRIPT_SPAN:
readSpan(p, sp, new SuperscriptSpan(p));
break;
case SUBSCRIPT_SPAN:
readSpan(p, sp, new SubscriptSpan(p));
break;
case ABSOLUTE_SIZE_SPAN:
readSpan(p, sp, new AbsoluteSizeSpan(p));
break;
case TEXT_APPEARANCE_SPAN:
readSpan(p, sp, new TextAppearanceSpan(p));
break;
case ANNOTATION:
readSpan(p, sp, new Annotation(p));
break;
case SUGGESTION_SPAN:
readSpan(p, sp, new SuggestionSpan(p));
break;
case SPELL_CHECK_SPAN:
readSpan(p, sp, new SpellCheckSpan(p));
break;
case SUGGESTION_RANGE_SPAN:
readSpan(p, sp, new SuggestionRangeSpan(p));
break;
case EASY_EDIT_SPAN:
readSpan(p, sp, new EasyEditSpan(p));
break;
case LOCALE_SPAN:
readSpan(p, sp, new LocaleSpan(p));
break;
default:
throw new RuntimeException("bogus span encoding " + kind);
}
}
return sp;
} | static final Parcelable.Creator<CharSequence> CHAR_SEQUENCE_CREATOR = new Parcelable.Creator<CharSequence>() { public CharSequence function(Parcel p) { int kind = p.readInt(); String string = p.readString(); if (string == null) { return null; } if (kind == 1) { return string; } SpannableString sp = new SpannableString(string); while (true) { kind = p.readInt(); if (kind == 0) break; switch (kind) { case ALIGNMENT_SPAN: readSpan(p, sp, new AlignmentSpan.Standard(p)); break; case FOREGROUND_COLOR_SPAN: readSpan(p, sp, new ForegroundColorSpan(p)); break; case RELATIVE_SIZE_SPAN: readSpan(p, sp, new RelativeSizeSpan(p)); break; case SCALE_X_SPAN: readSpan(p, sp, new ScaleXSpan(p)); break; case STRIKETHROUGH_SPAN: readSpan(p, sp, new StrikethroughSpan(p)); break; case UNDERLINE_SPAN: readSpan(p, sp, new UnderlineSpan(p)); break; case STYLE_SPAN: readSpan(p, sp, new StyleSpan(p)); break; case BULLET_SPAN: readSpan(p, sp, new BulletSpan(p)); break; case QUOTE_SPAN: readSpan(p, sp, new QuoteSpan(p)); break; case LEADING_MARGIN_SPAN: readSpan(p, sp, new LeadingMarginSpan.Standard(p)); break; case URL_SPAN: readSpan(p, sp, new URLSpan(p)); break; case BACKGROUND_COLOR_SPAN: readSpan(p, sp, new BackgroundColorSpan(p)); break; case TYPEFACE_SPAN: readSpan(p, sp, new TypefaceSpan(p)); break; case SUPERSCRIPT_SPAN: readSpan(p, sp, new SuperscriptSpan(p)); break; case SUBSCRIPT_SPAN: readSpan(p, sp, new SubscriptSpan(p)); break; case ABSOLUTE_SIZE_SPAN: readSpan(p, sp, new AbsoluteSizeSpan(p)); break; case TEXT_APPEARANCE_SPAN: readSpan(p, sp, new TextAppearanceSpan(p)); break; case ANNOTATION: readSpan(p, sp, new Annotation(p)); break; case SUGGESTION_SPAN: readSpan(p, sp, new SuggestionSpan(p)); break; case SPELL_CHECK_SPAN: readSpan(p, sp, new SpellCheckSpan(p)); break; case SUGGESTION_RANGE_SPAN: readSpan(p, sp, new SuggestionRangeSpan(p)); break; case EASY_EDIT_SPAN: readSpan(p, sp, new EasyEditSpan(p)); break; case LOCALE_SPAN: readSpan(p, sp, new LocaleSpan(p)); break; default: throw new RuntimeException(STR + kind); } } return sp; } | /**
* Read and return a new CharSequence, possibly with styles,
* from the parcel.
*/ | Read and return a new CharSequence, possibly with styles, from the parcel | createFromParcel | {
"repo_name": "JSDemos/android-sdk-20",
"path": "src/android/text/TextUtils.java",
"license": "apache-2.0",
"size": 59445
} | [
"android.os.Parcel",
"android.os.Parcelable",
"android.text.style.AbsoluteSizeSpan",
"android.text.style.AlignmentSpan",
"android.text.style.BackgroundColorSpan",
"android.text.style.BulletSpan",
"android.text.style.EasyEditSpan",
"android.text.style.ForegroundColorSpan",
"android.text.style.LeadingMarginSpan",
"android.text.style.LocaleSpan",
"android.text.style.QuoteSpan",
"android.text.style.RelativeSizeSpan",
"android.text.style.ScaleXSpan",
"android.text.style.SpellCheckSpan",
"android.text.style.StrikethroughSpan",
"android.text.style.StyleSpan",
"android.text.style.SubscriptSpan",
"android.text.style.SuggestionRangeSpan",
"android.text.style.SuggestionSpan",
"android.text.style.SuperscriptSpan",
"android.text.style.TextAppearanceSpan",
"android.text.style.TypefaceSpan",
"android.text.style.URLSpan",
"android.text.style.UnderlineSpan"
] | import android.os.Parcel; import android.os.Parcelable; import android.text.style.AbsoluteSizeSpan; import android.text.style.AlignmentSpan; import android.text.style.BackgroundColorSpan; import android.text.style.BulletSpan; import android.text.style.EasyEditSpan; import android.text.style.ForegroundColorSpan; import android.text.style.LeadingMarginSpan; import android.text.style.LocaleSpan; import android.text.style.QuoteSpan; import android.text.style.RelativeSizeSpan; import android.text.style.ScaleXSpan; import android.text.style.SpellCheckSpan; import android.text.style.StrikethroughSpan; import android.text.style.StyleSpan; import android.text.style.SubscriptSpan; import android.text.style.SuggestionRangeSpan; import android.text.style.SuggestionSpan; import android.text.style.SuperscriptSpan; import android.text.style.TextAppearanceSpan; import android.text.style.TypefaceSpan; import android.text.style.URLSpan; import android.text.style.UnderlineSpan; | import android.os.*; import android.text.style.*; | [
"android.os",
"android.text"
] | android.os; android.text; | 2,654,936 |
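A hedged round-trip sketch of how this creator is normally exercised: spanned text goes out through TextUtils.writeToParcel and comes back through CHAR_SEQUENCE_CREATOR, which rebuilds each span from the kind codes handled in the switch above. It assumes the android.os and android.text imports listed for this record plus android.text.Spanned; the sample text and the single UnderlineSpan are only examples.

static CharSequence roundTrip() {
    SpannableString styled = new SpannableString("hello world");
    styled.setSpan(new UnderlineSpan(), 0, 5, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
    Parcel parcel = Parcel.obtain();
    try {
        TextUtils.writeToParcel(styled, parcel, 0); // writes kind codes plus span ranges
        parcel.setDataPosition(0);                  // rewind before reading back
        return TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(parcel);
    } finally {
        parcel.recycle();
    }
}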
public void apply( ICurveWithNormals3f sweep , Point2f data[] , float lateralOffset , float angle , Point3f trans[] )
{
if( data.length != trans.length )
{
throw new IllegalArgumentException( "result must be the same size as data" );
}
for( int i = 0 ; i < data.length ; i++ )
{
apply( sweep , data[ i ].x , data[ i ].y + lateralOffset , angle , trans[ i ] );
}
}
| void function( ICurveWithNormals3f sweep , Point2f data[] , float lateralOffset , float angle , Point3f trans[] ) { if( data.length != trans.length ) { throw new IllegalArgumentException( STR ); } for( int i = 0 ; i < data.length ; i++ ) { apply( sweep , data[ i ].x , data[ i ].y + lateralOffset , angle , trans[ i ] ); } } | /**
* Transforms a collection of depth/offset points on the sweep into 3D world coordinates, storing them in the result collection. The result must be the same
* size as the data and contain no null elements.
*
* @return whether the operation was successful or not.
*/ | Transforms a collection of depth/offset points on the sweep into 3D world coordinates, storing them in the result collection. The result must be the same size as the data and contain no null elements | apply | {
"repo_name": "jedwards1211/breakout",
"path": "andork-j3d-utils/src/org/andork/math3d/curve/Sweeper3f.java",
"license": "gpl-2.0",
"size": 8589
} | [
"javax.vecmath.Point2f",
"javax.vecmath.Point3f",
"org.andork.math.curve.ICurveWithNormals3f"
] | import javax.vecmath.Point2f; import javax.vecmath.Point3f; import org.andork.math.curve.ICurveWithNormals3f; | import javax.vecmath.*; import org.andork.math.curve.*; | [
"javax.vecmath",
"org.andork.math"
] | javax.vecmath; org.andork.math; | 1,180,867 |
public TransLogTable getTransLogTable() {
return transLogTable;
} | TransLogTable function() { return transLogTable; } | /**
* Gets the log table for the transformation.
*
* @return the log table for the transformation
*/ | Gets the log table for the transformation | getTransLogTable | {
"repo_name": "eayoungs/pentaho-kettle",
"path": "engine/src/org/pentaho/di/trans/TransMeta.java",
"license": "apache-2.0",
"size": 221441
} | [
"org.pentaho.di.core.logging.TransLogTable"
] | import org.pentaho.di.core.logging.TransLogTable; | import org.pentaho.di.core.logging.*; | [
"org.pentaho.di"
] | org.pentaho.di; | 2,316,642 |
public boolean isIncludeParticipants() {
Set<ISharePreference> prefs = getPreferencesByType(IncludeParticipantsPreference.INCLUDE_PARTICIPANTS);
for(ISharePreference pref: prefs) {
return Boolean.parseBoolean(pref.getValue());
}
// preference not present, default is false
return false;
}
| boolean function() { Set<ISharePreference> prefs = getPreferencesByType(IncludeParticipantsPreference.INCLUDE_PARTICIPANTS); for(ISharePreference pref: prefs) { return Boolean.parseBoolean(pref.getValue()); } return false; } | /**
* Short cut to determine if this share has an
* IncludeParticipants preference set to true.
*
* @return the value of the IncludeParticipants preference, or false if not set
*/ | Short cut to determine if this share has an IncludeParticipants preference set to true | isIncludeParticipants | {
"repo_name": "nblair/shareurl",
"path": "src/main/java/edu/wisc/wisccal/shareurl/domain/SharePreferences.java",
"license": "apache-2.0",
"size": 14237
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 320,469 |
	public void clear() {
		// fill grid with background color
final int bgR = (int) (this.backgroundCol >> 16);
final int bgG = (int) ((this.backgroundCol >> 8) & 0xff);
final int bgB = (int) (this.backgroundCol & 0xff);
if (this.frame == null) {
final Graphics2D gr = this.image.createGraphics();
Color c = new Color(bgR, bgG, bgB);
gr.setBackground(c);
gr.clearRect(0, 0, this.width, this.height);
gr.setColor(c);
gr.fillRect(0, 0, this.width, this.height);
} else {
int p = 0;
for (int i = 0; i < width; i++) {
this.frame[p++] = (byte) bgR;
this.frame[p++] = (byte) bgG;
this.frame[p++] = (byte) bgB;
}
final int rw = width * 3;
for (int i = 1; i < height; i++) {
System.arraycopy(this.frame, 0, this.frame, i * rw, rw);
}
}
} | final int bgR = (int) (this.backgroundCol >> 16); final int bgG = (int) ((this.backgroundCol >> 8) & 0xff); final int bgB = (int) (this.backgroundCol & 0xff); if (this.frame == null) { final Graphics2D gr = this.image.createGraphics(); Color c = new Color(bgR, bgG, bgB); gr.setBackground(c); gr.clearRect(0, 0, this.width, this.height); gr.setColor(c); gr.fillRect(0, 0, this.width, this.height); } else { int p = 0; for (int i = 0; i < width; i++) { this.frame[p++] = (byte) bgR; this.frame[p++] = (byte) bgG; this.frame[p++] = (byte) bgB; } final int rw = width * 3; for (int i = 1; i < height; i++) { System.arraycopy(this.frame, 0, this.frame, i * rw, rw); } } } | /**
* Deletes all pixels of image and sets them to previously defined
* background color.
*/ | Deletes all pixels of image and sets them to previously defined background color | clear | {
"repo_name": "karajrish/loklak_server",
"path": "src/org/loklak/visualization/graphics/RasterPlotter.java",
"license": "lgpl-2.1",
"size": 46617
} | [
"java.awt.Color",
"java.awt.Graphics2D"
] | import java.awt.Color; import java.awt.Graphics2D; | import java.awt.*; | [
"java.awt"
] | java.awt; | 1,469,302 |
public Observable<ServiceResponse<NetworkInterfaceIPConfigurationInner>> getVirtualMachineScaleSetIpConfigurationWithServiceResponseAsync(String resourceGroupName, String virtualMachineScaleSetName, String virtualmachineIndex, String networkInterfaceName, String ipConfigurationName) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (virtualMachineScaleSetName == null) {
throw new IllegalArgumentException("Parameter virtualMachineScaleSetName is required and cannot be null.");
}
if (virtualmachineIndex == null) {
throw new IllegalArgumentException("Parameter virtualmachineIndex is required and cannot be null.");
}
if (networkInterfaceName == null) {
throw new IllegalArgumentException("Parameter networkInterfaceName is required and cannot be null.");
}
if (ipConfigurationName == null) {
throw new IllegalArgumentException("Parameter ipConfigurationName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
} | Observable<ServiceResponse<NetworkInterfaceIPConfigurationInner>> function(String resourceGroupName, String virtualMachineScaleSetName, String virtualmachineIndex, String networkInterfaceName, String ipConfigurationName) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (virtualMachineScaleSetName == null) { throw new IllegalArgumentException(STR); } if (virtualmachineIndex == null) { throw new IllegalArgumentException(STR); } if (networkInterfaceName == null) { throw new IllegalArgumentException(STR); } if (ipConfigurationName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } | /**
* Get the specified network interface ip configuration in a virtual machine scale set.
*
* @param resourceGroupName The name of the resource group.
* @param virtualMachineScaleSetName The name of the virtual machine scale set.
* @param virtualmachineIndex The virtual machine index.
* @param networkInterfaceName The name of the network interface.
* @param ipConfigurationName The name of the ip configuration.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the NetworkInterfaceIPConfigurationInner object
*/ | Get the specified network interface ip configuration in a virtual machine scale set | getVirtualMachineScaleSetIpConfigurationWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_08_01/src/main/java/com/microsoft/azure/management/network/v2019_08_01/implementation/NetworkInterfacesInner.java",
"license": "mit",
"size": 192401
} | [
"com.microsoft.rest.ServiceResponse"
] | import com.microsoft.rest.ServiceResponse; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 2,288,082 |
public RuleConfiguredTargetBuilder addOutputGroup(String name, NestedSet<Artifact> artifacts) {
getOutputGroupBuilder(name).addTransitive(artifacts);
return this;
} | RuleConfiguredTargetBuilder function(String name, NestedSet<Artifact> artifacts) { getOutputGroupBuilder(name).addTransitive(artifacts); return this; } | /**
* Adds a set of files to an output group.
*/ | Adds a set of files to an output group | addOutputGroup | {
"repo_name": "damienmg/bazel",
"path": "src/main/java/com/google/devtools/build/lib/analysis/RuleConfiguredTargetBuilder.java",
"license": "apache-2.0",
"size": 16782
} | [
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.collect.nestedset.NestedSet"
] | import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.collect.nestedset.NestedSet; | import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.collect.nestedset.*; | [
"com.google.devtools"
] | com.google.devtools; | 1,823,822 |
public Integer createReplyComment(ReplyCommentDTO replyCommentDTO);
| Integer function(ReplyCommentDTO replyCommentDTO); | /**
* Creates the reply comment.
*
* @param replyCommentDTO the reply comment dto
* @return the integer
*/ | Creates the reply comment | createReplyComment | {
"repo_name": "aholake/hiringviet",
"path": "src/main/java/vn/com/hiringviet/service/ReplyCommentService.java",
"license": "apache-2.0",
"size": 969
} | [
"vn.com.hiringviet.dto.ReplyCommentDTO"
] | import vn.com.hiringviet.dto.ReplyCommentDTO; | import vn.com.hiringviet.dto.*; | [
"vn.com.hiringviet"
] | vn.com.hiringviet; | 2,236,051 |
	public void testGetDescription() {
		for (final StravaSubscriptionObjectType type : StravaSubscriptionObjectType.values()) {
assertNotNull(type.getDescription());
}
} | for (final StravaSubscriptionObjectType type : StravaSubscriptionObjectType.values()) { assertNotNull(type.getDescription()); } } | /**
* Test returning the description
*/ | Test returning the description | testGetDescription | {
"repo_name": "danshannon/javastrava-test",
"path": "src/main/java/test/model/reference/StravaSubscriptionObjectTypeTest.java",
"license": "apache-2.0",
"size": 887
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 2,494,881 |
public void testLocationComparator() {
// Change the GeoLocation of the comments
GeoLocation specificLoc = new GeoLocation(0.0, 0.0);
GeoLocation geoLoc1 = new GeoLocation(1.4, 0.0);
GeoLocation geoLoc2 = new GeoLocation(8.0, 20.0);
GeoLocation geoLoc3 = new GeoLocation(-2.0, -4.0);
// Attach locations to comments
comment1.setLocation(geoLoc1);
comment2.setLocation(geoLoc2);
comment3.setLocation(geoLoc3);
// Create a list of (distance from current location, comment) pairs.
ArrayList<Pair<Double, Comment>> pairs = new ArrayList<Pair<Double, Comment>>();
for(Comment comment: comments) {
Double distance = specificLoc.distanceFrom(comment.getLocation());
pairs.add(Pair.create(distance, comment));
}
Collections.sort(pairs, new LocationComparator());
// Check the comments were sorted by a specific location.
assertTrue("Comments should be sorted by a specific geolocation",
pairs.get(0).first < pairs.get(1).first);
assertTrue(pairs.get(1).first < pairs.get(2).first);
}
| void function() { GeoLocation specificLoc = new GeoLocation(0.0, 0.0); GeoLocation geoLoc1 = new GeoLocation(1.4, 0.0); GeoLocation geoLoc2 = new GeoLocation(8.0, 20.0); GeoLocation geoLoc3 = new GeoLocation(-2.0, -4.0); comment1.setLocation(geoLoc1); comment2.setLocation(geoLoc2); comment3.setLocation(geoLoc3); ArrayList<Pair<Double, Comment>> pairs = new ArrayList<Pair<Double, Comment>>(); for(Comment comment: comments) { Double distance = specificLoc.distanceFrom(comment.getLocation()); pairs.add(Pair.create(distance, comment)); } Collections.sort(pairs, new LocationComparator()); assertTrue(STR, pairs.get(0).first < pairs.get(1).first); assertTrue(pairs.get(1).first < pairs.get(2).first); } | /**
* Checks that comments can be sorted based on some fixed GeoLocation using
* the LocationComparator.
*/ | Checks that comments can be sorted based on some fixed GeoLocation using the LocationComparator | testLocationComparator | {
"repo_name": "CMPUT301W14T06/LARD",
"path": "LARDTestTest/src/ca/ualberta/lard/test/ComparatorTests.java",
"license": "mit",
"size": 4501
} | [
"android.util.Pair",
"ca.ualberta.lard.comparator.LocationComparator",
"ca.ualberta.lard.model.Comment",
"ca.ualberta.lard.model.GeoLocation",
"java.util.ArrayList",
"java.util.Collections"
] | import android.util.Pair; import ca.ualberta.lard.comparator.LocationComparator; import ca.ualberta.lard.model.Comment; import ca.ualberta.lard.model.GeoLocation; import java.util.ArrayList; import java.util.Collections; | import android.util.*; import ca.ualberta.lard.comparator.*; import ca.ualberta.lard.model.*; import java.util.*; | [
"android.util",
"ca.ualberta.lard",
"java.util"
] | android.util; ca.ualberta.lard; java.util; | 2,554,880 |
public Integer removeAction(User loggedInUser,
String chainLabel,
Integer actionId) {
ActionChain chain = this.acUtil.getActionChainByLabel(loggedInUser, chainLabel);
for (ActionChainEntry entry : chain.getEntries()) {
if (entry.getAction().getId().equals(Long.valueOf(actionId))) {
ActionChainFactory.removeActionChainEntry(chain, entry);
return BaseHandler.VALID;
}
}
throw new NoSuchActionException("ID: " + actionId);
} | Integer function(User loggedInUser, String chainLabel, Integer actionId) { ActionChain chain = this.acUtil.getActionChainByLabel(loggedInUser, chainLabel); for (ActionChainEntry entry : chain.getEntries()) { if (entry.getAction().getId().equals(Long.valueOf(actionId))) { ActionChainFactory.removeActionChainEntry(chain, entry); return BaseHandler.VALID; } } throw new NoSuchActionException(STR + actionId); } | /**
* Remove an action from the Action Chain.
*
* @param loggedInUser The current user
* @param chainLabel The label of the Action Chain.
* @param actionId Action ID.
* @return 1 if successful, exception otherwise
*
* @xmlrpc.doc Remove an action from an Action Chain.
* @xmlrpc.param #param_desc("string", "sessionKey", "Session token, issued at login")
* @xmlrpc.param #param_desc("string", "chainLabel", "Label of the chain")
* @xmlrpc.param #param_desc("int", "actionId", "Action ID")
* @xmlrpc.returntype #return_int_success()
*/ | Remove an action from the Action Chain | removeAction | {
"repo_name": "xkollar/spacewalk",
"path": "java/code/src/com/redhat/rhn/frontend/xmlrpc/chain/ActionChainHandler.java",
"license": "gpl-2.0",
"size": 21753
} | [
"com.redhat.rhn.domain.action.ActionChain",
"com.redhat.rhn.domain.action.ActionChainEntry",
"com.redhat.rhn.domain.action.ActionChainFactory",
"com.redhat.rhn.domain.user.User",
"com.redhat.rhn.frontend.xmlrpc.BaseHandler",
"com.redhat.rhn.frontend.xmlrpc.NoSuchActionException"
] | import com.redhat.rhn.domain.action.ActionChain; import com.redhat.rhn.domain.action.ActionChainEntry; import com.redhat.rhn.domain.action.ActionChainFactory; import com.redhat.rhn.domain.user.User; import com.redhat.rhn.frontend.xmlrpc.BaseHandler; import com.redhat.rhn.frontend.xmlrpc.NoSuchActionException; | import com.redhat.rhn.domain.action.*; import com.redhat.rhn.domain.user.*; import com.redhat.rhn.frontend.xmlrpc.*; | [
"com.redhat.rhn"
] | com.redhat.rhn; | 2,689,121 |
@Override
public String toString() {
if (isEmpty()) {
return "{}"; //$NON-NLS-1$
}
StringBuilder buffer = new StringBuilder(size() * 28);
buffer.append('{');
Iterator<Map.Entry<K, V>> it = entrySet().iterator();
while (it.hasNext()) {
Map.Entry<K, V> entry = it.next();
Object key = entry.getKey();
if (key != this) {
buffer.append(key);
} else {
buffer.append("(this Map)"); //$NON-NLS-1$
}
buffer.append('=');
Object value = entry.getValue();
if (value != this) {
buffer.append(value);
} else {
buffer.append("(this Map)"); //$NON-NLS-1$
}
if (it.hasNext()) {
buffer.append(", "); //$NON-NLS-1$
}
}
buffer.append('}');
return buffer.toString();
}
/**
* Returns a collection of the values contained in this map. The collection
* is backed by this map so changes to one are reflected by the other. The
* collection supports remove, removeAll, retainAll and clear operations,
* and it does not support add or addAll operations.
* <p>
* This method returns a collection which is the subclass of
* AbstractCollection. The iterator method of this subclass returns a
* "wrapper object" over the iterator of map's entrySet(). The {@code size} | String function() { if (isEmpty()) { return "{}"; } StringBuilder buffer = new StringBuilder(size() * 28); buffer.append('{'); Iterator<Map.Entry<K, V>> it = entrySet().iterator(); while (it.hasNext()) { Map.Entry<K, V> entry = it.next(); Object key = entry.getKey(); if (key != this) { buffer.append(key); } else { buffer.append(STR); } buffer.append('='); Object value = entry.getValue(); if (value != this) { buffer.append(value); } else { buffer.append(STR); } if (it.hasNext()) { buffer.append(STR); } } buffer.append('}'); return buffer.toString(); } /** * Returns a collection of the values contained in this map. The collection * is backed by this map so changes to one are reflected by the other. The * collection supports remove, removeAll, retainAll and clear operations, * and it does not support add or addAll operations. * <p> * This method returns a collection which is the subclass of * AbstractCollection. The iterator method of this subclass returns a * STR over the iterator of map's entrySet(). The {@code size} | /**
* Returns the string representation of this map.
*
* @return the string representation of this map.
*/ | Returns the string representation of this map | toString | {
"repo_name": "Programming-Systems-Lab/phosphor",
"path": "Phosphor/src/main/java/edu/columbia/cs/psl/phosphor/struct/harmony/util/AbstractMap.java",
"license": "mit",
"size": 15170
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 1,164,382 |
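The "(this Map)" branch only fires when a map is stored inside itself; a small sketch against java.util.HashMap, whose standard AbstractMap-inherited toString behaves the same way as the method above:

import java.util.HashMap;
import java.util.Map;

public class SelfReferenceDemo {
    public static void main(String[] args) {
        Map<String, Object> map = new HashMap<>();
        map.put("answer", 42);
        map.put("self", map);    // the map now contains itself as a value
        System.out.println(map); // prints something like {answer=42, self=(this Map)}
    }
}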
boolean supports( final Path path ); | boolean supports( final Path path ); | /**
* Helpers signal whether it supports the given Path
* @param path
* The Path that was deleted.
* @return
*/ | Helpers signal whether it supports the given Path | supports | {
"repo_name": "kiereleaseuser/uberfire",
"path": "uberfire-extensions/uberfire-commons-editor/uberfire-commons-editor-backend/src/main/java/org/uberfire/ext/editor/commons/backend/service/helper/DeleteHelper.java",
"license": "apache-2.0",
"size": 1273
} | [
"org.uberfire.backend.vfs.Path"
] | import org.uberfire.backend.vfs.Path; | import org.uberfire.backend.vfs.*; | [
"org.uberfire.backend"
] | org.uberfire.backend; | 590,290 |
public double getSpecialDoubleProperty(String name,
Hashtable valueSet,
String defaultKey)
{
return this.getDoubleAttribute(name, valueSet, defaultKey, true);
} | double function(String name, Hashtable valueSet, String defaultKey) { return this.getDoubleAttribute(name, valueSet, defaultKey, true); } | /**
* Returns an attribute by looking up a key in a hashtable.
*
* @deprecated Use {@link #getDoubleAttribute(java.lang.String,
* java.util.Hashtable, java.lang.String, boolean)
* getDoubleAttribute} instead.
*/ | Returns an attribute by looking up a key in a hashtable | getSpecialDoubleProperty | {
"repo_name": "lsilvestre/Jogre",
"path": "api/src/nanoxml/XMLElement.java",
"license": "gpl-2.0",
"size": 99514
} | [
"java.util.Hashtable"
] | import java.util.Hashtable; | import java.util.*; | [
"java.util"
] | java.util; | 1,896,477 |
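Since this is only a deprecated delegate, callers are expected to move to getDoubleAttribute directly, as the @deprecated note says; a hedged before/after sketch, where element is an existing XMLElement instance and the attribute name and value table are made up:

Hashtable valueSet = new Hashtable();
valueSet.put("small", new Double(8.0));
valueSet.put("large", new Double(24.0));

// deprecated form
double oldStyle = element.getSpecialDoubleProperty("size", valueSet, "small");

// preferred form; the trailing true mirrors what the delegate passes internally
double newStyle = element.getDoubleAttribute("size", valueSet, "small", true);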
private static String getClassName(String fullClassName) {
if(!StringUtil.isSet(fullClassName)) {
return null;
}
fullClassName += " ";
char[] classNameArray = fullClassName.toCharArray();
StringBuilder buffer = new StringBuilder();
List<String> tokens = new ArrayList<String>();
String className;
int dotIndex;
for(int i = 0; i < fullClassName.length(); ++i) {
if("<>, ".indexOf(classNameArray[i]) > -1) {
if(buffer.length() > 0) {
className = buffer.toString();
dotIndex = className.lastIndexOf('.');
if(dotIndex > -1) {
className = className.substring(dotIndex+1);
}
tokens.add(className.trim());
}
buffer = new StringBuilder();
tokens.add(classNameArray[i] + "");
}
else {
buffer.append(classNameArray[i]);
}
}
StringBuilder result = new StringBuilder();
for (String string : tokens) {
result.append(string);
}
return result.toString().trim();
}
| static String function(String fullClassName) { if(!StringUtil.isSet(fullClassName)) { return null; } fullClassName += " "; char[] classNameArray = fullClassName.toCharArray(); StringBuilder buffer = new StringBuilder(); List<String> tokens = new ArrayList<String>(); String className; int dotIndex; for(int i = 0; i < fullClassName.length(); ++i) { if(STR.indexOf(classNameArray[i]) > -1) { if(buffer.length() > 0) { className = buffer.toString(); dotIndex = className.lastIndexOf('.'); if(dotIndex > -1) { className = className.substring(dotIndex+1); } tokens.add(className.trim()); } buffer = new StringBuilder(); tokens.add(classNameArray[i] + ""); } else { buffer.append(classNameArray[i]); } } StringBuilder result = new StringBuilder(); for (String string : tokens) { result.append(string); } return result.toString().trim(); } | /**
* Converts a full class name into just the class name.
*
* @param fullClassName
* an objects fully qualified class name.
* @return The class name of the object with the package portion removed.
*/ | Converts a full class name into just the class name | getClassName | {
"repo_name": "justinrknowles/toshookan",
"path": "src/main/java/toshookan/domainmodel/VelocityDomainModelCodeGenerator.java",
"license": "apache-2.0",
"size": 16799
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,000,775 |
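Because the walk treats '<', '>', ',' and spaces as token boundaries and strips the package from every buffered token, parameterized types are shortened as a whole. An illustrative expectation (the method is private to the generator, so this call only compiles inside that class; the type names are made up):

String fullName = "java.util.Map<java.lang.String, java.util.List<com.example.Foo>>";
String shortName = getClassName(fullName); // expected to yield "Map<String, List<Foo>>"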
@Override
public Spans getSpans(final LeafReaderContext context, final Bits acceptDocs, final Map<Term,TermContext> termContexts) throws IOException {
ArrayList<Spans> containerContained = prepareConjunction(context, acceptDocs, termContexts);
if (containerContained == null) {
return null;
}
Spans big = containerContained.get(0);
Spans little = containerContained.get(1);
return new ContainSpans(big, little, little) { | Spans function(final LeafReaderContext context, final Bits acceptDocs, final Map<Term,TermContext> termContexts) throws IOException { ArrayList<Spans> containerContained = prepareConjunction(context, acceptDocs, termContexts); if (containerContained == null) { return null; } Spans big = containerContained.get(0); Spans little = containerContained.get(1); return new ContainSpans(big, little, little) { | /**
* Return spans from <code>little</code> that are contained in a spans from <code>big</code>.
* The payload is from the spans of <code>little</code>.
*/ | Return spans from <code>little</code> that are contained in a spans from <code>big</code>. The payload is from the spans of <code>little</code> | getSpans | {
"repo_name": "q474818917/solr-5.2.0",
"path": "lucene/core/src/java/org/apache/lucene/search/spans/SpanWithinQuery.java",
"license": "apache-2.0",
"size": 3931
} | [
"java.io.IOException",
"java.util.ArrayList",
"java.util.Map",
"org.apache.lucene.index.LeafReaderContext",
"org.apache.lucene.index.Term",
"org.apache.lucene.index.TermContext",
"org.apache.lucene.util.Bits"
] | import java.io.IOException; import java.util.ArrayList; import java.util.Map; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; import org.apache.lucene.util.Bits; | import java.io.*; import java.util.*; import org.apache.lucene.index.*; import org.apache.lucene.util.*; | [
"java.io",
"java.util",
"org.apache.lucene"
] | java.io; java.util; org.apache.lucene; | 2,819,000 |
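A hedged construction sketch for the query this spans method backs: both clauses target the same field, and only little spans that fall inside a big span are returned. The field name, terms and slop below are made up.

SpanQuery big = new SpanNearQuery(
    new SpanQuery[] {
        new SpanTermQuery(new Term("body", "quick")),
        new SpanTermQuery(new Term("body", "dog"))
    }, 10, true);
SpanQuery little = new SpanTermQuery(new Term("body", "fox"));
SpanWithinQuery query = new SpanWithinQuery(big, little); // matches "fox" only inside a quick..dog window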
public Options validateIndices(Boolean validateIndices) {
this.validateIndices = validateIndices;
return this;
}
}
@OpInputsMetadata(
outputsClass = DenseToDenseSetOperation.class
)
public static class Inputs<T extends TType> extends RawOpInputs<DenseToDenseSetOperation<T>> {
public final Operand<T> set1;
public final Operand<T> set2;
public final String setOperation;
public final boolean validateIndices;
public final DataType T;
public Inputs(GraphOperation op) {
super(new DenseToDenseSetOperation<>(op), op, Arrays.asList("set_operation", "validate_indices", "T"));
int inputIndex = 0;
set1 = (Operand<T>) op.input(inputIndex++);
set2 = (Operand<T>) op.input(inputIndex++);
setOperation = op.attributes().getAttrString("set_operation");
validateIndices = op.attributes().getAttrBool("validate_indices");
T = op.attributes().getAttrType("T");
}
} | Options function(Boolean validateIndices) { this.validateIndices = validateIndices; return this; } } @OpInputsMetadata( outputsClass = DenseToDenseSetOperation.class ) static class Inputs<T extends TType> extends RawOpInputs<DenseToDenseSetOperation<T>> { public final Operand<T> set1; public final Operand<T> set2; public final String setOperation; public final boolean function; final DataType T; public Inputs(GraphOperation op) { super(new DenseToDenseSetOperation<>(op), op, Arrays.asList(STR, STR, "T")); int inputIndex = 0; set1 = (Operand<T>) op.input(inputIndex++); set2 = (Operand<T>) op.input(inputIndex++); setOperation = op.attributes().getAttrString(STR); function = op.attributes().getAttrBool(STR); T = op.attributes().getAttrType("T"); } } | /**
* Sets the validateIndices option.
*
* @param validateIndices the validateIndices option
* @return this Options instance.
*/ | Sets the validateIndices option | validateIndices | {
"repo_name": "tensorflow/java",
"path": "tensorflow-core/tensorflow-core-api/src/gen/java/org/tensorflow/op/sparse/DenseToDenseSetOperation.java",
"license": "apache-2.0",
"size": 7203
} | [
"java.util.Arrays",
"org.tensorflow.GraphOperation",
"org.tensorflow.Operand",
"org.tensorflow.op.RawOpInputs",
"org.tensorflow.op.annotation.OpInputsMetadata",
"org.tensorflow.proto.framework.DataType",
"org.tensorflow.types.family.TType"
] | import java.util.Arrays; import org.tensorflow.GraphOperation; import org.tensorflow.Operand; import org.tensorflow.op.RawOpInputs; import org.tensorflow.op.annotation.OpInputsMetadata; import org.tensorflow.proto.framework.DataType; import org.tensorflow.types.family.TType; | import java.util.*; import org.tensorflow.*; import org.tensorflow.op.*; import org.tensorflow.op.annotation.*; import org.tensorflow.proto.framework.*; import org.tensorflow.types.family.*; | [
"java.util",
"org.tensorflow",
"org.tensorflow.op",
"org.tensorflow.proto",
"org.tensorflow.types"
] | java.util; org.tensorflow; org.tensorflow.op; org.tensorflow.proto; org.tensorflow.types; | 1,187,165 |
Set<LogListener> getLogListeners(); | Set<LogListener> getLogListeners(); | /**
* Gets a list of {@link LogListener}.
*/ | Gets a list of <code>LogListener</code> | getLogListeners | {
"repo_name": "tlehoux/camel",
"path": "camel-core/src/main/java/org/apache/camel/CamelContext.java",
"license": "apache-2.0",
"size": 77596
} | [
"java.util.Set",
"org.apache.camel.spi.LogListener"
] | import java.util.Set; import org.apache.camel.spi.LogListener; | import java.util.*; import org.apache.camel.spi.*; | [
"java.util",
"org.apache.camel"
] | java.util; org.apache.camel; | 1,646,593 |
@Test ( expected = PicturesComparator.PicturesComparatorException.class )
public void comparePicturesWithNullPickedPicture() throws PicturesComparator.PicturesComparatorException {
l(this, "@Test comparePicturesWithNullPickedPicture");
PicturesComparator pc = new PixelByPixelPicturesComparator();
pc.comparePictures(createEmptyBitmap(), null);
} | @Test ( expected = PicturesComparator.PicturesComparatorException.class ) void function() throws PicturesComparator.PicturesComparatorException { l(this, STR); PicturesComparator pc = new PixelByPixelPicturesComparator(); pc.comparePictures(createEmptyBitmap(), null); } | /**
* Tests the comparePictures() with null picked picture
*
* <i>Comparisons with null picked picture must thrown an exception</i>
*/ | Tests the comparePictures() with null picked picture Comparisons with null picked picture must thrown an exception | comparePicturesWithNullPickedPicture | {
"repo_name": "pylapp/SmoothClicker",
"path": "app/app/src/androidTest/java/pylapp/smoothclicker/android/tools/screen/ItPixelByPixelPicturesComparator.java",
"license": "mit",
"size": 12817
} | [
"org.junit.Test"
] | import org.junit.Test; | import org.junit.*; | [
"org.junit"
] | org.junit; | 1,104,951 |
personalDictionary = new PersonalDictionary(personalFile, dictionaryEngine.get_dic_encoding()) {
protected void dictionaryChanged() {
super.dictionaryChanged();
fireDictionaryConfigurationChanged();
}
};
// Fill-in the dictionary engine
List words = personalDictionary.getWords();
for (Iterator iter = words.iterator(); iter.hasNext();) {
String customWord = (String) iter.next();
dictionaryEngine.addCustomWord(customWord);
}
} | personalDictionary = new PersonalDictionary(personalFile, dictionaryEngine.get_dic_encoding()) { void function() { super.dictionaryChanged(); fireDictionaryConfigurationChanged(); } }; List words = personalDictionary.getWords(); for (Iterator iter = words.iterator(); iter.hasNext();) { String customWord = (String) iter.next(); dictionaryEngine.addCustomWord(customWord); } } | /**
* ensure that any direct change to the PersonalDictionary fires an event to the main dictionary
*/ | ensure that any direct change to the PersonalDictionary fires an event to the main dictionary | dictionaryChanged | {
"repo_name": "dbaeli/spell6r",
"path": "src/main/java/org/dts/spell/dictionary/openoffice/OpenOfficeSpellDictionary.java",
"license": "lgpl-2.1",
"size": 5802
} | [
"java.util.Iterator",
"java.util.List"
] | import java.util.Iterator; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,226,243 |
public boolean isEventBelongingToNode(GUIMouseEvent event) {
return isEventBelongingToNode(event, null);
} | boolean function(GUIMouseEvent event) { return isEventBelongingToNode(event, null); } | /**
* Is event belonging to node
* @param event
* @return boolean
*/ | Is event belonging to node | isEventBelongingToNode | {
"repo_name": "andreasdr/tdme",
"path": "src/net/drewke/tdme/gui/nodes/GUINode.java",
"license": "mit",
"size": 35207
} | [
"net.drewke.tdme.gui.events.GUIMouseEvent"
] | import net.drewke.tdme.gui.events.GUIMouseEvent; | import net.drewke.tdme.gui.events.*; | [
"net.drewke.tdme"
] | net.drewke.tdme; | 378,920 |
public static JavadocTags getJavadocTags(TextBlock cmt,
JavadocTagType tagType) {
final String[] text = cmt.getText();
final List<JavadocTag> tags = Lists.newArrayList();
final List<InvalidJavadocTag> invalidTags = Lists.newArrayList();
Pattern blockTagPattern = Pattern.compile("/\\*{2,}\\s*@(\\p{Alpha}+)\\s");
for (int i = 0; i < text.length; i++) {
final String textValue = text[i];
final Matcher blockTagMatcher = blockTagPattern.matcher(textValue);
if ((tagType == JavadocTagType.ALL || tagType == JavadocTagType.BLOCK)
&& blockTagMatcher.find()) {
final String tagName = blockTagMatcher.group(1);
String content = textValue.substring(blockTagMatcher.end(1));
if (content.endsWith("*/")) {
content = content.substring(0, content.length() - 2);
}
final int line = cmt.getStartLineNo() + i;
int col = blockTagMatcher.start(1) - 1;
if (i == 0) {
col += cmt.getStartColNo();
}
if (JavadocTagInfo.isValidName(tagName)) {
tags.add(
new JavadocTag(line, col, tagName, content.trim()));
}
else {
invalidTags.add(new InvalidJavadocTag(line, col, tagName));
}
}
// No block tag, so look for inline validTags
else if (tagType == JavadocTagType.ALL || tagType == JavadocTagType.INLINE) {
lookForInlineTags(cmt, i, tags, invalidTags);
}
blockTagPattern = Pattern.compile("^\\s*\\**\\s*@(\\p{Alpha}+)\\s");
}
return new JavadocTags(tags, invalidTags);
} | static JavadocTags function(TextBlock cmt, JavadocTagType tagType) { final String[] text = cmt.getText(); final List<JavadocTag> tags = Lists.newArrayList(); final List<InvalidJavadocTag> invalidTags = Lists.newArrayList(); Pattern blockTagPattern = Pattern.compile(STR); for (int i = 0; i < text.length; i++) { final String textValue = text[i]; final Matcher blockTagMatcher = blockTagPattern.matcher(textValue); if ((tagType == JavadocTagType.ALL tagType == JavadocTagType.BLOCK) && blockTagMatcher.find()) { final String tagName = blockTagMatcher.group(1); String content = textValue.substring(blockTagMatcher.end(1)); if (content.endsWith("*/")) { content = content.substring(0, content.length() - 2); } final int line = cmt.getStartLineNo() + i; int col = blockTagMatcher.start(1) - 1; if (i == 0) { col += cmt.getStartColNo(); } if (JavadocTagInfo.isValidName(tagName)) { tags.add( new JavadocTag(line, col, tagName, content.trim())); } else { invalidTags.add(new InvalidJavadocTag(line, col, tagName)); } } else if (tagType == JavadocTagType.ALL tagType == JavadocTagType.INLINE) { lookForInlineTags(cmt, i, tags, invalidTags); } blockTagPattern = Pattern.compile(STR); } return new JavadocTags(tags, invalidTags); } | /**
* Gets validTags from a given piece of Javadoc.
* @param cmt
* the Javadoc comment to process.
* @param tagType
* the type of validTags we're interested in
* @return all standalone validTags from the given javadoc.
*/ | Gets validTags from a given piece of Javadoc | getJavadocTags | {
"repo_name": "gallandarakhneorg/checkstyle",
"path": "src/main/java/com/puppycrawl/tools/checkstyle/utils/JavadocUtils.java",
"license": "lgpl-2.1",
"size": 15258
} | [
"com.google.common.collect.Lists",
"com.puppycrawl.tools.checkstyle.api.TextBlock",
"com.puppycrawl.tools.checkstyle.checks.javadoc.InvalidJavadocTag",
"com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocTag",
"com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocTagInfo",
"com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocTags",
"java.util.List",
"java.util.regex.Matcher",
"java.util.regex.Pattern"
] | import com.google.common.collect.Lists; import com.puppycrawl.tools.checkstyle.api.TextBlock; import com.puppycrawl.tools.checkstyle.checks.javadoc.InvalidJavadocTag; import com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocTag; import com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocTagInfo; import com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocTags; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; | import com.google.common.collect.*; import com.puppycrawl.tools.checkstyle.api.*; import com.puppycrawl.tools.checkstyle.checks.javadoc.*; import java.util.*; import java.util.regex.*; | [
"com.google.common",
"com.puppycrawl.tools",
"java.util"
] | com.google.common; com.puppycrawl.tools; java.util; | 2,395,545 |
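A hedged sketch of how a check would typically consume this utility: the comment's TextBlock is handed in together with the tag type of interest, and recognized and unrecognized tags come back separately. The getJavadocBefore call and the variable names are illustrative of a usual call site, not part of this method.

final TextBlock javadoc = contents.getJavadocBefore(ast.getLineNo());
final JavadocTags tags = JavadocUtils.getJavadocTags(javadoc, JavadocUtils.JavadocTagType.BLOCK);
for (JavadocTag tag : tags.getValidTags()) {
    // tag.getTagName() and tag.getLineNo() identify each recognized block tag
}
for (InvalidJavadocTag unknown : tags.getInvalidTags()) {
    // misspelled or unsupported tag names are collected here instead of failing the parse
}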
public static ApprovalDialog getInformationDialog(Dialog owner, ModalityType modal) {
ApprovalDialog result;
result = new ApprovalDialog(owner, modal);
result.setApproveVisible(true);
result.setDiscardVisible(false);
result.setCancelVisible(false);
return result;
} | static ApprovalDialog function(Dialog owner, ModalityType modal) { ApprovalDialog result; result = new ApprovalDialog(owner, modal); result.setApproveVisible(true); result.setDiscardVisible(false); result.setCancelVisible(false); return result; } | /**
* Returns a basic info dialog (ok).
*
* @param owner the owner of the dialog
* @param modal the modality of the dialog
*/ | Returns a basic info dialog (ok) | getInformationDialog | {
"repo_name": "Waikato/fcms-widgets",
"path": "src/main/java/nz/ac/waikato/cms/gui/core/ApprovalDialog.java",
"license": "gpl-3.0",
"size": 14246
} | [
"java.awt.Dialog"
] | import java.awt.Dialog; | import java.awt.*; | [
"java.awt"
] | java.awt; | 96,819 |
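A hedged usage sketch: the factory returns a dialog with only the approve (OK) button visible, so a caller just configures and shows it. The owner, the title, and the assumption that ApprovalDialog exposes the usual Swing window methods are illustrative, not taken from the record.

ApprovalDialog info = ApprovalDialog.getInformationDialog(ownerDialog, Dialog.ModalityType.APPLICATION_MODAL);
info.setTitle("Import finished");
info.pack();
info.setLocationRelativeTo(ownerDialog);
info.setVisible(true); // application-modal, so this blocks until OK is pressed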
public String getPicturePosition() throws SonyProjectorException {
if (!model.isPicturePositionAvailable()) {
throw new SonyProjectorException("Unavailable item " + SonyProjectorItem.PICTURE_POSITION.getName()
+ " for projector model " + model.getName());
}
return model.getPicturePositionNameFromDataCode(getSetting(SonyProjectorItem.PICTURE_POSITION));
} | String function() throws SonyProjectorException { if (!model.isPicturePositionAvailable()) { throw new SonyProjectorException(STR + SonyProjectorItem.PICTURE_POSITION.getName() + STR + model.getName()); } return model.getPicturePositionNameFromDataCode(getSetting(SonyProjectorItem.PICTURE_POSITION)); } | /**
* Request the projector to get the current mode for the picture position setting
*
* @return the current mode for the picture position setting
*
* @throws SonyProjectorException - In case this setting is not available for the projector or any other problem
*/ | Request the projector to get the current mode for the picture position setting | getPicturePosition | {
"repo_name": "openhab/openhab2",
"path": "bundles/org.openhab.binding.sonyprojector/src/main/java/org/openhab/binding/sonyprojector/internal/communication/SonyProjectorConnector.java",
"license": "epl-1.0",
"size": 43215
} | [
"org.openhab.binding.sonyprojector.internal.SonyProjectorException"
] | import org.openhab.binding.sonyprojector.internal.SonyProjectorException; | import org.openhab.binding.sonyprojector.internal.*; | [
"org.openhab.binding"
] | org.openhab.binding; | 93,698 |
public Locale getLocale() {
return m_locale;
} | Locale function() { return m_locale; } | /**
* Returns the locale.<p>
*
* @return the locale
*/ | Returns the locale | getLocale | {
"repo_name": "alkacon/opencms-core",
"path": "src/org/opencms/xml/content/CmsMappingResolutionContext.java",
"license": "lgpl-2.1",
"size": 11806
} | [
"java.util.Locale"
] | import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 2,909,839 |
NetSuiteConnectionProperties getConnectionProperties();
/**
* Return identifier of referenced connection component.
*
* @return referenced connection component's ID or {@code null} | NetSuiteConnectionProperties getConnectionProperties(); /** * Return identifier of referenced connection component. * * @return referenced connection component's ID or {@code null} | /**
* Return connection properties used by this properties object.
*
* @return connection properties
*/ | Return connection properties used by this properties object | getConnectionProperties | {
"repo_name": "Talend/components",
"path": "components/components-netsuite/components-netsuite-definition/src/main/java/org/talend/components/netsuite/NetSuiteProvideConnectionProperties.java",
"license": "apache-2.0",
"size": 1181
} | [
"org.talend.components.netsuite.connection.NetSuiteConnectionProperties"
] | import org.talend.components.netsuite.connection.NetSuiteConnectionProperties; | import org.talend.components.netsuite.connection.*; | [
"org.talend.components"
] | org.talend.components; | 1,126,876 |
public GitRepository createBareRepo(GeneralOptions generalOptions, Path path)
throws RepoException {
GitRepository repo =
GitRepository.newBareRepo(
path, getGitEnvironment(generalOptions.getEnvironment()),
generalOptions.isVerbose(), generalOptions.fetchTimeout, gitNoVerify);
return initRepo(repo);
} | GitRepository function(GeneralOptions generalOptions, Path path) throws RepoException { GitRepository repo = GitRepository.newBareRepo( path, getGitEnvironment(generalOptions.getEnvironment()), generalOptions.isVerbose(), generalOptions.fetchTimeout, gitNoVerify); return initRepo(repo); } | /**
* Create a new initialized repository in the location.
*
* <p>Can be overwritten to create custom GitRepository objects.
*/ | Create a new initialized repository in the location. Can be overwritten to create custom GitRepository objects | createBareRepo | {
"repo_name": "google/copybara",
"path": "java/com/google/copybara/git/GitOptions.java",
"license": "apache-2.0",
"size": 5466
} | [
"com.google.copybara.GeneralOptions",
"com.google.copybara.exception.RepoException",
"java.nio.file.Path"
] | import com.google.copybara.GeneralOptions; import com.google.copybara.exception.RepoException; import java.nio.file.Path; | import com.google.copybara.*; import com.google.copybara.exception.*; import java.nio.file.*; | [
"com.google.copybara",
"java.nio"
] | com.google.copybara; java.nio; | 143,831 |
private void logTimeoutObjectsFrequency() {
StringBuilder sb = new StringBuilder("Timeout objects frequency [");
for (Ignite ignite : G.allGrids()) {
IgniteEx igniteEx = (IgniteEx)ignite;
Map<String, Integer> objFreqMap = new HashMap<>();
Set<GridTimeoutObject> objs = getTimeoutObjects(igniteEx);
for (GridTimeoutObject obj : objs) {
String clsName = obj.getClass().getSimpleName();
Integer cnt = objFreqMap.get(clsName);
if (cnt == null)
objFreqMap.put(clsName, 1);
else
objFreqMap.put(clsName, cnt + 1);
}
sb.append("[")
.append(igniteEx.name()).append(": size=")
.append(objs.size()).append(", ");
for (Map.Entry<String, Integer> entry : objFreqMap.entrySet()) {
sb.append(entry.getKey()).append("=")
.append(entry.getValue())
.append(", ");
}
sb.delete(sb.length() - 2, sb.length())
.append("]; ");
}
sb.delete(sb.length() - 2, sb.length())
.append("]");
info(sb.toString()
.replaceAll("distributed.IgniteTxRemoveTimeoutObjectsTest", "Grid"));
} | void function() { StringBuilder sb = new StringBuilder(STR); for (Ignite ignite : G.allGrids()) { IgniteEx igniteEx = (IgniteEx)ignite; Map<String, Integer> objFreqMap = new HashMap<>(); Set<GridTimeoutObject> objs = getTimeoutObjects(igniteEx); for (GridTimeoutObject obj : objs) { String clsName = obj.getClass().getSimpleName(); Integer cnt = objFreqMap.get(clsName); if (cnt == null) objFreqMap.put(clsName, 1); else objFreqMap.put(clsName, cnt + 1); } sb.append("[") .append(igniteEx.name()).append(STR) .append(objs.size()).append(STR); for (Map.Entry<String, Integer> entry : objFreqMap.entrySet()) { sb.append(entry.getKey()).append("=") .append(entry.getValue()) .append(STR); } sb.delete(sb.length() - 2, sb.length()) .append(STR); } sb.delete(sb.length() - 2, sb.length()) .append("]"); info(sb.toString() .replaceAll("distributed.IgniteTxRemoveTimeoutObjectsTestSTRGrid")); } | /**
* Print the number of each timeout object type on each grid to the log.
*/ | Print the number of each timeout object type on each grid to the log | logTimeoutObjectsFrequency | {
"repo_name": "vladisav/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/IgniteTxRemoveTimeoutObjectsTest.java",
"license": "apache-2.0",
"size": 7168
} | [
"java.util.HashMap",
"java.util.Map",
"java.util.Set",
"org.apache.ignite.Ignite",
"org.apache.ignite.internal.IgniteEx",
"org.apache.ignite.internal.processors.timeout.GridTimeoutObject",
"org.apache.ignite.internal.util.typedef.G"
] | import java.util.HashMap; import java.util.Map; import java.util.Set; import org.apache.ignite.Ignite; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.processors.timeout.GridTimeoutObject; import org.apache.ignite.internal.util.typedef.G; | import java.util.*; import org.apache.ignite.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.processors.timeout.*; import org.apache.ignite.internal.util.typedef.*; | [
"java.util",
"org.apache.ignite"
] | java.util; org.apache.ignite; | 2,304,515 |
private String getLocalizedKey(String propertyKey,
boolean useDefaultLanguage) {
String language;
if (useDefaultLanguage) {
language = defaultLocale.getLanguage();
} else {
Locale locale = Locale.getDefault();
language = locale.getLanguage();
}
return propertyKey + "_" + language;
} | String function(String propertyKey, boolean useDefaultLanguage) { String language; if (useDefaultLanguage) { language = defaultLocale.getLanguage(); } else { Locale locale = Locale.getDefault(); language = locale.getLanguage(); } return propertyKey + "_" + language; } | /**
* Small helper to retrieve the "localized" property key. E.g. key:
* MAIL_SERVER ==> localized version: MAIL_SERVER_en
*
* @param propertyKey
* the property key to be localized.
* @param useDefaultLanguage
* <code>pass true to use the default locale.</code>
* @return the "localized" version of the property key.
*/ | Small helper to retrieve the "localized" property key. E.g. key: MAIL_SERVER ==> localized version: MAIL_SERVER_en | getLocalizedKey | {
"repo_name": "opetrovski/development",
"path": "oscm-devruntime/javasrc/org/oscm/setup/UserNotificationHandler.java",
"license": "apache-2.0",
"size": 18530
} | [
"java.util.Locale"
] | import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 70,198 |
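A minimal standalone sketch of the key-localization rule described above (property key + "_" + ISO language code), using only java.util.Locale; the class and key names are illustrative and not taken from the oscm sources:

import java.util.Locale;

public class LocalizedKeyDemo {
    // Builds "<key>_<language>", e.g. MAIL_SERVER -> MAIL_SERVER_en for English.
    static String localizedKey(String propertyKey, Locale locale) {
        return propertyKey + "_" + locale.getLanguage();
    }

    public static void main(String[] args) {
        System.out.println(localizedKey("MAIL_SERVER", Locale.ENGLISH)); // MAIL_SERVER_en
        System.out.println(localizedKey("MAIL_SERVER", Locale.GERMAN));  // MAIL_SERVER_de
    }
}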
public int onlineConsistencyRepair() throws IOException, KeeperException,
InterruptedException {
clearState();
LOG.info("Loading regionsinfo from the .META. table");
boolean success = loadMetaEntries();
if (!success) return -1;
// Check if .META. is found only once and in the right place
if (!checkMetaRegion()) {
// Will remove later if we can fix it
errors.reportError("Encountered fatal error. Exiting...");
return -2;
}
// get a list of all tables that have not changed recently.
if (!checkMetaOnly) {
reportTablesInFlux();
}
// get regions according to what is online on each RegionServer
loadDeployedRegions();
// load regiondirs and regioninfos from HDFS
loadHdfsRegionDirs();
loadHdfsRegionInfos();
// Empty cells in .META.?
reportEmptyMetaCells();
// Get disabled tables from ZooKeeper
loadDisabledTables();
// fix the orphan tables
fixOrphanTables();
// Check and fix consistency
checkAndFixConsistency();
// Check integrity (does not fix)
checkIntegrity();
return errors.getErrorList().size();
} | int function() throws IOException, KeeperException, InterruptedException { clearState(); LOG.info(STR); boolean success = loadMetaEntries(); if (!success) return -1; if (!checkMetaRegion()) { errors.reportError(STR); return -2; } if (!checkMetaOnly) { reportTablesInFlux(); } loadDeployedRegions(); loadHdfsRegionDirs(); loadHdfsRegionInfos(); reportEmptyMetaCells(); loadDisabledTables(); fixOrphanTables(); checkAndFixConsistency(); checkIntegrity(); return errors.getErrorList().size(); } | /**
* This repair method requires the cluster to be online since it contacts
* region servers and the masters. It makes each region's state in HDFS, in
* .META., and deployments consistent.
*
* @return If > 0 , number of errors detected, if < 0 there was an unrecoverable
* error. If 0, we have a clean hbase.
*/ | This repair method requires the cluster to be online since it contacts region servers and the masters. It makes each region's state in HDFS, in .META., and deployments consistent | onlineConsistencyRepair | {
"repo_name": "matteobertozzi/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java",
"license": "apache-2.0",
"size": 130110
} | [
"java.io.IOException",
"org.apache.zookeeper.KeeperException"
] | import java.io.IOException; import org.apache.zookeeper.KeeperException; | import java.io.*; import org.apache.zookeeper.*; | [
"java.io",
"org.apache.zookeeper"
] | java.io; org.apache.zookeeper; | 738,867 |
@Override public void exitClauseArgs(@NotNull ErlangParser.ClauseArgsContext ctx) { } | @Override public void exitClauseArgs(@NotNull ErlangParser.ClauseArgsContext ctx) { } | /**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/ | The default implementation does nothing | enterClauseArgs | {
"repo_name": "IsThisThePayneResidence/intellidots",
"path": "src/main/java/ua/edu/hneu/ast/parsers/ErlangBaseListener.java",
"license": "gpl-3.0",
"size": 35359
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 559,137 |
@Test
public void errorMessageTest9() throws PcepParseException, PcepOutOfBoundMessageException {
byte[] errorMsg = new byte[]{0x20, 0x06, 0x00, 0x14, // common header
0x0D, 0x10, 0x00, 0x08, // PCEP-ERROR Object Header
0x00, 0x00, 0x01, 0x01, 0x0D, 0x10, 0x00, 0x08, // PCEP-ERROR Object Header
0x00, 0x00, 0x01, 0x01};
ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
buffer.writeBytes(errorMsg);
PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader();
PcepMessage message = null;
message = reader.readFrom(buffer);
byte[] testErrorMsg = {0};
ChannelBuffer buf = ChannelBuffers.dynamicBuffer();
assertThat(message, instanceOf(PcepErrorMsg.class));
message.writeTo(buf);
int iReadLen = buf.writerIndex();
testErrorMsg = new byte[iReadLen];
buf.readBytes(testErrorMsg, 0, iReadLen);
assertThat(testErrorMsg, is(errorMsg));
} | void function() throws PcepParseException, PcepOutOfBoundMessageException { byte[] errorMsg = new byte[]{0x20, 0x06, 0x00, 0x14, 0x0D, 0x10, 0x00, 0x08, 0x00, 0x00, 0x01, 0x01, 0x0D, 0x10, 0x00, 0x08, 0x00, 0x00, 0x01, 0x01}; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(errorMsg); PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader(); PcepMessage message = null; message = reader.readFrom(buffer); byte[] testErrorMsg = {0}; ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); assertThat(message, instanceOf(PcepErrorMsg.class)); message.writeTo(buf); int iReadLen = buf.writerIndex(); testErrorMsg = new byte[iReadLen]; buf.readBytes(testErrorMsg, 0, iReadLen); assertThat(testErrorMsg, is(errorMsg)); } | /**
* This test case checks for
* PCEP-ERROR Object, PCEP-ERROR Object
* in PcepErrorMsg message.
*/ | This test case checks for PCEP-ERROR Object, PCEP-ERROR Object in PcepErrorMsg message | errorMessageTest9 | {
"repo_name": "kuujo/onos",
"path": "protocols/pcep/pcepio/src/test/java/org/onosproject/pcepio/protocol/PcepErrorMsgTest.java",
"license": "apache-2.0",
"size": 29749
} | [
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers",
"org.hamcrest.core.Is",
"org.jboss.netty.buffer.ChannelBuffer",
"org.jboss.netty.buffer.ChannelBuffers",
"org.onosproject.pcepio.exceptions.PcepOutOfBoundMessageException",
"org.onosproject.pcepio.exceptions.PcepParseException"
] | import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.hamcrest.core.Is; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.onosproject.pcepio.exceptions.PcepOutOfBoundMessageException; import org.onosproject.pcepio.exceptions.PcepParseException; | import org.hamcrest.*; import org.hamcrest.core.*; import org.jboss.netty.buffer.*; import org.onosproject.pcepio.exceptions.*; | [
"org.hamcrest",
"org.hamcrest.core",
"org.jboss.netty",
"org.onosproject.pcepio"
] | org.hamcrest; org.hamcrest.core; org.jboss.netty; org.onosproject.pcepio; | 531,580 |
final public Collection<Enrolment> getPropaedeuticEnrolments() {
final Collection<Enrolment> result = new ArrayList<Enrolment>();
for (final Enrolment enrolment : getEnrolmentsSet()) {
if (enrolment.isPropaedeutic()) {
result.add(enrolment);
}
}
return result;
} | final Collection<Enrolment> function() { final Collection<Enrolment> result = new ArrayList<Enrolment>(); for (final Enrolment enrolment : getEnrolmentsSet()) { if (enrolment.isPropaedeutic()) { result.add(enrolment); } } return result; } | /**
* Note that this method must not use the ExtraCurriculumGroup due to the
* pre-Bolonha SCPs
* @return get propaedeutic enrolments
*/ | Note that this method must not use the ExtraCurriculumGroup due to the pre-Bolonha SCPs | getPropaedeuticEnrolments | {
"repo_name": "gil-l/fenix",
"path": "src/main/java/org/fenixedu/academic/domain/StudentCurricularPlan.java",
"license": "lgpl-3.0",
"size": 115072
} | [
"java.util.ArrayList",
"java.util.Collection"
] | import java.util.ArrayList; import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 1,602,798 |
public static Translation[] getTranslations(
final String translations_file) throws Exception
{
final ArrayList<Translation> trans_arr = new ArrayList<Translation>();
// Open the file
final FileInputStream fstream = new FileInputStream(translations_file);
try {
// Convert file into buffered reader which can read line-by-line
final DataInputStream in = new DataInputStream(fstream);
final BufferedReader br = new BufferedReader(new InputStreamReader(in));
String strLine;
int countLines = 0;
// Read File Line By Line
while ((strLine = br.readLine()) != null)
{
// Count lines read from the file
countLines++;
// Remove spaces
strLine = strLine.trim();
// Skip comments
if (strLine.length() <= 0 ||
strLine.startsWith("#"))
continue;
// Expect some_regular_expression_pattern = translation
final int separator = strLine.indexOf("=");
if (separator < 0) {
br.close();
throw new Exception("Missing separator in line "
+ countLines);
}
// Add pattern & replacement to array of translations
final String pattern = strLine.substring(0, separator);
final String replacement = strLine.substring(separator + 1,
strLine.length());
trans_arr.add(new Translation(pattern, replacement));
}
br.close();
}
finally
{
// Close the input stream
fstream.close();
}
// Convert array list into plain array
return trans_arr.toArray(new Translation[trans_arr.size()]);
} | static Translation[] function( final String translations_file) throws Exception { final ArrayList<Translation> trans_arr = new ArrayList<Translation>(); final FileInputStream fstream = new FileInputStream(translations_file); try { final DataInputStream in = new DataInputStream(fstream); final BufferedReader br = new BufferedReader(new InputStreamReader(in)); String strLine; int countLines = 0; while ((strLine = br.readLine()) != null) { countLines++; strLine = strLine.trim(); if (strLine.length() <= 0 strLine.startsWith("#")) continue; final int separator = strLine.indexOf("="); if (separator < 0) { br.close(); throw new Exception(STR + countLines); } final String pattern = strLine.substring(0, separator); final String replacement = strLine.substring(separator + 1, strLine.length()); trans_arr.add(new Translation(pattern, replacement)); } br.close(); } finally { fstream.close(); } return trans_arr.toArray(new Translation[trans_arr.size()]); } | /** Read translations from file
* @param filename File to parse
* @return Array of translations
* @throws Exception on error (file not found, parse error)
*/ | Read translations from file | getTranslations | {
"repo_name": "css-iter/cs-studio",
"path": "applications/alarm/alarm-plugins/org.csstudio.alarm.beast.annunciator/src/org/csstudio/alarm/beast/annunciator/model/TranslationFileReader.java",
"license": "epl-1.0",
"size": 3548
} | [
"java.io.BufferedReader",
"java.io.DataInputStream",
"java.io.FileInputStream",
"java.io.InputStreamReader",
"java.util.ArrayList",
"org.csstudio.utility.speech.Translation"
] | import java.io.BufferedReader; import java.io.DataInputStream; import java.io.FileInputStream; import java.io.InputStreamReader; import java.util.ArrayList; import org.csstudio.utility.speech.Translation; | import java.io.*; import java.util.*; import org.csstudio.utility.speech.*; | [
"java.io",
"java.util",
"org.csstudio.utility"
] | java.io; java.util; org.csstudio.utility; | 823,317 |
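A hedged sketch of the line format the reader above expects (pattern = replacement, '#' comments, blank lines ignored), parsed here into a plain Map with JDK I/O only; the real reader builds org.csstudio.utility.speech.Translation objects instead, and the sample pattern shown in the comment is an assumption:

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.LinkedHashMap;
import java.util.Map;

public class TranslationFileDemo {
    // Example file contents:
    //   # regex pattern = spoken replacement
    //   PV\s+ = process variable
    static Map<String, String> parse(String fileName) throws Exception {
        Map<String, String> result = new LinkedHashMap<>();
        try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
            String line;
            int lineNo = 0;
            while ((line = br.readLine()) != null) {
                lineNo++;
                line = line.trim();
                if (line.isEmpty() || line.startsWith("#"))
                    continue;                       // skip blanks and comments
                int sep = line.indexOf('=');
                if (sep < 0)
                    throw new Exception("Missing separator in line " + lineNo);
                result.put(line.substring(0, sep), line.substring(sep + 1));
            }
        }
        return result;
    }
}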
protected String generateUniqueName() {
return generateUniqueName(name.getMethodName().replace('[', '-').replace("]", ""));
}
private static class DataFlowShell extends JLineShellComponent {
private final JLineShellComponent shell;
public DataFlowShell(JLineShellComponent shell) {
this.shell = shell;
} | String function() { return generateUniqueName(name.getMethodName().replace('[', '-').replace("]", "")); } private static class DataFlowShell extends JLineShellComponent { private final JLineShellComponent shell; public DataFlowShell(JLineShellComponent shell) { this.shell = shell; } | /**
* Return a unique random name for stream/task testing.
*
* @return unique random stream/task name
*/ | Return a unique random name for stream/task testing | generateUniqueName | {
"repo_name": "jvalkeal/spring-cloud-data",
"path": "spring-cloud-dataflow-shell-core/src/test/java/org/springframework/cloud/dataflow/shell/AbstractShellIntegrationTest.java",
"license": "apache-2.0",
"size": 6888
} | [
"org.springframework.shell.core.JLineShellComponent"
] | import org.springframework.shell.core.JLineShellComponent; | import org.springframework.shell.core.*; | [
"org.springframework.shell"
] | org.springframework.shell; | 1,437,293 |
private Hashtable<String, RegistryPackageDocument> transformSensorDescriptions(Collection<SirSearchResultElement> sensors) {
Hashtable<String, RegistryPackageDocument> transformedDocs = new Hashtable<>();
XmlObject description;
for (SirSearchResultElement sensorResultElem : sensors) {
log.debug("Transforming sensor description of sensor {}", sensorResultElem.getSensorId());
// get SensorML
SirXmlSensorDescription sensorDescr = (SirXmlSensorDescription) sensorResultElem.getSensorDescription();
description = sensorDescr.getDescription();
// having problems with XmlValueDisconnectedException, try transforming a deep copy
XmlObject copy = description.copy();
boolean isConform;
try {
isConform = acceptsDocument(copy);
}
catch (OwsExceptionReport | IOException e) {
log.error("Could not check if catalog accepts the given document!", e);
continue;
}
if (isConform) {
// transform SensorML
XmlObject ebrimDescription;
try {
if (copy instanceof SystemType) {
SystemType st = (SystemType) copy;
ebrimDescription = this.transformer.transform(st);
}
else if (copy instanceof SensorMLDocument) {
SensorMLDocument smlDoc = (SensorMLDocument) copy;
ebrimDescription = this.transformer.transform(smlDoc);
}
else {
throw new UnsupportedOperationException("Sensor description is of unsupported type (must be either SystemType or SensorMLDocument): "
+ copy.xmlText());
}
}
catch (XmlException | TransformerException | XmlValueDisconnectedException | IOException e) {
log.error("Exception: Could not transform sensor description: " + XmlTools.inspect(description)
+ "\n - TransformerException: " + e);
transformedDocs.put(sensorResultElem.getSensorId(), ITransformer.TRANSFORMATION_ERROR_OBJECT);
continue;
}
// convert transformed document to identifiable type
RegistryPackageDocument registryPackage;
try {
registryPackage = RegistryPackageDocument.Factory.parse(ebrimDescription.getDomNode());
}
catch (XmlException e) {
log.warn("Could not parse sensor description to IdentifiableType: "
+ XmlTools.inspect(ebrimDescription),
e);
transformedDocs.put(sensorResultElem.getSensorId(), ITransformer.TRANSFORMATION_ERROR_OBJECT);
continue;
}
if (SirConfigurator.getInstance().isValidateRequests()) {
// check if the transformed document for the outgoing request is valid and warn if not.
if (registryPackage.validate()) {
log.debug("Added new (valid!) transformed sensor description for sensor with id {}:\n{}",
sensorResultElem.getSensorId(),
XmlTools.inspect(ebrimDescription));
}
else {
String errors = XmlTools.validateAndIterateErrors(registryPackage);
log.warn("Transformed sensor description sensor with id " + sensorResultElem.getSensorId()
+ " IS NOT VALID and might not be accepted by the service.");
log.debug("\nErrors:\t{}\nebRIM:\t{}", errors, ebrimDescription.xmlText());
}
}
// add transformed document and sensor id
transformedDocs.put(sensorResultElem.getSensorId(), registryPackage);
log.info("Transformed sensor with id " + sensorResultElem.getSensorId()
+ " to RegistryPackage with id " + registryPackage.getIdentifiable().getId());
}
else {
log.warn("Could not transform sensor description of sensor " + sensorResultElem.getSensorId()
+ ". It is not conform with this catalog's required profile!");
transformedDocs.put(sensorResultElem.getSensorId(), ITransformer.TRANSFORMATION_ERROR_OBJECT);
}
} // for loop
return transformedDocs;
} | Hashtable<String, RegistryPackageDocument> function(Collection<SirSearchResultElement> sensors) { Hashtable<String, RegistryPackageDocument> transformedDocs = new Hashtable<>(); XmlObject description; for (SirSearchResultElement sensorResultElem : sensors) { log.debug(STR, sensorResultElem.getSensorId()); SirXmlSensorDescription sensorDescr = (SirXmlSensorDescription) sensorResultElem.getSensorDescription(); description = sensorDescr.getDescription(); XmlObject copy = description.copy(); boolean isConform; try { isConform = acceptsDocument(copy); } catch (OwsExceptionReport IOException e) { log.error(STR, e); continue; } if (isConform) { XmlObject ebrimDescription; try { if (copy instanceof SystemType) { SystemType st = (SystemType) copy; ebrimDescription = this.transformer.transform(st); } else if (copy instanceof SensorMLDocument) { SensorMLDocument smlDoc = (SensorMLDocument) copy; ebrimDescription = this.transformer.transform(smlDoc); } else { throw new UnsupportedOperationException(STR + copy.xmlText()); } } catch (XmlException TransformerException XmlValueDisconnectedException IOException e) { log.error(STR + XmlTools.inspect(description) + STR + e); transformedDocs.put(sensorResultElem.getSensorId(), ITransformer.TRANSFORMATION_ERROR_OBJECT); continue; } RegistryPackageDocument registryPackage; try { registryPackage = RegistryPackageDocument.Factory.parse(ebrimDescription.getDomNode()); } catch (XmlException e) { log.warn(STR + XmlTools.inspect(ebrimDescription), e); transformedDocs.put(sensorResultElem.getSensorId(), ITransformer.TRANSFORMATION_ERROR_OBJECT); continue; } if (SirConfigurator.getInstance().isValidateRequests()) { if (registryPackage.validate()) { log.debug(STR, sensorResultElem.getSensorId(), XmlTools.inspect(ebrimDescription)); } else { String errors = XmlTools.validateAndIterateErrors(registryPackage); log.warn(STR + sensorResultElem.getSensorId() + STR); log.debug(STR, errors, ebrimDescription.xmlText()); } } transformedDocs.put(sensorResultElem.getSensorId(), registryPackage); log.info(STR + sensorResultElem.getSensorId() + STR + registryPackage.getIdentifiable().getId()); } else { log.warn(STR + sensorResultElem.getSensorId() + STR); transformedDocs.put(sensorResultElem.getSensorId(), ITransformer.TRANSFORMATION_ERROR_OBJECT); } } return transformedDocs; } | /**
*
 * Method tries to transform the given collection of sensor description documents. If there is a problem
* with one of the documents the returned list will contain the kvp <"sensorID", NULL>. This allows
* alerting the user to check the log for details.
*
* @param sensors
* @return
 */ | Method tries to transform the given collection of sensor description documents. If there is a problem with one of the documents the returned list will contain the kvp <"sensorID", NULL>. This allows alerting the user to check the log for details | transformSensorDescriptions | {
"repo_name": "52North/OpenSensorSearch",
"path": "service/src/main/java/org/n52/sir/catalog/csw/CswCatalog.java",
"license": "apache-2.0",
"size": 33984
} | [
"java.io.IOException",
"java.util.Collection",
"java.util.Hashtable",
"javax.xml.transform.TransformerException",
"net.opengis.sensorML.x101.SensorMLDocument",
"net.opengis.sensorML.x101.SystemType",
"org.apache.xmlbeans.XmlException",
"org.apache.xmlbeans.XmlObject",
"org.apache.xmlbeans.impl.values.XmlValueDisconnectedException",
"org.n52.oss.sir.api.SirSearchResultElement",
"org.n52.oss.sir.api.SirXmlSensorDescription",
"org.n52.oss.sir.ows.OwsExceptionReport",
"org.n52.oss.util.XmlTools",
"org.n52.sir.SirConfigurator",
"org.n52.sir.xml.ITransformer"
] | import java.io.IOException; import java.util.Collection; import java.util.Hashtable; import javax.xml.transform.TransformerException; import net.opengis.sensorML.x101.SensorMLDocument; import net.opengis.sensorML.x101.SystemType; import org.apache.xmlbeans.XmlException; import org.apache.xmlbeans.XmlObject; import org.apache.xmlbeans.impl.values.XmlValueDisconnectedException; import org.n52.oss.sir.api.SirSearchResultElement; import org.n52.oss.sir.api.SirXmlSensorDescription; import org.n52.oss.sir.ows.OwsExceptionReport; import org.n52.oss.util.XmlTools; import org.n52.sir.SirConfigurator; import org.n52.sir.xml.ITransformer; | import java.io.*; import java.util.*; import javax.xml.transform.*; import net.opengis.*; import org.apache.xmlbeans.*; import org.apache.xmlbeans.impl.values.*; import org.n52.oss.sir.api.*; import org.n52.oss.sir.ows.*; import org.n52.oss.util.*; import org.n52.sir.*; import org.n52.sir.xml.*; | [
"java.io",
"java.util",
"javax.xml",
"net.opengis",
"org.apache.xmlbeans",
"org.n52.oss",
"org.n52.sir"
] | java.io; java.util; javax.xml; net.opengis; org.apache.xmlbeans; org.n52.oss; org.n52.sir; | 1,052,727 |
public ViewHolder setText(int viewId, String text){
TextView tv = getView(viewId);
tv.setText(text);
return this;
} | ViewHolder function(int viewId, String text){ TextView tv = getView(viewId); tv.setText(text); return this; } | /**
* set text
*/ | set text | setText | {
"repo_name": "LSL-Git/ImageLabelerApp",
"path": "app/src/main/java/app/com/lsl/imagelabelerapp/lsl/utils/ViewHolder.java",
"license": "apache-2.0",
"size": 2244
} | [
"android.widget.TextView"
] | import android.widget.TextView; | import android.widget.*; | [
"android.widget"
] | android.widget; | 2,323,840 |
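Because setText() returns the holder itself, calls can be chained while binding a row; a small hypothetical fragment where R.id.title and R.id.subtitle are placeholder view ids and 'holder' is an existing ViewHolder instance:

// Inside an adapter's bind method; the resource ids and parameters are assumptions.
void bindRow(ViewHolder holder, String title, String subtitle) {
    holder.setText(R.id.title, title)
          .setText(R.id.subtitle, subtitle);
}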
public UniqueID getID(); | UniqueID function(); | /**
* Returns the unique id.
*
* @return The unique id of this active object.
*/ | Returns the unique id | getID | {
"repo_name": "paraita/programming",
"path": "programming-core/src/main/java/org/objectweb/proactive/core/jmx/mbean/BodyWrapperMBean.java",
"license": "agpl-3.0",
"size": 2943
} | [
"org.objectweb.proactive.core.UniqueID"
] | import org.objectweb.proactive.core.UniqueID; | import org.objectweb.proactive.core.*; | [
"org.objectweb.proactive"
] | org.objectweb.proactive; | 1,850,096 |
public CertificateOrderInner withProvisioningState(ProvisioningState provisioningState) {
this.provisioningState = provisioningState;
return this;
} | CertificateOrderInner function(ProvisioningState provisioningState) { this.provisioningState = provisioningState; return this; } | /**
* Set the provisioningState value.
*
* @param provisioningState the provisioningState value to set
* @return the CertificateOrderInner object itself.
*/ | Set the provisioningState value | withProvisioningState | {
"repo_name": "herveyw/azure-sdk-for-java",
"path": "azure-mgmt-website/src/main/java/com/microsoft/azure/management/website/implementation/CertificateOrderInner.java",
"license": "mit",
"size": 11765
} | [
"com.microsoft.azure.management.website.ProvisioningState"
] | import com.microsoft.azure.management.website.ProvisioningState; | import com.microsoft.azure.management.website.*; | [
"com.microsoft.azure"
] | com.microsoft.azure; | 401,651 |
logger.info("selectFeatures");
Configuration configuration = getConfiguration();
Dataframe[] data = Datasets.featureTransformationPCA(configuration);
Dataframe originalData = data[0];
Dataframe validationData = data[0].copy();
Dataframe expResult = data[1];
String storageName = this.getClass().getSimpleName();
PCA.TrainingParameters param = new PCA.TrainingParameters();
param.setMaxDimensions(null);
PCA instance = MLBuilder.create(param, configuration);
instance.fit_transform(originalData);
instance.save(storageName);
originalData.close();
instance.close();
instance = MLBuilder.load(PCA.class, storageName, configuration);
instance.transform(validationData);
assertEquals(validationData.size(), expResult.size());
Iterator<Record> itResult = validationData.iterator();
Iterator<Record> itExpectedResult = expResult.iterator();
while(itResult.hasNext()) {
Record r1 = itResult.next();
Record r2 = itExpectedResult.next();
for(Map.Entry<Object, Object> entry : r1.getX().entrySet()) {
Object feature = entry.getKey();
Double value = TypeInference.toDouble(entry.getValue());
assertEquals(TypeInference.toDouble(r2.getX().get(feature)), value, Constants.DOUBLE_ACCURACY_MEDIUM);
}
}
instance.delete();
validationData.close();
expResult.close();
} | logger.info(STR); Configuration configuration = getConfiguration(); Dataframe[] data = Datasets.featureTransformationPCA(configuration); Dataframe originalData = data[0]; Dataframe validationData = data[0].copy(); Dataframe expResult = data[1]; String storageName = this.getClass().getSimpleName(); PCA.TrainingParameters param = new PCA.TrainingParameters(); param.setMaxDimensions(null); PCA instance = MLBuilder.create(param, configuration); instance.fit_transform(originalData); instance.save(storageName); originalData.close(); instance.close(); instance = MLBuilder.load(PCA.class, storageName, configuration); instance.transform(validationData); assertEquals(validationData.size(), expResult.size()); Iterator<Record> itResult = validationData.iterator(); Iterator<Record> itExpectedResult = expResult.iterator(); while(itResult.hasNext()) { Record r1 = itResult.next(); Record r2 = itExpectedResult.next(); for(Map.Entry<Object, Object> entry : r1.getX().entrySet()) { Object feature = entry.getKey(); Double value = TypeInference.toDouble(entry.getValue()); assertEquals(TypeInference.toDouble(r2.getX().get(feature)), value, Constants.DOUBLE_ACCURACY_MEDIUM); } } instance.delete(); validationData.close(); expResult.close(); } | /**
* Test of selectFeatures method, of class PCA.
*/ | Test of selectFeatures method, of class PCA | testSelectFeatures | {
"repo_name": "datumbox/datumbox-framework",
"path": "datumbox-framework-core/src/test/java/com/datumbox/framework/core/machinelearning/featureselection/PCATest.java",
"license": "apache-2.0",
"size": 3228
} | [
"com.datumbox.framework.common.Configuration",
"com.datumbox.framework.common.dataobjects.TypeInference",
"com.datumbox.framework.core.Datasets",
"com.datumbox.framework.core.common.dataobjects.Dataframe",
"com.datumbox.framework.core.common.dataobjects.Record",
"com.datumbox.framework.core.machinelearning.MLBuilder",
"com.datumbox.framework.tests.Constants",
"java.util.Iterator",
"java.util.Map",
"org.junit.Assert"
] | import com.datumbox.framework.common.Configuration; import com.datumbox.framework.common.dataobjects.TypeInference; import com.datumbox.framework.core.Datasets; import com.datumbox.framework.core.common.dataobjects.Dataframe; import com.datumbox.framework.core.common.dataobjects.Record; import com.datumbox.framework.core.machinelearning.MLBuilder; import com.datumbox.framework.tests.Constants; import java.util.Iterator; import java.util.Map; import org.junit.Assert; | import com.datumbox.framework.common.*; import com.datumbox.framework.common.dataobjects.*; import com.datumbox.framework.core.*; import com.datumbox.framework.core.common.dataobjects.*; import com.datumbox.framework.core.machinelearning.*; import com.datumbox.framework.tests.*; import java.util.*; import org.junit.*; | [
"com.datumbox.framework",
"java.util",
"org.junit"
] | com.datumbox.framework; java.util; org.junit; | 2,113,702 |
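The test above also documents the typical datumbox PCA lifecycle; a condensed sketch using only the calls that appear in it, where configuration, trainingData, and newData are assumed to be set up elsewhere:

PCA.TrainingParameters params = new PCA.TrainingParameters();
params.setMaxDimensions(null);                        // keep all principal components

PCA pca = MLBuilder.create(params, configuration);
pca.fit_transform(trainingData);                      // train and project in one step
pca.save("pcaModel");
pca.close();

PCA loaded = MLBuilder.load(PCA.class, "pcaModel", configuration);
loaded.transform(newData);                            // apply the stored projection
loaded.delete();                                      // remove persisted artifacts when done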
@Override
public AppleBitcodeMode getAppleBitcodeMode() {
return appleBitcodeMode;
} | AppleBitcodeMode function() { return appleBitcodeMode; } | /**
* Returns the bitcode mode to use for compilation.
*
* <p>Users can control bitcode mode using the {@code apple_bitcode} build flag, but bitcode will
* be disabled for all simulator architectures regardless of this flag.
*/ | Returns the bitcode mode to use for compilation. Users can control bitcode mode using the apple_bitcode build flag, but bitcode will be disabled for all simulator architectures regardless of this flag | getAppleBitcodeMode | {
"repo_name": "twitter-forks/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/cpp/CppConfiguration.java",
"license": "apache-2.0",
"size": 29964
} | [
"com.google.devtools.build.lib.rules.apple.AppleCommandLineOptions"
] | import com.google.devtools.build.lib.rules.apple.AppleCommandLineOptions; | import com.google.devtools.build.lib.rules.apple.*; | [
"com.google.devtools"
] | com.google.devtools; | 412,506 |
private void createMapAnnotationandLinkToProject(long projectId)
throws Exception
{
List<NamedValue> result = new ArrayList<NamedValue>();
result.add(new NamedValue("mitomycin-A", "20mM"));
result.add(new NamedValue("PBS", "10mM"));
result.add(new NamedValue("incubation", "5min"));
result.add(new NamedValue("temperature", "37"));
result.add(new NamedValue("Organism", "Homo sapiens"));
MapAnnotationData data = new MapAnnotationData();
data.setContent(result);
data.setDescription("Training Example");
//Use the following namespace if you want the annotation to be editable
//in the webclient and insight
data.setNameSpace(MapAnnotationData.NS_CLIENT_CREATED);
DataManagerFacility fac = gateway.getFacility(DataManagerFacility.class);
fac.attachAnnotation(ctx, data, new ProjectData(new ProjectI(projectId, false)));
}
// Create file annotation
// ====================== | void function(long projectId) throws Exception { List<NamedValue> result = new ArrayList<NamedValue>(); result.add(new NamedValue(STR, "20mM")); result.add(new NamedValue("PBS", "10mM")); result.add(new NamedValue(STR, "5min")); result.add(new NamedValue(STR, "37")); result.add(new NamedValue(STR, STR)); MapAnnotationData data = new MapAnnotationData(); data.setContent(result); data.setDescription(STR); data.setNameSpace(MapAnnotationData.NS_CLIENT_CREATED); DataManagerFacility fac = gateway.getFacility(DataManagerFacility.class); fac.attachAnnotation(ctx, data, new ProjectData(new ProjectI(projectId, false))); } | /**
* Creates a map annotation and links to the specified project.
* @param projectId The omero project identifier
* @throws Exception
*/ | Creates a map annotation and links to the specified project | createMapAnnotationandLinkToProject | {
"repo_name": "knabar/openmicroscopy",
"path": "examples/Training/java/src/training/WriteData.java",
"license": "gpl-2.0",
"size": 15632
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,229,375 |
@XmlAttribute
public void setErrorHandlerRef(String errorHandlerRef) {
this.errorHandlerRef = errorHandlerRef;
// we use an specific error handler ref (from Spring DSL) then wrap that
// with a error handler build ref so Camel knows its not just the default one
setErrorHandlerBuilder(new ErrorHandlerBuilderRef(errorHandlerRef));
} | void function(String errorHandlerRef) { this.errorHandlerRef = errorHandlerRef; setErrorHandlerBuilder(new ErrorHandlerBuilderRef(errorHandlerRef)); } | /**
* Sets the bean ref name of the error handler builder to use on this route
*/ | Sets the bean ref name of the error handler builder to use on this route | setErrorHandlerRef | {
"repo_name": "jonmcewen/camel",
"path": "camel-core/src/main/java/org/apache/camel/model/RouteDefinition.java",
"license": "apache-2.0",
"size": 44503
} | [
"org.apache.camel.builder.ErrorHandlerBuilderRef"
] | import org.apache.camel.builder.ErrorHandlerBuilderRef; | import org.apache.camel.builder.*; | [
"org.apache.camel"
] | org.apache.camel; | 1,538,193 |
public NestedSet<LibraryToLink> getLibraries() {
return this.libraries;
} | NestedSet<LibraryToLink> function() { return this.libraries; } | /**
* Returns libraries that are to be inputs to the linker.
*/ | Returns libraries that are to be inputs to the linker | getLibraries | {
"repo_name": "juhalindfors/bazel-patches",
"path": "src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkAction.java",
"license": "apache-2.0",
"size": 25946
} | [
"com.google.devtools.build.lib.collect.nestedset.NestedSet",
"com.google.devtools.build.lib.rules.cpp.LinkerInputs"
] | import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.rules.cpp.LinkerInputs; | import com.google.devtools.build.lib.collect.nestedset.*; import com.google.devtools.build.lib.rules.cpp.*; | [
"com.google.devtools"
] | com.google.devtools; | 1,189,133 |
List<GwasDTO> mappedList = new ArrayList<>();
for (GwasDTO gwasMapping : gwasMappings) {
if (gwasMapping.getGwasMgiGeneSymbol().equals(mgiGeneSymbol)) {
mappedList.add(gwasMapping);
}
return mappedList;
}
return null;
} | List<GwasDTO> mappedList = new ArrayList<>(); for (GwasDTO gwasMapping : gwasMappings) { if (gwasMapping.getGwasMgiGeneSymbol().equals(mgiGeneSymbol)) { mappedList.add(gwasMapping); } return mappedList; } return null; } | /**
 * Fetch all gwas mapping rows filtered by mgi gene symbol.
*
* @return all gwas mapping rows filtered by mgi gene symbol
* @throws SQLException
 */ | Fetch all gwas mapping rows filtered by mgi gene symbol | getGwasMappingByGeneSymbol | {
"repo_name": "mpi2/PhenotypeArchive",
"path": "src/main/java/uk/ac/ebi/phenotype/dao/GwasDAO.java",
"license": "apache-2.0",
"size": 14613
} | [
"java.util.ArrayList",
"java.util.List",
"uk.ac.ebi.phenotype.service.dto.GwasDTO"
] | import java.util.ArrayList; import java.util.List; import uk.ac.ebi.phenotype.service.dto.GwasDTO; | import java.util.*; import uk.ac.ebi.phenotype.service.dto.*; | [
"java.util",
"uk.ac.ebi"
] | java.util; uk.ac.ebi; | 320,457 |
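As extracted, the loop above returns from inside its first iteration, so at most the first row is inspected; a sketch of the same filter written so every matching GwasDTO is collected before returning (GwasDTO, gwasMappings, and mgiGeneSymbol are the types and variables from the record):

List<GwasDTO> filtered = new ArrayList<>();
for (GwasDTO mapping : gwasMappings) {
    if (mapping.getGwasMgiGeneSymbol().equals(mgiGeneSymbol)) {
        filtered.add(mapping);      // keep every row whose gene symbol matches
    }
}
return filtered;                    // return only after the whole list has been scanned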
@Override
public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object)
{
if (itemPropertyDescriptors == null)
{
super.getPropertyDescriptors(object);
}
return itemPropertyDescriptors;
} | List<IItemPropertyDescriptor> function(Object object) { if (itemPropertyDescriptors == null) { super.getPropertyDescriptors(object); } return itemPropertyDescriptors; } | /**
* This returns the property descriptors for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This returns the property descriptors for the adapted class. | getPropertyDescriptors | {
"repo_name": "peterkir/org.eclipse.oomph",
"path": "plugins/org.eclipse.oomph.setup.edit/src/org/eclipse/oomph/setup/provider/SetupTaskContainerItemProvider.java",
"license": "epl-1.0",
"size": 8001
} | [
"java.util.List",
"org.eclipse.emf.edit.provider.IItemPropertyDescriptor"
] | import java.util.List; import org.eclipse.emf.edit.provider.IItemPropertyDescriptor; | import java.util.*; import org.eclipse.emf.edit.provider.*; | [
"java.util",
"org.eclipse.emf"
] | java.util; org.eclipse.emf; | 1,371,775 |
private static void addExtraReads(ModuleLayer bootLayer) {
// decode the command line options
Map<String, List<String>> map = decode("jdk.module.addreads.");
if (map.isEmpty())
return;
for (Map.Entry<String, List<String>> e : map.entrySet()) {
// the key is $MODULE
String mn = e.getKey();
Optional<Module> om = bootLayer.findModule(mn);
if (!om.isPresent()) {
warnUnknownModule(ADD_READS, mn);
continue;
}
Module m = om.get();
// the value is the set of other modules (by name)
for (String name : e.getValue()) {
if (ALL_UNNAMED.equals(name)) {
Modules.addReadsAllUnnamed(m);
} else {
om = bootLayer.findModule(name);
if (om.isPresent()) {
Modules.addReads(m, om.get());
} else {
warnUnknownModule(ADD_READS, name);
}
}
}
}
} | static void function(ModuleLayer bootLayer) { Map<String, List<String>> map = decode(STR); if (map.isEmpty()) return; for (Map.Entry<String, List<String>> e : map.entrySet()) { String mn = e.getKey(); Optional<Module> om = bootLayer.findModule(mn); if (!om.isPresent()) { warnUnknownModule(ADD_READS, mn); continue; } Module m = om.get(); for (String name : e.getValue()) { if (ALL_UNNAMED.equals(name)) { Modules.addReadsAllUnnamed(m); } else { om = bootLayer.findModule(name); if (om.isPresent()) { Modules.addReads(m, om.get()); } else { warnUnknownModule(ADD_READS, name); } } } } } | /**
* Process the --add-reads options to add any additional read edges that
* are specified on the command-line.
*/ | Process the --add-reads options to add any additional read edges that are specified on the command-line | addExtraReads | {
"repo_name": "md-5/jdk10",
"path": "src/java.base/share/classes/jdk/internal/module/ModuleBootstrap.java",
"license": "gpl-2.0",
"size": 41334
} | [
"java.util.List",
"java.util.Map",
"java.util.Optional"
] | import java.util.List; import java.util.Map; import java.util.Optional; | import java.util.*; | [
"java.util"
] | java.util; | 1,689,997 |
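For context, the values decoded here originate from the standard java launcher option of the form --add-reads <module>=<target>(,<target>)*, where ALL-UNNAMED is accepted as a target; an illustrative invocation with placeholder module names:

java --add-reads my.module=other.module,ALL-UNNAMED -m my.module/com.example.Main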
protected void addOldViewNamePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_RenameViewType_oldViewName_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_RenameViewType_oldViewName_feature", "_UI_RenameViewType_type"),
DbchangelogPackage.eINSTANCE.getRenameViewType_OldViewName(),
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
| void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), DbchangelogPackage.eINSTANCE.getRenameViewType_OldViewName(), true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } | /**
* This adds a property descriptor for the Old View Name feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This adds a property descriptor for the Old View Name feature. | addOldViewNamePropertyDescriptor | {
"repo_name": "Treehopper/EclipseAugments",
"path": "liquibase-editor/eu.hohenegger.xsd.liquibase.ui/src-gen/org/liquibase/xml/ns/dbchangelog/provider/RenameViewTypeItemProvider.java",
"license": "epl-1.0",
"size": 8770
} | [
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.eclipse.emf.edit.provider.ItemPropertyDescriptor",
"org.liquibase.xml.ns.dbchangelog.DbchangelogPackage"
] | import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; import org.liquibase.xml.ns.dbchangelog.DbchangelogPackage; | import org.eclipse.emf.edit.provider.*; import org.liquibase.xml.ns.dbchangelog.*; | [
"org.eclipse.emf",
"org.liquibase.xml"
] | org.eclipse.emf; org.liquibase.xml; | 594,319 |
public void addMemberHeader(ExecutableElement member, Content methodsContentTree) {
methodsContentTree.addContent(getHead(member));
methodsContentTree.addContent(getSignature(member));
} | void function(ExecutableElement member, Content methodsContentTree) { methodsContentTree.addContent(getHead(member)); methodsContentTree.addContent(getSignature(member)); } | /**
* Add the member header.
*
* @param member the method document to be listed
* @param methodsContentTree the content tree to which the member header will be added
*/ | Add the member header | addMemberHeader | {
"repo_name": "FauxFaux/jdk9-langtools",
"path": "src/jdk.javadoc/share/classes/jdk/javadoc/internal/doclets/formats/html/HtmlSerialMethodWriter.java",
"license": "gpl-2.0",
"size": 6699
} | [
"javax.lang.model.element.ExecutableElement"
] | import javax.lang.model.element.ExecutableElement; | import javax.lang.model.element.*; | [
"javax.lang"
] | javax.lang; | 1,120,744 |
Set<Class<?>> findImplementations(Class<?> parent, String... packageNames); | Set<Class<?>> findImplementations(Class<?> parent, String... packageNames); | /**
* Attempts to discover classes that are assignable to the type provided. In
* the case that an interface is provided this method will collect
* implementations. In the case of a non-interface class, subclasses will be
* collected.
*
* @param parent the class of interface to find subclasses or implementations of
* @param packageNames one or more package names to scan (including subpackages) for classes
* @return the classes found, returns an empty set if none found
*/ | Attempts to discover classes that are assignable to the type provided. In the case that an interface is provided this method will collect implementations. In the case of a non-interface class, subclasses will be collected | findImplementations | {
"repo_name": "Fabryprog/camel",
"path": "core/camel-api/src/main/java/org/apache/camel/spi/PackageScanClassResolver.java",
"license": "apache-2.0",
"size": 3992
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 1,057,267 |
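A hedged usage sketch of the resolver contract above; how the PackageScanClassResolver instance is obtained and the MyProcessor marker interface are assumptions, and only the call declared in the interface is used:

// 'resolver' is an available PackageScanClassResolver; MyProcessor is an example interface.
Set<Class<?>> found = resolver.findImplementations(MyProcessor.class,
        "com.example.processors", "com.example.extra");
for (Class<?> type : found) {
    System.out.println("Discovered implementation: " + type.getName());
}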
public List<Integer> probReachStrategy(STPG stpg, int state, BitSet target, boolean min1, boolean min2, double lastSoln[]) throws PrismException
{
double val = stpg.mvMultMinMaxSingle(state, lastSoln, min1, min2);
return stpg.mvMultMinMaxSingleChoices(state, lastSoln, min1, min2, val);
} | List<Integer> function(STPG stpg, int state, BitSet target, boolean min1, boolean min2, double lastSoln[]) throws PrismException { double val = stpg.mvMultMinMaxSingle(state, lastSoln, min1, min2); return stpg.mvMultMinMaxSingleChoices(state, lastSoln, min1, min2, val); } | /**
* Construct strategy information for min/max reachability probabilities.
* (More precisely, list of indices of player 1 choices resulting in min/max.)
* (Note: indices are guaranteed to be sorted in ascending order.)
* @param stpg The STPG
* @param state The state to generate strategy info for
* @param target The set of target states to reach
* @param min1 Min or max probabilities for player 1 (true=min, false=max)
* @param min2 Min or max probabilities for player 2 (true=min, false=max)
* @param lastSoln Vector of probabilities from which to recompute in one iteration
*/ | Construct strategy information for min/max reachability probabilities. (More precisely, list of indices of player 1 choices resulting in min/max.) (Note: indices are guaranteed to be sorted in ascending order.) | probReachStrategy | {
"repo_name": "nicodelpiano/prism",
"path": "src/explicit/STPGModelChecker.java",
"license": "gpl-2.0",
"size": 32072
} | [
"java.util.BitSet",
"java.util.List"
] | import java.util.BitSet; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 821,121 |
private Node tryFoldHook(Node n) {
Preconditions.checkState(n.isHook(), n);
Node parent = n.getParent();
Preconditions.checkNotNull(parent);
Node cond = n.getFirstChild();
Node thenBody = cond.getNext();
Node elseBody = thenBody.getNext();
TernaryValue condValue = NodeUtil.getImpureBooleanValue(cond);
if (condValue == TernaryValue.UNKNOWN) {
// If the result nodes are equivalent, then one of the nodes can be
// removed and it doesn't matter which.
if (!areNodesEqualForInlining(thenBody, elseBody)) {
return n; // We can't remove branches otherwise!
}
}
// Transform "(a = 2) ? x =2 : y" into "a=2,x=2"
Node branchToKeep = condValue.toBoolean(true) ? thenBody : elseBody;
Node replacement;
boolean condHasSideEffects = mayHaveSideEffects(cond);
// Must detach after checking for side effects, to ensure that the parents
// of nodes are set correctly.
n.detachChildren();
if (condHasSideEffects) {
replacement = IR.comma(cond, branchToKeep).srcref(n);
} else {
replacement = branchToKeep;
}
parent.replaceChild(n, replacement);
reportCodeChange();
return replacement;
} | Node function(Node n) { Preconditions.checkState(n.isHook(), n); Node parent = n.getParent(); Preconditions.checkNotNull(parent); Node cond = n.getFirstChild(); Node thenBody = cond.getNext(); Node elseBody = thenBody.getNext(); TernaryValue condValue = NodeUtil.getImpureBooleanValue(cond); if (condValue == TernaryValue.UNKNOWN) { if (!areNodesEqualForInlining(thenBody, elseBody)) { return n; } } Node branchToKeep = condValue.toBoolean(true) ? thenBody : elseBody; Node replacement; boolean condHasSideEffects = mayHaveSideEffects(cond); n.detachChildren(); if (condHasSideEffects) { replacement = IR.comma(cond, branchToKeep).srcref(n); } else { replacement = branchToKeep; } parent.replaceChild(n, replacement); reportCodeChange(); return replacement; } | /**
 * Try folding HOOK (?:) if the result of the condition is known.
* @return the replacement node, if changed, or the original if not
 */ | Try folding HOOK (?:) if the result of the condition is known | tryFoldHook | {
"repo_name": "redforks/closure-compiler",
"path": "src/com/google/javascript/jscomp/PeepholeRemoveDeadCode.java",
"license": "apache-2.0",
"size": 32888
} | [
"com.google.common.base.Preconditions",
"com.google.javascript.rhino.IR",
"com.google.javascript.rhino.Node",
"com.google.javascript.rhino.jstype.TernaryValue"
] | import com.google.common.base.Preconditions; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.jstype.TernaryValue; | import com.google.common.base.*; import com.google.javascript.rhino.*; import com.google.javascript.rhino.jstype.*; | [
"com.google.common",
"com.google.javascript"
] | com.google.common; com.google.javascript; | 2,793,446 |
@Test
public void testFormatWithNonInteractive() throws IOException {
// we check for a non empty dir, so create a child path
File data = new File(hdfsDir, "file");
if (!data.mkdirs()) {
fail("Failed to create dir " + data.getPath());
}
String[] argv = { "-format", "-nonInteractive" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have been aborted with exit code 1", 1,
e.status);
}
// check if the version file does not exists.
File version = new File(hdfsDir, "current/VERSION");
assertFalse("Check version should not exist", version.exists());
} | void function() throws IOException { File data = new File(hdfsDir, "file"); if (!data.mkdirs()) { fail(STR + data.getPath()); } String[] argv = { STR, STR }; try { NameNode.createNameNode(argv, config); fail(STR); } catch (ExitException e) { assertEquals(STR, 1, e.status); } File version = new File(hdfsDir, STR); assertFalse(STR, version.exists()); } | /**
 * Test namenode format with -format -nonInteractive options when a non-empty
* name directory exists. Format should not succeed.
*
* @throws IOException
 */ | Test namenode format with -format -nonInteractive options when a non-empty name directory exists. Format should not succeed | testFormatWithNonInteractive | {
"repo_name": "NJUJYB/disYarn",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestClusterId.java",
"license": "apache-2.0",
"size": 14538
} | [
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.util.ExitUtil",
"org.junit.Assert"
] | import java.io.File; import java.io.IOException; import org.apache.hadoop.util.ExitUtil; import org.junit.Assert; | import java.io.*; import org.apache.hadoop.util.*; import org.junit.*; | [
"java.io",
"org.apache.hadoop",
"org.junit"
] | java.io; org.apache.hadoop; org.junit; | 2,761,366 |
public void optimize( Graphics2D g )
throws Exception {
for ( Optimizer optimizer : optimizers ) {
optimizer.optimize( g );
}
}
| void function( Graphics2D g ) throws Exception { for ( Optimizer optimizer : optimizers ) { optimizer.optimize( g ); } } | /**
* Performs the optimization for all contained {@link Optimizer} instances. Calls
 * {@link Optimizer#optimize(Graphics2D)} for all contained {@link Optimizer} instances subsequently.
*
* @param g
 */ | Performs the optimization for all contained <code>Optimizer</code> instances. Calls <code>Optimizer#optimize(Graphics2D)</code> for all contained <code>Optimizer</code> instances subsequently | optimize | {
"repo_name": "lat-lon/deegree2-base",
"path": "deegree2-core/src/main/java/org/deegree/graphics/optimizers/OptimizerChain.java",
"license": "lgpl-2.1",
"size": 3067
} | [
"java.awt.Graphics2D"
] | import java.awt.Graphics2D; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,759,814 |
private ProcessRequestScheduleEntityBean getProcessRequestScheduleEntityBean(
CReqInstructionLog reqInstructionLog, Integer requestId,
String emailsGroup, String processClassName, String currentUser,
Timestamp currentDate) throws CommDatabaseException {
ProcessRequestScheduleEntityBean scheduleBean = null;
String instructionName = null;
String instructionValue = null;
SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss");
List<InstructionParameters> list = reqInstructionLog
.getInstructionParametersList();
KEEP_ALIVE keepAlive = KEEP_ALIVE.NO;
if(list != null) {
for (InstructionParameters instructionParameters : list) {
instructionName = instructionParameters.getName();
instructionValue = instructionParameters.getValue();
if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.BATCH_NAME.name()
.equals(instructionName)) {
scheduledBatchName = instructionValue;
} else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.FREQUENCY.name()
.equals(instructionName)) {
if (scheduleBean == null) {
scheduleBean = new ProcessRequestScheduleEntityBean();
}
scheduleBean.setFreqType(instructionValue.toUpperCase());
} else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.RECUR_EVERY
.name().equals(instructionName)) {
if (scheduleBean == null) {
scheduleBean = new ProcessRequestScheduleEntityBean();
}
scheduleBean.setRecur(Integer.parseInt(instructionValue));
// } else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.BATCH_RUN_DATE
// .name().equals(instructionName)) {
//
// Date parsedDate;
// try {
// parsedDate = dateFormat2.parse(instructionValue);
// if (scheduleBean == null) {
// scheduleBean = new ProcessRequestScheduleEntityBean();
// }
// scheduleBean
// .setStartDt(new Timestamp(parsedDate.getTime()));
// } catch (ParseException e) {
// // Dummy
// e.printStackTrace();
// }
}else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.SCHEDULE_DATE
.name().equals(instructionName)) {
Date parsedDate;
try {
parsedDate = sdf.parse(instructionValue);
scheduleTime = new Timestamp(parsedDate.getTime());
} catch (ParseException e) {
// Dummy
}
}else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.WEEK_DAY
.name().equals(instructionName)) {
if (scheduleBean == null) {
scheduleBean = new ProcessRequestScheduleEntityBean();
}
scheduleBean.setOnWeekDay(instructionValue);
} else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.END_ON_DATE
.name().equals(instructionName)) {
Date parsedDate;
try {
parsedDate = sdf.parse(instructionValue);
if (scheduleBean == null) {
scheduleBean = new ProcessRequestScheduleEntityBean();
}
scheduleBean
.setEndDt(new Timestamp(parsedDate.getTime()));
} catch (ParseException e) {
// Dummy
}
} else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.END_ON_OCCURRENCE
.name().equals(instructionName)) {
if (scheduleBean == null) {
scheduleBean = new ProcessRequestScheduleEntityBean();
}
scheduleBean.setEndOccur(Long.parseLong(instructionValue));
} else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.SKIP_FLAG
.name().equals(instructionName)) {
if (scheduleBean == null) {
scheduleBean = new ProcessRequestScheduleEntityBean();
}
scheduleBean.setSkipFlag(instructionValue);
} else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.KEEP_ALIVE.name().equals(instructionName)) {
keepAlive = KEEP_ALIVE.resolve(instructionValue);
}
}
}
if (scheduleBean != null) {
scheduleBean.setSchId(requestId);
scheduleBean.setSchStat(ISchedule.SCHEDULE_STATUS.ACTIVE.getID());
scheduleBean.setStartDt(scheduleTime);
scheduleBean.setUserId(currentUser);
scheduleBean.setEntryDt(currentDate);
scheduleBean.setReqStat(IProcessRequest.REQUEST_STATUS.QUEUED.getID());
scheduleBean.setOccurCounter(1);
scheduleBean.setProcessClassNm(processClassName);
scheduleBean.setFutureSchedulingOnly(ISchedule.FUTURE_SCHEDULING_ONLY);
scheduleBean.setFixedDate("N");
scheduleBean.setEmailIds(emailsGroup);
scheduleBean.setWeekdayCheckFlag("N");
scheduleBean.setKeepAlive(keepAlive.getID());
}
return scheduleBean;
}
| ProcessRequestScheduleEntityBean function( CReqInstructionLog reqInstructionLog, Integer requestId, String emailsGroup, String processClassName, String currentUser, Timestamp currentDate) throws CommDatabaseException { ProcessRequestScheduleEntityBean scheduleBean = null; String instructionName = null; String instructionValue = null; SimpleDateFormat sdf = new SimpleDateFormat(STR); List<InstructionParameters> list = reqInstructionLog .getInstructionParametersList(); KEEP_ALIVE keepAlive = KEEP_ALIVE.NO; if(list != null) { for (InstructionParameters instructionParameters : list) { instructionName = instructionParameters.getName(); instructionValue = instructionParameters.getValue(); if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.BATCH_NAME.name() .equals(instructionName)) { scheduledBatchName = instructionValue; } else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.FREQUENCY.name() .equals(instructionName)) { if (scheduleBean == null) { scheduleBean = new ProcessRequestScheduleEntityBean(); } scheduleBean.setFreqType(instructionValue.toUpperCase()); } else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.RECUR_EVERY .name().equals(instructionName)) { if (scheduleBean == null) { scheduleBean = new ProcessRequestScheduleEntityBean(); } scheduleBean.setRecur(Integer.parseInt(instructionValue)); }else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.SCHEDULE_DATE .name().equals(instructionName)) { Date parsedDate; try { parsedDate = sdf.parse(instructionValue); scheduleTime = new Timestamp(parsedDate.getTime()); } catch (ParseException e) { } }else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.WEEK_DAY .name().equals(instructionName)) { if (scheduleBean == null) { scheduleBean = new ProcessRequestScheduleEntityBean(); } scheduleBean.setOnWeekDay(instructionValue); } else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.END_ON_DATE .name().equals(instructionName)) { Date parsedDate; try { parsedDate = sdf.parse(instructionValue); if (scheduleBean == null) { scheduleBean = new ProcessRequestScheduleEntityBean(); } scheduleBean .setEndDt(new Timestamp(parsedDate.getTime())); } catch (ParseException e) { } } else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.END_ON_OCCURRENCE .name().equals(instructionName)) { if (scheduleBean == null) { scheduleBean = new ProcessRequestScheduleEntityBean(); } scheduleBean.setEndOccur(Long.parseLong(instructionValue)); } else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.SKIP_FLAG .name().equals(instructionName)) { if (scheduleBean == null) { scheduleBean = new ProcessRequestScheduleEntityBean(); } scheduleBean.setSkipFlag(instructionValue); } else if (CommConstants.SCHEDULE_INSTRCUTION_PARAMS.KEEP_ALIVE.name().equals(instructionName)) { keepAlive = KEEP_ALIVE.resolve(instructionValue); } } } if (scheduleBean != null) { scheduleBean.setSchId(requestId); scheduleBean.setSchStat(ISchedule.SCHEDULE_STATUS.ACTIVE.getID()); scheduleBean.setStartDt(scheduleTime); scheduleBean.setUserId(currentUser); scheduleBean.setEntryDt(currentDate); scheduleBean.setReqStat(IProcessRequest.REQUEST_STATUS.QUEUED.getID()); scheduleBean.setOccurCounter(1); scheduleBean.setProcessClassNm(processClassName); scheduleBean.setFutureSchedulingOnly(ISchedule.FUTURE_SCHEDULING_ONLY); scheduleBean.setFixedDate("N"); scheduleBean.setEmailIds(emailsGroup); scheduleBean.setWeekdayCheckFlag("N"); scheduleBean.setKeepAlive(keepAlive.getID()); } return scheduleBean; } | /**
* Creates (Inserts) records in the process_request_schedule table.
*
* @param reqInstructionLog
*            The instruction log to be inserted into the INSTRUCTION_LOG
*            table of CORE; see {@link CReqInstructionLog}
*
* @param requestId
*            Instruction request id.
*
* @param emailsGroup
*            The email group to which the notification regarding the batch
*            process will be sent
*
* @param processClassName
*            The process class name
*
* @param currentUser
*            The current application user
*
* @param currentDate
*            The system date
*
* @return the populated schedule bean, or <code>null</code> when the
*            instruction log carries no schedule parameters
*
* @throws CommDatabaseException
* Any database related I/O exception
*/ | Creates (Inserts) records in the process_request_schedule table | getProcessRequestScheduleEntityBean | {
"repo_name": "MastekLtd/JBEAM",
"path": "supporting_libraries/jbeam-plugins/jbeam-core-comm-oracle/src/main/java/com/stgmastek/core/comm/server/dao/impl/BatchDAO.java",
"license": "lgpl-3.0",
"size": 43501
} | [
"com.stgmastek.core.comm.exception.CommDatabaseException",
"com.stgmastek.core.comm.server.vo.CReqInstructionLog",
"com.stgmastek.core.comm.server.vo.InstructionParameters",
"com.stgmastek.core.comm.util.CommConstants",
"java.sql.Timestamp",
"java.text.ParseException",
"java.text.SimpleDateFormat",
"java.util.Date",
"java.util.List"
] | import com.stgmastek.core.comm.exception.CommDatabaseException; import com.stgmastek.core.comm.server.vo.CReqInstructionLog; import com.stgmastek.core.comm.server.vo.InstructionParameters; import com.stgmastek.core.comm.util.CommConstants; import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; | import com.stgmastek.core.comm.exception.*; import com.stgmastek.core.comm.server.vo.*; import com.stgmastek.core.comm.util.*; import java.sql.*; import java.text.*; import java.util.*; | [
"com.stgmastek.core",
"java.sql",
"java.text",
"java.util"
] | com.stgmastek.core; java.sql; java.text; java.util; | 1,181,711 |
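The instruction-date handling in the method above catches ParseException and silently discards it, leaving the schedule or end date unset. A minimal sketch of a defensive variant of that parsing step follows; the date pattern is an assumption, since the actual format string passed to SimpleDateFormat is not visible in this entry.

import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

final class InstructionDates {

    // Hypothetical pattern: the real format string used by the batch DAO is not
    // shown here, so "dd/MM/yyyy HH:mm:ss" is only an assumption for illustration.
    private static final String ASSUMED_PATTERN = "dd/MM/yyyy HH:mm:ss";

    // Parses an instruction date value, returning null instead of silently
    // swallowing the ParseException as the original catch blocks do.
    static Timestamp parseInstructionDate(String value) {
        try {
            Date parsed = new SimpleDateFormat(ASSUMED_PATTERN).parse(value);
            return new Timestamp(parsed.getTime());
        } catch (ParseException e) {
            return null; // caller decides how to handle a malformed date value
        }
    }
}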
public static String netconfGet(NetconfSession session, String filter) {
String reply;
try {
reply = session.get(filter, null);
} catch (NetconfException e) {
throw new IllegalStateException(new NetconfException("Failed to retrieve configuration.", e));
}
return reply;
} | static String function(NetconfSession session, String filter) { String reply; try { reply = session.get(filter, null); } catch (NetconfException e) { throw new IllegalStateException(new NetconfException(STR, e)); } return reply; } | /**
* Retrieves the session reply for a NETCONF get operation.
*
* @param session explicit NETCONF session
* @param filter the XML filter string describing the content to retrieve
* @return the reply string
*/ | Retrieves session reply information for get operation | netconfGet | {
"repo_name": "opennetworkinglab/onos",
"path": "drivers/polatis/netconf/src/main/java/org/onosproject/drivers/polatis/netconf/PolatisNetconfUtility.java",
"license": "apache-2.0",
"size": 11359
} | [
"org.onosproject.netconf.NetconfException",
"org.onosproject.netconf.NetconfSession"
] | import org.onosproject.netconf.NetconfException; import org.onosproject.netconf.NetconfSession; | import org.onosproject.netconf.*; | [
"org.onosproject.netconf"
] | org.onosproject.netconf; | 1,417,798 |
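A short sketch of how netconfGet might be invoked from Polatis driver code is given below; the subtree filter element and namespace are illustrative assumptions rather than names taken from the actual Polatis YANG model.

import org.onosproject.netconf.NetconfSession;

final class PolatisGetExample {

    // Assumed subtree filter: the real element name and namespace come from the
    // device YANG model and are not part of this entry.
    private static final String CROSS_CONNECT_FILTER =
            "<cross-connects xmlns=\"http://www.polatis.com/yang/optical-switch\"/>";

    // Fetches the filtered subtree through the utility; a NETCONF failure surfaces
    // as the IllegalStateException thrown inside netconfGet.
    static String fetchCrossConnects(NetconfSession session) {
        return PolatisNetconfUtility.netconfGet(session, CROSS_CONNECT_FILTER);
    }
}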
public static <T> double RR(List<T> rankedList, List<T> groundTruth) {
for (int i = 0, n = rankedList.size(); i < n; i++) {
T item = rankedList.get(i);
if (groundTruth.contains(item))
return 1 / (i + 1.0);
}
return 0;
} | static <T> double function(List<T> rankedList, List<T> groundTruth) { for (int i = 0, n = rankedList.size(); i < n; i++) { T item = rankedList.get(i); if (groundTruth.contains(item)) return 1 / (i + 1.0); } return 0; } | /**
* Compute the reciprocal rank of a list of ranked items
*
* @param <T> the type of the ranked item identifiers
*
* @param rankedList
* a list of ranked item IDs, the highest-ranking item first
* @param groundTruth
* a collection of positive/correct item IDs
* @return the reciprocal rank for the given data
*/ | Compute the reciprocal rank of a list of ranked items | RR | {
"repo_name": "taolian/librec",
"path": "librec/src/main/java/librec/util/Measures.java",
"license": "gpl-3.0",
"size": 8835
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,821,632 |
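Because RR is a public static method on the Measures utility class, it can be exercised directly; the item IDs below are invented for illustration, and the closing comment notes how the mean reciprocal rank (MRR) relates to it.

import java.util.Arrays;
import java.util.List;

final class ReciprocalRankExample {

    public static void main(String[] args) {
        List<String> ranked = Arrays.asList("itemA", "itemB", "itemC", "itemD");
        List<String> groundTruth = Arrays.asList("itemC");

        // The first relevant item sits at 0-based index 2, so RR = 1 / (2 + 1) = 0.3333...
        double rr = Measures.RR(ranked, groundTruth);
        System.out.println(rr);

        // Averaging RR over many ranked lists (one per user or query) gives the
        // mean reciprocal rank (MRR).
    }
}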
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(name="invoice")
public Invoice getInvoice() {
return invoice;
}
| @ManyToOne(fetch = FetchType.EAGER) @JoinColumn(name=STR) Invoice function() { return invoice; } | /**
* Gets the invoice.
*
* @return the invoice
*/ | Gets the invoice | getInvoice | {
"repo_name": "Esleelkartea/aonGTA",
"path": "aongta_v1.0.0_src/Fuentes y JavaDoc/aon-finance/src/com/code/aon/finance/Finance.java",
"license": "gpl-2.0",
"size": 5400
} | [
"javax.persistence.FetchType",
"javax.persistence.JoinColumn",
"javax.persistence.ManyToOne"
] | import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; | import javax.persistence.*; | [
"javax.persistence"
] | javax.persistence; | 630,856 |
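A minimal sketch of the property-access JPA mapping implied by the annotated getter follows; the backing field, setter, and identifier property are assumptions, since only getInvoice() is shown in this entry, and Invoice is taken to be the mapped entity from the same aon-finance module.

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;

// Not the real Finance entity: only the invoice association and an assumed
// identifier are shown to keep the sketch deployable.
@Entity
public class FinanceSketch {

    private Integer id;
    private Invoice invoice;

    @Id
    public Integer getId() { return id; }
    public void setId(Integer id) { this.id = id; }

    // Eagerly fetched many-to-one association, joined on the "invoice" column,
    // mirroring the annotations on the original getter.
    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "invoice")
    public Invoice getInvoice() { return invoice; }
    public void setInvoice(Invoice invoice) { this.invoice = invoice; }
}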