method (string, lengths 13-441k) | clean_method (string, lengths 7-313k) | doc (string, lengths 17-17.3k) | comment (string, lengths 3-1.42k) | method_name (string, lengths 1-273) | extra (dict) | imports (sequence) | imports_info (string, lengths 19-34.8k) | cluster_imports_info (string, lengths 15-3.66k) | libraries (sequence) | libraries_info (string, lengths 6-661) | id (int64, 0-2.92M) |
---|---|---|---|---|---|---|---|---|---|---|---|
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<UserInner> getWithResponse(String upnOrObjectId, Context context) {
return getWithResponseAsync(upnOrObjectId, context).block();
} | @ServiceMethod(returns = ReturnType.SINGLE) Response<UserInner> function(String upnOrObjectId, Context context) { return getWithResponseAsync(upnOrObjectId, context).block(); } | /**
* Gets user information from the directory.
*
* @param upnOrObjectId The object ID or principal name of the user for which to get information.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return user information from the directory.
*/ | Gets user information from the directory | getWithResponse | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/implementation/UsersClientImpl.java",
"license": "mit",
"size": 50884
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context",
"com.azure.resourcemanager.authorization.fluent.models.UserInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.resourcemanager.authorization.fluent.models.UserInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.authorization.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 1,714,784 |
public WindGateJdbcInputModel withOptions(String... newValues) {
Arguments.requireNonNull(newValues);
return withOptions(Arrays.asList(newValues));
} | WindGateJdbcInputModel function(String... newValues) { Arguments.requireNonNull(newValues); return withOptions(Arrays.asList(newValues)); } | /**
* Sets the options.
* @param newValues the options
* @return this
*/ | Sets the options | withOptions | {
"repo_name": "ashigeru/asakusafw-compiler",
"path": "dag/compiler/jdbc/src/main/java/com/asakusafw/dag/compiler/jdbc/windgate/WindGateJdbcInputModel.java",
"license": "apache-2.0",
"size": 2598
} | [
"com.asakusafw.lang.utils.common.Arguments",
"java.util.Arrays"
] | import com.asakusafw.lang.utils.common.Arguments; import java.util.Arrays; | import com.asakusafw.lang.utils.common.*; import java.util.*; | [
"com.asakusafw.lang",
"java.util"
] | com.asakusafw.lang; java.util; | 2,755,233 |
public void savePreviousImplementedVersion(Page page, Integer version) {
String value = version != null ? String.valueOf(version) : null;
ContentEntityObject entityObject = getPageManager().getById(page.getId());
getContentPropertyManager().setStringProperty(entityObject, PREVIOUS_IMPLEMENTED_VERSION, value);
} | void function(Page page, Integer version) { String value = version != null ? String.valueOf(version) : null; ContentEntityObject entityObject = getPageManager().getById(page.getId()); getContentPropertyManager().setStringProperty(entityObject, PREVIOUS_IMPLEMENTED_VERSION, value); } | /**
 * Saves the specified version as the Previous implemented version
*
* @param page
* @param version
 */ | Saves the specified version as the Previous implemented version | savePreviousImplementedVersion | {
"repo_name": "benhamidene/livingdoc-confluence",
"path": "livingdoc-confluence-plugin/src/main/java/info/novatec/testit/livingdoc/confluence/velocity/ConfluenceLivingDoc.java",
"license": "gpl-3.0",
"size": 31707
} | [
"com.atlassian.confluence.core.ContentEntityObject",
"com.atlassian.confluence.pages.Page"
] | import com.atlassian.confluence.core.ContentEntityObject; import com.atlassian.confluence.pages.Page; | import com.atlassian.confluence.core.*; import com.atlassian.confluence.pages.*; | [
"com.atlassian.confluence"
] | com.atlassian.confluence; | 2,699,354 |
public void setRegistryService(RegistryService registryService) {
BPMNAnalyticsCoreServerHolder.getInstance().setRegistryService(registryService);
} | void function(RegistryService registryService) { BPMNAnalyticsCoreServerHolder.getInstance().setRegistryService(registryService); } | /**
 * Set RegistryService instance when the bundle gets bound to the OSGI runtime.
*
* @param registryService
 */ | Set RegistryService instance when the bundle gets bound to the OSGI runtime | setRegistryService | {
"repo_name": "wso2/product-ei",
"path": "components/org.wso2.ei.bpmn.analytics.core/src/main/java/org/wso2/ei/bpmn/analytics/core/internal/BPMNAnalyticsCoreServiceComponent.java",
"license": "apache-2.0",
"size": 4275
} | [
"org.wso2.carbon.registry.core.service.RegistryService"
] | import org.wso2.carbon.registry.core.service.RegistryService; | import org.wso2.carbon.registry.core.service.*; | [
"org.wso2.carbon"
] | org.wso2.carbon; | 1,941,395 |
private void removeSearchPredicates(
final GroupNodeBase<IGroupMemberNode> group,
final Map<URI, StatementPatternNode> statementPatterns) {
for(StatementPatternNode sp : statementPatterns.values()) {
if (!group.removeArg(sp))
throw new AssertionError();
}
}
| void function( final GroupNodeBase<IGroupMemberNode> group, final Map<URI, StatementPatternNode> statementPatterns) { for(StatementPatternNode sp : statementPatterns.values()) { if (!group.removeArg(sp)) throw new AssertionError(); } } | /**
* Remove each {@link StatementPatternNode} from the group.
*
* @param group
* The group.
* @param statementPatterns
* The statement pattern nodes.
*/ | Remove each <code>StatementPatternNode</code> from the group | removeSearchPredicates | {
"repo_name": "smalyshev/blazegraph",
"path": "bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/ASTSearchOptimizer.java",
"license": "gpl-2.0",
"size": 18289
} | [
"com.bigdata.rdf.sparql.ast.GroupNodeBase",
"com.bigdata.rdf.sparql.ast.IGroupMemberNode",
"com.bigdata.rdf.sparql.ast.StatementPatternNode",
"java.util.Map"
] | import com.bigdata.rdf.sparql.ast.GroupNodeBase; import com.bigdata.rdf.sparql.ast.IGroupMemberNode; import com.bigdata.rdf.sparql.ast.StatementPatternNode; import java.util.Map; | import com.bigdata.rdf.sparql.ast.*; import java.util.*; | [
"com.bigdata.rdf",
"java.util"
] | com.bigdata.rdf; java.util; | 311,254 |
public static boolean matchesUnpredictableAssignment(String assignment) {
if (unpredictableAssignmentsPatternInitialized == false) {
String regEx = AvlConfig.getUnpredictableAssignmentsRegEx();
if (regEx != null && !regEx.isEmpty()) {
regExPattern = Pattern.compile(regEx);
}
unpredictableAssignmentsPatternInitialized = true;
}
if (regExPattern == null)
return false;
return regExPattern.matcher(assignment).matches();
}
| static boolean function(String assignment) { if (unpredictableAssignmentsPatternInitialized == false) { String regEx = AvlConfig.getUnpredictableAssignmentsRegEx(); if (regEx != null && !regEx.isEmpty()) { regExPattern = Pattern.compile(regEx); } unpredictableAssignmentsPatternInitialized = true; } if (regExPattern == null) return false; return regExPattern.matcher(assignment).matches(); } | /**
* Returns true if the assignment specified matches the regular expression
* for unpredictable assignments.
*
* @param assignment
* @return true if assignment matches regular expression
*/ | Returns true if the assignment specified matches the regular expression for unpredictable assignments | matchesUnpredictableAssignment | {
"repo_name": "goeuropa/transitime",
"path": "transitime/src/main/java/org/transitime/db/structs/AvlReport.java",
"license": "gpl-3.0",
"size": 34880
} | [
"java.util.regex.Pattern",
"org.transitime.configData.AvlConfig"
] | import java.util.regex.Pattern; import org.transitime.configData.AvlConfig; | import java.util.regex.*; import org.transitime.*; | [
"java.util",
"org.transitime"
] | java.util; org.transitime; | 2,523,248 |
public MermaidlagMessages addConstraintsParametersScriptAssertMessage(String property, String script) {
assertPropertyNotNull(property);
add(property, new ActionMessage(CONSTRAINTS_ParametersScriptAssert_MESSAGE, script));
return this;
}
/**
* Add the created action message for the key 'constraints.Range.message' with parameters.
* <pre>
* message: must be between {min} and {max} | MermaidlagMessages function(String property, String script) { assertPropertyNotNull(property); add(property, new ActionMessage(CONSTRAINTS_ParametersScriptAssert_MESSAGE, script)); return this; } /** * Add the created action message for the key 'constraints.Range.message' with parameters. * <pre> * message: must be between {min} and {max} | /**
* Add the created action message for the key 'constraints.ParametersScriptAssert.message' with parameters.
* <pre>
* message: script expression "{script}" didn't evaluate to true
* </pre>
* @param property The property name for the message. (NotNull)
* @param script The parameter script for message. (NotNull)
* @return this. (NotNull)
*/ | Add the created action message for the key 'constraints.ParametersScriptAssert.message' with parameters. <code> message: script expression "{script}" didn't evaluate to true </code> | addConstraintsParametersScriptAssertMessage | {
"repo_name": "dbflute-session/lastaflute-rehearsal-mermaidlag",
"path": "src/main/java/org/docksidestage/mylasta/action/MermaidlagMessages.java",
"license": "apache-2.0",
"size": 30490
} | [
"org.lastaflute.web.ruts.message.ActionMessage"
] | import org.lastaflute.web.ruts.message.ActionMessage; | import org.lastaflute.web.ruts.message.*; | [
"org.lastaflute.web"
] | org.lastaflute.web; | 2,536,958 |
private void updateNsCache(String namespace,
ListMultimap<String, TablePermission> tablePerms) {
PermissionCache<TablePermission> newTablePerms = new PermissionCache<TablePermission>();
for (Map.Entry<String, TablePermission> entry : tablePerms.entries()) {
if (AuthUtil.isGroupPrincipal(entry.getKey())) {
newTablePerms.putGroup(AuthUtil.getGroupName(entry.getKey()), entry.getValue());
} else {
newTablePerms.putUser(entry.getKey(), entry.getValue());
}
}
nsCache.put(namespace, newTablePerms);
mtime.incrementAndGet();
} | void function(String namespace, ListMultimap<String, TablePermission> tablePerms) { PermissionCache<TablePermission> newTablePerms = new PermissionCache<TablePermission>(); for (Map.Entry<String, TablePermission> entry : tablePerms.entries()) { if (AuthUtil.isGroupPrincipal(entry.getKey())) { newTablePerms.putGroup(AuthUtil.getGroupName(entry.getKey()), entry.getValue()); } else { newTablePerms.putUser(entry.getKey(), entry.getValue()); } } nsCache.put(namespace, newTablePerms); mtime.incrementAndGet(); } | /**
* Updates the internal permissions cache for a single table, splitting
* the permissions listed into separate caches for users and groups to optimize
* group lookups.
*
* @param namespace
* @param tablePerms
*/ | Updates the internal permissions cache for a single table, splitting the permissions listed into separate caches for users and groups to optimize group lookups | updateNsCache | {
"repo_name": "juwi/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java",
"license": "apache-2.0",
"size": 25291
} | [
"com.google.common.collect.ListMultimap",
"java.util.Map",
"org.apache.hadoop.hbase.AuthUtil"
] | import com.google.common.collect.ListMultimap; import java.util.Map; import org.apache.hadoop.hbase.AuthUtil; | import com.google.common.collect.*; import java.util.*; import org.apache.hadoop.hbase.*; | [
"com.google.common",
"java.util",
"org.apache.hadoop"
] | com.google.common; java.util; org.apache.hadoop; | 283,121 |
Response<Void> deleteAuthorizationRuleWithResponse(
String resourceGroupName, String namespaceName, String authorizationRuleName, Context context); | Response<Void> deleteAuthorizationRuleWithResponse( String resourceGroupName, String namespaceName, String authorizationRuleName, Context context); | /**
* Deletes a namespace authorization rule.
*
* @param resourceGroupName The name of the resource group.
* @param namespaceName The namespace name.
* @param authorizationRuleName Authorization Rule Name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/ | Deletes a namespace authorization rule | deleteAuthorizationRuleWithResponse | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/notificationhubs/azure-resourcemanager-notificationhubs/src/main/java/com/azure/resourcemanager/notificationhubs/models/Namespaces.java",
"license": "mit",
"size": 20638
} | [
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context"
] | import com.azure.core.http.rest.Response; import com.azure.core.util.Context; | import com.azure.core.http.rest.*; import com.azure.core.util.*; | [
"com.azure.core"
] | com.azure.core; | 2,533,507 |
void execute(final AbstractInteractor interactor); | void execute(final AbstractInteractor interactor); | /**
* This method should call the interactor's run method and thus start the interactor. This should be called
* on a background thread as interactors might do lengthy operations.
*
* @param interactor The interactor to run.
*/ | This method should call the interactor's run method and thus start the interactor. This should be called on a background thread as interactors might do lengthy operations | execute | {
"repo_name": "pcase/Shopper",
"path": "app/src/main/java/com/azurehorsecreations/shopper/domain/executor/IExecutor.java",
"license": "mit",
"size": 581
} | [
"com.azurehorsecreations.shopper.domain.interactors.base.AbstractInteractor"
] | import com.azurehorsecreations.shopper.domain.interactors.base.AbstractInteractor; | import com.azurehorsecreations.shopper.domain.interactors.base.*; | [
"com.azurehorsecreations.shopper"
] | com.azurehorsecreations.shopper; | 1,167,085 |
@ServiceMethod(returns = ReturnType.SINGLE)
public void unlink(
String resourceGroupName, String serverName, String databaseName, String linkId, Boolean forcedTermination) {
unlinkAsync(resourceGroupName, serverName, databaseName, linkId, forcedTermination).block();
} | @ServiceMethod(returns = ReturnType.SINGLE) void function( String resourceGroupName, String serverName, String databaseName, String linkId, Boolean forcedTermination) { unlinkAsync(resourceGroupName, serverName, databaseName, linkId, forcedTermination).block(); } | /**
* Deletes a database replication link in forced or friendly way.
*
* @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
* from the Azure Resource Manager API or the portal.
* @param serverName The name of the server.
* @param databaseName The name of the database that has the replication link to be failed over.
* @param linkId The ID of the replication link to be failed over.
* @param forcedTermination Determines whether link will be terminated in a forced or a friendly way.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/ | Deletes a database replication link in forced or friendly way | unlink | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-sql/src/main/java/com/azure/resourcemanager/sql/implementation/ReplicationLinksClientImpl.java",
"license": "mit",
"size": 86786
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; | import com.azure.core.annotation.*; | [
"com.azure.core"
] | com.azure.core; | 1,962,880 |
public boolean checkProductAvailability(String prodName) {
boolean isProductAvailable = false;
pDao = new ProductDAO();
isProductAvailable = pDao.getProductAvailability(prodName);
return isProductAvailable;
}
| boolean function(String prodName) { boolean isProductAvailable = false; pDao = new ProductDAO(); isProductAvailable = pDao.getProductAvailability(prodName); return isProductAvailable; } | /**
* checks if a product is available
* @param prodName the name of the product
* @return true if available, else false
*/ | checks if a product is available | checkProductAvailability | {
"repo_name": "elliottpost/lakeshoremarket",
"path": "src/com/online/lakeshoremarket/domain/ProductDomain.java",
"license": "gpl-2.0",
"size": 3270
} | [
"com.online.lakeshoremarket.dao.ProductDAO"
] | import com.online.lakeshoremarket.dao.ProductDAO; | import com.online.lakeshoremarket.dao.*; | [
"com.online.lakeshoremarket"
] | com.online.lakeshoremarket; | 1,526,626 |
public void load(MySQLConnectionWrapper wrapper)
{
if (loaded)
{
throw new RuntimeException("Definitions already loaded.");
}
loadDefinitions(wrapper);
loaded = true;
} | void function(MySQLConnectionWrapper wrapper) { if (loaded) { throw new RuntimeException(STR); } loadDefinitions(wrapper); loaded = true; } | /**
* Load the definitions.
* @param wrapper The connection wrapper to the database.
*/ | Load the definitions | load | {
"repo_name": "ry60003333/sapphire-engine",
"path": "src/com/allgofree/sapphire/definitions/DefinitionManager.java",
"license": "lgpl-3.0",
"size": 2497
} | [
"org.allgofree.mysql.MySQLConnectionWrapper"
] | import org.allgofree.mysql.MySQLConnectionWrapper; | import org.allgofree.mysql.*; | [
"org.allgofree.mysql"
] | org.allgofree.mysql; | 177,162 |
public void sort(Sort s) {
setSortOrder(s.build());
} | void function(Sort s) { setSortOrder(s.build()); } | /**
* Sets the current sort order using the fluid Sort API. Read the
* documentation for {@link Sort} for more information.
*
* @param s
* a sort instance
*/ | Sets the current sort order using the fluid Sort API. Read the documentation for <code>Sort</code> for more information | sort | {
"repo_name": "Peppe/vaadin",
"path": "client/src/com/vaadin/client/widgets/Grid.java",
"license": "apache-2.0",
"size": 306271
} | [
"com.vaadin.client.widget.grid.sort.Sort"
] | import com.vaadin.client.widget.grid.sort.Sort; | import com.vaadin.client.widget.grid.sort.*; | [
"com.vaadin.client"
] | com.vaadin.client; | 1,832,310 |
@Override
public java.math.BigDecimal getQty ()
{
BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_Qty);
if (bd == null)
return Env.ZERO;
return bd;
} | java.math.BigDecimal function () { BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_Qty); if (bd == null) return Env.ZERO; return bd; } | /** Get Menge.
@return Quantity
*/ | Get Menge | getQty | {
"repo_name": "klst-com/metasfresh",
"path": "de.metas.adempiere.adempiere/base/src/main/java-gen/org/compiere/model/X_Fact_Acct.java",
"license": "gpl-2.0",
"size": 39793
} | [
"java.math.BigDecimal",
"org.compiere.util.Env"
] | import java.math.BigDecimal; import org.compiere.util.Env; | import java.math.*; import org.compiere.util.*; | [
"java.math",
"org.compiere.util"
] | java.math; org.compiere.util; | 1,714,912 |
public boolean get() {
return SmartDashboard.getBoolean(key, defaultValue);
} | boolean function() { return SmartDashboard.getBoolean(key, defaultValue); } | /**
* Returns the current value associated with the key. Will return the
* default value when it does not {@link #exists() exist}.
*
* @return current value
*/ | Returns the current value associated with the key. Will return the default value when it does not <code>#exists() exist</code> | get | {
"repo_name": "Team4334/robot-code",
"path": "src/edu/first/util/dashboard/BooleanDashboard.java",
"license": "gpl-3.0",
"size": 2357
} | [
"edu.wpi.first.wpilibj.smartdashboard.SmartDashboard"
] | import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; | import edu.wpi.first.wpilibj.smartdashboard.*; | [
"edu.wpi.first"
] | edu.wpi.first; | 152,620 |
AuthenticationBuilder addSuccesses(Map<String, HandlerResult> successes); | AuthenticationBuilder addSuccesses(Map<String, HandlerResult> successes); | /**
* Adds successes authentication builder.
*
* @param successes the successes
* @return the authentication builder
* @since 4.2.0
*/ | Adds successes authentication builder | addSuccesses | {
"repo_name": "PetrGasparik/cas",
"path": "cas-server-core-api-authentication/src/main/java/org/jasig/cas/authentication/AuthenticationBuilder.java",
"license": "apache-2.0",
"size": 4727
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,098,382 |
interface WithCertificate {
WithPfxFilePassword withPfxFile(File file); | interface WithCertificate { WithPfxFilePassword withPfxFile(File file); | /**
* Specifies the PFX certificate file to upload.
* @param file the PFX certificate file
* @return the next stage of the app service certificate definition
*/ | Specifies the PFX certificate file to upload | withPfxFile | {
"repo_name": "pomortaz/azure-sdk-for-java",
"path": "azure-mgmt-appservice/src/main/java/com/microsoft/azure/management/appservice/AppServiceCertificate.java",
"license": "mit",
"size": 5723
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 621,254 |
public CommandManager getPacketProcessor ()
{
return processor;
} | CommandManager function () { return processor; } | /**
* Gets packet processor.
*
* @return the packet processor
*/ | Gets packet processor | getPacketProcessor | {
"repo_name": "NeoSmartpen/AndroidSDK2.0",
"path": "NASDK2.0_Studio/app/src/main/java/kr/neolab/sdk/pen/bluetooth/BTLEAdt.java",
"license": "gpl-3.0",
"size": 78761
} | [
"kr.neolab.sdk.pen.bluetooth.cmd.CommandManager"
] | import kr.neolab.sdk.pen.bluetooth.cmd.CommandManager; | import kr.neolab.sdk.pen.bluetooth.cmd.*; | [
"kr.neolab.sdk"
] | kr.neolab.sdk; | 1,670,720 |
PropertyAdmin getPropertyAdmin(String property) throws PulsarAdminException; | PropertyAdmin getPropertyAdmin(String property) throws PulsarAdminException; | /**
* Get the config of the property.
* <p>
* Get the admin configuration for a given property.
*
* @param property
* Property name
* @return the property configuration
*
* @throws NotAuthorizedException
* Don't have admin permission
* @throws NotFoundException
* Property does not exist
* @throws PulsarAdminException
* Unexpected error
*/ | Get the config of the property. Get the admin configuration for a given property | getPropertyAdmin | {
"repo_name": "yush1ga/pulsar",
"path": "pulsar-client-admin/src/main/java/org/apache/pulsar/client/admin/Properties.java",
"license": "apache-2.0",
"size": 4282
} | [
"org.apache.pulsar.common.policies.data.PropertyAdmin"
] | import org.apache.pulsar.common.policies.data.PropertyAdmin; | import org.apache.pulsar.common.policies.data.*; | [
"org.apache.pulsar"
] | org.apache.pulsar; | 1,570,591 |
public static String getMimeType(String url) {
String type = null;
String extension = MimeTypeMap.getFileExtensionFromUrl(url);
if (extension != null) {
type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
}
return type;
} | static String function(String url) { String type = null; String extension = MimeTypeMap.getFileExtensionFromUrl(url); if (extension != null) { type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension); } return type; } | /**
* get mime type of given file
* @param url file path or url
* @return
*/ | get mime type of given file | getMimeType | {
"repo_name": "cloverstudio/Spika-Android",
"path": "app/src/main/java/com/clover_studio/spikachatmodule/utils/Tools.java",
"license": "mit",
"size": 13958
} | [
"android.webkit.MimeTypeMap"
] | import android.webkit.MimeTypeMap; | import android.webkit.*; | [
"android.webkit"
] | android.webkit; | 2,520,708 |
void saveUserPrefs(UserPrefs userPrefs) throws IOException; | void saveUserPrefs(UserPrefs userPrefs) throws IOException; | /**
* Saves the given {@link seedu.task.model.UserPrefs} to the storage.
* @param userPrefs cannot be null.
* @throws IOException if there was any problem writing to the file.
*/ | Saves the given <code>seedu.task.model.UserPrefs</code> to the storage | saveUserPrefs | {
"repo_name": "CS2103JAN2017-F11-B3/main",
"path": "src/main/java/seedu/task/storage/UserPrefsStorage.java",
"license": "mit",
"size": 956
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,190,568 |
public File getFile() {
return file;
} | File function() { return file; } | /**
* Returns file reference.
*
* @return file reference
*/ | Returns file reference | getFile | {
"repo_name": "1Tristan/VariantSync",
"path": "src/de/ovgu/variantsync/applicationlayer/datamodel/resources/ResourceChangesFilePatch.java",
"license": "lgpl-3.0",
"size": 5302
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 908,279 |
@Redirect(method = "aiStep", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/entity/boss/enderdragon/phases/DragonPhaseInstance;getFlyTargetLocation()Lnet/minecraft/world/phys/Vec3;"))
@Nullable
private Vec3 impl$getTargetLocationOrNull(final DragonPhaseInstance phase) {
final Vec3 target = phase.getFlyTargetLocation();
if (target != null && target.x == this.shadow$getX() && target.z == this.shadow$getZ()) {
return null; // Skips the movement code
}
return target;
} | @Redirect(method = STR, at = @At(value = STR, target = STR)) private Vec3 impl$getTargetLocationOrNull(final DragonPhaseInstance phase) { final Vec3 target = phase.getFlyTargetLocation(); if (target != null && target.x == this.shadow$getX() && target.z == this.shadow$getZ()) { return null; } return target; } | /**
* Fixes a hidden divide-by-zero error when {@link DragonHoverPhase} returns the
* current location as the target location.
*
* @author JBYoshi
*/ | Fixes a hidden divide-by-zero error when <code>DragonHoverPhase</code> returns the current location as the target location | impl$getTargetLocationOrNull | {
"repo_name": "SpongePowered/Sponge",
"path": "src/mixins/java/org/spongepowered/common/mixin/core/world/entity/boss/enderdragon/EnderDragonMixin.java",
"license": "mit",
"size": 4212
} | [
"net.minecraft.world.entity.boss.enderdragon.phases.DragonPhaseInstance",
"net.minecraft.world.phys.Vec3",
"org.spongepowered.asm.mixin.injection.At",
"org.spongepowered.asm.mixin.injection.Redirect"
] | import net.minecraft.world.entity.boss.enderdragon.phases.DragonPhaseInstance; import net.minecraft.world.phys.Vec3; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Redirect; | import net.minecraft.world.entity.boss.enderdragon.phases.*; import net.minecraft.world.phys.*; import org.spongepowered.asm.mixin.injection.*; | [
"net.minecraft.world",
"org.spongepowered.asm"
] | net.minecraft.world; org.spongepowered.asm; | 2,777,980 |
@Test(timeout = 120000)
public void testDecommission2NodeWithBusyNode() throws Exception {
byte busyDNIndex = 6;
byte decommissionDNIndex = 6;
byte decommissionDNIndex2 = 8;
//1. create EC file
final Path ecFile = new Path(ecDir, "testDecommission2NodeWithBusyNode");
int writeBytes = cellSize * dataBlocks;
writeStripedFile(dfs, ecFile, writeBytes);
Assert.assertEquals(0, bm.numOfUnderReplicatedBlocks());
FileChecksum fileChecksum1 = dfs.getFileChecksum(ecFile, writeBytes);
//2. make once DN busy
final INodeFile fileNode = cluster.getNamesystem().getFSDirectory()
.getINode4Write(ecFile.toString()).asFile();
BlockInfo firstBlock = fileNode.getBlocks()[0];
DatanodeStorageInfo[] dnStorageInfos = bm.getStorages(firstBlock);
DatanodeDescriptor busyNode = dnStorageInfos[busyDNIndex]
.getDatanodeDescriptor();
for (int j = 0; j < replicationStreamsHardLimit; j++) {
busyNode.incrementPendingReplicationWithoutTargets();
}
//3. decommissioning one node
List<DatanodeInfo> decommissionNodes = new ArrayList<>();
decommissionNodes.add(dnStorageInfos[decommissionDNIndex]
.getDatanodeDescriptor());
decommissionNodes.add(dnStorageInfos[decommissionDNIndex2]
.getDatanodeDescriptor());
decommissionNode(0, decommissionNodes, AdminStates.DECOMMISSION_INPROGRESS);
//4. wait for decommissioning and not busy block to replicate(9-2+1=8)
GenericTestUtils.waitFor(
() -> bm.countNodes(firstBlock).liveReplicas() >= 8,
100, 60000);
//5. release busy DN, make the decommissioning and busy block can replicate
busyNode.decrementPendingReplicationWithoutTargets();
//6. decommissioned one node,make the decommission finished
decommissionNode(0, decommissionNodes, AdminStates.DECOMMISSIONED);
//7. Busy DN shouldn't be reconstructed
DatanodeStorageInfo[] newDnStorageInfos = bm.getStorages(firstBlock);
Assert.assertEquals("Busy DN shouldn't be reconstructed",
dnStorageInfos[busyDNIndex].getStorageID(),
newDnStorageInfos[busyDNIndex].getStorageID());
//8. check the checksum of a file
FileChecksum fileChecksum2 = dfs.getFileChecksum(ecFile, writeBytes);
Assert.assertEquals("Checksum mismatches!", fileChecksum1, fileChecksum2);
//9. check the data is correct
StripedFileTestUtil.checkData(dfs, ecFile, writeBytes, decommissionNodes,
null, blockGroupSize);
} | @Test(timeout = 120000) void function() throws Exception { byte busyDNIndex = 6; byte decommissionDNIndex = 6; byte decommissionDNIndex2 = 8; final Path ecFile = new Path(ecDir, STR); int writeBytes = cellSize * dataBlocks; writeStripedFile(dfs, ecFile, writeBytes); Assert.assertEquals(0, bm.numOfUnderReplicatedBlocks()); FileChecksum fileChecksum1 = dfs.getFileChecksum(ecFile, writeBytes); final INodeFile fileNode = cluster.getNamesystem().getFSDirectory() .getINode4Write(ecFile.toString()).asFile(); BlockInfo firstBlock = fileNode.getBlocks()[0]; DatanodeStorageInfo[] dnStorageInfos = bm.getStorages(firstBlock); DatanodeDescriptor busyNode = dnStorageInfos[busyDNIndex] .getDatanodeDescriptor(); for (int j = 0; j < replicationStreamsHardLimit; j++) { busyNode.incrementPendingReplicationWithoutTargets(); } List<DatanodeInfo> decommissionNodes = new ArrayList<>(); decommissionNodes.add(dnStorageInfos[decommissionDNIndex] .getDatanodeDescriptor()); decommissionNodes.add(dnStorageInfos[decommissionDNIndex2] .getDatanodeDescriptor()); decommissionNode(0, decommissionNodes, AdminStates.DECOMMISSION_INPROGRESS); GenericTestUtils.waitFor( () -> bm.countNodes(firstBlock).liveReplicas() >= 8, 100, 60000); busyNode.decrementPendingReplicationWithoutTargets(); decommissionNode(0, decommissionNodes, AdminStates.DECOMMISSIONED); DatanodeStorageInfo[] newDnStorageInfos = bm.getStorages(firstBlock); Assert.assertEquals(STR, dnStorageInfos[busyDNIndex].getStorageID(), newDnStorageInfos[busyDNIndex].getStorageID()); FileChecksum fileChecksum2 = dfs.getFileChecksum(ecFile, writeBytes); Assert.assertEquals(STR, fileChecksum1, fileChecksum2); StripedFileTestUtil.checkData(dfs, ecFile, writeBytes, decommissionNodes, null, blockGroupSize); } | /**
* Decommission may generate the parity block's content with all 0
* in some case.
* @throws Exception
*/ | Decommission may generate the parity block's content with all 0 in some case | testDecommission2NodeWithBusyNode | {
"repo_name": "plusplusjiajia/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommissionWithStriped.java",
"license": "apache-2.0",
"size": 44516
} | [
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.fs.FileChecksum",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hdfs.protocol.DatanodeInfo",
"org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo",
"org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor",
"org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo",
"org.apache.hadoop.hdfs.server.namenode.INodeFile",
"org.apache.hadoop.test.GenericTestUtils",
"org.junit.Assert",
"org.junit.Test"
] | import java.util.ArrayList; import java.util.List; import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo; import org.apache.hadoop.hdfs.server.namenode.INodeFile; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; import org.junit.Test; | import java.util.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.blockmanagement.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.test.*; import org.junit.*; | [
"java.util",
"org.apache.hadoop",
"org.junit"
] | java.util; org.apache.hadoop; org.junit; | 2,288,249 |
public int getRandomNonZeroElement(SecureRandom sr)
{
int controltime = 1 << 20;
int count = 0;
int result = RandUtils.nextInt(sr, 1 << degree);
while ((result == 0) && (count < controltime))
{
result = RandUtils.nextInt(sr, 1 << degree);
count++;
}
if (count == controltime)
{
result = 1;
}
return result;
} | int function(SecureRandom sr) { int controltime = 1 << 20; int count = 0; int result = RandUtils.nextInt(sr, 1 << degree); while ((result == 0) && (count < controltime)) { result = RandUtils.nextInt(sr, 1 << degree); count++; } if (count == controltime) { result = 1; } return result; } | /**
* create a random non-zero field element using PRNG sr
*
* @param sr SecureRandom
* @return a random non-zero element
*/ | create a random non-zero field element using PRNG sr | getRandomNonZeroElement | {
"repo_name": "sake/bouncycastle-java",
"path": "src/org/bouncycastle/pqc/math/linearalgebra/GF2mField.java",
"license": "mit",
"size": 8309
} | [
"java.security.SecureRandom"
] | import java.security.SecureRandom; | import java.security.*; | [
"java.security"
] | java.security; | 2,540,068 |
public static PGPPublicKeyRing insertPublicKey(
PGPPublicKeyRing pubRing,
PGPPublicKey pubKey)
{
List keys = new ArrayList(pubRing.keys);
boolean found = false;
boolean masterFound = false;
for (int i = 0; i != keys.size();i++)
{
PGPPublicKey key = (PGPPublicKey)keys.get(i);
if (key.getKeyID() == pubKey.getKeyID())
{
found = true;
keys.set(i, pubKey);
}
if (key.isMasterKey())
{
masterFound = true;
}
}
if (!found)
{
if (pubKey.isMasterKey())
{
if (masterFound)
{
throw new IllegalArgumentException("cannot add a master key to a ring that already has one");
}
keys.add(0, pubKey);
}
else
{
keys.add(pubKey);
}
}
return new PGPPublicKeyRing(keys);
} | static PGPPublicKeyRing function( PGPPublicKeyRing pubRing, PGPPublicKey pubKey) { List keys = new ArrayList(pubRing.keys); boolean found = false; boolean masterFound = false; for (int i = 0; i != keys.size();i++) { PGPPublicKey key = (PGPPublicKey)keys.get(i); if (key.getKeyID() == pubKey.getKeyID()) { found = true; keys.set(i, pubKey); } if (key.isMasterKey()) { masterFound = true; } } if (!found) { if (pubKey.isMasterKey()) { if (masterFound) { throw new IllegalArgumentException(STR); } keys.add(0, pubKey); } else { keys.add(pubKey); } } return new PGPPublicKeyRing(keys); } | /**
* Returns a new key ring with the public key passed in
* either added or replacing an existing one.
*
* @param pubRing the public key ring to be modified
* @param pubKey the public key to be inserted.
* @return a new keyRing
*/ | Returns a new key ring with the public key passed in either added or replacing an existing one | insertPublicKey | {
"repo_name": "iseki-masaya/spongycastle",
"path": "pg/src/main/java/org/spongycastle/openpgp/PGPPublicKeyRing.java",
"license": "mit",
"size": 6828
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,728,664 |
public void persistAssociations(Entity baseBean, String linkName, List<Key> selectedKeys, RequestContext ctx) throws DbException {
Link link = baseBean.getBackRef(linkName);
EntityModel entityModel = EntityManager.getEntityModel(link.getModel().getEntityName());
String associatedLinkName = entityModel.getAssociatedLink(linkName);
for (Key selectedKey : selectedKeys) {
// For each selected element we'll create an association.
Entity association = DomainUtils.newDomain(link.getModel().getEntityName());
association.setForeignKey(associatedLinkName, selectedKey);
association.setForeignKey(link.getModel().getLinkName(), baseBean.getPrimaryKey());
association.getLink(linkName).setEntity(baseBean);
DB.persist(association, new Action(Constants.SELECT, Constants.SELECT), ctx);
}
} | void function(Entity baseBean, String linkName, List<Key> selectedKeys, RequestContext ctx) throws DbException { Link link = baseBean.getBackRef(linkName); EntityModel entityModel = EntityManager.getEntityModel(link.getModel().getEntityName()); String associatedLinkName = entityModel.getAssociatedLink(linkName); for (Key selectedKey : selectedKeys) { Entity association = DomainUtils.newDomain(link.getModel().getEntityName()); association.setForeignKey(associatedLinkName, selectedKey); association.setForeignKey(link.getModel().getLinkName(), baseBean.getPrimaryKey()); association.getLink(linkName).setEntity(baseBean); DB.persist(association, new Action(Constants.SELECT, Constants.SELECT), ctx); } } | /**
* Persists entities' associations into database.
*
* @param baseBean
* Domain object linked to associations to persist.
* @param linkName
* Name of the link between {@code baseBean} and associations to persist.
* @param selectedKeys
* Keys of the associations to persist.
* @param ctx
* Current context.
* @throws DbException
* Exception thrown if an error occurs.
*/ | Persists entities' associations into database | persistAssociations | {
"repo_name": "Micht69/shoplist",
"path": "src/fr/logica/db/DbEntity.java",
"license": "mit",
"size": 15381
} | [
"fr.logica.business.Action",
"fr.logica.business.Constants",
"fr.logica.business.Entity",
"fr.logica.business.EntityManager",
"fr.logica.business.EntityModel",
"fr.logica.business.Key",
"fr.logica.business.Link",
"fr.logica.business.context.RequestContext",
"fr.logica.reflect.DomainUtils",
"java.util.List"
] | import fr.logica.business.Action; import fr.logica.business.Constants; import fr.logica.business.Entity; import fr.logica.business.EntityManager; import fr.logica.business.EntityModel; import fr.logica.business.Key; import fr.logica.business.Link; import fr.logica.business.context.RequestContext; import fr.logica.reflect.DomainUtils; import java.util.List; | import fr.logica.business.*; import fr.logica.business.context.*; import fr.logica.reflect.*; import java.util.*; | [
"fr.logica.business",
"fr.logica.reflect",
"java.util"
] | fr.logica.business; fr.logica.reflect; java.util; | 2,754,287 |
private void load (Element monitorDocElem) throws InvalidFileFormat, BadXMLDocumentException {
Element jobDefinitionsElem = XMLPersistenceHelper.getChildElement(monitorDocElem, MonitorSaveConstants.JobDescriptions);
if (jobDefinitionsElem == null) {
throw new InvalidFileFormat ("JobDescriptions element missing");
}
List<Element> jobDefinitions = XMLPersistenceHelper.getChildElements(jobDefinitionsElem);
for (final Element element : jobDefinitions) {
MonitorJobDescription jobDescription = MonitorJobDescription.Load (element);
if (jobDescription != null) {
addJobDescription(jobDescription);
}
}
}
| void function (Element monitorDocElem) throws InvalidFileFormat, BadXMLDocumentException { Element jobDefinitionsElem = XMLPersistenceHelper.getChildElement(monitorDocElem, MonitorSaveConstants.JobDescriptions); if (jobDefinitionsElem == null) { throw new InvalidFileFormat (STR); } List<Element> jobDefinitions = XMLPersistenceHelper.getChildElements(jobDefinitionsElem); for (final Element element : jobDefinitions) { MonitorJobDescription jobDescription = MonitorJobDescription.Load (element); if (jobDescription != null) { addJobDescription(jobDescription); } } } | /**
* Method load
*
* @param monitorDocElem
*/ | Method load | load | {
"repo_name": "levans/Open-Quark",
"path": "src/BAM_Sample/org/openquark/samples/bam/model/MonitorDocument.java",
"license": "bsd-3-clause",
"size": 11590
} | [
"java.util.List",
"org.openquark.util.xml.BadXMLDocumentException",
"org.openquark.util.xml.XMLPersistenceHelper",
"org.w3c.dom.Element"
] | import java.util.List; import org.openquark.util.xml.BadXMLDocumentException; import org.openquark.util.xml.XMLPersistenceHelper; import org.w3c.dom.Element; | import java.util.*; import org.openquark.util.xml.*; import org.w3c.dom.*; | [
"java.util",
"org.openquark.util",
"org.w3c.dom"
] | java.util; org.openquark.util; org.w3c.dom; | 664,581 |
@Test
public void testToString() {
final Tuple2<String, String> t = tuple2("a", "b");
assertThat(t.toString(), is(equalTo("(a, b)")));
} | void function() { final Tuple2<String, String> t = tuple2("a", "b"); assertThat(t.toString(), is(equalTo(STR))); } | /**
* Test method for {@link de.vorb.util.tuple.Tuple2#toString()}.
*/ | Test method for <code>de.vorb.util.tuple.Tuple2#toString()</code> | testToString | {
"repo_name": "pvorb/tuple",
"path": "src/test/java/de/vorb/util/tuple/test/Tuple2Test.java",
"license": "mit",
"size": 4246
} | [
"de.vorb.util.tuple.Tuple",
"de.vorb.util.tuple.Tuple2",
"org.hamcrest.CoreMatchers",
"org.junit.Assert"
] | import de.vorb.util.tuple.Tuple; import de.vorb.util.tuple.Tuple2; import org.hamcrest.CoreMatchers; import org.junit.Assert; | import de.vorb.util.tuple.*; import org.hamcrest.*; import org.junit.*; | [
"de.vorb.util",
"org.hamcrest",
"org.junit"
] | de.vorb.util; org.hamcrest; org.junit; | 1,700,808 |
public double[] generateNoiseOctaves(double[] p_76304_1_, int p_76304_2_, int p_76304_3_, int p_76304_4_, int p_76304_5_, int p_76304_6_, int p_76304_7_, double p_76304_8_, double p_76304_10_, double p_76304_12_)
{
if (p_76304_1_ == null)
{
p_76304_1_ = new double[p_76304_5_ * p_76304_6_ * p_76304_7_];
}
else
{
for (int k1 = 0; k1 < p_76304_1_.length; ++k1)
{
p_76304_1_[k1] = 0.0D;
}
}
double d6 = 1.0D;
for (int l1 = 0; l1 < this.octaves; ++l1)
{
double d3 = (double)p_76304_2_ * d6 * p_76304_8_;
double d4 = (double)p_76304_3_ * d6 * p_76304_10_;
double d5 = (double)p_76304_4_ * d6 * p_76304_12_;
long i2 = MathHelper.floor_double_long(d3);
long j2 = MathHelper.floor_double_long(d5);
d3 -= (double)i2;
d5 -= (double)j2;
i2 %= 16777216L;
j2 %= 16777216L;
d3 += (double)i2;
d5 += (double)j2;
this.generatorCollection[l1].populateNoiseArray(p_76304_1_, d3, d4, d5, p_76304_5_, p_76304_6_, p_76304_7_, p_76304_8_ * d6, p_76304_10_ * d6, p_76304_12_ * d6, d6);
d6 /= 2.0D;
}
return p_76304_1_;
} | double[] function(double[] p_76304_1_, int p_76304_2_, int p_76304_3_, int p_76304_4_, int p_76304_5_, int p_76304_6_, int p_76304_7_, double p_76304_8_, double p_76304_10_, double p_76304_12_) { if (p_76304_1_ == null) { p_76304_1_ = new double[p_76304_5_ * p_76304_6_ * p_76304_7_]; } else { for (int k1 = 0; k1 < p_76304_1_.length; ++k1) { p_76304_1_[k1] = 0.0D; } } double d6 = 1.0D; for (int l1 = 0; l1 < this.octaves; ++l1) { double d3 = (double)p_76304_2_ * d6 * p_76304_8_; double d4 = (double)p_76304_3_ * d6 * p_76304_10_; double d5 = (double)p_76304_4_ * d6 * p_76304_12_; long i2 = MathHelper.floor_double_long(d3); long j2 = MathHelper.floor_double_long(d5); d3 -= (double)i2; d5 -= (double)j2; i2 %= 16777216L; j2 %= 16777216L; d3 += (double)i2; d5 += (double)j2; this.generatorCollection[l1].populateNoiseArray(p_76304_1_, d3, d4, d5, p_76304_5_, p_76304_6_, p_76304_7_, p_76304_8_ * d6, p_76304_10_ * d6, p_76304_12_ * d6, d6); d6 /= 2.0D; } return p_76304_1_; } | /**
* pars:(par2,3,4=noiseOffset ; so that adjacent noise segments connect) (pars5,6,7=x,y,zArraySize),(pars8,10,12 =
* x,y,z noiseScale)
*/ | pars:(par2,3,4=noiseOffset ; so that adjacent noise segments connect) (pars5,6,7=x,y,zArraySize),(pars8,10,12 = x,y,z noiseScale) | generateNoiseOctaves | {
"repo_name": "trixmot/mod1",
"path": "build/tmp/recompileMc/sources/net/minecraft/world/gen/NoiseGeneratorOctaves.java",
"license": "lgpl-2.1",
"size": 2653
} | [
"net.minecraft.util.MathHelper"
] | import net.minecraft.util.MathHelper; | import net.minecraft.util.*; | [
"net.minecraft.util"
] | net.minecraft.util; | 4,640 |
public AccessibleRelationSet getAccessibleRelationSet() {
return ac.getAccessibleRelationSet();
} | AccessibleRelationSet function() { return ac.getAccessibleRelationSet(); } | /**
* Gets the AccessibleRelationSet associated with an object
*
* @return an AccessibleRelationSet if supported by object;
* otherwise return null
* @see AccessibleRelationSet
*/ | Gets the AccessibleRelationSet associated with an object | getAccessibleRelationSet | {
"repo_name": "mirkosertic/Bytecoder",
"path": "classlib/java.desktop/src/main/resources/META-INF/modules/java.desktop/classes/javax/swing/JComboBox.java",
"license": "apache-2.0",
"size": 92256
} | [
"javax.accessibility.AccessibleRelationSet"
] | import javax.accessibility.AccessibleRelationSet; | import javax.accessibility.*; | [
"javax.accessibility"
] | javax.accessibility; | 2,587,691 |
Topology topology = get(TopologyService.class).currentTopology();
ObjectNode root =
codec(Topology.class).encode(topology, this);
return ok(root).build();
} | Topology topology = get(TopologyService.class).currentTopology(); ObjectNode root = codec(Topology.class).encode(topology, this); return ok(root).build(); } | /**
* Gets the topology overview for a REST GET operation.
*
* @return topology overview
*/ | Gets the topology overview for a REST GET operation | getTopology | {
"repo_name": "kuangrewawa/onos",
"path": "web/api/src/main/java/org/onosproject/rest/TopologyWebResource.java",
"license": "apache-2.0",
"size": 8541
} | [
"com.fasterxml.jackson.databind.node.ObjectNode",
"org.onosproject.net.topology.Topology",
"org.onosproject.net.topology.TopologyService"
] | import com.fasterxml.jackson.databind.node.ObjectNode; import org.onosproject.net.topology.Topology; import org.onosproject.net.topology.TopologyService; | import com.fasterxml.jackson.databind.node.*; import org.onosproject.net.topology.*; | [
"com.fasterxml.jackson",
"org.onosproject.net"
] | com.fasterxml.jackson; org.onosproject.net; | 1,807,219 |
public void setExplicitCommandPath(File path) {
this.explicitCommandPath = path;
} | void function(File path) { this.explicitCommandPath = path; } | /**
* Sets the explicit Hadoop command path.
* @param path the Hadoop command path, or {@code null} if it should be inferred
*/ | Sets the explicit Hadoop command path | setExplicitCommandPath | {
"repo_name": "cocoatomo/asakusafw",
"path": "testing-project/asakusa-test-moderator/src/main/java/com/asakusafw/testdriver/hadoop/ConfigurationFactory.java",
"license": "apache-2.0",
"size": 8394
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 2,818,305 |
public void testSetTimesMissingPartially() throws Exception {
if (!timesSupported())
return;
create(igfs, paths(DIR, SUBDIR), null);
createFile(igfsSecondary, FILE, chunk);
final long MAX_ALIGN_ON_SECOND = (long)Integer.MAX_VALUE * 1000;
igfs.setTimes(FILE, MAX_ALIGN_ON_SECOND - 1000, MAX_ALIGN_ON_SECOND);
IgfsFile info = igfs.info(FILE);
assert info != null;
assertEquals(MAX_ALIGN_ON_SECOND - 1000, info.accessTime());
assertEquals(MAX_ALIGN_ON_SECOND, info.modificationTime());
T2<Long, Long> secondaryTimes = igfsSecondary.times(FILE.toString());
assertEquals(info.accessTime(), (long) secondaryTimes.get1());
assertEquals(info.modificationTime(), (long) secondaryTimes.get2());
try {
igfs.setTimes(FILE2, MAX_ALIGN_ON_SECOND, MAX_ALIGN_ON_SECOND);
fail("Exception is not thrown for missing file.");
} catch (Exception ignore) {
// No-op.
}
} | void function() throws Exception { if (!timesSupported()) return; create(igfs, paths(DIR, SUBDIR), null); createFile(igfsSecondary, FILE, chunk); final long MAX_ALIGN_ON_SECOND = (long)Integer.MAX_VALUE * 1000; igfs.setTimes(FILE, MAX_ALIGN_ON_SECOND - 1000, MAX_ALIGN_ON_SECOND); IgfsFile info = igfs.info(FILE); assert info != null; assertEquals(MAX_ALIGN_ON_SECOND - 1000, info.accessTime()); assertEquals(MAX_ALIGN_ON_SECOND, info.modificationTime()); T2<Long, Long> secondaryTimes = igfsSecondary.times(FILE.toString()); assertEquals(info.accessTime(), (long) secondaryTimes.get1()); assertEquals(info.modificationTime(), (long) secondaryTimes.get2()); try { igfs.setTimes(FILE2, MAX_ALIGN_ON_SECOND, MAX_ALIGN_ON_SECOND); fail(STR); } catch (Exception ignore) { } } | /**
* Test setTimes method when path is partially missing.
*
* @throws Exception If failed.
*/ | Test setTimes method when path is partially missing | testSetTimesMissingPartially | {
"repo_name": "pperalta/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsDualAbstractSelfTest.java",
"license": "apache-2.0",
"size": 57219
} | [
"org.apache.ignite.igfs.IgfsFile"
] | import org.apache.ignite.igfs.IgfsFile; | import org.apache.ignite.igfs.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 2,192,859 |
public ImageDescriptor getImageDescriptor(); | ImageDescriptor function(); | /**
* Returns the descriptor of the image to use for a file of this type.
* <p>
* The image is obtained from the default editor. A default file image is
* returned if no default editor is available.
* </p>
*
* @return the descriptor of the image to use for a resource of this type
*/ | Returns the descriptor of the image to use for a file of this type. The image is obtained from the default editor. A default file image is returned if no default editor is available. | getImageDescriptor | {
"repo_name": "ghillairet/gef-gwt",
"path": "src/main/java/org/eclipse/ui/IFileEditorMapping.java",
"license": "epl-1.0",
"size": 2902
} | [
"org.eclipse.jface.resource.ImageDescriptor"
] | import org.eclipse.jface.resource.ImageDescriptor; | import org.eclipse.jface.resource.*; | [
"org.eclipse.jface"
] | org.eclipse.jface; | 1,504,676 |
public static FsPermission getFilePermissions(final FileSystem fs,
final Configuration conf, final String permssionConfKey) {
boolean enablePermissions = conf.getBoolean(
HConstants.ENABLE_DATA_FILE_UMASK, false);
if (enablePermissions) {
try {
FsPermission perm = new FsPermission(FULL_RWX_PERMISSIONS);
// make sure that we have a mask, if not, go default.
String mask = conf.get(permssionConfKey);
if (mask == null)
return FsPermission.getDefault();
// apply the umask
FsPermission umask = new FsPermission(mask);
return perm.applyUMask(umask);
} catch (IllegalArgumentException e) {
LOG.warn(
"Incorrect umask attempted to be created: "
+ conf.get(permssionConfKey)
+ ", using default file permissions.", e);
return FsPermission.getDefault();
}
}
return FsPermission.getDefault();
} | static FsPermission function(final FileSystem fs, final Configuration conf, final String permssionConfKey) { boolean enablePermissions = conf.getBoolean( HConstants.ENABLE_DATA_FILE_UMASK, false); if (enablePermissions) { try { FsPermission perm = new FsPermission(FULL_RWX_PERMISSIONS); String mask = conf.get(permssionConfKey); if (mask == null) return FsPermission.getDefault(); FsPermission umask = new FsPermission(mask); return perm.applyUMask(umask); } catch (IllegalArgumentException e) { LOG.warn( STR + conf.get(permssionConfKey) + STR, e); return FsPermission.getDefault(); } } return FsPermission.getDefault(); } | /**
* Get the file permissions specified in the configuration, if they are
* enabled.
*
* @param fs filesystem that the file will be created on.
* @param conf configuration to read for determining if permissions are
* enabled and which to use
* @param permssionConfKey property key in the configuration to use when
* finding the permission
* @return the permission to use when creating a new file on the fs. If
* special permissions are not specified in the configuration, then
* the default permissions on the the fs will be returned.
*/ | Get the file permissions specified in the configuration, if they are enabled | getFilePermissions | {
"repo_name": "StumbleUponArchive/hbase",
"path": "src/main/java/org/apache/hadoop/hbase/util/FSUtils.java",
"license": "apache-2.0",
"size": 41243
} | [
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.permission.FsPermission",
"org.apache.hadoop.hbase.HConstants"
] | import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.HConstants; | import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.*; import org.apache.hadoop.hbase.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 202,554 |
public static File chooseFile(final Component parent, final File file, final boolean open, final boolean onlyDirs,
final String extension, final String extensionDescription) {
return chooseFile(parent, null, file, open, onlyDirs, extension, extensionDescription);
} | static File function(final Component parent, final File file, final boolean open, final boolean onlyDirs, final String extension, final String extensionDescription) { return chooseFile(parent, null, file, open, onlyDirs, extension, extensionDescription); } | /**
* Opens a file chooser with a reasonable start directory. If the extension is null, no file
* filters will be used. This method allows choosing directories.
*/ | Opens a file chooser with a reasonable start directory. If the extension is null, no file filters will be used. This method allows choosing directories | chooseFile | {
"repo_name": "rapidminer/rapidminer-studio",
"path": "src/main/java/com/rapidminer/gui/tools/SwingTools.java",
"license": "agpl-3.0",
"size": 93902
} | [
"java.awt.Component",
"java.io.File"
] | import java.awt.Component; import java.io.File; | import java.awt.*; import java.io.*; | [
"java.awt",
"java.io"
] | java.awt; java.io; | 1,249,767 |
protected boolean ifRunning(BooleanSupplier function) {
try {
runningLock.lock();
if (running) {
return function.getAsBoolean();
}
return false;
} finally {
runningLock.unlock();
}
} | boolean function(BooleanSupplier function) { try { runningLock.lock(); if (running) { return function.getAsBoolean(); } return false; } finally { runningLock.unlock(); } } | /**
* Call the supplied function that returns a value if and only if this node is running.
*
* @param function the function; may not be null
* @return {@code false} if the node was not running, or the result of the function if running
*/ | Call the supplied function that returns a value if and only if this node is running | ifRunning | {
"repo_name": "rhauch/debezium-proto",
"path": "debezium/src/main/java/org/debezium/driver/DbzNode.java",
"license": "apache-2.0",
"size": 21353
} | [
"java.util.function.BooleanSupplier"
] | import java.util.function.BooleanSupplier; | import java.util.function.*; | [
"java.util"
] | java.util; | 843,931 |
@SuppressWarnings("unchecked")
private IfStatement createOnClickListenerForRadioButtons(LayoutNode node)
{
IfStatement ifSt;
Modifier privateMod = typeDeclaration.getAST().newModifier(ModifierKeyword.PRIVATE_KEYWORD);
SimpleType listenerType =
getListenerSimpleType(JavaViewBasedOnLayoutModifierConstants.VIEW_CLASS,
JavaViewBasedOnLayoutModifierConstants.METHOD_ON_CLICK_LISTENER);
VariableDeclarationFragment variableFragment =
typeDeclaration.getAST().newVariableDeclarationFragment();
SimpleName varName =
variableFragment.getAST().newSimpleName(
JavaViewBasedOnLayoutModifierConstants.HANDLER_ONCLICK_LISTENER);
variableFragment.setName(varName);
FieldDeclaration declaration =
typeDeclaration.getAST().newFieldDeclaration(variableFragment);
declaration.modifiers().add(privateMod);
declaration.setType(listenerType);
ClassInstanceCreation classInstanceCreation =
typeDeclaration.getAST().newClassInstanceCreation();
SimpleType listenerType2 =
getListenerSimpleType(JavaViewBasedOnLayoutModifierConstants.VIEW_CLASS,
JavaViewBasedOnLayoutModifierConstants.METHOD_ON_CLICK_LISTENER);
classInstanceCreation.setType(listenerType2);
AnonymousClassDeclaration classDeclaration =
typeDeclaration.getAST().newAnonymousClassDeclaration();
MethodDeclaration methodDeclaration =
addMethodDeclaration(ModifierKeyword.PUBLIC_KEYWORD,
JavaViewBasedOnLayoutModifierConstants.METHOD_NAME_ON_CLICK,
PrimitiveType.VOID, JavaViewBasedOnLayoutModifierConstants.VIEW_CLASS,
JavaViewBasedOnLayoutModifierConstants.VIEW_VARIABLE_NAME);
Block block = typeDeclaration.getAST().newBlock();
ifSt = createElseIfForEachRadioButtonId(node);
block.statements().add(ifSt);
methodDeclaration.setBody(block);
classDeclaration.bodyDeclarations().add(methodDeclaration);
classInstanceCreation.setAnonymousClassDeclaration(classDeclaration);
variableFragment.setInitializer(classInstanceCreation);
typeDeclaration.bodyDeclarations().add(declaration);
return ifSt;
}
| @SuppressWarnings(STR) IfStatement function(LayoutNode node) { IfStatement ifSt; Modifier privateMod = typeDeclaration.getAST().newModifier(ModifierKeyword.PRIVATE_KEYWORD); SimpleType listenerType = getListenerSimpleType(JavaViewBasedOnLayoutModifierConstants.VIEW_CLASS, JavaViewBasedOnLayoutModifierConstants.METHOD_ON_CLICK_LISTENER); VariableDeclarationFragment variableFragment = typeDeclaration.getAST().newVariableDeclarationFragment(); SimpleName varName = variableFragment.getAST().newSimpleName( JavaViewBasedOnLayoutModifierConstants.HANDLER_ONCLICK_LISTENER); variableFragment.setName(varName); FieldDeclaration declaration = typeDeclaration.getAST().newFieldDeclaration(variableFragment); declaration.modifiers().add(privateMod); declaration.setType(listenerType); ClassInstanceCreation classInstanceCreation = typeDeclaration.getAST().newClassInstanceCreation(); SimpleType listenerType2 = getListenerSimpleType(JavaViewBasedOnLayoutModifierConstants.VIEW_CLASS, JavaViewBasedOnLayoutModifierConstants.METHOD_ON_CLICK_LISTENER); classInstanceCreation.setType(listenerType2); AnonymousClassDeclaration classDeclaration = typeDeclaration.getAST().newAnonymousClassDeclaration(); MethodDeclaration methodDeclaration = addMethodDeclaration(ModifierKeyword.PUBLIC_KEYWORD, JavaViewBasedOnLayoutModifierConstants.METHOD_NAME_ON_CLICK, PrimitiveType.VOID, JavaViewBasedOnLayoutModifierConstants.VIEW_CLASS, JavaViewBasedOnLayoutModifierConstants.VIEW_VARIABLE_NAME); Block block = typeDeclaration.getAST().newBlock(); ifSt = createElseIfForEachRadioButtonId(node); block.statements().add(ifSt); methodDeclaration.setBody(block); classDeclaration.bodyDeclarations().add(methodDeclaration); classInstanceCreation.setAnonymousClassDeclaration(classDeclaration); variableFragment.setInitializer(classInstanceCreation); typeDeclaration.bodyDeclarations().add(declaration); return ifSt; } | /**
* Creates a field with an anonymous class declaration for radio buttons
*/ | Creates a field with an anonymous class declaration for radio buttons | createOnClickListenerForRadioButtons | {
* Creates a field with an anonymous class declaration for radio buttons
"repo_name": "rex-xxx/mt6572_x201",
"path": "tools/motodev/src/plugins/android.codeutils/src/com/motorola/studio/android/generateviewbylayout/codegenerators/RadioButtonCodeGenerator.java",
"license": "gpl-2.0",
"size": 18183
} | [
"com.motorola.studio.android.generateviewbylayout.JavaViewBasedOnLayoutModifierConstants",
"com.motorola.studio.android.generateviewbylayout.model.LayoutNode",
"org.eclipse.jdt.core.dom.AnonymousClassDeclaration",
"org.eclipse.jdt.core.dom.Block",
"org.eclipse.jdt.core.dom.ClassInstanceCreation",
"org.eclipse.jdt.core.dom.FieldDeclaration",
"org.eclipse.jdt.core.dom.IfStatement",
"org.eclipse.jdt.core.dom.MethodDeclaration",
"org.eclipse.jdt.core.dom.Modifier",
"org.eclipse.jdt.core.dom.PrimitiveType",
"org.eclipse.jdt.core.dom.SimpleName",
"org.eclipse.jdt.core.dom.SimpleType",
"org.eclipse.jdt.core.dom.VariableDeclarationFragment"
] | import com.motorola.studio.android.generateviewbylayout.JavaViewBasedOnLayoutModifierConstants; import com.motorola.studio.android.generateviewbylayout.model.LayoutNode; import org.eclipse.jdt.core.dom.AnonymousClassDeclaration; import org.eclipse.jdt.core.dom.Block; import org.eclipse.jdt.core.dom.ClassInstanceCreation; import org.eclipse.jdt.core.dom.FieldDeclaration; import org.eclipse.jdt.core.dom.IfStatement; import org.eclipse.jdt.core.dom.MethodDeclaration; import org.eclipse.jdt.core.dom.Modifier; import org.eclipse.jdt.core.dom.PrimitiveType; import org.eclipse.jdt.core.dom.SimpleName; import org.eclipse.jdt.core.dom.SimpleType; import org.eclipse.jdt.core.dom.VariableDeclarationFragment; | import com.motorola.studio.android.generateviewbylayout.*; import com.motorola.studio.android.generateviewbylayout.model.*; import org.eclipse.jdt.core.dom.*; | [
"com.motorola.studio",
"org.eclipse.jdt"
] | com.motorola.studio; org.eclipse.jdt; | 1,248,772 |
public static InStream create(String name,
DiskRangeList input,
long length) throws IOException {
return create(name, input, length, null);
} | static InStream function(String name, DiskRangeList input, long length) throws IOException { return create(name, input, length, null); } | /**
* Create an input stream from a list of disk ranges with data.
* @param name the name of the stream
* @param input the list of ranges of bytes for the stream; from disk or cache
* @param length the length in bytes of the stream
* @return an input stream
*/ | Create an input stream from a list of disk ranges with data | create | {
"repo_name": "majetideepak/orc",
"path": "java/core/src/java/org/apache/orc/impl/InStream.java",
"license": "apache-2.0",
"size": 24272
} | [
"java.io.IOException",
"org.apache.hadoop.hive.common.io.DiskRangeList"
] | import java.io.IOException; import org.apache.hadoop.hive.common.io.DiskRangeList; | import java.io.*; import org.apache.hadoop.hive.common.io.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 1,316,015 |
@Test
public void testAfterStartupContext() throws IOException
{
jetty.copyWebapp("foo-webapp-1.war","foo.war");
jetty.copyWebapp("foo.xml","foo.xml");
waitForDirectoryScan();
waitForDirectoryScan();
jetty.assertWebAppContextsExists("/foo");
} | void function() throws IOException { jetty.copyWebapp(STR,STR); jetty.copyWebapp(STR,STR); waitForDirectoryScan(); waitForDirectoryScan(); jetty.assertWebAppContextsExists("/foo"); } | /**
* Simple webapp deployment after startup of server.
*/ | Simple webapp deployment after startup of server | testAfterStartupContext | {
"repo_name": "sdw2330976/Research-jetty-9.2.5",
"path": "jetty-deploy/src/test/java/org/eclipse/jetty/deploy/providers/ScanningAppProviderRuntimeUpdatesTest.java",
"license": "apache-2.0",
"size": 5997
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,781,260 |
public void testGetControlInstanceControlContextHashtableOfQQ002() throws Exception {
MockControl cs = new MockControl("c1", false, new byte[] { 1, 2, 3, 4 });
assertEquals(cs,ControlFactory.getControlInstance(cs,null,null));
}
| void function() throws Exception { MockControl cs = new MockControl("c1", false, new byte[] { 1, 2, 3, 4 }); assertEquals(cs,ControlFactory.getControlInstance(cs,null,null)); } | /**
* <p>Test method for 'javax.naming.ldap.ControlFactory.getControlInstance(Control, Context, Hashtable<?, ?>)'</p>
* <p>Here we are testing the static method of the class ControlFactory</p>
* <p>The expected result is the control sent.</p>
*/ | Test method for 'javax.naming.ldap.ControlFactory.getControlInstance(Control, Context, Hashtable)' Here we are testing the static method of the class ControlFactory The expected result is the control sent | testGetControlInstanceControlContextHashtableOfQQ002 | {
"repo_name": "shannah/cn1",
"path": "Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/jndi/src/test/java/org/apache/harmony/jndi/tests/javax/naming/ldap/ControlFactoryTest.java",
"license": "gpl-2.0",
"size": 4640
} | [
"javax.naming.ldap.ControlFactory",
"org.apache.harmony.jndi.tests.javax.naming.spi.mock.ldap.MockControl"
] | import javax.naming.ldap.ControlFactory; import org.apache.harmony.jndi.tests.javax.naming.spi.mock.ldap.MockControl; | import javax.naming.ldap.*; import org.apache.harmony.jndi.tests.javax.naming.spi.mock.ldap.*; | [
"javax.naming",
"org.apache.harmony"
] | javax.naming; org.apache.harmony; | 848,641 |
void announce(Arena arena, int wave); | void announce(Arena arena, int wave); | /**
* Announce to all players that this wave is spawning.
* @param arena an arena
* @param wave a wave number
*/ | Announce to all players that this wave is spawning | announce | {
"repo_name": "slipcor/MobArena",
"path": "src/main/java/com/garbagemule/MobArena/waves/Wave.java",
"license": "gpl-3.0",
"size": 4829
} | [
"com.garbagemule.MobArena"
] | import com.garbagemule.MobArena; | import com.garbagemule.*; | [
"com.garbagemule"
] | com.garbagemule; | 2,512,638 |
public void test_sort$DII() {
// Test for method void java.util.Arrays.sort(double [], int, int)
int startIndex = arraySize / 4;
int endIndex = 3 * arraySize / 4;
double[] reversedArray = new double[arraySize];
double[] originalReversedArray = new double[arraySize];
for (int counter = 0; counter < arraySize; counter++) {
reversedArray[counter] = (double) (arraySize - counter - 1);
originalReversedArray[counter] = reversedArray[counter];
}
Arrays.sort(reversedArray, startIndex, endIndex);
for (int counter = 0; counter < startIndex; counter++)
assertTrue("Array modified outside of bounds",
reversedArray[counter] == originalReversedArray[counter]);
for (int counter = startIndex; counter < endIndex - 1; counter++)
assertTrue("Array not sorted within bounds",
reversedArray[counter] <= reversedArray[counter + 1]);
for (int counter = endIndex; counter < arraySize; counter++)
assertTrue("Array modified outside of bounds",
reversedArray[counter] == originalReversedArray[counter]);
//exception testing
try {
Arrays.sort(reversedArray, startIndex + 1, startIndex);
fail("IllegalArgumentException expected");
} catch (IllegalArgumentException ignore) {
}
try {
Arrays.sort(reversedArray, -1, startIndex);
fail("ArrayIndexOutOfBoundsException expected (1)");
} catch (ArrayIndexOutOfBoundsException ignore) {
}
try {
Arrays.sort(reversedArray, startIndex, reversedArray.length + 1);
fail("ArrayIndexOutOfBoundsException expected (2)");
} catch (ArrayIndexOutOfBoundsException ignore) {
}
} | public void test_sort$DII() { int startIndex = arraySize / 4; int endIndex = 3 * arraySize / 4; double[] reversedArray = new double[arraySize]; double[] originalReversedArray = new double[arraySize]; for (int counter = 0; counter < arraySize; counter++) { reversedArray[counter] = (double) (arraySize - counter - 1); originalReversedArray[counter] = reversedArray[counter]; } Arrays.sort(reversedArray, startIndex, endIndex); for (int counter = 0; counter < startIndex; counter++) assertTrue(STR, reversedArray[counter] == originalReversedArray[counter]); for (int counter = startIndex; counter < endIndex - 1; counter++) assertTrue(STR, reversedArray[counter] <= reversedArray[counter + 1]); for (int counter = endIndex; counter < arraySize; counter++) assertTrue(STR, reversedArray[counter] == originalReversedArray[counter]); try { Arrays.sort(reversedArray, startIndex + 1, startIndex); fail(STR); } catch (IllegalArgumentException ignore) { } try { Arrays.sort(reversedArray, -1, startIndex); fail(STR); } catch (ArrayIndexOutOfBoundsException ignore) { } try { Arrays.sort(reversedArray, startIndex, reversedArray.length + 1); fail(STR); } catch (ArrayIndexOutOfBoundsException ignore) { } } | /**
* java.util.Arrays#sort(double[], int, int)
*/ | java.util.Arrays#sort(double[], int, int) | test_sort$DII | {
"repo_name": "mirego/j2objc",
"path": "jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/java/util/ArraysTest.java",
"license": "apache-2.0",
"size": 207868
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 1,183,347 |
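
A minimal illustration of the range-sort semantics this test exercises: only the [fromIndex, toIndex) slice is ordered, the ends are untouched, and invalid ranges throw.

import java.util.Arrays;

public class RangeSortDemo {
    public static void main(String[] args) {
        double[] values = {5.0, 4.0, 3.0, 2.0, 1.0, 0.0};
        // Sort only indices 1 (inclusive) to 5 (exclusive); the ends stay untouched.
        Arrays.sort(values, 1, 5);
        System.out.println(Arrays.toString(values)); // [5.0, 1.0, 2.0, 3.0, 4.0, 0.0]
        // Arrays.sort(values, 3, 1) would throw IllegalArgumentException;
        // Arrays.sort(values, -1, 4) would throw ArrayIndexOutOfBoundsException.
    }
}
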
public final static <A, B> Map<A, B> toLinkedHashMap(final Iterable<A> keys, final Iterable<B> values)
{
return intoMap(keys, values, new LinkedHashMap<A, B>());
} | final static <A, B> Map<A, B> function(final Iterable<A> keys, final Iterable<B> values) { return intoMap(keys, values, new LinkedHashMap<A, B>()); } | /**
* Puts the given key/value pairs into a LinkedHashMap.
*
* @param keys
* Collection of keys.
* @param values
* Collection of values.
* @return The LinkedHashMap.
* @see LinkedHashMap
*/ | Puts the given key/value pairs into a LinkedHashMap | toLinkedHashMap | {
"repo_name": "rjeschke/neetutils-base",
"path": "src/main/java/com/github/rjeschke/neetutils/collections/Colls.java",
"license": "apache-2.0",
"size": 28113
} | [
"java.util.LinkedHashMap",
"java.util.Map"
] | import java.util.LinkedHashMap; import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,943,153 |
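
Since the intoMap helper is not shown in this record, here is a self-contained sketch of the same idea, zipping parallel key and value collections into an insertion-ordered map; the zip name is an illustrative choice, not the library's API.

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ZipToMapDemo {
    // Pairs keys and values positionally; extra elements on either side are ignored.
    static <A, B> Map<A, B> zip(Iterable<A> keys, Iterable<B> values) {
        Map<A, B> out = new LinkedHashMap<>();
        Iterator<A> k = keys.iterator();
        Iterator<B> v = values.iterator();
        while (k.hasNext() && v.hasNext()) {
            out.put(k.next(), v.next());
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(zip(List.of("a", "b", "c"), List.of(1, 2, 3))); // {a=1, b=2, c=3}
    }
}
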
public static Optional<Cookie> getCookieFromRequest(final String cookieName, final HttpServletRequest request) {
val cookies = request.getCookies();
if (cookies == null) {
return Optional.empty();
}
return Arrays.stream(cookies).filter(c -> c.getName().equalsIgnoreCase(cookieName)).findFirst();
} | static Optional<Cookie> function(final String cookieName, final HttpServletRequest request) { val cookies = request.getCookies(); if (cookies == null) { return Optional.empty(); } return Arrays.stream(cookies).filter(c -> c.getName().equalsIgnoreCase(cookieName)).findFirst(); } | /**
* Gets cookie from request.
*
* @param cookieName the cookie name
* @param request the request
* @return the cookie from request
*/ | Gets cookie from request | getCookieFromRequest | {
"repo_name": "leleuj/cas",
"path": "core/cas-server-core-cookie-api/src/main/java/org/apereo/cas/web/support/CookieUtils.java",
"license": "apache-2.0",
"size": 3744
} | [
"java.util.Arrays",
"java.util.Optional",
"javax.servlet.http.Cookie",
"javax.servlet.http.HttpServletRequest"
] | import java.util.Arrays; import java.util.Optional; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; | import java.util.*; import javax.servlet.http.*; | [
"java.util",
"javax.servlet"
] | java.util; javax.servlet; | 1,087,373 |
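
The same Optional-based lookup can be demonstrated without a servlet container; this sketch uses a stand-in Cookie record purely to stay self-contained, mirroring the null check plus stream-filter-findFirst shape of the method above.

import java.util.Arrays;
import java.util.Optional;

public class CookieLookupDemo {
    // Stand-in for javax.servlet.http.Cookie, used only to keep the sketch self-contained.
    record Cookie(String name, String value) {}

    static Optional<Cookie> findCookie(Cookie[] cookies, String name) {
        if (cookies == null) {
            return Optional.empty(); // request.getCookies() may return null
        }
        return Arrays.stream(cookies)
                .filter(c -> c.name().equalsIgnoreCase(name))
                .findFirst();
    }

    public static void main(String[] args) {
        Cookie[] cookies = {new Cookie("TGC", "abc"), new Cookie("JSESSIONID", "xyz")};
        System.out.println(findCookie(cookies, "tgc").map(Cookie::value).orElse("missing")); // abc
    }
}
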
public void enableHelp(MenuItem comp, String id, HelpSet hs); | void function(MenuItem comp, String id, HelpSet hs); | /**
* Enables help for a MenuItem. This method sets a
* component's helpID and HelpSet.
*
* @see CSH.setHelpID
* @see CSH.setHelpSet
*/ | Enables help for a MenuItem. This method sets a component's helpID and HelpSet | enableHelp | {
"repo_name": "srnsw/xena",
"path": "xena/ext/src/javahelp/jhMaster/JavaHelp/src/new/javax/help/HelpBroker.java",
"license": "gpl-3.0",
"size": 13348
} | [
"java.awt.MenuItem"
] | import java.awt.MenuItem; | import java.awt.*; | [
"java.awt"
] | java.awt; | 505,910 |
private Response<Bitmap> doParse(NetworkResponse response) {
byte[] data = response.data;
BitmapFactory.Options decodeOptions = new BitmapFactory.Options();
Bitmap bitmap = null;
if (mMaxWidth == 0 && mMaxHeight == 0) {
decodeOptions.inPreferredConfig = mDecodeConfig;
bitmap = BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions);
} else {
// If we have to resize this image, first get the natural bounds.
decodeOptions.inJustDecodeBounds = true;
BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions);
int actualWidth = decodeOptions.outWidth;
int actualHeight = decodeOptions.outHeight;
// Then compute the dimensions we would ideally like to decode to.
int desiredWidth = getResizedDimension(mMaxWidth, mMaxHeight,
actualWidth, actualHeight, mScaleType);
int desiredHeight = getResizedDimension(mMaxHeight, mMaxWidth,
actualHeight, actualWidth, mScaleType);
// Decode to the nearest power of two scaling factor.
decodeOptions.inJustDecodeBounds = false;
decodeOptions.inPreferQualityOverSpeed = PREFER_QUALITY_OVER_SPEED;
decodeOptions.inSampleSize =
findBestSampleSize(actualWidth, actualHeight, desiredWidth, desiredHeight);
Bitmap tempBitmap =
BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions);
// If necessary, scale down to the maximal acceptable size.
if (tempBitmap != null && (tempBitmap.getWidth() > desiredWidth ||
tempBitmap.getHeight() > desiredHeight)) {
bitmap = Bitmap.createScaledBitmap(tempBitmap,
desiredWidth, desiredHeight, true);
} else {
bitmap = tempBitmap;
}
}
if (bitmap == null) {
return Response.error(new ParseError(response));
} else {
return Response.success(bitmap, HttpHeaderParser.parseCacheHeaders(response));
}
} | Response<Bitmap> function(NetworkResponse response) { byte[] data = response.data; BitmapFactory.Options decodeOptions = new BitmapFactory.Options(); Bitmap bitmap = null; if (mMaxWidth == 0 && mMaxHeight == 0) { decodeOptions.inPreferredConfig = mDecodeConfig; bitmap = BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions); } else { decodeOptions.inJustDecodeBounds = true; BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions); int actualWidth = decodeOptions.outWidth; int actualHeight = decodeOptions.outHeight; int desiredWidth = getResizedDimension(mMaxWidth, mMaxHeight, actualWidth, actualHeight, mScaleType); int desiredHeight = getResizedDimension(mMaxHeight, mMaxWidth, actualHeight, actualWidth, mScaleType); decodeOptions.inJustDecodeBounds = false; decodeOptions.inPreferQualityOverSpeed = PREFER_QUALITY_OVER_SPEED; decodeOptions.inSampleSize = findBestSampleSize(actualWidth, actualHeight, desiredWidth, desiredHeight); Bitmap tempBitmap = BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions); if (tempBitmap != null && (tempBitmap.getWidth() > desiredWidth tempBitmap.getHeight() > desiredHeight)) { bitmap = Bitmap.createScaledBitmap(tempBitmap, desiredWidth, desiredHeight, true); } else { bitmap = tempBitmap; } } if (bitmap == null) { return Response.error(new ParseError(response)); } else { return Response.success(bitmap, HttpHeaderParser.parseCacheHeaders(response)); } } | /**
* The real guts of parseNetworkResponse. Broken out for readability.
*/ | The real guts of parseNetworkResponse. Broken out for readability | doParse | {
"repo_name": "djodjoni/jus",
"path": "examples-android/src/main/java/io/apptik/comm/jus/examples/request/ImageRequest.java",
"license": "apache-2.0",
"size": 9734
} | [
"android.graphics.Bitmap",
"android.graphics.BitmapFactory",
"io.apptik.comm.jus.NetworkResponse",
"io.apptik.comm.jus.ParseError",
"io.apptik.comm.jus.Response",
"io.apptik.comm.jus.toolbox.HttpHeaderParser"
] | import android.graphics.Bitmap; import android.graphics.BitmapFactory; import io.apptik.comm.jus.NetworkResponse; import io.apptik.comm.jus.ParseError; import io.apptik.comm.jus.Response; import io.apptik.comm.jus.toolbox.HttpHeaderParser; | import android.graphics.*; import io.apptik.comm.jus.*; import io.apptik.comm.jus.toolbox.*; | [
"android.graphics",
"io.apptik.comm"
] | android.graphics; io.apptik.comm; | 1,177,635 |
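
The two-pass decode above hinges on choosing a power-of-two inSampleSize; this stand-alone sketch shows the usual calculation, and the real findBestSampleSize in the request class may differ in detail.

public class SampleSizeDemo {
    // Largest power of two whose downscaled dimensions still cover the desired size.
    static int findBestSampleSize(int actualWidth, int actualHeight,
                                  int desiredWidth, int desiredHeight) {
        double wr = (double) actualWidth / desiredWidth;
        double hr = (double) actualHeight / desiredHeight;
        double ratio = Math.min(wr, hr);
        int n = 1;
        while (n * 2 <= ratio) {
            n *= 2;
        }
        return n;
    }

    public static void main(String[] args) {
        System.out.println(findBestSampleSize(1600, 1200, 400, 300)); // 4
        System.out.println(findBestSampleSize(1000, 1000, 300, 300)); // 2
    }
}
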
protected boolean registerProcessor(ProcessorDefinition<?> processor) {
// skip on exception
if (processor instanceof OnExceptionDefinition) {
return false;
}
// skip on completion
if (processor instanceof OnCompletionDefinition) {
return false;
}
// skip intercept
if (processor instanceof InterceptDefinition) {
return false;
}
// skip aop
if (processor instanceof AOPDefinition) {
return false;
}
// skip policy
if (processor instanceof PolicyDefinition) {
return false;
}
// only if custom id assigned
boolean only = getManagementStrategy().getManagementAgent().getOnlyRegisterProcessorWithCustomId() != null
&& getManagementStrategy().getManagementAgent().getOnlyRegisterProcessorWithCustomId();
if (only) {
return processor.hasCustomIdAssigned();
}
// use customer filter
return getManagementStrategy().manageProcessor(processor);
} | boolean function(ProcessorDefinition<?> processor) { if (processor instanceof OnExceptionDefinition) { return false; } if (processor instanceof OnCompletionDefinition) { return false; } if (processor instanceof InterceptDefinition) { return false; } if (processor instanceof AOPDefinition) { return false; } if (processor instanceof PolicyDefinition) { return false; } boolean only = getManagementStrategy().getManagementAgent().getOnlyRegisterProcessorWithCustomId() != null && getManagementStrategy().getManagementAgent().getOnlyRegisterProcessorWithCustomId(); if (only) { return processor.hasCustomIdAssigned(); } return getManagementStrategy().manageProcessor(processor); } | /**
* Should the given processor be registered.
*/ | Should the given processor be registered | registerProcessor | {
"repo_name": "gilfernandes/camel",
"path": "camel-core/src/main/java/org/apache/camel/management/DefaultManagementLifecycleStrategy.java",
"license": "apache-2.0",
"size": 42958
} | [
"org.apache.camel.model.AOPDefinition",
"org.apache.camel.model.InterceptDefinition",
"org.apache.camel.model.OnCompletionDefinition",
"org.apache.camel.model.OnExceptionDefinition",
"org.apache.camel.model.PolicyDefinition",
"org.apache.camel.model.ProcessorDefinition"
] | import org.apache.camel.model.AOPDefinition; import org.apache.camel.model.InterceptDefinition; import org.apache.camel.model.OnCompletionDefinition; import org.apache.camel.model.OnExceptionDefinition; import org.apache.camel.model.PolicyDefinition; import org.apache.camel.model.ProcessorDefinition; | import org.apache.camel.model.*; | [
"org.apache.camel"
] | org.apache.camel; | 1,792,836 |
protected void stopPluginListeners() {
for (Iterator iterator = pluginListeners.iterator(); iterator.hasNext();) {
PluginListener listener = (PluginListener) iterator.next();
try {
listener.pluginDestroyed(this);
} catch (Throwable t) {
LOG.error(getLogPrefix()+" Failed when invoking pluginDestroyed on Plugin Listener '"+listener.getClass().getName()+"'.", t);
}
}
} | void function() { for (Iterator iterator = pluginListeners.iterator(); iterator.hasNext();) { PluginListener listener = (PluginListener) iterator.next(); try { listener.pluginDestroyed(this); } catch (Throwable t) { LOG.error(getLogPrefix()+STR+listener.getClass().getName()+"'.", t); } } } | /**
* If we fail to stop a plugin listener, try the next one but don't propagate any
* exceptions out of this method. Otherwise the plugin ends up dying and can't be
* reloaded from a hot deploy.
*/ | If we fail to stop a plugin listener, try the next one but don't propagate any exceptions out of this method. Otherwise the plugin ends up dying and can't be reloaded from a hot deploy | stopPluginListeners | {
"repo_name": "sbower/kuali-rice-1",
"path": "impl/src/main/java/org/kuali/rice/kew/plugin/Plugin.java",
"license": "apache-2.0",
"size": 5926
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 2,132,423 |
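
The fail-safe notification loop, logging each listener failure and moving on rather than letting one bad listener abort the rest, is easy to isolate; a generic sketch with invented names:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

public class SafeNotifyDemo {
    interface Listener { void onShutdown(); }

    private final List<Listener> listeners = new CopyOnWriteArrayList<>();

    void addListener(Listener l) { listeners.add(l); }

    // Each listener gets its turn even if an earlier one throws.
    void fireShutdown() {
        for (Listener l : listeners) {
            try {
                l.onShutdown();
            } catch (Throwable t) {
                System.err.println("Listener " + l.getClass().getName() + " failed: " + t);
            }
        }
    }
}
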
public UpdateRequest script(Script script) {
this.script = script;
return this;
} | UpdateRequest function(Script script) { this.script = script; return this; } | /**
* The script to execute. Note, make sure not to send a different script each time and instead
* use script params if possible with the same (automatically compiled) script.
*/ | The script to execute. Note, make sure not to send a different script each time and instead use script params if possible with the same (automatically compiled) script | script | {
"repo_name": "dpursehouse/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/action/update/UpdateRequest.java",
"license": "apache-2.0",
"size": 25021
} | [
"org.elasticsearch.script.Script"
] | import org.elasticsearch.script.Script; | import org.elasticsearch.script.*; | [
"org.elasticsearch.script"
] | org.elasticsearch.script; | 2,305,177 |
public Object getUserData(Node n, String key) {
if (nodeUserData == null) {
return null;
}
Map<String, UserDataRecord> t = nodeUserData.get(n);
if (t == null) {
return null;
}
UserDataRecord r = t.get(key);
if (r != null) {
return r.fData;
}
return null;
} | Object function(Node n, String key) { if (nodeUserData == null) { return null; } Map<String, UserDataRecord> t = nodeUserData.get(n); if (t == null) { return null; } UserDataRecord r = t.get(key); if (r != null) { return r.fData; } return null; } | /**
* Retrieves the object associated to a key on a this node. The object
* must first have been set to this node by calling
* <code>setUserData</code> with the same key.
* @param n The node the object is associated to.
* @param key The key the object is associated to.
* @return Returns the <code>DOMObject</code> associated to the given key
* on this node, or <code>null</code> if there was none.
* @since DOM Level 3
*/ | Retrieves the object associated to a key on a this node. The object must first have been set to this node by calling <code>setUserData</code> with the same key | getUserData | {
"repo_name": "openjdk/jdk8u",
"path": "jaxp/src/com/sun/org/apache/xerces/internal/dom/CoreDocumentImpl.java",
"license": "gpl-2.0",
"size": 105220
} | [
"java.util.Map",
"org.w3c.dom.Node"
] | import java.util.Map; import org.w3c.dom.Node; | import java.util.*; import org.w3c.dom.*; | [
"java.util",
"org.w3c.dom"
] | java.util; org.w3c.dom; | 1,147,753 |
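
The lookup above is a two-level map (node, then key, then record); a minimal sketch of that storage pattern with invented names, omitting the UserDataHandler bookkeeping the DOM implementation also carries:

import java.util.HashMap;
import java.util.Map;

public class UserDataStoreDemo {
    // node -> (key -> value); the DOM implementation additionally wraps values
    // in a record that carries the UserDataHandler.
    private final Map<Object, Map<String, Object>> nodeUserData = new HashMap<>();

    public Object setUserData(Object node, String key, Object data) {
        return nodeUserData.computeIfAbsent(node, n -> new HashMap<>()).put(key, data);
    }

    public Object getUserData(Object node, String key) {
        Map<String, Object> perNode = nodeUserData.get(node);
        return perNode == null ? null : perNode.get(key);
    }

    public static void main(String[] args) {
        UserDataStoreDemo store = new UserDataStoreDemo();
        Object node = new Object();
        store.setUserData(node, "owner", "importer");
        System.out.println(store.getUserData(node, "owner")); // importer
    }
}
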
@Idempotent
public LocatedBlock updateBlockForPipeline(ExtendedBlock block,
String clientName) throws IOException; | LocatedBlock function(ExtendedBlock block, String clientName) throws IOException; | /**
* Get a new generation stamp together with an access token for
* a block under construction
*
* This method is called only when a client needs to recover a failed
* pipeline or set up a pipeline for appending to a block.
*
* @param block a block
* @param clientName the name of the client
* @return a located block with a new generation stamp and an access token
* @throws IOException if any error occurs
*/ | Get a new generation stamp together with an access token for a block under construction This method is called only when a client needs to recover a failed pipeline or set up a pipeline for appending to a block | updateBlockForPipeline | {
"repo_name": "huiyi-learning/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java",
"license": "apache-2.0",
"size": 58668
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,977,928 |
@Test
public void testMassiveConcurrentInitBorrow() throws Exception {
final int numThreads = 200;
ds.setDriverClassName("org.apache.commons.dbcp2.TesterConnectionDelayDriver");
ds.setUrl("jdbc:apache:commons:testerConnectionDelayDriver:20");
ds.setInitialSize(8);
final List<Throwable> errors = Collections.synchronizedList(new ArrayList<>());
final Thread[] threads = new Thread[numThreads];
for (int i = 0; i < numThreads; i++) {
threads[i] = new TestThread(2, 0, 50);
threads[i].setUncaughtExceptionHandler((t, e) -> errors.add(e));
}
for (int i = 0; i < numThreads; i++) {
threads[i].start();
if (i%4 == 0) {
Thread.sleep(20);
}
}
for (int i = 0; i < numThreads; i++) {
threads[i].join();
}
assertEquals(0, errors.size());
ds.close();
} | void function() throws Exception { final int numThreads = 200; ds.setDriverClassName(STR); ds.setUrl(STR); ds.setInitialSize(8); final List<Throwable> errors = Collections.synchronizedList(new ArrayList<>()); final Thread[] threads = new Thread[numThreads]; for (int i = 0; i < numThreads; i++) { threads[i] = new TestThread(2, 0, 50); threads[i].setUncaughtExceptionHandler((t, e) -> errors.add(e)); } for (int i = 0; i < numThreads; i++) { threads[i].start(); if (i%4 == 0) { Thread.sleep(20); } } for (int i = 0; i < numThreads; i++) { threads[i].join(); } assertEquals(0, errors.size()); ds.close(); } | /**
* Fire up 100 Threads but only have 10 maxActive and forcedBlock.
* See
* @throws Exception
*/ | Fire up 100 Threads but only have 10 maxActive and forcedBlock. See | testMassiveConcurrentInitBorrow | {
"repo_name": "apache/commons-dbcp",
"path": "src/test/java/org/apache/commons/dbcp2/TestParallelCreationWithNoIdle.java",
"license": "apache-2.0",
"size": 5575
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"org.junit.jupiter.api.Assertions"
] | import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.junit.jupiter.api.Assertions; | import java.util.*; import org.junit.jupiter.api.*; | [
"java.util",
"org.junit.jupiter"
] | java.util; org.junit.jupiter; | 373,590 |
@EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
public void onPlayerQuit(PlayerQuitEvent event) {
Player player = event.getPlayer();
if (!UserManager.hasPlayerDataKey(player)) {
return;
}
McMMOPlayer mcMMOPlayer = UserManager.getPlayer(player);
mcMMOPlayer.logout(false);
} | @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true) void function(PlayerQuitEvent event) { Player player = event.getPlayer(); if (!UserManager.hasPlayerDataKey(player)) { return; } McMMOPlayer mcMMOPlayer = UserManager.getPlayer(player); mcMMOPlayer.logout(false); } | /**
* Monitor PlayerQuitEvents.
* <p>
* These events are monitored for the purpose of resetting player
* variables and other garbage collection tasks that must take place when
* a player exits the server.
*
* @param event The event to monitor
*/ | Monitor PlayerQuitEvents. These events are monitored for the purpose of resetting player variables and other garbage collection tasks that must take place when a player exits the server | onPlayerQuit | {
"repo_name": "Maximvdw/mcMMO",
"path": "src/main/java/com/gmail/nossr50/listeners/PlayerListener.java",
"license": "agpl-3.0",
"size": 27120
} | [
"com.gmail.nossr50.datatypes.player.McMMOPlayer",
"com.gmail.nossr50.util.player.UserManager",
"org.bukkit.entity.Player",
"org.bukkit.event.EventHandler",
"org.bukkit.event.EventPriority",
"org.bukkit.event.player.PlayerQuitEvent"
] | import com.gmail.nossr50.datatypes.player.McMMOPlayer; import com.gmail.nossr50.util.player.UserManager; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.player.PlayerQuitEvent; | import com.gmail.nossr50.datatypes.player.*; import com.gmail.nossr50.util.player.*; import org.bukkit.entity.*; import org.bukkit.event.*; import org.bukkit.event.player.*; | [
"com.gmail.nossr50",
"org.bukkit.entity",
"org.bukkit.event"
] | com.gmail.nossr50; org.bukkit.entity; org.bukkit.event; | 1,753,431 |
private boolean isStale() {
long lastRun = outputFile.lastModified();
long sourcesLastModified =
max(getLastModifiedTime(this.sourceFileLists), getLastModifiedTime(this.sourcePaths));
long externsLastModified = getLastModifiedTime(this.externFileLists);
return lastRun <= sourcesLastModified || lastRun <= externsLastModified;
} | boolean function() { long lastRun = outputFile.lastModified(); long sourcesLastModified = max(getLastModifiedTime(this.sourceFileLists), getLastModifiedTime(this.sourcePaths)); long externsLastModified = getLastModifiedTime(this.externFileLists); return lastRun <= sourcesLastModified lastRun <= externsLastModified; } | /**
* Determine if compilation must actually happen, i.e. if any input file
* (extern or source) has changed after the outputFile was last modified.
*
* @return true if compilation should happen
*/ | Determine if compilation must actually happen, i.e. if any input file (extern or source) has changed after the outputFile was last modified | isStale | {
"repo_name": "GoogleChromeLabs/chromeos_smart_card_connector",
"path": "third_party/closure-compiler/src/src/com/google/javascript/jscomp/ant/CompileTask.java",
"license": "apache-2.0",
"size": 23834
} | [
"java.lang.Math"
] | import java.lang.Math; | import java.lang.*; | [
"java.lang"
] | java.lang; | 450,966 |
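
The staleness rule is a plain timestamp comparison; a self-contained sketch over java.io.File, assuming the getLastModifiedTime helpers reduce to a maximum over their inputs:

import java.io.File;
import java.util.List;

public class StaleCheckDemo {
    // Latest lastModified() among the given files; 0 for an empty list.
    static long newest(List<File> files) {
        return files.stream().mapToLong(File::lastModified).max().orElse(0L);
    }

    // Recompile when any source or extern is at least as new as the previous output.
    static boolean isStale(File output, List<File> sources, List<File> externs) {
        long lastRun = output.lastModified();
        return lastRun <= newest(sources) || lastRun <= newest(externs);
    }
}
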
public static Pluralizer pluralizer(Locale locale) {
String className = "org.jvnet.inflector.lang." + locale.getLanguage() + ".NounPluralizer";
try {
Class<?> c = Class.forName(className);
return (Pluralizer) c.newInstance();
} catch (ClassNotFoundException e) {
return null;
} catch (InstantiationException e) {
throw new RuntimeException("Problem instantiating " + className, e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Problem instantiating " + className, e);
}
} | static Pluralizer function(Locale locale) { String className = STR + locale.getLanguage() + STR; try { Class<?> c = Class.forName(className); return (Pluralizer) c.newInstance(); } catch (ClassNotFoundException e) { return null; } catch (InstantiationException e) { throw new RuntimeException(STR + className, e); } catch (IllegalAccessException e) { throw new RuntimeException(STR + className, e); } } | /**
* <p>
* Creates a new {@link Pluralizer} instance for the specified locale.
* </p>
* @param locale the locale specifying the language of the pluralizer
* @return a pluralizer instance for the specified locale, or <code>null</code> if there is none for this locale
*/ | Creates a new <code>Pluralizer</code> instance for the specified locale. | pluralizer | {
"repo_name": "tomwhite/inflector",
"path": "src/main/java/org/jvnet/inflector/Noun.java",
"license": "apache-2.0",
"size": 4333
} | [
"java.util.Locale"
] | import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 271,029 |
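
The convention-over-configuration trick here, building a class name from the locale's language code and loading it reflectively, can be sketched generically; the package and interface names below are placeholders rather than the inflector library's real layout.

import java.util.Locale;

public class LocaleLookupDemo {
    interface Greeter { String greet(); }

    // Loads com.example.lang.<language>.Greeter if present, else returns null.
    static Greeter greeterFor(Locale locale) {
        String className = "com.example.lang." + locale.getLanguage() + ".Greeter";
        try {
            return (Greeter) Class.forName(className)
                    .getDeclaredConstructor()
                    .newInstance();
        } catch (ClassNotFoundException e) {
            return null; // no implementation for this language
        } catch (ReflectiveOperationException e) {
            throw new RuntimeException("Problem instantiating " + className, e);
        }
    }
}
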
@GwtCompatible(serializable = true)
@Beta
public static <K extends Enum<K>, V> ImmutableMap<K, V> immutableEnumMap(
Map<K, ? extends V> map) {
if (map instanceof ImmutableEnumMap) {
@SuppressWarnings("unchecked") // safe covariant cast
ImmutableEnumMap<K, V> result = (ImmutableEnumMap<K, V>) map;
return result;
} else if (map.isEmpty()) {
return ImmutableMap.of();
} else {
for (Map.Entry<K, ? extends V> entry : map.entrySet()) {
checkNotNull(entry.getKey());
checkNotNull(entry.getValue());
}
return ImmutableEnumMap.asImmutable(new EnumMap<K, V>(map));
}
}
/**
* Creates a <i>mutable</i>, empty {@code HashMap} instance.
*
* <p><b>Note:</b> if mutability is not required, use {@link
* ImmutableMap#of()} instead.
*
* <p><b>Note:</b> if {@code K} is an {@code enum} type, use {@link
* #newEnumMap} instead.
*
* @return a new, empty {@code HashMap} | @GwtCompatible(serializable = true) static <K extends Enum<K>, V> ImmutableMap<K, V> function( Map<K, ? extends V> map) { if (map instanceof ImmutableEnumMap) { @SuppressWarnings(STR) ImmutableEnumMap<K, V> result = (ImmutableEnumMap<K, V>) map; return result; } else if (map.isEmpty()) { return ImmutableMap.of(); } else { for (Map.Entry<K, ? extends V> entry : map.entrySet()) { checkNotNull(entry.getKey()); checkNotNull(entry.getValue()); } return ImmutableEnumMap.asImmutable(new EnumMap<K, V>(map)); } } /** * Creates a <i>mutable</i>, empty {@code HashMap} instance. * * <p><b>Note:</b> if mutability is not required, use { * ImmutableMap#of()} instead. * * <p><b>Note:</b> if {@code K} is an {@code enum} type, use { * #newEnumMap} instead. * * @return a new, empty {@code HashMap} | /**
* Returns an immutable map instance containing the given entries.
* Internally, the returned map will be backed by an {@link EnumMap}.
*
* <p>The iteration order of the returned map follows the enum's iteration
* order, not the order in which the elements appear in the given map.
*
* @param map the map to make an immutable copy of
* @return an immutable map containing those entries
* @since 14.0
*/ | Returns an immutable map instance containing the given entries. Internally, the returned map will be backed by an <code>EnumMap</code>. The iteration order of the returned map follows the enum's iteration order, not the order in which the elements appear in the given map | immutableEnumMap | {
"repo_name": "binhvu7/guava",
"path": "guava/src/com/google/common/collect/Maps.java",
"license": "apache-2.0",
"size": 137526
} | [
"com.google.common.annotations.GwtCompatible",
"com.google.common.base.Preconditions",
"java.util.EnumMap",
"java.util.HashMap",
"java.util.Map"
] | import com.google.common.annotations.GwtCompatible; import com.google.common.base.Preconditions; import java.util.EnumMap; import java.util.HashMap; import java.util.Map; | import com.google.common.annotations.*; import com.google.common.base.*; import java.util.*; | [
"com.google.common",
"java.util"
] | com.google.common; java.util; | 775,255 |
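
A hedged usage sketch for the factory above, assuming Guava 14.0 or later on the classpath; note that iteration follows the enum's declaration order, not the order in which entries were inserted.

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.EnumMap;
import java.util.Map;

public class EnumMapDemo {
    enum Day { MON, TUE, WED }

    public static void main(String[] args) {
        Map<Day, String> input = new EnumMap<>(Day.class);
        input.put(Day.WED, "gym");
        input.put(Day.MON, "standup");
        ImmutableMap<Day, String> byDay = Maps.immutableEnumMap(input);
        System.out.println(byDay); // {MON=standup, WED=gym}, enum declaration order
    }
}
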
public final TaskExecutor getExecutor() {
return executor;
} | final TaskExecutor function() { return executor; } | /**
* Returns the executor used to execute action of the associated task node. If not
* specified anywhere, the default executor simply executes task synchronously on the
* calling thread.
*
* @return the executor used to execute action of the associated task node. This
* method never returns {@code null}.
*/ | Returns the executor used to execute action of the associated task node. If not specified anywhere, the default executor simply executes task synchronously on the calling thread | getExecutor | {
"repo_name": "kelemen/JTrim",
"path": "subprojects/jtrim-task-graph/src/main/java/org/jtrim2/taskgraph/TaskNodeProperties.java",
"license": "apache-2.0",
"size": 5735
} | [
"org.jtrim2.executor.TaskExecutor"
] | import org.jtrim2.executor.TaskExecutor; | import org.jtrim2.executor.*; | [
"org.jtrim2.executor"
] | org.jtrim2.executor; | 2,720,703 |
@Column(name = "AIR_CARR_CD", length = 3, nullable = true)
public String getAirCarrierCode() {
return airCarrierCode;
} | @Column(name = STR, length = 3, nullable = true) String function() { return airCarrierCode; } | /**
* Gets the airCarrierCode attribute.
* @return Returns the airCarrierCode.
*/ | Gets the airCarrierCode attribute | getAirCarrierCode | {
"repo_name": "bhutchinson/kfs",
"path": "kfs-tem/src/main/java/org/kuali/kfs/module/tem/businessobject/AgencyStagingData.java",
"license": "agpl-3.0",
"size": 52782
} | [
"javax.persistence.Column"
] | import javax.persistence.Column; | import javax.persistence.*; | [
"javax.persistence"
] | javax.persistence; | 2,056,226 |
public int compareTo(PublisherPublicKeyDigest o) {
int result = DataUtils.compare(this.digest(), o.digest());
return result;
} | int function(PublisherPublicKeyDigest o) { int result = DataUtils.compare(this.digest(), o.digest()); return result; } | /**
* Implement Comparable
* @param o the other thing to compare to
* @return -1, 0 or 1 depending on whether we are before, equal to or lexicographically after o
*/ | Implement Comparable | compareTo | {
"repo_name": "ebollens/ccnmp",
"path": "javasrc/src/org/ccnx/ccn/protocol/PublisherPublicKeyDigest.java",
"license": "lgpl-2.1",
"size": 8791
} | [
"org.ccnx.ccn.impl.support.DataUtils"
] | import org.ccnx.ccn.impl.support.DataUtils; | import org.ccnx.ccn.impl.support.*; | [
"org.ccnx.ccn"
] | org.ccnx.ccn; | 2,668,443 |
default <T> CompletionStage<T> executeCompletionStage(Supplier<CompletionStage<T>> supplier) {
return decorateCompletionStage(this, supplier).get();
} | default <T> CompletionStage<T> executeCompletionStage(Supplier<CompletionStage<T>> supplier) { return decorateCompletionStage(this, supplier).get(); } | /**
* Decorates and executes the decorated CompletionStage.
*
* @param supplier the original CompletionStage
* @param <T> the type of results supplied by this supplier
* @return the decorated CompletionStage.
*/ | Decorates and executes the decorated CompletionStage | executeCompletionStage | {
"repo_name": "drmaas/resilience4j",
"path": "resilience4j-ratelimiter/src/main/java/io/github/resilience4j/ratelimiter/RateLimiter.java",
"license": "apache-2.0",
"size": 33535
} | [
"java.util.concurrent.CompletionStage",
"java.util.function.Supplier"
] | import java.util.concurrent.CompletionStage; import java.util.function.Supplier; | import java.util.concurrent.*; import java.util.function.*; | [
"java.util"
] | java.util; | 2,770,063 |
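
A hedged usage sketch, assuming the resilience4j-ratelimiter artifact; the limiter settings are arbitrary and the CompletableFuture stands in for a real asynchronous call.

import io.github.resilience4j.ratelimiter.RateLimiter;
import io.github.resilience4j.ratelimiter.RateLimiterConfig;

import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;

public class RateLimitedCallDemo {
    public static void main(String[] args) {
        RateLimiterConfig config = RateLimiterConfig.custom()
                .limitForPeriod(10)                        // 10 calls ...
                .limitRefreshPeriod(Duration.ofSeconds(1)) // ... per second
                .timeoutDuration(Duration.ofMillis(100))
                .build();
        RateLimiter limiter = RateLimiter.of("backend", config);

        CompletionStage<String> stage =
                limiter.executeCompletionStage(() -> CompletableFuture.supplyAsync(() -> "ok"));
        stage.thenAccept(System.out::println);
    }
}
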
@RequestMapping(
value = "/{id}/dependencies", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE
)
@ResponseStatus(HttpStatus.NO_CONTENT)
public void addDependenciesForApplication(
@PathVariable("id") final String id,
@RequestBody final Set<String> dependencies
) throws GenieException {
log.debug("Called with id {} and dependencies {}", id, dependencies);
this.applicationService.addDependenciesForApplication(id, dependencies);
} | @RequestMapping( value = STR, method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE ) @ResponseStatus(HttpStatus.NO_CONTENT) void function( @PathVariable("id") final String id, @RequestBody final Set<String> dependencies ) throws GenieException { log.debug(STR, id, dependencies); this.applicationService.addDependenciesForApplication(id, dependencies); } | /**
* Add new dependency files for a given application.
*
* @param id The id of the application to add the dependency file to. Not
* null/empty/blank.
* @param dependencies The dependency files to add. Not null.
* @throws GenieException For any error
*/ | Add new dependency files for a given application | addDependenciesForApplication | {
"repo_name": "ajoymajumdar/genie",
"path": "genie-web/src/main/java/com/netflix/genie/web/controllers/ApplicationRestController.java",
"license": "apache-2.0",
"size": 20549
} | [
"com.netflix.genie.common.exceptions.GenieException",
"java.util.Set",
"org.springframework.http.HttpStatus",
"org.springframework.http.MediaType",
"org.springframework.web.bind.annotation.PathVariable",
"org.springframework.web.bind.annotation.RequestBody",
"org.springframework.web.bind.annotation.RequestMapping",
"org.springframework.web.bind.annotation.RequestMethod",
"org.springframework.web.bind.annotation.ResponseStatus"
] | import com.netflix.genie.common.exceptions.GenieException; import java.util.Set; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseStatus; | import com.netflix.genie.common.exceptions.*; import java.util.*; import org.springframework.http.*; import org.springframework.web.bind.annotation.*; | [
"com.netflix.genie",
"java.util",
"org.springframework.http",
"org.springframework.web"
] | com.netflix.genie; java.util; org.springframework.http; org.springframework.web; | 2,357,269 |
public String getPublishedId()
{
return Validator.check(publishedId, "0");
} | String function() { return Validator.check(publishedId, "0"); } | /**
* get published id
*
* @return the published id
*/ | get published id | getPublishedId | {
"repo_name": "rodriguezdevera/sakai",
"path": "samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/evaluation/TotalScoresBean.java",
"license": "apache-2.0",
"size": 33629
} | [
"org.sakaiproject.tool.assessment.ui.bean.util.Validator"
] | import org.sakaiproject.tool.assessment.ui.bean.util.Validator; | import org.sakaiproject.tool.assessment.ui.bean.util.*; | [
"org.sakaiproject.tool"
] | org.sakaiproject.tool; | 1,721,575 |
public final String getPriority() {
return DicomPriority.toString(priority);
}
| final String function() { return DicomPriority.toString(priority); } | /**
* Returns the DICOM priority as a string value.
* <p>
* This value is used for CFIND.
* 0..MED, 1..HIGH, 2..LOW
*
* @return Returns the priority.
*/ | Returns the DICOM priority as a string value. This value is used for CFIND. 0..MED, 1..HIGH, 2..LOW | getPriority | {
"repo_name": "medicayun/medicayundicom",
"path": "dcm4jboss-all/tags/DCM4JBOSS_2_5_4/dcm4jboss-sar/src/java/org/dcm4chex/archive/dcm/mwlscu/MWLScuService.java",
"license": "apache-2.0",
"size": 11722
} | [
"org.dcm4chex.archive.config.DicomPriority"
] | import org.dcm4chex.archive.config.DicomPriority; | import org.dcm4chex.archive.config.*; | [
"org.dcm4chex.archive"
] | org.dcm4chex.archive; | 1,484,183 |
@Override
protected void doPaintSelection(Graphics g) {
} | void function(Graphics g) { } | /**
* Paints the selection.
*
* @param g the graphics context
*/ | Paints the selection | doPaintSelection | {
"repo_name": "waikato-datamining/adams-base",
"path": "adams-imaging/src/main/java/adams/gui/visualization/object/annotator/ClassificationLabelAnnotator.java",
"license": "gpl-3.0",
"size": 3184
} | [
"java.awt.Graphics"
] | import java.awt.Graphics; | import java.awt.*; | [
"java.awt"
] | java.awt; | 724,381 |
public Attributes getAttributes(Name name)
throws NamingException {
CacheEntry entry = cacheLookup(name.toString());
if (entry != null) {
if (!entry.exists) {
throw notFoundException;
}
return entry.attributes;
}
Attributes attributes = dirContext.getAttributes(parseName(name));
if (!(attributes instanceof ResourceAttributes)) {
attributes = new ResourceAttributes(attributes);
}
return attributes;
} | Attributes function(Name name) throws NamingException { CacheEntry entry = cacheLookup(name.toString()); if (entry != null) { if (!entry.exists) { throw notFoundException; } return entry.attributes; } Attributes attributes = dirContext.getAttributes(parseName(name)); if (!(attributes instanceof ResourceAttributes)) { attributes = new ResourceAttributes(attributes); } return attributes; } | /**
* Retrieves all of the attributes associated with a named object.
*
* @return the set of attributes associated with name.
* Returns an empty attribute set if name has no attributes; never null.
* @param name the name of the object from which to retrieve attributes
* @exception NamingException if a naming exception is encountered
*/ | Retrieves all of the attributes associated with a named object | getAttributes | {
"repo_name": "benothman/jboss-web-nio2",
"path": "java/org/apache/naming/resources/ProxyDirContext.java",
"license": "lgpl-3.0",
"size": 70983
} | [
"javax.naming.Name",
"javax.naming.NamingException",
"javax.naming.directory.Attributes"
] | import javax.naming.Name; import javax.naming.NamingException; import javax.naming.directory.Attributes; | import javax.naming.*; import javax.naming.directory.*; | [
"javax.naming"
] | javax.naming; | 2,205,647 |
public void addItemText(String text, Set answerSet) {
if (this.data.getItemTextSet() == null) {
this.data.setItemTextSet(new HashSet());
}
Long sequence = Long.valueOf(this.data.getItemTextSet().size()+1);
ItemText itemText = new ItemText((ItemData)this.data, sequence,
text, answerSet);
this.data.getItemTextSet().add(itemText);
this.itemTextSet = this.data.getItemTextSet();
} | void function(String text, Set answerSet) { if (this.data.getItemTextSet() == null) { this.data.setItemTextSet(new HashSet()); } Long sequence = Long.valueOf(this.data.getItemTextSet().size()+1); ItemText itemText = new ItemText((ItemData)this.data, sequence, text, answerSet); this.data.getItemTextSet().add(itemText); this.itemTextSet = this.data.getItemTextSet(); } | /**
* Add item text (question text) to ItemFacade (question). For multiple
* choice, multiple correct, survey, matching & fill in the blank, you can
* specify a set of acceptable answers. Usually, the purpose for this is
* to facilitate auto-grading.
* @param text
* @param answerSet
*/ | Add item text (question text) to ItemFacade (question). For multiple choice, multiple correct, survey, matching & fill in the blank, you can specify a set of acceptable answers. Usually, the purpose for this is to facilitate auto-grading | addItemText | {
"repo_name": "ouit0408/sakai",
"path": "samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/facade/ItemFacade.java",
"license": "apache-2.0",
"size": 33564
} | [
"java.util.HashSet",
"java.util.Set",
"org.sakaiproject.tool.assessment.data.dao.assessment.ItemData",
"org.sakaiproject.tool.assessment.data.dao.assessment.ItemText"
] | import java.util.HashSet; import java.util.Set; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemData; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemText; | import java.util.*; import org.sakaiproject.tool.assessment.data.dao.assessment.*; | [
"java.util",
"org.sakaiproject.tool"
] | java.util; org.sakaiproject.tool; | 1,919,048 |
private void matchActionsToDisplayObjects() {
Collection c = displayobjects.values();
Iterator it = c.iterator();
while (it.hasNext()) {
Object o = it.next();
if (o instanceof JComponentHandler) {
JComponentHandler handler = (JComponentHandler) o;
// if this component handler has action ids specified
List refs = handler.getActionReferences();
if (refs != null) {
//go through all the action references associated with the handler, and find the corresponding action
Iterator rit = refs.iterator();
while (rit.hasNext()) {
NActionReference ref = (NActionReference) rit.next();
String actionID = ref.getActionID();
//find the action which matches this ID.
String keyBinding = ref.getTrigger();
handler.registerAction(
keyBinding,
findAction(actionID));
}
}
}
}
} | void function() { Collection c = displayobjects.values(); Iterator it = c.iterator(); while (it.hasNext()) { Object o = it.next(); if (o instanceof JComponentHandler) { JComponentHandler handler = (JComponentHandler) o; List refs = handler.getActionReferences(); if (refs != null) { Iterator rit = refs.iterator(); while (rit.hasNext()) { NActionReference ref = (NActionReference) rit.next(); String actionID = ref.getActionID(); String keyBinding = ref.getTrigger(); handler.registerAction( keyBinding, findAction(actionID)); } } } } } | /**
* This is used to link the actions specified in the xml to the
* components. The actions each have an id, which is matched to
* the actionID attribute in JComponentHandlers. Once the correct
* action is found for a component, an event listener is set up
* */ | This is used to link the actions specified in the xml to the components. The actions each have an id, which is matched to the actionID attribute in JComponentHandlers. Once the correct action is found for a component, an event listener is set up | matchActionsToDisplayObjects | {
"repo_name": "boompieman/iim_project",
"path": "nxt_1.4.4/src/net/sourceforge/nite/nstyle/JDomParser.java",
"license": "gpl-3.0",
"size": 20347
} | [
"java.util.Collection",
"java.util.Iterator",
"java.util.List",
"net.sourceforge.nite.gui.actions.NActionReference",
"net.sourceforge.nite.nstyle.handler.JComponentHandler"
] | import java.util.Collection; import java.util.Iterator; import java.util.List; import net.sourceforge.nite.gui.actions.NActionReference; import net.sourceforge.nite.nstyle.handler.JComponentHandler; | import java.util.*; import net.sourceforge.nite.gui.actions.*; import net.sourceforge.nite.nstyle.handler.*; | [
"java.util",
"net.sourceforge.nite"
] | java.util; net.sourceforge.nite; | 256,920 |
void saveWSDL(Organization org, String apiId, ResourceFile wsdlResourceFile) throws WSDLPersistenceException; | void saveWSDL(Organization org, String apiId, ResourceFile wsdlResourceFile) throws WSDLPersistenceException; | /**
* Save the passed WSDL schema definition of the API. This includes initial creation operation and later
* update operations
*
* @param org Organization the WSDL is owned by
* @param apiId API ID
* @param wsdlResourceFile WSDL Resource File
* @throws WSDLPersistenceException
*/ | Save the passed WSDL schema definition of the API. This includes initial creation operation and later update operations | saveWSDL | {
"repo_name": "jaadds/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.persistence/src/main/java/org/wso2/carbon/apimgt/persistence/APIPersistence.java",
"license": "apache-2.0",
"size": 18354
} | [
"org.wso2.carbon.apimgt.persistence.dto.Organization",
"org.wso2.carbon.apimgt.persistence.dto.ResourceFile",
"org.wso2.carbon.apimgt.persistence.exceptions.WSDLPersistenceException"
] | import org.wso2.carbon.apimgt.persistence.dto.Organization; import org.wso2.carbon.apimgt.persistence.dto.ResourceFile; import org.wso2.carbon.apimgt.persistence.exceptions.WSDLPersistenceException; | import org.wso2.carbon.apimgt.persistence.dto.*; import org.wso2.carbon.apimgt.persistence.exceptions.*; | [
"org.wso2.carbon"
] | org.wso2.carbon; | 1,344,434 |
public Query getQuery() throws JSONException, IOException {
return getQ(false, false);
}
| Query function() throws JSONException, IOException { return getQ(false, false); } | /**
* Gets a Query object. There is one request to the server.
*
* @return Created Query object.
* @throws org.json.JSONException
* @throws java.io.IOException
*/ | Gets a Query object. There is one request to the server | getQuery | {
"repo_name": "valdasraps/resthub",
"path": "clients/java/src/main/java/lt/emasina/resthub/model/QueryManager.java",
"license": "lgpl-3.0",
"size": 12678
} | [
"java.io.IOException",
"org.json.JSONException"
] | import java.io.IOException; import org.json.JSONException; | import java.io.*; import org.json.*; | [
"java.io",
"org.json"
] | java.io; org.json; | 1,545,078 |
public HTable createTable(TableName tableName, byte[] family)
throws IOException{
return createTable(tableName, new byte[][]{family});
} | HTable function(TableName tableName, byte[] family) throws IOException{ return createTable(tableName, new byte[][]{family}); } | /**
* Create a table.
* @param tableName
* @param family
* @return An HTable instance for the created table.
* @throws IOException
*/ | Create a table | createTable | {
"repo_name": "juwi/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 151512
} | [
"java.io.IOException",
"org.apache.hadoop.hbase.client.HTable"
] | import java.io.IOException; import org.apache.hadoop.hbase.client.HTable; | import java.io.*; import org.apache.hadoop.hbase.client.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 95,109 |
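
A hedged sketch of using the testing utility from a standalone test, assuming the hbase-server test jar and the older HTable-returning signature shown in this record; cluster startup is slow and normally belongs in a @BeforeClass hook.

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class MiniClusterTableDemo {
    public static void main(String[] args) throws Exception {
        HBaseTestingUtility util = new HBaseTestingUtility();
        util.startMiniCluster();                       // spins up an in-process HBase
        try {
            HTable table = util.createTable(TableName.valueOf("demo"), Bytes.toBytes("cf"));
            System.out.println("created " + table.getName());
        } finally {
            util.shutdownMiniCluster();
        }
    }
}
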
public static void initReadRange(final CFlags flags) {
initReadRange(flags, "read");
} | static void function(final CFlags flags) { initReadRange(flags, "read"); } | /**
* Initialise flags for ranges of reads
* @param flags shared flags
*/ | Initialise flags for ranges of reads | initReadRange | {
"repo_name": "RealTimeGenomics/rtg-tools",
"path": "src/com/rtg/launcher/CommonFlags.java",
"license": "bsd-2-clause",
"size": 30102
} | [
"com.rtg.util.cli.CFlags"
] | import com.rtg.util.cli.CFlags; | import com.rtg.util.cli.*; | [
"com.rtg.util"
] | com.rtg.util; | 1,124,758 |
private void configureCircuitBreakerOpenStateIntervalFunction(InstanceProperties properties,
CircuitBreakerConfig.Builder builder) {
// these take precedence over deprecated properties. Setting one or the other will still work.
if (properties.getWaitDurationInOpenState() != null
&& properties.getWaitDurationInOpenState().toMillis() > 0) {
Duration waitDuration = properties.getWaitDurationInOpenState();
if (properties.getEnableExponentialBackoff() != null && properties
.getEnableExponentialBackoff()) {
if (properties.getExponentialBackoffMultiplier() != null) {
builder.waitIntervalFunctionInOpenState(IntervalFunction
.ofExponentialBackoff(waitDuration.toMillis(),
properties.getExponentialBackoffMultiplier()));
} else {
builder.waitIntervalFunctionInOpenState(IntervalFunction
.ofExponentialBackoff(properties.getWaitDurationInOpenState().toMillis()));
}
} else if (properties.getEnableRandomizedWait() != null && properties
.getEnableRandomizedWait()) {
if (properties.getRandomizedWaitFactor() != null) {
builder.waitIntervalFunctionInOpenState(IntervalFunction
.ofRandomized(waitDuration.toMillis(),
properties.getRandomizedWaitFactor()));
} else {
builder.waitIntervalFunctionInOpenState(
IntervalFunction.ofRandomized(waitDuration));
}
} else {
builder.waitDurationInOpenState(properties.getWaitDurationInOpenState());
}
}
} | void function(InstanceProperties properties, CircuitBreakerConfig.Builder builder) { if (properties.getWaitDurationInOpenState() != null && properties.getWaitDurationInOpenState().toMillis() > 0) { Duration waitDuration = properties.getWaitDurationInOpenState(); if (properties.getEnableExponentialBackoff() != null && properties .getEnableExponentialBackoff()) { if (properties.getExponentialBackoffMultiplier() != null) { builder.waitIntervalFunctionInOpenState(IntervalFunction .ofExponentialBackoff(waitDuration.toMillis(), properties.getExponentialBackoffMultiplier())); } else { builder.waitIntervalFunctionInOpenState(IntervalFunction .ofExponentialBackoff(properties.getWaitDurationInOpenState().toMillis())); } } else if (properties.getEnableRandomizedWait() != null && properties .getEnableRandomizedWait()) { if (properties.getRandomizedWaitFactor() != null) { builder.waitIntervalFunctionInOpenState(IntervalFunction .ofRandomized(waitDuration.toMillis(), properties.getRandomizedWaitFactor())); } else { builder.waitIntervalFunctionInOpenState( IntervalFunction.ofRandomized(waitDuration)); } } else { builder.waitDurationInOpenState(properties.getWaitDurationInOpenState()); } } } | /**
* decide which circuit breaker delay policy for open state will be configured based on the
* configured properties
*
* @param properties the backend circuit breaker properties
* @param builder the circuit breaker config builder
*/ | decide which circuit breaker delay policy for open state will be configured based on the configured properties | configureCircuitBreakerOpenStateIntervalFunction | {
"repo_name": "drmaas/resilience4j",
"path": "resilience4j-framework-common/src/main/java/io/github/resilience4j/common/circuitbreaker/configuration/CircuitBreakerConfigurationProperties.java",
"license": "apache-2.0",
"size": 26520
} | [
"io.github.resilience4j.circuitbreaker.CircuitBreakerConfig",
"io.github.resilience4j.core.IntervalFunction",
"java.time.Duration"
] | import io.github.resilience4j.circuitbreaker.CircuitBreakerConfig; import io.github.resilience4j.core.IntervalFunction; import java.time.Duration; | import io.github.resilience4j.circuitbreaker.*; import io.github.resilience4j.core.*; import java.time.*; | [
"io.github.resilience4j",
"java.time"
] | io.github.resilience4j; java.time; | 975,022 |
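The record above picks between fixed, exponential, and randomized wait intervals for the circuit breaker's open state. A minimal hedged sketch of wiring the exponential variant directly into a CircuitBreakerConfig; the numeric values are illustrative, not from the record:

import io.github.resilience4j.circuitbreaker.CircuitBreakerConfig;
import io.github.resilience4j.core.IntervalFunction;

public class OpenStateBackoffSketch {
    // 500 ms initial wait, doubled after each transition to open (illustrative values).
    public static CircuitBreakerConfig exponentialOpenState() {
        IntervalFunction backoff = IntervalFunction.ofExponentialBackoff(500L, 2.0);
        return CircuitBreakerConfig.custom()
                .waitIntervalFunctionInOpenState(backoff)
                .build();
    }
}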
protected void testFileSpecificMetadata(String mimetype,
Map<QName, Serializable> properties)
{
// Check for extra fields
assertEquals(
"Property " + ContentModel.PROP_AUTHOR + " not found for mimetype " + mimetype,
"Nevin Nollop",
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_AUTHOR)));
// Ensure that we can also get things which are standard
// Tika metadata properties, if we so choose to
assertTrue(
"Test Property " + TIKA_LAST_AUTHOR_TEST_PROPERTY + " not found for mimetype " + mimetype,
properties.containsKey(TIKA_LAST_AUTHOR_TEST_PROPERTY)
);
assertEquals(
"Test Property " + TIKA_LAST_AUTHOR_TEST_PROPERTY + " incorrect for mimetype " + mimetype,
"paolon",
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(TIKA_LAST_AUTHOR_TEST_PROPERTY)));
} | void function(String mimetype, Map<QName, Serializable> properties) { assertEquals( STR + ContentModel.PROP_AUTHOR + STR + mimetype, STR, DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_AUTHOR))); assertTrue( STR + TIKA_LAST_AUTHOR_TEST_PROPERTY + STR + mimetype, properties.containsKey(TIKA_LAST_AUTHOR_TEST_PROPERTY) ); assertEquals( STR + TIKA_LAST_AUTHOR_TEST_PROPERTY + STR + mimetype, STR, DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(TIKA_LAST_AUTHOR_TEST_PROPERTY))); } | /**
* We also provide the creation date - check that
*/ | We also provide the creation date - check that | testFileSpecificMetadata | {
"repo_name": "Alfresco/alfresco-repository",
"path": "src/test/java/org/alfresco/repo/content/metadata/DWGMetadataExtracterTest.java",
"license": "lgpl-3.0",
"size": 6533
} | [
"java.io.Serializable",
"java.util.Map",
"org.alfresco.model.ContentModel",
"org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter",
"org.alfresco.service.namespace.QName"
] | import java.io.Serializable; import java.util.Map; import org.alfresco.model.ContentModel; import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter; import org.alfresco.service.namespace.QName; | import java.io.*; import java.util.*; import org.alfresco.model.*; import org.alfresco.service.cmr.repository.datatype.*; import org.alfresco.service.namespace.*; | [
"java.io",
"java.util",
"org.alfresco.model",
"org.alfresco.service"
] | java.io; java.util; org.alfresco.model; org.alfresco.service; | 2,103,804 |
protected synchronized void fireSelectionCommandPerformed(){
ConfirmboxEvent event = new ConfirmboxEvent(this, selectionCommand, ConfirmboxEvent.NULL_COMMAND);
Iterator<TextboxListener> iterator = listeners.iterator();
while(iterator.hasNext()){
((ConfirmboxListener) iterator.next()).selectionCommandPerformed(event);
}
}
| synchronized void function(){ ConfirmboxEvent event = new ConfirmboxEvent(this, selectionCommand, ConfirmboxEvent.NULL_COMMAND); Iterator<TextboxListener> iterator = listeners.iterator(); while(iterator.hasNext()){ ((ConfirmboxListener) iterator.next()).selectionCommandPerformed(event); } } | /**
* Fires when a selection command is performed.
*/ | Fires when a selection command is performed | fireSelectionCommandPerformed | {
"repo_name": "dstumpff/VortexGameLibrary",
"path": "VortexGameEngine/src/vortex/gameentity/textbox/Confirmbox.java",
"license": "bsd-3-clause",
"size": 10870
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 1,964,171 |
@WebMethod
@Path("/removeAllMembersFromAuthzGroup")
@Produces("text/plain")
@GET
public String removeAllMembersFromAuthzGroup(
@WebParam(name = "sessionid", partName = "sessionid") @QueryParam("sessionid") String sessionid,
@WebParam(name = "authzgroupid", partName = "authzgroupid") @QueryParam("authzgroupid") String authzgroupid) {
Session session = establishSession(sessionid);
try {
AuthzGroup realmEdit = authzGroupService.getAuthzGroup(authzgroupid);
realmEdit.removeMembers();
authzGroupService.save(realmEdit);
} catch (Exception e) {
LOG.error("WS removeAllMembersFromAuthzGroup(): " + e.getClass().getName() + " : " + e.getMessage());
return e.getClass().getName() + " : " + e.getMessage();
}
return "success";
} | @Path(STR) @Produces(STR) String function( @WebParam(name = STR, partName = STR) @QueryParam(STR) String sessionid, @WebParam(name = STR, partName = STR) @QueryParam(STR) String authzgroupid) { Session session = establishSession(sessionid); try { AuthzGroup realmEdit = authzGroupService.getAuthzGroup(authzgroupid); realmEdit.removeMembers(); authzGroupService.save(realmEdit); } catch (Exception e) { LOG.error(STR + e.getClass().getName() + STR + e.getMessage()); return e.getClass().getName() + STR + e.getMessage(); } return STR; } | /**
* Remove all users from an authgroup
*
* @param sessionid the id of a valid session
* @param authzgroupid the id of the authzgroup to remove the users from
* @return success or exception message
*/ | Remove all users from an authgroup | removeAllMembersFromAuthzGroup | {
"repo_name": "pushyamig/sakai",
"path": "webservices/cxf/src/java/org/sakaiproject/webservices/SakaiScript.java",
"license": "apache-2.0",
"size": 209455
} | [
"javax.jws.WebParam",
"javax.ws.rs.Path",
"javax.ws.rs.Produces",
"javax.ws.rs.QueryParam",
"org.sakaiproject.authz.api.AuthzGroup",
"org.sakaiproject.tool.api.Session"
] | import javax.jws.WebParam; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import org.sakaiproject.authz.api.AuthzGroup; import org.sakaiproject.tool.api.Session; | import javax.jws.*; import javax.ws.rs.*; import org.sakaiproject.authz.api.*; import org.sakaiproject.tool.api.*; | [
"javax.jws",
"javax.ws",
"org.sakaiproject.authz",
"org.sakaiproject.tool"
] | javax.jws; javax.ws; org.sakaiproject.authz; org.sakaiproject.tool; | 1,446,567 |
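Since the endpoint above is exposed over JAX-RS as a GET returning plain text, a hypothetical client call could look like the sketch below; the base URL, session id, and authz group id are placeholders, not taken from the record:

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class RemoveMembersClientSketch {
    public static void main(String[] args) throws Exception {
        String base = "https://sakai.example.edu/sakai-ws/rest/sakaiscript"; // assumed deployment path
        String url = base + "/removeAllMembersFromAuthzGroup"
                + "?sessionid=" + URLEncoder.encode("SESSION_ID", StandardCharsets.UTF_8)
                + "&authzgroupid=" + URLEncoder.encode("/site/1234", StandardCharsets.UTF_8);
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        HttpResponse<String> response =
                HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // "success" or the exception message
    }
}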
@VisibleForTesting
Set<Pair<Protos.OfferID, TaskGroupKey>> getStaticBans() {
return hostOffers.getStaticBans();
} | Set<Pair<Protos.OfferID, TaskGroupKey>> getStaticBans() { return hostOffers.getStaticBans(); } | /**
* Get all static bans.
*/ | Get all static bans | getStaticBans | {
"repo_name": "apache/aurora",
"path": "src/main/java/org/apache/aurora/scheduler/offers/OfferManagerImpl.java",
"license": "apache-2.0",
"size": 8602
} | [
"java.util.Set",
"org.apache.aurora.common.collections.Pair",
"org.apache.aurora.scheduler.base.TaskGroupKey",
"org.apache.mesos.v1.Protos"
] | import java.util.Set; import org.apache.aurora.common.collections.Pair; import org.apache.aurora.scheduler.base.TaskGroupKey; import org.apache.mesos.v1.Protos; | import java.util.*; import org.apache.aurora.common.collections.*; import org.apache.aurora.scheduler.base.*; import org.apache.mesos.v1.*; | [
"java.util",
"org.apache.aurora",
"org.apache.mesos"
] | java.util; org.apache.aurora; org.apache.mesos; | 758,371 |
public long getIntervalInMillis() {
return TimeUnit.MILLISECONDS.convert(getIntervalInHours(), TimeUnit.HOURS);
}
} | long function() { return TimeUnit.MILLISECONDS.convert(getIntervalInHours(), TimeUnit.HOURS); } } | /**
* Get the garbage collection interval in milliseconds.
*
* @return the interval; never null
*/ | Get the garbage collection interval in milliseconds | getIntervalInMillis | {
"repo_name": "jasperstein/modeshape",
"path": "modeshape-jcr/src/main/java/org/modeshape/jcr/RepositoryConfiguration.java",
"license": "apache-2.0",
"size": 125045
} | [
"java.util.concurrent.TimeUnit"
] | import java.util.concurrent.TimeUnit; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 2,281,315 |
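The conversion above is a plain unit multiplication; a quick worked check with an illustrative value:

import java.util.concurrent.TimeUnit;

public class IntervalCheck {
    public static void main(String[] args) {
        // 12 hours * 60 * 60 * 1000 = 43,200,000 ms
        System.out.println(TimeUnit.MILLISECONDS.convert(12, TimeUnit.HOURS)); // 43200000
    }
}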
public static Set<Method> parseList(String methodList) {
if (methodList == null) {
return ALL;
}
methodList = methodList.toUpperCase(Locale.ROOT);
String[] methods = methodList.split(",");
if (methods.length == 0) {
return ALL;
} else if (methods.length == 1) {
return Collections.singleton(Method.valueOf(methods[0]));
} else {
Set<Method> result = new TreeSet<>();
for (String method : methods) {
result.add(Method.valueOf(method.trim()));
}
return ALL.equals(result) ? ALL : Collections.unmodifiableSet(result);
}
} | static Set<Method> function(String methodList) { if (methodList == null) { return ALL; } methodList = methodList.toUpperCase(Locale.ROOT); String[] methods = methodList.split(","); if (methods.length == 0) { return ALL; } else if (methods.length == 1) { return Collections.singleton(Method.valueOf(methods[0])); } else { Set<Method> result = new TreeSet<>(); for (String method : methods) { result.add(Method.valueOf(method.trim())); } return ALL.equals(result) ? ALL : Collections.unmodifiableSet(result); } } | /**
* Parse the given comma separated {@code methodList} to a {@link Set} of {@link Method}s. If {@code methodList} is
* empty or {@code null} returns {@link #ALL}.
*
* @param methodList a comma separated list of HTTP method names.
* @return a {@link Set} of {@link Method}s
*/ | Parse the given comma separated methodList to a <code>Set</code> of <code>Method</code>s. If methodList is empty or null returns <code>#ALL</code> | parseList | {
"repo_name": "objectiser/camel",
"path": "components/camel-platform-http/src/main/java/org/apache/camel/component/platform/http/spi/Method.java",
"license": "apache-2.0",
"size": 2685
} | [
"java.util.Collections",
"java.util.Locale",
"java.util.Set",
"java.util.TreeSet"
] | import java.util.Collections; import java.util.Locale; import java.util.Set; import java.util.TreeSet; | import java.util.*; | [
"java.util"
] | java.util; | 1,391,778 |
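A small usage sketch of the parser above, assuming the enum declares the usual HTTP method constants such as GET and POST:

import java.util.Set;
import org.apache.camel.component.platform.http.spi.Method;

public class ParseListSketch {
    public static void main(String[] args) {
        Set<Method> methods = Method.parseList("get, post"); // case-insensitive, entries trimmed
        System.out.println(methods);                // e.g. [GET, POST]
        System.out.println(Method.parseList(null)); // null or empty input falls back to ALL
    }
}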
public int calculateIndex(IFigure container, Point location) {
return 0;
}
} | int function(IFigure container, Point location) { return 0; } } | /**
* Calculate the index of the container.
*
* @param container
* The container
* @param location
* The location
* @return int
*/ | Calculate the index of the container | calculateIndex | {
"repo_name": "debabratahazra/DS",
"path": "designstudio/components/page/ui/com.odcgroup.page.ui/src/main/java/com/odcgroup/page/ui/figure/table/ColumnHeader.java",
"license": "epl-1.0",
"size": 8822
} | [
"org.eclipse.draw2d.IFigure",
"org.eclipse.draw2d.geometry.Point"
] | import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.geometry.Point; | import org.eclipse.draw2d.*; import org.eclipse.draw2d.geometry.*; | [
"org.eclipse.draw2d"
] | org.eclipse.draw2d; | 2,688,029 |
protected Collection<IAction> generateCreateChildActions(Collection<?> descriptors, ISelection selection)
{
Collection<IAction> actions = new ArrayList<IAction>();
if (descriptors != null)
{
for (Object descriptor : descriptors)
{
actions.add(new CreateChildAction(activeEditorPart, selection, descriptor));
}
}
return actions;
} | Collection<IAction> function(Collection<?> descriptors, ISelection selection) { Collection<IAction> actions = new ArrayList<IAction>(); if (descriptors != null) { for (Object descriptor : descriptors) { actions.add(new CreateChildAction(activeEditorPart, selection, descriptor)); } } return actions; } | /**
* This generates a {@link org.eclipse.emf.edit.ui.action.CreateChildAction} for each object in <code>descriptors</code>,
* and returns the collection of these actions.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This generates a <code>org.eclipse.emf.edit.ui.action.CreateChildAction</code> for each object in <code>descriptors</code>, and returns the collection of these actions. | generateCreateChildActions | {
"repo_name": "mdean77/Model-Driven-Decision-Support",
"path": "edu.utah.dcc.e4.application.xcore.model/src/application/presentation/ApplicationActionBarContributor.java",
"license": "epl-1.0",
"size": 14893
} | [
"java.util.ArrayList",
"java.util.Collection",
"org.eclipse.emf.edit.ui.action.CreateChildAction",
"org.eclipse.jface.action.IAction",
"org.eclipse.jface.viewers.ISelection"
] | import java.util.ArrayList; import java.util.Collection; import org.eclipse.emf.edit.ui.action.CreateChildAction; import org.eclipse.jface.action.IAction; import org.eclipse.jface.viewers.ISelection; | import java.util.*; import org.eclipse.emf.edit.ui.action.*; import org.eclipse.jface.action.*; import org.eclipse.jface.viewers.*; | [
"java.util",
"org.eclipse.emf",
"org.eclipse.jface"
] | java.util; org.eclipse.emf; org.eclipse.jface; | 484,651 |
protected final Assertion getAssertionFrom(final Map<String, Object> model) {
return (Assertion) model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_ASSERTION);
} | final Assertion function(final Map<String, Object> model) { return (Assertion) model.get(CasViewConstants.MODEL_ATTRIBUTE_NAME_ASSERTION); } | /**
* Gets the assertion from the model.
*
* @param model the model
* @return the assertion from
*/ | Gets the assertion from the model | getAssertionFrom | {
"repo_name": "xuchengdong/cas4.1.9",
"path": "cas-server-core/src/main/java/org/jasig/cas/web/view/AbstractCasView.java",
"license": "apache-2.0",
"size": 14281
} | [
"java.util.Map",
"org.jasig.cas.validation.Assertion"
] | import java.util.Map; import org.jasig.cas.validation.Assertion; | import java.util.*; import org.jasig.cas.validation.*; | [
"java.util",
"org.jasig.cas"
] | java.util; org.jasig.cas; | 1,422,927 |
public TCardFieldOptionBean getBean(IdentityMap createdBeans)
{
TCardFieldOptionBean result = (TCardFieldOptionBean) createdBeans.get(this);
if (result != null ) {
// we have already created a bean for this object, return it
return result;
}
// no bean exists for this object; create a new one
result = new TCardFieldOptionBean();
createdBeans.put(this, result);
result.setObjectID(getObjectID());
result.setGroupingField(getGroupingField());
result.setOptionID(getOptionID());
result.setOptionPosition(getOptionPosition());
result.setOptionWidth(getOptionWidth());
result.setMaxNumber(getMaxNumber());
result.setUuid(getUuid());
if (aTCardGroupingField != null)
{
TCardGroupingFieldBean relatedBean = aTCardGroupingField.getBean(createdBeans);
result.setTCardGroupingFieldBean(relatedBean);
}
result.setModified(isModified());
result.setNew(isNew());
return result;
} | TCardFieldOptionBean function(IdentityMap createdBeans) { TCardFieldOptionBean result = (TCardFieldOptionBean) createdBeans.get(this); if (result != null ) { return result; } result = new TCardFieldOptionBean(); createdBeans.put(this, result); result.setObjectID(getObjectID()); result.setGroupingField(getGroupingField()); result.setOptionID(getOptionID()); result.setOptionPosition(getOptionPosition()); result.setOptionWidth(getOptionWidth()); result.setMaxNumber(getMaxNumber()); result.setUuid(getUuid()); if (aTCardGroupingField != null) { TCardGroupingFieldBean relatedBean = aTCardGroupingField.getBean(createdBeans); result.setTCardGroupingFieldBean(relatedBean); } result.setModified(isModified()); result.setNew(isNew()); return result; } | /**
* Creates a TCardFieldOptionBean with the contents of this object
* intended for internal use only
* @param createdBeans a IdentityMap which maps objects
* to already created beans
* @return a TCardFieldOptionBean with the contents of this object
*/ | Creates a TCardFieldOptionBean with the contents of this object intended for internal use only | getBean | {
"repo_name": "trackplus/Genji",
"path": "src/main/java/com/aurel/track/persist/BaseTCardFieldOption.java",
"license": "gpl-3.0",
"size": 30548
} | [
"com.aurel.track.beans.TCardFieldOptionBean",
"com.aurel.track.beans.TCardGroupingFieldBean",
"org.apache.commons.collections.map.IdentityMap"
] | import com.aurel.track.beans.TCardFieldOptionBean; import com.aurel.track.beans.TCardGroupingFieldBean; import org.apache.commons.collections.map.IdentityMap; | import com.aurel.track.beans.*; import org.apache.commons.collections.map.*; | [
"com.aurel.track",
"org.apache.commons"
] | com.aurel.track; org.apache.commons; | 731,225 |
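The IdentityMap of already-created beans is what keeps this conversion from recursing forever on cyclic object graphs. A minimal generic sketch of the same idea using the JDK's IdentityHashMap; the class and field names here are hypothetical, not from the record:

import java.util.IdentityHashMap;
import java.util.Map;

public class CycleSafeCopySketch {
    static class Node { String name; Node next; }
    static class NodeBean { String name; NodeBean next; }

    static NodeBean toBean(Node node, Map<Node, NodeBean> created) {
        if (node == null) {
            return null;
        }
        NodeBean existing = created.get(node);
        if (existing != null) {
            return existing;              // already converted: reuse, do not recurse again
        }
        NodeBean bean = new NodeBean();
        created.put(node, bean);          // register before following references
        bean.name = node.name;
        bean.next = toBean(node.next, created);
        return bean;
    }

    public static void main(String[] args) {
        Node a = new Node();
        a.name = "a";
        a.next = a;                       // self-cycle
        NodeBean bean = toBean(a, new IdentityHashMap<>());
        System.out.println(bean.next == bean); // true, and no StackOverflowError
    }
}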
@Test
public void testECOWithFormB31() {
LocalTime eventStartTime = new LocalTime(16, 30, 0);
LocalTime eventStopTime = new LocalTime(17, 50, 0);
LocalTime formStartTime = new LocalTime(16, 35, 0);
LocalTime formStopTime = new LocalTime(17, 45, 0);
LocalTime absenceTime = new LocalTime(16, 40, 0);
absenceWithClassConflictFormHelper(eventStartTime, eventStopTime,
formStartTime, formStopTime, absenceTime,
Absence.Type.EarlyCheckOut, Absence.Status.Approved);
} | void function() { LocalTime eventStartTime = new LocalTime(16, 30, 0); LocalTime eventStopTime = new LocalTime(17, 50, 0); LocalTime formStartTime = new LocalTime(16, 35, 0); LocalTime formStopTime = new LocalTime(17, 45, 0); LocalTime absenceTime = new LocalTime(16, 40, 0); absenceWithClassConflictFormHelper(eventStartTime, eventStopTime, formStartTime, formStopTime, absenceTime, Absence.Type.EarlyCheckOut, Absence.Status.Approved); } | /**
* class times within event but buffer eclipses, tardy time in buffer in
* event
*/ | class times within event but buffer eclipses, tardy time in buffer in event | testECOWithFormB31 | {
"repo_name": "curtisullerich/attendance",
"path": "src/test/java/edu/iastate/music/marching/attendance/test/model/interact/FormClassConflictSimpleTest.java",
"license": "mit",
"size": 48344
} | [
"edu.iastate.music.marching.attendance.model.store.Absence",
"org.joda.time.LocalTime"
] | import edu.iastate.music.marching.attendance.model.store.Absence; import org.joda.time.LocalTime; | import edu.iastate.music.marching.attendance.model.store.*; import org.joda.time.*; | [
"edu.iastate.music",
"org.joda.time"
] | edu.iastate.music; org.joda.time; | 2,368,355 |
@Override
public void shutDown() {
logger.log(Level.INFO, "Keyword search ingest module instance {0} shutting down", instanceNum); //NON-NLS
if ((initialized == false) || (context == null)) {
return;
}
if (context.fileIngestIsCancelled()) {
logger.log(Level.INFO, "Keyword search ingest module instance {0} stopping search job due to ingest cancellation", instanceNum); //NON-NLS
IngestSearchRunner.getInstance().stopJob(jobId);
cleanup();
return;
}
// Remove from the search list and trigger final commit and final search
IngestSearchRunner.getInstance().endJob(jobId);
// We only need to post the summary msg from the last module per job
if (refCounter.decrementAndGet(jobId) == 0) {
try {
final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
logger.log(Level.INFO, "Indexed files count: {0}", numIndexedFiles); //NON-NLS
final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks();
logger.log(Level.INFO, "Indexed file chunks count: {0}", numIndexedChunks); //NON-NLS
} catch (NoOpenCoreException | KeywordSearchModuleException ex) {
logger.log(Level.SEVERE, "Error executing Solr queries to check number of indexed files and file chunks", ex); //NON-NLS
}
postIndexSummary();
synchronized (ingestStatus) {
ingestStatus.remove(jobId);
}
}
cleanup();
} | void function() { logger.log(Level.INFO, STR, instanceNum); if ((initialized == false) (context == null)) { return; } if (context.fileIngestIsCancelled()) { logger.log(Level.INFO, STR, instanceNum); IngestSearchRunner.getInstance().stopJob(jobId); cleanup(); return; } IngestSearchRunner.getInstance().endJob(jobId); if (refCounter.decrementAndGet(jobId) == 0) { try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); logger.log(Level.INFO, STR, numIndexedFiles); final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); logger.log(Level.INFO, STR, numIndexedChunks); } catch (NoOpenCoreException KeywordSearchModuleException ex) { logger.log(Level.SEVERE, STR, ex); } postIndexSummary(); synchronized (ingestStatus) { ingestStatus.remove(jobId); } } cleanup(); } | /**
* After all files are ingested, execute final index commit and final search
* Cleanup resources, threads, timers
*/ | After all files are ingested, execute final index commit and final search Cleanup resources, threads, timers | shutDown | {
"repo_name": "esaunders/autopsy",
"path": "KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java",
"license": "apache-2.0",
"size": 34524
} | [
"java.util.logging.Level"
] | import java.util.logging.Level; | import java.util.logging.*; | [
"java.util"
] | java.util; | 2,488,337 |
public int generateBytes(byte[] out, int outOff, int len) throws DataLengthException,
IllegalArgumentException
{
if ((out.length - len) < outOff)
{
throw new DataLengthException("output buffer too small");
}
long oBytes = len;
int outLen = digest.getDigestSize();
//
// this is at odds with the standard implementation, the
// maximum value should be hBits * (2^32 - 1) where hBits
// is the digest output size in bits. We can't have an
// array with a long index at the moment...
//
if (oBytes > ((2L << 32) - 1))
{
throw new IllegalArgumentException("Output length too large");
}
int cThreshold = (int)((oBytes + outLen - 1) / outLen);
byte[] dig = new byte[digest.getDigestSize()];
byte[] C = new byte[4];
Pack.intToBigEndian(counterStart, C, 0);
int counterBase = counterStart & ~0xFF;
for (int i = 0; i < cThreshold; i++)
{
digest.update(C, 0, C.length);
digest.update(shared, 0, shared.length);
if (iv != null)
{
digest.update(iv, 0, iv.length);
}
digest.doFinal(dig, 0);
if (len > outLen)
{
System.arraycopy(dig, 0, out, outOff, outLen);
outOff += outLen;
len -= outLen;
}
else
{
System.arraycopy(dig, 0, out, outOff, len);
}
if (++C[3] == 0)
{
counterBase += 0x100;
Pack.intToBigEndian(counterBase, C, 0);
}
}
digest.reset();
return (int)oBytes;
} | int function(byte[] out, int outOff, int len) throws DataLengthException, IllegalArgumentException { if ((out.length - len) < outOff) { throw new DataLengthException(STR); } long oBytes = len; int outLen = digest.getDigestSize(); { throw new IllegalArgumentException(STR); } int cThreshold = (int)((oBytes + outLen - 1) / outLen); byte[] dig = new byte[digest.getDigestSize()]; byte[] C = new byte[4]; Pack.intToBigEndian(counterStart, C, 0); int counterBase = counterStart & ~0xFF; for (int i = 0; i < cThreshold; i++) { digest.update(C, 0, C.length); digest.update(shared, 0, shared.length); if (iv != null) { digest.update(iv, 0, iv.length); } digest.doFinal(dig, 0); if (len > outLen) { System.arraycopy(dig, 0, out, outOff, outLen); outOff += outLen; len -= outLen; } else { System.arraycopy(dig, 0, out, outOff, len); } if (++C[3] == 0) { counterBase += 0x100; Pack.intToBigEndian(counterBase, C, 0); } } digest.reset(); return (int)oBytes; } | /**
* fill len bytes of the output buffer with bytes generated from the
* derivation function.
*
* @throws IllegalArgumentException
* if the size of the request will cause an overflow.
* @throws DataLengthException
* if the out buffer is too small.
*/ | fill len bytes of the output buffer with bytes generated from the derivation function | generateBytes | {
"repo_name": "gcc2ge/ethereumj",
"path": "ethereumj-core/src/main/java/org/ethereum/ConcatKDFBytesGenerator.java",
"license": "mit",
"size": 3947
} | [
"org.spongycastle.crypto.DataLengthException",
"org.spongycastle.util.Pack"
] | import org.spongycastle.crypto.DataLengthException; import org.spongycastle.util.Pack; | import org.spongycastle.crypto.*; import org.spongycastle.util.*; | [
"org.spongycastle.crypto",
"org.spongycastle.util"
] | org.spongycastle.crypto; org.spongycastle.util; | 1,467,194 |
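The loop above is a counter-mode (Concat-style) KDF: hash(counter || shared secret || iv) blocks are concatenated until enough output bytes exist. A conceptual sketch of that loop against the JDK's MessageDigest, not the project's generator class; the counter conventionally starts at 1:

import java.security.MessageDigest;

public class CounterKdfSketch {
    static byte[] derive(MessageDigest digest, byte[] shared, byte[] iv, int len) {
        byte[] out = new byte[len];
        int blockLen = digest.getDigestLength();
        int offset = 0;
        for (int counter = 1; offset < len; counter++) {
            digest.reset();
            digest.update(new byte[] {                       // 4-byte big-endian counter
                    (byte) (counter >>> 24), (byte) (counter >>> 16),
                    (byte) (counter >>> 8), (byte) counter });
            digest.update(shared);
            if (iv != null) {
                digest.update(iv);
            }
            byte[] block = digest.digest();
            int n = Math.min(blockLen, len - offset);
            System.arraycopy(block, 0, out, offset, n);
            offset += n;
        }
        return out;
    }

    public static void main(String[] args) throws Exception {
        byte[] key = derive(MessageDigest.getInstance("SHA-256"), "shared-secret".getBytes(), null, 48);
        System.out.println(key.length); // 48
    }
}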
public GridClientConfiguration setServers(Collection<String> srvs) {
this.srvs = srvs != null ? srvs : Collections.<String>emptySet();
return this;
} | GridClientConfiguration function(Collection<String> srvs) { this.srvs = srvs != null ? srvs : Collections.<String>emptySet(); return this; } | /**
* Sets list of servers this client should connect to.
*
* @param srvs List of servers.
* @return {@code this} for chaining.
*/ | Sets list of servers this client should connect to | setServers | {
"repo_name": "samaitra/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/client/GridClientConfiguration.java",
"license": "apache-2.0",
"size": 32734
} | [
"java.util.Collection",
"java.util.Collections"
] | import java.util.Collection; import java.util.Collections; | import java.util.*; | [
"java.util"
] | java.util; | 2,075,275 |
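Because the setter returns this, configuration can be chained; a tiny illustrative sketch in which the server addresses are placeholders:

import java.util.Arrays;
import org.apache.ignite.internal.client.GridClientConfiguration;

public class ClientConfigSketch {
    public static GridClientConfiguration build() {
        return new GridClientConfiguration()
                .setServers(Arrays.asList("127.0.0.1:11211", "127.0.0.2:11211"));
    }
}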
public Result render(boolean freshRender) {
checkLock();
SessionParams params = getParams();
try {
if (mViewRoot == null) {
return ERROR_NOT_INFLATED.createResult();
}
// measure the views
int w_spec, h_spec;
RenderingMode renderingMode = params.getRenderingMode();
// only do the screen measure when needed.
boolean newRenderSize = false;
if (mMeasuredScreenWidth == -1) {
newRenderSize = true;
mMeasuredScreenWidth = params.getScreenWidth();
mMeasuredScreenHeight = params.getScreenHeight();
if (renderingMode != RenderingMode.NORMAL) {
// measure the full size needed by the layout.
w_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenWidth,
renderingMode.isHorizExpand() ?
MeasureSpec.UNSPECIFIED // this lets us know the actual needed size
: MeasureSpec.EXACTLY);
h_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenHeight,
renderingMode.isVertExpand() ?
MeasureSpec.UNSPECIFIED // this lets us know the actual needed size
: MeasureSpec.EXACTLY);
mViewRoot.measure(w_spec, h_spec);
if (renderingMode.isHorizExpand()) {
int neededWidth = mViewRoot.getChildAt(0).getMeasuredWidth();
if (neededWidth > mMeasuredScreenWidth) {
mMeasuredScreenWidth = neededWidth;
}
}
if (renderingMode.isVertExpand()) {
int neededHeight = mViewRoot.getChildAt(0).getMeasuredHeight();
if (neededHeight > mMeasuredScreenHeight) {
mMeasuredScreenHeight = neededHeight;
}
}
}
}
// remeasure with the size we need
// This must always be done before the call to layout
w_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenWidth, MeasureSpec.EXACTLY);
h_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenHeight, MeasureSpec.EXACTLY);
mViewRoot.measure(w_spec, h_spec);
// now do the layout.
mViewRoot.layout(0, 0, mMeasuredScreenWidth, mMeasuredScreenHeight);
if (params.isLayoutOnly()) {
// delete the canvas and image to reset them on the next full rendering
mImage = null;
mCanvas = null;
} else {
mViewRoot.mAttachInfo.mTreeObserver.dispatchOnPreDraw();
// draw the views
// create the BufferedImage into which the layout will be rendered.
boolean newImage = false;
if (newRenderSize || mCanvas == null) {
if (params.getImageFactory() != null) {
mImage = params.getImageFactory().getImage(
mMeasuredScreenWidth,
mMeasuredScreenHeight);
} else {
mImage = new BufferedImage(
mMeasuredScreenWidth,
mMeasuredScreenHeight,
BufferedImage.TYPE_INT_ARGB);
newImage = true;
}
if (params.isBgColorOverridden()) {
// since we override the content, it's the same as if it was a new image.
newImage = true;
Graphics2D gc = mImage.createGraphics();
gc.setColor(new Color(params.getOverrideBgColor(), true));
gc.setComposite(AlphaComposite.Src);
gc.fillRect(0, 0, mMeasuredScreenWidth, mMeasuredScreenHeight);
gc.dispose();
}
// create an Android bitmap around the BufferedImage
Bitmap bitmap = Bitmap_Delegate.createBitmap(mImage,
true , params.getDensity());
// create a Canvas around the Android bitmap
mCanvas = new Canvas(bitmap);
mCanvas.setDensity(params.getDensity().getDpiValue());
}
if (freshRender && newImage == false) {
Graphics2D gc = mImage.createGraphics();
gc.setComposite(AlphaComposite.Src);
gc.setColor(new Color(0x00000000, true));
gc.fillRect(0, 0,
mMeasuredScreenWidth, mMeasuredScreenHeight);
// done
gc.dispose();
}
mViewRoot.draw(mCanvas);
}
mViewInfoList = startVisitingViews(mViewRoot, 0);
// success!
return SUCCESS.createResult();
} catch (Throwable e) {
// get the real cause of the exception.
Throwable t = e;
while (t.getCause() != null) {
t = t.getCause();
}
return ERROR_UNKNOWN.createResult(t.getMessage(), t);
}
} | Result function(boolean freshRender) { checkLock(); SessionParams params = getParams(); try { if (mViewRoot == null) { return ERROR_NOT_INFLATED.createResult(); } int w_spec, h_spec; RenderingMode renderingMode = params.getRenderingMode(); boolean newRenderSize = false; if (mMeasuredScreenWidth == -1) { newRenderSize = true; mMeasuredScreenWidth = params.getScreenWidth(); mMeasuredScreenHeight = params.getScreenHeight(); if (renderingMode != RenderingMode.NORMAL) { w_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenWidth, renderingMode.isHorizExpand() ? MeasureSpec.UNSPECIFIED : MeasureSpec.EXACTLY); h_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenHeight, renderingMode.isVertExpand() ? MeasureSpec.UNSPECIFIED : MeasureSpec.EXACTLY); mViewRoot.measure(w_spec, h_spec); if (renderingMode.isHorizExpand()) { int neededWidth = mViewRoot.getChildAt(0).getMeasuredWidth(); if (neededWidth > mMeasuredScreenWidth) { mMeasuredScreenWidth = neededWidth; } } if (renderingMode.isVertExpand()) { int neededHeight = mViewRoot.getChildAt(0).getMeasuredHeight(); if (neededHeight > mMeasuredScreenHeight) { mMeasuredScreenHeight = neededHeight; } } } } w_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenWidth, MeasureSpec.EXACTLY); h_spec = MeasureSpec.makeMeasureSpec(mMeasuredScreenHeight, MeasureSpec.EXACTLY); mViewRoot.measure(w_spec, h_spec); mViewRoot.layout(0, 0, mMeasuredScreenWidth, mMeasuredScreenHeight); if (params.isLayoutOnly()) { mImage = null; mCanvas = null; } else { mViewRoot.mAttachInfo.mTreeObserver.dispatchOnPreDraw(); boolean newImage = false; if (newRenderSize mCanvas == null) { if (params.getImageFactory() != null) { mImage = params.getImageFactory().getImage( mMeasuredScreenWidth, mMeasuredScreenHeight); } else { mImage = new BufferedImage( mMeasuredScreenWidth, mMeasuredScreenHeight, BufferedImage.TYPE_INT_ARGB); newImage = true; } if (params.isBgColorOverridden()) { newImage = true; Graphics2D gc = mImage.createGraphics(); gc.setColor(new Color(params.getOverrideBgColor(), true)); gc.setComposite(AlphaComposite.Src); gc.fillRect(0, 0, mMeasuredScreenWidth, mMeasuredScreenHeight); gc.dispose(); } Bitmap bitmap = Bitmap_Delegate.createBitmap(mImage, true , params.getDensity()); mCanvas = new Canvas(bitmap); mCanvas.setDensity(params.getDensity().getDpiValue()); } if (freshRender && newImage == false) { Graphics2D gc = mImage.createGraphics(); gc.setComposite(AlphaComposite.Src); gc.setColor(new Color(0x00000000, true)); gc.fillRect(0, 0, mMeasuredScreenWidth, mMeasuredScreenHeight); gc.dispose(); } mViewRoot.draw(mCanvas); } mViewInfoList = startVisitingViews(mViewRoot, 0); return SUCCESS.createResult(); } catch (Throwable e) { Throwable t = e; while (t.getCause() != null) { t = t.getCause(); } return ERROR_UNKNOWN.createResult(t.getMessage(), t); } } | /**
* Renders the scene.
* <p>
* {@link #acquire(long)} must have been called before this.
*
* @param freshRender whether the render is a new one and should erase the existing bitmap (in
* the case where bitmaps are reused). This is typically needed when not playing
* animations.)
*
* @throws IllegalStateException if the current context is different than the one owned by
* the scene, or if {@link #acquire(long)} was not called.
*
* @see RenderParams#getRenderingMode()
* @see RenderSession#render(long)
*/ | Renders the scene. <code>#acquire(long)</code> must have been called before this | render | {
"repo_name": "mateor/pdroid",
"path": "android-2.3.4_r1/tags/1.32/frameworks/base/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/impl/RenderSessionImpl.java",
"license": "gpl-3.0",
"size": 38977
} | [
"android.graphics.Bitmap",
"android.graphics.Canvas",
"android.view.View",
"com.android.ide.common.rendering.api.Result",
"com.android.ide.common.rendering.api.SessionParams",
"java.awt.AlphaComposite",
"java.awt.Color",
"java.awt.Graphics2D",
"java.awt.image.BufferedImage"
] | import android.graphics.Bitmap; import android.graphics.Canvas; import android.view.View; import com.android.ide.common.rendering.api.Result; import com.android.ide.common.rendering.api.SessionParams; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Graphics2D; import java.awt.image.BufferedImage; | import android.graphics.*; import android.view.*; import com.android.ide.common.rendering.api.*; import java.awt.*; import java.awt.image.*; | [
"android.graphics",
"android.view",
"com.android.ide",
"java.awt"
] | android.graphics; android.view; com.android.ide; java.awt; | 1,921,462 |
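The measure pass above hinges on MeasureSpec: EXACTLY pins a dimension while UNSPECIFIED lets the content report the size it actually needs, which is how the expandable rendering modes discover the required height. A small illustration using the standard Android view API; the scenario itself is hypothetical:

import android.view.View;
import android.view.View.MeasureSpec;

public final class MeasureSketch {
    public static void measureRoot(View root, int screenWidth, int screenHeight, boolean vertExpand) {
        int widthSpec = MeasureSpec.makeMeasureSpec(screenWidth, MeasureSpec.EXACTLY);
        int heightSpec = MeasureSpec.makeMeasureSpec(screenHeight,
                vertExpand ? MeasureSpec.UNSPECIFIED : MeasureSpec.EXACTLY);
        root.measure(widthSpec, heightSpec);
        int neededHeight = Math.max(screenHeight, root.getMeasuredHeight());
        root.layout(0, 0, screenWidth, neededHeight);
    }
}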
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
mTestName = req.getParameter("testName");
mTestDesc = req.getParameter("testDesc");
mTestTypeID = req.getParameter("testTypeID");
mTestTypeName = req.getParameter("testTypeName");
mProjID = req.getParameter("projID");
mParentID = req.getParameter("parentID");
mRemoteUser = req.getRemoteUser();
try {
saveTest(req, resp);
StringBuffer reply = new StringBuffer("<h3>Add Test " + mTestName
+ " was Successful</h3>");
showPage(req, resp, reply, null, this);
} catch (Exception e) {
StringBuffer error = new StringBuffer("<h3>Error: "
+ e.getMessage() + "<br>" + e.getCause() + "</h3>");
throwError(req, resp, error, this);
return;
}
}
| void function(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { mTestName = req.getParameter(STR); mTestDesc = req.getParameter(STR); mTestTypeID = req.getParameter(STR); mTestTypeName = req.getParameter(STR); mProjID = req.getParameter(STR); mParentID = req.getParameter(STR); mRemoteUser = req.getRemoteUser(); try { saveTest(req, resp); StringBuffer reply = new StringBuffer(STR + mTestName + STR); showPage(req, resp, reply, null, this); } catch (Exception e) { StringBuffer error = new StringBuffer(STR + e.getMessage() + "<br>" + e.getCause() + "</h3>"); throwError(req, resp, error, this); return; } } | /**
* Displays addition of new Test results,
* including errors to the user
*
* @param req The Servlet Request
* @param resp The Servlet Response
* @throws IOException
* @throws ServletException
*/ | Displays addition of new Test results, including errors to the user | doPost | {
"repo_name": "bobbrady/tpteam",
"path": "tpmanager/src/edu/harvard/fas/rbrady/tpteam/tpmanager/http/admin/add/AddTestEntity.java",
"license": "mit",
"size": 5468
} | [
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import java.io.*; import javax.servlet.*; import javax.servlet.http.*; | [
"java.io",
"javax.servlet"
] | java.io; javax.servlet; | 1,456,083 |
public static Object readObject(java.sql.ResultSet resultSet, int index) throws Exception {
ObjectInputStream objIn = new ObjectInputStream(resultSet.getBinaryStream(index));
Object obj = objIn.readObject();
objIn.close();
return obj;
} | static Object function(java.sql.ResultSet resultSet, int index) throws Exception { ObjectInputStream objIn = new ObjectInputStream(resultSet.getBinaryStream(index)); Object obj = objIn.readObject(); objIn.close(); return obj; } | /**
* Given a ResultSet and an index into the columns of that ResultSet, read
* binary data from the column which represents a serialized object, and
* re-create the object.
*
* @param resultSet
* the ResultSet to use.
* @param index
* an index into the ResultSet.
* @return the object if it can be de-serialized
* @throws Exception
* if an error occurs
*/ | Given a ResultSet and an index into the columns of that ResultSet, read binary data from the column which represents a serialized object, and re-create the object | readObject | {
"repo_name": "swankjesse/mysql-connector-j",
"path": "src/com/mysql/jdbc/Util.java",
"license": "gpl-2.0",
"size": 24865
} | [
"java.io.ObjectInputStream"
] | import java.io.ObjectInputStream; | import java.io.*; | [
"java.io"
] | java.io; | 465,205 |
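A hypothetical round trip for such a serialized column; the table and column names are made up, and the column index passed to readObject is 1-based:

import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import com.mysql.jdbc.Util;

public class SerializedColumnSketch {
    static void store(Connection conn, Serializable value) throws Exception {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(buf)) {
            out.writeObject(value);
        }
        try (PreparedStatement ps = conn.prepareStatement("INSERT INTO blobs (payload) VALUES (?)")) {
            ps.setBytes(1, buf.toByteArray());
            ps.executeUpdate();
        }
    }

    static Object load(Connection conn) throws Exception {
        try (PreparedStatement ps = conn.prepareStatement("SELECT payload FROM blobs");
             ResultSet rs = ps.executeQuery()) {
            rs.next();
            return Util.readObject(rs, 1);
        }
    }
}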
@WebMethod
@WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602")
@RequestWrapper(localName = "getPlacementsByStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602", className = "com.google.api.ads.dfp.jaxws.v201602.PlacementServiceInterfacegetPlacementsByStatement")
@ResponseWrapper(localName = "getPlacementsByStatementResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602", className = "com.google.api.ads.dfp.jaxws.v201602.PlacementServiceInterfacegetPlacementsByStatementResponse")
public PlacementPage getPlacementsByStatement(
@WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v201602")
Statement filterStatement)
throws ApiException_Exception
; | @WebResult(name = "rval", targetNamespace = STRgetPlacementsByStatementSTRhttps: @ResponseWrapper(localName = "getPlacementsByStatementResponseSTRhttps: PlacementPage function( @WebParam(name = "filterStatementSTRhttps: Statement filterStatement) throws ApiException_Exception ; | /**
*
* Gets a {@link PlacementPage} of {@link Placement} objects that satisfy the
* given {@link Statement#query}. The following fields are supported for
* filtering:
*
* <table>
* <tr>
* <th scope="col">PQL Property</th> <th scope="col">Object Property</th>
* </tr>
* <tr>
* <td>{@code description}</td>
* <td>{@link Placement#description}</td>
* </tr>
* <tr>
* <td>{@code id}</td>
* <td>{@link Placement#id}</td>
* </tr>
* <tr>
* <td>{@code isAdSenseTargetingEnabled}</td>
* <td>{@link Placement#isAdSenseTargetingEnabled}</td>
* </tr>
* <tr>
* <td>{@code name}</td>
* <td>{@link Placement#name}</td>
* </tr>
* <tr>
* <td>{@code placementCode}</td>
* <td>{@link Placement#placementCode}</td>
* </tr>
* <tr>
* <td>{@code status}</td>
* <td>{@link Placement#status}</td>
* </tr>
* <tr>
* <td>{@code lastModifiedDateTime}</td>
* <td>{@link Placement#lastModifiedDateTime}</td>
* </tr>
* </table>
*
* @param filterStatement a Publisher Query Language statement used to filter
* a set of placements
* @return the placements that match the given filter
*
*
* @param filterStatement
* @return
* returns com.google.api.ads.dfp.jaxws.v201602.PlacementPage
* @throws ApiException_Exception
*/ | Gets a <code>PlacementPage</code> of <code>Placement</code> objects that satisfy the given <code>Statement#query</code>. The following fields are supported for filtering: PQL Property Object Property description <code>Placement#description</code> id <code>Placement#id</code> isAdSenseTargetingEnabled <code>Placement#isAdSenseTargetingEnabled</code> name <code>Placement#name</code> placementCode <code>Placement#placementCode</code> status <code>Placement#status</code> lastModifiedDateTime <code>Placement#lastModifiedDateTime</code> | getPlacementsByStatement | {
"repo_name": "gawkermedia/googleads-java-lib",
"path": "modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201602/PlacementServiceInterface.java",
"license": "apache-2.0",
"size": 7748
} | [
"javax.jws.WebParam",
"javax.jws.WebResult",
"javax.xml.ws.ResponseWrapper"
] | import javax.jws.WebParam; import javax.jws.WebResult; import javax.xml.ws.ResponseWrapper; | import javax.jws.*; import javax.xml.ws.*; | [
"javax.jws",
"javax.xml"
] | javax.jws; javax.xml; | 945,349 |
public Table findTable(String name, boolean caseSensitive)
{
for (Iterator iter = _tables.iterator(); iter.hasNext();)
{
Table table = (Table) iter.next();
if (caseSensitive)
{
if (table.getName().equals(name))
{
return table;
}
}
else
{
if (table.getName().equalsIgnoreCase(name))
{
return table;
}
}
}
return null;
} | Table function(String name, boolean caseSensitive) { for (Iterator iter = _tables.iterator(); iter.hasNext();) { Table table = (Table) iter.next(); if (caseSensitive) { if (table.getName().equals(name)) { return table; } } else { if (table.getName().equalsIgnoreCase(name)) { return table; } } } return null; } | /**
* Finds the table with the specified name, using case insensitive matching.
* Note that this method is not called getTable() to avoid introspection
* problems.
*
* @param name The name of the table to find
* @param caseSensitive Whether case matters for the names
* @return The table or <code>null</code> if there is no such table
*/ | Finds the table with the specified name, using case insensitive matching. Note that this method is not called getTable() to avoid introspection problems | findTable | {
"repo_name": "9ci/ddlutils",
"path": "src/main/java/org/apache/ddlutils/model/Database.java",
"license": "apache-2.0",
"size": 22733
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 415,390 |
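A short usage sketch; the table name is hypothetical:

import org.apache.ddlutils.model.Database;
import org.apache.ddlutils.model.Table;

public class FindTableSketch {
    public static void lookup(Database db) {
        // case-insensitive: "CUSTOMERS", "Customers" and "customers" all match
        Table table = db.findTable("customers", false);
        System.out.println(table == null ? "no such table" : "found " + table.getName());
    }
}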
@Nullable
Media getMedia(long mediaStoreId); | Media getMedia(long mediaStoreId); | /**
* Gets the Media for the given media store ID, or null if it does not exist
* in the Media Store.
*
* @param mediaStoreId The media store ID to get the Media of.
* @return The metadata about the media item.
*/ | Gets the Media for the given media store ID, or null if it does not exist in the Media Store | getMedia | {
"repo_name": "google/vr180",
"path": "java/com/google/vr180/api/camerainterfaces/MediaProvider.java",
"license": "apache-2.0",
"size": 2616
} | [
"com.google.vr180.CameraApi"
] | import com.google.vr180.CameraApi; | import com.google.vr180.*; | [
"com.google.vr180"
] | com.google.vr180; | 2,544,126 |
public Item removeFirst() {
if (isEmpty())
throw new NoSuchElementException("Deque is empty");
Node node = first;
if (size() == 1) {
first = null;
last = null;
}
else {
first = first.next;
first.prev = null;
}
node.next = null;
size--;
return node.item;
} | Item function() { if (isEmpty()) throw new NoSuchElementException(STR); Node node = first; if (size() == 1) { first = null; last = null; } else { first = first.next; first.prev = null; } node.next = null; size--; return node.item; } | /**
* Remove and return item from the front of the deque
* @return
* @throws NoSuchElementException if deque is empty
*/ | Remove and return item from the front of the deque | removeFirst | {
"repo_name": "pertsodian/java-sandbox",
"path": "deque-randomized-queues/src/Deque.java",
"license": "mit",
"size": 3122
} | [
"java.util.NoSuchElementException"
] | import java.util.NoSuchElementException; | import java.util.*; | [
"java.util"
] | java.util; | 46,018 |
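A usage sketch, assuming the class also exposes the usual addFirst/addLast operations of a deque:

import java.util.NoSuchElementException;

public class DequeSketch {
    public static void main(String[] args) {
        // refers to the Deque class documented above, not java.util.Deque
        Deque<String> deque = new Deque<>();
        deque.addFirst("b");
        deque.addFirst("a");                      // deque is now: a, b
        System.out.println(deque.removeFirst());  // a
        System.out.println(deque.removeFirst());  // b
        try {
            deque.removeFirst();                  // empty
        } catch (NoSuchElementException e) {
            System.out.println(e.getMessage());   // "Deque is empty"
        }
    }
}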
private void deployBPELPackageInODE(BPELDeploymentContext deploymentContext) throws Exception {
File bpelPackage = deploymentContext.getBPELPackageContent();
log.info("Starting deployment of processes from directory "
+ bpelPackage.getAbsolutePath());
final Date deployDate = new Date();
// Create the DU and compile/scan it before doing any other work.
final DeploymentUnitDir deploymentUnitDir = new DeploymentUnitDir(bpelPackage);
// Before coming to this stage, we create the bpel package directory with the static version
// so we don't need to get the version from database. We can directly use static version
// calculated from bpel package directory name.
deploymentUnitDir.setVersion(deploymentUnitDir.getStaticVersion());
try {
deploymentUnitDir.compile();
} catch (CompilationException ce) {
String logMessage = "Deployment failed due to compilation issues. " + ce.getMessage();
log.error(logMessage, ce);
deploymentContext.setFailed(true);
deploymentContext.setDeploymentFailureCause(logMessage);
deploymentContext.setStackTrace(ce);
handleDeploymentError(deploymentContext);
throw new BPELDeploymentException(logMessage, ce);
}
deploymentUnitDir.scan();
DeployDocument deployDocument = deploymentUnitDir.getDeploymentDescriptor();
List<ProcessConfigurationImpl> processConfs = new ArrayList<ProcessConfigurationImpl>();
List<QName> processIds = new ArrayList<QName>();
if (deploymentUnits.containsKey(deploymentUnitDir.getName())) {
String logMessage = "Aborting deployment. Duplicate Deployment unit "
+ deploymentUnitDir.getName() + ".";
log.error(logMessage);
deploymentContext.setFailed(true);
deploymentContext.setDeploymentFailureCause(logMessage);
handleDeploymentError(deploymentContext);
throw new BPELDeploymentException(logMessage);
}
// Validate BPEL package partially before retiring old versions.
validateBPELPackage(deploymentUnitDir);
if (deploymentContext.isExistingPackage()) {
reloadExistingVersionsOfBPELPackage(deploymentContext);
}
// Before updating a BPEL package we need to retire processes in old version
retirePreviousPackageVersions(deploymentUnitDir);
for (TDeployment.Process processDD : deployDocument.getDeploy().getProcessList()) {
QName processId = Utils.toPid(processDD.getName(), deploymentUnitDir.getVersion());
ProcessConfigurationImpl processConf = new ProcessConfigurationImpl(
tenantId,
processDD,
deploymentUnitDir,
deployDate,
parentProcessStore.getEndpointReferenceContext(),
tenantConfigContext);
processConf.setAbsolutePathForBpelArchive(deploymentContext.getBpelArchive().getAbsolutePath());
processIds.add(processId);
processConfs.add(processConf);
readAnalyticsServerProfiles(processDD, deploymentUnitDir);
}
deploymentUnits.put(deploymentUnitDir.getName(), deploymentUnitDir);
processesInDeploymentUnit.put(deploymentUnitDir.getName(), processIds);
for (ProcessConfigurationImpl processConf : processConfs) {
processConfigMap.put(processConf.getProcessId(), processConf);
deploymentContext.addProcessId(processConf.getProcessId());
}
try {
parentProcessStore.onBPELPackageDeployment(
tenantId,
deploymentUnitDir.getName(),
BPELPackageRepositoryUtils.getResourcePathForBPELPackageContent(deploymentContext),
processConfs);
} catch (ContextException ce) {
deploymentContext.setDeploymentFailureCause("BPEL Package deployment failed at " +
"ODE layer. Possible cause: " + ce.getMessage());
deploymentContext.setStackTrace(ce);
deploymentContext.setFailed(true);
handleDeploymentError(deploymentContext);
throw ce;
}
} | void function(BPELDeploymentContext deploymentContext) throws Exception { File bpelPackage = deploymentContext.getBPELPackageContent(); log.info(STR + bpelPackage.getAbsolutePath()); final Date deployDate = new Date(); final DeploymentUnitDir deploymentUnitDir = new DeploymentUnitDir(bpelPackage); deploymentUnitDir.setVersion(deploymentUnitDir.getStaticVersion()); try { deploymentUnitDir.compile(); } catch (CompilationException ce) { String logMessage = STR + ce.getMessage(); log.error(logMessage, ce); deploymentContext.setFailed(true); deploymentContext.setDeploymentFailureCause(logMessage); deploymentContext.setStackTrace(ce); handleDeploymentError(deploymentContext); throw new BPELDeploymentException(logMessage, ce); } deploymentUnitDir.scan(); DeployDocument deployDocument = deploymentUnitDir.getDeploymentDescriptor(); List<ProcessConfigurationImpl> processConfs = new ArrayList<ProcessConfigurationImpl>(); List<QName> processIds = new ArrayList<QName>(); if (deploymentUnits.containsKey(deploymentUnitDir.getName())) { String logMessage = STR + deploymentUnitDir.getName() + "."; log.error(logMessage); deploymentContext.setFailed(true); deploymentContext.setDeploymentFailureCause(logMessage); handleDeploymentError(deploymentContext); throw new BPELDeploymentException(logMessage); } validateBPELPackage(deploymentUnitDir); if (deploymentContext.isExistingPackage()) { reloadExistingVersionsOfBPELPackage(deploymentContext); } retirePreviousPackageVersions(deploymentUnitDir); for (TDeployment.Process processDD : deployDocument.getDeploy().getProcessList()) { QName processId = Utils.toPid(processDD.getName(), deploymentUnitDir.getVersion()); ProcessConfigurationImpl processConf = new ProcessConfigurationImpl( tenantId, processDD, deploymentUnitDir, deployDate, parentProcessStore.getEndpointReferenceContext(), tenantConfigContext); processConf.setAbsolutePathForBpelArchive(deploymentContext.getBpelArchive().getAbsolutePath()); processIds.add(processId); processConfs.add(processConf); readAnalyticsServerProfiles(processDD, deploymentUnitDir); } deploymentUnits.put(deploymentUnitDir.getName(), deploymentUnitDir); processesInDeploymentUnit.put(deploymentUnitDir.getName(), processIds); for (ProcessConfigurationImpl processConf : processConfs) { processConfigMap.put(processConf.getProcessId(), processConf); deploymentContext.addProcessId(processConf.getProcessId()); } try { parentProcessStore.onBPELPackageDeployment( tenantId, deploymentUnitDir.getName(), BPELPackageRepositoryUtils.getResourcePathForBPELPackageContent(deploymentContext), processConfs); } catch (ContextException ce) { deploymentContext.setDeploymentFailureCause(STR + STR + ce.getMessage()); deploymentContext.setStackTrace(ce); deploymentContext.setFailed(true); handleDeploymentError(deploymentContext); throw ce; } } | /**
* Deploy BPEL package in ODE and add process configuration objects to necessary maps in process
* store.
*
* @param deploymentContext information about current deployment
* @throws Exception in case of duplicate deployment unit or if error occurred during deploying package in ODE
*/ | Deploy BPEL package in ODE and add process configuration objects to necessary maps in process store | deployBPELPackageInODE | {
"repo_name": "himasha/carbon-business-process",
"path": "components/bpel/org.wso2.carbon.bpel/src/main/java/org/wso2/carbon/bpel/core/ode/integration/store/TenantProcessStoreImpl.java",
"license": "apache-2.0",
"size": 46592
} | [
"java.io.File",
"java.util.ArrayList",
"java.util.Date",
"java.util.List",
"javax.xml.namespace.QName",
"org.apache.ode.bpel.compiler.api.CompilationException",
"org.apache.ode.bpel.dd.DeployDocument",
"org.apache.ode.bpel.dd.TDeployment",
"org.apache.ode.bpel.iapi.ContextException",
"org.apache.ode.store.DeploymentUnitDir",
"org.wso2.carbon.bpel.core.ode.integration.store.repository.BPELPackageRepositoryUtils"
] | import java.io.File; import java.util.ArrayList; import java.util.Date; import java.util.List; import javax.xml.namespace.QName; import org.apache.ode.bpel.compiler.api.CompilationException; import org.apache.ode.bpel.dd.DeployDocument; import org.apache.ode.bpel.dd.TDeployment; import org.apache.ode.bpel.iapi.ContextException; import org.apache.ode.store.DeploymentUnitDir; import org.wso2.carbon.bpel.core.ode.integration.store.repository.BPELPackageRepositoryUtils; | import java.io.*; import java.util.*; import javax.xml.namespace.*; import org.apache.ode.bpel.compiler.api.*; import org.apache.ode.bpel.dd.*; import org.apache.ode.bpel.iapi.*; import org.apache.ode.store.*; import org.wso2.carbon.bpel.core.ode.integration.store.repository.*; | [
"java.io",
"java.util",
"javax.xml",
"org.apache.ode",
"org.wso2.carbon"
] | java.io; java.util; javax.xml; org.apache.ode; org.wso2.carbon; | 25,274 |
public Statements getStatements(
String accessToken,
String xeroTenantId,
Integer page,
Integer pageSize,
String xeroApplicationId,
String xeroUserId)
throws IOException {
try {
TypeReference<Statements> typeRef = new TypeReference<Statements>() {};
HttpResponse response =
getStatementsForHttpResponse(
accessToken, xeroTenantId, page, pageSize, xeroApplicationId, xeroUserId);
return apiClient.getObjectMapper().readValue(response.getContent(), typeRef);
} catch (HttpResponseException e) {
if (logger.isDebugEnabled()) {
logger.debug(
"------------------ HttpResponseException "
+ e.getStatusCode()
+ " : getStatements -------------------");
logger.debug(e.toString());
}
XeroApiExceptionHandler handler = new XeroApiExceptionHandler();
handler.execute(e);
} catch (IOException ioe) {
throw ioe;
}
return null;
} | Statements function( String accessToken, String xeroTenantId, Integer page, Integer pageSize, String xeroApplicationId, String xeroUserId) throws IOException { try { TypeReference<Statements> typeRef = new TypeReference<Statements>() {}; HttpResponse response = getStatementsForHttpResponse( accessToken, xeroTenantId, page, pageSize, xeroApplicationId, xeroUserId); return apiClient.getObjectMapper().readValue(response.getContent(), typeRef); } catch (HttpResponseException e) { if (logger.isDebugEnabled()) { logger.debug( STR + e.getStatusCode() + STR); logger.debug(e.toString()); } XeroApiExceptionHandler handler = new XeroApiExceptionHandler(); handler.execute(e); } catch (IOException ioe) { throw ioe; } return null; } | /**
* Retrieve all statements By passing in parameters, you can search for matching statements
*
* <p><b>200</b> - success returns Statements array of objects response
*
* <p><b>400</b> - bad input parameter
*
* @param xeroTenantId Xero identifier for Tenant
* @param page unique id for single object
* @param pageSize Page size which specifies how many records per page will be returned (default
* 10). Example - https://api.xero.com/bankfeeds.xro/1.0/Statements?pageSize=100 to
* specify page size of 100.
* @param xeroApplicationId The xeroApplicationId parameter
* @param xeroUserId The xeroUserId parameter
* @param accessToken Authorization token for user set in header of each request
* @return Statements
* @throws IOException if an error occurs while attempting to invoke the API *
*/ | Retrieve all statements By passing in parameters, you can search for matching statements 200 - success returns Statements array of objects response 400 - bad input parameter | getStatements | {
"repo_name": "XeroAPI/Xero-Java",
"path": "src/main/java/com/xero/api/client/BankFeedsApi.java",
"license": "mit",
"size": 35862
} | [
"com.fasterxml.jackson.core.type.TypeReference",
"com.google.api.client.http.HttpResponse",
"com.google.api.client.http.HttpResponseException",
"com.xero.api.XeroApiExceptionHandler",
"com.xero.models.bankfeeds.Statements",
"java.io.IOException"
] | import com.fasterxml.jackson.core.type.TypeReference; import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpResponseException; import com.xero.api.XeroApiExceptionHandler; import com.xero.models.bankfeeds.Statements; import java.io.IOException; | import com.fasterxml.jackson.core.type.*; import com.google.api.client.http.*; import com.xero.api.*; import com.xero.models.bankfeeds.*; import java.io.*; | [
"com.fasterxml.jackson",
"com.google.api",
"com.xero.api",
"com.xero.models",
"java.io"
] | com.fasterxml.jackson; com.google.api; com.xero.api; com.xero.models; java.io; | 1,159,570 |
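A hypothetical call against an already-configured BankFeedsApi client; the access token and tenant id are placeholders:

import com.xero.api.client.BankFeedsApi;
import com.xero.models.bankfeeds.Statements;

public class StatementsPageSketch {
    static void firstPage(BankFeedsApi api, String accessToken, String xeroTenantId) throws Exception {
        // page 1, 50 records per page; the application/user id headers are optional and left null
        Statements page = api.getStatements(accessToken, xeroTenantId, 1, 50, null, null);
        System.out.println(page);
    }
}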
public void setEPR(@NotNull WSEndpointReference epr) {
assert epr!=null;
this.epr = epr;
} | void function(@NotNull WSEndpointReference epr) { assert epr!=null; this.epr = epr; } | /**
* Only meant for {@link RuntimeWSDLParser} to call.
*/ | Only meant for <code>RuntimeWSDLParser</code> to call | setEPR | {
"repo_name": "samskivert/ikvm-openjdk",
"path": "build/linux-amd64/impsrc/com/sun/xml/internal/ws/model/wsdl/WSDLPortImpl.java",
"license": "gpl-2.0",
"size": 3979
} | [
"com.sun.istack.internal.NotNull",
"com.sun.xml.internal.ws.api.addressing.WSEndpointReference"
] | import com.sun.istack.internal.NotNull; import com.sun.xml.internal.ws.api.addressing.WSEndpointReference; | import com.sun.istack.internal.*; import com.sun.xml.internal.ws.api.addressing.*; | [
"com.sun.istack",
"com.sun.xml"
] | com.sun.istack; com.sun.xml; | 754,261 |
public static void injectRescueArchives(Context context) {
File storageDirectory = new File(Environment.getExternalStorageDirectory(),
"/substratum/");
if (!storageDirectory.exists() && !storageDirectory.mkdirs()) {
Log.e(SUBSTRATUM_LOG, "Unable to create storage directory");
}
File rescueFile = new File(
Environment.getExternalStorageDirectory().getAbsolutePath() +
File.separator + "substratum" +
File.separator + "SubstratumRescue.zip");
File rescueFileLegacy = new File(
Environment.getExternalStorageDirectory().getAbsolutePath() +
File.separator + "substratum" +
File.separator + "SubstratumRescue_Legacy.zip");
if (rescueFile.exists() && rescueFile.delete()) {
Log.e(SUBSTRATUM_LOG, "Deleted the rescue file!");
}
if (rescueFileLegacy.exists() && rescueFileLegacy.delete()) {
Log.e(SUBSTRATUM_LOG, "Deleted the rescue legacy file!");
}
copyRescueFile(context, "rescue_legacy.dat",
Environment.getExternalStorageDirectory().getAbsolutePath() +
File.separator + "substratum" +
File.separator + "SubstratumRescue_Legacy.zip");
copyRescueFile(context, "rescue.dat",
Environment.getExternalStorageDirectory().getAbsolutePath() +
File.separator + "substratum" +
File.separator + "SubstratumRescue.zip");
} | static void function(Context context) { File storageDirectory = new File(Environment.getExternalStorageDirectory(), STR); if (!storageDirectory.exists() && !storageDirectory.mkdirs()) { Log.e(SUBSTRATUM_LOG, STR); } File rescueFile = new File( Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + STR + File.separator + STR); File rescueFileLegacy = new File( Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + STR + File.separator + STR); if (rescueFile.exists() && rescueFile.delete()) { Log.e(SUBSTRATUM_LOG, STR); } if (rescueFileLegacy.exists() && rescueFileLegacy.delete()) { Log.e(SUBSTRATUM_LOG, STR); } copyRescueFile(context, STR, Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + STR + File.separator + STR); copyRescueFile(context, STR, Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + STR + File.separator + STR); } | /**
* Inject the Substratum Rescue System archives
*
* @param context Context
*/ | Inject the Substratum Rescue System archives | injectRescueArchives | {
"repo_name": "iskandar1023/substratum",
"path": "app/src/main/java/projekt/substratum/common/References.java",
"license": "gpl-3.0",
"size": 45077
} | [
"android.content.Context",
"android.os.Environment",
"android.util.Log",
"java.io.File"
] | import android.content.Context; import android.os.Environment; import android.util.Log; import java.io.File; | import android.content.*; import android.os.*; import android.util.*; import java.io.*; | [
"android.content",
"android.os",
"android.util",
"java.io"
] | android.content; android.os; android.util; java.io; | 241,132 |