focal_method | test_case
---|---
@Secured(resource = AuthConstants.CONSOLE_RESOURCE_NAME_PREFIX + "users", action = ActionTypes.WRITE)
@PostMapping
public Object createUser(@RequestParam String username, @RequestParam String password) {
if (AuthConstants.DEFAULT_USER.equals(username)) {
return RestResultUtils.failed(HttpStatus.CONFLICT.value(),
"User `nacos` is default admin user. Please use `/nacos/v1/auth/users/admin` API to init `nacos` users. "
+ "Detail see `https://nacos.io/docs/latest/manual/admin/auth/#31-%E8%AE%BE%E7%BD%AE%E7%AE%A1%E7%90%86%E5%91%98%E5%AF%86%E7%A0%81`");
}
User user = userDetailsService.getUserFromDatabase(username);
if (user != null) {
throw new IllegalArgumentException("user '" + username + "' already exists!");
}
userDetailsService.createUser(username, PasswordEncoderUtil.encode(password));
return RestResultUtils.success("create user ok!");
}
|
@Test
void testCreateUserNamedNacos() {
RestResult<String> result = (RestResult<String>) userController.createUser("nacos", "test");
assertEquals(409, result.getCode());
}
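// A hedged companion sketch, not part of the original pair: it assumes
// `userDetailsService` is a Mockito mock injected into `userController`,
// which this snippet does not show.
@Test
void testCreateUserAlreadyExists() {
when(userDetailsService.getUserFromDatabase("existing")).thenReturn(new User());
assertThrows(IllegalArgumentException.class, () -> userController.createUser("existing", "somePassword"));
}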
|
private static void register(AtomicReference<HazelcastInstance[]> ref, HazelcastInstance instance) {
isNotNull(instance, "instance");
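// Lock-free append: copy-on-write into a new array, committed with compareAndSet and retried on contention; registrations beyond MAX_REGISTERED_INSTANCES are silently dropped.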
for (;;) {
HazelcastInstance[] oldInstances = ref.get();
if (oldInstances.length == MAX_REGISTERED_INSTANCES) {
return;
}
HazelcastInstance[] newInstances = new HazelcastInstance[oldInstances.length + 1];
arraycopy(oldInstances, 0, newInstances, 0, oldInstances.length);
newInstances[oldInstances.length] = instance;
if (ref.compareAndSet(oldInstances, newInstances)) {
return;
}
}
}
|
@Test
public void register() {
HazelcastInstance hz1 = mock(HazelcastInstance.class);
HazelcastInstance hz2 = mock(HazelcastInstance.class);
OutOfMemoryErrorDispatcher.registerServer(hz1);
assertArrayEquals(new HazelcastInstance[]{hz1}, OutOfMemoryErrorDispatcher.current());
OutOfMemoryErrorDispatcher.registerServer(hz2);
assertArrayEquals(new HazelcastInstance[]{hz1, hz2}, OutOfMemoryErrorDispatcher.current());
}
|
public static Map<String, Object> flattenKeysInMap(Map<String, Object> map, String separator) {
Map<String, Object> answer = new LinkedHashMap<>();
doFlattenKeysInMap(map, "", ObjectHelper.isNotEmpty(separator) ? separator : "", answer);
return answer;
}
|
@Test
public void testFlattenKeysInMap() {
Map<String, Object> root = new LinkedHashMap<>();
Map<String, Object> api = new LinkedHashMap<>();
Map<String, Object> contact = new LinkedHashMap<>();
contact.put("organization", "Apache Software Foundation");
api.put("version", "1.0.0");
api.put("title", "My cool API");
api.put("contact", contact);
root.put("api", api);
root.put("cors", true);
Map<String, Object> flattened = CollectionHelper.flattenKeysInMap(root, ".");
assertEquals(4, flattened.size());
assertEquals(true, flattened.get("cors"));
assertEquals("1.0.0", flattened.get("api.version"));
assertEquals("My cool API", flattened.get("api.title"));
assertEquals("Apache Software Foundation", flattened.get("api.contact.organization"));
}
|
@Override
public SourceConfig getSourceConfig() {
return SourceConfigUtils.convertFromDetails(config.getFunctionDetails());
}
|
@Test
public void testGetSourceConfig() {
SourceContext sourceContext = context;
SourceConfig sourceConfig = sourceContext.getSourceConfig();
Assert.assertNotNull(sourceConfig);
}
|
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
if(file.isRoot()) {
return PathAttributes.EMPTY;
}
if(file.getType().contains(Path.Type.upload)) {
// Pending large file upload
final Write.Append append = new B2LargeUploadService(session, fileid, new B2WriteFeature(session, fileid)).append(file, new TransferStatus());
if(append.append) {
return new PathAttributes().withSize(append.offset);
}
return PathAttributes.EMPTY;
}
if(containerService.isContainer(file)) {
try {
final B2BucketResponse info = session.getClient().listBucket(file.getName());
if(null == info) {
throw new NotfoundException(file.getAbsolute());
}
return this.toAttributes(info);
}
catch(B2ApiException e) {
throw new B2ExceptionMappingService(fileid).map("Failure to read attributes of {0}", e, file);
}
catch(IOException e) {
throw new DefaultIOExceptionMappingService().map(e);
}
}
else {
final String id = fileid.getVersionId(file);
if(null == id) {
return PathAttributes.EMPTY;
}
B2FileResponse response;
try {
response = this.findFileInfo(file, id);
}
catch(NotfoundException e) {
// Try with reset cache after failure finding node id
response = this.findFileInfo(file, fileid.getVersionId(file));
}
final PathAttributes attr = this.toAttributes(response);
if(attr.isDuplicate()) {
// Throw failure if latest version has hide marker set and lookup was without explicit version
if(StringUtils.isBlank(file.attributes().getVersionId())) {
if(log.isDebugEnabled()) {
log.debug(String.format("Latest version of %s is duplicate", file));
}
throw new NotfoundException(file.getAbsolute());
}
}
return attr;
}
}
|
@Test
public void testFindRoot() throws Exception {
final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
final B2AttributesFinderFeature f = new B2AttributesFinderFeature(session, fileid);
assertEquals(PathAttributes.EMPTY, f.find(new Path("/", EnumSet.of(Path.Type.directory))));
}
|
@Override
public boolean containsShort(K name, short value) {
return false;
}
|
@Test
public void testContainsShort() {
assertFalse(HEADERS.containsShort("name1", (short) 1));
}
|
@ApiOperation(value = "List groups", nickname="listGroups", tags = { "Groups" }, produces = "application/json")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", dataType = "string", value = "Only return group with the given id", paramType = "query"),
@ApiImplicitParam(name = "name", dataType = "string", value = "Only return groups with the given name", paramType = "query"),
@ApiImplicitParam(name = "type", dataType = "string", value = "Only return groups with the given type", paramType = "query"),
@ApiImplicitParam(name = "nameLike", dataType = "string", value = "Only return groups with a name like the given value. Use % as wildcard-character.", paramType = "query"),
@ApiImplicitParam(name = "member", dataType = "string", value = "Only return groups which have a member with the given username.", paramType = "query"),
@ApiImplicitParam(name = "potentialStarter", dataType = "string", value = "Only return groups which members are potential starters for a process-definition with the given id.", paramType = "query"),
@ApiImplicitParam(name = "sort", dataType = "string", value = "Property to sort on, to be used together with the order.", allowableValues = "id,name,type", paramType = "query"),
})
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Indicates the requested groups were returned.")
})
@GetMapping(value = "/identity/groups", produces = "application/json")
public DataResponse<GroupResponse> getGroups(@ApiParam(hidden = true) @RequestParam Map<String, String> allRequestParams) {
GroupQuery query = identityService.createGroupQuery();
if (allRequestParams.containsKey("id")) {
query.groupId(allRequestParams.get("id"));
}
if (allRequestParams.containsKey("name")) {
query.groupName(allRequestParams.get("name"));
}
if (allRequestParams.containsKey("nameLike")) {
query.groupNameLike(allRequestParams.get("nameLike"));
}
if (allRequestParams.containsKey("type")) {
query.groupType(allRequestParams.get("type"));
}
if (allRequestParams.containsKey("member")) {
query.groupMember(allRequestParams.get("member"));
}
if (restApiInterceptor != null) {
restApiInterceptor.accessGroupInfoWithQuery(query);
}
return paginateList(allRequestParams, query, "id", properties, restResponseFactory::createGroupResponseList);
}
|
@Test
@Deployment
public void testGetGroups() throws Exception {
List<Group> savedGroups = new ArrayList<>();
try {
Group group1 = identityService.newGroup("testgroup1");
group1.setName("Test group");
group1.setType("Test type");
identityService.saveGroup(group1);
savedGroups.add(group1);
Group group2 = identityService.newGroup("testgroup2");
group2.setName("Another group");
group2.setType("Another type");
identityService.saveGroup(group2);
savedGroups.add(group2);
Group group3 = identityService.createGroupQuery().groupId("admin").singleResult();
assertThat(group3).isNotNull();
Group group4 = identityService.createGroupQuery().groupId("sales").singleResult();
assertThat(group4).isNotNull();
// Test filter-less
String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_COLLECTION);
assertResultsPresentInDataResponse(url, group1.getId(), group2.getId(), group3.getId(), group4.getId());
// Test based on name
url = RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_COLLECTION) + "?name=" + encode("Test group");
assertResultsPresentInDataResponse(url, group1.getId());
// Test based on name like
url = RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_COLLECTION) + "?nameLike=" + encode("% group");
assertResultsPresentInDataResponse(url, group2.getId(), group1.getId());
// Test based on type
url = RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_COLLECTION) + "?type=" + encode("Another type");
assertResultsPresentInDataResponse(url, group2.getId());
// Test based on group member
url = RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_COLLECTION) + "?member=kermit";
assertResultsPresentInDataResponse(url, group3.getId());
} finally {
// Delete groups after test passes or fails
if (!savedGroups.isEmpty()) {
for (Group group : savedGroups) {
identityService.deleteGroup(group.getId());
}
}
}
}
|
public ImmutableList<GlobalSetting> parse(final InputStream is) {
return Jsons.toObjects(is, GlobalSetting.class);
}
|
@Test
public void should_parse_settings_file() {
InputStream stream = getResourceAsStream("settings/settings.json");
ImmutableList<GlobalSetting> globalSettings = parser.parse(stream);
assertThat(globalSettings.get(0).includes().get(0), is(join("src", "test", "resources", "settings", "details", "foo.json")));
assertThat(globalSettings.get(1).includes().get(0), is(join("src", "test", "resources", "settings", "details", "bar.json")));
}
|
public static void combine(LongDecimalWithOverflowState state, LongDecimalWithOverflowState otherState)
{
long overflowToAdd = otherState.getOverflow();
Slice currentState = state.getLongDecimal();
Slice otherDecimal = otherState.getLongDecimal();
if (currentState == null) {
state.setLongDecimal(otherDecimal);
}
else {
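// addWithOverflow writes the 128-bit sum back into currentState and returns the carry, which is folded into the running overflow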
overflowToAdd += UnscaledDecimal128Arithmetic.addWithOverflow(currentState, otherDecimal, currentState);
}
state.addOverflow(overflowToAdd);
}
|
@Test
public void testCombineUnderflow()
{
addToState(state, TWO.pow(125).negate());
addToState(state, TWO.pow(126).negate());
LongDecimalWithOverflowState otherState = new LongDecimalWithOverflowStateFactory().createSingleState();
addToState(otherState, TWO.pow(125).negate());
addToState(otherState, TWO.pow(126).negate());
DecimalSumAggregation.combine(state, otherState);
assertEquals(state.getOverflow(), -1);
assertEquals(state.getLongDecimal(), unscaledDecimal(TWO.pow(126).negate()));
}
|
public static DistCpOptions parse(String[] args)
throws IllegalArgumentException {
CommandLineParser parser = new CustomParser();
CommandLine command;
try {
command = parser.parse(cliOptions, args, true);
} catch (ParseException e) {
throw new IllegalArgumentException("Unable to parse arguments. " +
Arrays.toString(args), e);
}
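// Boolean switches are applied unconditionally from flag presence; the value-bearing switches below are parsed and validated only when present.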
DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);
builder
.withAtomicCommit(
command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
.withSyncFolder(
command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
.withDeleteMissing(
command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
.withIgnoreFailures(
command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
.withOverwrite(
command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
.withAppend(
command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
.withSkipCRC(
command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
.withBlocking(
!command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
.withVerboseLog(
command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
.withDirectWrite(
command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
.withUseIterator(
command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
.withUpdateRoot(
command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));
if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
String[] snapshots = getVals(command,
DistCpOptionSwitch.DIFF.getSwitch());
checkSnapshotsArgs(snapshots);
builder.withUseDiff(snapshots[0], snapshots[1]);
}
if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
String[] snapshots = getVals(command,
DistCpOptionSwitch.RDIFF.getSwitch());
checkSnapshotsArgs(snapshots);
builder.withUseRdiff(snapshots[0], snapshots[1]);
}
if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
builder.withFiltersFile(
getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
}
if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
builder.withLogPath(
new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
}
if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
final String workPath = getVal(command,
DistCpOptionSwitch.WORK_PATH.getSwitch());
if (workPath != null && !workPath.isEmpty()) {
builder.withAtomicWorkPath(new Path(workPath));
}
}
if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
builder.withTrackMissing(
new Path(getVal(
command,
DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
}
if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
try {
final Float mapBandwidth = Float.parseFloat(
getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
builder.withMapBandwidth(mapBandwidth);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Bandwidth specified is invalid: " +
getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
}
}
if (command.hasOption(
DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
try {
final Integer numThreads = Integer.parseInt(getVal(command,
DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
builder.withNumListstatusThreads(numThreads);
} catch (NumberFormatException e) {
throw new IllegalArgumentException(
"Number of liststatus threads is invalid: " + getVal(command,
DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
}
}
if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
try {
final Integer maps = Integer.parseInt(
getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
builder.maxMaps(maps);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Number of maps is invalid: " +
getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
}
}
if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
builder.withCopyStrategy(
getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
}
if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
builder.preserve(
getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
}
if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated" +
" option. Ignoring.");
}
if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated" +
" option. Ignoring.");
}
if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
final String chunkSizeStr = getVal(command,
DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch().trim());
try {
int csize = Integer.parseInt(chunkSizeStr);
csize = csize > 0 ? csize : 0;
LOG.info("Set distcp blocksPerChunk to " + csize);
builder.withBlocksPerChunk(csize);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("blocksPerChunk is invalid: "
+ chunkSizeStr, e);
}
}
if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
final String copyBufferSizeStr = getVal(command,
DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch().trim());
try {
int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
builder.withCopyBufferSize(copyBufferSize);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("copyBufferSize is invalid: "
+ copyBufferSizeStr, e);
}
}
return builder.build();
}
|
@Test
public void testExclusionsOption() {
DistCpOptions options = OptionsParser.parse(new String[] {
"hdfs://localhost:8020/source/first",
"hdfs://localhost:8020/target/"});
Assert.assertNull(options.getFiltersFile());
options = OptionsParser.parse(new String[] {
"-filters",
"/tmp/filters.txt",
"hdfs://localhost:8020/source/first",
"hdfs://localhost:8020/target/"});
assertThat(options.getFiltersFile()).isEqualTo("/tmp/filters.txt");
}
|
@Override
public String formatInteger(Locale locale, Integer value) {
return NumberFormat.getNumberInstance(locale).format(value);
}
|
@Test
public void format_integer() {
assertThat(underTest.formatInteger(Locale.ENGLISH, 10)).isEqualTo("10");
assertThat(underTest.formatInteger(Locale.ENGLISH, 100000)).isEqualTo("100,000");
}
|
String driverPath(File homeDir, Provider provider) {
String dirPath = provider.path;
File dir = new File(homeDir, dirPath);
if (!dir.exists()) {
throw new MessageException("Directory does not exist: " + dirPath);
}
List<File> files = new ArrayList<>(FileUtils.listFiles(dir, new String[] {"jar"}, false));
if (files.isEmpty()) {
throw new MessageException("Directory does not contain JDBC driver: " + dirPath);
}
if (files.size() > 1) {
throw new MessageException("Directory must contain only one JAR file: " + dirPath);
}
return files.get(0).getAbsolutePath();
}
|
@Test
public void driver_file() throws Exception {
File driverFile = new File(homeDir, "extensions/jdbc-driver/oracle/ojdbc6.jar");
FileUtils.touch(driverFile);
String path = underTest.driverPath(homeDir, Provider.ORACLE);
assertThat(path).isEqualTo(driverFile.getAbsolutePath());
}
|
@Override
public ModuleEnvironment modelEnvironment() {
if (moduleEnvironment == null) {
moduleEnvironment =
(ModuleEnvironment) this.getExtensionLoader(ModuleExt.class).getExtension(ModuleEnvironment.NAME);
}
return moduleEnvironment;
}
|
@Test
void testModelEnvironment() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
ModuleEnvironment modelEnvironment = moduleModel.modelEnvironment();
Assertions.assertNotNull(modelEnvironment);
frameworkModel.destroy();
}
|
public static boolean isSentToMultisig(Script script) {
List<ScriptChunk> chunks = script.chunks();
if (chunks.size() < 4) return false;
ScriptChunk chunk = chunks.get(chunks.size() - 1);
// Must end in OP_CHECKMULTISIG[VERIFY].
if (!(chunk.equalsOpCode(OP_CHECKMULTISIG) || chunk.equalsOpCode(OP_CHECKMULTISIGVERIFY))) return false;
// Second to last chunk must be an OP_N opcode and there should be that many data chunks (keys).
int nOpCode = chunks.get(chunks.size() - 2).opcode;
if (nOpCode < OP_1 || nOpCode > OP_16) return false;
int numKeys = decodeFromOpN(nOpCode);
if (numKeys < 1 || chunks.size() != 3 + numKeys) return false;
for (int i = 1; i < chunks.size() - 2; i++) {
if (chunks.get(i).isOpCode()) return false;
}
// First chunk must be an OP_N opcode too.
int mOpCode = chunks.get(0).opcode;
return mOpCode >= OP_1 && mOpCode <= OP_16;
}
|
@Test
public void testIsSentToMultisigFailure() {
// at the time this test was written, the following script would have thrown an exception:
// it puts a non-OP_N opcode in the first and second-to-last positions
Script evil = new ScriptBuilder()
.op(0xff)
.op(0xff)
.op(0xff)
.op(OP_CHECKMULTISIG)
.build();
assertFalse(ScriptPattern.isSentToMultisig(evil));
}
|
public boolean isGreaterOrEqual(Version version) {
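// An unknown version is considered greater-or-equal only to another unknown version; known versions compare normally.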
return (!version.isUnknown() && compareTo(version) >= 0) || (version.isUnknown() && isUnknown());
}
|
@Test
public void isGreaterOrEqual() throws Exception {
assertTrue(V3_0.isGreaterOrEqual(of(2, 0)));
assertTrue(V3_0.isGreaterOrEqual(of(3, 0)));
assertFalse(V3_0.isGreaterOrEqual(of(4, 0)));
}
|
public DoubleArrayAsIterable usingTolerance(double tolerance) {
return new DoubleArrayAsIterable(tolerance(tolerance), iterableSubject());
}
|
@Test
public void usingTolerance_containsAtLeast_primitiveDoubleArray_inOrder_success() {
assertThat(array(1.1, TOLERABLE_2POINT2, 3.3))
.usingTolerance(DEFAULT_TOLERANCE)
.containsAtLeast(array(1.1, 2.2))
.inOrder();
}
|
public static String normalizeMock(String mock) {
if (mock == null) {
return mock;
}
mock = mock.trim();
if (mock.length() == 0) {
return mock;
}
if (RETURN_KEY.equalsIgnoreCase(mock)) {
return RETURN_PREFIX + "null";
}
if (ConfigUtils.isDefault(mock) || "fail".equalsIgnoreCase(mock) || "force".equalsIgnoreCase(mock)) {
return "default";
}
if (mock.startsWith(FAIL_PREFIX)) {
mock = mock.substring(FAIL_PREFIX.length()).trim();
}
if (mock.startsWith(FORCE_PREFIX)) {
mock = mock.substring(FORCE_PREFIX.length()).trim();
}
if (mock.startsWith(RETURN_PREFIX) || mock.startsWith(THROW_PREFIX)) {
mock = mock.replace('`', '"');
}
return mock;
}
|
@Test
void testNormalizeMock() {
Assertions.assertNull(MockInvoker.normalizeMock(null));
Assertions.assertEquals("", MockInvoker.normalizeMock(""));
Assertions.assertEquals("", MockInvoker.normalizeMock("fail:"));
Assertions.assertEquals("", MockInvoker.normalizeMock("force:"));
Assertions.assertEquals("throw", MockInvoker.normalizeMock("throw"));
Assertions.assertEquals("default", MockInvoker.normalizeMock("fail"));
Assertions.assertEquals("default", MockInvoker.normalizeMock("force"));
Assertions.assertEquals("default", MockInvoker.normalizeMock("true"));
Assertions.assertEquals("default", MockInvoker.normalizeMock("default"));
Assertions.assertEquals("return null", MockInvoker.normalizeMock("return"));
Assertions.assertEquals("return null", MockInvoker.normalizeMock("return null"));
}
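// Hedged extra case (an assumption, not in the original test): for return/throw
// mocks the focal method rewrites backticks to double quotes.
@Test
void testNormalizeMockBacktick() {
Assertions.assertEquals("return \"fake\"", MockInvoker.normalizeMock("return `fake`"));
}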
|
public static PostgreSQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) {
Preconditions.checkArgument(BINARY_PROTOCOL_VALUES.containsKey(binaryColumnType), "Cannot find PostgreSQL type '%s' in column type when process binary protocol value", binaryColumnType);
return BINARY_PROTOCOL_VALUES.get(binaryColumnType);
}
|
@Test
void assertGetStringBinaryProtocolValueByVarchar() {
PostgreSQLBinaryProtocolValue binaryProtocolValue = PostgreSQLBinaryProtocolValueFactory.getBinaryProtocolValue(PostgreSQLColumnType.VARCHAR);
assertThat(binaryProtocolValue, instanceOf(PostgreSQLStringBinaryProtocolValue.class));
}
|
@Override
public ProtobufSystemInfo.Section toProtobuf() {
ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
protobuf.setName(name);
Map<Object, Object> sortedProperties = new TreeMap<>(System.getProperties());
for (Map.Entry<Object, Object> systemProp : sortedProperties.entrySet()) {
if (systemProp.getValue() != null) {
setAttribute(protobuf, Objects.toString(systemProp.getKey()), Objects.toString(systemProp.getValue()));
}
}
return protobuf.build();
}
|
@Test
public void name_is_not_empty() {
assertThat(underTest.toProtobuf().getName()).isEqualTo("Web JVM Properties");
}
|
public boolean isPopulateMetadata() {
return _populateMetadata;
}
|
@Test
public void testIsPopulateRowMetadata() {
// test default
KafkaPartitionLevelStreamConfig config = getStreamConfig("topic", "host1", null, null, null, null, null, null);
Assert.assertFalse(config.isPopulateMetadata());
config = getStreamConfig("topic", "host1", null, null, null, null, null, "bad value");
Assert.assertFalse(config.isPopulateMetadata());
config = getStreamConfig("topic", "host1", null, null, null, null, null, "TrUe");
Assert.assertTrue(config.isPopulateMetadata());
}
|
@Override
public void onChangeLogParsed(Run<?, ?> run, SCM scm, TaskListener listener, ChangeLogSet<?> changelog) throws Exception {
try {
JiraSite jiraSite = JiraSite.get(run.getParent());
if (jiraSite == null) {
return;
}
Collection<String> issueKeys = getIssueKeys(changelog, jiraSite.getIssuePattern());
if (issueKeys.isEmpty()) {
return;
}
String jql = constructJQLQuery(issueKeys);
JiraSession session = jiraSite.getSession();
if (session == null) {
return;
}
// Query for JIRA issues
List<Issue> issues = session.getIssuesFromJqlSearch(jql);
Set<JiraIssue> issuesFromJqlSearch = issues == null ? Collections.emptySet() :
issues.stream().map( JiraIssue::new ).collect( Collectors.toSet() );
// If there are no JIRA issues, do not update the actions
if (issuesFromJqlSearch.isEmpty()) {
return;
}
// Create or update the JiraBuildAction
JiraBuildAction action = run.getAction(JiraBuildAction.class);
if (action == null) {
run.addAction(new JiraBuildAction(run, issuesFromJqlSearch));
} else {
action.addIssues(issuesFromJqlSearch);
}
run.save();
} catch (Exception e) { // we do not want to fail the build if an issue happens here
LOGGER.warn("Failure executing Jira query to fetch issues. Skipping recording Jira issues: {}", e.getMessage());
// stack trace in debug mode
LOGGER.debug(e.getMessage(), e);
}
}
|
@Test
public void onChangeLogParsedCreatesAction() throws Exception {
JiraSCMListener listener = new JiraSCMListener();
Job job = mock(Job.class);
Run run = mock(Run.class);
ChangeLogSet logSet = mock(ChangeLogSet.class);
final ChangeLogSet.Entry entry = mock(ChangeLogSet.Entry.class);
when(entry.getParent()).thenReturn(logSet);
when(logSet.getRun()).thenReturn(run);
when(run.getParent()).thenReturn(job);
when(entry.getMsg()).thenReturn("TEST-123");
ChangeLogSet<ChangeLogSet.Entry> set = new ChangeLogSet<ChangeLogSet.Entry>(run, null) {
@Override
public boolean isEmptySet() {
return false;
}
@Override
public Iterator<Entry> iterator() {
return Collections.singletonList(entry).iterator();
}
};
// Setup JIRA site
jiraSiteMockedStatic = mockStatic(JiraSite.class);
JiraSite site = mock(JiraSite.class);
JiraSession session = mock(JiraSession.class);
when(site.getIssuePattern()).thenReturn(JiraSite.DEFAULT_ISSUE_PATTERN);
when(site.getSession()).thenReturn(session);
when(JiraSite.get(job)).thenReturn(site);
Issue rawIssue = mock(Issue.class);
when(rawIssue.getKey()).thenReturn("TEST-123");
when(rawIssue.getSummary()).thenReturn("Foo");
when(session.getIssuesFromJqlSearch("key in ('TEST-123')")).thenReturn(Collections.singletonList(rawIssue));
when(run.getAction(JiraBuildAction.class)).thenReturn(null);
ArgumentCaptor<JiraBuildAction> actionArgumentCaptor = ArgumentCaptor.forClass(JiraBuildAction.class);
listener.onChangeLogParsed(run, null, null, set);
verify(run).addAction(actionArgumentCaptor.capture());
JiraBuildAction action = actionArgumentCaptor.getValue();
Assert.assertFalse(action.getIssues().isEmpty());
JiraIssue issue = action.getIssue("TEST-123");
Assert.assertNotNull(issue);
Assert.assertEquals("TEST-123", issue.getKey());
}
|
public static void main(String[] argv) {
//Use JCommander to parse the CLI args into a useful class
CommandLineArgs args = parseCommandLineArgs(argv);
execute(
args.dataFile,
configFromYaml(args.yamlConfig)
);
}
|
@Test
public void runProjectDemo_aggregateEvents() throws IOException {
// Verify the "aggregate-encounters.md" demo works
String[] args = new String[]{
"-c", "src/test/resources/sampleConfig2.yaml",
"-f", "src/test/resources/sampleData.txt.gz"
};
assertDoesNotThrow(
() -> RunAirborneOnFile.main(args)
);
File eventDir = new File("detectedEvents");
File[] eventFiles = eventDir.listFiles();
assertThat(eventFiles.length, is(2));
Stream.of(eventFiles).forEach(file -> file.delete());
Files.deleteIfExists(eventDir.toPath());
File targetAvroFile = new File("allEvents.avro");
if (targetAvroFile.exists()) {
targetAvroFile.delete();
}
}
|
public static Map<String, Object> beanToMap(Object bean, String... properties) {
int mapSize = 16;
Editor<String> keyEditor = null;
if (ArrayUtil.isNotEmpty(properties)) {
mapSize = properties.length;
final Set<String> propertiesSet = CollUtil.set(false, properties);
keyEditor = property -> propertiesSet.contains(property) ? property : null;
}
// properties to copy are explicitly specified, so null values are not ignored
return beanToMap(bean, new LinkedHashMap<>(mapSize, 1), false, keyEditor);
}
|
@Test
public void beanToMapWithValueEditTest() {
final SubPerson person = new SubPerson();
person.setAge(14);
person.setOpenid("11213232");
person.setName("测试A11");
person.setSubName("sub名字");
final Map<String, Object> map = BeanUtil.beanToMap(person, new LinkedHashMap<>(),
CopyOptions.create().setFieldValueEditor((key, value) -> key + "_" + value));
assertEquals("subName_sub名字", map.get("subName"));
}
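// Hedged companion sketch (not in the original pair), exercising the varargs
// overload above; the SubPerson setters are assumed from the surrounding test.
@Test
public void beanToMapWithPropertiesTest() {
final SubPerson person = new SubPerson();
person.setAge(14);
person.setName("测试A11");
final Map<String, Object> map = BeanUtil.beanToMap(person, "name");
assertEquals("测试A11", map.get("name"));
assertFalse(map.containsKey("age"));
}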
|
public static Builder forRegistry(MetricRegistry registry) {
return new Builder(registry);
}
|
@Test
public void reportDoubleGaugeValuesUsingCustomFormatter() throws Exception {
DecimalFormat formatter = new DecimalFormat("##.##########", DecimalFormatSymbols.getInstance(Locale.US));
try (GraphiteReporter graphiteReporter = GraphiteReporter.forRegistry(registry)
.withClock(clock)
.prefixedWith("prefix")
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.filter(MetricFilter.ALL)
.disabledMetricAttributes(Collections.emptySet())
.withFloatingPointFormatter(formatter::format)
.build(graphite)) {
reportGaugeValue(graphiteReporter, 0.000045322);
verifyGraphiteSentCorrectMetricValue("prefix.gauge", "0.000045322", timestamp);
verifyNoMoreInteractions(graphite);
}
}
|
public synchronized T getConfig(String configId) {
try (ConfigSubscriber subscriber = new ConfigSubscriber()) {
ConfigHandle<T> handle = subscriber.subscribe(clazz, configId);
subscriber.nextConfig(true);
return handle.getConfig();
}
}
|
@Test
public void testGetFromFile() {
ConfigGetter<AppConfig> getter = new ConfigGetter<>(AppConfig.class);
AppConfig config = getter.getConfig("file:src/test/resources/configs/foo/app.cfg");
verifyFooValues(config);
}
|
public static Optional<MapStatistics> mergeMapStatistics(List<ColumnStatistics> stats, Object2LongMap<DwrfProto.KeyInfo> keySizes)
{
Map<DwrfProto.KeyInfo, List<ColumnStatistics>> columnStatisticsByKey = new LinkedHashMap<>();
long nonNullValueCount = 0;
for (ColumnStatistics columnStatistics : stats) {
if (columnStatistics.getNumberOfValues() > 0) {
MapStatistics partialStatistics = columnStatistics.getMapStatistics();
if (partialStatistics == null) {
// there are non-null values but no statistics, so we can not say anything about the data
return Optional.empty();
}
// collect column stats for each key for merging later
for (MapStatisticsEntry entry : partialStatistics.getEntries()) {
List<ColumnStatistics> allKeyStats = columnStatisticsByKey.computeIfAbsent(entry.getKey(), (k) -> new ArrayList<>());
allKeyStats.add(entry.getColumnStatistics());
}
nonNullValueCount += columnStatistics.getNumberOfValues();
}
}
// merge all column stats for each key
MapColumnStatisticsBuilder mapStatisticsBuilder = new MapColumnStatisticsBuilder(true);
for (Map.Entry<DwrfProto.KeyInfo, List<ColumnStatistics>> entry : columnStatisticsByKey.entrySet()) {
DwrfProto.KeyInfo key = entry.getKey();
Long keySize = keySizes != null ? keySizes.getLong(key) : null;
ColumnStatistics mergedColumnStatistics = mergeColumnStatistics(entry.getValue(), keySize, null);
mapStatisticsBuilder.addMapStatistics(key, mergedColumnStatistics);
}
mapStatisticsBuilder.increaseValueCount(nonNullValueCount);
return mapStatisticsBuilder.buildMapStatistics();
}
|
@Test(dataProvider = "keySupplier")
public void testMergeMapStatistics(KeyInfo[] keys)
{
// merge two stats with keys: [k0,k1] and [k1,k2]
// column statistics for k1 should be merged together
MapColumnStatisticsBuilder builder1 = new MapColumnStatisticsBuilder(true);
builder1.addMapStatistics(keys[0], new IntegerColumnStatistics(3L, null, null, null, new IntegerStatistics(1L, 2L, 3L)));
builder1.addMapStatistics(keys[1], new IntegerColumnStatistics(5L, null, null, null, new IntegerStatistics(10L, 20L, 30L)));
builder1.increaseValueCount(8);
ColumnStatistics columnStatistics1 = builder1.buildColumnStatistics();
MapColumnStatisticsBuilder builder2 = new MapColumnStatisticsBuilder(true);
builder2.addMapStatistics(keys[1], new IntegerColumnStatistics(7L, null, null, null, new IntegerStatistics(25L, 95L, 100L)));
builder2.addMapStatistics(keys[2], new IntegerColumnStatistics(9L, null, null, null, new IntegerStatistics(12L, 22L, 32L)));
builder2.increaseValueCount(16);
ColumnStatistics columnStatistics2 = builder2.buildColumnStatistics();
MapStatistics mergedMapStatistics = MapColumnStatisticsBuilder.mergeMapStatistics(ImmutableList.of(columnStatistics1, columnStatistics2), null).get();
assertMergedMapStatistics(keys, mergedMapStatistics);
}
|
public static List<Chunk> split(String s) {
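// Splits a '/'-separated path into Chunks: an empty segment (from "//") marks the next chunk as any-depth, and "\/" escapes a literal slash.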
int pos = s.indexOf(SLASH);
if (pos == -1) {
throw new RuntimeException("path did not start with or contain '/'");
}
List<Chunk> list = new ArrayList<>();
int startPos = 0;
int searchPos = 0;
boolean anyDepth = false;
while (pos != -1) {
if (pos == 0) {
startPos = 1;
searchPos = 1;
} else if (s.charAt(pos - 1) == '\\') {
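// escaped slash: drop the backslash so the '/' stays literal, then resume scanning from the same offset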
s = s.substring(0, pos - 1) + s.substring(pos);
searchPos = pos;
} else {
String temp = s.substring(startPos, pos);
if (temp.isEmpty()) {
anyDepth = true;
} else {
list.add(new Chunk(anyDepth, temp));
anyDepth = false; // reset
}
startPos = pos + 1;
searchPos = startPos;
}
pos = s.indexOf(SLASH, searchPos);
}
if (startPos != s.length()) {
String temp = s.substring(startPos);
if (!temp.isEmpty()) {
list.add(new Chunk(anyDepth, temp));
}
}
return list;
}
|
@Test
void testClassName() {
List<PathSearch.Chunk> list = PathSearch.split("/hello[3]//world.Foo/.Bar");
logger.debug("list: {}", list);
PathSearch.Chunk first = list.get(0);
assertFalse(first.anyDepth);
assertEquals("hello", first.controlType);
assertNull(first.className);
assertEquals(2, first.index);
PathSearch.Chunk second = list.get(1);
assertTrue(second.anyDepth);
assertEquals("world", second.controlType);
assertEquals("Foo", second.className);
PathSearch.Chunk third = list.get(2);
assertFalse(third.anyDepth);
assertNull(third.controlType);
assertEquals("Bar", third.className);
}
|
@Override
public void close() {
if (closed) {
return;
}
closed = true;
try {
// graceful close
DefaultFuture.closeChannel(channel, ConfigurationUtils.reCalShutdownTime(shutdownTimeout));
} catch (Exception e) {
logger.warn(TRANSPORT_FAILED_CLOSE, "", "", e.getMessage(), e);
}
try {
channel.close();
} catch (Exception e) {
logger.warn(TRANSPORT_FAILED_CLOSE, "", "", e.getMessage(), e);
}
}
|
@Test
void closeTest() {
Assertions.assertFalse(channel.isClosed());
header.close();
Assertions.assertTrue(channel.isClosed());
}
|
@Override
public Iterator<IndexKeyEntries> getSqlRecordIteratorBatch(@Nonnull Comparable value, boolean descending) {
return getSqlRecordIteratorBatch(value, descending, null);
}
|
@Test
public void getSqlRecordIteratorBatchCursorLeftExcludedRightIncludedDescending() {
var expectedOrder = List.of(7, 4, 1);
performCursorTest(expectedOrder, cursor -> store.getSqlRecordIteratorBatch(0, false, 1, true, true, cursor));
}
|
@Override
public CompletableFuture<Void> deleteTopicInBroker(String address, DeleteTopicRequestHeader requestHeader,
long timeoutMillis) {
CompletableFuture<Void> future = new CompletableFuture<>();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.DELETE_TOPIC_IN_BROKER, requestHeader);
remotingClient.invoke(address, request, timeoutMillis).thenAccept(response -> {
if (response.getCode() == ResponseCode.SUCCESS) {
future.complete(null);
} else {
log.warn("deleteTopicInBroker getResponseCommand failed, {} {}, header={}", response.getCode(), response.getRemark(), requestHeader);
future.completeExceptionally(new MQClientException(response.getCode(), response.getRemark()));
}
});
return future;
}
|
@Test
public void assertDeleteTopicInBrokerWithSuccess() throws Exception {
setResponseSuccess(null);
DeleteTopicRequestHeader requestHeader = mock(DeleteTopicRequestHeader.class);
CompletableFuture<Void> actual = mqClientAdminImpl.deleteTopicInBroker(defaultBrokerAddr, requestHeader, defaultTimeout);
assertNull(actual.get());
}
|
public void parse(InputStream stream, ContentHandler handler, Metadata metadata,
ParseContext context) throws IOException, SAXException, TikaException {
PDFParserConfig localConfig = defaultConfig;
PDFParserConfig userConfig = context.get(PDFParserConfig.class);
if (userConfig != null) {
localConfig = defaultConfig.cloneAndUpdate(userConfig);
}
if (localConfig.isSetKCMS()) {
System.setProperty("sun.java2d.cmm", "sun.java2d.cmm.kcms.KcmsServiceProvider");
}
IncrementalUpdateRecord incomingIncrementalUpdateRecord = context.get(IncrementalUpdateRecord.class);
context.set(IncrementalUpdateRecord.class, null);
initRenderer(localConfig, context);
PDDocument pdfDocument = null;
String password = "";
PDFRenderingState incomingRenderingState = context.get(PDFRenderingState.class);
TikaInputStream tstream = null;
boolean shouldClose = false;
OCRPageCounter prevOCRCounter = context.get(OCRPageCounter.class);
context.set(OCRPageCounter.class, new OCRPageCounter());
try {
if (shouldSpool(localConfig)) {
if (stream instanceof TikaInputStream) {
tstream = (TikaInputStream) stream;
} else {
tstream = TikaInputStream.get(CloseShieldInputStream.wrap(stream));
shouldClose = true;
}
context.set(PDFRenderingState.class, new PDFRenderingState(tstream));
} else {
tstream = TikaInputStream.cast(stream);
}
scanXRefOffsets(localConfig, tstream, metadata, context);
password = getPassword(metadata, context);
MemoryUsageSetting memoryUsageSetting = null;
if (localConfig.getMaxMainMemoryBytes() >= 0) {
memoryUsageSetting =
MemoryUsageSetting.setupMixed(localConfig.getMaxMainMemoryBytes());
} else {
memoryUsageSetting = MemoryUsageSetting.setupMainMemoryOnly();
}
pdfDocument = getPDDocument(stream, tstream, password,
memoryUsageSetting.streamCache, metadata, context);
boolean hasCollection = hasCollection(pdfDocument, metadata);
checkEncryptedPayload(pdfDocument, hasCollection, localConfig);
boolean hasXFA = hasXFA(pdfDocument, metadata);
boolean hasMarkedContent = hasMarkedContent(pdfDocument, metadata);
extractMetadata(pdfDocument, metadata, context);
extractSignatures(pdfDocument, metadata);
checkIllustrator(pdfDocument, metadata);
AccessChecker checker = localConfig.getAccessChecker();
checker.check(metadata);
renderPagesBeforeParse(tstream, handler, metadata, context, localConfig);
if (handler != null) {
if (shouldHandleXFAOnly(hasXFA, localConfig)) {
handleXFAOnly(pdfDocument, handler, metadata, context);
} else if (localConfig.getOcrStrategy()
.equals(PDFParserConfig.OCR_STRATEGY.OCR_ONLY)) {
OCR2XHTML.process(pdfDocument, handler, context, metadata,
localConfig);
} else if (hasMarkedContent && localConfig.isExtractMarkedContent()) {
PDFMarkedContent2XHTML
.process(pdfDocument, handler, context, metadata,
localConfig);
} else {
PDF2XHTML.process(pdfDocument, handler, context, metadata,
localConfig);
}
}
} catch (InvalidPasswordException e) {
metadata.set(PDF.IS_ENCRYPTED, "true");
throw new EncryptedDocumentException(e);
} finally {
metadata.set(OCR_PAGE_COUNT, context.get(OCRPageCounter.class).getCount());
context.set(OCRPageCounter.class, prevOCRCounter);
//reset the incrementalUpdateRecord even if null
context.set(IncrementalUpdateRecord.class, incomingIncrementalUpdateRecord);
PDFRenderingState currState = context.get(PDFRenderingState.class);
try {
if (currState != null && currState.getRenderResults() != null) {
currState.getRenderResults().close();
}
if (pdfDocument != null) {
pdfDocument.close();
}
} finally {
//replace the one that was here
context.set(PDFRenderingState.class, incomingRenderingState);
if (shouldClose && tstream != null) {
tstream.close();
}
}
}
}
|
@Test
public void testSkipBadPage() throws Exception {
//test file comes from govdocs1
//can't use TikaTest shortcuts because of exception
ContentHandler handler = new BodyContentHandler(-1);
Metadata m = new Metadata();
ParseContext context = new ParseContext();
try (InputStream is = getResourceAsStream("/test-documents/testPDF_bad_page_303226.pdf")) {
AUTO_DETECT_PARSER.parse(is, handler, m, context);
}
//as of PDFBox 2.0.28, exceptions are no longer thrown for this problem
String content = handler.toString();
assertEquals(0, m.getValues(TikaCoreProperties.TIKA_META_EXCEPTION_WARNING).length);
//assertContains("Unknown dir", m.get(TikaCoreProperties.TIKA_META_EXCEPTION_WARNING));
assertContains("1309.61", content);
//now try throwing exception immediately
PDFParserConfig config = new PDFParserConfig();
config.setCatchIntermediateIOExceptions(false);
context.set(PDFParserConfig.class, config);
handler = new BodyContentHandler(-1);
m = new Metadata();
try (InputStream is = getResourceAsStream("/test-documents/testPDF_bad_page_303226.pdf")) {
AUTO_DETECT_PARSER.parse(is, handler, m, context);
}
content = handler.toString();
assertEquals(0, m.getValues(TikaCoreProperties.TIKA_META_EXCEPTION_WARNING).length);
assertContains("1309.61", content);
}
|
public JvmMetrics getJvmMetrics() {
return jvmMetrics;
}
|
@Test
public void testReferenceOfSingletonJvmMetrics() {
JvmMetrics jvmMetrics = JvmMetrics.initSingleton("NodeManagerModule", null);
Assert.assertEquals("NodeManagerMetrics should reference the singleton" +
" JvmMetrics instance", jvmMetrics, metrics.getJvmMetrics());
}
|
public void renameDirectory(Path source, Path target, Runnable runWhenPathNotExist) {
if (pathExists(target)) {
throw new StarRocksConnectorException("Unable to rename from %s to %s. msg: target directory already exists",
source, target);
}
if (!pathExists(target.getParent())) {
createDirectory(target.getParent(), conf);
}
runWhenPathNotExist.run();
try {
if (!FileSystem.get(source.toUri(), conf).rename(source, target)) {
throw new StarRocksConnectorException("Failed to rename %s to %s: rename returned false", source, target);
}
} catch (IOException e) {
throw new StarRocksConnectorException("Failed to rename %s to %s, msg: %s", source, target, e.getMessage());
}
}
|
@Test
public void testRenameDir() {
HiveRemoteFileIO hiveRemoteFileIO = new HiveRemoteFileIO(new Configuration());
FileSystem fs = new MockedRemoteFileSystem(HDFS_HIVE_TABLE);
hiveRemoteFileIO.setFileSystem(fs);
FeConstants.runningUnitTest = true;
ExecutorService executorToRefresh = Executors.newSingleThreadExecutor();
ExecutorService executorToLoad = Executors.newSingleThreadExecutor();
CachingRemoteFileIO cachingFileIO = new CachingRemoteFileIO(hiveRemoteFileIO, executorToRefresh, 10, 10, 10);
RemoteFileOperations ops = new RemoteFileOperations(cachingFileIO, executorToLoad, executorToLoad,
false, true, new Configuration());
new MockUp<HiveWriteUtils>() {
@Mock
public boolean pathExists(Path path, Configuration conf) {
return true;
}
};
Path writePath = new Path("hdfs://hadoop01:9000/tmp/starrocks/queryid");
Path targetPath = new Path("hdfs://hadoop01:9000/user/hive/warehouse/test.db/t1");
ExceptionChecker.expectThrowsWithMsg(
StarRocksConnectorException.class,
"Unable to rename from hdfs://hadoop01:9000/tmp/starrocks/queryid to " +
"hdfs://hadoop01:9000/user/hive/warehouse/test.db/t1. msg: target directory already exists",
() -> ops.renameDirectory(writePath, targetPath, () -> {
}));
}
|
@Override
public AttributedList<Path> run(final Session<?> session) throws BackgroundException {
try {
final AttributedList<Path> list;
listener.reset();
if(this.isCached()) {
list = cache.get(directory);
listener.chunk(directory, list);
}
else {
final ListService service = session.getFeature(ListService.class);
if(log.isDebugEnabled()) {
log.debug(String.format("Run with feature %s", service));
}
list = service.list(directory, listener);
if(list.isEmpty()) {
listener.chunk(directory, list);
}
if(log.isDebugEnabled()) {
log.debug(String.format("Notify listener %s", listener));
}
}
listener.finish(directory, list, Optional.empty());
return list;
}
catch(ListCanceledException e) {
if(log.isWarnEnabled()) {
log.warn(String.format("Return partial directory listing for %s", directory));
}
listener.finish(directory, e.getChunk(), Optional.of(e));
return e.getChunk();
}
catch(BackgroundException e) {
if(log.isWarnEnabled()) {
log.warn(String.format("Notify listener for %s with error %s", directory, e));
}
listener.finish(directory, AttributedList.emptyList(), Optional.of(e));
throw e;
}
}
|
@Test
public void testRun() throws Exception {
final Host host = new Host(new TestProtocol());
final Session<?> session = new NullSession(host) {
@Override
public AttributedList<Path> list(final Path file, final ListProgressListener listener) {
return new AttributedList<>(Collections.singletonList(new Path("/home/jenkins/f", EnumSet.of(Path.Type.file))));
}
};
final PathCache cache = new PathCache(1);
final ListWorker worker = new ListWorker(cache,
new Path("/home/jenkins", EnumSet.of(Path.Type.directory)),
new DisabledListProgressListener());
final AttributedList<Path> list = worker.run(session);
assertFalse(list.isEmpty());
assertFalse(cache.containsKey(new Path("/home/jenkins", EnumSet.of(Path.Type.directory))));
worker.cleanup(list);
assertTrue(cache.containsKey(new Path("/home/jenkins", EnumSet.of(Path.Type.directory))));
}
|
@Override
public int hashCode() {
int hash = 3;
hash = 97 * hash + (this.qualifyingNames != null ? this.qualifyingNames.hashCode() : 0);
hash = 97 * hash + (this.resultType != null ? this.resultType.toString().hashCode() : 0);
return hash;
}
|
@Test
public void testHashCodeWithNullResultType() {
List<String> qualifyingNames = Collections.singletonList( "mapstruct" );
SelectionParameters params = new SelectionParameters( null, qualifyingNames, null, null );
assertThat( params.hashCode() )
.as( "ResultType nulls hashCode" )
.isEqualTo( ( 3 * 97 + qualifyingNames.hashCode() ) * 97 );
}
|
public static FileSystem get(URI uri) throws IOException {
return FileSystemSafetyNet.wrapWithSafetyNetWhenActivated(getUnguardedFileSystem(uri));
}
|
@Test
void testGet() throws URISyntaxException, IOException {
String scheme = "file";
assertThat(getFileSystemWithoutSafetyNet(scheme + ":///test/test"))
.isInstanceOf(LocalFileSystem.class);
assertThatThrownBy(() -> getFileSystemWithoutSafetyNet(scheme + "://test/test"))
.isInstanceOf(IOException.class)
.hasMessageStartingWith("Found local file path with authority '");
assertThat(getFileSystemWithoutSafetyNet(scheme + ":/test/test"))
.isInstanceOf(LocalFileSystem.class);
assertThat(getFileSystemWithoutSafetyNet(scheme + ":test/test"))
.isInstanceOf(LocalFileSystem.class);
assertThat(getFileSystemWithoutSafetyNet("/test/test")).isInstanceOf(LocalFileSystem.class);
assertThat(getFileSystemWithoutSafetyNet("test/test")).isInstanceOf(LocalFileSystem.class);
}
|
public static Wallet createBasic(Network network) {
return new Wallet(network, KeyChainGroup.createBasic(network));
}
|
@Test
public void createBasic() {
Wallet wallet = Wallet.createBasic(TESTNET);
assertEquals(0, wallet.getKeyChainGroupSize());
wallet.importKey(new ECKey());
assertEquals(1, wallet.getKeyChainGroupSize());
}
|
@Override
public ObjectNode encode(Criterion criterion, CodecContext context) {
EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context);
return encoder.encode();
}
|
@Test
public void matchIcmpv6CodeTest() {
Criterion criterion = Criteria.matchIcmpv6Code((byte) 250);
ObjectNode result = criterionCodec.encode(criterion, context);
assertThat(result, matchesCriterion(criterion));
}
|
public int runCommand(final String command) {
int errorCode = NO_ERROR;
RemoteServerSpecificCommand.validateClient(terminal.writer(), restClient);
try {
// Commands executed by the '-e' parameter do not need to execute specific CLI
// commands. For RUN SCRIPT commands, users can use the '-f' command parameter.
handleLine(command);
} catch (final EndOfFileException exception) {
// Ignore - only used by runInteractively() to exit the CLI
} catch (final Exception exception) {
errorCode = ERROR;
LOGGER.error("An error occurred while running a command. Error = "
+ exception.getMessage(), exception);
terminal.printError(ErrorMessageUtil.buildErrorMessage(exception),
exception.toString());
}
terminal.flush();
return errorCode;
}
|
@Test
public void shouldSubstituteVariablesOnRunCommand() {
// Given:
final StringBuilder builder = new StringBuilder();
builder.append("SET '" + KsqlConfig.KSQL_VARIABLE_SUBSTITUTION_ENABLE + "' = 'true';");
builder.append("DEFINE var = '" + ORDER_DATA_PROVIDER.sourceName() + "';");
builder.append("CREATE STREAM shouldRunCommand AS SELECT * FROM ${var};");
// When:
localCli.runCommand(builder.toString());
// Then:
assertThat(terminal.getOutputString(),
containsString("Created query with ID CSAS_SHOULDRUNCOMMAND"));
}
|
@Override
public DefaultIssueLocation message(String message) {
validateMessage(message);
String sanitizedMessage = sanitizeNulls(message);
this.message = abbreviate(trim(sanitizedMessage), Issue.MESSAGE_MAX_SIZE);
return this;
}
|
@Test
public void should_not_trim_on_messageFormattings_message_method(){
assertThat(new DefaultIssueLocation().message(" message ", Collections.emptyList()).message()).isEqualTo(" message ");
}
|
@Override
public PostScript readPostScript(byte[] data, int offset, int length)
throws IOException
{
long cpuStart = THREAD_MX_BEAN.getCurrentThreadCpuTime();
CodedInputStream input = CodedInputStream.newInstance(data, offset, length);
DwrfProto.PostScript postScript = DwrfProto.PostScript.parseFrom(input);
HiveWriterVersion writerVersion = postScript.hasWriterVersion() && postScript.getWriterVersion() > 0 ? ORC_HIVE_8732 : ORIGINAL;
OptionalInt stripeCacheLength = OptionalInt.empty();
Optional<DwrfStripeCacheMode> stripeCacheMode = Optional.empty();
if (postScript.hasCacheSize() && postScript.hasCacheMode()) {
stripeCacheLength = OptionalInt.of(postScript.getCacheSize());
stripeCacheMode = Optional.of(toStripeCacheMode(postScript.getCacheMode()));
}
runtimeStats.addMetricValue("DwrfReadPostScriptTimeNanos", RuntimeUnit.NANO, THREAD_MX_BEAN.getCurrentThreadCpuTime() - cpuStart);
return new PostScript(
ImmutableList.of(),
postScript.getFooterLength(),
0,
toCompression(postScript.getCompression()),
postScript.getCompressionBlockSize(),
writerVersion,
stripeCacheLength,
stripeCacheMode);
}
|
@Test
public void testReadPostScript()
throws IOException
{
byte[] data = baseProtoPostScript.toByteArray();
PostScript postScript = dwrfMetadataReader.readPostScript(data, 0, data.length);
assertEquals(postScript.getHiveWriterVersion(), HiveWriterVersion.ORC_HIVE_8732);
assertEquals(postScript.getFooterLength(), footerLength);
assertEquals(postScript.getCompression(), CompressionKind.ZSTD);
assertEquals(postScript.getCompressionBlockSize(), compressionBlockSize);
assertEquals(postScript.getDwrfStripeCacheLength().getAsInt(), 12);
assertEquals(postScript.getDwrfStripeCacheMode().get(), DwrfStripeCacheMode.INDEX_AND_FOOTER);
}
|
@Override
public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
if (args.size() < 2) {
printInfo(err);
return 1;
}
int index = 0;
String input = args.get(index);
String option = "all";
if ("-o".equals(input)) {
option = args.get(1);
index += 2;
}
if (!OPTIONS.contains(option) || (args.size() - index < 1)) {
printInfo(err);
return 1;
}
input = args.get(index++);
if (!REPORT.equals(option)) {
if (args.size() - index < 1) {
printInfo(err);
return 1;
}
}
if (ALL.equals(option)) {
return recoverAll(input, args.get(index), out, err);
} else if (PRIOR.equals(option)) {
return recoverPrior(input, args.get(index), out, err);
} else if (AFTER.equals(option)) {
return recoverAfter(input, args.get(index), out, err);
} else if (REPORT.equals(option)) {
return reportOnly(input, out, err);
} else {
return 1;
}
}
|
@Test
void repairPriorCorruptRecord() throws Exception {
String output = run(new DataFileRepairTool(), "-o", "prior", corruptRecordFile.getPath(), repairedFile.getPath());
assertTrue(output.contains("Number of blocks: 3 Number of corrupt blocks: 1"), output);
assertTrue(output.contains("Number of records: 8 Number of corrupt records: 2"), output);
checkFileContains(repairedFile, "apple", "banana", "celery", "date");
}
|
@Override
public void writeInt(final int v) throws IOException {
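// grow the backing buffer if needed, then write the int at the current position via Unsafe in the platform's native byte order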
ensureAvailable(INT_SIZE_IN_BYTES);
MEM.putInt(buffer, ARRAY_BYTE_BASE_OFFSET + pos, v);
pos += INT_SIZE_IN_BYTES;
}
|
@Test
public void testWriteIntForPositionVByteOrder() throws Exception {
int expected = 100;
out.writeInt(10, expected, LITTLE_ENDIAN);
out.writeInt(14, expected, BIG_ENDIAN);
int actual1 = Bits.readInt(out.buffer, 10, false);
int actual2 = Bits.readInt(out.buffer, 14, true);
assertEquals(expected, actual1);
assertEquals(expected, actual2);
}
|
public MethodBuilder sticky(Boolean sticky) {
this.sticky = sticky;
return getThis();
}
|
@Test
void sticky() {
MethodBuilder builder = MethodBuilder.newBuilder();
builder.sticky(true);
Assertions.assertTrue(builder.build().getSticky());
}
|
public static ServiceInfo selectInstances(ServiceInfo serviceInfo, String cluster) {
return selectInstances(serviceInfo, cluster, false, false);
}
|
@Test
void testSelectInstances() {
ServiceInfo serviceInfo = new ServiceInfo();
serviceInfo.setGroupName("groupName");
serviceInfo.setName("serviceName");
serviceInfo.setChecksum("checkSum");
serviceInfo.setAllIPs(false);
ServiceInfo cluster = ServiceUtil.selectInstances(serviceInfo, "cluster");
assertNotNull(cluster);
}
|
@Override public void onEvent(ApplicationEvent event) {
// only onRequest is used
}
|
@Test void onEvent_skipsErrorWhenSet() {
setEventType(RequestEvent.Type.FINISHED);
setBaseUri("/");
when(request.getProperty(SpanCustomizer.class.getName())).thenReturn(span);
Exception error = new Exception();
when(requestEvent.getException()).thenReturn(error);
when(request.getProperty("error")).thenReturn("madness");
listener.onEvent(requestEvent);
verify(request).getProperty(SpanCustomizer.class.getName());
verify(request).getProperty("error");
verify(request).getUriInfo();
verify(request).setProperty("http.route", ""); // empty means no route found
verifyNoMoreInteractions(request); // no setting of error
}
|
public boolean isSkipTlsVerify() {
return skipTlsVerify;
}
|
@Test
public void testDefaultSkipTlsVerifyIsFalse() {
SplunkHECConfiguration config = new SplunkHECConfiguration();
assertFalse(config.isSkipTlsVerify());
}
|
List<Endpoint> endpoints() {
try {
String urlString = String.format("%s/api/v1/namespaces/%s/pods", kubernetesMaster, namespace);
return enrichWithPublicAddresses(parsePodsList(callGet(urlString)));
} catch (RestClientException e) {
return handleKnownException(e);
}
}
|
@Test
public void endpointsByNamespaceWithNodeName() throws JsonProcessingException {
// given
// create KubernetesClient with useNodeNameAsExternalAddress=true
cleanUpClient();
kubernetesClient = newKubernetesClient(true);
stub(String.format("/api/v1/namespaces/%s/pods", NAMESPACE), podsListResponse());
stub(String.format("/api/v1/namespaces/%s/endpoints", NAMESPACE), endpointsListResponse());
stub(String.format("/api/v1/namespaces/%s/services/hazelcast-0", NAMESPACE), service(servicePort(32123, 5701, 31916)));
stub(String.format("/api/v1/namespaces/%s/services/service-1", NAMESPACE), service(servicePort(32124, 5701, 31917)));
stub(String.format("/api/v1/namespaces/%s/pods/hazelcast-0", NAMESPACE),
pod("hazelcast-0", NAMESPACE, "node-name-1", 5701));
stub(String.format("/api/v1/namespaces/%s/pods/hazelcast-1", NAMESPACE),
pod("hazelcast-1", NAMESPACE, "node-name-1", 5701));
String forbiddenBody = "\"reason\":\"Forbidden\"";
stub("/api/v1/nodes/node-name-1", HttpURLConnection.HTTP_FORBIDDEN, forbiddenBody);
stub("/api/v1/nodes/node-name-2", HttpURLConnection.HTTP_FORBIDDEN, forbiddenBody);
// when
List<Endpoint> result = kubernetesClient.endpoints();
// then
assertThat(formatPrivate(result)).containsExactlyInAnyOrder(ready("192.168.0.25", 5701), ready("172.17.0.5", 5702));
assertThat(formatPublic(result)).containsExactlyInAnyOrder(ready("node-name-1", 31916), ready("node-name-2", 31917));
}
|
@Override
public void processElement(StreamRecord<Event> streamRecord)
throws InterruptedException, TimeoutException, ExecutionException {
Event event = streamRecord.getValue();
if (event instanceof SchemaChangeEvent) {
processSchemaChangeEvents((SchemaChangeEvent) event);
} else if (event instanceof DataChangeEvent) {
processDataChangeEvents(streamRecord, (DataChangeEvent) event);
} else {
throw new RuntimeException("Unknown event type in Stream record: " + event);
}
}
|
@Test
void testProcessElement() throws Exception {
final int maxParallelism = 4;
final int parallelism = 2;
final OperatorID opID = new OperatorID();
final TableId tableId = TableId.tableId("testProcessElement");
final RowType rowType = DataTypes.ROW(DataTypes.BIGINT(), DataTypes.STRING());
List<OneInputStreamOperatorTestHarness<Event, Event>> testHarnesses = new ArrayList<>();
for (int subtaskIndex = 0; subtaskIndex < parallelism; subtaskIndex++) {
OneInputStreamOperatorTestHarness<Event, Event> testHarness =
createTestHarness(maxParallelism, parallelism, subtaskIndex, opID);
testHarnesses.add(testHarness);
testHarness.setup(EventSerializer.INSTANCE);
testHarness.open();
Map<String, String> meta = new HashMap<>();
meta.put("subtask", String.valueOf(subtaskIndex));
BinaryRecordDataGenerator generator = new BinaryRecordDataGenerator(rowType);
List<Event> testData =
Arrays.asList(
DataChangeEvent.updateEvent(
tableId,
generator.generate(
new Object[] {1L, BinaryStringData.fromString("1")}),
generator.generate(
new Object[] {2L, BinaryStringData.fromString("2")}),
meta),
DataChangeEvent.updateEvent(
tableId,
generator.generate(
new Object[] {3L, BinaryStringData.fromString("3")}),
generator.generate(
new Object[] {4L, BinaryStringData.fromString("4")}),
meta));
for (Event event : testData) {
testHarness.processElement(event, 0);
}
Collection<StreamRecord<Event>> result = testHarness.getRecordOutput();
assertThat(result.stream().map(StreamRecord::getValue).collect(Collectors.toList()))
.isEqualTo(testData);
}
for (int subtaskIndex = 0; subtaskIndex < parallelism; subtaskIndex++) {
testHarnesses.get(subtaskIndex).close();
}
}
|
@CheckForNull
public ByteOrderMark detectBOM(byte[] buffer) {
return Arrays.stream(boms)
.filter(b -> isBom(b, buffer))
.findAny()
.orElse(null);
}
|
@Test
public void detectBOM() throws URISyntaxException, IOException {
byte[] b = ByteOrderMark.UTF_16BE.getBytes();
assertThat(charsets.detectBOM(b)).isEqualTo(ByteOrderMark.UTF_16BE);
assertThat(charsets.detectBOM(readFile("UTF-8"))).isEqualTo(ByteOrderMark.UTF_8);
assertThat(charsets.detectBOM(readFile("UTF-16BE"))).isEqualTo(ByteOrderMark.UTF_16BE);
assertThat(charsets.detectBOM(readFile("UTF-16LE"))).isEqualTo(ByteOrderMark.UTF_16LE);
assertThat(charsets.detectBOM(readFile("UTF-32BE"))).isEqualTo(ByteOrderMark.UTF_32BE);
assertThat(charsets.detectBOM(readFile("UTF-32LE"))).isEqualTo(ByteOrderMark.UTF_32LE);
}
|
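/*
 * Example usage (a sketch; the exact ordering depends on the unseen
 * STRING_COMPARATOR, which is assumed to order the numeric segments):
 *
 *   compareVersion("1.2.1", "1.2.0");      // > 0
 *   compareVersion("1.2.1-beta", "1.2.1"); // == 0, the "-beta" qualifier is stripped
 *   compareVersion("1.2", "1.2.0");        // throws IllegalArgumentException: not x.y.z
 */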
public static int compareVersion(final String versionA, final String versionB) {
final String[] sA = versionA.split("\\.");
final String[] sB = versionB.split("\\.");
int expectSize = 3;
if (sA.length != expectSize || sB.length != expectSize) {
throw new IllegalArgumentException("version must be like x.y.z(-beta)");
}
int first = Objects.compare(sA[0], sB[0], STRING_COMPARATOR);
if (first != 0) {
return first;
}
int second = Objects.compare(sA[1], sB[1], STRING_COMPARATOR);
if (second != 0) {
return second;
}
return Objects.compare(sA[2].split("-")[0], sB[2].split("-")[0], STRING_COMPARATOR);
}
|
@Test
void testVersionCompareResourceNotExist() {
URL resource = VersionUtils.class.getClassLoader().getResource("nacos-version.txt");
assertNotNull(resource);
File originFile = new File(resource.getFile());
File tempFile = new File(originFile.getAbsolutePath() + ".rename");
assertTrue(originFile.renameTo(tempFile));
// must not throw any exception
VersionUtils.compareVersion("1.2.1", "1.2.1");
assertTrue(tempFile.renameTo(originFile));
}
|
@Override
public QueryTarget create(InternalSerializationService serializationService, Extractors extractors, boolean isKey) {
return new HazelcastJsonQueryTarget(serializationService, extractors, isKey);
}
|
@Test
@Parameters({
"true",
"false"
})
public void test_create(boolean key) {
Extractors extractors = Extractors.newBuilder(SERIALIZATION_SERVICE).build();
HazelcastJsonQueryTargetDescriptor descriptor = HazelcastJsonQueryTargetDescriptor.INSTANCE;
// when
QueryTarget target = descriptor.create(SERIALIZATION_SERVICE, extractors, key);
// then
assertThat(target).isInstanceOf(HazelcastJsonQueryTarget.class);
}
|
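/*
 * The decoder below parses the Redis INFO reply format: one "key:value" pair
 * per line, with an optional trailing '\r'. Blank lines, comment lines and
 * entries without a value are skipped because they do not split into exactly
 * two non-empty parts. A minimal sketch of the expected input (assumed, based
 * on the parsing logic and the test below):
 *
 *   # Server
 *   redis_version:5.0.10
 *   redis_mode:standalone
 */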
@Override
public Properties decode(ByteBuf buf, State state) {
String value = buf.toString(CharsetUtil.UTF_8);
Properties result = new Properties();
for (String entry : value.split("\n")) {
if (entry.length() < 2) {
continue;
}
String[] pair = entry.split(":");
        if (pair.length != 2 || pair[0].length() == 0) {
continue;
}
String second = pair[1];
if (second.charAt(second.length() - 1) == '\r') {
second = second.substring(0, second.length() - 1);
}
result.put(pair[0], second);
}
return result;
}
|
@Test
public void testDecode() {
Properties p = decoder.decode(Unpooled.copiedBuffer(info, StandardCharsets.UTF_8), null);
Assert.assertEquals(p.getProperty("redis_version"), "5.0.10");
Assert.assertEquals(p.getProperty("redis_mode"), "standalone");
Assert.assertNull(p.getProperty("config_file"));
}
|
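/*
 * Implements the FEEL context() built-in: each entry must be a map with a
 * String "key" member and a "value" member. A non-map entry, a missing key or
 * value, or a duplicate key produces an InvalidParametersEvent error that
 * reports the 1-based index (h_index) of the offending entry.
 */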
public FEELFnResult<Map<String, Object>> invoke(@ParameterName("entries") List<Object> entries) {
if (entries == null) {
return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "entries", "cannot be null"));
}
Map<String, Object> result = new HashMap<>();
for (int i = 0; i < entries.size(); i++) {
final int h_index = i + 1;
if (entries.get(i) instanceof Map) {
Map<?, ?> map = (Map<?, ?>) entries.get(i);
String key;
Object value;
if (map.get("key") instanceof String) {
key = (String) map.get("key");
} else {
return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "entry of index " + (h_index) + " is missing a `key` entry"));
}
if (map.containsKey("value")) {
value = map.get("value");
} else {
return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "entry of index " + (h_index) + " is missing a `value` entry"));
}
if (result.containsKey(key)) {
return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "entry of index " + (h_index) + " contains duplicate key"));
}
result.put(key, value);
} else {
return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "entry of index " + (h_index) + " is not a valid context"));
}
}
return FEELFnResult.ofResult(result);
}
|
@Test
void invokeListNull() {
FunctionTestUtil.assertResultError(contextFunction.invoke(null), InvalidParametersEvent.class);
}
|
public Set<Integer> nodesThatShouldBeDown(ClusterState state) {
return calculate(state).nodesThatShouldBeDown();
}
|
@Test
void retired_node_is_counted_as_down() {
GroupAvailabilityCalculator calc = calcForHierarchicCluster(
DistributionBuilder.withGroups(3).eachWithNodeCount(2), 0.99);
assertThat(calc.nodesThatShouldBeDown(clusterState(
"distributor:6 storage:6 .1.s:r")), equalTo(indices(0)));
}
|
@Override
public void init() {
transactionManager = new UserTransactionManager();
userTransactionService = new UserTransactionServiceImp();
userTransactionService.init();
}
|
@Test
void assertInit() throws Exception {
transactionManagerProvider.init();
assertNull(transactionManagerProvider.getTransactionManager().getTransaction());
assertFalse(transactionManagerProvider.getTransactionManager().getForceShutdown());
assertTrue(transactionManagerProvider.getTransactionManager().getStartupTransactionService());
}
|
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
if (tradingRecord != null && !tradingRecord.isClosed()) {
Num entryPrice = tradingRecord.getCurrentPosition().getEntry().getNetPrice();
Num currentPrice = this.referencePrice.getValue(index);
Num threshold = this.stopLossThreshold.getValue(index);
if (tradingRecord.getCurrentPosition().getEntry().isBuy()) {
return currentPrice.isLessThan(entryPrice.minus(threshold));
} else {
return currentPrice.isGreaterThan(entryPrice.plus(threshold));
}
}
return false;
}
|
@Test
public void testClosedPosition() {
ZonedDateTime initialEndDateTime = ZonedDateTime.now();
for (int i = 0; i < 10; i++) {
series.addBar(initialEndDateTime.plusDays(i), 100, 105, 95, 100);
}
AverageTrueRangeStopLossRule rule = new AverageTrueRangeStopLossRule(series, 5, 2);
// Enter and exit position
TradingRecord tradingRecord = new BaseTradingRecord();
tradingRecord.enter(0, series.numOf(100), series.numOf(1));
tradingRecord.exit(5, series.numOf(110), series.numOf(1));
assertFalse(rule.isSatisfied(9, tradingRecord));
}
|
@Override
public Optional<EfestoOutputPMML> evaluateInput(EfestoInput<PMMLRequestData> toEvaluate,
EfestoRuntimeContext context) {
return executeEfestoInput(toEvaluate, context);
}
|
@Test
void evaluateWrongIdentifier() {
modelLocalUriId = getModelLocalUriIdFromPmmlIdFactory(FILE_NAME, "wrongmodel");
PMMLRequestData pmmlRequestData = getPMMLRequestData(MODEL_NAME, FILE_NAME);
EfestoInput<PMMLRequestData> efestoInput = new BaseEfestoInput<>(modelLocalUriId, pmmlRequestData);
Optional<EfestoOutputPMML> retrieved = kieRuntimeServicePMMLRequestData.evaluateInput(efestoInput,
getEfestoContext(memoryCompilerClassLoader));
assertThat(retrieved).isNotNull().isNotPresent();
}
|
public boolean containsMessage(long ledgerId, long entryId) {
if (lastMutableBucket.containsMessage(ledgerId, entryId)) {
return true;
}
return findImmutableBucket(ledgerId).map(bucket -> bucket.containsMessage(ledgerId, entryId))
.orElse(false);
}
|
@Test(dataProvider = "delayedTracker")
public void testContainsMessage(BucketDelayedDeliveryTracker tracker) {
tracker.addMessage(1, 1, 10);
tracker.addMessage(2, 2, 20);
assertTrue(tracker.containsMessage(1, 1));
clockTime.set(20);
Set<Position> scheduledMessages = tracker.getScheduledMessages(1);
assertEquals(scheduledMessages.stream().findFirst().get().getEntryId(), 1);
tracker.addMessage(3, 3, 30);
tracker.addMessage(4, 4, 30);
tracker.addMessage(5, 5, 30);
tracker.addMessage(6, 6, 30);
assertTrue(tracker.containsMessage(3, 3));
tracker.close();
}
|
@Override
public void updatePod(Pod pod) {
checkNotNull(pod, ERR_NULL_POD);
checkArgument(!Strings.isNullOrEmpty(pod.getMetadata().getUid()),
ERR_NULL_POD_UID);
kubevirtPodStore.updatePod(pod);
log.debug(String.format(MSG_POD, pod.getMetadata().getName(), MSG_UPDATED));
}
|
@Test(expected = IllegalArgumentException.class)
public void testUpdateUnregisteredPod() {
target.updatePod(POD);
}
|
@VisibleForTesting
static void checkAuthorization(ReqContext reqContext, IAuthorizer auth, String operation, String function)
throws AuthorizationException {
checkAuthorization(reqContext, auth, operation, function, true);
}
|
@Test
public void testStrict() throws Exception {
ReqContext jt = new ReqContext(new Subject());
SingleUserPrincipal jumpTopo = new SingleUserPrincipal("jump_topo");
jt.subject().getPrincipals().add(jumpTopo);
ReqContext jc = new ReqContext(new Subject());
SingleUserPrincipal jumpClient = new SingleUserPrincipal("jump_client");
jc.subject().getPrincipals().add(jumpClient);
ReqContext other = new ReqContext(new Subject());
SingleUserPrincipal otherUser = new SingleUserPrincipal("other");
other.subject().getPrincipals().add(otherUser);
Map<String, AclFunctionEntry> acl = new HashMap<>();
acl.put("jump", new AclFunctionEntry(Collections.singletonList(jumpClient.getName()), jumpTopo.getName()));
Map<String, Object> conf = new HashMap<>();
conf.put(Config.DRPC_AUTHORIZER_ACL_STRICT, true);
conf.put(Config.STORM_PRINCIPAL_TO_LOCAL_PLUGIN, DefaultPrincipalToLocal.class.getName());
DRPCSimpleACLAuthorizer auth = new DRPCSimpleACLAuthorizer() {
@Override
protected Map<String, AclFunctionEntry> readAclFromConfig() {
return acl;
}
};
auth.prepare(conf);
//JUMP
DRPC.checkAuthorization(jt, auth, "fetchRequest", "jump");
assertThrows(() -> DRPC.checkAuthorization(jc, auth, "fetchRequest", "jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(other, auth, "fetchRequest", "jump"), AuthorizationException.class);
DRPC.checkAuthorization(jt, auth, "result", "jump");
assertThrows(() -> DRPC.checkAuthorization(jc, auth, "result", "jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(other, auth, "result", "jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(jt, auth, "execute", "jump"), AuthorizationException.class);
DRPC.checkAuthorization(jc, auth, "execute", "jump");
assertThrows(() -> DRPC.checkAuthorization(other, auth, "execute", "jump"), AuthorizationException.class);
//not_jump (closed in strict mode)
assertThrows(() -> DRPC.checkAuthorization(jt, auth, "fetchRequest", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(jc, auth, "fetchRequest", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(other, auth, "fetchRequest", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(jt, auth, "result", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(jc, auth, "result", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(other, auth, "result", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(jt, auth, "execute", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(jc, auth, "execute", "not_jump"), AuthorizationException.class);
assertThrows(() -> DRPC.checkAuthorization(other, auth, "execute", "not_jump"), AuthorizationException.class);
}
|
@GET
@UnitOfWork
public List<Person> listPeople() {
return peopleDAO.findAll();
}
|
@Test
void listPeople() {
final List<Person> people = Collections.singletonList(person);
when(PERSON_DAO.findAll()).thenReturn(people);
final List<Person> response = RESOURCES.target("/people")
.request().get(new GenericType<List<Person>>() {
});
verify(PERSON_DAO).findAll();
assertThat(response).containsAll(people);
}
|
public static HttpServer2.Builder loadSslConfiguration(
HttpServer2.Builder builder) {
return loadSslConfiguration(builder, null);
}
|
@Test
void testLoadSslConfiguration() throws Exception {
Configuration conf = provisionCredentialsForSSL();
    TestBuilder builder = new TestBuilder();
builder = (TestBuilder) WebAppUtils.loadSslConfiguration(
builder, conf);
String keypass = "keypass";
String storepass = "storepass";
String trustpass = "trustpass";
// make sure we get the right passwords in the builder
    assertEquals(keypass, builder.keypass);
    assertEquals(storepass, builder.keystorePassword);
    assertEquals(trustpass, builder.truststorePassword);
}
|
@Override
public long getNumBytesProduced() {
checkState(
subpartitionBytesByPartitionIndex.size() == numOfPartitions,
"Not all partition infos are ready");
return subpartitionBytesByPartitionIndex.values().stream()
.flatMapToLong(Arrays::stream)
.reduce(0L, Long::sum);
}
|
@Test
void testGetNumBytesProduced() {
PointwiseBlockingResultInfo resultInfo =
new PointwiseBlockingResultInfo(new IntermediateDataSetID(), 2, 2);
resultInfo.recordPartitionInfo(0, new ResultPartitionBytes(new long[] {32L, 32L}));
resultInfo.recordPartitionInfo(1, new ResultPartitionBytes(new long[] {64L, 64L}));
assertThat(resultInfo.getNumBytesProduced()).isEqualTo(192L);
}
|
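/*
 * The visitor below walks a schema graph iteratively instead of recursively:
 * the deque holds Schemas still to be visited plus Supplier callbacks that run
 * a non-terminal's post-visit action once all of its children have been
 * processed. Cycles (e.g. recursive records) are broken by the identity-based
 * 'visited' map, which routes an already-seen schema to visitTerminal, and
 * SKIP_SIBLINGS drains the pending Schemas above the parent's post-visit entry.
 */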
public static <T> T visit(final Schema start, final SchemaVisitor<T> visitor) {
// Set of Visited Schemas
IdentityHashMap<Schema, Schema> visited = new IdentityHashMap<>();
// Stack that contains the Schemas to process and afterVisitNonTerminal
// functions.
// Deque<Either<Schema, Supplier<SchemaVisitorAction>>>
// Using Either<...> has a cost we want to avoid...
Deque<Object> dq = new ArrayDeque<>();
dq.push(start);
Object current;
while ((current = dq.poll()) != null) {
if (current instanceof Supplier) {
// We are executing a non-terminal post visit.
SchemaVisitor.SchemaVisitorAction action = ((Supplier<SchemaVisitor.SchemaVisitorAction>) current).get();
switch (action) {
case CONTINUE:
break;
case SKIP_SIBLINGS:
while (dq.peek() instanceof Schema) {
dq.remove();
}
break;
case TERMINATE:
return visitor.get();
case SKIP_SUBTREE:
default:
throw new UnsupportedOperationException("Invalid action " + action);
}
} else {
Schema schema = (Schema) current;
boolean terminate;
if (visited.containsKey(schema)) {
terminate = visitTerminal(visitor, schema, dq);
} else {
Schema.Type type = schema.getType();
switch (type) {
case ARRAY:
terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getElementType()));
visited.put(schema, schema);
break;
case RECORD:
terminate = visitNonTerminal(visitor, schema, dq, () -> schema.getFields().stream().map(Field::schema)
.collect(Collectors.toCollection(ArrayDeque::new)).descendingIterator());
visited.put(schema, schema);
break;
case UNION:
terminate = visitNonTerminal(visitor, schema, dq, schema.getTypes());
visited.put(schema, schema);
break;
case MAP:
terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getValueType()));
visited.put(schema, schema);
break;
default:
terminate = visitTerminal(visitor, schema, dq);
break;
}
}
if (terminate) {
return visitor.get();
}
}
}
return visitor.get();
}
|
@Test
public void testVisit9() {
String s9 = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
+ "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"ct2\", \"fields\": "
+ "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"long\"}" + "]}";
Assert.assertEquals("c1.ct2.\"int\"!", Schemas.visit(new Schema.Parser().parse(s9), new TestVisitor()));
}
|
public static List<Map<String, String>> getTags(List<Rule> rules) {
if (CollectionUtils.isEmpty(rules)) {
return Collections.emptyList();
}
List<Map<String, String>> tags = new ArrayList<>();
for (Rule rule : rules) {
for (Route route : rule.getRoute()) {
tags.add(route.getTags());
}
}
return tags;
}
|
@Test
public void testGetTags() {
List<Map<String, String>> tags = RuleUtils.getTags(list);
Assert.assertEquals(2, tags.size());
Assert.assertEquals("1.0.1", tags.get(0).get("version"));
Assert.assertEquals("1.0.0", tags.get(1).get("version"));
}
|
public int allocate(final String label)
{
return allocate(label, DEFAULT_TYPE_ID);
}
|
@Test
void shouldStoreMultipleLabels()
{
final int abc = manager.allocate("abc");
final int def = manager.allocate("def");
final int ghi = manager.allocate("ghi");
reader.forEach(consumer);
final InOrder inOrder = Mockito.inOrder(consumer);
inOrder.verify(consumer).accept(abc, "abc");
inOrder.verify(consumer).accept(def, "def");
inOrder.verify(consumer).accept(ghi, "ghi");
inOrder.verifyNoMoreInteractions();
}
|
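/*
 * Builds the permission string that is checked against role permissions.
 * Resources of SignType.SPECIFIED are returned verbatim; otherwise the shape is
 * namespaceId (omitted for the default namespace) followed by SPLITTER-delimited
 * group and type/name segments, where a blank group becomes '*' and a blank
 * resource name becomes '<type>/*'. The SPLITTER character itself is defined in
 * the unseen Constants.Resource.
 */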
private String joinResource(Resource resource) {
if (SignType.SPECIFIED.equals(resource.getType())) {
return resource.getName();
}
StringBuilder result = new StringBuilder();
String namespaceId = resource.getNamespaceId();
if (StringUtils.isNotBlank(namespaceId)) {
// https://github.com/alibaba/nacos/issues/10347
if (!DEFAULT_NAMESPACE_ID.equals(namespaceId)) {
result.append(namespaceId);
}
}
String group = resource.getGroup();
if (StringUtils.isBlank(group)) {
result.append(Constants.Resource.SPLITTER).append('*');
} else {
result.append(Constants.Resource.SPLITTER).append(group);
}
String resourceName = resource.getName();
if (StringUtils.isBlank(resourceName)) {
result.append(Constants.Resource.SPLITTER).append(resource.getType().toLowerCase()).append("/*");
} else {
result.append(Constants.Resource.SPLITTER).append(resource.getType().toLowerCase()).append('/')
.append(resourceName);
}
return result.toString();
}
|
@Test
void joinResource() throws Exception {
Method method = nacosRoleServiceClass.getDeclaredMethod("joinResource", Resource.class);
method.setAccessible(true);
Resource resource = new Resource("public", "group", AuthConstants.UPDATE_PASSWORD_ENTRY_POINT, "rw", null);
Object invoke = method.invoke(nacosRoleService, new Resource[] {resource});
assertNotNull(invoke);
}
|
public boolean isUnknown() {
return pack() == UNKNOWN_VERSION;
}
|
@Test
public void isUnknown() {
assertTrue(Version.UNKNOWN.isUnknown());
assertTrue(Version.of(UNKNOWN_VERSION, UNKNOWN_VERSION).isUnknown());
assertTrue(Version.of(0, 0).isUnknown());
}
|
public static String toJson(MetadataUpdate metadataUpdate) {
return toJson(metadataUpdate, false);
}
|
@Test
public void testSetPropertiesToJson() {
String action = MetadataUpdateParser.SET_PROPERTIES;
Map<String, String> props =
ImmutableMap.of(
"prop1", "val1",
"prop2", "val2");
String propsMap = "{\"prop1\":\"val1\",\"prop2\":\"val2\"}";
String expected = String.format("{\"action\":\"%s\",\"updates\":%s}", action, propsMap);
MetadataUpdate update = new MetadataUpdate.SetProperties(props);
String actual = MetadataUpdateParser.toJson(update);
assertThat(actual)
.as("Set properties should serialize to the correct JSON value")
.isEqualTo(expected);
}
|
@Override
public int read() {
return (mPosition < mLimit) ? (mData[mPosition++] & 0xff) : -1;
}
|
@Test
void testRead() throws IOException {
UnsafeByteArrayInputStream stream = new UnsafeByteArrayInputStream("abc".getBytes());
assertThat(stream.read(), is((int) 'a'));
assertThat(stream.available(), is(2));
stream.skip(1);
assertThat(stream.available(), is(1));
byte[] bytes = new byte[1];
int read = stream.read(bytes);
assertThat(read, is(1));
assertThat(bytes, is("c".getBytes()));
stream.reset();
assertThat(stream.position(), is(0));
assertThat(stream.size(), is(3));
stream.position(1);
assertThat(stream.read(), is((int) 'b'));
}
|
public Authority getAuthority() {
return mUri.getAuthority();
}
|
@Test
public void authorityTypeTests() {
assertTrue(new AlluxioURI("file", Authority.fromString("localhost:8080"), "/b/c").getAuthority()
instanceof SingleMasterAuthority);
assertTrue(new AlluxioURI("file", Authority.fromString("zk@host:2181"), "/b/c").getAuthority()
instanceof ZookeeperAuthority);
assertTrue(new AlluxioURI("alluxio://zk@host1:2181,host2:2181,host3:2181/b/c").getAuthority()
instanceof ZookeeperAuthority);
assertTrue(new AlluxioURI("alluxio://zk@host1:2181;host2:2181;host3:2181/b/c").getAuthority()
instanceof ZookeeperAuthority);
assertTrue(new AlluxioURI("file", Authority.fromString(""), "/b/c").getAuthority()
instanceof NoAuthority);
assertTrue(new AlluxioURI("file", null, "/b/c").getAuthority()
instanceof NoAuthority);
assertTrue(new AlluxioURI("file", Authority.fromString(null), "/b/c").getAuthority()
instanceof NoAuthority);
assertTrue(new AlluxioURI("file:///b/c").getAuthority()
instanceof NoAuthority);
assertTrue(new AlluxioURI("file", Authority.fromString("ebj@logical"), "/b/c").getAuthority()
instanceof EmbeddedLogicalAuthority);
assertTrue(new AlluxioURI("file", Authority.fromString("zk@logical"), "/b/c").getAuthority()
instanceof ZookeeperLogicalAuthority);
assertTrue(new AlluxioURI("file", Authority.fromString("localhost"), "/b/c").getAuthority()
instanceof UnknownAuthority);
}
|
public String[] getFileTypeDisplayNames( Locale locale ) {
return new String[] { "Jobs", "XML" };
}
|
@Test
public void testGetFileTypeDisplayNames() throws Exception {
String[] names = jobFileListener.getFileTypeDisplayNames( null );
assertNotNull( names );
assertEquals( 2, names.length );
assertEquals( "Jobs", names[0] );
assertEquals( "XML", names[1] );
}
|
@Override
public void delete(K key) {
begin();
transactionalMap.delete(key);
commit();
}
|
@Test
public void testDelete() {
map.put(23, "value-23");
assertTrue(map.containsKey(23));
adapter.delete(23);
assertFalse(map.containsKey(23));
}
|
@Override
@Deprecated
public OffsetAndMetadata committed(TopicPartition partition) {
return committed(partition, Duration.ofMillis(defaultApiTimeoutMs));
}
|
@Test
public void testCommitted() {
time = new MockTime(1);
consumer = newConsumer();
Map<TopicPartition, OffsetAndMetadata> topicPartitionOffsets = mockTopicPartitionOffset();
completeFetchedCommittedOffsetApplicationEventSuccessfully(topicPartitionOffsets);
assertEquals(topicPartitionOffsets, consumer.committed(topicPartitionOffsets.keySet(), Duration.ofMillis(1000)));
verify(applicationEventHandler).addAndGet(ArgumentMatchers.isA(FetchCommittedOffsetsEvent.class));
final Metric metric = consumer.metrics()
.get(consumer.metricsRegistry().metricName("committed-time-ns-total", "consumer-metrics"));
assertTrue((double) metric.metricValue() > 0);
}
|
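/*
 * Merge semantics as implemented below: literal MAP params are merged
 * recursively (with the parent's mode propagated into the nested context),
 * literal STRING_MAP params are merged via putAll so entries from paramsToMerge
 * win on key collisions, and for all other types the incoming value takes
 * effect. The details of mode/metadata validation live in the unseen
 * buildMergedParamDefinition.
 */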
public static void mergeParams(
Map<String, ParamDefinition> params,
Map<String, ParamDefinition> paramsToMerge,
MergeContext context) {
if (paramsToMerge == null) {
return;
}
Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
.forEach(
name -> {
ParamDefinition paramToMerge = paramsToMerge.get(name);
if (paramToMerge == null) {
return;
}
if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
mergeParams(
baseMap,
toMergeMap,
MergeContext.copyWithParentMode(
context, params.getOrDefault(name, paramToMerge).getMode()));
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, baseMap));
} else if (paramToMerge.getType() == ParamType.STRING_MAP
&& paramToMerge.isLiteral()) {
Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
baseMap.putAll(toMergeMap);
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, baseMap));
} else {
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
}
});
}
|
@Test
public void testMergeAllowMoreRestrictiveMode() throws JsonProcessingException {
Map<String, ParamDefinition> allParams =
parseParamDefMap("{'tomerge': {'type': 'STRING','value': 'hello', 'mode': 'MUTABLE'}}");
Map<String, ParamDefinition> paramsToMerge =
parseParamDefMap(
"{'tomerge': {'type': 'STRING', 'value': 'goodbye', 'mode': 'MUTABLE_ON_START'}}");
ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext);
assertEquals(1, allParams.size());
assertEquals("goodbye", allParams.get("tomerge").asStringParamDef().getValue());
assertEquals(ParamMode.MUTABLE_ON_START, allParams.get("tomerge").asStringParamDef().getMode());
}
|
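/*
 * Returns the exit code of the platform symlink command: 1 when either argument
 * is null, otherwise the code reported by Shell.getSymlinkCommand. On Windows a
 * relative link is created from the link's parent directory so it resolves at
 * creation time, and the special SYMLINK_NO_PRIVILEGE exit code is logged when
 * the local security policy blocks symlink creation.
 */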
public static int symLink(String target, String linkname) throws IOException{
if (target == null || linkname == null) {
LOG.warn("Can not create a symLink with a target = " + target
+ " and link =" + linkname);
return 1;
}
// Run the input paths through Java's File so that they are converted to the
// native OS form
File targetFile = new File(
Path.getPathWithoutSchemeAndAuthority(new Path(target)).toString());
File linkFile = new File(
Path.getPathWithoutSchemeAndAuthority(new Path(linkname)).toString());
String[] cmd = Shell.getSymlinkCommand(
targetFile.toString(),
linkFile.toString());
ShellCommandExecutor shExec;
try {
if (Shell.WINDOWS &&
linkFile.getParentFile() != null &&
!new Path(target).isAbsolute()) {
// Relative links on Windows must be resolvable at the time of
// creation. To ensure this we run the shell command in the directory
// of the link.
//
shExec = new ShellCommandExecutor(cmd, linkFile.getParentFile());
} else {
shExec = new ShellCommandExecutor(cmd);
}
shExec.execute();
} catch (Shell.ExitCodeException ec) {
int returnVal = ec.getExitCode();
if (Shell.WINDOWS && returnVal == SYMLINK_NO_PRIVILEGE) {
LOG.warn("Fail to create symbolic links on Windows. "
+ "The default security settings in Windows disallow non-elevated "
+ "administrators and all non-administrators from creating symbolic links. "
+ "This behavior can be changed in the Local Security Policy management console");
} else if (returnVal != 0) {
LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed "
+ returnVal + " with: " + ec.getMessage());
}
return returnVal;
} catch (IOException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Error while create symlink " + linkname + " to " + target
+ "." + " Exception: " + StringUtils.stringifyException(e));
}
throw e;
}
return shExec.getExitCode();
}
|
@Test (timeout = 30000)
public void testSymlinkDelete() throws Exception {
File file = new File(del, FILE);
file.createNewFile();
File link = new File(del, "_link");
// create the symlink
FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
Verify.exists(file);
Verify.exists(link);
// make sure that deleting a symlink works properly
Verify.delete(link);
Verify.notExists(link);
Verify.exists(file);
}
|
public int run(String[] args) throws Exception {
if (args.length == 0) {
System.err.println("Too few arguments!");
printUsage();
return 1;
}
Path pattern = new Path(args[0]);
FileSystem fs = pattern.getFileSystem(getConf());
fs.setVerifyChecksum(true);
for (Path p : FileUtil.stat2Paths(fs.globStatus(pattern), pattern)) {
List<FileStatus> inputFiles = new ArrayList<FileStatus>();
FileStatus status = fs.getFileStatus(p);
if (status.isDirectory()) {
FileStatus[] files = fs.listStatus(p);
Collections.addAll(inputFiles, files);
} else {
inputFiles.add(status);
}
return dumpTypedBytes(inputFiles);
}
return -1;
}
|
@Test
public void testDumping() throws Exception {
Configuration conf = new Configuration();
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
.build();
FileSystem fs = cluster.getFileSystem();
PrintStream psBackup = System.out;
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream psOut = new PrintStream(out);
System.setOut(psOut);
DumpTypedBytes dumptb = new DumpTypedBytes(conf);
try {
Path root = new Path("/typedbytestest");
assertTrue(fs.mkdirs(root));
assertTrue(fs.exists(root));
OutputStreamWriter writer = new OutputStreamWriter(fs.create(new Path(
root, "test.txt")));
try {
for (int i = 0; i < 100; i++) {
writer.write("" + (10 * i) + "\n");
}
} finally {
writer.close();
}
String[] args = new String[1];
args[0] = "/typedbytestest";
int ret = dumptb.run(args);
assertEquals("Return value != 0.", 0, ret);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TypedBytesInput tbinput = new TypedBytesInput(new DataInputStream(in));
int counter = 0;
Object key = tbinput.read();
while (key != null) {
assertEquals(Long.class, key.getClass()); // offset
Object value = tbinput.read();
assertEquals(String.class, value.getClass());
assertTrue("Invalid output.",
Integer.parseInt(value.toString()) % 10 == 0);
counter++;
key = tbinput.read();
}
assertEquals("Wrong number of outputs.", 100, counter);
} finally {
try {
fs.close();
} catch (Exception e) {
}
System.setOut(psBackup);
cluster.shutdown();
}
}
|
public static <T> CheckedSupplier<T> recover(CheckedSupplier<T> supplier,
CheckedFunction<Throwable, T> exceptionHandler) {
return () -> {
try {
return supplier.get();
} catch (Throwable throwable) {
return exceptionHandler.apply(throwable);
}
};
}
|
@Test
public void shouldRecoverFromSpecificResult() throws Throwable {
CheckedSupplier<String> supplier = () -> "Wrong Result";
CheckedSupplier<String> callableWithRecovery = CheckedFunctionUtils.recover(supplier, (result) -> result.equals("Wrong Result"), (r) -> "Bla");
String result = callableWithRecovery.get();
assertThat(result).isEqualTo("Bla");
}
|
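/*
 * Precedence implemented below, from strongest to weakest:
 *   1. a required BATCH exchange mode always wins;
 *   2. the global ALL_EDGES_BLOCKING mode forces BATCH;
 *   3. otherwise BATCH_SHUFFLE_MODE selects BATCH, HYBRID_FULL or
 *      HYBRID_SELECTIVE, falling back to UNDEFINED (e.g. for
 *      ALL_EXCHANGES_PIPELINED).
 */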
public static StreamExchangeMode getBatchStreamExchangeMode(
ReadableConfig config, StreamExchangeMode requiredExchangeMode) {
if (requiredExchangeMode == StreamExchangeMode.BATCH) {
return StreamExchangeMode.BATCH;
}
final GlobalStreamExchangeMode globalExchangeMode =
getGlobalStreamExchangeMode(config).orElse(null);
if (globalExchangeMode == GlobalStreamExchangeMode.ALL_EDGES_BLOCKING) {
return StreamExchangeMode.BATCH;
}
final BatchShuffleMode shuffleMode = config.get(ExecutionOptions.BATCH_SHUFFLE_MODE);
if (shuffleMode == BatchShuffleMode.ALL_EXCHANGES_BLOCKING) {
return StreamExchangeMode.BATCH;
} else if (shuffleMode == BatchShuffleMode.ALL_EXCHANGES_HYBRID_FULL) {
return StreamExchangeMode.HYBRID_FULL;
} else if (shuffleMode == BatchShuffleMode.ALL_EXCHANGES_HYBRID_SELECTIVE) {
return StreamExchangeMode.HYBRID_SELECTIVE;
}
return StreamExchangeMode.UNDEFINED;
}
|
@Test
void testBatchStreamExchangeMode() {
final Configuration configuration = new Configuration();
assertThat(getBatchStreamExchangeMode(configuration, null))
.isEqualTo(StreamExchangeMode.BATCH);
configuration.set(
ExecutionOptions.BATCH_SHUFFLE_MODE, BatchShuffleMode.ALL_EXCHANGES_BLOCKING);
assertThat(getBatchStreamExchangeMode(configuration, null))
.isEqualTo(StreamExchangeMode.BATCH);
configuration.set(
ExecutionOptions.BATCH_SHUFFLE_MODE, BatchShuffleMode.ALL_EXCHANGES_HYBRID_FULL);
assertThat(getBatchStreamExchangeMode(configuration, null))
.isEqualTo(StreamExchangeMode.HYBRID_FULL);
configuration.set(
ExecutionOptions.BATCH_SHUFFLE_MODE,
BatchShuffleMode.ALL_EXCHANGES_HYBRID_SELECTIVE);
assertThat(getBatchStreamExchangeMode(configuration, null))
.isEqualTo(StreamExchangeMode.HYBRID_SELECTIVE);
configuration.set(
ExecutionOptions.BATCH_SHUFFLE_MODE, BatchShuffleMode.ALL_EXCHANGES_PIPELINED);
assertThat(getBatchStreamExchangeMode(configuration, null))
.isEqualTo(StreamExchangeMode.UNDEFINED);
configuration.set(
ExecutionOptions.BATCH_SHUFFLE_MODE, BatchShuffleMode.ALL_EXCHANGES_PIPELINED);
assertThat(getBatchStreamExchangeMode(configuration, StreamExchangeMode.BATCH))
.isEqualTo(StreamExchangeMode.BATCH);
}
|
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
ReflectionUtils.doWithMethods(bean.getClass(), recurringJobFinderMethodCallback);
return bean;
}
|
@Test
void beansWithMethodsAnnotatedWithRecurringAnnotationContainingPropertyPlaceholdersWillBeResolved() {
new ApplicationContextRunner()
.withBean(RecurringJobPostProcessor.class)
.withBean(JobScheduler.class, () -> jobScheduler)
.withPropertyValues("my-job.id=my-recurring-job")
.withPropertyValues("my-job.cron=0 0/15 * * *")
.withPropertyValues("my-job.zone-id=Asia/Taipei")
.run(context -> {
context.getBean(RecurringJobPostProcessor.class)
.postProcessAfterInitialization(new MyServiceWithRecurringAnnotationContainingPropertyPlaceholder(), "not important");
verify(jobScheduler).scheduleRecurrently(eq("my-recurring-job"), any(JobDetails.class), eq(CronExpression.create("0 0/15 * * *")), eq(ZoneId.of("Asia/Taipei")));
});
}
|
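/*
 * Returns a function that deserializes protobuf payloads into Beam Rows: the
 * descriptor for the last element of the dotted messageName is looked up in the
 * ProtoDomain, each byte[] is parsed into a DynamicMessage, and the message is
 * converted with the schema's to-row function. Payloads that do not match the
 * descriptor surface as a RuntimeException, as the failure test below expects.
 */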
public static SerializableFunction<byte[], Row> getProtoBytesToRowFunction(
String fileDescriptorPath, String messageName) {
ProtoSchemaInfo dynamicProtoDomain = getProtoDomain(fileDescriptorPath, messageName);
ProtoDomain protoDomain = dynamicProtoDomain.getProtoDomain();
@SuppressWarnings("unchecked")
ProtoDynamicMessageSchema<DynamicMessage> protoDynamicMessageSchema =
ProtoDynamicMessageSchema.forDescriptor(protoDomain, messageName);
return new SimpleFunction<byte[], Row>() {
@Override
public Row apply(byte[] input) {
try {
List<String> messageElements = Splitter.on('.').splitToList(messageName);
String messageTypeByName = messageElements.get(messageElements.size() - 1);
final Descriptors.Descriptor descriptor =
protoDomain
.getFileDescriptor(dynamicProtoDomain.getFileName())
.findMessageTypeByName(messageTypeByName);
DynamicMessage dynamicMessage = DynamicMessage.parseFrom(descriptor, input);
SerializableFunction<DynamicMessage, Row> res =
protoDynamicMessageSchema.getToRowFunction();
return res.apply(dynamicMessage);
} catch (InvalidProtocolBufferException e) {
LOG.error("Error parsing to DynamicMessage", e);
throw new RuntimeException(e);
}
}
};
}
|
@Test(expected = java.lang.RuntimeException.class)
public void testProtoBytesToRowFunctionReturnsRowFailure() {
// Create a proto bytes to row function
SerializableFunction<byte[], Row> protoBytesToRowFunction =
ProtoByteUtils.getProtoBytesToRowFunction(DESCRIPTOR_PATH, MESSAGE_NAME);
// Create some test input bytes that are not matching
byte[] inputBytes = new byte[] {1, 2, 3, 4, 5};
// Call the proto bytes to row function that should fail because the input does not match
protoBytesToRowFunction.apply(inputBytes);
}
|
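/*
 * In queueing/pipelined mode each DEL is written into the current transaction
 * and null is returned (results are collected when the pipeline executes);
 * otherwise a one-off CommandBatchService is executed and the per-key responses
 * are summed into the total number of deleted keys.
 */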
@Override
public Long del(byte[]... keys) {
if (isQueueing() || isPipelined()) {
for (byte[] key: keys) {
write(key, LongCodec.INSTANCE, RedisCommands.DEL, key);
}
return null;
}
CommandBatchService es = new CommandBatchService(executorService);
for (byte[] key: keys) {
es.writeAsync(key, StringCodec.INSTANCE, RedisCommands.DEL, key);
}
BatchResult<Long> b = (BatchResult<Long>) es.execute();
return b.getResponses().stream().collect(Collectors.summarizingLong(v -> v)).getSum();
}
|
@Test
public void testDel() {
testInCluster(connection -> {
List<byte[]> keys = new ArrayList<>();
for (int i = 0; i < 10; i++) {
byte[] key = ("test" + i).getBytes();
keys.add(key);
connection.set(key, ("test" + i).getBytes());
}
assertThat(connection.del(keys.toArray(new byte[0][]))).isEqualTo(10);
});
}
|
@Override
public boolean addClass(final Class<?> stepClass) {
if (stepClasses.contains(stepClass)) {
return true;
}
checkNoComponentAnnotations(stepClass);
if (hasCucumberContextConfiguration(stepClass)) {
checkOnlyOneClassHasCucumberContextConfiguration(stepClass);
withCucumberContextConfiguration = stepClass;
}
stepClasses.add(stepClass);
return true;
}
|
@Test
void shouldNotFailWithCucumberContextConfigurationMetaAnnotation() {
final ObjectFactory factory = new SpringFactory();
factory.addClass(WithMetaAnnotation.class);
assertDoesNotThrow(factory::start);
}
|
static BeamZetaSqlCatalog create(
SchemaPlus calciteSchema, JavaTypeFactory typeFactory, AnalyzerOptions options) {
BeamZetaSqlCatalog catalog =
new BeamZetaSqlCatalog(
calciteSchema, new SimpleCatalog(calciteSchema.getName()), typeFactory);
catalog.addFunctionsToCatalog(options);
return catalog;
}
|
@Test
public void rejectsScalarFunctionImplWithUnsupportedReturnType() throws NoSuchMethodException {
JdbcConnection jdbcConnection = createJdbcConnection();
SchemaPlus calciteSchema = jdbcConnection.getCurrentSchemaPlus();
Method method = ReturnsArrayTimeFn.class.getMethod("eval");
calciteSchema.add("return_array", ScalarFunctionImpl.create(method));
thrown.expect(UnsupportedOperationException.class);
thrown.expectMessage("Calcite type TIME not allowed in function return_array");
BeamZetaSqlCatalog.create(
calciteSchema, jdbcConnection.getTypeFactory(), SqlAnalyzer.baseAnalyzerOptions());
}
|
public static Connection fromHostList(String... brokers) {
return fromHostList(Arrays.asList(brokers), getDefault());
}
|
@Test
public void testBrokerList() {
// Create the connection
String broker1 = "127.0.0.1:1234";
String broker2 = "localhost:2345";
Connection connection = ConnectionFactory.fromHostList(broker1, broker2);
// Check that the broker list has the right length and has the same servers
List<String> brokers = ImmutableList.of(broker1, broker2);
Assert.assertEquals(connection.getBrokerList(), brokers);
}
|
@Override
public void execute(ComputationStep.Context context) {
PostMeasuresComputationCheck.Context extensionContext = new ContextImpl();
for (PostMeasuresComputationCheck extension : extensions) {
extension.onCheck(extensionContext);
}
}
|
@Test
public void context_contains_project_uuid_from_analysis_metadata_holder() {
Project project = Project.from(newPrivateProjectDto());
analysisMetadataHolder.setProject(project);
PostMeasuresComputationCheck check = mock(PostMeasuresComputationCheck.class);
newStep(check).execute(new TestComputationStepContext());
ArgumentCaptor<Context> contextArgumentCaptor = ArgumentCaptor.forClass(Context.class);
verify(check).onCheck(contextArgumentCaptor.capture());
assertThat(contextArgumentCaptor.getValue().getProjectUuid()).isEqualTo(project.getUuid());
}
|
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
ByteBuf newlyByteBuf = payload.getByteBuf().readBytes(readLengthFromMeta(columnDef.getColumnMeta(), payload));
try {
return MySQLJsonValueDecoder.decode(newlyByteBuf);
} finally {
newlyByteBuf.release();
}
}
|
@Test
void assertReadJsonValueWithMeta2() {
columnDef.setColumnMeta(2);
when(byteBuf.readUnsignedShortLE()).thenReturn(2);
when(byteBuf.readBytes(2)).thenReturn(jsonValueByteBuf);
assertThat(new MySQLJsonBinlogProtocolValue().read(columnDef, payload), is(EXPECTED_JSON));
}
|
public void init(AtomicReference<PipelineRuleOutputFilterState> activeState) {
reload(activeState, ReloadTrigger.empty());
}
|
@Test
void init(MessageFactory messageFactory) {
final var defaultStreamDestinationFilter = StreamDestinationFilterRuleDTO.builder()
.id("54e3deadbeefdeadbeef0001")
.title("Test 1")
.streamId(defaultStream.getId())
.destinationType("indexer")
.rule(RuleBuilder.builder().build()) // Not needed because we mock the RuleBuilderService
.build();
final var testStreamDestinationFilter = StreamDestinationFilterRuleDTO.builder()
.id("54e3deadbeefdeadbeef0002")
.title("Test 2")
.streamId(testStream.getId())
.destinationType("indexer")
.rule(RuleBuilder.builder().build()) // Not needed because we mock the RuleBuilderService
.build();
final var defaultStreamRuleDao = RuleDao.builder()
.id(defaultStreamDestinationFilter.id())
.title("does-not-matter")
.source("""
rule "%s"
when has_field(field : "source")
then
__remove_from_stream_destination__(stream_id : "%s", destination_type : "indexer");
end
""".formatted(ruleTitle(defaultStreamDestinationFilter), defaultStream.getId()))
.build();
final var testStreamRuleDao = RuleDao.builder()
.id(testStreamDestinationFilter.id())
.title("does-not-matter")
.source("""
rule "%s"
when has_field(field : "source")
then
__remove_from_stream_destination__(stream_id : "%s", destination_type : "indexer");
end
""".formatted(ruleTitle(testStreamDestinationFilter), testStream.getId()))
.build();
doAnswer((Answer<Void>) invocation -> {
final Consumer<StreamDestinationFilterService.GroupByStreamResult> consumer = invocation.getArgument(0);
// The consumer must be called once for each stream group
consumer.accept(new StreamDestinationFilterService.GroupByStreamResult(defaultStream.getId(), Set.of(defaultStreamDestinationFilter)));
consumer.accept(new StreamDestinationFilterService.GroupByStreamResult(testStream.getId(), Set.of(testStreamDestinationFilter)));
return null;
}).when(filterService).forEachEnabledFilterGroupedByStream(any());
when(resolverFactory.create(any(), any())).thenReturn(createResolver(List.of(defaultStreamRuleDao, testStreamRuleDao)));
// Mock each rule source generator call to return the correct source for the given rule
when(ruleBuilderService.generateRuleSource(eq(ruleTitle(defaultStreamDestinationFilter)), any(RuleBuilder.class), anyBoolean()))
.thenReturn(defaultStreamRuleDao.source());
when(ruleBuilderService.generateRuleSource(eq(ruleTitle(testStreamDestinationFilter)), any(RuleBuilder.class), anyBoolean()))
.thenReturn(testStreamRuleDao.source());
final var stateUpdater = new PipelineRuleOutputFilterStateUpdater(
filterService,
(pipelines, destinations, activeStreams) -> new PipelineRuleOutputFilterState(
pipelines,
destinations,
activeStreams,
new MetricRegistry(),
1,
true
),
Map.of("indexer", mock(FilteredMessageOutput.class)),
ruleBuilderService,
resolverFactory,
ruleParser,
new MetricRegistry()
);
final var activeState = new AtomicReference<PipelineRuleOutputFilterState>();
stateUpdater.init(activeState);
assertThat(activeState.get()).isNotNull().satisfies(state -> {
assertThat(state.isEmpty()).isFalse();
assertThat(state.getCurrentPipelines().keySet()).containsExactlyInAnyOrder(defaultStream.getId(), testStream.getId());
assertThat(state.getDestinations()).containsExactlyInAnyOrder("indexer");
assertThat(state.getActiveStreams()).containsExactlyInAnyOrder(defaultStream.getId(), testStream.getId());
        // Assert default stream pipeline
assertThat(state.getCurrentPipelines().get(defaultStream.getId())).isNotNull().satisfies(pipeline -> {
assertThat(pipeline.id()).isEqualTo(defaultStream.getId());
assertThat(pipeline.name()).isEqualTo("Stream Destination Filter: " + defaultStream.getId());
assertThat(pipeline.stages()).hasSize(1);
assertThat(pipeline.stages().first()).satisfies(stage -> {
assertThat(stage.stage()).isEqualTo(0);
assertThat(stage.getRules()).hasSize(1).first().satisfies(rule -> {
assertThat(rule.id()).isEqualTo(defaultStreamDestinationFilter.id());
assertThat(rule.name()).isEqualTo(ruleTitle(defaultStreamDestinationFilter));
});
assertThat(stage.ruleReferences())
.containsExactlyInAnyOrder(ruleTitle(defaultStreamDestinationFilter));
});
});
        // Assert test stream pipeline
assertThat(state.getCurrentPipelines().get(testStream.getId())).isNotNull().satisfies(pipeline -> {
assertThat(pipeline.id()).isEqualTo(testStream.getId());
assertThat(pipeline.name()).isEqualTo("Stream Destination Filter: " + testStream.getId());
assertThat(pipeline.stages()).hasSize(1);
assertThat(pipeline.stages().first()).satisfies(stage -> {
assertThat(stage.stage()).isEqualTo(0);
assertThat(stage.getRules()).hasSize(1).first().satisfies(rule -> {
assertThat(rule.id()).isEqualTo(testStreamDestinationFilter.id());
assertThat(rule.name()).isEqualTo(ruleTitle(testStreamDestinationFilter));
});
assertThat(stage.ruleReferences())
.containsExactlyInAnyOrder(ruleTitle(testStreamDestinationFilter));
});
});
final var message = messageFactory.createMessage("message", "source", Tools.nowUTC());
// The message doesn't have a stream, so we shouldn't get any pipelines
assertThat(state.getPipelinesForMessage(message)).isEmpty();
// Add the default stream to test that we get the default stream pipeline
message.addStream(defaultStream);
assertThat(state.getPipelinesForMessage(message))
.hasSize(1)
.containsExactlyInAnyOrder(state.getCurrentPipelines().get(defaultStream.getId()));
// Add the test stream to test that we now get both pipelines
message.addStream(testStream);
assertThat(state.getPipelinesForMessage(message))
.hasSize(2)
.containsAll(state.getCurrentPipelines().values());
// Remove default stream to test that we now only get the test stream pipeline
message.removeStream(defaultStream);
assertThat(state.getPipelinesForMessage(message))
.hasSize(1)
.containsExactlyInAnyOrder(state.getCurrentPipelines().get(testStream.getId()));
});
}
|
@Override
public String get(String name) {
checkKey(name);
String value = null;
String[] keyParts = splitKey(name);
String ns = registry.getNamespaceURI(keyParts[0]);
if (ns != null) {
try {
XMPProperty prop = xmpData.getProperty(ns, keyParts[1]);
if (prop != null && prop.getOptions().isSimple()) {
value = prop.getValue();
} else if (prop != null && prop.getOptions().isArray()) {
prop = xmpData.getArrayItem(ns, keyParts[1], 1);
value = prop.getValue();
}
// in all other cases, null is returned
} catch (XMPException e) {
// Ignore
}
}
return value;
}
|
@Test
public void get_nullInput_throw() {
String notInitialized = null;
assertThrows(PropertyTypeException.class, () -> {
xmpMeta.get(notInitialized);
});
}
|
@Override
public Operation createPartitionOperation(int partitionId) {
for (int i = 0; i < partitions.length; i++) {
if (partitions[i] == partitionId) {
return new PutAllOperation(name, mapEntries[i], triggerMapLoader);
}
}
throw new IllegalArgumentException("Unknown partitionId " + partitionId + " (" + Arrays.toString(partitions) + ")");
}
|
@Test(expected = IllegalArgumentException.class)
public void testCreatePartitionOperation() {
factory.createPartitionOperation(0);
}
|
public void validate(ExternalIssueReport report, Path reportPath) {
if (report.rules != null && report.issues != null) {
Set<String> ruleIds = validateRules(report.rules, reportPath);
validateIssuesCctFormat(report.issues, ruleIds, reportPath);
} else if (report.rules == null && report.issues != null) {
String documentationLink = documentationLinkGenerator.getDocumentationLink(DOCUMENTATION_SUFFIX);
LOGGER.warn("External issues were imported with a deprecated format which will be removed soon. " +
"Please switch to the newest format to fully benefit from Clean Code: {}", documentationLink);
validateIssuesDeprecatedFormat(report.issues, reportPath);
} else {
throw new IllegalStateException(String.format("Failed to parse report '%s': invalid report detected.", reportPath));
}
}
|
@Test
public void validate_whenIssueRuleIdNotPresentInReport_shouldThrowException() throws IOException {
ExternalIssueReport report = read(REPORTS_LOCATION);
report.issues[0].ruleId = null;
assertThatThrownBy(() -> validator.validate(report, reportPath))
.isInstanceOf(IllegalStateException.class)
.hasMessage("Failed to parse report 'report-path': missing mandatory field 'ruleId'.");
}
|
@GetMapping(value = "/{appId}/{clusterName}/{namespace:.+}")
public ApolloConfig queryConfig(@PathVariable String appId, @PathVariable String clusterName,
@PathVariable String namespace,
@RequestParam(value = "dataCenter", required = false) String dataCenter,
@RequestParam(value = "releaseKey", defaultValue = "-1") String clientSideReleaseKey,
@RequestParam(value = "ip", required = false) String clientIp,
@RequestParam(value = "label", required = false) String clientLabel,
@RequestParam(value = "messages", required = false) String messagesAsString,
HttpServletRequest request, HttpServletResponse response) throws IOException {
String originalNamespace = namespace;
//strip out .properties suffix
namespace = namespaceUtil.filterNamespaceName(namespace);
//fix the character case issue, such as FX.apollo <-> fx.apollo
namespace = namespaceUtil.normalizeNamespace(appId, namespace);
if (Strings.isNullOrEmpty(clientIp)) {
clientIp = WebUtils.tryToGetClientIp(request);
}
ApolloNotificationMessages clientMessages = transformMessages(messagesAsString);
List<Release> releases = Lists.newLinkedList();
String appClusterNameLoaded = clusterName;
if (!ConfigConsts.NO_APPID_PLACEHOLDER.equalsIgnoreCase(appId)) {
Release currentAppRelease = configService.loadConfig(appId, clientIp, clientLabel, appId, clusterName, namespace,
dataCenter, clientMessages);
if (currentAppRelease != null) {
releases.add(currentAppRelease);
      //we have a cluster search process, so the cluster name might be overridden
appClusterNameLoaded = currentAppRelease.getClusterName();
}
}
  //if the namespace does not belong to this appId, check whether there is a public configuration
if (!namespaceBelongsToAppId(appId, namespace)) {
Release publicRelease = this.findPublicConfig(appId, clientIp, clientLabel, clusterName, namespace,
dataCenter, clientMessages);
if (Objects.nonNull(publicRelease)) {
releases.add(publicRelease);
}
}
if (releases.isEmpty()) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
String.format(
"Could not load configurations with appId: %s, clusterName: %s, namespace: %s",
appId, clusterName, originalNamespace));
Tracer.logEvent("Apollo.Config.NotFound",
assembleKey(appId, clusterName, originalNamespace, dataCenter));
return null;
}
auditReleases(appId, clusterName, dataCenter, clientIp, releases);
String mergedReleaseKey = releases.stream().map(Release::getReleaseKey)
.collect(Collectors.joining(ConfigConsts.CLUSTER_NAMESPACE_SEPARATOR));
if (mergedReleaseKey.equals(clientSideReleaseKey)) {
    // Client side configuration is the same as server side, return 304
response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
Tracer.logEvent("Apollo.Config.NotModified",
assembleKey(appId, appClusterNameLoaded, originalNamespace, dataCenter));
return null;
}
ApolloConfig apolloConfig = new ApolloConfig(appId, appClusterNameLoaded, originalNamespace,
mergedReleaseKey);
apolloConfig.setConfigurations(mergeReleaseConfigurations(releases));
Tracer.logEvent("Apollo.Config.Found", assembleKey(appId, appClusterNameLoaded,
originalNamespace, dataCenter));
return apolloConfig;
}
|
@Test
public void testQueryConfigFile() throws Exception {
String someClientSideReleaseKey = "1";
String someServerSideNewReleaseKey = "2";
HttpServletResponse someResponse = mock(HttpServletResponse.class);
String someNamespaceName = String.format("%s.%s", defaultClusterName, "properties");
when(configService.loadConfig(someAppId, someClientIp, someClientLabel, someAppId, someClusterName, defaultNamespaceName,
someDataCenter, someNotificationMessages)).thenReturn(someRelease);
when(someRelease.getReleaseKey()).thenReturn(someServerSideNewReleaseKey);
when(namespaceUtil.filterNamespaceName(someNamespaceName)).thenReturn(defaultNamespaceName);
when(namespaceUtil.normalizeNamespace(someAppId, defaultNamespaceName)).thenReturn(defaultNamespaceName);
ApolloConfig result = configController.queryConfig(someAppId, someClusterName,
someNamespaceName, someDataCenter, someClientSideReleaseKey,
someClientIp, someClientLabel, someMessagesAsString, someRequest, someResponse);
verify(configService, times(1)).loadConfig(someAppId, someClientIp, someClientLabel, someAppId, someClusterName,
defaultNamespaceName, someDataCenter, someNotificationMessages);
assertEquals(someAppId, result.getAppId());
assertEquals(someClusterName, result.getCluster());
assertEquals(someNamespaceName, result.getNamespaceName());
assertEquals(someServerSideNewReleaseKey, result.getReleaseKey());
}
|
public void verifyAndValidate(final String jwt) {
try {
Jws<Claims> claimsJws = Jwts.parser()
.verifyWith(tokenConfigurationParameter.getPublicKey())
.build()
.parseSignedClaims(jwt);
// Log the claims for debugging purposes
Claims claims = claimsJws.getPayload();
log.info("Token claims: {}", claims);
// Additional checks (e.g., expiration, issuer, etc.)
if (claims.getExpiration().before(new Date())) {
throw new JwtException("Token has expired");
}
log.info("Token is valid");
} catch (ExpiredJwtException e) {
log.error("Token has expired", e);
throw new ResponseStatusException(HttpStatus.UNAUTHORIZED, "Token has expired", e);
} catch (JwtException e) {
log.error("Invalid JWT token", e);
throw new ResponseStatusException(HttpStatus.UNAUTHORIZED, "Invalid JWT token", e);
} catch (Exception e) {
log.error("Error validating token", e);
throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Error validating token", e);
}
}
|
@Test
void givenValidToken_whenVerifyAndValidate_thenLogTokenIsValid() {
// Given
String token = Jwts.builder()
.claim("user_id", "12345")
.issuedAt(new Date())
.expiration(new Date(System.currentTimeMillis() + 86400000L)) // 1 day expiration
.signWith(keyPair.getPrivate())
.compact();
// When & Then
tokenService.verifyAndValidate(token);
}
|
public static boolean canDrop(
FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
    Objects.requireNonNull(pred, "pred cannot be null");
    Objects.requireNonNull(columns, "columns cannot be null");
return pred.accept(new DictionaryFilter(columns, dictionaries));
}
|
@Test
public void testLtMissingColumn() throws Exception {
BinaryColumn b = binaryColumn("missing_column");
assertTrue(
"Should drop block for any non-null query",
canDrop(lt(b, Binary.fromString("any")), ccmd, dictionaries));
}
|
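/*
 * LocalTime.toNanoOfDay() is divided by 1_000_000 to obtain milliseconds since
 * midnight, which is the value java.sql.Time is constructed from here.
 * E.g. "10:00:00" -> 36_000_000 ms, matching new Time(36000000) in the test below.
 */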
public static Time parseTime(final String str) {
try {
return new Time(LocalTime.parse(str).toNanoOfDay() / 1000000);
} catch (DateTimeParseException e) {
throw new KsqlException("Failed to parse time '" + str
+ "': " + e.getMessage()
+ TIME_HELP_MESSAGE,
e
);
}
}
|
@Test
public void shouldParseTime() {
assertThat(SqlTimeTypes.parseTime("10:00:00"), is(new Time(36000000)));
assertThat(SqlTimeTypes.parseTime("10:00"), is(new Time(36000000)));
assertThat(SqlTimeTypes.parseTime("10:00:00.001"), is(new Time(36000001)));
}
|
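/*
 * Converts plugin-supplied JSON into a TaskConfig: every top-level key becomes
 * a property that is required by default, and the optional per-key fields
 * 'default-value', 'display-name', 'display-order' (a numeric String), 'secure'
 * and 'required' are type-checked. All violations are collected and reported in
 * a single RuntimeException, which the outer catch re-wraps with logging.
 */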
@Override
public TaskConfig convertJsonToTaskConfig(String configJson) {
final TaskConfig taskConfig = new TaskConfig();
ArrayList<String> exceptions = new ArrayList<>();
try {
Map<String, Object> configMap = (Map) GSON.fromJson(configJson, Object.class);
if (configMap.isEmpty()) {
exceptions.add("The Json for Task Config cannot be empty");
}
for (Map.Entry<String, Object> entry : configMap.entrySet()) {
TaskConfigProperty property = new TaskConfigProperty(entry.getKey(), null);
property.with(Property.REQUIRED, true);
Map propertyValue = (Map) entry.getValue();
if (propertyValue != null) {
if (propertyValue.containsKey("default-value")) {
if (!(propertyValue.get("default-value") instanceof String)) {
exceptions.add(String.format("Key: '%s' - The Json for Task Config should contain a not-null 'default-value' of type String", entry.getKey()));
} else {
property.withDefault((String) propertyValue.get("default-value"));
}
}
if (propertyValue.containsKey("display-name")) {
if (!(propertyValue.get("display-name") instanceof String)) {
exceptions.add(String.format("Key: '%s' - 'display-name' should be of type String", entry.getKey()));
} else {
property.with(Property.DISPLAY_NAME, (String) propertyValue.get("display-name"));
}
}
if (propertyValue.containsKey("display-order")) {
if (!(propertyValue.get("display-order") instanceof String && StringUtil.isInteger((String) propertyValue.get("display-order")))) {
exceptions.add(String.format("Key: '%s' - 'display-order' should be a String containing a numerical value", entry.getKey()));
} else {
property.with(Property.DISPLAY_ORDER, Integer.parseInt((String) propertyValue.get("display-order")));
}
}
if (propertyValue.containsKey("secure")) {
if (!(propertyValue.get("secure") instanceof Boolean)) {
exceptions.add(String.format("Key: '%s' - The Json for Task Config should contain a 'secure' field of type Boolean", entry.getKey()));
} else {
property.with(Property.SECURE, (Boolean) propertyValue.get("secure"));
}
}
if (propertyValue.containsKey("required")) {
if (!(propertyValue.get("required") instanceof Boolean)) {
exceptions.add(String.format("Key: '%s' - The Json for Task Config should contain a 'required' field of type Boolean", entry.getKey()));
} else {
property.with(Property.REQUIRED, (Boolean) propertyValue.get("required"));
}
}
}
taskConfig.add(property);
}
if (!exceptions.isEmpty()) {
throw new RuntimeException(StringUtils.join(exceptions, ", "));
}
return taskConfig;
} catch (Exception e) {
LOGGER.error("Error occurred while converting the Json to Task Config. Error: {}. The Json received was '{}'.", e.getMessage(), configJson);
throw new RuntimeException(String.format("Error occurred while converting the Json to Task Config. Error: %s.", e.getMessage()));
}
}
|
@Test
public void shouldConvertTaskConfigJsonToTaskConfig() {
String json = "{\"URL\":{\"default-value\":\"\",\"secure\":false,\"required\":true,\"display-name\":\"Url\",\"display-order\":\"0\"}," +
"\"USER\":{\"default-value\":\"foo\",\"secure\":true,\"required\":false,\"display-order\":\"1\"}," +
"\"PASSWORD\":{}," +
"\"FOO\":null" +
"}";
TaskConfig config = new JsonBasedTaskExtensionHandler_V1().convertJsonToTaskConfig(json);
Property url = config.get("URL");
assertThat(url.getOption(Property.REQUIRED), is(true));
assertThat(url.getOption(Property.SECURE), is(false));
assertThat(url.getOption(Property.DISPLAY_NAME), is("Url"));
assertThat(url.getOption(Property.DISPLAY_ORDER), is(0));
Property user = config.get("USER");
assertThat(user.getOption(Property.REQUIRED), is(false));
assertThat(user.getOption(Property.SECURE), is(true));
assertThat(user.getOption(Property.DISPLAY_NAME), is("USER"));
assertThat(user.getOption(Property.DISPLAY_ORDER), is(1));
Property password = config.get("PASSWORD");
assertThat(password.getOption(Property.REQUIRED), is(true));
assertThat(password.getOption(Property.SECURE), is(false));
assertThat(password.getOption(Property.DISPLAY_NAME), is("PASSWORD"));
assertThat(password.getOption(Property.DISPLAY_ORDER), is(0));
Property foo = config.get("FOO");
assertThat(foo.getOption(Property.REQUIRED), is(true));
assertThat(foo.getOption(Property.SECURE), is(false));
assertThat(foo.getOption(Property.DISPLAY_NAME), is("FOO"));
assertThat(foo.getOption(Property.DISPLAY_ORDER), is(0));
}
|
public static String convertToHtml(String input) {
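        // Escape raw HTML first so user-supplied markup cannot be injected, then apply the Markdown conversion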
return new Markdown().convert(StringEscapeUtils.escapeHtml4(input));
}
|
@Test
public void shouldDecorateUnorderedList() {
assertThat(Markdown.convertToHtml(" * one\r* two\r\n* three\n * \n *five"))
.isEqualTo("<ul><li>one</li>\r<li>two</li>\r\n<li>three</li>\n<li> </li>\n</ul> *five");
assertThat(Markdown.convertToHtml(" * one\r* two")).isEqualTo("<ul><li>one</li>\r<li>two</li></ul>");
assertThat(Markdown.convertToHtml("* \r*")).isEqualTo("<ul><li> </li>\r</ul>*");
}
|
public KiePMMLDroolsType declareType(DerivedField derivedField) {
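        // Derive a sanitized generated type name from the field name and record the original-to-generated type mapping for later lookups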
String generatedType = getSanitizedClassName(derivedField.getName().toUpperCase());
        String fieldName = derivedField.getName();
String fieldType = derivedField.getDataType().value();
fieldTypeMap.put(fieldName, new KiePMMLOriginalTypeGeneratedType(fieldType, generatedType));
return new KiePMMLDroolsType(generatedType, DATA_TYPE.byName(fieldType).getMappedClass().getSimpleName());
}
|
@Test
void declareType() {
DerivedField derivedField = getDerivedField("FieldName");
KiePMMLDroolsType retrieved = fieldASTFactory.declareType(derivedField);
commonValidateKiePMMLDroolsType(retrieved, derivedField);
}
|
    public QueryResult queryMessage(String topic, String key, int maxNum, long begin,
        long end) throws MQClientException, InterruptedException {
return queryMessage(topic, key, maxNum, begin, end, false);
}
|
@Test
public void assertQueryMessage() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
doAnswer(invocation -> {
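            // Stub the async broker call: capture the InvokeCallback and feed it a successful QueryMessage response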
InvokeCallback callback = invocation.getArgument(3);
QueryMessageResponseHeader responseHeader = new QueryMessageResponseHeader();
responseHeader.setIndexLastUpdatePhyoffset(1L);
responseHeader.setIndexLastUpdateTimestamp(System.currentTimeMillis());
RemotingCommand response = mock(RemotingCommand.class);
when(response.decodeCommandCustomHeader(QueryMessageResponseHeader.class)).thenReturn(responseHeader);
when(response.getBody()).thenReturn(getMessageResult());
when(response.getCode()).thenReturn(ResponseCode.SUCCESS);
callback.operationSucceed(response);
return null;
}).when(mQClientAPIImpl).queryMessage(anyString(), any(), anyLong(), any(InvokeCallback.class), any());
QueryResult actual = mqAdminImpl.queryMessage(defaultTopic, "keys", 100, 1L, 50L, false);
assertNotNull(actual);
assertEquals(1, actual.getMessageList().size());
assertEquals(defaultTopic, actual.getMessageList().get(0).getTopic());
}
|