focal_method (string, lengths 13–60.9k) | test_case (string, lengths 25–109k)
---|---
@Override
public boolean handleResult(int returncode, GoPublisher goPublisher) {
if (returncode == HttpURLConnection.HTTP_NOT_FOUND) {
deleteQuietly(checksumFile);
goPublisher.taggedConsumeLineWithPrefix(GoPublisher.ERR, "[WARN] The md5checksum property file was not found on the server. Hence, Go can not verify the integrity of the artifacts.");
return true;
}
if (returncode == HttpURLConnection.HTTP_NOT_MODIFIED) {
LOG.info("[Agent Fetch Artifact] Not downloading checksum file as it has not changed");
return true;
}
if (returncode == HttpURLConnection.HTTP_OK) {
LOG.info("[Agent Fetch Artifact] Saved checksum property file [{}]", checksumFile);
return true;
}
return returncode < HttpURLConnection.HTTP_BAD_REQUEST;
}
|
@Test
public void shouldHandleResultIfHttpCodeSaysFileNotFound() {
StubGoPublisher goPublisher = new StubGoPublisher();
assertThat(checksumFileHandler.handleResult(HttpServletResponse.SC_NOT_FOUND, goPublisher), is(true));
assertThat(goPublisher.getMessage(), containsString("[WARN] The md5checksum property file was not found on the server. Hence, Go can not verify the integrity of the artifacts."));
}
|
public static Set<String> extractUniquePrefixes(final Iterator<String> iterator, final String delimiter) {
Set<String> uniquePrefixes = new HashSet<>();
try {
iterator.forEachRemaining(element -> {
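// indexOf returns -1 when the delimiter is absent; substring(0, -1) then throws a
// StringIndexOutOfBoundsException, which the catch below rewraps as a ConfigParseException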
String prefix = element.substring(0, element.indexOf(delimiter));
uniquePrefixes.add(prefix);
});
return uniquePrefixes;
} catch (IndexOutOfBoundsException e) {
throw new ConfigParseException(String.format("Unable to extract prefix with delimiter: %s", delimiter), e);
}
}
|
@Test
public void testExtractUniquePrefixes() {
Iterator<String> iterator = List.of("backendA.circuitBreaker.test1", "backendA.circuitBreaker.test2", "backendB.circuitBreaker.test3").iterator();
Set<String> prefixes = StringParseUtil.extractUniquePrefixes(iterator, ".");
Assertions.assertThat(prefixes).containsExactlyInAnyOrder("backendA", "backendB");
}
|
@Override
public Network network() {
return network;
}
|
@Test
public void getNetwork() {
AddressParser parser = AddressParser.getDefault();
Network mainNet = parser.parseAddress("17kzeh4N8g49GFvdDzSf8PjaPfyoD1MndL").network();
assertEquals(MAINNET, mainNet);
Network testNet = parser.parseAddress("n4eA2nbYqErp7H6jebchxAN59DmNpksexv").network();
assertEquals(TESTNET, testNet);
}
|
public static boolean isPrimitiveNumber(Class<?> clazz) {
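// any primitive other than boolean counts as numeric here; note this also admits char and void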
return clazz.isPrimitive() && !clazz.equals(boolean.class);
}
|
@Test
public void testIsPrimitiveNumber() {
assertTrue(FluxBuilder.isPrimitiveNumber(int.class));
assertFalse(FluxBuilder.isPrimitiveNumber(boolean.class));
assertFalse(FluxBuilder.isPrimitiveNumber(String.class));
}
|
public void write(final ConsumerRecord<byte[], byte[]> record) throws IOException {
if (!writable) {
throw new IOException("Write permission denied.");
}
final File dirty = dirty(file);
final File tmp = tmp(file);
// first write to the dirty copy
appendRecordToFile(record, dirty, filesystem);
// atomically rename the dirty copy to the "live" copy while copying the live copy to
// the "dirty" copy via a temporary hard link
Files.createLink(tmp.toPath(), file.toPath());
Files.move(
dirty.toPath(),
file.toPath(),
StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.ATOMIC_MOVE
);
Files.move(tmp.toPath(), dirty.toPath());
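// at this point the dirty copy holds the previous live content and is one record behind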
// keep the dirty copy in sync with the live copy, which now has the new record
appendRecordToFile(record, dirty, filesystem);
}
|
@Test
public void shouldWriteRecordWithNewLineCharacterInCommand() throws IOException {
// Given
final String commandId = buildKey("stream1");
final String command =
"{\"statement\":\"CREATE STREAM stream1 (id INT, f\\n1 INT) WITH (kafka_topic='topic1')\"}";
// When
replayFile.write(newStreamRecord(commandId, command));
// Then
final List<String> commands = Files.readAllLines(internalReplayFile.toPath());
assertThat(commands.size(), is(1));
assertThat(commands.get(0), is(
"\"stream/stream1/create\"" + KEY_VALUE_SEPARATOR
+ "{\"statement\":"
+ "\"CREATE STREAM stream1 (id INT, f\\n1 INT) WITH (kafka_topic='topic1')\"}"
));
}
|
@Override
public List<Class<? extends Event>> subscribeTypes() {
List<Class<? extends Event>> result = new LinkedList<>();
result.add(MetadataEvent.InstanceMetadataEvent.class);
result.add(MetadataEvent.ServiceMetadataEvent.class);
result.add(ClientEvent.ClientDisconnectEvent.class);
return result;
}
|
@Test
void testSubscribeTypes() {
List<Class<? extends Event>> classes = namingMetadataManager.subscribeTypes();
assertEquals(3, classes.size());
}
|
@Override
public ImportResult importItem(
UUID jobId,
IdempotentImportExecutor executor,
TokensAndUrlAuthData authData,
SocialActivityContainerResource resource)
throws Exception {
if (resource == null) {
// Nothing to import
return ImportResult.OK;
}
monitor.debug(
() -> String.format("Number of Posts: %d", resource.getCounts().get("activitiesCount")));
final LongAdder totalImportedFilesSizes = new LongAdder();
// Import social activity
for (SocialActivityModel activity : resource.getActivities()) {
if (activity.getType() == SocialActivityType.NOTE
|| activity.getType() == SocialActivityType.POST) {
executor.importAndSwallowIOExceptions(
activity,
currentActivity -> {
ItemImportResult<String> insertActivityResult = insertActivity(activity, authData);
if (insertActivityResult != null && insertActivityResult.hasBytes()) {
totalImportedFilesSizes.add(insertActivityResult.getBytes());
}
return insertActivityResult;
});
}
}
return ImportResult.OK.copyWithBytes(totalImportedFilesSizes.longValue());
}
|
@Test
public void testImportSingleActivity() throws Exception {
String postContent = "activityContent";
SocialActivityModel activity =
new SocialActivityModel(
"activityId",
Instant.now(),
SocialActivityType.POST,
Collections.emptyList(),
new SocialActivityLocation("test", 1.1, 2.2),
"activityTitle",
postContent,
"activityUrl");
SocialActivityContainerResource resource =
new SocialActivityContainerResource(
"123",
new SocialActivityActor("321", "John Doe", "url"),
Collections.singletonList(activity));
Call call = mock(Call.class);
Response dummySuccessfulResponse =
new Response.Builder()
.code(200)
.request(new Request.Builder().url("http://example.com").build())
.protocol(Protocol.HTTP_1_1)
.message("all good!")
.body(ResponseBody.create(MediaType.parse("text/xml"), "<a>ok!</a>"))
.build();
when(call.execute()).thenReturn(dummySuccessfulResponse);
when(client.newCall(any())).thenReturn(call);
DaybookPostsImporter importer =
new DaybookPostsImporter(
monitor, client, new ObjectMapper(), "http://example.com", "export-service");
importer.importItem(UUID.randomUUID(), executor, authData, resource);
ArgumentCaptor<Request> requestCaptor = ArgumentCaptor.forClass(Request.class);
verify(client, times(1)).newCall(requestCaptor.capture());
RequestBody untypedBody = requestCaptor.getValue().body();
assertTrue(untypedBody instanceof FormBody);
FormBody actual = (FormBody) untypedBody;
assertEquals(
"DaybookPostsImporter changed the order of fields in the body.", "content", actual.name(2));
assertEquals(postContent, actual.value(2));
}
|
@Override
public long transferTo(long position, long count, WritableByteChannel target) throws IOException {
checkNotNull(target);
Util.checkNotNegative(position, "position");
Util.checkNotNegative(count, "count");
checkOpen();
checkReadable();
long transferred = 0; // will definitely either be assigned or an exception will be thrown
// no need to synchronize here; this method does not make use of the channel's position
boolean completed = false;
try {
if (!beginBlocking()) {
return 0; // AsynchronousCloseException will be thrown
}
file.readLock().lockInterruptibly();
try {
transferred = file.transferTo(position, count, target);
file.setLastAccessTime(fileSystemState.now());
completed = true;
} finally {
file.readLock().unlock();
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} finally {
endBlocking(completed);
}
return transferred;
}
|
@Test
public void testTransferTo() throws IOException {
RegularFile file = regularFile(10);
FileChannel channel = channel(file, READ);
ByteBufferChannel writeChannel = new ByteBufferChannel(buffer("1234567890"));
assertEquals(10, channel.transferTo(0, 100, writeChannel));
assertEquals(0, channel.position());
}
|
@Subscribe
public void onChatMessage(ChatMessage chatMessage)
{
if (chatMessage.getType() != ChatMessageType.TRADE
&& chatMessage.getType() != ChatMessageType.GAMEMESSAGE
&& chatMessage.getType() != ChatMessageType.SPAM
&& chatMessage.getType() != ChatMessageType.FRIENDSCHATNOTIFICATION)
{
return;
}
String message = chatMessage.getMessage();
Matcher matcher = KILLCOUNT_PATTERN.matcher(message);
if (matcher.find())
{
final String boss = matcher.group("boss");
final int kc = Integer.parseInt(matcher.group("kc"));
final String pre = matcher.group("pre");
final String post = matcher.group("post");
if (Strings.isNullOrEmpty(pre) && Strings.isNullOrEmpty(post))
{
unsetKc(boss);
return;
}
String renamedBoss = KILLCOUNT_RENAMES
.getOrDefault(boss, boss)
// The config service doesn't support keys with colons in them
.replace(":", "");
if (boss != renamedBoss)
{
// Unset old TOB kc
unsetKc(boss);
unsetPb(boss);
unsetKc(boss.replace(":", "."));
unsetPb(boss.replace(":", "."));
// Unset old story mode
unsetKc("Theatre of Blood Story Mode");
unsetPb("Theatre of Blood Story Mode");
}
setKc(renamedBoss, kc);
// We either already have the pb, or need to remember the boss for the upcoming pb
if (lastPb > -1)
{
log.debug("Got out-of-order personal best for {}: {}", renamedBoss, lastPb);
if (renamedBoss.contains("Theatre of Blood"))
{
// TOB team size isn't sent in the kill message, but can be computed from varbits
int tobTeamSize = tobTeamSize();
lastTeamSize = tobTeamSize == 1 ? "Solo" : (tobTeamSize + " players");
}
else if (renamedBoss.contains("Tombs of Amascut"))
{
// TOA team size isn't sent in the kill message, but can be computed from varbits
int toaTeamSize = toaTeamSize();
lastTeamSize = toaTeamSize == 1 ? "Solo" : (toaTeamSize + " players");
}
final double pb = getPb(renamedBoss);
// If a raid with a team size, only update the pb if it is lower than the existing pb
// so that the pb is the overall lowest of any team size
if (lastTeamSize == null || pb == 0 || lastPb < pb)
{
log.debug("Setting overall pb (old: {})", pb);
setPb(renamedBoss, lastPb);
}
if (lastTeamSize != null)
{
log.debug("Setting team size pb: {}", lastTeamSize);
setPb(renamedBoss + " " + lastTeamSize, lastPb);
}
lastPb = -1;
lastTeamSize = null;
}
else
{
lastBossKill = renamedBoss;
lastBossTime = client.getTickCount();
}
return;
}
matcher = DUEL_ARENA_WINS_PATTERN.matcher(message);
if (matcher.find())
{
final int oldWins = getKc("Duel Arena Wins");
final int wins = matcher.group(2).equals("one") ? 1 :
Integer.parseInt(matcher.group(2).replace(",", ""));
final String result = matcher.group(1);
int winningStreak = getKc("Duel Arena Win Streak");
int losingStreak = getKc("Duel Arena Lose Streak");
if (result.equals("won") && wins > oldWins)
{
losingStreak = 0;
winningStreak += 1;
}
else if (result.equals("were defeated"))
{
losingStreak += 1;
winningStreak = 0;
}
else
{
log.warn("unrecognized duel streak chat message: {}", message);
}
setKc("Duel Arena Wins", wins);
setKc("Duel Arena Win Streak", winningStreak);
setKc("Duel Arena Lose Streak", losingStreak);
}
matcher = DUEL_ARENA_LOSSES_PATTERN.matcher(message);
if (matcher.find())
{
int losses = matcher.group(1).equals("one") ? 1 :
Integer.parseInt(matcher.group(1).replace(",", ""));
setKc("Duel Arena Losses", losses);
}
matcher = KILL_DURATION_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = NEW_PB_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = RAIDS_PB_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = RAIDS_DURATION_PATTERN.matcher(message);
if (matcher.find())
{
matchPb(matcher);
}
matcher = HS_PB_PATTERN.matcher(message);
if (matcher.find())
{
int floor = Integer.parseInt(matcher.group("floor"));
String floortime = matcher.group("floortime");
String floorpb = matcher.group("floorpb");
String otime = matcher.group("otime");
String opb = matcher.group("opb");
String pb = MoreObjects.firstNonNull(floorpb, floortime);
setPb("Hallowed Sepulchre Floor " + floor, timeStringToSeconds(pb));
if (otime != null)
{
pb = MoreObjects.firstNonNull(opb, otime);
setPb("Hallowed Sepulchre", timeStringToSeconds(pb));
}
}
matcher = HS_KC_FLOOR_PATTERN.matcher(message);
if (matcher.find())
{
int floor = Integer.parseInt(matcher.group(1));
int kc = Integer.parseInt(matcher.group(2).replaceAll(",", ""));
setKc("Hallowed Sepulchre Floor " + floor, kc);
}
matcher = HS_KC_GHC_PATTERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
setKc("Hallowed Sepulchre", kc);
}
matcher = HUNTER_RUMOUR_KC_PATTERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
setKc("Hunter Rumours", kc);
}
if (lastBossKill != null && lastBossTime != client.getTickCount())
{
lastBossKill = null;
lastBossTime = -1;
}
matcher = COLLECTION_LOG_ITEM_PATTERN.matcher(message);
if (matcher.find())
{
String item = matcher.group(1);
int petId = findPet(item);
if (petId != -1)
{
final List<Integer> petList = new ArrayList<>(getPetList());
if (!petList.contains(petId))
{
log.debug("New pet added: {}/{}", item, petId);
petList.add(petId);
setPetList(petList);
}
}
}
matcher = GUARDIANS_OF_THE_RIFT_PATTERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1));
setKc("Guardians of the Rift", kc);
}
}
|
@Test
public void testHsOverallPb_NoPb()
{
ChatMessage chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Floor 5 time: <col=ff0000>3:26</col> (new personal best)<br>Overall time: <col=ff0000>9:17</col>. Personal best: 9:15<br>", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre floor 5", 3 * 60 + 26.0);
verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre", 9 * 60 + 15.0);
// Precise times
chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Floor 5 time: <col=ff0000>3:26.20</col> (new personal best)<br>Overall time: <col=ff0000>9:17.00</col>. Personal best: 9:15.40<br>", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre floor 5", 3 * 60 + 26.2);
verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre", 9 * 60 + 15.4);
}
|
public String getColonSeparatedKey() {
StringBuilder serviceNameBuilder = new StringBuilder();
serviceNameBuilder.append(this.getServiceInterface());
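// version and group each contribute a ':' separator even when absent, e.g. "interfaceName::" (see tests)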
append(serviceNameBuilder, VERSION_KEY, false);
append(serviceNameBuilder, GROUP_KEY, false);
return serviceNameBuilder.toString();
}
|
@Test
void testGetColonSeparatedKey() {
URL url1 = URL.valueOf(
"10.20.130.230:20880/context/path?interface=org.apache.dubbo.test.interfaceName&group=group&version=1.0.0");
assertURLStrDecoder(url1);
Assertions.assertEquals("org.apache.dubbo.test.interfaceName:1.0.0:group", url1.getColonSeparatedKey());
URL url2 = URL.valueOf(
"10.20.130.230:20880/context/path?interface=org.apache.dubbo.test.interfaceName&version=1.0.0");
assertURLStrDecoder(url2);
Assertions.assertEquals("org.apache.dubbo.test.interfaceName:1.0.0:", url2.getColonSeparatedKey());
URL url3 = URL.valueOf(
"10.20.130.230:20880/context/path?interface=org.apache.dubbo.test.interfaceName&group=group");
assertURLStrDecoder(url3);
Assertions.assertEquals("org.apache.dubbo.test.interfaceName::group", url3.getColonSeparatedKey());
URL url4 = URL.valueOf("10.20.130.230:20880/context/path?interface=org.apache.dubbo.test.interfaceName");
assertURLStrDecoder(url4);
Assertions.assertEquals("org.apache.dubbo.test.interfaceName::", url4.getColonSeparatedKey());
URL url5 = URL.valueOf("10.20.130.230:20880/org.apache.dubbo.test.interfaceName");
assertURLStrDecoder(url5);
Assertions.assertEquals("org.apache.dubbo.test.interfaceName::", url5.getColonSeparatedKey());
URL url6 = URL.valueOf(
"10.20.130.230:20880/org.apache.dubbo.test.interfaceName?interface=org.apache.dubbo.test.interfaceName1");
assertURLStrDecoder(url6);
Assertions.assertEquals("org.apache.dubbo.test.interfaceName1::", url6.getColonSeparatedKey());
}
|
public static ResourceModel processResource(final Class<?> resourceClass)
{
return processResource(resourceClass, null);
}
|
@Test(expectedExceptions = NullPointerException.class, description = "hard fails with NPE on missing criteria parameter")
public void failsOnMissingBatchFinderMethodBatchParamParameter() {
@RestLiCollection(name = "batchFinderWithMissingBatchParam")
class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord>
{
@BatchFinder(value = "batchFinderWithMissingBatchParam", batchParam = "criteria")
public List<EmptyRecord> batchFinderWithMissingBatchParam() {
return Collections.emptyList();
}
}
RestLiAnnotationReader.processResource(LocalClass.class);
Assert.fail("#validateBatchFinderMethod should fail throwing a ResourceConfigException");
}
|
@Override
public Optional<Track<T>> clean(Track<T> track) {
TreeSet<Point<T>> points = new TreeSet<>(track.points());
Optional<Point<T>> firstNonNull = firstPointWithAltitude(points);
if (!firstNonNull.isPresent()) {
return Optional.empty();
}
SortedSet<Point<T>> pointsMissingAltitude = points.headSet(firstNonNull.get());
TreeSet<Point<T>> fixedPoints = extrapolateAltitudes(pointsMissingAltitude, firstNonNull.get());
pointsMissingAltitude.clear();
points.addAll(fixedPoints);
Optional<Point<T>> gapStart;
Optional<Point<T>> gapEnd = firstNonNull;
while (gapEnd.isPresent()) {
gapStart = firstPointWithoutAltitude(points.tailSet(gapEnd.get()));
if (!gapStart.isPresent()) {
break;
}
gapEnd = firstPointWithAltitude(points.tailSet(gapStart.get()));
if (!gapEnd.isPresent()) {
pointsMissingAltitude = points.tailSet(gapStart.get());
fixedPoints = extrapolateAltitudes(pointsMissingAltitude, points.lower(gapStart.get()));
pointsMissingAltitude.clear();
points.addAll(fixedPoints);
// extrapolateAltitudes(points.tailSet(gapStart.get()), points.lower(gapStart.get()));
} else {
pointsMissingAltitude = points.subSet(gapStart.get(), gapEnd.get());
fixedPoints = interpolateAltitudes(pointsMissingAltitude, points.lower(gapStart.get()), gapEnd.get());
pointsMissingAltitude.clear();
points.addAll(fixedPoints);
// interpolateAltitudes(points.subSet(gapStart.get(), gapEnd.get()), points.lower(gapStart.get()), gapEnd.get());
}
}
return Optional.of(Track.of(points));
}
|
@Test
public void testFillingFinalAltitudes() {
Track<NoRawData> testTrack = trackWithNoFinalAltitudes();
Track<NoRawData> cleanedTrack = (new FillMissingAltitudes<NoRawData>()).clean(testTrack).get();
ArrayList<Point<NoRawData>> points = new ArrayList<>(cleanedTrack.points());
assertTrue(
points.get(3).altitude().equals(points.get(1).altitude()) &&
points.get(2).altitude().equals(points.get(1).altitude()),
"The last points should have their altitudes filled"
);
}
|
public byte exitStatus() {
return exitStatus.exitStatus();
}
|
@Test
void with_passed_scenarios() {
Runtime runtime = createRuntime();
bus.send(testCaseFinishedWithStatus(Status.PASSED));
assertThat(runtime.exitStatus(), is(equalTo((byte) 0x0)));
}
|
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
String[] parts = remaining.split(":");
if (parts.length < 3) {
throw new IllegalArgumentException(
"Google PubSub Lite Endpoint format \"projectId:location:destinationName[:subscriptionName]\"");
}
GooglePubsubLiteEndpoint pubsubEndpoint = new GooglePubsubLiteEndpoint(uri, this);
LOG.debug("Google Cloud ProjectId {}", parts[0]);
pubsubEndpoint.setProjectId(parts[0]);
LOG.debug("Google Cloud LocationId {}", parts[1]);
pubsubEndpoint.setLocation(parts[1]);
LOG.debug("Google Cloud DestinationName {}", parts[2]);
pubsubEndpoint.setDestinationName(parts[2]);
LOG.debug("Google Cloud ServiceAccountKey {}", serviceAccountKey);
pubsubEndpoint.setServiceAccountKey(serviceAccountKey);
setProperties(pubsubEndpoint, parameters);
return pubsubEndpoint;
}
|
@Test
public void testCreateEndpointMissingFields() {
String uri = "google-pubsub-lite:123456789012:europe-west3";
String remaining = "123456789012:europe-west3";
Map<String, Object> parameters = new HashMap<>();
Exception exception = assertThrows(IllegalArgumentException.class,
() -> googlePubsubLiteComponent.createEndpoint(uri, remaining, parameters));
String expectedMessage = "Google PubSub Lite Endpoint format \"projectId:location:destinationName[:subscriptionName]\"";
String actualMessage = exception.getMessage();
assertTrue(actualMessage.contains(expectedMessage));
}
|
static boolean isRealTask(Task task) {
return task.getSeq() >= 0
&& StepHelper.retrieveStepStatus(task.getOutputData()) != StepInstance.Status.NOT_CREATED;
}
|
@Test
public void testIsRealTask() {
when(task.getTaskType()).thenReturn(Constants.MAESTRO_TASK_NAME);
when(task.getSeq()).thenReturn(-1);
Assert.assertFalse(TaskHelper.isRealTask(task));
when(task.getSeq()).thenReturn(1);
when(task.getOutputData())
.thenReturn(
Collections.singletonMap(
Constants.STEP_RUNTIME_SUMMARY_FIELD,
StepRuntimeSummary.builder().runtimeState(new StepRuntimeState()).build()));
Assert.assertFalse(TaskHelper.isRealTask(task));
when(task.getOutputData()).thenReturn(Collections.emptyMap());
Assert.assertTrue(TaskHelper.isRealTask(task));
}
|
public TaskInfo cancel()
{
taskStateMachine.cancel();
return getTaskInfo();
}
|
@Test
public void testCancel()
{
SqlTask sqlTask = createInitialTask();
TaskInfo taskInfo = sqlTask.updateTask(TEST_SESSION,
Optional.of(PLAN_FRAGMENT),
ImmutableList.of(),
createInitialEmptyOutputBuffers(PARTITIONED)
.withBuffer(OUT, 0)
.withNoMoreBufferIds(),
Optional.of(new TableWriteInfo(Optional.empty(), Optional.empty(), Optional.empty())));
assertEquals(taskInfo.getTaskStatus().getState(), TaskState.RUNNING);
assertNull(taskInfo.getStats().getEndTime());
taskInfo = sqlTask.getTaskInfo();
assertEquals(taskInfo.getTaskStatus().getState(), TaskState.RUNNING);
assertNull(taskInfo.getStats().getEndTime());
taskInfo = sqlTask.cancel();
assertEquals(taskInfo.getTaskStatus().getState(), TaskState.CANCELED);
assertNotNull(taskInfo.getStats().getEndTime());
taskInfo = sqlTask.getTaskInfo();
assertEquals(taskInfo.getTaskStatus().getState(), TaskState.CANCELED);
assertNotNull(taskInfo.getStats().getEndTime());
}
|
@Override
public void setNoMorePages()
{
PendingRead pendingRead;
synchronized (this) {
state.compareAndSet(NO_MORE_BUFFERS, FLUSHING);
noMorePages.set(true);
pendingRead = this.pendingRead;
this.pendingRead = null;
log.info("Task %s: %s pages and %s bytes was written into TempStorage", taskId, totalStoragePagesAdded.get(), totalStorageBytesAdded.get());
}
if (pendingRead != null) {
processPendingRead(pendingRead);
}
checkFlushComplete();
}
|
@Test
public void testAddAfterNoMorePages()
{
SpoolingOutputBuffer buffer = createSpoolingOutputBuffer();
for (int i = 0; i < 2; i++) {
addPage(buffer, createPage(i));
}
compareTotalBuffered(buffer, 2);
buffer.setNoMorePages();
// should not be added
addPage(buffer, createPage(2));
compareTotalBuffered(buffer, 2);
// get the two pages added
assertBufferResultEquals(TYPES, getBufferResult(buffer, BUFFER_ID, 0, sizeOfPages(3), MAX_WAIT), bufferResult(0, createPage(0), createPage(1)));
assertBufferResultEquals(TYPES, getBufferResult(buffer, BUFFER_ID, 2, sizeOfPages(3), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 2, true));
}
|
@Override
public void reset() {
resetCount++;
super.reset();
initEvaluatorMap();
initCollisionMaps();
root.recursiveReset();
resetTurboFilterList();
cancelScheduledTasks();
fireOnReset();
resetListenersExceptResetResistant();
resetStatusListenersExceptResetResistant();
}
|
@Test
public void resetTest() {
Logger root = lc.getLogger(Logger.ROOT_LOGGER_NAME);
Logger a = lc.getLogger("a");
Logger ab = lc.getLogger("a.b");
ab.setLevel(Level.WARN);
root.setLevel(Level.INFO);
lc.reset();
assertEquals(Level.DEBUG, root.getEffectiveLevel());
assertTrue(root.isDebugEnabled());
assertEquals(Level.DEBUG, a.getEffectiveLevel());
assertEquals(Level.DEBUG, ab.getEffectiveLevel());
assertEquals(Level.DEBUG, root.getLevel());
assertNull(a.getLevel());
assertNull(ab.getLevel());
}
|
public static DistCpOptions parse(String[] args)
throws IllegalArgumentException {
CommandLineParser parser = new CustomParser();
CommandLine command;
try {
command = parser.parse(cliOptions, args, true);
} catch (ParseException e) {
throw new IllegalArgumentException("Unable to parse arguments. " +
Arrays.toString(args), e);
}
DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);
builder
.withAtomicCommit(
command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
.withSyncFolder(
command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
.withDeleteMissing(
command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
.withIgnoreFailures(
command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
.withOverwrite(
command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
.withAppend(
command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
.withSkipCRC(
command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
.withBlocking(
!command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
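// note the negation: presence of this switch requests non-blocking (async) execution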
.withVerboseLog(
command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
.withDirectWrite(
command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
.withUseIterator(
command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
.withUpdateRoot(
command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));
if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
String[] snapshots = getVals(command,
DistCpOptionSwitch.DIFF.getSwitch());
checkSnapshotsArgs(snapshots);
builder.withUseDiff(snapshots[0], snapshots[1]);
}
if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
String[] snapshots = getVals(command,
DistCpOptionSwitch.RDIFF.getSwitch());
checkSnapshotsArgs(snapshots);
builder.withUseRdiff(snapshots[0], snapshots[1]);
}
if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
builder.withFiltersFile(
getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
}
if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
builder.withLogPath(
new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
}
if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
final String workPath = getVal(command,
DistCpOptionSwitch.WORK_PATH.getSwitch());
if (workPath != null && !workPath.isEmpty()) {
builder.withAtomicWorkPath(new Path(workPath));
}
}
if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
builder.withTrackMissing(
new Path(getVal(
command,
DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
}
if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
try {
final Float mapBandwidth = Float.parseFloat(
getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
builder.withMapBandwidth(mapBandwidth);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Bandwidth specified is invalid: " +
getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
}
}
if (command.hasOption(
DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
try {
final Integer numThreads = Integer.parseInt(getVal(command,
DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
builder.withNumListstatusThreads(numThreads);
} catch (NumberFormatException e) {
throw new IllegalArgumentException(
"Number of liststatus threads is invalid: " + getVal(command,
DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
}
}
if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
try {
final Integer maps = Integer.parseInt(
getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
builder.maxMaps(maps);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Number of maps is invalid: " +
getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
}
}
if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
builder.withCopyStrategy(
getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
}
if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
builder.preserve(
getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
}
if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated" +
" option. Ignoring.");
}
if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated" +
" option. Ignoring.");
}
if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
final String chunkSizeStr = getVal(command,
DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch().trim());
try {
int csize = Integer.parseInt(chunkSizeStr);
csize = csize > 0 ? csize : 0;
LOG.info("Set distcp blocksPerChunk to " + csize);
builder.withBlocksPerChunk(csize);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("blocksPerChunk is invalid: "
+ chunkSizeStr, e);
}
}
if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
final String copyBufferSizeStr = getVal(command,
DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch().trim());
try {
int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
builder.withCopyBufferSize(copyBufferSize);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("copyBufferSize is invalid: "
+ copyBufferSizeStr, e);
}
}
return builder.build();
}
|
@Test
public void testLogPath() {
DistCpOptions options = OptionsParser.parse(new String[] {
"hdfs://localhost:8020/source/first",
"hdfs://localhost:8020/target/"});
Assert.assertNull(options.getLogPath());
options = OptionsParser.parse(new String[] {
"-log",
"hdfs://localhost:8020/logs",
"hdfs://localhost:8020/source/first",
"hdfs://localhost:8020/target/"});
Assert.assertEquals(options.getLogPath(), new Path("hdfs://localhost:8020/logs"));
}
|
public static boolean isFileExtractable(String path) {
String type = getExtension(path);
return isZip(type)
|| isTar(type)
|| isRar(type)
|| isGzippedTar(type)
|| is7zip(type)
|| isBzippedTar(type)
|| isXzippedTar(type)
|| isLzippedTar(type)
|| isBzip2(type)
|| isGzip(type)
|| isLzma(type)
|| isXz(type);
}
|
@Test
public void isFileExtractableTest() throws Exception {
// extensions the helper knows about, so it returns true
assertTrue(CompressedHelper.isFileExtractable("/test/test.zip"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.rar"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.tar"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.tar.gz"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.tgz"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.tar.bz2"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.tbz"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.tar.lzma"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.jar"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.apk"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.7z"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.txt.gz"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.txt.bz2"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.txt.lzma"));
assertTrue(CompressedHelper.isFileExtractable("/test/test.txt.xz"));
// extension the helper does not know about, so it returns false
assertFalse(CompressedHelper.isFileExtractable("/test/test.z"));
}
|
public static Builder builder() {
return new Builder();
}
|
@Test
void with_parse_error() {
Runtime runtime = Runtime.builder()
.withFeatureSupplier(() -> {
throw new FeatureParserException("oops");
})
.build();
assertThrows(FeatureParserException.class, runtime::run);
}
|
public Option<Dataset<Row>> loadAsDataset(SparkSession spark, List<CloudObjectMetadata> cloudObjectMetadata,
String fileFormat, Option<SchemaProvider> schemaProviderOption, int numPartitions) {
if (LOG.isDebugEnabled()) {
LOG.debug("Extracted distinct files " + cloudObjectMetadata.size()
+ " and some samples " + cloudObjectMetadata.stream().map(CloudObjectMetadata::getPath).limit(10).collect(Collectors.toList()));
}
if (isNullOrEmpty(cloudObjectMetadata)) {
return Option.empty();
}
DataFrameReader reader = spark.read().format(fileFormat);
String datasourceOpts = getStringWithAltKeys(properties, CloudSourceConfig.SPARK_DATASOURCE_OPTIONS, true);
if (schemaProviderOption.isPresent()) {
Schema sourceSchema = schemaProviderOption.get().getSourceSchema();
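// only force an explicit schema when the provider supplies a real one; otherwise let Spark infer it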
if (sourceSchema != null && !sourceSchema.equals(InputBatch.NULL_SCHEMA)) {
reader = reader.schema(AvroConversionUtils.convertAvroSchemaToStructType(sourceSchema));
}
}
if (StringUtils.isNullOrEmpty(datasourceOpts)) {
// fall back to legacy config for BWC. TODO consolidate in HUDI-6020
datasourceOpts = getStringWithAltKeys(properties, S3EventsHoodieIncrSourceConfig.SPARK_DATASOURCE_OPTIONS, true);
}
if (StringUtils.nonEmpty(datasourceOpts)) {
final ObjectMapper mapper = new ObjectMapper();
Map<String, String> sparkOptionsMap = null;
try {
sparkOptionsMap = mapper.readValue(datasourceOpts, Map.class);
} catch (IOException e) {
throw new HoodieException(String.format("Failed to parse sparkOptions: %s", datasourceOpts), e);
}
LOG.info(String.format("sparkOptions loaded: %s", sparkOptionsMap));
reader = reader.options(sparkOptionsMap);
}
List<String> paths = new ArrayList<>();
for (CloudObjectMetadata o : cloudObjectMetadata) {
paths.add(o.getPath());
}
boolean isCommaSeparatedPathFormat = properties.getBoolean(SPARK_DATASOURCE_READER_COMMA_SEPARATED_PATH_FORMAT.key(), false);
Dataset<Row> dataset;
if (isCommaSeparatedPathFormat) {
dataset = reader.load(String.join(",", paths));
} else {
dataset = reader.load(paths.toArray(new String[cloudObjectMetadata.size()]));
}
// add partition column from source path if configured
if (containsConfigProperty(properties, PATH_BASED_PARTITION_FIELDS)) {
String[] partitionKeysToAdd = getStringWithAltKeys(properties, PATH_BASED_PARTITION_FIELDS).split(",");
// Add partition column for all path-based partition keys. If key is not present in path, the value will be null.
for (String partitionKey : partitionKeysToAdd) {
String partitionPathPattern = String.format("%s=", partitionKey);
LOG.info(String.format("Adding column %s to dataset", partitionKey));
dataset = dataset.withColumn(partitionKey, split(split(input_file_name(), partitionPathPattern).getItem(1), "/").getItem(0));
}
}
dataset = coalesceOrRepartition(dataset, numPartitions);
return Option.of(dataset);
}
|
@Test
public void partitionKeyNotPresentInPath() {
List<CloudObjectMetadata> input = Collections.singletonList(new CloudObjectMetadata("src/test/resources/data/partitioned/country=US/state=CA/data.json", 1));
TypedProperties properties = new TypedProperties();
properties.put("hoodie.deltastreamer.source.cloud.data.reader.comma.separated.path.format", "false");
properties.put("hoodie.deltastreamer.source.cloud.data.partition.fields.from.path", "unknown");
CloudObjectsSelectorCommon cloudObjectsSelectorCommon = new CloudObjectsSelectorCommon(properties);
Option<Dataset<Row>> result = cloudObjectsSelectorCommon.loadAsDataset(sparkSession, input, "json", Option.empty(), 1);
Assertions.assertTrue(result.isPresent());
Assertions.assertEquals(1, result.get().count());
Row expected = RowFactory.create("some data", null);
Assertions.assertEquals(Collections.singletonList(expected), result.get().collectAsList());
}
|
static String formatRequestBody(String scope) throws IOException {
try {
StringBuilder requestParameters = new StringBuilder();
requestParameters.append("grant_type=client_credentials");
if (scope != null && !scope.trim().isEmpty()) {
scope = scope.trim();
String encodedScope = URLEncoder.encode(scope, StandardCharsets.UTF_8.name());
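// URLEncoder.encode emits '+' for spaces, which is what application/x-www-form-urlencoded expects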
requestParameters.append("&scope=").append(encodedScope);
}
return requestParameters.toString();
} catch (UnsupportedEncodingException e) {
// The world has gone crazy!
throw new IOException(String.format("Encoding %s not supported", StandardCharsets.UTF_8.name()));
}
}
|
@Test
public void testFormatRequestBodyWithEscaped() throws IOException {
String questionMark = "%3F";
String exclamationMark = "%21";
String expected = String.format("grant_type=client_credentials&scope=earth+is+great%s", exclamationMark);
String actual = HttpAccessTokenRetriever.formatRequestBody("earth is great!");
assertEquals(expected, actual);
expected = String.format("grant_type=client_credentials&scope=what+on+earth%s%s%s%s%s", questionMark, exclamationMark, questionMark, exclamationMark, questionMark);
actual = HttpAccessTokenRetriever.formatRequestBody("what on earth?!?!?");
assertEquals(expected, actual);
}
|
public AuthenticationRequest startAuthenticationProcess(HttpServletRequest httpRequest) throws ComponentInitializationException, MessageDecodingException, SamlValidationException, SharedServiceClientException, DienstencatalogusException, SamlSessionException {
BaseHttpServletRequestXMLMessageDecoder decoder = decodeXMLRequest(httpRequest);
AuthenticationRequest authenticationRequest = createAuthenticationRequest(httpRequest, decoder);
SAMLBindingContext bindingContext = createAndValidateBindingContext(decoder);
validateAuthenticationRequest(authenticationRequest);
parseAuthentication(authenticationRequest);
validateWithOtherDomainServices(authenticationRequest, bindingContext);
return authenticationRequest;
}
|
@Test //entrance
public void parseInvalidAuthenticationRequestTest() {
String samlRequest = readXMLFile(authnRequestEntranceInvalidFile);
String encodedSAMLRequest = encodeAuthnRequest(samlRequest);
httpServletRequestMock.setParameter("SAMLRequest", encodedSAMLRequest);
Exception exception = assertThrows(SamlValidationException.class,
() -> authenticationService.startAuthenticationProcess(httpServletRequestMock));
assertEquals("AuthnRequest validation error", exception.getMessage());
}
|
public void init() {
if (isInitiated.compareAndSet(false, true)) {
Assert.notNull(applicationContext, () -> "Application must not be null");
Map<String, ReadinessCheckCallback> beansOfType = applicationContext
.getBeansOfType(ReadinessCheckCallback.class);
readinessCheckCallbacks = HealthCheckComparatorSupport.sortMapAccordingToValue(beansOfType,
HealthCheckComparatorSupport.getComparatorToUse(applicationContext.getAutowireCapableBeanFactory()));
String applicationCallbackInfo = "Found " + readinessCheckCallbacks.size() + " ReadinessCheckCallback implementation: " + String.join(",", beansOfType.keySet());
logger.info(applicationCallbackInfo);
}
}
|
@Test
public void applicationContextNull() {
assertThatThrownBy(() -> new ReadinessCheckCallbackProcessor().init()).hasMessage("Application must not be null");
}
|
@Override
public void registerStore(final StateStore store,
final StateRestoreCallback stateRestoreCallback,
final CommitCallback commitCallback) {
final String storeName = store.name();
// TODO (KAFKA-12887): we should not trigger user's exception handler for illegal-argument but always
// fail-crash; in this case we would not need to immediately close the state store before throwing
if (CHECKPOINT_FILE_NAME.equals(storeName)) {
store.close();
throw new IllegalArgumentException(format("%sIllegal store name: %s, which collides with the pre-defined " +
"checkpoint file name", logPrefix, storeName));
}
if (stores.containsKey(storeName)) {
store.close();
throw new IllegalArgumentException(format("%sStore %s has already been registered.", logPrefix, storeName));
}
if (stateRestoreCallback instanceof StateRestoreListener) {
log.warn("The registered state restore callback is also implementing the state restore listener interface, " +
"which is not expected and would be ignored");
}
final StateStoreMetadata storeMetadata = isLoggingEnabled(storeName) ?
new StateStoreMetadata(
store,
getStorePartition(storeName),
stateRestoreCallback,
commitCallback,
converterForStore(store)) :
new StateStoreMetadata(store, commitCallback);
// register the store first, so that if later an exception is thrown then eventually while we call `close`
// on the state manager this state store would be closed as well
stores.put(storeName, storeMetadata);
if (!stateUpdaterEnabled) {
maybeRegisterStoreWithChangelogReader(storeName);
}
log.debug("Registered state store {} to its state manager", storeName);
}
|
@Test
public void shouldThrowIllegalArgumentExceptionIfStoreNameIsSameAsCheckpointFileName() {
final ProcessorStateManager stateManager = getStateManager(Task.TaskType.ACTIVE);
assertThrows(IllegalArgumentException.class, () ->
stateManager.registerStore(new MockKeyValueStore(CHECKPOINT_FILE_NAME, true), null, null));
}
|
static String getPassword(Configuration conf, String alias) {
String password = null;
try {
char[] passchars = conf.getPassword(alias);
if (passchars != null) {
password = new String(passchars);
}
}
catch (IOException ioe) {
LOG.warn("Setting password to null since IOException is caught"
+ " when getting password", ioe);
password = null;
}
return password;
}
|
@Test
public void testGetPassword() throws Exception {
File testDir = GenericTestUtils.getTestDir();
Configuration conf = new Configuration();
final Path jksPath = new Path(testDir.toString(), "test.jks");
final String ourUrl =
JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
File file = new File(testDir, "test.jks");
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
char[] trustpass = {'t', 'r', 'u', 's', 't', 'p', 'a', 's', 's'};
// ensure that we get nulls when the key isn't there
assertEquals(null, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
assertEquals(null, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
assertEquals(null, provider.getCredentialEntry(
DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));
// create new aliases
try {
provider.createCredentialEntry(
DFS_SERVER_HTTPS_KEYPASSWORD_KEY, keypass);
provider.createCredentialEntry(
DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY, storepass);
provider.createCredentialEntry(
DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY, trustpass);
// write out so that it can be found in checks
provider.flush();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
// make sure we get back the right key directly from api
assertArrayEquals(keypass, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYPASSWORD_KEY).getCredential());
assertArrayEquals(storepass, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY).getCredential());
assertArrayEquals(trustpass, provider.getCredentialEntry(
DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY).getCredential());
// use WebAppUtils as would be used by loadSslConfiguration
Assert.assertEquals("keypass",
DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
Assert.assertEquals("storepass",
DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
Assert.assertEquals("trustpass",
DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));
// let's make sure that a password that doesn't exist returns null
Assert.assertEquals(null, DFSUtil.getPassword(conf,"invalid-alias"));
}
|
public static double shuffleCompressionRatio(
SparkSession spark, FileFormat outputFileFormat, String outputCodec) {
if (outputFileFormat == FileFormat.ORC || outputFileFormat == FileFormat.PARQUET) {
return columnarCompression(shuffleCodec(spark), outputCodec);
} else if (outputFileFormat == FileFormat.AVRO) {
return rowBasedCompression(shuffleCodec(spark), outputCodec);
} else {
return 1.0;
}
}
|
@Test
public void testCodecNameNormalization() {
configureShuffle("zStD", true);
double ratio = shuffleCompressionRatio(PARQUET, "ZstD");
assertThat(ratio).isEqualTo(2.0);
}
|
@VisibleForTesting
static Estimate calculateDataSizeForPartitioningKey(
HiveColumnHandle column,
Type type,
List<HivePartition> partitions,
Map<String, PartitionStatistics> statistics,
double averageRowsPerPartition)
{
if (!hasDataSize(type)) {
return Estimate.unknown();
}
double dataSize = 0;
for (HivePartition partition : partitions) {
int length = getSize(partition.getKeys().get(column));
double rowCount = getPartitionRowCount(partition.getPartitionId().getPartitionName(), statistics).orElse(averageRowsPerPartition);
dataSize += length * rowCount;
}
return Estimate.of(dataSize);
}
|
@Test
public void testCalculateDataSizeForPartitioningKey()
{
assertEquals(
calculateDataSizeForPartitioningKey(
PARTITION_COLUMN_2,
BIGINT,
ImmutableList.of(partition("p1=string1/p2=1234")),
ImmutableMap.of("p1=string1/p2=1234", rowsCount(1000)),
2000),
Estimate.unknown());
assertEquals(
calculateDataSizeForPartitioningKey(
PARTITION_COLUMN_1,
VARCHAR,
ImmutableList.of(partition("p1=string1/p2=1234")),
ImmutableMap.of("p1=string1/p2=1234", rowsCount(1000)),
2000),
Estimate.of(7000));
assertEquals(
calculateDataSizeForPartitioningKey(
PARTITION_COLUMN_1,
VARCHAR,
ImmutableList.of(partition("p1=string1/p2=1234")),
ImmutableMap.of("p1=string1/p2=1234", PartitionStatistics.empty()),
2000),
Estimate.of(14000));
assertEquals(
calculateDataSizeForPartitioningKey(
PARTITION_COLUMN_1,
VARCHAR,
ImmutableList.of(partition("p1=string1/p2=1234"), partition("p1=str2/p2=1234")),
ImmutableMap.of("p1=string1/p2=1234", rowsCount(1000), "p1=str2/p2=1234", rowsCount(2000)),
3000),
Estimate.of(15000));
assertEquals(
calculateDataSizeForPartitioningKey(
PARTITION_COLUMN_1,
VARCHAR,
ImmutableList.of(partition("p1=string1/p2=1234"), partition("p1=str2/p2=1234")),
ImmutableMap.of("p1=string1/p2=1234", rowsCount(1000), "p1=str2/p2=1234", PartitionStatistics.empty()),
3000),
Estimate.of(19000));
assertEquals(
calculateDataSizeForPartitioningKey(
PARTITION_COLUMN_1,
VARCHAR,
ImmutableList.of(partition("p1=string1/p2=1234"), partition("p1=str2/p2=1234")),
ImmutableMap.of(),
3000),
Estimate.of(33000));
assertEquals(
calculateDataSizeForPartitioningKey(
PARTITION_COLUMN_1,
VARCHAR,
ImmutableList.of(partition("p1=__HIVE_DEFAULT_PARTITION__/p2=1234"), partition("p1=str2/p2=1234")),
ImmutableMap.of(),
3000),
Estimate.of(12000));
}
|
public void createNewCodeDefinition(DbSession dbSession, String projectUuid, String mainBranchUuid,
String defaultBranchName, String newCodeDefinitionType, @Nullable String newCodeDefinitionValue) {
boolean isCommunityEdition = editionProvider.get().filter(EditionProvider.Edition.COMMUNITY::equals).isPresent();
NewCodePeriodType newCodePeriodType = parseNewCodeDefinitionType(newCodeDefinitionType);
NewCodePeriodDto dto = new NewCodePeriodDto();
dto.setType(newCodePeriodType);
dto.setProjectUuid(projectUuid);
if (isCommunityEdition) {
dto.setBranchUuid(mainBranchUuid);
}
getNewCodeDefinitionValueProjectCreation(newCodePeriodType, newCodeDefinitionValue, defaultBranchName).ifPresent(dto::setValue);
if (!CaycUtils.isNewCodePeriodCompliant(dto.getType(), dto.getValue())) {
throw new IllegalArgumentException("Failed to set the New Code Definition. The given value is not compatible with the Clean as You Code methodology. "
+ "Please refer to the documentation for compliant options.");
}
dbClient.newCodePeriodDao().insert(dbSession, dto);
}
|
@Test
public void createNewCodeDefinition_throw_IAE_if_type_is_not_allowed() {
assertThatThrownBy(() -> newCodeDefinitionResolver.createNewCodeDefinition(dbSession, DEFAULT_PROJECT_ID, MAIN_BRANCH_UUID, MAIN_BRANCH, SPECIFIC_ANALYSIS.name(), null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Invalid type 'SPECIFIC_ANALYSIS'. `newCodeDefinitionType` can only be set with types: [PREVIOUS_VERSION, NUMBER_OF_DAYS, REFERENCE_BRANCH]");
}
|
@Override
public final Map<PCollection<?>, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PCollection<?>> outputs, OutputT newOutput) {
return ReplacementOutputs.singleton(outputs, newOutput);
}
|
@Test
public void testMapOutputsMultipleOriginalOutputsFails() {
PCollection<Integer> input = pipeline.apply(Create.of(1, 2, 3));
PCollection<Integer> output = input.apply("Map", MapElements.via(fn));
PCollection<Integer> reappliedOutput = input.apply("ReMap", MapElements.via(fn));
thrown.expect(IllegalArgumentException.class);
factory.mapOutputs(
PValues.expandOutput(PCollectionList.of(output).and(input).and(reappliedOutput)),
reappliedOutput);
}
|
public static Builder builder() {
return new Builder();
}
|
@Test
public void testEqualsAndHashCode() {
CommonUpstream commonUpstream = new CommonUpstream();
commonUpstream.setProtocol("protocol");
commonUpstream.setUpstreamHost("host");
commonUpstream.setUpstreamUrl("url");
commonUpstream.setStatus(true);
commonUpstream.setTimestamp(1650549243L);
ZombieUpstream upstream1 = ZombieUpstream.builder().selectorId("id").zombieCheckTimes(10)
.commonUpstream(commonUpstream).build();
ZombieUpstream upstream2 = ZombieUpstream.builder().selectorId("id").zombieCheckTimes(10)
.commonUpstream(commonUpstream).build();
assertThat(ImmutableSet.of(upstream1, upstream2), hasSize(1));
}
|
@Override
public boolean setReadOnly() {
throw new UnsupportedOperationException("Not implemented");
}
|
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testSetReadOnly() {
fs.getFile("nonsuch.txt").setReadOnly();
}
|
public static String formatSql(final AstNode root) {
final StringBuilder builder = new StringBuilder();
new Formatter(builder).process(root, 0);
return StringUtils.stripEnd(builder.toString(), "\n");
}
|
@Test
public void shouldFormatSelectStarCorrectlyWithJoin() {
final String statementString = "CREATE STREAM S AS SELECT address.*, itemid.* "
+ "FROM address INNER JOIN itemid ON address.address = itemid.address->address;";
final Statement statement = parseSingle(statementString);
assertThat(SqlFormatter.formatSql(statement),
equalTo("CREATE STREAM S AS SELECT\n"
+ " ADDRESS.*,\n"
+ " ITEMID.*\n"
+ "FROM ADDRESS ADDRESS\n"
+ "INNER JOIN ITEMID ITEMID ON ((ADDRESS.ADDRESS = ITEMID.ADDRESS->ADDRESS))\n"
+ "EMIT CHANGES"));
}
|
public static String getByFilename(String filename) {
String extension = FilenameUtils.getExtension(filename);
String mime = null;
if (!isNullOrEmpty(extension)) {
mime = MAP.get(extension.toLowerCase(Locale.ENGLISH));
}
return mime != null ? mime : DEFAULT;
}
|
@Test
public void getByFilename_default_mime_type() {
assertThat(MediaTypes.getByFilename("")).isEqualTo(MediaTypes.DEFAULT);
assertThat(MediaTypes.getByFilename("unknown.extension")).isEqualTo(MediaTypes.DEFAULT);
}
|
@SuppressWarnings("java:S2583")
public static boolean verify(@NonNull JWKSet jwks, @NonNull JWSObject jws) {
if (jwks == null) {
throw new IllegalArgumentException("no JWKS provided to verify JWS");
}
if (jwks.getKeys() == null || jwks.getKeys().isEmpty()) {
return false;
}
var header = jws.getHeader();
if (!JWSAlgorithm.ES256.equals(header.getAlgorithm())) {
throw new UnsupportedOperationException(
"only supports ES256, found: " + header.getAlgorithm());
}
var key = jwks.getKeyByKeyId(header.getKeyID());
if (key == null) {
return false;
}
try {
var processor = new DefaultJWSVerifierFactory();
var verifier = processor.createJWSVerifier(jws.getHeader(), key.toECKey().toPublicKey());
return jws.verify(verifier);
} catch (JOSEException e) {
throw FederationExceptions.badSignature(e);
}
}
|
@Test
void verifyBadSignature() throws ParseException {
var jws = toJws(ECKEY, "test").serialize();
jws = tamperSignature(jws);
var in = JWSObject.parse(jws);
// when & then
assertFalse(JwsVerifier.verify(JWKS, in));
}
|
@Override
public String getOtp() throws OtpInfoException {
checkSecret();
try {
OTP otp = HOTP.generateOTP(getSecret(), getAlgorithm(true), getDigits(), getCounter());
return otp.toString();
} catch (NoSuchAlgorithmException | InvalidKeyException e) {
throw new RuntimeException(e);
}
}
|
@Test
public void testHotpInfoOtp() throws OtpInfoException {
for (int i = 0; i < HOTPTest.VECTORS.length; i++) {
HotpInfo info = new HotpInfo(HOTPTest.SECRET, OtpInfo.DEFAULT_ALGORITHM, OtpInfo.DEFAULT_DIGITS, i);
assertEquals(info.getOtp(), HOTPTest.VECTORS[i]);
}
}
|
@Override
public void open() throws Exception {
if (useSplittableTimers()
&& areSplittableTimersConfigured()
&& getTimeServiceManager().isPresent()) {
this.watermarkProcessor =
new MailboxWatermarkProcessor(
output, mailboxExecutor, getTimeServiceManager().get());
}
}
|
@Test
void testStateDoesNotInterfere() throws Exception {
try (KeyedOneInputStreamOperatorTestHarness<Integer, Tuple2<Integer, String>, String>
testHarness = createTestHarness()) {
testHarness.open();
testHarness.processElement(new Tuple2<>(0, "SET_STATE:HELLO"), 0);
testHarness.processElement(new Tuple2<>(1, "SET_STATE:CIAO"), 0);
testHarness.processElement(new Tuple2<>(1, "EMIT_STATE"), 0);
testHarness.processElement(new Tuple2<>(0, "EMIT_STATE"), 0);
assertThat(extractResult(testHarness))
.contains("ON_ELEMENT:1:CIAO", "ON_ELEMENT:0:HELLO");
}
}
|
@Override
public void removeInstance(String namespaceId, String serviceName, Instance instance) {
boolean ephemeral = instance.isEphemeral();
String clientId = IpPortBasedClient.getClientId(instance.toInetAddr(), ephemeral);
if (!clientManager.contains(clientId)) {
Loggers.SRV_LOG.warn("remove instance from non-exist client: {}", clientId);
return;
}
Service service = getService(namespaceId, serviceName, ephemeral);
clientOperationService.deregisterInstance(service, instance, clientId);
}
|
@Test
void testRemoveInstance() {
when(clientManager.contains(Mockito.anyString())).thenReturn(true);
instanceOperatorClient.removeInstance("A", "B", new Instance());
Mockito.verify(clientOperationService).deregisterInstance(Mockito.any(), Mockito.any(), Mockito.anyString());
}
|
public static String underlineToCamel(String param) {
return formatCamel(param, UNDERLINE);
}
|
@Test
void underlineToCamel() {
assertThat(StringFormatUtils.underlineToCamel(null)).isEqualTo("");
assertThat(StringFormatUtils.underlineToCamel(" ")).isEqualTo("");
assertThat(StringFormatUtils.underlineToCamel("abc_def_gh")).isEqualTo("abcDefGh");
}
|
void addAll(ReplicaMap other) {
map.putAll(other.map);
}
|
@Test
public void testAddAll() {
ReplicaMap tmpReplicaMap = new ReplicaMap();
Block tmpBlock = new Block(5678, 5678, 5678);
tmpReplicaMap.add(bpid, new FinalizedReplica(tmpBlock, null, null));
map.addAll(tmpReplicaMap);
assertNull(map.get(bpid, 1234));
assertNotNull(map.get(bpid, 5678));
}
|
public static JobId toJobID(String jid) {
return TypeConverter.toYarn(JobID.forName(jid));
}
|
@Test
@Timeout(120000)
public void testToJobID() {
JobId jid = MRApps.toJobID("job_1_1");
assertEquals(1, jid.getAppId().getClusterTimestamp());
assertEquals(1, jid.getAppId().getId());
assertEquals(1, jid.getId()); // tests against some proto.id and not a job.id field
}
|
public List<ShardingCondition> createShardingConditions(final InsertStatementContext sqlStatementContext, final List<Object> params) {
List<ShardingCondition> result = null == sqlStatementContext.getInsertSelectContext()
? createShardingConditionsWithInsertValues(sqlStatementContext, params)
: createShardingConditionsWithInsertSelect(sqlStatementContext, params);
appendGeneratedKeyConditions(sqlStatementContext, result);
return result;
}
|
@Test
void assertCreateShardingConditionsInsertStatementWithGeneratedKeyContextAndTableRule() {
GeneratedKeyContext generatedKeyContext = mock(GeneratedKeyContext.class);
when(insertStatementContext.getGeneratedKeyContext()).thenReturn(Optional.of(generatedKeyContext));
when(generatedKeyContext.isGenerated()).thenReturn(true);
when(generatedKeyContext.getGeneratedValues()).thenReturn(Collections.singleton("foo_col_1"));
when(shardingRule.findShardingTable("foo_table")).thenReturn(Optional.of(new ShardingTable(Collections.singleton("foo_col_1"), "test")));
when(shardingRule.findShardingColumn(any(), any())).thenReturn(Optional.of("foo_sharding_col"));
List<ShardingCondition> shardingConditions = shardingConditionEngine.createShardingConditions(insertStatementContext, Collections.emptyList());
assertThat(shardingConditions.get(0).getStartIndex(), is(0));
assertFalse(shardingConditions.get(0).getValues().isEmpty());
}
|
public static Object convertValue(String className, Object cleanValue, ClassLoader classLoader) {
// "null" string is converted to null
cleanValue = "null".equals(cleanValue) ? null : cleanValue;
if (!isPrimitive(className) && cleanValue == null) {
return null;
}
Class<?> clazz = loadClass(className, classLoader);
// if it is not a String, it has to be an instance of the desired type
if (!(cleanValue instanceof String)) {
if (clazz.isInstance(cleanValue)) {
return cleanValue;
}
throw new IllegalArgumentException(new StringBuilder().append("Object ").append(cleanValue)
.append(" is not a String or an instance of ").append(className).toString());
}
String value = (String) cleanValue;
try {
if (clazz.isAssignableFrom(String.class)) {
return value;
} else if (clazz.isAssignableFrom(BigDecimal.class)) {
return parseBigDecimal(value);
} else if (clazz.isAssignableFrom(BigInteger.class)) {
return parseBigInteger(value);
} else if (clazz.isAssignableFrom(Boolean.class) || clazz.isAssignableFrom(boolean.class)) {
return parseBoolean(value);
} else if (clazz.isAssignableFrom(Byte.class) || clazz.isAssignableFrom(byte.class)) {
return Byte.parseByte(value);
} else if (clazz.isAssignableFrom(Character.class) || clazz.isAssignableFrom(char.class)) {
return parseChar(value);
} else if (clazz.isAssignableFrom(Double.class) || clazz.isAssignableFrom(double.class)) {
return Double.parseDouble(cleanStringForNumberParsing(value));
} else if (clazz.isAssignableFrom(Float.class) || clazz.isAssignableFrom(float.class)) {
return Float.parseFloat(cleanStringForNumberParsing(value));
} else if (clazz.isAssignableFrom(Integer.class) || clazz.isAssignableFrom(int.class)) {
return Integer.parseInt(cleanStringForNumberParsing(value));
} else if (clazz.isAssignableFrom(LocalDate.class)) {
return LocalDate.parse(value, DateTimeFormatter.ISO_LOCAL_DATE);
} else if (clazz.isAssignableFrom(LocalDateTime.class)) {
return LocalDateTime.parse(value, DateTimeFormatter.ISO_LOCAL_DATE_TIME);
} else if (clazz.isAssignableFrom(LocalTime.class)) {
return LocalTime.parse(value, DateTimeFormatter.ISO_LOCAL_TIME);
} else if (clazz.isAssignableFrom(Long.class) || clazz.isAssignableFrom(long.class)) {
return Long.parseLong(cleanStringForNumberParsing(value));
} else if (clazz.isAssignableFrom(Short.class) || clazz.isAssignableFrom(short.class)) {
return Short.parseShort(cleanStringForNumberParsing(value));
} else if (Enum.class.isAssignableFrom(clazz)) {
return Enum.valueOf(((Class<? extends Enum>) clazz), value);
}
} catch (RuntimeException e) {
throw new IllegalArgumentException(new StringBuilder().append("Impossible to parse '")
.append(value).append("' as ").append(className).append(" [")
.append(e.getMessage()).append("]").toString());
}
throw new IllegalArgumentException(new StringBuilder().append("Class ").append(className)
.append(" is not natively supported. Please use an MVEL expression" +
" to use it.").toString());
}
|
@Test
public void convertValueFailPrimitiveNullTest() {
assertThatThrownBy(() -> convertValue("int", null, classLoader))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining(" is not a String or an instance of");
}
|
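// A minimal usage sketch (not from the source suite) for the happy-path branches of
// convertValue; it assumes the same static-import style as the test above and a test
// classloader in scope.
@Test
public void convertValueSupportedTypesSketch() {
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    assertThat(convertValue("java.lang.Integer", "42", classLoader)).isEqualTo(42);
    assertThat(convertValue("java.time.LocalDate", "2020-01-31", classLoader)).isEqualTo(LocalDate.of(2020, 1, 31));
    // the literal "null" collapses to null for non-primitive targets
    assertThat(convertValue("java.lang.String", "null", classLoader)).isNull();
}
|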
public long scan(
final UnsafeBuffer termBuffer,
final long rebuildPosition,
final long hwmPosition,
final long nowNs,
final int termLengthMask,
final int positionBitsToShift,
final int initialTermId)
{
boolean lossFound = false;
int rebuildOffset = (int)rebuildPosition & termLengthMask;
if (rebuildPosition < hwmPosition)
{
final int rebuildTermCount = (int)(rebuildPosition >>> positionBitsToShift);
final int hwmTermCount = (int)(hwmPosition >>> positionBitsToShift);
final int rebuildTermId = initialTermId + rebuildTermCount;
final int hwmTermOffset = (int)hwmPosition & termLengthMask;
final int limitOffset = rebuildTermCount == hwmTermCount ? hwmTermOffset : termLengthMask + 1;
rebuildOffset = scanForGap(termBuffer, rebuildTermId, rebuildOffset, limitOffset, this);
if (rebuildOffset < limitOffset)
{
if (scannedTermOffset != activeTermOffset || scannedTermId != activeTermId)
{
activateGap(nowNs);
lossFound = true;
}
checkTimerExpiry(nowNs);
}
}
return pack(rebuildOffset, lossFound);
}
|
@Test
void shouldHandleNonZeroInitialTermOffset()
{
lossDetector = getLossHandlerWithLongRetry();
final long rebuildPosition = ACTIVE_TERM_POSITION + (ALIGNED_FRAME_LENGTH * 3L);
final long hwmPosition = ACTIVE_TERM_POSITION + (ALIGNED_FRAME_LENGTH * 5L);
insertDataFrame(offsetOfMessage(2));
insertDataFrame(offsetOfMessage(4));
lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
currentTime = TimeUnit.MILLISECONDS.toNanos(40);
lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
verify(lossHandler).onGapDetected(TERM_ID, offsetOfMessage(3), gapLength());
verifyNoMoreInteractions(lossHandler);
}
|
public static <T> Partition<T> of(
int numPartitions,
PartitionWithSideInputsFn<? super T> partitionFn,
Requirements requirements) {
Contextful ctfFn =
Contextful.fn(
(T element, Contextful.Fn.Context c) ->
partitionFn.partitionFor(element, numPartitions, c),
requirements);
return new Partition<>(new PartitionDoFn<T>(numPartitions, ctfFn, partitionFn));
}
|
@Test
@Category(NeedsRunner.class)
public void testPartitionWithSideInputs() {
PCollectionView<Integer> gradesView =
pipeline.apply("grades", Create.of(50)).apply(View.asSingleton());
Create.Values<Integer> studentsPercentage = Create.of(5, 45, 90, 29, 55, 65);
PCollectionList<Integer> studentsGrades =
pipeline
.apply(studentsPercentage)
.apply(
Partition.of(
2,
(elem, numPartitions, ct) -> {
Integer grades = ct.sideInput(gradesView);
return elem < grades ? 0 : 1;
},
Requirements.requiresSideInputs(gradesView)));
assertTrue(studentsGrades.size() == 2);
PAssert.that(studentsGrades.get(0)).containsInAnyOrder(5, 29, 45);
PAssert.that(studentsGrades.get(1)).containsInAnyOrder(55, 65, 90);
pipeline.run();
}
|
public static List<UpdateRequirement> forUpdateTable(
TableMetadata base, List<MetadataUpdate> metadataUpdates) {
Preconditions.checkArgument(null != base, "Invalid table metadata: null");
Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");
Builder builder = new Builder(base, false);
builder.require(new UpdateRequirement.AssertTableUUID(base.uuid()));
metadataUpdates.forEach(builder::update);
return builder.build();
}
|
@Test
public void setCurrentSchema() {
int schemaId = 3;
when(metadata.currentSchemaId()).thenReturn(schemaId);
List<UpdateRequirement> requirements =
UpdateRequirements.forUpdateTable(
metadata,
ImmutableList.of(
new MetadataUpdate.SetCurrentSchema(schemaId),
new MetadataUpdate.SetCurrentSchema(schemaId + 1),
new MetadataUpdate.SetCurrentSchema(schemaId + 2)));
requirements.forEach(req -> req.validate(metadata));
assertThat(requirements)
.hasSize(2)
.hasOnlyElementsOfTypes(
UpdateRequirement.AssertTableUUID.class, UpdateRequirement.AssertCurrentSchemaID.class);
assertTableUUID(requirements);
assertThat(requirements)
.element(1)
.asInstanceOf(InstanceOfAssertFactories.type(UpdateRequirement.AssertCurrentSchemaID.class))
.extracting(UpdateRequirement.AssertCurrentSchemaID::schemaId)
.isEqualTo(schemaId);
}
|
@Override
public void onNotificationOpened() {
}
|
@Test
public void onNotificationOpened_neverClearAllNotifications() throws Exception {
createUUT().onNotificationOpened();
verify(mNotificationManager, never()).cancelAll();
}
|
@SuppressWarnings({"SimplifyBooleanReturn"})
public static Map<String, ParamDefinition> cleanupParams(Map<String, ParamDefinition> params) {
if (params == null || params.isEmpty()) {
return params;
}
Map<String, ParamDefinition> mapped =
params.entrySet().stream()
.collect(
MapHelper.toListMap(
Map.Entry::getKey,
p -> {
ParamDefinition param = p.getValue();
if (param.getType() == ParamType.MAP) {
MapParamDefinition mapParamDef = param.asMapParamDef();
if (mapParamDef.getValue() == null
&& (mapParamDef.getInternalMode() == InternalParamMode.OPTIONAL)) {
return mapParamDef;
}
return MapParamDefinition.builder()
.name(mapParamDef.getName())
.value(cleanupParams(mapParamDef.getValue()))
.expression(mapParamDef.getExpression())
.validator(mapParamDef.getValidator())
.tags(mapParamDef.getTags())
.mode(mapParamDef.getMode())
.meta(mapParamDef.getMeta())
.build();
} else {
return param;
}
}));
Map<String, ParamDefinition> filtered =
mapped.entrySet().stream()
.filter(
p -> {
ParamDefinition param = p.getValue();
if (param.getInternalMode() == InternalParamMode.OPTIONAL) {
if (param.getValue() == null && param.getExpression() == null) {
return false;
} else if (param.getType() == ParamType.MAP
&& param.asMapParamDef().getValue() != null
&& param.asMapParamDef().getValue().isEmpty()) {
return false;
} else {
return true;
}
} else {
Checks.checkTrue(
param.getValue() != null || param.getExpression() != null,
String.format(
"[%s] is a required parameter (type=[%s])",
p.getKey(), param.getType()));
return true;
}
})
.collect(MapHelper.toListMap(Map.Entry::getKey, Map.Entry::getValue));
return cleanIntermediateMetadata(filtered);
}
|
@Test
public void testParameterConversionRemoveInternalMode() throws JsonProcessingException {
Map<String, ParamDefinition> allParams =
parseParamDefMap(
"{'optional': {'type': 'STRING', 'value': 'hello', 'internal_mode': 'OPTIONAL'}}");
Map<String, ParamDefinition> convertedParams = ParamsMergeHelper.cleanupParams(allParams);
assertNull(convertedParams.get("optional").asStringParamDef().getInternalMode());
}
|
@Override
public Set<DeviceId> getNetconfDevices() {
return netconfDeviceMap.keySet();
}
|
@Test
public void testGetNetconfDevices() {
Set<DeviceId> devices = new HashSet<>();
devices.add(deviceId1);
devices.add(deviceId2);
assertTrue("Incorrect devices", ctrl.getNetconfDevices().containsAll(devices));
}
|
public static String getLogicIndexName(final String actualIndexName, final String actualTableName) {
String indexNameSuffix = UNDERLINE + actualTableName;
return actualIndexName.endsWith(indexNameSuffix) ? actualIndexName.substring(0, actualIndexName.lastIndexOf(indexNameSuffix)) : actualIndexName;
}
|
@Test
void assertGetLogicIndexNameWithMultiIndexNameSuffix() {
assertThat(IndexMetaDataUtils.getLogicIndexName("order_t_order_index_t_order", "t_order"), is("order_t_order_index"));
}
|
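// A small complementary sketch (hypothetical inputs): only a trailing "_<actualTableName>"
// suffix is stripped, and names without that suffix are returned unchanged.
@Test
void assertGetLogicIndexNameSketch() {
    assertThat(IndexMetaDataUtils.getLogicIndexName("order_index_t_order", "t_order"), is("order_index"));
    assertThat(IndexMetaDataUtils.getLogicIndexName("order_index", "t_order"), is("order_index"));
}
|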
@Override
public String getName() {
return "DroneCI";
}
|
@Test
public void getName() {
assertThat(underTest.getName()).isEqualTo("DroneCI");
}
|
@Override
public PipelineChannel newInstance(final int importerBatchSize, final PipelineChannelAckCallback ackCallback) {
int queueSize = this.queueSize / importerBatchSize;
return new MemoryPipelineChannel(queueSize, ackCallback);
}
|
@Test
void assertInitWithNonZeroBlockQueueSize() throws Exception {
PipelineChannelCreator creator = TypedSPILoader.getService(PipelineChannelCreator.class, "MEMORY", PropertiesBuilder.build(new Property("block-queue-size", "2000")));
assertThat(Plugins.getMemberAccessor().get(MemoryPipelineChannelCreator.class.getDeclaredField("queueSize"), creator), is(2000));
PipelineChannel channel = creator.newInstance(1000, new InventoryTaskAckCallback(new AtomicReference<>()));
assertInstanceOf(ArrayBlockingQueue.class, Plugins.getMemberAccessor().get(MemoryPipelineChannel.class.getDeclaredField("queue"), channel));
}
|
static <ID, T> TaskExecutors<ID, T> batchExecutors(final String name,
int workerCount,
final TaskProcessor<T> processor,
final AcceptorExecutor<ID, T> acceptorExecutor) {
final AtomicBoolean isShutdown = new AtomicBoolean();
final TaskExecutorMetrics metrics = new TaskExecutorMetrics(name);
registeredMonitors.put(name, metrics);
return new TaskExecutors<>(idx -> new BatchWorkerRunnable<>("TaskBatchingWorker-" + name + '-' + idx, isShutdown, metrics, processor, acceptorExecutor), workerCount, isShutdown);
}
|
@Test
public void testBatchSuccessfulProcessing() throws Exception {
taskExecutors = TaskExecutors.batchExecutors("TEST", 1, processor, acceptorExecutor);
taskBatchQueue.add(asList(successfulTaskHolder(1), successfulTaskHolder(2)));
processor.expectSuccesses(2);
}
|
@Override
public <T extends MigrationStep> MigrationStepRegistry add(long migrationNumber, String description, Class<T> stepClass) {
validate(migrationNumber);
requireNonNull(description, "description can't be null");
checkArgument(!description.isEmpty(), "description can't be empty");
requireNonNull(stepClass, "MigrationStep class can't be null");
checkState(!migrations.containsKey(migrationNumber), "A migration is already registered for migration number '%s'", migrationNumber);
this.migrations.put(migrationNumber, new RegisteredMigrationStep(migrationNumber, description, stepClass));
return this;
}
|
@Test
public void add_fails_with_ISE_when_called_twice_with_same_migration_number() {
underTest.add(12, "dsd", MigrationStep.class);
assertThatThrownBy(() -> underTest.add(12, "dfsdf", MigrationStep.class))
.isInstanceOf(IllegalStateException.class)
.hasMessage("A migration is already registered for migration number '12'");
}
|
public static Builder builder() {
return new Builder();
}
|
@Test
void throwsFeignExceptionWithoutBody() {
server.enqueue(new MockResponse().setBody("success!"));
TestInterface api = Feign.builder().decoder((response, type) -> {
throw new IOException("timeout");
})
.target(TestInterface.class, "http://localhost:" + server.getPort());
    try {
      api.noContent();
      fail("FeignException expected");
    } catch (FeignException e) {
assertThat(e.getMessage())
.isEqualTo("timeout reading POST http://localhost:" + server.getPort() + "/");
assertThat(e.contentUTF8()).isEqualTo("");
}
}
|
@Override
public void onPluginChanged(final List<PluginData> pluginDataList, final DataEventTypeEnum eventType) {
WebsocketData<PluginData> websocketData =
new WebsocketData<>(ConfigGroupEnum.PLUGIN.name(), eventType.name(), pluginDataList);
WebsocketCollector.send(GsonUtils.getInstance().toJson(websocketData), eventType);
}
|
@Test
public void testOnPluginChanged() {
String message = "{\"groupType\":\"PLUGIN\",\"eventType\":\"UPDATE\",\"data\":[{\"id\":\"2\",\"name\":\"waf\","
+ "\"config\":\"{\\\\\\\"model\\\\\\\":\\\\\\\"black\\\\\\\"}\",\"role\":\"1\",\"enabled\":true}]}";
MockedStatic.Verification verification = () -> WebsocketCollector.send(message, DataEventTypeEnum.UPDATE);
try (MockedStatic<WebsocketCollector> mockedStatic = mockStatic(WebsocketCollector.class)) {
mockedStatic.when(verification).thenAnswer((Answer<Void>) invocation -> null);
websocketDataChangedListener.onPluginChanged(pluginDataList, DataEventTypeEnum.UPDATE);
mockedStatic.verify(verification);
}
}
|
public final void tag(I input, ScopedSpan span) {
if (input == null) throw new NullPointerException("input == null");
if (span == null) throw new NullPointerException("span == null");
if (span.isNoop()) return;
tag(span, input, span.context());
}
|
@Test void tag_customizer_withContext_doesntParseNoop() {
tag.tag(input, context, NoopSpanCustomizer.INSTANCE);
verifyNoMoreInteractions(parseValue); // parsing is lazy
}
|
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload,
final ConnectionSession connectionSession) {
switch (commandPacketType) {
case COM_QUIT:
return new MySQLComQuitPacket();
case COM_INIT_DB:
return new MySQLComInitDbPacket(payload);
case COM_FIELD_LIST:
return new MySQLComFieldListPacket(payload);
case COM_QUERY:
return new MySQLComQueryPacket(payload);
case COM_STMT_PREPARE:
return new MySQLComStmtPreparePacket(payload);
case COM_STMT_EXECUTE:
MySQLServerPreparedStatement serverPreparedStatement =
connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex()));
return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount());
case COM_STMT_SEND_LONG_DATA:
return new MySQLComStmtSendLongDataPacket(payload);
case COM_STMT_RESET:
return new MySQLComStmtResetPacket(payload);
case COM_STMT_CLOSE:
return new MySQLComStmtClosePacket(payload);
case COM_SET_OPTION:
return new MySQLComSetOptionPacket(payload);
case COM_PING:
return new MySQLComPingPacket();
case COM_RESET_CONNECTION:
return new MySQLComResetConnectionPacket();
default:
return new MySQLUnsupportedCommandPacket(commandPacketType);
}
}
|
@Test
void assertNewInstanceWithComChangeUserPacket() {
assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_CHANGE_USER, payload, connectionSession), instanceOf(MySQLUnsupportedCommandPacket.class));
}
|
@Override
public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) {
return sqlStatementContext instanceof InsertStatementContext && ((InsertStatementContext) sqlStatementContext).containsInsertColumns();
}
|
@Test
void assertIsNotGenerateSQLTokenWithoutInsertColumns() {
assertFalse(new EncryptInsertDerivedColumnsTokenGenerator(mock(EncryptRule.class)).isGenerateSQLToken(mock(InsertStatementContext.class, RETURNS_DEEP_STUBS)));
}
|
@Override
public URL getResource(String name) {
ClassLoadingStrategy loadingStrategy = getClassLoadingStrategy(name);
log.trace("Received request to load resource '{}'", name);
for (ClassLoadingStrategy.Source classLoadingSource : loadingStrategy.getSources()) {
URL url = null;
switch (classLoadingSource) {
case APPLICATION:
url = super.getResource(name);
break;
case PLUGIN:
url = findResource(name);
break;
case DEPENDENCIES:
url = findResourceFromDependencies(name);
break;
}
if (url != null) {
log.trace("Found resource '{}' in {} classpath", name, classLoadingSource);
return url;
} else {
log.trace("Couldn't find resource '{}' in {}", name, classLoadingSource);
}
}
return null;
}
|
@Test
    void parentLastGetExtensionsIndexExistsInParentAndDependencyAndPlugin() throws URISyntaxException, IOException {
URL resource = parentLastPluginClassLoader.getResource(LegacyExtensionFinder.EXTENSIONS_RESOURCE);
assertFirstLine("plugin", resource);
}
|
public static Builder builder() {
return new Builder();
}
|
@Test
public void testJsonSerializeDeserialize() {
// default key created by S3Options.SSECustomerKeyFactory
SSECustomerKey emptyKey = SSECustomerKey.builder().build();
assertThat(jsonSerializeDeserialize(emptyKey)).isEqualToComparingFieldByField(emptyKey);
SSECustomerKey key = SSECustomerKey.builder().key("key").algorithm("algo").md5("md5").build();
assertThat(jsonSerializeDeserialize(key)).isEqualToComparingFieldByField(key);
}
|
public static byte[] gzip(String content, String charset) throws UtilException {
return gzip(StrUtil.bytes(content, charset));
}
|
@Test
public void gzipTest() {
final String data = "我是一个需要压缩的很长很长的字符串";
final byte[] bytes = StrUtil.utf8Bytes(data);
final byte[] gzip = ZipUtil.gzip(bytes);
        // verify the gzip output has the expected length
assertEquals(68, gzip.length);
final byte[] unGzip = ZipUtil.unGzip(gzip);
        // verify the data round-trips back to the original
assertEquals(data, StrUtil.utf8Str(unGzip));
}
|
@Override
public void registerModified(Weapon weapon) {
LOGGER.info("Registering {} for modify in context.", weapon.getName());
register(weapon, UnitActions.MODIFY.getActionValue());
}
|
@Test
    void shouldSaveModifiedWeaponWithoutWritingToDb() {
armsDealer.registerModified(weapon1);
armsDealer.registerModified(weapon2);
assertEquals(2, context.get(UnitActions.MODIFY.getActionValue()).size());
verifyNoMoreInteractions(weaponDatabase);
}
|
@Override
public Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
if (boolean.class == type) {
return resultSet.getBoolean(columnIndex);
}
if (byte.class == type) {
return resultSet.getByte(columnIndex);
}
if (short.class == type) {
return resultSet.getShort(columnIndex);
}
if (int.class == type) {
return resultSet.getInt(columnIndex);
}
if (long.class == type) {
return resultSet.getLong(columnIndex);
}
if (float.class == type) {
return resultSet.getFloat(columnIndex);
}
if (double.class == type) {
return resultSet.getDouble(columnIndex);
}
if (String.class == type) {
return resultSet.getString(columnIndex);
}
if (BigDecimal.class == type) {
return resultSet.getBigDecimal(columnIndex);
}
if (byte[].class == type) {
return resultSet.getBytes(columnIndex);
}
if (Date.class == type) {
return resultSet.getDate(columnIndex);
}
if (Time.class == type) {
return resultSet.getTime(columnIndex);
}
if (Timestamp.class == type) {
return resultSet.getTimestamp(columnIndex);
}
if (Blob.class == type) {
return resultSet.getBlob(columnIndex);
}
if (Clob.class == type) {
return resultSet.getClob(columnIndex);
}
if (Array.class == type) {
return resultSet.getArray(columnIndex);
}
return resultSet.getObject(columnIndex);
}
|
@Test
void assertGetValueByTime() throws SQLException {
ResultSet resultSet = mock(ResultSet.class);
when(resultSet.getTime(1)).thenReturn(new Time(0L));
assertThat(new JDBCStreamQueryResult(resultSet).getValue(1, Time.class), is(new Time(0L)));
}
|
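// A minimal sketch of the primitive dispatch, mirroring the mocked-ResultSet setup above:
// int.class routes to ResultSet#getInt instead of the getObject fallback.
@Test
void assertGetValueByIntSketch() throws SQLException {
    ResultSet resultSet = mock(ResultSet.class);
    when(resultSet.getInt(1)).thenReturn(10);
    assertThat(new JDBCStreamQueryResult(resultSet).getValue(1, int.class), is(10));
}
|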
public byte[] data()
{
if (buf.hasArray()) {
byte[] array = buf.array();
int offset = buf.arrayOffset();
if (offset == 0 && array.length == size) {
// If the backing array is exactly what we need, return it without copy.
return array;
}
else {
// Else use it to make an efficient copy.
return Arrays.copyOfRange(array, offset, offset + size);
}
}
else {
// No backing array -> use ByteBuffer#get().
byte[] array = new byte[size];
ByteBuffer dup = buf.duplicate();
dup.position(0);
dup.get(array);
return array;
}
}
|
@Test
public void testDataArrayExtendsFurther()
{
byte[] data = new byte[]{10, 11, 12};
ByteBuffer buffer = ByteBuffer.wrap(data, 0, 2).slice();
Msg msg = new Msg(buffer);
assertThat(msg.data(), is(new byte[]{10, 11}));
}
|
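// A sketch of the copy branch (assuming Msg takes its size from the buffer): a slice with
// a non-zero arrayOffset cannot expose its backing array directly, so data() copies the range.
@Test
public void testDataArrayWithNonZeroOffset()
{
    byte[] data = new byte[]{10, 11, 12, 13};
    ByteBuffer buffer = ByteBuffer.wrap(data, 1, 2).slice();
    Msg msg = new Msg(buffer);
    assertThat(msg.data(), is(new byte[]{11, 12}));
}
|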
@Override
public int getColumnCount() {
return _columnNamesArray.size();
}
|
@Test
public void testGetColumnCount() {
// Run the test
final int result = _resultTableResultSetUnderTest.getColumnCount();
// Verify the results
assertEquals(2, result);
}
|
@Override
public String mask(final Object plainValue) {
String result = null == plainValue ? null : String.valueOf(plainValue);
if (Strings.isNullOrEmpty(result)) {
return result;
}
char[] chars = result.toCharArray();
for (int i = 0; i < chars.length; i++) {
char c = chars[i];
if ('A' <= c && c <= 'Z') {
chars[i] = uppercaseLetterCodes.get(random.nextInt(uppercaseLetterCodes.size()));
} else if ('a' <= c && c <= 'z') {
chars[i] = lowercaseLetterCodes.get(random.nextInt(lowercaseLetterCodes.size()));
} else if ('0' <= c && c <= '9') {
chars[i] = digitalCodes.get(random.nextInt(digitalCodes.size()));
} else {
chars[i] = specialCodes.get(random.nextInt(specialCodes.size()));
}
}
return new String(chars);
}
|
@Test
void assertMask() {
GenericTableRandomReplaceAlgorithm maskAlgorithm = (GenericTableRandomReplaceAlgorithm) TypedSPILoader.getService(MaskAlgorithm.class, "GENERIC_TABLE_RANDOM_REPLACE",
PropertiesBuilder.build(new Property("uppercase-letter-codes", "A,B,C,D"),
new Property("lowercase-letter-codes", "a,b,c,d"), new Property("digital-codes", "1,2,3,4"), new Property("special-codes", "~!@#")));
assertThat(maskAlgorithm.mask(""), is(""));
assertThat(maskAlgorithm.mask("Ab1!").charAt(0), anyOf(is('A'), is('B'), is('C'), is('D')));
assertThat(maskAlgorithm.mask("Ab1!").charAt(1), anyOf(is('a'), is('b'), is('c'), is('d')));
assertThat(maskAlgorithm.mask("Ab1!").charAt(2), anyOf(is('1'), is('2'), is('3'), is('4')));
assertThat(maskAlgorithm.mask("Ab1!").charAt(3), anyOf(is('~'), is('!'), is('@'), is('#')));
}
|
public static boolean isIpAddress(String address) {
try {
getAddressMatcher(address);
return true;
} catch (InvalidAddressException e) {
return false;
}
}
|
@Test
public void testIsIpAddress() {
assertTrue(AddressUtil.isIpAddress("10.10.10.10"));
assertTrue(AddressUtil.isIpAddress("111.12-66.123.*"));
assertTrue(AddressUtil.isIpAddress("111-255.12-66.123.*"));
assertTrue(AddressUtil.isIpAddress("255.255.123.*"));
assertTrue(AddressUtil.isIpAddress("255.11-255.123.0"));
assertFalse(AddressUtil.isIpAddress("255.11-256.123.0"));
assertFalse(AddressUtil.isIpAddress("111.12-66-.123.*"));
assertFalse(AddressUtil.isIpAddress("111.12*66-.123.-*"));
assertFalse(AddressUtil.isIpAddress("as11d.897.hazelcast.com"));
assertFalse(AddressUtil.isIpAddress("192.111.10.com"));
assertFalse(AddressUtil.isIpAddress("192.111.10.999"));
assertTrue(AddressUtil.isIpAddress("::1"));
assertTrue(AddressUtil.isIpAddress("0:0:0:0:0:0:0:1"));
assertTrue(AddressUtil.isIpAddress("2001:db8:85a3:0:0:8a2e:370:7334"));
assertTrue(AddressUtil.isIpAddress("2001::370:7334"));
assertTrue(AddressUtil.isIpAddress("fe80::62c5:0:fe05:480a%en0"));
assertTrue(AddressUtil.isIpAddress("fe80::62c5:0:fe05:480a%en0"));
assertTrue(AddressUtil.isIpAddress("2001:db8:85a3:*:0:8a2e:370:7334"));
assertTrue(AddressUtil.isIpAddress("fe80::62c5:0-ffff:fe05:480a"));
assertTrue(AddressUtil.isIpAddress("fe80::62c5:*:fe05:480a"));
assertFalse(AddressUtil.isIpAddress("2001:acdb8:85a3:0:0:8a2e:370:7334"));
assertFalse(AddressUtil.isIpAddress("2001::370::7334"));
assertFalse(AddressUtil.isIpAddress("2001:370::7334.155"));
assertFalse(AddressUtil.isIpAddress("2001:**:85a3:*:0:8a2e:370:7334"));
assertFalse(AddressUtil.isIpAddress("fe80::62c5:0-ffff:fe05-:480a"));
assertFalse(AddressUtil.isIpAddress("fe80::62c5:*:fe05-fffddd:480a"));
assertFalse(AddressUtil.isIpAddress("fe80::62c5:*:fe05-ffxd:480a"));
}
|
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
final ConfiguredStatement<T> statement
) {
if (!(statement.getStatement() instanceof CreateSource)
&& !(statement.getStatement() instanceof CreateAsSelect)) {
return statement;
}
try {
if (statement.getStatement() instanceof CreateSource) {
final ConfiguredStatement<CreateSource> createStatement =
(ConfiguredStatement<CreateSource>) statement;
return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
} else {
final ConfiguredStatement<CreateAsSelect> createStatement =
(ConfiguredStatement<CreateAsSelect>) statement;
return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
createStatement);
}
} catch (final KsqlStatementException e) {
throw e;
} catch (final KsqlException e) {
throw new KsqlStatementException(
ErrorMessageUtil.buildErrorMessage(e),
statement.getMaskedStatementText(),
e.getCause());
}
}
|
@Test
    public void shouldThrowIfCtasValueFormatDoesNotSupportInference() {
// Given:
givenFormatsAndProps(null, "kafka",
ImmutableMap.of("VALUE_SCHEMA_ID", new IntegerLiteral(42)));
givenDDLSchemaAndFormats(LOGICAL_SCHEMA, "kafka", "delimited",
SerdeFeature.UNWRAP_SINGLES, SerdeFeature.UNWRAP_SINGLES);
// When:
final Exception e = assertThrows(
KsqlException.class,
() -> injector.inject(ctasStatement)
);
// Then:
assertThat(e.getMessage(),
containsString("VALUE_FORMAT should support schema inference when "
+ "VALUE_SCHEMA_ID is provided. Current format is DELIMITED."));
}
|
@SuppressWarnings("unchecked")
public void replaceAllInt(final IntObjectToObjectFunction<? super V, ? extends V> function)
{
requireNonNull(function);
final int[] keys = this.keys;
final Object[] values = this.values;
@DoNotSub final int length = values.length;
@DoNotSub int remaining = size;
for (@DoNotSub int index = 0; remaining > 0 && index < length; index++)
{
final Object oldValue = values[index];
if (null != oldValue)
{
final V newValue = function.apply(keys[index], (V)oldValue);
requireNonNull(newValue, "null values are not supported");
values[index] = newValue;
--remaining;
}
}
}
|
@Test
void replaceAllIntThrowsNullPointerExceptionIfFunctionIsNull()
{
assertThrowsExactly(NullPointerException.class, () -> cache.replaceAllInt(null));
}
|
@Override
public boolean isFinished()
{
return finishing && outputPage == null;
}
|
@Test(dataProvider = "hashEnabledValues", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node user memory limit of.*")
public void testMemoryLimit(boolean hashEnabled)
{
DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(100, BYTE))
.addPipelineContext(0, true, true, false)
.addDriverContext();
OperatorContext operatorContext = driverContext.addOperatorContext(0, new PlanNodeId("test"), ValuesOperator.class.getSimpleName());
List<Type> buildTypes = ImmutableList.of(BIGINT);
RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), buildTypes);
Operator buildOperator = new ValuesOperator(operatorContext, rowPagesBuilder
.addSequencePage(10000, 20)
.build());
SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory(
1,
new PlanNodeId("test"),
buildTypes.get(0),
0,
rowPagesBuilder.getHashChannel(),
10,
new JoinCompiler(createTestMetadataManager()));
Operator setBuilderOperator = setBuilderOperatorFactory.createOperator(driverContext);
Driver driver = Driver.createDriver(driverContext, buildOperator, setBuilderOperator);
while (!driver.isFinished()) {
driver.process();
}
}
|
@Override
public ScheduleResult schedule()
{
List<RemoteTask> newTasks = IntStream.range(0, partitionToNode.size())
.mapToObj(partition -> taskScheduler.scheduleTask(partitionToNode.get(partition), partition))
.filter(Optional::isPresent)
.map(Optional::get)
.collect(toImmutableList());
// no need to call stage.transitionToSchedulingSplits() since there is no table splits
return ScheduleResult.nonBlocked(true, newTasks, 0);
}
|
@Test
public void testMultipleNodes()
{
FixedCountScheduler nodeScheduler = new FixedCountScheduler(
(node, partition) -> Optional.of(taskFactory.createTableScanTask(
new TaskId("test", 1, 0, 1, 0),
node, ImmutableList.of(),
new NodeTaskMap.NodeStatsTracker(delta -> {}, delta -> {}, (age, delta) -> {}))),
generateRandomNodes(5));
ScheduleResult result = nodeScheduler.schedule();
assertTrue(result.isFinished());
assertTrue(result.getBlocked().isDone());
assertEquals(result.getNewTasks().size(), 5);
assertEquals(result.getNewTasks().stream().map(RemoteTask::getNodeId).collect(toImmutableSet()).size(), 5);
}
|
public static UVariableDecl create(
CharSequence identifier, UExpression type, @Nullable UExpression initializer) {
return new AutoValue_UVariableDecl(StringName.of(identifier), type, initializer);
}
|
@Test
public void equality() {
new EqualsTester()
.addEqualityGroup(UVariableDecl.create("foo", UClassIdent.create("java.lang.String"), null))
.addEqualityGroup(
UVariableDecl.create(
"foo", UClassIdent.create("java.lang.String"), ULiteral.stringLit("bar")))
.addEqualityGroup(
UVariableDecl.create("foo", UClassIdent.create("java.lang.Integer"), null))
.addEqualityGroup(UVariableDecl.create("baz", UClassIdent.create("java.lang.String"), null))
.testEquals();
}
|
@Override
public KsMaterializedQueryResult<WindowedRow> get(
final GenericKey key,
final int partition,
final Range<Instant> windowStart,
final Range<Instant> windowEnd,
final Optional<Position> position
) {
try {
final ReadOnlySessionStore<GenericKey, GenericRow> store = stateStore
.store(QueryableStoreTypes.sessionStore(), partition);
return KsMaterializedQueryResult.rowIterator(
findSession(store, key, windowStart, windowEnd).iterator());
} catch (final Exception e) {
throw new MaterializationException("Failed to get value from materialized table", e);
}
}
|
@Test
public void shouldFetchWithCorrectParams() {
// When:
table.get(A_KEY, PARTITION, WINDOW_START_BOUNDS, WINDOW_END_BOUNDS);
// Then:
verify(cacheBypassFetcher).fetch(sessionStore, A_KEY);
}
|
public String readFile(final String inputFile) throws IOException {
    return Files.readString(Paths.get(inputFile));
}
|
@Test
public void testReadFile() throws IOException {
String content = "x y z";
Path path = Paths.get(temporaryDirectory.getPath(), FOOBAR);
File letters = path.toFile();
Files.write(letters.toPath(), Collections.singletonList(content));
String result = fileUtil.readFile(letters.getPath());
assertAll(
() -> assertTrue(Files.exists(letters.toPath())),
() -> assertTrue(result.length() > 0),
() -> assertTrue(result.startsWith("x")),
() -> assertTrue(result.endsWith("z\n")));
}
|
public static void setAttributeValue(Document document, String containerNodeName, String attributeName, String attributeValue) {
asStream(document.getElementsByTagName(containerNodeName))
.map(Node::getAttributes)
.map(attributes -> attributes.getNamedItem(attributeName))
.filter(Objects::nonNull)
.forEach(attributeNode -> attributeNode.setNodeValue(attributeValue));
}
|
@Test
public void setAttributeValue() throws Exception {
final String newValue = "NEW_VALUE";
Document document = DOMParserUtil.getDocument(XML);
DOMParserUtil.setAttributeValue(document, MAIN_NODE, MAIN_ATTRIBUTE_NAME, newValue);
Map<Node, String> retrieved = DOMParserUtil.getAttributeValues(document, MAIN_NODE, MAIN_ATTRIBUTE_NAME);
assertThat(newValue).isEqualTo(retrieved.values().toArray()[0]);
DOMParserUtil.setAttributeValue(document, MAIN_NODE, NOT_EXISTING, newValue);
retrieved = DOMParserUtil.getAttributeValues(document, MAIN_NODE, NOT_EXISTING);
assertThat(retrieved).isEmpty();
DOMParserUtil.setAttributeValue(document, CHILD_NODE, CHILD_ATTRIBUTE_NAME, newValue);
retrieved = DOMParserUtil.getAttributeValues(document, CHILD_NODE, CHILD_ATTRIBUTE_NAME);
assertThat(retrieved).hasSize(2);
retrieved.values().forEach(attributeValue -> assertThat(attributeValue).isEqualTo(newValue));
}
|
@Override
public String getScheme() {
return scheme;
}
|
@Test
public void testGetScheme() {
// Tests s3 paths.
assertEquals("s3", S3ResourceId.fromUri("s3://my_bucket/tmp dir/").getScheme());
// Tests bucket with no ending '/'.
assertEquals("s3", S3ResourceId.fromUri("s3://my_bucket").getScheme());
}
|
public static ByteBuf copyMedium(int value) {
ByteBuf buf = buffer(3);
buf.writeMedium(value);
return buf;
}
|
@Test
public void testWrapSingleMedium() {
ByteBuf buffer = copyMedium(42);
assertEquals(3, buffer.capacity());
assertEquals(42, buffer.readMedium());
assertFalse(buffer.isReadable());
buffer.release();
}
|
public void sanitizeEnv(Map<String, String> environment, Path pwd,
List<Path> appDirs, List<String> userLocalDirs, List<String>
containerLogDirs, Map<Path, List<String>> resources,
Path nmPrivateClasspathJarDir,
Set<String> nmVars) throws IOException {
    // Based on discussion in YARN-7654, for ENTRY_POINT-enabled
    // Docker containers we forward user-defined environment variables
    // without the node manager environment variables, which is why
    // the rest of sanitizeEnv is skipped.
boolean overrideDisable = Boolean.parseBoolean(
environment.get(
Environment.
YARN_CONTAINER_RUNTIME_DOCKER_RUN_OVERRIDE_DISABLE.
name()));
if (overrideDisable) {
environment.remove("WORK_DIR");
return;
}
    // Non-modifiable environment variables
addToEnvMap(environment, nmVars, Environment.CONTAINER_ID.name(),
container.getContainerId().toString());
addToEnvMap(environment, nmVars, Environment.NM_PORT.name(),
String.valueOf(this.context.getNodeId().getPort()));
addToEnvMap(environment, nmVars, Environment.NM_HOST.name(),
this.context.getNodeId().getHost());
addToEnvMap(environment, nmVars, Environment.NM_HTTP_PORT.name(),
String.valueOf(this.context.getHttpPort()));
addToEnvMap(environment, nmVars, Environment.LOCAL_DIRS.name(),
StringUtils.join(",", appDirs));
addToEnvMap(environment, nmVars, Environment.LOCAL_USER_DIRS.name(),
StringUtils.join(",", userLocalDirs));
addToEnvMap(environment, nmVars, Environment.LOG_DIRS.name(),
StringUtils.join(",", containerLogDirs));
addToEnvMap(environment, nmVars, Environment.USER.name(),
container.getUser());
addToEnvMap(environment, nmVars, Environment.LOGNAME.name(),
container.getUser());
addToEnvMap(environment, nmVars, Environment.HOME.name(),
conf.get(
YarnConfiguration.NM_USER_HOME_DIR,
YarnConfiguration.DEFAULT_NM_USER_HOME_DIR
)
);
addToEnvMap(environment, nmVars, Environment.PWD.name(), pwd.toString());
addToEnvMap(environment, nmVars, Environment.LOCALIZATION_COUNTERS.name(),
container.localizationCountersAsString());
if (!Shell.WINDOWS) {
addToEnvMap(environment, nmVars, "JVM_PID", "$$");
}
// TODO: Remove Windows check and use this approach on all platforms after
// additional testing. See YARN-358.
if (Shell.WINDOWS) {
sanitizeWindowsEnv(environment, pwd,
resources, nmPrivateClasspathJarDir);
}
// put AuxiliaryService data to environment
for (Map.Entry<String, ByteBuffer> meta : containerManager
.getAuxServiceMetaData().entrySet()) {
AuxiliaryServiceHelper.setServiceDataIntoEnv(
meta.getKey(), meta.getValue(), environment);
nmVars.add(AuxiliaryServiceHelper.getPrefixServiceName(meta.getKey()));
}
}
|
@Test
public void testPrependDistcache() throws Exception {
// Test is only relevant on Windows
assumeWindows();
ContainerLaunchContext containerLaunchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class);
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
Map<String, String> userSetEnv = new HashMap<String, String>();
userSetEnv.put(Environment.CONTAINER_ID.name(), "user_set_container_id");
userSetEnv.put(Environment.NM_HOST.name(), "user_set_NM_HOST");
userSetEnv.put(Environment.NM_PORT.name(), "user_set_NM_PORT");
userSetEnv.put(Environment.NM_HTTP_PORT.name(), "user_set_NM_HTTP_PORT");
userSetEnv.put(Environment.LOCAL_DIRS.name(), "user_set_LOCAL_DIR");
userSetEnv.put(Environment.USER.key(), "user_set_" +
Environment.USER.key());
userSetEnv.put(Environment.LOGNAME.name(), "user_set_LOGNAME");
userSetEnv.put(Environment.PWD.name(), "user_set_PWD");
userSetEnv.put(Environment.HOME.name(), "user_set_HOME");
userSetEnv.put(Environment.CLASSPATH.name(), "APATH");
containerLaunchContext.setEnvironment(userSetEnv);
Container container = mock(Container.class);
when(container.getContainerId()).thenReturn(cId);
when(container.getLaunchContext()).thenReturn(containerLaunchContext);
when(container.localizationCountersAsString()).thenReturn("1,2,3,4,5");
Dispatcher dispatcher = mock(Dispatcher.class);
EventHandler<Event> eventHandler = new EventHandler<Event>() {
public void handle(Event event) {
Assert.assertTrue(event instanceof ContainerExitEvent);
ContainerExitEvent exitEvent = (ContainerExitEvent) event;
Assert.assertEquals(ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
exitEvent.getType());
}
};
when(dispatcher.getEventHandler()).thenReturn(eventHandler);
Configuration conf = new Configuration();
ContainerLaunch launch = new ContainerLaunch(distContext, conf,
dispatcher, exec, null, container, dirsHandler, containerManager);
String testDir = System.getProperty("test.build.data",
"target/test-dir");
Path pwd = new Path(testDir);
List<Path> appDirs = new ArrayList<Path>();
List<String> userLocalDirs = new ArrayList<>();
List<String> containerLogs = new ArrayList<String>();
Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
Path userjar = new Path("user.jar");
List<String> lpaths = new ArrayList<String>();
lpaths.add("userjarlink.jar");
resources.put(userjar, lpaths);
Path nmp = new Path(testDir);
Set<String> nmEnvTrack = new LinkedHashSet<>();
launch.sanitizeEnv(userSetEnv, pwd, appDirs, userLocalDirs, containerLogs,
resources, nmp, nmEnvTrack);
List<String> result =
getJarManifestClasspath(userSetEnv.get(Environment.CLASSPATH.name()));
Assert.assertTrue(result.size() > 1);
Assert.assertTrue(
result.get(result.size() - 1).endsWith("userjarlink.jar"));
//Then, with user classpath first
userSetEnv.put(Environment.CLASSPATH_PREPEND_DISTCACHE.name(), "true");
cId = ContainerId.newContainerId(appAttemptId, 1);
when(container.getContainerId()).thenReturn(cId);
launch = new ContainerLaunch(distContext, conf,
dispatcher, exec, null, container, dirsHandler, containerManager);
launch.sanitizeEnv(userSetEnv, pwd, appDirs, userLocalDirs, containerLogs,
resources, nmp, nmEnvTrack);
result =
getJarManifestClasspath(userSetEnv.get(Environment.CLASSPATH.name()));
Assert.assertTrue(result.size() > 1);
Assert.assertTrue(
result.get(0).endsWith("userjarlink.jar"));
}
|
public static long currentTimeMillis() {
// When an exception occurs in the Ticker mechanism, fall back.
if (isFallback) {
return System.currentTimeMillis();
}
if (!isTickerAlive) {
try {
startTicker();
} catch (Exception e) {
isFallback = true;
}
}
return currentTimeMillis;
}
|
@Test
void testCurrentTimeMillis() {
assertTrue(0 < TimeUtils.currentTimeMillis());
}
|
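// A minimal sketch of the cached-clock pattern behind currentTimeMillis, assuming a daemon
// thread periodically refreshes a volatile cache; the names here (CachedClock, the 1 ms
// refresh interval) are hypothetical and not the class's actual internals.
static final class CachedClock {
    private static volatile long cachedMillis = System.currentTimeMillis();
    static {
        Thread ticker = new Thread(() -> {
            while (!Thread.currentThread().isInterrupted()) {
                cachedMillis = System.currentTimeMillis();
                try {
                    Thread.sleep(1L);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }, "cached-clock-ticker");
        ticker.setDaemon(true);
        ticker.start();
    }
    static long currentTimeMillis() {
        return cachedMillis;
    }
}
|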
@Override
public UnboundedReader<KV<byte[], byte[]>> createReader(
PipelineOptions options, @Nullable SyntheticRecordsCheckpoint checkpoint) {
if (checkpoint == null) {
return new SyntheticUnboundedReader(this, this.startOffset);
} else {
return new SyntheticUnboundedReader(this, checkpoint.getCurrentCheckMarkPosition());
}
}
|
@Test
public void lastElementShouldBeInclusive() throws IOException {
int endPosition = 2;
checkpoint = new SyntheticRecordsCheckpoint(0);
UnboundedSource.UnboundedReader<KV<byte[], byte[]>> reader =
source.createReader(pipeline.getOptions(), checkpoint);
reader.start();
reader.advance();
KV<byte[], byte[]> currentElement = reader.getCurrent();
KV<byte[], byte[]> expectedElement = sourceOptions.genRecord(endPosition).kv;
assertEquals(expectedElement, currentElement);
}
|
public String getDomainObjectName() {
if (stringHasValue(domainObjectName)) {
return domainObjectName;
}
String finalDomainObjectName;
if (stringHasValue(runtimeTableName)) {
finalDomainObjectName = JavaBeansUtil.getCamelCaseString(runtimeTableName, true);
} else {
finalDomainObjectName = JavaBeansUtil.getCamelCaseString(introspectedTableName, true);
}
if (domainObjectRenamingRule != null) {
Pattern pattern = Pattern.compile(domainObjectRenamingRule.getSearchString());
String replaceString = domainObjectRenamingRule.getReplaceString();
replaceString = replaceString == null ? "" : replaceString; //$NON-NLS-1$
Matcher matcher = pattern.matcher(finalDomainObjectName);
finalDomainObjectName = JavaBeansUtil.getFirstCharacterUppercase(matcher.replaceAll(replaceString));
}
return finalDomainObjectName;
}
|
@Test
void testNormalCaseWithPrefix() {
FullyQualifiedTable fqt = new FullyQualifiedTable(null, "myschema", "sys_mytable", null, null, false, null, null, null, false, null, null);
assertThat(fqt.getDomainObjectName()).isEqualTo("SysMytable");
}
|
public IsJson(Matcher<? super ReadContext> jsonMatcher) {
this.jsonMatcher = jsonMatcher;
}
|
@Test
public void shouldNotMatchInvalidJson() {
assertThat(INVALID_JSON, not(isJson()));
assertThat(new Object(), not(isJson()));
assertThat(new Object[]{}, not(isJson()));
assertThat("hi there", not(isJson()));
assertThat(new Integer(42), not(isJson()));
assertThat(Boolean.TRUE, not(isJson()));
assertThat(false, not(isJson()));
assertThat(null, not(isJson()));
}
|
public DropTypeCommand create(final DropType statement) {
final String typeName = statement.getTypeName();
final boolean ifExists = statement.getIfExists();
if (!ifExists && !metaStore.resolveType(typeName).isPresent()) {
throw new KsqlException("Type " + typeName + " does not exist.");
}
return new DropTypeCommand(typeName);
}
|
@Test
    public void shouldNotFailCreateTypeIfTypeDoesNotExistAndIfExistsSet() {
// Given:
final DropType dropType = new DropType(Optional.empty(), NOT_EXISTING_TYPE, true);
// When:
final DropTypeCommand cmd = factory.create(dropType);
// Then:
assertThat(cmd.getTypeName(), equalTo(NOT_EXISTING_TYPE));
}
|
@Override
public void run() {
if (processor != null) {
processor.execute();
} else {
if (!beforeHook()) {
logger.info("before-feature hook returned [false], aborting: {}", this);
} else {
scenarios.forEachRemaining(this::processScenario);
}
afterFeature();
}
}
|
@Test
void testLowerCase() {
run("lower-case.feature");
}
|
@Override
public String rpcType() {
return RpcTypeEnum.DUBBO.getName();
}
|
@Test
public void testRpcType() {
assertEquals(RpcTypeEnum.DUBBO.getName(), shenyuClientRegisterDubboService.rpcType());
}
|
public static LockTime of(long rawValue) {
if (rawValue < 0)
throw new IllegalArgumentException("illegal negative lock time: " + rawValue);
return rawValue < LockTime.THRESHOLD
? new HeightLock(rawValue)
: new TimeLock(rawValue);
}
|
@Test(expected = IllegalArgumentException.class)
@Parameters(method = "invalidValueVectors")
public void invalidValues(long value) {
LockTime lockTime = LockTime.of(value);
}
|
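// A minimal sketch of the THRESHOLD split, assuming HeightLock and TimeLock are the
// subtypes used above: raw values below the threshold are block heights, the rest are timestamps.
@Test
public void thresholdSplit() {
    assertTrue(LockTime.of(LockTime.THRESHOLD - 1) instanceof HeightLock);
    assertTrue(LockTime.of(LockTime.THRESHOLD) instanceof TimeLock);
}
|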
@ApiOperation(value = "List Deployments", tags = { "Deployment" }, nickname="listDeployments")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", dataType = "string", value = "Only return deployments with the given name.", paramType = "query"),
@ApiImplicitParam(name = "nameLike", dataType = "string", value = "Only return deployments with a name like the given name.", paramType = "query"),
@ApiImplicitParam(name = "category", dataType = "string", value = "Only return deployments with the given category.", paramType = "query"),
@ApiImplicitParam(name = "categoryNotEquals", dataType = "string", value = "Only return deployments which do not have the given category.", paramType = "query"),
@ApiImplicitParam(name = "parentDeploymentId", dataType = "string", value = "Only return deployments with the given parent deployment id.", paramType = "query"),
@ApiImplicitParam(name = "parentDeploymentIdLike", dataType = "string", value = "Only return deployments with a parent deployment id like the given value.", paramType = "query"),
@ApiImplicitParam(name = "tenantId", dataType = "string", value = "Only return deployments with the given tenantId.", paramType = "query"),
@ApiImplicitParam(name = "tenantIdLike", dataType = "string", value = "Only return deployments with a tenantId like the given value.", paramType = "query"),
@ApiImplicitParam(name = "withoutTenantId", dataType = "boolean", value = "If true, only returns deployments without a tenantId set. If false, the withoutTenantId parameter is ignored.", paramType = "query"),
@ApiImplicitParam(name = "sort", dataType = "string", value = "Property to sort on, to be used together with the order.", allowableValues = "id,name,deployTime,tenantId", paramType = "query"),
})
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Indicates the request was successful."),
})
@GetMapping(value = "/repository/deployments", produces = "application/json")
public DataResponse<DeploymentResponse> getDeployments(@ApiParam(hidden = true) @RequestParam Map<String, String> allRequestParams) {
DeploymentQuery deploymentQuery = repositoryService.createDeploymentQuery();
// Apply filters
if (allRequestParams.containsKey("name")) {
deploymentQuery.deploymentName(allRequestParams.get("name"));
}
if (allRequestParams.containsKey("nameLike")) {
deploymentQuery.deploymentNameLike(allRequestParams.get("nameLike"));
}
if (allRequestParams.containsKey("category")) {
deploymentQuery.deploymentCategory(allRequestParams.get("category"));
}
if (allRequestParams.containsKey("categoryNotEquals")) {
deploymentQuery.deploymentCategoryNotEquals(allRequestParams.get("categoryNotEquals"));
}
if (allRequestParams.containsKey("parentDeploymentId")) {
deploymentQuery.parentDeploymentId(allRequestParams.get("parentDeploymentId"));
}
if (allRequestParams.containsKey("parentDeploymentIdLike")) {
deploymentQuery.parentDeploymentIdLike(allRequestParams.get("parentDeploymentIdLike"));
}
if (allRequestParams.containsKey("tenantId")) {
deploymentQuery.deploymentTenantId(allRequestParams.get("tenantId"));
}
if (allRequestParams.containsKey("tenantIdLike")) {
deploymentQuery.deploymentTenantIdLike(allRequestParams.get("tenantIdLike"));
}
if (allRequestParams.containsKey("withoutTenantId")) {
boolean withoutTenantId = Boolean.parseBoolean(allRequestParams.get("withoutTenantId"));
if (withoutTenantId) {
deploymentQuery.deploymentWithoutTenantId();
}
}
if (restApiInterceptor != null) {
restApiInterceptor.accessDeploymentsWithQuery(deploymentQuery);
}
return paginateList(allRequestParams, deploymentQuery, "id", allowedSortProperties, restResponseFactory::createDeploymentResponseList);
}
|
@Test
public void testGetDeployments() throws Exception {
try {
// Alter time to ensure different deployTimes
Calendar yesterday = Calendar.getInstance();
yesterday.add(Calendar.DAY_OF_MONTH, -1);
processEngineConfiguration.getClock().setCurrentTime(yesterday.getTime());
Deployment firstDeployment = repositoryService.createDeployment().name("Deployment 1").category("DEF").addClasspathResource("org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml")
.deploy();
processEngineConfiguration.getClock().setCurrentTime(Calendar.getInstance().getTime());
Deployment secondDeployment = repositoryService.createDeployment().name("Deployment 2").category("ABC")
.addClasspathResource("org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml").tenantId("myTenant").deploy();
String baseUrl = RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT_COLLECTION);
assertResultsPresentInDataResponse(baseUrl, firstDeployment.getId(), secondDeployment.getId());
// Check name filtering
String url = baseUrl + "?name=" + encode("Deployment 1");
assertResultsPresentInDataResponse(url, firstDeployment.getId());
// Check name-like filtering
url = baseUrl + "?nameLike=" + encode("%ment 2");
assertResultsPresentInDataResponse(url, secondDeployment.getId());
// Check category filtering
url = baseUrl + "?category=DEF";
assertResultsPresentInDataResponse(url, firstDeployment.getId());
// Check category-not-equals filtering
url = baseUrl + "?categoryNotEquals=DEF";
assertResultsPresentInDataResponse(url, secondDeployment.getId());
// Check tenantId filtering
url = baseUrl + "?tenantId=myTenant";
assertResultsPresentInDataResponse(url, secondDeployment.getId());
// Check tenantId filtering
url = baseUrl + "?tenantId=unexistingTenant";
assertResultsPresentInDataResponse(url);
// Check tenantId like filtering
url = baseUrl + "?tenantIdLike=" + encode("%enant");
assertResultsPresentInDataResponse(url, secondDeployment.getId());
// Check without tenantId filtering
url = baseUrl + "?withoutTenantId=true";
assertResultsPresentInDataResponse(url, firstDeployment.getId());
} finally {
// Always cleanup any created deployments, even if the test failed
List<Deployment> deployments = repositoryService.createDeploymentQuery().list();
for (Deployment deployment : deployments) {
repositoryService.deleteDeployment(deployment.getId(), true);
}
}
}
|
@SafeVarargs
public static <E> Set<E> union(final Supplier<Set<E>> constructor, final Set<E>... set) {
final Set<E> result = constructor.get();
for (final Set<E> s : set) {
result.addAll(s);
}
return result;
}
|
@Test
public void testUnion() {
final Set<String> oneSet = mkSet("a", "b", "c");
final Set<String> anotherSet = mkSet("c", "d", "e");
final Set<String> union = union(TreeSet::new, oneSet, anotherSet);
assertEquals(mkSet("a", "b", "c", "d", "e"), union);
assertEquals(TreeSet.class, union.getClass());
}
|
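// A short companion sketch (hypothetical extra case): the Supplier decides the concrete
// Set, so callers choose the iteration and equality semantics of the result explicitly.
@Test
public void testUnionWithHashSet() {
    final Set<String> union = union(HashSet::new, mkSet("a"), mkSet("b"));
    assertEquals(mkSet("a", "b"), union);
    assertEquals(HashSet.class, union.getClass());
}
|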
public static String sanitizeUri(String uri) {
// use xxxxx as replacement as that works well with JMX also
String sanitized = uri;
if (uri != null) {
sanitized = ALL_SECRETS.matcher(sanitized).replaceAll("$1=xxxxxx");
sanitized = USERINFO_PASSWORD.matcher(sanitized).replaceFirst("$1xxxxxx$3");
}
return sanitized;
}
|
@Test
public void testSanitizeUriWithUserInfo() {
String uri = "jt400://GEORGE:HARRISON@LIVERPOOL/QSYS.LIB/BEATLES.LIB/PENNYLANE.DTAQ";
String expected = "jt400://GEORGE:xxxxxx@LIVERPOOL/QSYS.LIB/BEATLES.LIB/PENNYLANE.DTAQ";
assertEquals(expected, URISupport.sanitizeUri(uri));
}
|
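// A sketch of the query-parameter branch, assuming the ALL_SECRETS pattern matches
// secret-like option keys such as "password" (as Camel's URISupport does).
@Test
public void testSanitizeUriWithQueryPassword() {
    String uri = "ftp://ftpserver/dir?username=george&password=harrison";
    String expected = "ftp://ftpserver/dir?username=george&password=xxxxxx";
    assertEquals(expected, URISupport.sanitizeUri(uri));
}
|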
static com.google.cloud.datacatalog.v1beta1.Schema toDataCatalog(Schema schema) {
com.google.cloud.datacatalog.v1beta1.Schema.Builder schemaBuilder =
com.google.cloud.datacatalog.v1beta1.Schema.newBuilder();
for (Schema.Field field : schema.getFields()) {
schemaBuilder.addColumns(fromBeamField(field));
}
return schemaBuilder.build();
}
|
@Test
public void testToDataCatalog() {
assertEquals(TEST_DC_SCHEMA, SchemaUtils.toDataCatalog(TEST_SCHEMA));
}
|
public Set<String> names() {
return Collections.unmodifiableSet(configKeys.keySet());
}
|
@Test
public void testNames() {
final ConfigDef configDef = new ConfigDef()
.define("a", Type.STRING, Importance.LOW, "docs")
.define("b", Type.STRING, Importance.LOW, "docs");
Set<String> names = configDef.names();
assertEquals(new HashSet<>(Arrays.asList("a", "b")), names);
// should be unmodifiable
try {
names.add("new");
fail();
} catch (UnsupportedOperationException e) {
// expected
}
}
|
@Override
public Path relativePathFromScmRoot(Path path) {
RepositoryBuilder builder = getVerifiedRepositoryBuilder(path);
return builder.getGitDir().toPath().getParent().relativize(path);
}
|
@Test
public void relativePathFromScmRoot_should_return_relative_path_for_file_in_project_subdir() throws IOException {
Path relpath = Paths.get("sub/dir/to/somefile.xoo");
Path path = worktree.resolve(relpath);
Files.createDirectories(path.getParent());
Files.createFile(path);
assertThat(newGitScmProvider().relativePathFromScmRoot(path)).isEqualTo(relpath);
}
|
@Override
public List<ConfigKeyInfo> connectorPluginConfig(String pluginName) {
Plugins p = plugins();
Class<?> pluginClass;
try {
pluginClass = p.pluginClass(pluginName);
} catch (ClassNotFoundException cnfe) {
throw new NotFoundException("Unknown plugin " + pluginName + ".");
}
try (LoaderSwap loaderSwap = p.withClassLoader(pluginClass.getClassLoader())) {
Object plugin = p.newPlugin(pluginName);
// Contains definitions coming from Connect framework
ConfigDef baseConfigDefs = null;
// Contains definitions specifically declared on the plugin
ConfigDef pluginConfigDefs;
if (plugin instanceof SinkConnector) {
baseConfigDefs = SinkConnectorConfig.configDef();
pluginConfigDefs = ((SinkConnector) plugin).config();
} else if (plugin instanceof SourceConnector) {
baseConfigDefs = SourceConnectorConfig.configDef();
pluginConfigDefs = ((SourceConnector) plugin).config();
} else if (plugin instanceof Converter) {
pluginConfigDefs = ((Converter) plugin).config();
} else if (plugin instanceof HeaderConverter) {
pluginConfigDefs = ((HeaderConverter) plugin).config();
} else if (plugin instanceof Transformation) {
pluginConfigDefs = ((Transformation<?>) plugin).config();
} else if (plugin instanceof Predicate) {
pluginConfigDefs = ((Predicate<?>) plugin).config();
} else {
throw new BadRequestException("Invalid plugin class " + pluginName + ". Valid types are sink, source, converter, header_converter, transformation, predicate.");
}
// Track config properties by name and, if the same property is defined in multiple places,
// give precedence to the one defined by the plugin class
// Preserve the ordering of properties as they're returned from each ConfigDef
Map<String, ConfigKey> configsMap = new LinkedHashMap<>(pluginConfigDefs.configKeys());
if (baseConfigDefs != null)
baseConfigDefs.configKeys().forEach(configsMap::putIfAbsent);
List<ConfigKeyInfo> results = new ArrayList<>();
for (ConfigKey configKey : configsMap.values()) {
results.add(AbstractHerder.convertConfigKey(configKey));
}
return results;
} catch (ClassNotFoundException e) {
throw new ConnectException("Failed to load plugin class or one of its dependencies", e);
}
}
|
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testGetConnectorConfigDefWithInvalidPluginType() throws Exception {
String connName = "AnotherPlugin";
AbstractHerder herder = testHerder();
when(worker.getPlugins()).thenReturn(plugins);
when(plugins.pluginClass(anyString())).thenReturn((Class) Object.class);
when(plugins.newPlugin(anyString())).thenReturn(new DirectoryConfigProvider());
assertThrows(BadRequestException.class, () -> herder.connectorPluginConfig(connName));
}
|
@Override
public ChannelFuture writeData(final ChannelHandlerContext ctx, final int streamId, ByteBuf data, int padding,
final boolean endOfStream, ChannelPromise promise) {
promise = promise.unvoid();
final Http2Stream stream;
try {
stream = requireStream(streamId);
// Verify that the stream is in the appropriate state for sending DATA frames.
switch (stream.state()) {
case OPEN:
case HALF_CLOSED_REMOTE:
// Allowed sending DATA frames in these states.
break;
default:
throw new IllegalStateException("Stream " + stream.id() + " in unexpected state " + stream.state());
}
} catch (Throwable e) {
data.release();
return promise.setFailure(e);
}
// Hand control of the frame to the flow controller.
flowController().addFlowControlled(stream,
new FlowControlledData(stream, data, padding, endOfStream, promise));
return promise;
}
|
@Test
public void dataFramesShouldMerge() throws Exception {
createStream(STREAM_ID, false);
final ByteBuf data = dummyData().retain();
ChannelPromise promise1 = newPromise();
encoder.writeData(ctx, STREAM_ID, data, 0, true, promise1);
ChannelPromise promise2 = newPromise();
encoder.writeData(ctx, STREAM_ID, data, 0, true, promise2);
// Now merge the two payloads.
List<FlowControlled> capturedWrites = payloadCaptor.getAllValues();
FlowControlled mergedPayload = capturedWrites.get(0);
mergedPayload.merge(ctx, capturedWrites.get(1));
assertEquals(16, mergedPayload.size());
assertFalse(promise1.isDone());
assertFalse(promise2.isDone());
// Write the merged payloads and verify it was written correctly.
mergedPayload.write(ctx, 16);
assertEquals(0, mergedPayload.size());
assertEquals("abcdefghabcdefgh", writtenData.get(0));
assertEquals(0, data.refCnt());
assertTrue(promise1.isSuccess());
assertTrue(promise2.isSuccess());
}
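
The state guard at the top of writeData (reject the frame unless the stream is OPEN or HALF_CLOSED_REMOTE, releasing the buffer and failing the promise otherwise) follows a pattern that can be shown without Netty. A framework-free sketch; the StreamState enum and method names are hypothetical, not Netty API:

enum StreamState { OPEN, HALF_CLOSED_REMOTE, HALF_CLOSED_LOCAL, CLOSED }

final class StateGuardSketch {
    static void checkWritable(int streamId, StreamState state) {
        switch (state) {
            case OPEN:
            case HALF_CLOSED_REMOTE:
                // DATA frames may be sent in these states.
                break;
            default:
                throw new IllegalStateException(
                        "Stream " + streamId + " in unexpected state " + state);
        }
    }

    public static void main(String[] args) {
        checkWritable(3, StreamState.OPEN); // passes silently
        try {
            checkWritable(5, StreamState.HALF_CLOSED_LOCAL);
        } catch (IllegalStateException expected) {
            System.out.println(expected.getMessage());
            // Stream 5 in unexpected state HALF_CLOSED_LOCAL
        }
    }
}

Validating up front and failing the promise keeps the error on the caller's future instead of letting a bad frame reach the flow controller.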
|
@GetMapping("/configs")
public Result<List<ConfigInfo>> getConfigList(ConfigInfo configInfo) {
if (StringUtils.isEmpty(configInfo.getPluginType())) {
return new Result<>(ResultCodeType.MISS_PARAM.getCode(), ResultCodeType.MISS_PARAM.getMessage());
}
Optional<PluginType> optionalPluginType = PluginType.getPluginType(configInfo.getPluginType());
if (!optionalPluginType.isPresent()) {
return new Result<>(ResultCodeType.FAIL.getCode(), "Invalid plugin type.");
}
PluginType pluginType = optionalPluginType.get();
boolean exactMatchFlag = false;
if (pluginType == PluginType.OTHER) {
if (StringUtils.isEmpty(configInfo.getGroup())) {
return new Result<>(ResultCodeType.MISS_PARAM.getCode(), ResultCodeType.MISS_PARAM.getMessage());
}
exactMatchFlag = true;
}
return configService.getConfigList(configInfo, pluginType, exactMatchFlag);
}
|
@Test
public void getConfigList() {
Result<List<ConfigInfo>> result = configController.getConfigList(configInfo);
Assert.assertTrue(result.isSuccess());
Assert.assertNotNull(result.getData());
Assert.assertEquals(1, result.getData().size());
ConfigInfo info = result.getData().get(0);
Assert.assertEquals(info.getKey(), KEY);
Assert.assertEquals(info.getGroup(), GROUP);
Assert.assertEquals(info.getServiceName(), SERVICE_NAME);
Result<List<ConfigInfo>> newResult = configController.getConfigList(new ConfigInfo());
Assert.assertFalse(newResult.isSuccess());
Assert.assertNull(newResult.getData());
}
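
The Optional-returning enum lookup the controller relies on (PluginType.getPluginType) presumably resolves a name to an enum constant or Optional.empty. A JDK-only sketch of that lookup pattern; this enum and its constants are hypothetical stand-ins for the project's PluginType:

import java.util.Arrays;
import java.util.Optional;

public class EnumLookupSketch {
    // Hypothetical stand-in for the project's PluginType enum.
    enum PluginType { ROUTER, MONITOR, OTHER }

    static Optional<PluginType> getPluginType(String name) {
        return Arrays.stream(PluginType.values())
                .filter(t -> t.name().equalsIgnoreCase(name))
                .findFirst();
    }

    public static void main(String[] args) {
        System.out.println(getPluginType("router"));  // Optional[ROUTER]
        System.out.println(getPluginType("unknown")); // Optional.empty
    }
}

As in the controller above, the caller short-circuits with a failure result when the Optional is empty rather than throwing.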
|
@Override
public String load(ImageTarball imageTarball, Consumer<Long> writtenByteCountListener)
throws InterruptedException, IOException {
// Runs 'docker load'.
Process dockerProcess = docker("load");
try (NotifyingOutputStream stdin =
new NotifyingOutputStream(dockerProcess.getOutputStream(), writtenByteCountListener)) {
imageTarball.writeTo(stdin);
} catch (IOException ex) {
// Tries to read from stderr. Not using getStderrOutput(), as we want to show the error
// message from the tarball I/O write failure when reading from stderr fails.
String error;
try (InputStreamReader stderr =
new InputStreamReader(dockerProcess.getErrorStream(), StandardCharsets.UTF_8)) {
error = CharStreams.toString(stderr);
} catch (IOException ignored) {
// This ignores exceptions from reading stderr and uses the original exception from
// writing to stdin.
error = ex.getMessage();
}
throw new IOException("'docker load' command failed with error: " + error, ex);
}
try (InputStreamReader stdout =
new InputStreamReader(dockerProcess.getInputStream(), StandardCharsets.UTF_8)) {
String output = CharStreams.toString(stdout);
if (dockerProcess.waitFor() != 0) {
throw new IOException(
"'docker load' command failed with error: " + getStderrOutput(dockerProcess));
}
return output;
}
}
|
@Test
public void testLoad_stdoutFail() throws InterruptedException {
DockerClient testDockerClient = new CliDockerClient(ignored -> mockProcessBuilder);
Mockito.when(mockProcess.waitFor()).thenReturn(1);
Mockito.when(mockProcess.getOutputStream()).thenReturn(ByteStreams.nullOutputStream());
Mockito.when(mockProcess.getInputStream())
.thenReturn(new ByteArrayInputStream("ignored".getBytes(StandardCharsets.UTF_8)));
Mockito.when(mockProcess.getErrorStream())
.thenReturn(new ByteArrayInputStream("error".getBytes(StandardCharsets.UTF_8)));
try {
testDockerClient.load(imageTarball, ignored -> {});
Assert.fail("Process should have failed");
} catch (IOException ex) {
Assert.assertEquals("'docker load' command failed with error: error", ex.getMessage());
}
}
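
The stderr-fallback in load() — if writing to the child's stdin fails, report the child's stderr text, but fall back to the original exception's message when stderr itself cannot be read — is a general Process pattern. A framework-free sketch assuming a Unix-like environment where "cat" exists; "cat" is an arbitrary stand-in for "docker load":

import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class StderrFallbackSketch {

    static String run(String payload) throws IOException, InterruptedException {
        Process process = new ProcessBuilder("cat").start();
        try {
            process.getOutputStream().write(payload.getBytes(StandardCharsets.UTF_8));
            process.getOutputStream().close();
        } catch (IOException ex) {
            String error;
            try (InputStreamReader stderr =
                    new InputStreamReader(process.getErrorStream(), StandardCharsets.UTF_8)) {
                StringBuilder sb = new StringBuilder();
                for (int c; (c = stderr.read()) != -1; ) {
                    sb.append((char) c);
                }
                error = sb.toString();
            } catch (IOException ignored) {
                error = ex.getMessage(); // stderr unreadable: keep the original cause
            }
            throw new IOException("child process failed with error: " + error, ex);
        }
        try (InputStreamReader stdout =
                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8)) {
            StringBuilder sb = new StringBuilder();
            for (int c; (c = stdout.read()) != -1; ) {
                sb.append((char) c);
            }
            process.waitFor();
            return sb.toString();
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(run("payload")); // cat echoes the payload back
    }
}

Chaining the original exception as the cause (the trailing ex argument) preserves the stdin write failure even when stderr supplies the headline message.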
|
public CompletableFuture<UsernameReservation> reserveUsernameHash(final Account account, final List<byte[]> requestedUsernameHashes) {
if (account.getUsernameHash().filter(
oldHash -> requestedUsernameHashes.stream().anyMatch(hash -> Arrays.equals(oldHash, hash)))
.isPresent()) {
// if we are trying to reserve our already-confirmed username hash, we don't need to do
// anything, and can give the client a success response (they may try to confirm it again,
// but that's a no-op other than rotating their username link which they may need to do
// anyway). note this is *not* the case for reserving our already-reserved username hash,
// which should extend the reservation's TTL.
return CompletableFuture.completedFuture(new UsernameReservation(account, account.getUsernameHash().get()));
}
final AtomicReference<byte[]> reservedUsernameHash = new AtomicReference<>();
return redisDeleteAsync(account)
.thenCompose(ignored -> updateWithRetriesAsync(
account,
a -> true,
a -> checkAndReserveNextUsernameHash(a, new ArrayDeque<>(requestedUsernameHashes))
.thenAccept(reservedUsernameHash::set),
() -> accounts.getByAccountIdentifierAsync(account.getUuid()).thenApply(Optional::orElseThrow),
AccountChangeValidator.USERNAME_CHANGE_VALIDATOR,
MAX_UPDATE_ATTEMPTS))
.whenComplete((updatedAccount, throwable) -> {
if (throwable == null) {
// Make a best effort to clear any stale data that may have been cached while this operation was in progress
redisDeleteAsync(updatedAccount);
}
})
.thenApply(updatedAccount -> new UsernameReservation(updatedAccount, reservedUsernameHash.get()));
}
|
@Test
void testReserveUsernameHash() {
final Account account = AccountsHelper.generateTestAccount("+18005551234", UUID.randomUUID(), UUID.randomUUID(), new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]);
when(accounts.getByAccountIdentifierAsync(account.getUuid())).thenReturn(CompletableFuture.completedFuture(Optional.of(account)));
final List<byte[]> usernameHashes = List.of(TestRandomUtil.nextBytes(32), TestRandomUtil.nextBytes(32));
when(accounts.reserveUsernameHash(any(), any(), any())).thenReturn(CompletableFuture.completedFuture(null));
UsernameReservation result = accountsManager.reserveUsernameHash(account, usernameHashes).join();
assertArrayEquals(usernameHashes.get(0), result.reservedUsernameHash());
verify(accounts, times(1)).reserveUsernameHash(eq(account), any(), eq(Duration.ofMinutes(5)));
}
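
reserveUsernameHash uses an AtomicReference to smuggle a value produced inside one async stage out to a later thenApply, which pairs it with that stage's primary result. A minimal JDK-only sketch of that capture pattern; all names here are illustrative, not this project's API:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;

public class AtomicCaptureSketch {
    public static void main(String[] args) {
        AtomicReference<String> reservedHash = new AtomicReference<>();

        String result = CompletableFuture
                .supplyAsync(() -> {
                    reservedHash.set("hash-1"); // side output of the update step
                    return "updated-account";   // primary output of the update step
                })
                .thenApply(account -> account + " reserved " + reservedHash.get())
                .join();

        System.out.println(result); // updated-account reserved hash-1
    }
}

The capture is safe here because the set happens-before the dependent stage runs; the reference is only a vehicle for a second return value, not shared mutable state.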
|
public static DisruptContext minimumDelay(long delay)
{
if (delay < 0)
{
throw new IllegalArgumentException("Delay cannot be smaller than 0");
}
return new MinimumDelayDisruptContext(delay);
}
|
@Test
public void testMinimumDelay()
{
final long latency = 4200;
DisruptContexts.MinimumDelayDisruptContext context =
(DisruptContexts.MinimumDelayDisruptContext) DisruptContexts.minimumDelay(latency);
Assert.assertEquals(context.mode(), DisruptMode.MINIMUM_DELAY);
Assert.assertEquals(context.delay(), latency);
}
|