focal_method | test_case
---|---|
@Override
public String getColumnLabel() {
return "*";
}
|
@Test
void assertGetColumnLabel() {
assertTrue(new ShorthandProjection(new IdentifierValue("owner"), Collections.emptyList()).getColumnLabel().contains("*"));
}
|
static boolean isSupportedProtocol(URL url) {
String protocol = url.getProtocol().toLowerCase(java.util.Locale.ENGLISH);
return protocol.equals(HTTPConstants.PROTOCOL_HTTP) || protocol.equals(HTTPConstants.PROTOCOL_HTTPS);
}
|
@Test
public void testHttps() throws Exception {
Assertions.assertTrue(AuthManager.isSupportedProtocol(new URL("https:")));
}
|
@Override
protected int rsv(WebSocketFrame msg) {
return msg instanceof TextWebSocketFrame || msg instanceof BinaryWebSocketFrame
        ? msg.rsv() | WebSocketExtension.RSV1 : msg.rsv();
}
|
@Test
public void testIllegalStateWhenCompressionInProgress() {
WebSocketExtensionFilter selectivityCompressionFilter = new WebSocketExtensionFilter() {
@Override
public boolean mustSkip(WebSocketFrame frame) {
return frame.content().readableBytes() < 100;
}
};
final EmbeddedChannel encoderChannel = new EmbeddedChannel(
new PerMessageDeflateEncoder(9, 15, false, selectivityCompressionFilter));
byte[] firstPayload = new byte[200];
random.nextBytes(firstPayload);
byte[] finalPayload = new byte[90];
random.nextBytes(finalPayload);
BinaryWebSocketFrame firstPart = new BinaryWebSocketFrame(false, 0, Unpooled.wrappedBuffer(firstPayload));
final ContinuationWebSocketFrame finalPart = new ContinuationWebSocketFrame(true, 0,
Unpooled.wrappedBuffer(finalPayload));
assertTrue(encoderChannel.writeOutbound(firstPart));
BinaryWebSocketFrame outboundFirstPart = encoderChannel.readOutbound();
//first part is compressed
assertEquals(WebSocketExtension.RSV1, outboundFirstPart.rsv());
assertFalse(Arrays.equals(firstPayload, ByteBufUtil.getBytes(outboundFirstPart.content())));
assertTrue(outboundFirstPart.release());
//final part throwing exception
try {
assertThrows(EncoderException.class, new Executable() {
@Override
public void execute() throws Throwable {
encoderChannel.writeOutbound(finalPart);
}
});
} finally {
assertTrue(finalPart.release());
assertFalse(encoderChannel.finishAndReleaseAll());
}
}
|
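Per-message deflate advertises compression by setting the RSV1 bit on the first frame of a message, which is what the rsv() override above does for text and binary frames. A small sketch of the bit manipulation (the RSV1 = 0x04 constant mirrors Netty's WebSocketExtension, treating rsv() as the 3-bit RSV field):

static final int RSV1 = 0x04; // high bit of the 3-bit RSV field

// Mark an outgoing frame as compressed:
static int withCompressionFlag(int rsv) {
    return rsv | RSV1;
}

// Check whether an incoming frame claims compression:
static boolean isCompressed(int rsv) {
    return (rsv & RSV1) != 0;
}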
public PageListResponse<IndexSetFieldTypeSummary> getIndexSetFieldTypeSummary(final Set<String> streamIds,
final String fieldName,
final Predicate<String> indexSetPermissionPredicate) {
return getIndexSetFieldTypeSummary(streamIds, fieldName, indexSetPermissionPredicate, 1, 50, DEFAULT_SORT.id(), DEFAULT_SORT.direction());
}
|
@Test
void testComplexPaginationScenario() {
Predicate<String> indexSetPermissionPredicate = indexSetID -> indexSetID.contains("canSee");
final Set<String> allStreams = Set.of("stream_id_1", "stream_id_2", "stream_id_3", "stream_id_4", "stream_id_5");
doReturn(Set.of("canSee1", "cannotSee", "canSeeButDoesNotExist", "canSee2", "canSee3"))
.when(streamService)
.indexSetIdsByIds(allStreams);
doReturn(List.of("Stream1", "Stream2")).when(streamService).streamTitlesForIndexSet("canSee1");
doReturn(List.of("Stream2")).when(streamService).streamTitlesForIndexSet("canSee2");
doReturn(List.of("Stream3")).when(streamService).streamTitlesForIndexSet("canSee3");
doReturn(List.of("text", "keyword")).when(indexFieldTypesService).fieldTypeHistory("canSee1", "field_name", true);
doReturn(List.of("text")).when(indexFieldTypesService).fieldTypeHistory("canSee2", "field_name", true);
doReturn(List.of()).when(indexFieldTypesService).fieldTypeHistory("canSee3", "field_name", true);
mockIndexSetConfig("canSee1", "Aa");
mockIndexSetConfig("canSee2", "Ab");
mockIndexSetConfig("canSee3", "Z");
doReturn(Optional.empty()).when(indexSetService).get("canSeeButDoesNotExist");
final PageListResponse<IndexSetFieldTypeSummary> allResultsOnSinglePageSortedByIdAsc = toTest.getIndexSetFieldTypeSummary(allStreams, "field_name", indexSetPermissionPredicate,
1, 5, DEFAULT_SORT_FIELD, Sorting.Direction.ASC);
assertThat(allResultsOnSinglePageSortedByIdAsc.elements())
.isNotNull()
.isEqualTo(List.of(
new IndexSetFieldTypeSummary("canSee1", "Aa", List.of("Stream1", "Stream2"), List.of("text", "keyword")),
new IndexSetFieldTypeSummary("canSee2", "Ab", List.of("Stream2"), List.of("text")),
new IndexSetFieldTypeSummary("canSee3", "Z", List.of("Stream3"), List.of())
));
assertThat(allResultsOnSinglePageSortedByIdAsc.total()).isEqualTo(3);
assertThat(allResultsOnSinglePageSortedByIdAsc.paginationInfo().count()).isEqualTo(3);
final PageListResponse<IndexSetFieldTypeSummary> thirdSingleElemPageWithSortByIdDesc = toTest.getIndexSetFieldTypeSummary(allStreams, "field_name", indexSetPermissionPredicate,
3, 1, DEFAULT_SORT_FIELD, Sorting.Direction.DESC);
assertThat(thirdSingleElemPageWithSortByIdDesc.elements())
.isNotNull()
.isEqualTo(List.of(
new IndexSetFieldTypeSummary("canSee1", "Aa", List.of("Stream1", "Stream2"), List.of("text", "keyword"))
));
assertThat(thirdSingleElemPageWithSortByIdDesc.total()).isEqualTo(3);
assertThat(thirdSingleElemPageWithSortByIdDesc.paginationInfo().count()).isEqualTo(1);
final PageListResponse<IndexSetFieldTypeSummary> firstTwoElemPageWithSortByTitleDesc = toTest.getIndexSetFieldTypeSummary(allStreams, "field_name", indexSetPermissionPredicate,
1, 2, INDEX_SET_TITLE, Sorting.Direction.DESC);
assertThat(firstTwoElemPageWithSortByTitleDesc.elements())
.isNotNull()
.isEqualTo(List.of(
new IndexSetFieldTypeSummary("canSee3", "Z", List.of("Stream3"), List.of()),
new IndexSetFieldTypeSummary("canSee2", "Ab", List.of("Stream2"), List.of("text"))
));
assertThat(firstTwoElemPageWithSortByTitleDesc.total()).isEqualTo(3);
assertThat(firstTwoElemPageWithSortByTitleDesc.paginationInfo().count()).isEqualTo(2);
}
|
@GET
@Path("sql")
@ManualAuthorization
public String handleGetSql(@QueryParam("sql") String sqlQuery, @QueryParam("trace") String traceEnabled,
@QueryParam("queryOptions") String queryOptions, @Context HttpHeaders httpHeaders) {
try {
LOGGER.debug("Trace: {}, Running query: {}", traceEnabled, sqlQuery);
return executeSqlQuery(httpHeaders, sqlQuery, traceEnabled, queryOptions, "/sql");
} catch (ProcessingException pe) {
LOGGER.error("Caught exception while processing get request {}", pe.getMessage());
return constructQueryExceptionResponse(pe);
} catch (WebApplicationException wae) {
LOGGER.error("Caught exception while processing get request", wae);
throw wae;
} catch (Exception e) {
LOGGER.error("Caught exception while processing get request", e);
return constructQueryExceptionResponse(QueryException.getException(QueryException.INTERNAL_ERROR, e));
}
}
|
@Test
public void testInvalidQuery() {
String response = _pinotQueryResource.handleGetSql("INVALID QUERY", null, null, null);
Assert.assertTrue(response.contains(String.valueOf(QueryException.SQL_PARSING_ERROR_CODE)));
Assert.assertFalse(response.contains("retry the query using the multi-stage query engine"));
}
|
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
return api.send(request);
}
|
@Test
public void getChatMember() {
restrictChatMember();
ChatMember chatMember = bot.execute(new GetChatMember(groupId, memberBot)).chatMember();
ChatMemberTest.check(chatMember);
assertEquals(ChatMember.Status.restricted, chatMember.status());
assertEquals(Integer.valueOf(0), chatMember.untilDate());
assertFalse(chatMember.canPostMessages());
assertFalse(chatMember.canEditMessages());
assertTrue(chatMember.isMember());
assertTrue(chatMember.canChangeInfo());
assertTrue(chatMember.canInviteUsers());
assertTrue(chatMember.canPinMessages());
assertTrue(chatMember.canSendPolls());
assertTrue(chatMember.canSendMessages());
assertTrue(chatMember.canSendAudios());
assertTrue(chatMember.canSendDocuments());
assertTrue(chatMember.canSendPhotos());
assertTrue(chatMember.canSendVideos());
assertTrue(chatMember.canSendVideoNotes());
assertTrue(chatMember.canSendVoiceNotes());
assertFalse(chatMember.canSendOtherMessages());
assertFalse(chatMember.canAddWebPagePreviews());
assertTrue(chatMember.canManageTopics());
}
|
@Override
public boolean isValid() {
return hasOnlyFields(PREFIXES) && prefixes() != null;
}
|
@Test
public void testIsValid() {
assertTrue(config.isValid());
}
|
public void addPipeline(String groupName, PipelineConfig pipeline) {
String sanitizedGroupName = BasicPipelineConfigs.sanitizedGroupName(groupName);
if (!this.hasGroup(sanitizedGroupName)) {
createNewGroup(sanitizedGroupName, pipeline);
return;
}
for (PipelineConfigs pipelines : this) {
if (pipelines.save(pipeline, sanitizedGroupName)) {
return;
}
}
}
|
@Test
public void shouldSaveNewPipelineGroupOnTheTop() {
PipelineConfigs defaultGroup = createGroup("defaultGroup", createPipelineConfig("pipeline1", "stage1"));
PipelineConfigs defaultGroup2 = createGroup("defaultGroup2", createPipelineConfig("pipeline2", "stage2"));
PipelineGroups pipelineGroups = new PipelineGroups(defaultGroup, defaultGroup2);
PipelineConfig pipelineConfig = createPipelineConfig("pipeline3", "stage1");
pipelineGroups.addPipeline("defaultGroup3", pipelineConfig);
PipelineConfigs group = createGroup("defaultGroup3", pipelineConfig);
assertThat(pipelineGroups.indexOf(group), is(0));
}
|
@Override
public Object getDefaultValue() {
return defaultValue;
}
|
@Test
public void testGetDefaultValue() throws Exception {
NumberField f = new NumberField("test", "Name", 9001, "foo", ConfigurationField.Optional.NOT_OPTIONAL);
assertEquals(9001, f.getDefaultValue());
}
|
public static <T> T ifOverridden(Supplier<T> supplier, @NonNull Class<?> base, @NonNull Class<?> derived, @NonNull String methodName, @NonNull Class<?>... types) {
if (isOverridden(base, derived, methodName, types)) {
return supplier.get();
} else {
throw new AbstractMethodError("The class " + derived.getName() + " must override at least one of the "
+ base.getSimpleName() + "." + methodName + " methods");
}
}
|
@Test
public void ifOverriddenSuccess() {
assertTrue(Util.ifOverridden(() -> true, BaseClass.class, DerivedClassSuccess.class, "method"));
}
|
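For context, a minimal sketch of what an isOverridden reflection check can look like (hypothetical; the real Jenkins Util helper handles more cases, such as interfaces and bridge methods):

import java.lang.reflect.Method;

final class OverrideCheck {
    // True if 'derived' (or an intermediate superclass below 'base')
    // declares its own version of the named method.
    static boolean isOverridden(Class<?> base, Class<?> derived, String name, Class<?>... types) {
        for (Class<?> c = derived; c != null && c != base; c = c.getSuperclass()) {
            try {
                c.getDeclaredMethod(name, types);
                return true; // declared below 'base', so it is an override
            } catch (NoSuchMethodException ignored) {
                // not declared here; keep walking up the hierarchy
            }
        }
        return false;
    }
}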
public void copy(String src, String destination) throws RolloverFailure {
BufferedInputStream bis = null;
BufferedOutputStream bos = null;
try {
bis = new BufferedInputStream(new FileInputStream(src));
bos = new BufferedOutputStream(new FileOutputStream(destination));
byte[] inbuf = new byte[BUF_SIZE];
int n;
while ((n = bis.read(inbuf)) != -1) {
bos.write(inbuf, 0, n);
}
bis.close();
bis = null;
bos.close();
bos = null;
} catch (IOException ioe) {
String msg = "Failed to copy [" + src + "] to [" + destination + "]";
addError(msg, ioe);
throw new RolloverFailure(msg);
} finally {
if (bis != null) {
try {
bis.close();
} catch (IOException e) {
// ignore
}
}
if (bos != null) {
try {
bos.close();
} catch (IOException e) {
// ignore
}
}
}
}
|
@Test
public void basicCopyingWorks() throws IOException {
String dir = CoreTestConstants.OUTPUT_DIR_PREFIX + "/fu" + diff;
File dirFile = new File(dir);
dirFile.mkdir();
String src = CoreTestConstants.TEST_INPUT_PREFIX + "compress1.copy";
String target = CoreTestConstants.OUTPUT_DIR_PREFIX + "/fu" + diff + "/copyingWorks.txt";
fileUtil.copy(src, target);
Compare.compare(src, target);
}
|
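A minimal modern sketch of the same copy loop using try-with-resources, which makes the explicit close-and-null bookkeeping above unnecessary (assumes the surrounding class's BUF_SIZE, addError and RolloverFailure; not the logback implementation):

public void copy(String src, String destination) throws RolloverFailure {
    try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(src));
         BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(destination))) {
        byte[] inbuf = new byte[BUF_SIZE];
        int n;
        while ((n = bis.read(inbuf)) != -1) {
            bos.write(inbuf, 0, n);
        }
    } catch (IOException ioe) {
        String msg = "Failed to copy [" + src + "] to [" + destination + "]";
        addError(msg, ioe);
        throw new RolloverFailure(msg);
    }
}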
public final void isNotEmpty() {
if (Iterables.isEmpty(checkNotNull(actual))) {
failWithoutActual(simpleFact("expected not to be empty"));
}
}
|
@Test
public void iterableIsNotEmptyWithFailure() {
expectFailureWhenTestingThat(asList()).isNotEmpty();
assertFailureKeys("expected not to be empty");
}
|
public ScanResults run(ScanTarget scanTarget) throws ExecutionException, InterruptedException {
return runAsync(scanTarget).get();
}
|
@Test
public void run_whenPortScannerFailed_returnsFailedScanResult()
throws ExecutionException, InterruptedException {
Injector injector =
Guice.createInjector(
new FakeUtcClockModule(),
new FakePluginExecutionModule(),
new FailedPortScannerBootstrapModule(),
new FakeServiceFingerprinterBootstrapModule(),
new FakeVulnDetectorBootstrapModule());
scanningWorkflow = injector.getInstance(DefaultScanningWorkflow.class);
ScanResults scanResults = scanningWorkflow.run(buildScanTarget());
assertThat(scanResults.getScanStatus()).isEqualTo(ScanStatus.FAILED);
assertThat(scanResults.getStatusMessage())
.contains("Plugin execution error on '/fake/PORT_SCAN/FailedPortScanner/v0.1'");
assertThat(scanResults.getScanFindingsList()).isEmpty();
}
|
public synchronized boolean registerProducer(final String group, final DefaultMQProducerImpl producer) {
if (null == group || null == producer) {
return false;
}
MQProducerInner prev = this.producerTable.putIfAbsent(group, producer);
if (prev != null) {
log.warn("the producer group[{}] exist already.", group);
return false;
}
return true;
}
|
@Test
public void testRegisterProducer() {
boolean flag = mqClientInstance.registerProducer(group, mock(DefaultMQProducerImpl.class));
assertThat(flag).isTrue();
flag = mqClientInstance.registerProducer(group, mock(DefaultMQProducerImpl.class));
assertThat(flag).isFalse();
mqClientInstance.unregisterProducer(group);
flag = mqClientInstance.registerProducer(group, mock(DefaultMQProducerImpl.class));
assertThat(flag).isTrue();
}
|
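The register/unregister contract the test exercises follows directly from ConcurrentMap.putIfAbsent, which returns the previous mapping (or null if none existed). A self-contained sketch of the same pattern:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

final class Registry {
    private final ConcurrentMap<String, Object> table = new ConcurrentHashMap<>();

    boolean register(String group, Object producer) {
        if (group == null || producer == null) {
            return false;
        }
        // Succeeds only for the first registration of a group.
        return table.putIfAbsent(group, producer) == null;
    }

    void unregister(String group) {
        table.remove(group); // frees the group name for re-registration
    }
}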
public MemoryRecords build() {
if (aborted) {
throw new IllegalStateException("Attempting to build an aborted record batch");
}
close();
return builtRecords;
}
|
@Test
public void testUnsupportedCompress() {
BiFunction<Byte, Compression, MemoryRecordsBuilder> builderBiFunction = (magic, compression) ->
new MemoryRecordsBuilder(ByteBuffer.allocate(128), magic, compression, TimestampType.CREATE_TIME, 0L, 0L,
RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, 128);
Arrays.asList(MAGIC_VALUE_V0, MAGIC_VALUE_V1).forEach(magic -> {
Exception e = assertThrows(IllegalArgumentException.class, () -> builderBiFunction.apply(magic, Compression.zstd().build()));
assertEquals("ZStandard compression is not supported for magic " + magic, e.getMessage());
});
}
|
@Override
protected int command() {
if (!validateConfigFilePresent()) {
return 1;
}
final MigrationConfig config;
try {
config = MigrationConfig.load(getConfigFile());
} catch (KsqlException | MigrationException e) {
LOGGER.error(e.getMessage());
return 1;
}
return command(
config,
MigrationsUtil::getKsqlClient,
getMigrationsDir(getConfigFile(), config)
);
}
|
@Test
public void shouldFailOnMissingMigrationFile() throws Exception {
// Given:
final List<String> migratedVersions = ImmutableList.of("1", "2", "3");
final List<String> migrationFiles = ImmutableList.of("1", "3");
final List<String> checksums = givenExistingMigrationFiles(migrationFiles);
givenAppliedMigrations(migratedVersions, ImmutableList.of(checksums.get(0), "missing", checksums.get(1)));
// When:
final int result = command.command(config, cfg -> ksqlClient, migrationsDir);
// Then:
assertThat(result, is(1));
// verification stops on failure, so version "1" is never queried
verifyClientCallsForVersions(ImmutableList.of("2", "3"));
}
|
private static void convertToTelemetry(JsonElement jsonElement, long systemTs, Map<Long, List<KvEntry>> result, PostTelemetryMsg.Builder builder) {
if (jsonElement.isJsonObject()) {
parseObject(systemTs, result, builder, jsonElement.getAsJsonObject());
} else if (jsonElement.isJsonArray()) {
jsonElement.getAsJsonArray().forEach(je -> {
if (je.isJsonObject()) {
parseObject(systemTs, result, builder, je.getAsJsonObject());
} else {
throw new JsonSyntaxException(CAN_T_PARSE_VALUE + je);
}
});
} else {
throw new JsonSyntaxException(CAN_T_PARSE_VALUE + jsonElement);
}
}
|
@Test
public void testParseAsDoubleWithZero() {
var result = JsonConverter.convertToTelemetry(JsonParser.parseString("{\"meterReadingDelta\": 42.0}"), 0L);
Assertions.assertEquals(42.0, result.get(0L).get(0).getDoubleValue().get(), 0.0);
}
|
@Override
@CacheEvict(cacheNames = RedisKeyConstants.NOTIFY_TEMPLATE,
allEntries = true) // allEntries evicts the whole cache, because the id is not the cache key (the template code is), so a targeted eviction is impractical
public void deleteNotifyTemplate(Long id) {
// Verify the template exists
validateNotifyTemplateExists(id);
// Delete it
notifyTemplateMapper.deleteById(id);
}
|
@Test
public void testDeleteNotifyTemplate_success() {
// Mock data
NotifyTemplateDO dbNotifyTemplate = randomPojo(NotifyTemplateDO.class);
notifyTemplateMapper.insert(dbNotifyTemplate); // @Sql: insert an existing record first
// Prepare parameters
Long id = dbNotifyTemplate.getId();
// Invoke
notifyTemplateService.deleteNotifyTemplate(id);
// Verify the data no longer exists
assertNull(notifyTemplateMapper.selectById(id));
}
|
public static void activateParams( VariableSpace childVariableSpace, NamedParams childNamedParams, VariableSpace parent, String[] listParameters,
String[] mappingVariables, String[] inputFields ) {
activateParams( childVariableSpace, childNamedParams, parent, listParameters, mappingVariables, inputFields, true );
}
|
@Test
public void activateParamsWithTruePassParametersFlagTest() throws Exception {
String childParam = "childParam";
String childValue = "childValue";
String paramOverwrite = "paramOverwrite";
String parentValue = "parentValue";
String stepValue = "stepValue";
String parentAndChildParameter = "parentAndChildParameter";
VariableSpace parent = new Variables();
parent.setVariable( paramOverwrite, parentValue );
parent.setVariable( parentAndChildParameter, parentValue );
TransMeta childVariableSpace = new TransMeta();
childVariableSpace.addParameterDefinition( childParam, "", "" );
childVariableSpace.setParameterValue( childParam, childValue );
childVariableSpace.addParameterDefinition( parentAndChildParameter, "", "" );
childVariableSpace.setParameterValue( parentAndChildParameter, childValue );
String[] parameters = childVariableSpace.listParameters();
StepWithMappingMeta.activateParams( childVariableSpace, childVariableSpace, parent,
parameters, new String[] { childParam, paramOverwrite }, new String[] { childValue, stepValue }, true );
//childVariableSpace.setVariable( parentAndChildParameter, parentValue);
Assert.assertEquals( childValue, childVariableSpace.getVariable( childParam ) );
// the step parameter prevails
Assert.assertEquals( stepValue, childVariableSpace.getVariable( paramOverwrite ) );
Assert.assertEquals( parentValue, childVariableSpace.getVariable( parentAndChildParameter ) );
}
|
@Override
public ResourceAllocationResult tryFulfillRequirements(
Map<JobID, Collection<ResourceRequirement>> missingResources,
TaskManagerResourceInfoProvider taskManagerResourceInfoProvider,
BlockedTaskManagerChecker blockedTaskManagerChecker) {
final ResourceAllocationResult.Builder resultBuilder = ResourceAllocationResult.builder();
final List<InternalResourceInfo> registeredResources =
getAvailableResources(
taskManagerResourceInfoProvider, resultBuilder, blockedTaskManagerChecker);
final List<InternalResourceInfo> pendingResources =
getPendingResources(taskManagerResourceInfoProvider, resultBuilder);
ResourceProfile totalCurrentResources =
Stream.concat(registeredResources.stream(), pendingResources.stream())
.map(internalResourceInfo -> internalResourceInfo.totalProfile)
.reduce(ResourceProfile.ZERO, ResourceProfile::merge);
for (Map.Entry<JobID, Collection<ResourceRequirement>> resourceRequirements :
missingResources.entrySet()) {
final JobID jobId = resourceRequirements.getKey();
final Collection<ResourceRequirement> unfulfilledJobRequirements =
tryFulfillRequirementsForJobWithResources(
jobId, resourceRequirements.getValue(), registeredResources);
if (!unfulfilledJobRequirements.isEmpty()) {
totalCurrentResources =
totalCurrentResources.merge(
tryFulfillRequirementsForJobWithPendingResources(
jobId,
unfulfilledJobRequirements,
pendingResources,
resultBuilder));
}
}
// Unlike tryFulfillRequirementsForJobWithPendingResources, which updates pendingResources
// to the latest state after a new PendingTaskManager is created,
// tryFulFillRequiredResources will not update pendingResources even after new
// PendingTaskManagers are created.
// This is because the pendingResources are no longer needed afterward.
tryFulFillRequiredResources(
registeredResources, pendingResources, totalCurrentResources, resultBuilder);
return resultBuilder.build();
}
|
@Test
void testFulFillRequirementShouldTakeRedundantInAccount() {
DefaultResourceAllocationStrategy strategy = createStrategy(1);
final TaskManagerInfo taskManager =
new TestingTaskManagerInfo(
DEFAULT_SLOT_RESOURCE.multiply(5),
DEFAULT_SLOT_RESOURCE.multiply(5),
DEFAULT_SLOT_RESOURCE);
final JobID jobId = new JobID();
final ResourceProfile largeResource = DEFAULT_SLOT_RESOURCE.multiply(4);
final List<ResourceRequirement> requirements =
Collections.singletonList(ResourceRequirement.create(largeResource, 1));
final TaskManagerResourceInfoProvider taskManagerResourceInfoProvider =
TestingTaskManagerResourceInfoProvider.newBuilder()
.setRegisteredTaskManagersSupplier(() -> Collections.singleton(taskManager))
.build();
final ResourceAllocationResult result =
strategy.tryFulfillRequirements(
Collections.singletonMap(jobId, requirements),
taskManagerResourceInfoProvider,
resourceID -> false);
assertThat(result.getUnfulfillableJobs()).isEmpty();
assertThat(result.getPendingTaskManagersToAllocate()).hasSize(1);
assertThat(result.getAllocationsOnPendingResources()).isEmpty();
}
|
@VisibleForTesting
static boolean hasEnoughCurvature(final int[] xs, final int[] ys, final int middlePointIndex) {
// Calculate the angle (in radians) at middlePointIndex, formed with the points
// CURVATURE_NEIGHBORHOOD steps away in either direction
final int startPointIndex = middlePointIndex - CURVATURE_NEIGHBORHOOD;
final int startX = xs[startPointIndex];
final int startY = ys[startPointIndex];
final int endPointIndex = middlePointIndex + CURVATURE_NEIGHBORHOOD;
final int endX = xs[endPointIndex];
final int endY = ys[endPointIndex];
final int middleX = xs[middlePointIndex];
final int middleY = ys[middlePointIndex];
final int firstSectionXDiff = startX - middleX;
final int firstSectionYDiff = startY - middleY;
final double firstSectionLength =
Math.sqrt(firstSectionXDiff * firstSectionXDiff + firstSectionYDiff * firstSectionYDiff);
final int secondSectionXDiff = endX - middleX;
final int secondSectionYDiff = endY - middleY;
final double secondSectionLength =
Math.sqrt(
secondSectionXDiff * secondSectionXDiff + secondSectionYDiff * secondSectionYDiff);
final double dotProduct =
firstSectionXDiff * secondSectionXDiff + firstSectionYDiff * secondSectionYDiff;
final double radianValue = Math.acos(dotProduct / firstSectionLength / secondSectionLength);
return radianValue <= CURVATURE_THRESHOLD;
}
|
@Test
public void testHasEnoughCurvature90Degrees() {
final int[] Xs = new int[3];
final int[] Ys = new int[3];
Xs[0] = -50;
Ys[0] = 0;
Xs[1] = 0;
Ys[1] = 0;
Xs[2] = 0;
Ys[2] = -50;
Assert.assertTrue(GestureTypingDetector.hasEnoughCurvature(Xs, Ys, 1));
Xs[0] = -50;
Ys[0] = 0;
Xs[1] = 0;
Ys[1] = 0;
Xs[2] = 0;
Ys[2] = 50;
Assert.assertTrue(GestureTypingDetector.hasEnoughCurvature(Xs, Ys, 1));
Xs[0] = 0;
Ys[0] = -50;
Xs[1] = 0;
Ys[1] = 0;
Xs[2] = 50;
Ys[2] = 0;
Assert.assertTrue(GestureTypingDetector.hasEnoughCurvature(Xs, Ys, 1));
}
|
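The test's first case can be checked by hand: with start (-50, 0), middle (0, 0) and end (0, -50), the two difference vectors are (-50, 0) and (0, -50), their dot product is 0, and acos(0 / (50 * 50)) = π/2, i.e. a 90-degree turn at the middle point, so the assertion holds whenever CURVATURE_THRESHOLD is at least π/2. A standalone sketch of the same computation (assumes CURVATURE_NEIGHBORHOOD = 1, as the three-point test arrays imply):

final class CurvatureDemo {
    public static void main(String[] args) {
        // Vectors from the middle point (0, 0) to (-50, 0) and to (0, -50).
        double dot = (-50) * 0 + 0 * (-50);          // = 0
        double len1 = Math.hypot(-50, 0);            // = 50
        double len2 = Math.hypot(0, -50);            // = 50
        double radians = Math.acos(dot / len1 / len2);
        System.out.println(Math.toDegrees(radians)); // 90.0
    }
}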
static String encodeElement(Object obj, URLEscaper.Escaping escaping, UriComponent.Type componentType)
{
StringBuilder builder = new StringBuilder();
encodeDataObject(obj, escaping, componentType, builder);
return builder.toString();
}
|
@Test(dataProvider = "unicode")
public void testUnicode(Object obj, String expectedNoEsc, String expectedPathSegEsc, String expectedQueryParamEsc)
{
String actualNoEsc = URIParamUtils.encodeElement(obj, NO_ESCAPING, null);
Assert.assertEquals(actualNoEsc, expectedNoEsc);
String actualPathSegEsc = URIParamUtils.encodeElement(obj, URL_ESCAPING,
UriComponent.Type.PATH_SEGMENT);
Assert.assertEquals(actualPathSegEsc, expectedPathSegEsc);
String actualQueryParamEsc = URIParamUtils.encodeElement(obj, URL_ESCAPING,
UriComponent.Type.QUERY_PARAM);
Assert.assertEquals(actualQueryParamEsc, expectedQueryParamEsc);
}
|
public void evaluate(AuthenticationContext context) {
if (context == null) {
return;
}
this.authenticationStrategy.evaluate(context);
}
|
@Test
public void evaluate4() {
if (MixAll.isMac()) {
return;
}
this.authConfig.setAuthenticationWhitelist("11");
this.evaluator = new AuthenticationEvaluator(authConfig);
DefaultAuthenticationContext context = new DefaultAuthenticationContext();
context.setRpcCode("11");
context.setUsername("test");
context.setContent("test".getBytes(StandardCharsets.UTF_8));
context.setSignature("test");
this.evaluator.evaluate(context);
}
|
@Override
public void removeTaskManager(InstanceID instanceId) {
Preconditions.checkNotNull(instanceId);
unWantedTaskManagers.remove(instanceId);
final FineGrainedTaskManagerRegistration taskManager =
Preconditions.checkNotNull(taskManagerRegistrations.remove(instanceId));
totalRegisteredResource = totalRegisteredResource.subtract(taskManager.getTotalResource());
LOG.debug("Remove task manager {}.", instanceId);
for (AllocationID allocationId : taskManager.getAllocatedSlots().keySet()) {
slots.remove(allocationId);
}
}
|
@Test
void testRemoveUnknownTaskManager() {
assertThatThrownBy(
() -> {
final FineGrainedTaskManagerTracker taskManagerTracker =
new FineGrainedTaskManagerTracker();
taskManagerTracker.removeTaskManager(new InstanceID());
})
.isInstanceOf(NullPointerException.class);
}
|
@Override
public String get(String name) {
checkKey(name);
String value = null;
String[] keyParts = splitKey(name);
String ns = registry.getNamespaceURI(keyParts[0]);
if (ns != null) {
try {
XMPProperty prop = xmpData.getProperty(ns, keyParts[1]);
if (prop != null && prop.getOptions().isSimple()) {
value = prop.getValue();
} else if (prop != null && prop.getOptions().isArray()) {
prop = xmpData.getArrayItem(ns, keyParts[1], 1);
value = prop.getValue();
}
// in all other cases, null is returned
} catch (XMPException e) {
// Ignore
}
}
return value;
}
|
@Test
public void get_unknownPrefixKey_throw() {
assertThrows(PropertyTypeException.class, () -> {
xmpMeta.get("unknown:key");
});
}
|
public MergePolicyConfig setBatchSize(int batchSize) {
this.batchSize = checkPositive("batchSize", batchSize);
return this;
}
|
@Test(expected = IllegalArgumentException.class)
public void setBatchSize_withNegative() {
config.setBatchSize(-1);
}
|
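The setter relies on a precondition helper that throws for non-positive input and otherwise returns the value, so validation and assignment fit on one line. A typical shape of such a helper (a sketch; Hazelcast's actual Preconditions may differ):

static int checkPositive(String paramName, int value) {
    if (value <= 0) {
        throw new IllegalArgumentException(paramName + " is " + value + " but must be > 0");
    }
    return value; // valid: hand the value back for inline assignment
}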
public StringSubject factValue(String key) {
return doFactValue(key, null);
}
|
@Test
public void factValueFailMultipleKeys() {
Object unused =
expectFailureWhenTestingThat(fact("foo", "the foo"), fact("foo", "the other foo"))
.factValue("foo");
assertFailureKeys("expected to contain a single fact with key", "but contained multiple");
assertFailureValue("expected to contain a single fact with key", "foo");
assertFailureValue("but contained multiple", "[foo: the foo, foo: the other foo]");
}
|
@Override
public void resetConfigStats(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_RESETSTAT);
syncFuture(f);
}
|
@Test
public void testResetConfigStats() {
RedisClusterNode master = getFirstMaster();
connection.resetConfigStats(master);
}
|
@Override
public Set<EntityExcerpt> listEntityExcerpts() {
return outputService.loadAll().stream()
.map(this::createExcerpt)
.collect(Collectors.toSet());
}
|
@Test
@MongoDBFixtures("OutputFacadeTest.json")
public void listEntityExcerpts() {
final EntityExcerpt expectedEntityExcerpt = EntityExcerpt.builder()
.id(ModelId.of("5adf239e4b900a0fdb4e5197"))
.type(ModelTypes.OUTPUT_V1)
.title("STDOUT")
.build();
final Set<EntityExcerpt> entityExcerpts = facade.listEntityExcerpts();
assertThat(entityExcerpts).containsOnly(expectedEntityExcerpt);
}
|
@Override
public Http2Headers decodeHeaders(int streamId, ByteBuf headerBlock) throws Http2Exception {
try {
final Http2Headers headers = newHeaders();
hpackDecoder.decode(streamId, headerBlock, headers, validateHeaders);
headerArraySizeAccumulator = HEADERS_COUNT_WEIGHT_NEW * headers.size() +
HEADERS_COUNT_WEIGHT_HISTORICAL * headerArraySizeAccumulator;
return headers;
} catch (Http2Exception e) {
throw e;
} catch (Throwable e) {
// Default handler for any other types of errors that may have occurred. For example,
// the Header builder throws IllegalArgumentException if the key or value was invalid
// for any reason (e.g. the key was an invalid pseudo-header).
throw connectionError(COMPRESSION_ERROR, e, "Error decoding headers: %s", e.getMessage());
}
}
|
@Test
public void decodeLargerThanHeaderListSizeButLessThanGoAwayWithInitialDecoderSettings() throws Exception {
final ByteBuf buf = encode(b(":method"), b("GET"), b("test_header"),
b(String.format("%09000d", 0).replace('0', 'A')));
final int streamId = 1;
try {
Http2Exception.HeaderListSizeException e = assertThrows(Http2Exception.HeaderListSizeException.class,
new Executable() {
@Override
public void execute() throws Throwable {
decoder.decodeHeaders(streamId, buf);
}
});
assertEquals(streamId, e.streamId());
} finally {
buf.release();
}
}
|
public DataSource<T> loadDataSource(Path csvPath, String responseName) throws IOException {
return loadDataSource(csvPath, Collections.singleton(responseName));
}
|
@Test
public void testLoadMultiOutputAsSingleOutput() throws IOException {
URL path = CSVLoader.class.getResource("/org/tribuo/data/csv/test-multioutput.csv");
Set<String> responses = new LinkedHashSet<>(Arrays.asList("R1", "R2"));
CSVLoader<MockOutput> loader = new CSVLoader<>(new MockOutputFactory());
DataSource<MockOutput> source = loader.loadDataSource(path, responses);
MutableDataset<MockOutput> data = new MutableDataset<>(source);
assertEquals(6, data.size());
assertEquals("[R1=TRUE, R2=FALSE]", data.getExample(0).getOutput().label);
assertEquals("[R1=TRUE, R2=TRUE]", data.getExample(1).getOutput().label);
assertEquals("[R1=FALSE, R2=FALSE]", data.getExample(2).getOutput().label);
}
|
@Override
public Object evaluate(final ProcessingDTO processingDTO) {
return getFromPossibleSources(name, processingDTO)
.orElse(mapMissingTo);
}
|
@Test
void evaluateNull() {
final KiePMMLConstant kiePMMLConstant = new KiePMMLConstant("NAME", Collections.emptyList(), "WRONG-CONSTANT", null);
final KiePMMLDerivedField kiePMMLDerivedField = KiePMMLDerivedField.builder("ANOTHER_FIELD",
Collections.emptyList(),
DATA_TYPE.DOUBLE,
OP_TYPE.CONTINUOUS,
kiePMMLConstant)
.build();
final List<KiePMMLDerivedField> derivedFields = Collections.singletonList(kiePMMLDerivedField);
final List<KiePMMLNameValue> kiePMMLNameValues = Collections.singletonList(new KiePMMLNameValue("UNKNOWN",
"WRONG"));
final KiePMMLFieldRef kiePMMLFieldRef = new KiePMMLFieldRef(FIELD_NAME, Collections.emptyList(), null);
ProcessingDTO processingDTO = getProcessingDTO(derivedFields, kiePMMLNameValues);
final Object retrieved = kiePMMLFieldRef.evaluate(processingDTO);
assertThat(retrieved).isNull();
}
|
public ResourceID getResourceID() {
return unresolvedTaskManagerLocation.getResourceID();
}
|
@Test
void testMaximumRegistrationDurationAfterConnectionLoss() throws Exception {
configuration.set(
TaskManagerOptions.REGISTRATION_TIMEOUT, TimeUtils.parseDuration("100 ms"));
final TaskSlotTable<Task> taskSlotTable =
TaskSlotUtils.createTaskSlotTable(1, EXECUTOR_EXTENSION.getExecutor());
final TaskManagerServices taskManagerServices =
new TaskManagerServicesBuilder().setTaskSlotTable(taskSlotTable).build();
final TaskExecutor taskExecutor =
createTaskExecutor(taskManagerServices, new HeartbeatServicesImpl(10L, 10L));
taskExecutor.start();
final CompletableFuture<ResourceID> registrationFuture = new CompletableFuture<>();
final OneShotLatch secondRegistration = new OneShotLatch();
try {
final TestingResourceManagerGateway testingResourceManagerGateway =
new TestingResourceManagerGateway();
testingResourceManagerGateway.setRegisterTaskExecutorFunction(
taskExecutorRegistration -> {
if (registrationFuture.complete(taskExecutorRegistration.getResourceId())) {
return createRegistrationResponse(testingResourceManagerGateway);
} else {
secondRegistration.trigger();
return CompletableFuture.completedFuture(
new Failure(
new FlinkException(
"Only the first registration should succeed.")));
}
});
rpc.registerGateway(
testingResourceManagerGateway.getAddress(), testingResourceManagerGateway);
resourceManagerLeaderRetriever.notifyListener(
testingResourceManagerGateway.getAddress(), UUID.randomUUID());
final ResourceID registrationResourceId = registrationFuture.get();
assertThat(registrationResourceId)
.isEqualTo(
taskManagerServices.getUnresolvedTaskManagerLocation().getResourceID());
secondRegistration.await();
final Throwable error = testingFatalErrorHandler.getErrorFuture().get();
assertThat(error).isNotNull();
assertThat(ExceptionUtils.stripExecutionException(error))
.isInstanceOf(RegistrationTimeoutException.class);
testingFatalErrorHandler.clearError();
} finally {
RpcUtils.terminateRpcEndpoint(taskExecutor);
}
}
|
public <T> HttpRestResult<T> putForm(String url, Header header, Query query, Map<String, String> bodyValues,
Type responseType) throws Exception {
RequestHttpEntity requestHttpEntity = new RequestHttpEntity(
header.setContentType(MediaType.APPLICATION_FORM_URLENCODED), query, bodyValues);
return execute(url, HttpMethod.PUT, requestHttpEntity, responseType);
}
|
@Test
void testPutForm() throws Exception {
when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse);
when(mockResponse.getStatusCode()).thenReturn(200);
when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes()));
Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML);
HttpRestResult<String> result = restTemplate.putForm("http://127.0.0.1:8848/nacos/test", header, new HashMap<>(),
String.class);
assertTrue(result.ok());
assertEquals(Header.EMPTY, result.getHeader());
assertEquals("test", result.getData());
assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE));
}
|
@Override
public Mono<CheckAccountExistenceResponse> checkAccountExistence(final CheckAccountExistenceRequest request) {
final ServiceIdentifier serviceIdentifier =
ServiceIdentifierUtil.fromGrpcServiceIdentifier(request.getServiceIdentifier());
return RateLimitUtil.rateLimitByRemoteAddress(rateLimiters.getCheckAccountExistenceLimiter())
.then(Mono.fromFuture(() -> accountsManager.getByServiceIdentifierAsync(serviceIdentifier)))
.map(Optional::isPresent)
.map(accountExists -> CheckAccountExistenceResponse.newBuilder()
.setAccountExists(accountExists)
.build());
}
|
@Test
void checkAccountExistenceRateLimited() {
final Duration retryAfter = Duration.ofSeconds(11);
when(rateLimiter.validateReactive(anyString()))
.thenReturn(Mono.error(new RateLimitExceededException(retryAfter)));
//noinspection ResultOfMethodCallIgnored
GrpcTestUtils.assertRateLimitExceeded(retryAfter,
() -> unauthenticatedServiceStub().checkAccountExistence(CheckAccountExistenceRequest.newBuilder()
.setServiceIdentifier(ServiceIdentifierUtil.toGrpcServiceIdentifier(new AciServiceIdentifier(UUID.randomUUID())))
.build()),
accountsManager);
}
|
public void updateSourceTopics(final Map<String, List<String>> allSourceTopicsByNodeName) {
sourceNodesByTopic.clear();
for (final Map.Entry<String, SourceNode<?, ?>> sourceNodeEntry : sourceNodesByName.entrySet()) {
final String sourceNodeName = sourceNodeEntry.getKey();
final SourceNode<?, ?> sourceNode = sourceNodeEntry.getValue();
final List<String> updatedSourceTopics = allSourceTopicsByNodeName.get(sourceNodeName);
if (updatedSourceTopics == null) {
log.error("Unable to find source node {} in updated topics map {}",
sourceNodeName, allSourceTopicsByNodeName);
throw new IllegalStateException("Node " + sourceNodeName + " not found in full topology");
}
log.trace("Updating source node {} with new topics {}", sourceNodeName, updatedSourceTopics);
for (final String topic : updatedSourceTopics) {
if (sourceNodesByTopic.containsKey(topic)) {
log.error("Tried to subscribe topic {} to two nodes when updating topics from {}",
topic, allSourceTopicsByNodeName);
throw new IllegalStateException("Topic " + topic + " was already registered to source node "
+ sourceNodesByTopic.get(topic).name());
}
sourceNodesByTopic.put(topic, sourceNode);
}
}
}
|
@Test
public void shouldThrowIfSourceNodeToUpdateDoesNotExist() {
final String existingSourceNode = "source-1";
final String nonExistingSourceNode = "source-2";
final String topicOfExistingSourceNode = "topic-1";
final String topicOfNonExistingSourceNode = "topic-2";
topology.addSource(nonExistingSourceNode, topicOfNonExistingSourceNode);
final ProcessorTopology processorTopology = topology.getInternalBuilder("X").buildTopology();
final Throwable exception = assertThrows(
IllegalStateException.class,
() -> processorTopology.updateSourceTopics(Collections.singletonMap(
existingSourceNode, Collections.singletonList(topicOfExistingSourceNode)
))
);
assertThat(exception.getMessage(), is("Node " + nonExistingSourceNode + " not found in full topology"));
}
|
protected String defaultTransformations(InputStream inputStream) throws IOException {
String html = readFullyAsString(inputStream);
if (!CollectionUtils.isEmpty(swaggerUiOAuthProperties.getConfigParameters()))
html = addInitOauth(html);
if (swaggerUiConfig.isCsrfEnabled()) {
if (swaggerUiConfig.getCsrf().isUseLocalStorage())
html = addCSRFLocalStorage(html);
else if (swaggerUiConfig.getCsrf().isUseSessionStorage())
html = addCSRFSessionStorage(html);
else
html = addCSRF(html);
}
if (swaggerUiConfig.getSyntaxHighlight().isPresent())
html = addSyntaxHighlight(html);
if (swaggerUiConfig.getQueryConfigEnabled() == null || !swaggerUiConfig.getQueryConfigEnabled())
html = addParameters(html);
else
html = addParameter(html, QUERY_CONFIG_ENABLED_PROPERTY, swaggerUiConfig.getQueryConfigEnabled().toString());
if (swaggerUiConfig.isDisableSwaggerDefaultUrl())
html = overwriteSwaggerDefaultUrl(html);
if (StringUtils.isNotEmpty(swaggerUiConfig.getUrl()) && StringUtils.isEmpty(swaggerUiConfig.getConfigUrl())) {
html = setConfiguredApiDocsUrl(html);
}
return html;
}
|
@Test
void setApiDocUrlCorrectly() throws IOException {
var html = underTest.defaultTransformations(is);
assertThat(html, containsString(apiDocUrl));
}
|
@SuppressWarnings({"unchecked", "rawtypes"})
public static int compareTo(final Comparable thisValue, final Comparable otherValue, final OrderDirection orderDirection, final NullsOrderType nullsOrderType,
final boolean caseSensitive) {
if (null == thisValue && null == otherValue) {
return 0;
}
if (null == thisValue) {
return NullsOrderType.FIRST == nullsOrderType ? -1 : 1;
}
if (null == otherValue) {
return NullsOrderType.FIRST == nullsOrderType ? 1 : -1;
}
if (!caseSensitive && thisValue instanceof String && otherValue instanceof String) {
return compareToCaseInsensitiveString((String) thisValue, (String) otherValue, orderDirection);
}
return OrderDirection.ASC == orderDirection ? thisValue.compareTo(otherValue) : -thisValue.compareTo(otherValue);
}
|
@Test
void assertCompareToWhenBothNull() {
assertThat(CompareUtils.compareTo(null, null, OrderDirection.DESC, NullsOrderType.FIRST, caseSensitive), is(0));
}
|
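A short usage sketch of the null-ordering rules above (hypothetical calls; the last expectation assumes compareToCaseInsensitiveString behaves like String.compareToIgnoreCase):

// Nulls sort first under NullsOrderType.FIRST, regardless of direction...
assert CompareUtils.compareTo(null, "a", OrderDirection.ASC, NullsOrderType.FIRST, true) == -1;
// ...and last under NullsOrderType.LAST.
assert CompareUtils.compareTo(null, "a", OrderDirection.ASC, NullsOrderType.LAST, true) == 1;
// Strings equal ignoring case compare as 0 when caseSensitive is false.
assert CompareUtils.compareTo("ABC", "abc", OrderDirection.ASC, NullsOrderType.FIRST, false) == 0;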
@Override
public void run() {
try {
backgroundJobServer.getJobSteward().notifyThreadOccupied();
MDCMapper.loadMDCContextFromJob(job);
performJob();
} catch (Exception e) {
if (isJobDeletedWhileProcessing(e)) {
// nothing to do anymore as Job is deleted
return;
} else if (isJobServerStopped(e)) {
updateJobStateToFailedAndRunJobFilters("Job processing was stopped as background job server has stopped", e);
Thread.currentThread().interrupt();
} else if (isJobNotFoundException(e)) {
updateJobStateToFailedAndRunJobFilters("Job method not found", e);
} else {
updateJobStateToFailedAndRunJobFilters("An exception occurred during the performance of the job", e);
}
} finally {
backgroundJobServer.getJobSteward().notifyThreadIdle();
MDC.clear();
}
}
|
@Test
void onFailureAfterAllRetriesServerFilterOnFailedAfterRetriesNotCalled() {
Job job = aFailedJobWithRetries().withEnqueuedState(Instant.now()).build();
when(backgroundJobServer.getBackgroundJobRunner(job)).thenReturn(null);
BackgroundJobPerformer backgroundJobPerformer = new BackgroundJobPerformer(backgroundJobServer, job);
backgroundJobPerformer.run();
assertThat(logAllStateChangesFilter.onFailedAfterRetriesIsCalled(job)).isTrue();
}
|
public static JsonElement parseString(String json) throws JsonSyntaxException {
return parseReader(new StringReader(json));
}
|
@Test
public void testParseInvalidJson() {
assertThrows(JsonSyntaxException.class, () -> JsonParser.parseString("[[]"));
}
|
DataTableType lookupTableTypeByType(Type type) {
return lookupTableTypeByType(type, Function.identity());
}
|
@Test
void null_float_transformed_to_null() {
DataTableTypeRegistry registry = new DataTableTypeRegistry(Locale.ENGLISH);
DataTableType dataTableType = registry.lookupTableTypeByType(LIST_OF_LIST_OF_FLOAT);
assertEquals(
singletonList(singletonList(null)),
dataTableType.transform(singletonList(singletonList(null))));
}
|
ImmutableMap<PCollection<?>, FieldAccessDescriptor> getPCollectionFieldAccess() {
return ImmutableMap.copyOf(pCollectionFieldAccess);
}
|
@Test
public void testFieldAccessKnownAndUnknownMainInputs() {
Pipeline p = Pipeline.create();
FieldAccessVisitor fieldAccessVisitor = new FieldAccessVisitor();
Schema schema =
Schema.of(Field.of("field1", FieldType.STRING), Field.of("field2", FieldType.STRING));
PCollection<Row> source =
p.apply(Create.of(Row.withSchema(schema).addValues("foo", "bar").build()))
.setRowSchema(schema);
source.apply(new FieldAccessTransform(FieldAccessDescriptor.withFieldNames("field1")));
source.apply(ParDo.of(new UnknownDoFn())).setRowSchema(schema);
p.traverseTopologically(fieldAccessVisitor);
assertTrue(fieldAccessVisitor.getPCollectionFieldAccess().get(source).getAllFields());
}
|
@Override
public KsMaterializedQueryResult<WindowedRow> get(
final GenericKey key,
final int partition,
final Range<Instant> windowStartBounds,
final Range<Instant> windowEndBounds,
final Optional<Position> position
) {
try {
final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore
.store(QueryableStoreTypes.timestampedWindowStore(), partition);
final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds);
final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds);
try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it
= cacheBypassFetcher.fetch(store, key, lower, upper)) {
final Builder<WindowedRow> builder = ImmutableList.builder();
while (it.hasNext()) {
final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next();
final Instant windowStart = Instant.ofEpochMilli(next.key);
if (!windowStartBounds.contains(windowStart)) {
continue;
}
final Instant windowEnd = windowStart.plus(windowSize);
if (!windowEndBounds.contains(windowEnd)) {
continue;
}
final TimeWindow window =
new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli());
final WindowedRow row = WindowedRow.of(
stateStore.schema(),
new Windowed<>(key, window),
next.value.value(),
next.value.timestamp()
);
builder.add(row);
}
return KsMaterializedQueryResult.rowIterator(builder.build().iterator());
}
} catch (final Exception e) {
throw new MaterializationException("Failed to get value from materialized table", e);
}
}
|
@Test
public void shouldFetchWithEndUpperBoundIfLowest() {
// Given:
final Range<Instant> startBounds = Range.closed(
NOW,
NOW.plusSeconds(20)
);
final Range<Instant> endBounds = Range.closed(
NOW.plusSeconds(5),
NOW.plusSeconds(10)
);
// When:
table.get(A_KEY, PARTITION, startBounds, endBounds);
// Then:
verify(cacheBypassFetcher).fetch(
eq(tableStore), any(), any(), eq(endBounds.upperEndpoint().minus(WINDOW_SIZE)));
}
|
@Override
public void startMediaRequest(
@NonNull String[] mimeTypes, int requestId, @NonNull InsertionRequestCallback callback) {
mCurrentRunningLocalProxy.dispose();
mCurrentRequest = requestId;
mCurrentCallback = callback;
final Intent pickingIntent = getMediaInsertRequestIntent(mimeTypes, requestId);
mContext.startActivity(pickingIntent);
}
|
@Test
public void testStartsPickActivityWithRequest() {
mUnderTest.startMediaRequest(new String[] {"media/png"}, 123, mCallback);
Mockito.verifyZeroInteractions(mCallback);
final Intent mediaInsertionIntent =
Shadows.shadowOf((Application) ApplicationProvider.getApplicationContext())
.getNextStartedActivity();
Assert.assertEquals(
MediaInsertion.INTENT_MEDIA_INSERTION_REQUEST_ACTION, mediaInsertionIntent.getAction());
Assert.assertEquals(
Intent.FLAG_ACTIVITY_NEW_TASK,
mediaInsertionIntent.getFlags() & Intent.FLAG_ACTIVITY_NEW_TASK);
Assert.assertEquals(
0 /*do not set this flag*/,
mediaInsertionIntent.getFlags() & Intent.FLAG_ACTIVITY_NO_HISTORY);
Assert.assertEquals(
Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS,
mediaInsertionIntent.getFlags() & Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
Assert.assertArrayEquals(
new String[] {"media/png"},
mediaInsertionIntent.getStringArrayExtra(
MediaInsertion.INTENT_MEDIA_INSERTION_REQUEST_MEDIA_MIMES_KEY));
Assert.assertEquals(
123,
mediaInsertionIntent.getIntExtra(
MediaInsertion.INTENT_MEDIA_INSERTION_REQUEST_MEDIA_REQUEST_ID_KEY, 0));
Mockito.verifyZeroInteractions(mCallback);
}
|
@Override
public <V> MultiLabel generateOutput(V label) {
if (label instanceof Collection) {
Collection<?> c = (Collection<?>) label;
List<Pair<String,Boolean>> dimensions = new ArrayList<>();
for (Object o : c) {
dimensions.add(MultiLabel.parseElement(o.toString()));
}
return MultiLabel.createFromPairList(dimensions);
}
return MultiLabel.parseString(label.toString());
}
|
@Test
public void testGenerateOutput_labelSet() {
MultiLabelFactory factory = new MultiLabelFactory();
Set<Label> labels = new HashSet<>();
labels.add(new Label("a"));
labels.add(new Label("b"));
labels.add(new Label("c"));
MultiLabel output = factory.generateOutput(labels);
assertEquals(3, output.getLabelSet().size());
assertEquals("a,b,c", output.getLabelString());
}
|
@InvokeOnHeader(Web3jConstants.ETH_GET_WORK)
void ethGetWork(Message message) throws IOException {
Request<?, EthGetWork> request = web3j.ethGetWork();
setRequestId(message, request);
EthGetWork response = request.send();
boolean hasError = checkForError(message, response);
if (!hasError) {
message.setBody(response.getResult());
}
}
|
@Test
public void ethGetWorkTest() throws Exception {
EthGetWork response = Mockito.mock(EthGetWork.class);
Mockito.when(mockWeb3j.ethGetWork()).thenReturn(request);
Mockito.when(request.send()).thenReturn(response);
Mockito.when(response.getResult()).thenReturn(Collections.EMPTY_LIST);
Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_GET_WORK);
template.send(exchange);
List body = exchange.getIn().getBody(List.class);
assertEquals(Collections.EMPTY_LIST, body);
}
|
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n) {
return invoke(n, BigDecimal.ZERO);
}
|
@Test
void invokeNull() {
FunctionTestUtil.assertResultError(roundHalfDownFunction.invoke(null), InvalidParametersEvent.class);
FunctionTestUtil.assertResultError(roundHalfDownFunction.invoke((BigDecimal) null, null),
InvalidParametersEvent.class);
FunctionTestUtil.assertResultError(roundHalfDownFunction.invoke(BigDecimal.ONE, null),
InvalidParametersEvent.class);
FunctionTestUtil.assertResultError(roundHalfDownFunction.invoke(null, BigDecimal.ONE),
InvalidParametersEvent.class);
}
|
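Assuming from its name that the function ultimately delegates to BigDecimal.setScale with RoundingMode.HALF_DOWN (the snippet itself only shows the one-argument overload defaulting the scale to zero), the rounding behaves like this sketch:

import java.math.BigDecimal;
import java.math.RoundingMode;

final class HalfDownDemo {
    public static void main(String[] args) {
        // HALF_DOWN rounds to the nearest neighbor; ties go toward zero.
        System.out.println(new BigDecimal("2.5").setScale(0, RoundingMode.HALF_DOWN));  // 2
        System.out.println(new BigDecimal("2.6").setScale(0, RoundingMode.HALF_DOWN));  // 3
        System.out.println(new BigDecimal("-2.5").setScale(0, RoundingMode.HALF_DOWN)); // -2
    }
}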
@Override
public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final JoinWindows windows) {
return join(otherStream, toValueJoinerWithKey(joiner), windows);
}
|
@Test
public void shouldNotAllowNullJoinWindowsOnJoin() {
final NullPointerException exception = assertThrows(
NullPointerException.class,
() -> testStream.join(testStream, MockValueJoiner.TOSTRING_JOINER, null));
assertThat(exception.getMessage(), equalTo("windows can't be null"));
}
|
@SuppressWarnings("unchecked")
public V putIfAbsent(final int key, final V value)
{
final V val = (V)mapNullValue(value);
requireNonNull(val, "value cannot be null");
final int[] keys = this.keys;
final Object[] values = this.values;
@DoNotSub final int mask = values.length - 1;
@DoNotSub int index = Hashing.hash(key, mask);
Object mappedValue;
while (null != (mappedValue = values[index]))
{
if (key == keys[index])
{
break;
}
index = ++index & mask;
}
final V oldValue = unmapNullValue(mappedValue);
if (null == oldValue)
{
if (null == mappedValue)
{
++size;
keys[index] = key;
}
values[index] = val;
if (size > resizeThreshold)
{
increaseCapacity();
}
}
return oldValue;
}
|
@Test
void putIfAbsentThrowsNullPointerExceptionIfValueIsNull()
{
final NullPointerException exception =
assertThrowsExactly(NullPointerException.class, () -> intToObjectMap.putIfAbsent(42, null));
assertEquals("value cannot be null", exception.getMessage());
}
|
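The probe loop in putIfAbsent is standard open addressing over a power-of-two table: the mask is capacity - 1, so a bitwise AND replaces the modulo, and ++index & mask steps linearly with wrap-around. The probe, distilled from the method above:

// Linear probing: 'mask' is capacity - 1 for a power-of-two capacity,
// so '& mask' wraps the index without a division.
int mask = values.length - 1;          // e.g. capacity 16 -> mask 0b1111
int index = Hashing.hash(key, mask);   // initial slot
while (values[index] != null && keys[index] != key) {
    index = (index + 1) & mask;        // next slot, wrapping at capacity
}
// Here either values[index] == null (a free slot) or keys[index] == key (an existing entry).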
public static void notNull(Object object, String message) {
if (object == null) {
throw new IllegalArgumentException(message);
}
}
|
@Test(expected = IllegalArgumentException.class)
public void assertNotNull() {
Assert.notNull(null, "object is null");
}
|
public void setRepeated(boolean repeated) {
this.repeated = repeated;
}
|
@Test
public void setRepeated() {
SAExposureConfig saExposureConfig = new SAExposureConfig(1,1,true);
saExposureConfig.setRepeated(false);
assertFalse(saExposureConfig.isRepeated());
}
|
public static String getRmPrincipal(Configuration conf) throws IOException {
String principal = conf.get(YarnConfiguration.RM_PRINCIPAL);
String prepared = null;
if (principal != null) {
prepared = getRmPrincipal(principal, conf);
}
return prepared;
}
|
@Test
public void testGetRMPrincipalHA_String() throws IOException {
Configuration conf = new Configuration();
conf.set(YarnConfiguration.RM_ADDRESS + ".rm0", "myhost");
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf.set(YarnConfiguration.RM_HA_IDS, "rm0");
String result = YarnClientUtils.getRmPrincipal("test/_HOST@REALM", conf);
assertEquals("The hostname translation did not produce the expected "
+ "results: " + result, "test/myhost@REALM", result);
try {
result = YarnClientUtils.getRmPrincipal(null, conf);
fail("The hostname translation succeeded even though the RM principal "
+ "was null: " + result);
} catch (IllegalArgumentException ex) {
// Expected
}
conf = new Configuration();
conf.set(YarnConfiguration.RM_ADDRESS + ".rm0", "myhost");
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
try {
YarnClientUtils.getRmPrincipal("test/_HOST@REALM", conf);
fail("The hostname translation succeeded even though no RM ids were set");
} catch (IOException ex) {
// Expected
}
conf = new Configuration();
conf.set(YarnConfiguration.RM_ADDRESS + ".rm0", "myhost");
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf.set(YarnConfiguration.RM_HA_ID, "rm0");
result = YarnClientUtils.getRmPrincipal("test/_HOST@REALM", conf);
assertEquals("The hostname translation did not produce the expected "
+ "results: " + result, "test/myhost@REALM", result);
result = YarnClientUtils.getRmPrincipal("test/yourhost@REALM", conf);
assertEquals("The hostname translation did not produce the expected "
+ "results: " + result, "test/yourhost@REALM", result);
}
|
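The _HOST handling the test exercises is the standard Hadoop Kerberos convention: the placeholder in a service/_HOST@REALM principal is replaced with the resolved RM hostname, and principals without the placeholder pass through unchanged. A simplified sketch of the substitution (not the Hadoop implementation):

// "test/_HOST@REALM" + host "myhost" -> "test/myhost@REALM"
static String replaceHostPattern(String principal, String host) {
    String[] parts = principal.split("[/@]");
    if (parts.length == 3 && "_HOST".equals(parts[1])) {
        return parts[0] + "/" + host + "@" + parts[2];
    }
    return principal; // no placeholder: returned unchanged, as in the last assertion
}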
public @Nullable String formatDiff(A actual, E expected) {
return null;
}
|
@Test
public void testTransforming_actual_formatDiff() {
assertThat(LENGTHS.formatDiff("foo", 4)).isNull();
}
|
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
throws KettleException {
try {
databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases );
schemaName = rep.getStepAttributeString( id_step, "schema" );
tableName = rep.getStepAttributeString( id_step, "table" );
commitSize = rep.getStepAttributeString( id_step, "commit" );
truncateTable = rep.getStepAttributeBoolean( id_step, "truncate" );
ignoreErrors = rep.getStepAttributeBoolean( id_step, "ignore_errors" );
useBatchUpdate = rep.getStepAttributeBoolean( id_step, "use_batch" );
specifyFields = rep.getStepAttributeBoolean( id_step, "specify_fields" );
partitioningEnabled = rep.getStepAttributeBoolean( id_step, "partitioning_enabled" );
partitioningField = rep.getStepAttributeString( id_step, "partitioning_field" );
partitioningDaily = rep.getStepAttributeBoolean( id_step, "partitioning_daily" );
partitioningMonthly = rep.getStepAttributeBoolean( id_step, "partitioning_monthly" );
tableNameInField = rep.getStepAttributeBoolean( id_step, "tablename_in_field" );
tableNameField = rep.getStepAttributeString( id_step, "tablename_field" );
tableNameInTable = rep.getStepAttributeBoolean( id_step, "tablename_in_table" );
returningGeneratedKeys = rep.getStepAttributeBoolean( id_step, "return_keys" );
generatedKeyField = rep.getStepAttributeString( id_step, "return_field" );
int nrCols = rep.countNrStepAttributes( id_step, "column_name" );
int nrStreams = rep.countNrStepAttributes( id_step, "stream_name" );
int nrRows = ( nrCols < nrStreams ? nrStreams : nrCols );
allocate( nrRows );
for ( int idx = 0; idx < nrRows; idx++ ) {
fieldDatabase[ idx ] = Const.NVL( rep.getStepAttributeString( id_step, idx, "column_name" ), "" );
fieldStream[ idx ] = Const.NVL( rep.getStepAttributeString( id_step, idx, "stream_name" ), "" );
}
} catch ( Exception e ) {
throw new KettleException( "Unexpected error reading step information from the repository", e );
}
}
|
@Test
public void testReadRep() throws KettleException {
//check variable
String commitSize = "${test}";
Repository rep = new MemoryRepository();
rep.saveStepAttribute( null, null, "commit", commitSize );
TableOutputMeta tableOutputMeta = new TableOutputMeta();
tableOutputMeta.readRep( rep, metaStore, null, databases );
assertEquals( commitSize, tableOutputMeta.getCommitSize() );
//check integer size
int commitSizeInt = 1;
Repository rep2 = new MemoryRepository();
rep2.saveStepAttribute( null, null, "commit", commitSizeInt );
TableOutputMeta tableOutputMeta2 = new TableOutputMeta();
tableOutputMeta2.readRep( rep2, metaStore, null, databases );
assertEquals( String.valueOf( commitSizeInt ), tableOutputMeta2.getCommitSize() );
}
|
@VisibleForTesting
static int checkJar(Path file) throws Exception {
final URI uri = file.toUri();
int numSevereIssues = 0;
try (final FileSystem fileSystem =
FileSystems.newFileSystem(
new URI("jar:file", uri.getHost(), uri.getPath(), uri.getFragment()),
Collections.emptyMap())) {
if (isTestJarAndEmpty(file, fileSystem.getPath("/"))) {
return 0;
}
if (!noticeFileExistsAndIsValid(fileSystem.getPath("META-INF", "NOTICE"), file)) {
numSevereIssues++;
}
if (!licenseFileExistsAndIsValid(fileSystem.getPath("META-INF", "LICENSE"), file)) {
numSevereIssues++;
}
numSevereIssues +=
getNumLicenseFilesOutsideMetaInfDirectory(file, fileSystem.getPath("/"));
numSevereIssues += getFilesWithIncompatibleLicenses(file, fileSystem.getPath("/"));
}
return numSevereIssues;
}
|
@Test
void testRejectedOnMissingLicenseFile(@TempDir Path tempDir) throws Exception {
assertThat(
JarFileChecker.checkJar(
createJar(
tempDir,
Entry.fileEntry(VALID_NOTICE_CONTENTS, VALID_NOTICE_PATH))))
.isEqualTo(1);
}
|
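checkJar leans on the JDK's zip file system provider: any jar can be opened as a FileSystem and browsed with the java.nio.file API, which is how the NOTICE and LICENSE entries are located. A minimal standalone sketch (hypothetical jar path):

import java.net.URI;
import java.nio.file.*;
import java.util.Collections;

final class JarBrowser {
    public static void main(String[] args) throws Exception {
        URI jar = URI.create("jar:file:///tmp/example.jar"); // hypothetical jar
        try (FileSystem fs = FileSystems.newFileSystem(jar, Collections.emptyMap())) {
            // Jar entries are ordinary Paths, e.g. META-INF/NOTICE
            Path notice = fs.getPath("META-INF", "NOTICE");
            System.out.println(Files.exists(notice));
        }
    }
}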
public static List<String> readLines(Reader input) throws IOException {
BufferedReader reader = toBufferedReader(input);
List<String> list = new ArrayList<>();
String line;
while ((line = reader.readLine()) != null) {
    if (StringUtil.isNotEmpty(line)) {
        list.add(line.trim());
    }
}
return list;
}
|
@Test
public void testReadLines() throws IOException {
File tempFile = new File(tempDir.toFile(), "testReadLines.txt");
try (
PrintWriter writer = new PrintWriter(tempFile)) {
writer.println("test string 1");
writer.println("test string 2");
writer.println("test string 3");
}
FileReader fileReader = new FileReader(tempFile);
List<String> lines = IoUtil.readLines(fileReader);
fileReader.close();
Assert.assertEquals(3, lines.size());
Assert.assertEquals("test string 1", lines.get(0));
Assert.assertEquals("test string 2", lines.get(1));
Assert.assertEquals("test string 3", lines.get(2));
}
|
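Note that readLines both drops empty lines and trims the surviving ones, so it behaves like the stream pipeline below rather than a plain read-all (a behavioral sketch, not the IoUtil implementation):

import java.io.BufferedReader;
import java.util.List;
import java.util.stream.Collectors;

// Same behavior: skip empty lines, trim the rest.
static List<String> readLines(BufferedReader reader) {
    return reader.lines()
            .filter(line -> !line.isEmpty())
            .map(String::trim)
            .collect(Collectors.toList());
}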
public void publish(DefaultGoPublisher goPublisher, String destPath, File source, JobIdentifier jobIdentifier) {
if (!source.exists()) {
String message = "Failed to find " + source.getAbsolutePath();
goPublisher.taggedConsumeLineWithPrefix(PUBLISH_ERR, message);
bomb(message);
}
int publishingAttempts = 0;
Throwable lastException = null;
while (publishingAttempts < PUBLISH_MAX_RETRIES) {
File tmpDir = null;
try {
publishingAttempts++;
tmpDir = FileUtil.createTempFolder();
File dataToUpload = new File(tmpDir, source.getName() + ".zip");
zipUtil.zip(source, dataToUpload, Deflater.BEST_SPEED);
long size = 0;
if (source.isDirectory()) {
size = FileUtils.sizeOfDirectory(source);
} else {
size = source.length();
}
goPublisher.taggedConsumeLineWithPrefix(PUBLISH, "Uploading artifacts from " + source.getAbsolutePath() + " to " + getDestPath(destPath));
String normalizedDestPath = FilenameUtils.separatorsToUnix(destPath);
String url = urlService.getUploadUrlOfAgent(jobIdentifier, normalizedDestPath, publishingAttempts);
int statusCode = httpService.upload(url, size, dataToUpload, artifactChecksums(source, normalizedDestPath));
if (statusCode == HttpURLConnection.HTTP_ENTITY_TOO_LARGE) {
String message = String.format("Artifact upload for file %s (Size: %s) was denied by the server. This usually happens when server runs out of disk space.",
source.getAbsolutePath(), size);
goPublisher.taggedConsumeLineWithPrefix(PUBLISH_ERR, message);
LOGGER.error("[Artifact Upload] Artifact upload was denied by the server. This usually happens when server runs out of disk space.");
publishingAttempts = PUBLISH_MAX_RETRIES;
bomb(message + ". HTTP return code is " + statusCode);
}
if (statusCode < HttpURLConnection.HTTP_OK || statusCode >= HttpURLConnection.HTTP_MULT_CHOICE) {
bomb("Failed to upload " + source.getAbsolutePath() + ". HTTP return code is " + statusCode);
}
return;
} catch (Throwable e) {
String message = "Failed to upload " + source.getAbsolutePath();
LOGGER.error(message, e);
goPublisher.taggedConsumeLineWithPrefix(PUBLISH_ERR, message);
lastException = e;
} finally {
FileUtils.deleteQuietly(tmpDir);
}
}
if (lastException != null) {
throw new RuntimeException(lastException);
}
}
|
@Test
public void shouldUploadArtifactChecksumForADirectory() throws IOException {
String data = "Some text whose checksum can be asserted";
String secondData = "some more";
FileUtils.writeStringToFile(tempFile, data, UTF_8);
File anotherFile = artifactFolder.resolve("bond/james_bond/another_file").toFile();
FileUtils.writeStringToFile(anotherFile, secondData, UTF_8);
when(httpService.upload(any(String.class), eq(FileUtils.sizeOfDirectory(artifactFolder.toFile())), any(File.class), eq(expectedProperties(data, secondData)))).thenReturn(HttpServletResponse.SC_OK);
goArtifactsManipulatorStub.publish(goPublisher, "dest", artifactFolder.toFile(), jobIdentifier);
}
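|
// The publish method above follows a bounded-retry pattern: attempt up to
// PUBLISH_MAX_RETRIES times, remember the most recent failure, and rethrow it
// once attempts are exhausted. A stripped-down sketch of that control flow,
// with maxRetries and the Callable as illustrative stand-ins:
import java.util.concurrent.Callable;

static <T> T withRetries(int maxRetries, Callable<T> action) {
    Throwable last = null;
    for (int attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            return action.call(); // success short-circuits the loop, as publish() returns early
        } catch (Exception e) {
            last = e; // keep only the latest failure, matching lastException above
        }
    }
    throw new RuntimeException(last);
}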
|
public IssueQuery create(SearchRequest request) {
try (DbSession dbSession = dbClient.openSession(false)) {
final ZoneId timeZone = parseTimeZone(request.getTimeZone()).orElse(clock.getZone());
Collection<RuleDto> ruleDtos = ruleKeysToRuleId(dbSession, request.getRules());
Collection<String> ruleUuids = ruleDtos.stream().map(RuleDto::getUuid).collect(Collectors.toSet());
Collection<String> issueKeys = collectIssueKeys(dbSession, request);
if (request.getRules() != null && request.getRules().stream().collect(Collectors.toSet()).size() != ruleDtos.size()) {
ruleUuids.add("non-existing-uuid");
}
IssueQuery.Builder builder = IssueQuery.builder()
.issueKeys(issueKeys)
.severities(request.getSeverities())
.cleanCodeAttributesCategories(request.getCleanCodeAttributesCategories())
.impactSoftwareQualities(request.getImpactSoftwareQualities())
.impactSeverities(request.getImpactSeverities())
.statuses(request.getStatuses())
.resolutions(request.getResolutions())
.issueStatuses(request.getIssueStatuses())
.resolved(request.getResolved())
.prioritizedRule(request.getPrioritizedRule())
.rules(ruleDtos)
.ruleUuids(ruleUuids)
.assigneeUuids(request.getAssigneeUuids())
.authors(request.getAuthors())
.scopes(request.getScopes())
.languages(request.getLanguages())
.tags(request.getTags())
.types(request.getTypes())
.pciDss32(request.getPciDss32())
.pciDss40(request.getPciDss40())
.owaspAsvs40(request.getOwaspAsvs40())
.owaspAsvsLevel(request.getOwaspAsvsLevel())
.owaspTop10(request.getOwaspTop10())
.owaspTop10For2021(request.getOwaspTop10For2021())
.stigAsdR5V3(request.getStigAsdV5R3())
.casa(request.getCasa())
.sansTop25(request.getSansTop25())
.cwe(request.getCwe())
.sonarsourceSecurity(request.getSonarsourceSecurity())
.assigned(request.getAssigned())
.createdAt(parseStartingDateOrDateTime(request.getCreatedAt(), timeZone))
.createdBefore(parseEndingDateOrDateTime(request.getCreatedBefore(), timeZone))
.facetMode(request.getFacetMode())
.timeZone(timeZone)
.codeVariants(request.getCodeVariants());
List<ComponentDto> allComponents = new ArrayList<>();
boolean effectiveOnComponentOnly = mergeDeprecatedComponentParameters(dbSession, request, allComponents);
addComponentParameters(builder, dbSession, effectiveOnComponentOnly, allComponents, request);
setCreatedAfterFromRequest(dbSession, builder, request, allComponents, timeZone);
String sort = request.getSort();
if (!isNullOrEmpty(sort)) {
builder.sort(sort);
builder.asc(request.getAsc());
}
return builder.build();
}
}
|
@Test
public void application_search_project_issues() {
ProjectData projectData1 = db.components().insertPublicProject();
ComponentDto project1 = projectData1.getMainBranchComponent();
ProjectData projectData2 = db.components().insertPublicProject();
ComponentDto project2 = projectData2.getMainBranchComponent();
ProjectData applicationData = db.components().insertPublicApplication();
ComponentDto applicationMainBranch = applicationData.getMainBranchComponent();
db.components().insertComponents(newProjectCopy("PC1", project1, applicationMainBranch));
db.components().insertComponents(newProjectCopy("PC2", project2, applicationMainBranch));
userSession.registerApplication(applicationData.getProjectDto())
.registerProjects(projectData1.getProjectDto(), projectData2.getProjectDto())
.registerBranches(applicationData.getMainBranchDto());
IssueQuery result = underTest.create(new SearchRequest().setComponentUuids(singletonList(applicationMainBranch.uuid())));
assertThat(result.viewUuids()).containsExactlyInAnyOrder(applicationMainBranch.uuid());
}
|
public static void updateKeyForBlobStore(Map<String, Object> conf, BlobStore blobStore, CuratorFramework zkClient, String key,
NimbusInfo nimbusDetails) {
try {
// Most of the Clojure tests currently access blobs using getBlob. Since updateKeyForBlobStore
// checks that the correct version of the blob is updated (as part of nimbus HA) before performing any
// operation on it, several test cases would need to be stubbed to bypass this method. It is a valid
// trade-off to return early if nimbusDetails (the host/port data of the current nimbus) has not been
// initialized as part of the test. Moreover, this applies only to the local blobstore when used along
// with nimbus HA.
if (nimbusDetails == null) {
return;
}
boolean isListContainsCurrentNimbusInfo = false;
List<String> stateInfo;
if (zkClient.checkExists().forPath(BLOBSTORE_SUBTREE + "/" + key) == null) {
return;
}
stateInfo = zkClient.getChildren().forPath(BLOBSTORE_SUBTREE + "/" + key);
if (stateInfo == null || stateInfo.isEmpty()) {
return;
}
LOG.debug("StateInfo for update {}", stateInfo);
Set<NimbusInfo> nimbusInfoList = getNimbodesWithLatestSequenceNumberOfBlob(zkClient, key);
for (NimbusInfo nimbusInfo : nimbusInfoList) {
if (nimbusInfo.getHost().equals(nimbusDetails.getHost())) {
isListContainsCurrentNimbusInfo = true;
break;
}
}
if (!isListContainsCurrentNimbusInfo && downloadUpdatedBlob(conf, blobStore, key, nimbusInfoList)) {
LOG.debug("Updating state inside zookeeper for an update");
createStateInZookeeper(conf, key, nimbusDetails);
}
} catch (KeeperException.NoNodeException | KeyNotFoundException e) {
//race condition with a delete
return;
} catch (Exception exp) {
throw new RuntimeException(exp);
}
}
|
@Test
public void testUpdateKeyForBlobStore_missingNode() {
zkClientBuilder.withExists(BLOBSTORE_KEY, false);
BlobStoreUtils.updateKeyForBlobStore(conf, blobStore, zkClientBuilder.build(), KEY, nimbusDetails);
zkClientBuilder.verifyExists(true);
zkClientBuilder.verifyGetChildren(false);
verify(nimbusDetails, never()).getHost();
verify(conf, never()).get(anyString());
}
|
@Override
public SortedSet<Path> convertFrom(String value) {
if (value == null) {
throw new ParameterException("Path list must not be null.");
}
return Arrays.stream(value.split(SEPARATOR))
.map(StringUtils::trimToNull)
.filter(Objects::nonNull)
.map(Paths::get)
.collect(Collectors.toCollection(sortedPathSupplier()));
}
|
@Test
public void testConvertFrom() {
// Verify path set sizes.
assertEquals(0, converter.convertFrom("").size());
assertEquals(0, converter.convertFrom(",").size());
assertEquals(0, converter.convertFrom(",,,").size());
assertEquals(1, converter.convertFrom("/another-dir").size());
assertEquals(1, converter.convertFrom("/another-dir;/some-dir;/finally-dir").size());
assertEquals(3, converter.convertFrom("/another-dir, /some-dir, /finally-dir").size());
// Verify path sorting.
final String unsortedPaths = "/some-dir,/Z-dir,/z-dir,/another-dir/sub,/another-dir";
final SortedSet<Path> result = converter.convertFrom(unsortedPaths);
assertEquals(5, result.size());
assertEquals("Paths were not sorted as expected",
"/Z-dir,/another-dir,/another-dir/sub,/some-dir,/z-dir",
result.stream().map(Path::toString).collect(Collectors.joining(",")));
}
|
@SuppressWarnings("unchecked")
@Override
public void onClosed(final String remoteAddress, final Connection conn) {
final Set<PeerPair> pairs = (Set<PeerPair>) conn.getAttribute(PAIR_ATTR);
if (pairs != null && !pairs.isEmpty()) {
// Clear request contexts when connection disconnected.
for (final Map.Entry<String, ConcurrentMap<PeerPair, PeerRequestContext>> entry : this.peerRequestContexts
.entrySet()) {
final ConcurrentMap<PeerPair, PeerRequestContext> groupCtxs = entry.getValue();
synchronized (Utils.withLockObject(groupCtxs)) {
for (PeerPair pair : pairs) {
final PeerRequestContext ctx = groupCtxs.remove(pair);
if (ctx != null) {
ctx.destroy();
}
}
}
}
} else {
LOG.info("Connection disconnected: {}", remoteAddress);
}
}
|
@Test
public void testOnClosed() {
mockNode();
final AppendEntriesRequestProcessor processor = (AppendEntriesRequestProcessor) newProcessor();
PeerPair pair = processor.pairOf(this.peerIdStr, this.serverId);
final PeerRequestContext ctx = processor.getOrCreatePeerRequestContext(this.groupId, pair, this.conn);
assertNotNull(ctx);
assertSame(ctx, processor.getPeerRequestContext(this.groupId, pair));
assertSame(ctx, processor.getOrCreatePeerRequestContext(this.groupId, pair, this.conn));
processor.onClosed(null, this.conn);
assertNull(processor.getPeerRequestContext(this.groupId, pair));
assertNotSame(ctx, processor.getOrCreatePeerRequestContext(this.groupId, pair, this.conn));
}
|
public static String formatLocalizedErrorMessage(Message localizedErrorMessage, Locale locale) {
var bundle = ResourceBundle.getBundle(BUNDLE, locale);
var key = localizedErrorMessage.messageKey();
var localizedMessage = bundle.getString(key);
if (!key.isBlank()) {
localizedMessage = localizedMessage.formatted((Object[]) localizedErrorMessage.args());
}
return localizedMessage;
}
|
@Test
void test_negotiatePreferredLocales_simpleError() {
var errorMessage = new Message("error.serverError");
var locale = Locale.GERMANY;
var result = LocaleUtils.formatLocalizedErrorMessage(errorMessage, locale);
assertEquals("Ohh nein! Unerwarteter Serverfehler. Bitte versuchen Sie es erneut.", result);
}
|
@SuppressWarnings("unchecked")
public static <T> T[] removeValues(T[] values, Predicate<T> shouldRemove, Class<T> type) {
Collection<T> collection = new ArrayList<>(values.length);
for (T value : values) {
if (shouldRemove.negate().test(value)) {
collection.add(value);
}
}
T[] array = (T[]) Array.newInstance(type, collection.size());
return collection.toArray(array);
}
|
@Test
void removesEvenNumbers() {
Integer[] values = {22, 23};
assertThat(removeValues(values, number -> number % 2 == 0, Integer.class))
.containsExactly(23);
}
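|
// An equivalent sketch of removeValues using streams; passing an array
// constructor (e.g. Integer[]::new) replaces the reflective Array.newInstance
// call and the Class<T> parameter of the method above.
import java.util.Arrays;
import java.util.function.IntFunction;
import java.util.function.Predicate;

static <T> T[] removeValuesStream(T[] values, Predicate<T> shouldRemove, IntFunction<T[]> generator) {
    return Arrays.stream(values).filter(shouldRemove.negate()).toArray(generator);
}
// Usage: removeValuesStream(new Integer[] {22, 23}, n -> n % 2 == 0, Integer[]::new) -> [23]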
|
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
throw new ReadOnlyBufferException();
}
|
@Test
public void testSetBytesWithChannel() {
final ByteBuf buf = newBuffer(wrappedBuffer(new byte[8]));
try {
assertThrows(ReadOnlyBufferException.class, new Executable() {
@Override
public void execute() throws IOException {
buf.setBytes(0, new ScatteringByteChannel() {
@Override
public long read(ByteBuffer[] dsts, int offset, int length) {
return 0;
}
@Override
public long read(ByteBuffer[] dsts) {
return 0;
}
@Override
public int read(ByteBuffer dst) {
return 0;
}
@Override
public boolean isOpen() {
return true;
}
@Override
public void close() {
}
}, 4);
}
});
} finally {
buf.release();
}
}
|
public static void unitize2(double[] array) {
double n = norm(array);
for (int i = 0; i < array.length; i++) {
array[i] /= n;
}
}
|
@Test
public void testUnitize2() {
System.out.println("unitize2");
double[] data = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0};
MathEx.unitize2(data);
assertEquals(1, MathEx.norm2(data), 1E-7);
}
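|
// unitize2 divides every component by the Euclidean norm, so the result has
// L2 norm 1. A self-contained sketch with the norm computed inline (assuming
// norm() in MathEx is the Euclidean norm), plus a worked example:
static void unitize2Sketch(double[] array) {
    double n = 0.0;
    for (double v : array) {
        n += v * v;
    }
    n = Math.sqrt(n);
    for (int i = 0; i < array.length; i++) {
        array[i] /= n;
    }
}
// e.g. {3.0, 4.0} has norm sqrt(9 + 16) = 5, so it becomes {0.6, 0.8}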
|
public Service createGenericResourceService(String name, String version, String resource, String referencePayload)
throws EntityAlreadyExistsException {
log.info("Creating a new Service '{}-{}' for generic resource {}", name, version, resource);
// Check if corresponding Service already exists.
Service existingService = serviceRepository.findByNameAndVersion(name, version);
if (existingService != null) {
log.warn("A Service '{}-{}' is already existing. Throwing an Exception", name, version);
throw new EntityAlreadyExistsException(
String.format("Service '%s-%s' is already present in store", name, version));
}
// Create new service with GENERIC_REST type.
Service service = new Service();
service.setName(name);
service.setVersion(version);
service.setType(ServiceType.GENERIC_REST);
service.setMetadata(new Metadata());
// Now create basic crud operations for the resource.
Operation createOp = new Operation();
createOp.setName("POST /" + resource);
createOp.setMethod("POST");
service.addOperation(createOp);
Operation getOp = new Operation();
getOp.setName("GET /" + resource + "/:id");
getOp.setMethod("GET");
getOp.setDispatcher(DispatchStyles.URI_PARTS);
getOp.setDispatcherRules("id");
service.addOperation(getOp);
Operation updateOp = new Operation();
updateOp.setName("PUT /" + resource + "/:id");
updateOp.setMethod("PUT");
updateOp.setDispatcher(DispatchStyles.URI_PARTS);
updateOp.setDispatcherRules("id");
service.addOperation(updateOp);
Operation listOp = new Operation();
listOp.setName("GET /" + resource);
listOp.setMethod("GET");
service.addOperation(listOp);
Operation delOp = new Operation();
delOp.setName("DELETE /" + resource + "/:id");
delOp.setMethod("DELETE");
delOp.setDispatcher(DispatchStyles.URI_PARTS);
delOp.setDispatcherRules("id");
service.addOperation(delOp);
serviceRepository.save(service);
log.info("Having created Service '{}' for generic resource {}", service.getId(), resource);
// If reference payload is provided, record a first resource.
if (referencePayload != null) {
GenericResource genericResource = new GenericResource();
genericResource.setServiceId(service.getId());
genericResource.setReference(true);
try {
Document document = Document.parse(referencePayload);
genericResource.setPayload(document);
genericResourceRepository.save(genericResource);
} catch (JsonParseException jpe) {
log.error("Cannot parse the provided reference payload as JSON: {}", referencePayload);
log.error("Reference is ignored, please provide JSON the next time");
}
}
// Publish a Service create event before returning.
publishServiceChangeEvent(service, ChangeType.CREATED);
return service;
}
|
@Test
void testCreateGenericResourceServiceFailure() throws EntityAlreadyExistsException {
assertThrows(EntityAlreadyExistsException.class, () -> {
try {
Service first = service.createGenericResourceService("Order Service", "1.0", "order", null);
} catch (Exception e) {
fail("No exception should be raised on first save()!");
}
Service second = service.createGenericResourceService("Order Service", "1.0", "order", null);
});
}
|
@Override
protected Object getContent(ScmGetRequest request) {
GithubScm.validateUserHasPushPermission(request.getApiUrl(), request.getCredentials().getPassword().getPlainText(), request.getOwner(), request.getRepo());
String url = String.format("%s/repos/%s/%s/contents/%s",
request.getApiUrl(),
request.getOwner(),
request.getRepo(),
request.getPath());
if(request.getBranch() != null){ //if branch is present fetch this file from branch
url += "?ref="+request.getBranch();
}
try {
Map ghContent = HttpRequest.get(url)
.withAuthorizationToken(request.getCredentials().getPassword().getPlainText())
.to(Map.class);
if(ghContent == null){
throw new ServiceException.UnexpectedErrorException("Failed to load file: "+request.getPath());
}
String base64Data = (String)ghContent.get("content");
// JENKINS-47887 - this content contains \n which breaks IE11
base64Data = base64Data == null ? null : base64Data.replace("\n", "");
return new GithubFile(new GitContent.Builder()
.sha((String)ghContent.get("sha"))
.name((String)ghContent.get("name"))
.repo(request.getRepo())
.owner(request.getOwner())
.path(request.getPath())
.base64Data(base64Data)
.build());
} catch (IOException e) {
throw new ServiceException.UnexpectedErrorException(String.format("Failed to load file %s: %s", request.getPath(),e.getMessage()), e);
}
}
|
@Test
public void unauthorizedAccessToContentForOrgFolderGHEShouldFail() throws UnirestException, IOException {
User alice = User.get("alice");
alice.setFullName("Alice Cooper");
alice.addProperty(new Mailer.UserProperty("[email protected]"));
String aliceCredentialId = createGithubEnterpriseCredential(alice);
StaplerRequest staplerRequest = mockStapler(GithubEnterpriseScm.ID);
MultiBranchProject mbp = mockMbp(aliceCredentialId, user, GithubEnterpriseScm.DOMAIN_NAME);
try {
//Bob trying to access content but his credential is not setup so should fail
new GithubScmContentProvider().getContent(staplerRequest, mbp);
}catch (ServiceException.PreconditionRequired e){
assertEquals("Can't access content from github: no credential found", e.getMessage());
return;
}
fail("Should have failed with PreConditionException");
}
|
static String generateClassName(final OpenAPI document) {
final Info info = document.getInfo();
if (info == null) {
return DEFAULT_CLASS_NAME;
}
final String title = info.getTitle();
if (title == null) {
return DEFAULT_CLASS_NAME;
}
final String className = title.chars().filter(Character::isJavaIdentifierPart).filter(c -> c < 'z').boxed()
.collect(Collector.of(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append,
StringBuilder::toString));
if (className.isEmpty() || !Character.isJavaIdentifierStart(className.charAt(0))) {
return DEFAULT_CLASS_NAME;
}
return className;
}
|
@Test
public void shouldUseDefaultClassNameIfInfoOrTitleIsNotPresent() {
final OpenAPI openapi = new OpenAPI();
assertThat(RestDslSourceCodeGenerator.generateClassName(openapi))
.isEqualTo(RestDslSourceCodeGenerator.DEFAULT_CLASS_NAME);
openapi.setInfo(new Info());
assertThat(RestDslSourceCodeGenerator.generateClassName(openapi))
.isEqualTo(RestDslSourceCodeGenerator.DEFAULT_CLASS_NAME);
}
|
@Override
public void handleReply(Reply reply) {
if (failure.get() != null) {
return;
}
if (containsFatalErrors(reply.getErrors())) {
failure.compareAndSet(null, new IOException(formatErrors(reply)));
return;
}
long now = System.currentTimeMillis();
long latency = now - (long) reply.getContext();
numReplies.incrementAndGet();
accumulateReplies(now, latency);
}
|
@Test
public void requireThatDualPutXML2JsonFeederWorks() throws Throwable {
ByteArrayOutputStream dump = new ByteArrayOutputStream();
assertFeed(new FeederParams().setDumpStream(dump),
"<vespafeed>" +
" <document documenttype='simple' documentid='id:simple:simple::0'>" +
" <my_str>foo</my_str>" +
" </document>" +
" <document documenttype='simple' documentid='id:simple:simple::1'>" +
" <my_str>bar</my_str>" +
" </document>" +
" <remove documenttype='simple' documentid='id:simple:simple::2'/>" +
"</vespafeed>",
new MessageHandler() {
@Override
public void handleMessage(Message msg) {
Reply reply = ((DocumentMessage)msg).createReply();
reply.swapState(msg);
reply.popHandler().handleReply(reply);
}
},
"",
"(.+\n)+" +
"\\s*\\d+,\\s*3,.+\n");
assertEquals(154, dump.size());
assertEquals("""
[
{"id":"id:simple:simple::0","fields":{"my_str":"foo"}},
{"id":"id:simple:simple::1","fields":{"my_str":"bar"}},
{"remove":"id:simple:simple::2"}
]""",
dump.toString());
assertFeed(dump.toString(),
new MessageHandler() {
@Override
public void handleMessage(Message msg) {
Reply reply = ((DocumentMessage)msg).createReply();
reply.swapState(msg);
reply.popHandler().handleReply(reply);
}
},
"",
"(.+\n)+" +
"\\s*\\d+,\\s*3,.+\n");
}
|
@Override
public void registerInstance(String serviceName, String ip, int port) throws NacosException {
registerInstance(serviceName, ip, port, Constants.DEFAULT_CLUSTER_NAME);
}
|
@Test
void testRegisterInstance7() throws NacosException {
Throwable exception = assertThrows(NacosException.class, () -> {
//given
String serviceName = "service1";
String groupName = "group1";
Instance instance = new Instance();
instance.setClusterName("cluster1,cluster2");
//when
client.registerInstance(serviceName, groupName, instance);
});
assertTrue(exception.getMessage().contains(
"Instance 'clusterName' should be characters with only 0-9a-zA-Z-. (current: cluster1,cluster2)"));
}
|
public List<NamespaceBO> findNamespaceBOs(String appId, Env env, String clusterName, boolean includeDeletedItems) {
List<NamespaceDTO> namespaces = namespaceAPI.findNamespaceByCluster(appId, env, clusterName);
if (namespaces == null || namespaces.size() == 0) {
throw BadRequestException.namespaceNotExists();
}
List<NamespaceBO> namespaceBOs = Collections.synchronizedList(new LinkedList<>());
List<String> exceptionNamespaces = Collections.synchronizedList(new LinkedList<>());
CountDownLatch latch = new CountDownLatch(namespaces.size());
for (NamespaceDTO namespace : namespaces) {
executorService.submit(() -> {
NamespaceBO namespaceBO;
try {
namespaceBO = transformNamespace2BO(env, namespace, includeDeletedItems);
namespaceBOs.add(namespaceBO);
} catch (Exception e) {
LOGGER.error("parse namespace error. app id:{}, env:{}, clusterName:{}, namespace:{}",
appId, env, clusterName, namespace.getNamespaceName(), e);
exceptionNamespaces.add(namespace.getNamespaceName());
} finally {
latch.countDown();
}
});
}
try {
latch.await();
} catch (InterruptedException e) {
//ignore
}
if(namespaceBOs.size() != namespaces.size()){
throw new RuntimeException(String
.format("Parse namespaces error, expected: %s, but actual: %s, cannot get those namespaces: %s", namespaces.size(), namespaceBOs.size(), exceptionNamespaces));
}
return namespaceBOs.stream()
.sorted(Comparator.comparing(o -> o.getBaseInfo().getId()))
.collect(Collectors.toList());
}
|
@Test
public void testFindNamespace() {
AppNamespace applicationAppNamespace = mock(AppNamespace.class);
AppNamespace hermesAppNamespace = mock(AppNamespace.class);
NamespaceDTO application = new NamespaceDTO();
application.setId(1);
application.setClusterName(testClusterName);
application.setAppId(testAppId);
application.setNamespaceName(testNamespaceName);
NamespaceDTO hermes = new NamespaceDTO();
hermes.setId(2);
hermes.setClusterName("default");
hermes.setAppId(testAppId);
hermes.setNamespaceName("hermes");
List<NamespaceDTO> namespaces = Arrays.asList(application, hermes);
ReleaseDTO someRelease = new ReleaseDTO();
someRelease.setConfigurations("{\"a\":\"123\",\"b\":\"123\"}");
ItemDTO i1 = new ItemDTO("a", "123", "", 1);
ItemDTO i2 = new ItemDTO("b", "1", "", 2);
ItemDTO i3 = new ItemDTO("", "", "#dddd", 3);
ItemDTO i4 = new ItemDTO("c", "1", "", 4);
List<ItemDTO> someItems = Arrays.asList(i1, i2, i3, i4);
when(applicationAppNamespace.getFormat()).thenReturn(ConfigFileFormat.Properties.getValue());
when(hermesAppNamespace.getFormat()).thenReturn(ConfigFileFormat.XML.getValue());
when(appNamespaceService.findByAppIdAndName(testAppId, testNamespaceName))
.thenReturn(applicationAppNamespace);
when(appNamespaceService.findPublicAppNamespace("hermes")).thenReturn(hermesAppNamespace);
when(namespaceAPI.findNamespaceByCluster(testAppId, Env.DEV, testClusterName)).thenReturn(namespaces);
when(releaseService.loadLatestRelease(testAppId, Env.DEV, testClusterName,
testNamespaceName)).thenReturn(someRelease);
when(releaseService.loadLatestRelease(testAppId, Env.DEV, testClusterName, "hermes")).thenReturn(someRelease);
when(itemService.findItems(testAppId, Env.DEV, testClusterName, testNamespaceName)).thenReturn(someItems);
List<NamespaceBO> namespaceVOs = namespaceService.findNamespaceBOs(testAppId, Env.DEV, testClusterName);
assertEquals(2, namespaceVOs.size());
when(namespaceAPI.findNamespaceByCluster(testAppId, Env.DEV, testClusterName)).thenReturn(Lists.list(application));
namespaceVOs = namespaceService.findNamespaceBOs(testAppId, Env.DEV, testClusterName);
assertEquals(1, namespaceVOs.size());
NamespaceBO namespaceVO = namespaceVOs.get(0);
assertEquals(4, namespaceVO.getItems().size());
assertEquals("a", namespaceVO.getItems().get(0).getItem().getKey());
assertEquals(2, namespaceVO.getItemModifiedCnt());
assertEquals(testAppId, namespaceVO.getBaseInfo().getAppId());
assertEquals(testClusterName, namespaceVO.getBaseInfo().getClusterName());
assertEquals(testNamespaceName, namespaceVO.getBaseInfo().getNamespaceName());
ReleaseDTO errorRelease = new ReleaseDTO();
errorRelease.setConfigurations("\"a\":\"123\",\"b\":\"123\"");
when(releaseService.loadLatestRelease(testAppId, Env.DEV, testClusterName, testNamespaceName)).thenReturn(errorRelease);
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(()-> namespaceService.findNamespaceBOs(testAppId, Env.DEV, testClusterName))
.withMessageStartingWith("Parse namespaces error, expected: 1, but actual: 0, cannot get those namespaces: [application]");
}
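|
// findNamespaceBOs fans work out to an executor and blocks on a CountDownLatch
// until every submitted task has finished, counting down in finally so failures
// still release the latch. A stripped-down sketch of that fan-out/fan-in
// pattern; the pool size and the toUpperCase body are illustrative stand-ins
// for transformNamespace2BO:
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

static List<String> mapInParallel(List<String> inputs) throws InterruptedException {
    ExecutorService pool = Executors.newFixedThreadPool(4);
    List<String> results = Collections.synchronizedList(new LinkedList<>());
    CountDownLatch latch = new CountDownLatch(inputs.size());
    for (String in : inputs) {
        pool.submit(() -> {
            try {
                results.add(in.toUpperCase());
            } finally {
                latch.countDown(); // always release, even if the task failed
            }
        });
    }
    latch.await();
    pool.shutdown();
    return results;
}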
|
public GrantDTO create(GrantDTO grantDTO, @Nullable User currentUser) {
return create(grantDTO, requireNonNull(currentUser, "currentUser cannot be null").getName());
}
|
@Test
public void createWithGrantDTOAndUserObject() {
final ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
final GRN grantee = GRNTypes.USER.toGRN("jane");
final GRN target = GRNTypes.DASHBOARD.toGRN("54e3deadbeefdeadbeef0000");
final User user = mock(User.class);
when(user.getName()).thenReturn("john");
final GrantDTO grantDTO = GrantDTO.of(grantee, Capability.OWN, target).toBuilder()
// Ensure that the time tests work
.createdAt(now.minusHours(1))
.updatedAt(now.minusHours(1))
.build();
final GrantDTO grant = dbService.create(grantDTO, user);
assertThat(grant.id()).isNotBlank();
assertThat(grant.grantee()).isEqualTo(grantee);
assertThat(grant.capability()).isEqualTo(Capability.OWN);
assertThat(grant.target()).isEqualTo(target);
assertThat(grant.createdBy()).isEqualTo("john");
assertThat(grant.createdAt()).isAfter(grantDTO.createdAt());
assertThat(grant.updatedBy()).isEqualTo("john");
assertThat(grant.updatedAt()).isAfter(grantDTO.updatedAt());
}
|
public CompletableFuture<Map<ExecutionAttemptID, Collection<ThreadInfoSample>>>
requestThreadInfoSamples(
Map<Long, ExecutionAttemptID> threads,
final ThreadInfoSamplesRequest requestParams) {
checkNotNull(threads, "threads must not be null");
checkNotNull(requestParams, "requestParams must not be null");
CompletableFuture<Map<ExecutionAttemptID, Collection<ThreadInfoSample>>> resultFuture =
new CompletableFuture<>();
scheduledExecutor.execute(
() ->
requestThreadInfoSamples(
threads,
requestParams.getNumSamples(),
requestParams.getDelayBetweenSamples(),
requestParams.getMaxStackTraceDepth(),
CollectionUtil.newHashMapWithExpectedSize(threads.size()),
resultFuture));
return resultFuture;
}
|
@Test
void testShouldThrowExceptionIfTaskIsNotRunningBeforeSampling()
throws ExecutionException, InterruptedException {
Set<SampleableTask> tasks = new HashSet<>();
tasks.add(new NotRunningTask());
Map<Long, ExecutionAttemptID> threads = collectExecutionAttempts(tasks);
final CompletableFuture<Map<ExecutionAttemptID, Collection<ThreadInfoSample>>>
sampleFuture =
threadInfoSampleService.requestThreadInfoSamples(threads, requestParams);
assertThatFuture(sampleFuture).eventuallyFails();
assertThat(sampleFuture.handle((ignored, e) -> e).get())
.isInstanceOf(IllegalStateException.class);
}
|
public static Ip4Prefix valueOf(int address, int prefixLength) {
return new Ip4Prefix(Ip4Address.valueOf(address), prefixLength);
}
|
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfShortArrayIPv4() {
Ip4Prefix ipPrefix;
byte[] value;
value = new byte[] {1, 2, 3};
ipPrefix = Ip4Prefix.valueOf(value, 24);
}
|
public boolean matchesBeacon(Beacon beacon) {
// All identifiers must match, or the corresponding region identifier must be null.
for (int i = mIdentifiers.size(); --i >= 0; ) {
final Identifier identifier = mIdentifiers.get(i);
Identifier beaconIdentifier = null;
if (i < beacon.mIdentifiers.size()) {
beaconIdentifier = beacon.getIdentifier(i);
}
if ((beaconIdentifier == null && identifier != null) ||
(beaconIdentifier != null && identifier != null && !identifier.equals(beaconIdentifier))) {
return false;
}
}
if (mBluetoothAddress != null && !mBluetoothAddress.equalsIgnoreCase(beacon.mBluetoothAddress)) {
return false;
}
return true;
}
|
@Test
public void testBeaconMatchesRegionWithShorterIdentifierList() {
Beacon beacon = new AltBeacon.Builder().setId1("1").setId2("2").setId3("3").setRssi(4)
.setBeaconTypeCode(5).setTxPower(6).setBluetoothAddress("1:2:3:4:5:6").build();
Region region = new Region("myRegion", Collections.singletonList(Identifier.parse("1")));
assertTrue("Beacon should match region with first identifier equal and shorter Identifier list", region.matchesBeacon(beacon));
}
|
@Override
public void execute() throws MojoExecutionException {
if (pathToModelDir == null) {
throw new MojoExecutionException("pathToModelDir parameter must not be null");
}
// skip if input file does not exist
if (inputCamelSchemaFile == null || !inputCamelSchemaFile.exists()) {
getLog().info("Input Camel schema file: " + inputCamelSchemaFile + " does not exist. Skip EIP document enrichment");
return;
}
validateExists(inputCamelSchemaFile, "inputCamelSchemaFile");
validateIsFile(inputCamelSchemaFile, "inputCamelSchemaFile");
validateExists(camelCoreXmlDir, "camelCoreXmlDir");
validateIsDirectory(camelCoreModelDir, "camelCoreModelDir");
validateIsDirectory(camelCoreXmlDir, "camelCoreXmlDir");
try {
runPlugin();
} catch (Exception e) {
throw new MojoExecutionException("Error during plugin execution", e);
}
if (deleteFilesAfterRun != null) {
deleteFilesAfterDone(deleteFilesAfterRun);
}
}
|
@Test
public void testExecuteCamelCoreIsNull() {
eipDocumentationEnricherMojo.camelCoreModelDir = null;
when(mockInputSchema.exists()).thenReturn(true);
when(mockInputSchema.isFile()).thenReturn(true);
try {
eipDocumentationEnricherMojo.execute();
fail("Expected MojoExecutionException");
} catch (MojoExecutionException e) {
// Expected.
}
}
|
@VisibleForTesting
public Journal getJournal(String jid) {
return journalsById.get(jid);
}
|
@Test(timeout=100000)
public void testJournalDirPerNameSpace() {
Collection<String> nameServiceIds = DFSUtilClient.getNameServiceIds(conf);
setupStaticHostResolution(2, "journalnode");
for (String nsId : nameServiceIds) {
String jid = "test-journalid-" + nsId;
Journal nsJournal = jn.getJournal(jid);
JNStorage journalStorage = nsJournal.getStorage();
File editsDir = new File(MiniDFSCluster.getBaseDirectory() +
File.separator + "TestJournalNode" + File.separator
+ nsId + File.separator + jid);
assertEquals(editsDir.toString(), journalStorage.getRoot().toString());
}
}
|
public List<ProtobufSystemInfo.Section> load() {
return globalSections.stream()
.map(SystemInfoSection::toProtobuf)
.toList();
}
|
@Test
public void call_only_SystemInfoSection_that_inherit_Global() {
// two globals and one standard
SystemInfoSection[] sections = new SystemInfoSection[] {
new TestGlobalSystemInfoSection("foo"), new TestSystemInfoSection("bar"), new TestGlobalSystemInfoSection("baz")};
GlobalInfoLoader underTest = new GlobalInfoLoader(sections);
List<ProtobufSystemInfo.Section> loadedInfo = underTest.load();
assertThat(loadedInfo).extracting(ProtobufSystemInfo.Section::getName)
.containsExactlyInAnyOrder("foo", "baz");
}
|
@Override
public List<ZuulFilter<?, ?>> putFiltersForClasses(String[] classNames) throws Exception {
List<ZuulFilter<?, ?>> newFilters = new ArrayList<>();
for (String className : classNames) {
newFilters.add(putFilterForClassName(className));
}
return Collections.unmodifiableList(newFilters);
}
|
@Test
void testPutFiltersForClasses() throws Exception {
loader.putFiltersForClasses(new String[] {TestZuulFilter.class.getName()});
Collection<ZuulFilter<?, ?>> filters = registry.getAllFilters();
assertEquals(1, filters.size());
}
|
public static String getProcessTriggerInstanceNodePath(final String instanceId, final String taskId) {
return String.join("/", "", ROOT_NODE, COMPUTE_NODE, SHOW_PROCESS_LIST_TRIGGER, String.join(":", instanceId, taskId));
}
|
@Test
void assertGetProcessTriggerInstanceIdNodePath() {
assertThat(ComputeNode.getProcessTriggerInstanceNodePath("foo_instance", "foo_process_id"),
is("/nodes/compute_nodes/show_process_list_trigger/foo_instance:foo_process_id"));
}
|
public static String writeValueAsString(Object obj) {
try {
return getInstance().writeValueAsString(obj);
} catch (IOException e) {
    // JsonGenerationException and JsonMappingException both extend IOException,
    // so a single catch covers all three original branches.
    logger.error(e.getMessage(), e);
}
return null;
}
|
@Test
public void shouldWriteValueAsString() {
//given
Map<String, String> map = new HashMap<>();
map.put("aaa", "111");
map.put("bbb", "222");
//when
String json = writeValueAsString(map);
//then
assertEquals("{\"aaa\":\"111\",\"bbb\":\"222\"}", json);
}
|
public static List<Validation> computeFlagsFromCSVString(String csvString,
Log log) {
List<Validation> flags = new ArrayList<>();
boolean resetFlag = false;
for (String p : csvString.split(",")) {
try {
flags.add(Validation.valueOf(p));
} catch (IllegalArgumentException e) {
log.info("validateDMN configured with flag: '" + p + "' determines this Mojo will not be executed (reset all flags).");
resetFlag = true;
}
}
if (resetFlag) {
flags.clear();
}
return flags;
}
|
@Test
public void testFlagsUnknown() {
List<DMNValidator.Validation> result = DMNValidationHelper.computeFlagsFromCSVString("VALIDATE_SCHEMA,boh", log);
assertThat(result).isNotNull()
.hasSize(0);
}
|
public static MessageHeaders createAfnemersberichtAanDGLHeaders(Map<String, Object> additionalHeaders) {
validateHeaders(additionalHeaders);
Map<String, Object> headersMap = createBasicHeaderMap();
headersMap.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_ACTION, "BRPAfnemersberichtAanDGL");
headersMap.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_ACTIVITY, "dgl:objecten:1.0");
headersMap.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_PROCESS_TYPE, "dgl:objecten:1.0");
headersMap.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_PROCESS_VERSION, "1.0");
headersMap.putAll(additionalHeaders);
MessageHeaders headers = new MessageHeaders(headersMap);
return headers;
}
|
@Test
public void testAfnemersBerichtAanDGLHeaders() {
MessageHeaders afnemersberichtAanDGLHeaders = HeaderUtil.createAfnemersberichtAanDGLHeaders(validHeaders());
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_PRODUCTION), is("Test"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_PROTOCOL), is("ebMS"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_PROTOCOL_VERSION), is("2.0"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_SYSTEM_MSG_ID).toString(), matchesPattern(UUID_REGEX));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_PROCESS_INSTANCE_ID).toString(), matchesPattern(UUID_REGEX));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_SEQ_NUMBER), is("0"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_MSG_ORDER), is("false"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_ACTION), is("BRPAfnemersberichtAanDGL"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_ACTIVITY), is("dgl:objecten:1.0"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_PROCESS_TYPE), is("dgl:objecten:1.0"));
assertThat(afnemersberichtAanDGLHeaders.get(Headers.X_AUX_PROCESS_VERSION), is("1.0"));
}
|
public static boolean isUri(String potentialUri) {
if (StringUtils.isBlank(potentialUri)) {
return false;
}
try {
URI uri = new URI(potentialUri);
return uri.getScheme() != null && uri.getHost() != null;
} catch (URISyntaxException e) {
return false;
}
}
|
@Test public void
returns_false_when_uri_is_blank() {
assertThat(UriValidator.isUri(" "), is(false));
}
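|
// isUri accepts only absolute, hierarchical URIs: both a scheme and a host
// must parse. A few illustrative inputs (values are hypothetical):
public class UriValidatorDemo {
    public static void main(String[] args) {
        System.out.println(UriValidator.isUri("https://example.com/path")); // true: scheme and host present
        System.out.println(UriValidator.isUri("example.com"));              // false: parses as relative, no scheme
        System.out.println(UriValidator.isUri("mailto:[email protected]"));     // false: opaque URI, host is null
    }
}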
|
ContentElement(PayloadElement payload, List<AffixElement> affixElements) {
this.payload = payload;
this.affixElements = Collections.unmodifiableList(affixElements);
}
|
@Test
public void testContentElement() throws ParseException {
Message.Body body = new Message.Body("en", "My battery is low and it’s getting dark"); // :'(
ContentElement contentElement = ContentElement.builder()
.addPayloadItem(body)
.setFrom(AffixElementsTest.JID_OPPORTUNITY)
.addTo(AffixElementsTest.JID_HOUSTON)
.setTimestamp(XmppDateTime.parseXEP0082Date("2018-06-10T00:00:00.000+00:00"))
.setRandomPadding("RANDOMPADDING")
.build();
String expectedXml = "" +
"<content xmlns='urn:xmpp:sce:0'>" +
" <to jid='[email protected]'/>" +
" <from jid='[email protected]'/>" +
" <time stamp='2018-06-10T00:00:00.000+00:00'/>" +
" <rpad>RANDOMPADDING</rpad>" +
" <payload>" +
" <body xmlns='jabber:client' xml:lang='en'>My battery is low and it’s getting dark</body>" +
" </payload>" +
"</content>";
assertXmlSimilar(expectedXml, contentElement.toXML());
assertEquals(Collections.singletonList(body), contentElement.getPayload().getItems());
assertEquals(4, contentElement.getAffixElements().size());
assertTrue(contentElement.getAffixElements().contains(new ToAffixElement(AffixElementsTest.JID_HOUSTON)));
assertTrue(contentElement.getAffixElements().contains(new FromAffixElement(AffixElementsTest.JID_OPPORTUNITY)));
assertTrue(contentElement.getAffixElements().contains(
new TimestampAffixElement(XmppDateTime.parseXEP0082Date("2018-06-10T00:00:00.000+00:00"))));
assertTrue(contentElement.getAffixElements().contains(new RandomPaddingAffixElement("RANDOMPADDING")));
}
|
public static ClusterHealthStatus isHealth(List<RemoteInstance> remoteInstances) {
if (CollectionUtils.isEmpty(remoteInstances)) {
return ClusterHealthStatus.unHealth("can't get the instance list");
}
if (!CoreModuleConfig.Role.Receiver.equals(ROLE)) {
List<RemoteInstance> selfInstances = remoteInstances.stream().
filter(remoteInstance -> remoteInstance.getAddress().isSelf()).collect(Collectors.toList());
if (CollectionUtils.isEmpty(selfInstances)) {
return ClusterHealthStatus.unHealth("can't get itself");
}
}
if (remoteInstances.size() > 1 && hasIllegalNodeAddress(remoteInstances)) {
return ClusterHealthStatus.unHealth("find illegal node in cluster mode such as 127.0.0.1, localhost");
}
return ClusterHealthStatus.HEALTH;
}
|
@Test
public void unHealthWithIllegalNodeInstance() {
List<RemoteInstance> remoteInstances = new ArrayList<>();
remoteInstances.add(new RemoteInstance(new Address("192.168.0.1", 8892, true)));
remoteInstances.add(new RemoteInstance(new Address("127.0.0.1", 8892, true)));
ClusterHealthStatus clusterHealthStatus = OAPNodeChecker.isHealth(remoteInstances);
Assertions.assertFalse(clusterHealthStatus.isHealth());
}
|
public static <T extends Comparable<? super T>> T max(Collection<T> coll) {
return isEmpty(coll) ? null : Collections.max(coll);
}
|
@Test
public void minNullTest() {
assertNull(CollUtil.max(null));
}
|
public static Builder builder() {
return new Builder(new HashSet<>());
}
|
@Test
public void shouldImplementEqualsAndHashCode() {
new EqualsTester()
.addEqualityGroup(
RequiredColumns.builder().add(EXP0).build(),
RequiredColumns.builder().addAll(ImmutableSet.of(COL1_REF, COL2_REF, COL3_REF)).build()
)
.addEqualityGroup(
RequiredColumns.builder().add(COL0_REF).build()
)
.testEquals();
}
|
public Column getColumn(String value) {
Matcher m = PATTERN.matcher(value);
if (!m.matches()) {
throw new IllegalArgumentException("value " + value + " is not a valid column definition");
}
String name = m.group(1);
String type = m.group(6);
type = type == null ? "String" : type;
boolean array = (m.group(4) != null) || (m.group(7) != null);
if (array) {
return new ArrayColumn(name,
createColumn(name,
type));
}
return createColumn(name,
type);
}
|
@Test
public void testGetArrayColumnSimple() {
ColumnFactory f = new ColumnFactory();
Column column = f.getColumn("column[]");
assertThat(column instanceof ArrayColumn).isTrue();
assertThat(column.getName()).isEqualTo("column");
assertThat(column.getCellType()).isEqualTo("StringCell");
}
|
@Nullable static String method(String fullMethodName) {
int index = fullMethodName.lastIndexOf('/');
if (index == -1 || index == 0) return null;
return fullMethodName.substring(index + 1);
}
|
@Test void method_malformed() {
assertThat(GrpcParser.method("/")).isNull();
}
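|
// gRPC full method names have the shape "/package.Service/Method"; method()
// returns the segment after the last slash, or null when the name is
// malformed. An illustrative sketch (assumes access to the package-private
// helper; the service name is hypothetical):
static void demoGrpcMethodParsing() {
    System.out.println(GrpcParser.method("/helloworld.Greeter/SayHello")); // "SayHello"
    System.out.println(GrpcParser.method("/"));                            // null: slash at index 0
    System.out.println(GrpcParser.method("no-slash"));                     // null: no slash at all
}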
|
public static HazelcastInstance getOrCreateHazelcastInstance() {
return HazelcastInstanceFactory.getOrCreateHazelcastInstance(null);
}
|
@Test
public void getOrCreateDefaultHazelcastInstance() {
String hzConfigProperty = System.getProperty(HAZELCAST_CONFIG);
try {
System.setProperty(HAZELCAST_CONFIG, "classpath:test-hazelcast-jcache.xml");
HazelcastInstance hz1 = Hazelcast.getOrCreateHazelcastInstance();
HazelcastInstance hz2 = Hazelcast.getOrCreateHazelcastInstance();
assertEquals("Calling two times getOrCreateHazelcastInstance should return same instance", hz1,
hz2);
} finally {
if (hzConfigProperty == null) {
System.clearProperty(HAZELCAST_CONFIG);
} else {
System.setProperty(HAZELCAST_CONFIG, hzConfigProperty);
}
}
}
|
@Override
public TransferAction action(final Session<?> source, final Session<?> destination, boolean resumeRequested, boolean reloadRequested,
final TransferPrompt prompt, final ListProgressListener listener) throws BackgroundException {
if(log.isDebugEnabled()) {
log.debug(String.format("Find transfer action with prompt %s", prompt));
}
if(resumeRequested) {
return TransferAction.comparison;
}
final TransferAction action;
if(reloadRequested) {
action = TransferAction.forName(
PreferencesFactory.get().getProperty("queue.copy.reload.action"));
}
else {
// Use default
action = TransferAction.forName(
PreferencesFactory.get().getProperty("queue.copy.action"));
}
if(action.equals(TransferAction.callback)) {
for(TransferItem upload : roots) {
final Path copy = mapping.get(upload.remote);
final Find find = destination.getFeature(Find.class);
if(find.find(copy)) {
// Found remote file
if(copy.isDirectory()) {
// List files in target directory
if(this.list(destination, copy, null, listener).isEmpty()) {
// Do not prompt for existing empty directories
continue;
}
}
// Prompt user to choose a filter
return prompt.prompt(upload);
}
}
// No files exist yet therefore it is most straightforward to use the overwrite action
return TransferAction.overwrite;
}
return action;
}
|
@Test
public void testActionPromptCancel() throws Exception {
final Path test = new Path("t", EnumSet.of(Path.Type.file));
final Host target = new Host(new TestProtocol(), "t");
CopyTransfer t = new CopyTransfer(target,
target,
Collections.singletonMap(test, new Path("d", EnumSet.of(Path.Type.file))), new BandwidthThrottle(BandwidthThrottle.UNLIMITED));
assertEquals(TransferAction.cancel, t.action(new NullTransferSession(target), new NullTransferSession(target), false, true,
new DisabledTransferPrompt(), new DisabledListProgressListener()));
}
|
@Override
public Graph<Entity> resolveForInstallation(Entity entity,
Map<String, ValueReference> parameters,
Map<EntityDescriptor, Entity> entities) {
if (entity instanceof EntityV1) {
return resolveForInstallationV1((EntityV1) entity, parameters, entities);
} else {
throw new IllegalArgumentException("Unsupported entity version: " + entity.getClass());
}
}
|
@Test
@MongoDBFixtures("InputFacadeTest.json")
public void resolveForInstallationGrokPattern() throws NotFoundException {
final Input input = inputService.find("5ae2ebbeef27464477f0fd8b");
final InputWithExtractors inputWithExtractors = InputWithExtractors.create(input, inputService.getExtractors(input));
final GrokExtractor grokExtractor = (GrokExtractor) inputWithExtractors.extractors().iterator().next();
final ExtractorEntity extractorEntity = ExtractorEntity.create(
ValueReference.of(grokExtractor.getTitle()),
ValueReference.of(grokExtractor.getType()),
ValueReference.of(grokExtractor.getCursorStrategy()),
ValueReference.of(grokExtractor.getTargetField()),
ValueReference.of(grokExtractor.getSourceField()),
ReferenceMapUtils.toReferenceMap(grokExtractor.getExtractorConfig()),
Collections.emptyList(),
ValueReference.of(grokExtractor.getConditionType()),
ValueReference.of(grokExtractor.getConditionValue()),
ValueReference.of(grokExtractor.getOrder()));
List<ExtractorEntity> extractorEntities = new ArrayList<>(1);
extractorEntities.add(extractorEntity);
InputEntity inputEntity = InputEntity.create(
ValueReference.of(input.getTitle()),
ReferenceMapUtils.toReferenceMap(input.getConfiguration()),
Collections.emptyMap(),
ValueReference.of(input.getType()),
ValueReference.of(input.isGlobal()),
extractorEntities);
Entity entity = EntityV1.builder()
.id(ModelId.of(input.getId()))
.type(ModelTypes.INPUT_V1)
.data(objectMapper.convertValue(inputEntity, JsonNode.class))
.build();
final GrokPatternEntity grokPatternEntity = GrokPatternEntity.create("GREEDY", ".*");
final Entity expectedEntity = EntityV1.builder()
.id(ModelId.of("dead-feed"))
.data(objectMapper.convertValue(grokPatternEntity, JsonNode.class))
.type(ModelTypes.GROK_PATTERN_V1)
.build();
final EntityDescriptor entityDescriptor = expectedEntity.toEntityDescriptor();
final Map<EntityDescriptor, Entity> entities = new HashMap<>(1);
entities.put(entityDescriptor, expectedEntity);
Graph<Entity> graph = facade.resolveForInstallation(entity, Collections.emptyMap(), entities);
assertThat(graph.nodes()).contains(expectedEntity);
}
|
@Override
public CompletableFuture<List<Long>> getSplitBoundary(BundleSplitOption bundleSplitOption) {
NamespaceService service = bundleSplitOption.getService();
NamespaceBundle bundle = bundleSplitOption.getBundle();
List<Long> positions = bundleSplitOption.getPositions();
if (positions == null || positions.size() == 0) {
throw new IllegalArgumentException("SplitBoundaries can't be empty");
}
// sort all positions
Collections.sort(positions);
if (force) {
return getBoundaries(bundle, positions);
} else {
return service.getOwnedTopicListForNamespaceBundle(bundle)
.thenCompose(topics -> {
if (topics == null || topics.size() <= 1) {
return CompletableFuture.completedFuture(null);
}
return getBoundaries(bundle, positions);
});
}
}
|
@SuppressWarnings("UnstableApiUsage")
@Test
public void testAlgorithmReturnCorrectResult() {
// -- algorithm
SpecifiedPositionsBundleSplitAlgorithm algorithm = new SpecifiedPositionsBundleSplitAlgorithm();
// -- calculate the mock result
NamespaceService mockNamespaceService = mock(NamespaceService.class);
NamespaceBundle mockNamespaceBundle = mock(NamespaceBundle.class);
doReturn(1L).when(mockNamespaceBundle).getLowerEndpoint();
doReturn(1000L).when(mockNamespaceBundle).getUpperEndpoint();
doReturn(CompletableFuture.completedFuture(Lists.newArrayList("topic", "topic2")))
.when(mockNamespaceService)
.getOwnedTopicListForNamespaceBundle(mockNamespaceBundle);
List<Long> positions = Arrays.asList(-1L, 0L, 1L, 100L, 200L, 500L, 800L, 1000L, 1100L);
List<Long> splitPositions = algorithm.getSplitBoundary(
new BundleSplitOption(mockNamespaceService, mockNamespaceBundle, positions)).join();
assertEquals(splitPositions.size(), 4);
assertTrue(splitPositions.contains(100L));
assertTrue(splitPositions.contains(200L));
assertTrue(splitPositions.contains(500L));
assertTrue(splitPositions.contains(800L));
}
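|
// The getBoundaries implementation is not shown, but the test above pins down
// the rule: only positions strictly between the bundle's endpoints survive
// (-1, 0, 1, 1000 and 1100 are all dropped for the (1, 1000) bundle). An
// inferred sketch of that filter:
import java.util.List;
import java.util.stream.Collectors;

static List<Long> filterBoundaries(long lower, long upper, List<Long> positions) {
    return positions.stream()
            .filter(p -> p > lower && p < upper)
            .collect(Collectors.toList());
}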
|
public String hashDirectoryId(final String cleartextDirectoryId) {
if(!directoryIdCache.contains(cleartextDirectoryId)) {
directoryIdCache.put(cleartextDirectoryId, impl.hashDirectoryId(cleartextDirectoryId));
}
return directoryIdCache.get(cleartextDirectoryId);
}
|
@Test
public void testHashDirectoryId() {
final FileNameCryptor mock = mock(FileNameCryptor.class);
final CryptorCache cryptor = new CryptorCache(mock);
when(mock.hashDirectoryId(anyString())).thenReturn("hashed");
assertEquals("hashed", cryptor.hashDirectoryId("id"));
assertEquals("hashed", cryptor.hashDirectoryId("id"));
verify(mock, times(1)).hashDirectoryId(anyString());
verifyNoMoreInteractions(mock);
}
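|
// hashDirectoryId implements check-then-put memoization over directoryIdCache.
// With a ConcurrentHashMap the same idea becomes a one-liner, and the lookup
// and insert are atomic; a generic sketch where 'delegate' stands in for
// impl::hashDirectoryId and the map for directoryIdCache:
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

static Function<String, String> memoize(Function<String, String> delegate) {
    Map<String, String> cache = new ConcurrentHashMap<>();
    return key -> cache.computeIfAbsent(key, delegate);
}
// Usage: Function<String, String> cached = memoize(impl::hashDirectoryId);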
|
@Override
public String execute(CommandContext commandContext) throws NoSuchCommandException, PermissionDenyException {
String remoteAddress = Optional.ofNullable(commandContext.getRemote())
.map(Channel::remoteAddress)
.map(Objects::toString)
.orElse("unknown");
logger.info("[Dubbo QoS] Command Process start. Command: " + commandContext.getCommandName() + ", Args: "
+ Arrays.toString(commandContext.getArgs()) + ", Remote Address: " + remoteAddress);
BaseCommand command = null;
try {
command =
frameworkModel.getExtensionLoader(BaseCommand.class).getExtension(commandContext.getCommandName());
} catch (Throwable throwable) {
// can't find command
}
if (command == null) {
logger.info("[Dubbo QoS] Command Not found. Command: " + commandContext.getCommandName()
+ ", Remote Address: " + remoteAddress);
throw new NoSuchCommandException(commandContext.getCommandName());
}
// check permission when configs allow anonymous access
if (commandContext.isAllowAnonymousAccess()) {
PermissionChecker permissionChecker = DefaultAnonymousAccessPermissionChecker.INSTANCE;
try {
permissionChecker = frameworkModel
.getExtensionLoader(PermissionChecker.class)
.getExtension(QosConstants.QOS_PERMISSION_CHECKER);
} catch (Throwable throwable) {
// can't find valid custom permissionChecker
}
final Cmd cmd = command.getClass().getAnnotation(Cmd.class);
final PermissionLevel cmdRequiredPermissionLevel = cmd.requiredPermissionLevel();
if (!permissionChecker.access(commandContext, cmdRequiredPermissionLevel)) {
logger.info(
"[Dubbo QoS] Command Deny to access. Command: " + commandContext.getCommandName() + ", Args: "
+ Arrays.toString(commandContext.getArgs()) + ", Required Permission Level: "
+ cmdRequiredPermissionLevel + ", Remote Address: "
+ remoteAddress);
throw new PermissionDenyException(commandContext.getCommandName());
}
}
try {
String result = command.execute(commandContext, commandContext.getArgs());
if (command.logResult()) {
logger.info("[Dubbo QoS] Command Process success. Command: " + commandContext.getCommandName()
+ ", Args: "
+ Arrays.toString(commandContext.getArgs()) + ", Result: " + result + ", Remote Address: "
+ remoteAddress);
}
return result;
} catch (Throwable t) {
logger.info(
"[Dubbo QoS] Command Process Failed. Command: " + commandContext.getCommandName() + ", Args: "
+ Arrays.toString(commandContext.getArgs()) + ", Remote Address: "
+ remoteAddress,
t);
throw t;
}
}
|
@Test
void shouldNotThrowPermissionDenyException_GivenPermissionConfigAndMatchDefaultPUBLICCmdPermissionLevel() {
DefaultCommandExecutor executor = new DefaultCommandExecutor(FrameworkModel.defaultModel());
final CommandContext commandContext = CommandContextFactory.newInstance("live", new String[] {"dubbo"}, false);
commandContext.setQosConfiguration(QosConfiguration.builder().build());
Assertions.assertDoesNotThrow(() -> executor.execute(commandContext));
}
|
public MapConfig setNearCacheConfig(NearCacheConfig nearCacheConfig) {
this.nearCacheConfig = nearCacheConfig;
return this;
}
|
@Test
public void testSetNearCacheConfig() {
NearCacheConfig nearCacheConfig = new NearCacheConfig();
assertEquals(nearCacheConfig, new MapConfig().setNearCacheConfig(nearCacheConfig).getNearCacheConfig());
}
|
public static Map<String, Object> parseQuery(String uri) throws URISyntaxException {
return parseQuery(uri, false);
}
|
@Test
public void testParseQueryCurly() throws Exception {
Map<String, Object> map = URISupport.parseQuery("password=RAW{++?w0rd}&serviceName=somechat");
assertEquals(2, map.size());
assertEquals("RAW{++?w0rd}", map.get("password"));
assertEquals("somechat", map.get("serviceName"));
map = URISupport.parseQuery("password=RAW{++?)w&rd}&serviceName=somechat");
assertEquals(2, map.size());
assertEquals("RAW{++?)w&rd}", map.get("password"));
assertEquals("somechat", map.get("serviceName"));
map = URISupport.parseQuery("password=RAW{%2520w&rd}&serviceName=somechat");
assertEquals(2, map.size());
assertEquals("RAW{%2520w&rd}", map.get("password"));
assertEquals("somechat", map.get("serviceName"));
}
|
public static Field p(String fieldName) {
return SELECT_ALL_FROM_SOURCES_ALL.where(fieldName);
}
|
@Test
void set_group_syntax_string_directly() {
/*
example from the Vespa grouping documentation:
https://docs.vespa.ai/en/grouping.html
all( group(a) max(5) each(output(count())
all(max(1) each(output(summary())))
all(group(b) each(output(count())
all(max(1) each(output(summary())))
all(group(c) each(output(count())
all(max(1) each(output(summary())))))))) );
*/
String q = Q.p("f1").contains("v1")
.group("all(group(a) max(5) each(output(count()) all(max(1) each(output(summary()))) all(group(b) each(output(count()) all(max(1) each(output(summary()))) all(group(c) each(output(count()) all(max(1) each(output(summary())))))))))")
.build();
assertEquals("yql=select * from sources * where f1 contains \"v1\" | all(group(a) max(5) each(output(count()) all(max(1) each(output(summary()))) all(group(b) each(output(count()) all(max(1) each(output(summary()))) all(group(c) each(output(count()) all(max(1) each(output(summary())))))))))", q);
}
|
@Override
public InputStream getInputStream() {
return new RedissonInputStream();
}
|
@Test
public void testReadArray() throws IOException {
RBinaryStream stream = redisson.getBinaryStream("test");
byte[] value = {1, 2, 3, 4, 5, 6};
stream.set(value);
InputStream s = stream.getInputStream();
byte[] b = new byte[6];
assertThat(s.read(b)).isEqualTo(6);
assertThat(s.read(b)).isEqualTo(-1);
assertThat(b).isEqualTo(value);
}
|