focal_method | test_case
---|---
static Schema fromPubsubSchema(com.google.api.services.pubsub.model.Schema pubsubSchema) {
if (!schemaTypeToConversionFnMap.containsKey(pubsubSchema.getType())) {
throw new IllegalArgumentException(
String.format(
"Pub/Sub schema type %s is not supported at this time", pubsubSchema.getType()));
}
SerializableFunction<String, Schema> definitionToSchemaFn =
schemaTypeToConversionFnMap.get(pubsubSchema.getType());
return definitionToSchemaFn.apply(pubsubSchema.getDefinition());
}
|
@Test
public void fromPubsubSchema() {
assertThrows(
"null definition should throw an exception",
NullPointerException.class,
() ->
PubsubClient.fromPubsubSchema(
new com.google.api.services.pubsub.model.Schema().setType("AVRO")));
assertThrows(
"null definition should throw an exception",
SchemaParseException.class,
() ->
PubsubClient.fromPubsubSchema(
com.google.pubsub.v1.Schema.newBuilder().setType(Schema.Type.AVRO).build()));
String badSchema =
"{\"type\": \"record\", \"name\": \"Avro\",\"fields\": [{\"name\": \"bad\", \"type\": \"notatype\"}]}";
String goodSchema =
"{"
+ " \"type\" : \"record\","
+ " \"name\" : \"Avro\","
+ " \"fields\" : ["
+ " {"
+ " \"name\" : \"StringField\","
+ " \"type\" : \"string\""
+ " },"
+ " {"
+ " \"name\" : \"FloatField\","
+ " \"type\" : \"float\""
+ " },"
+ " {"
+ " \"name\" : \"IntField\","
+ " \"type\" : \"int\""
+ " },"
+ " {"
+ " \"name\" : \"LongField\","
+ " \"type\" : \"long\""
+ " },"
+ " {"
+ " \"name\" : \"DoubleField\","
+ " \"type\" : \"double\""
+ " },"
+ " {"
+ " \"name\" : \"BytesField\","
+ " \"type\" : \"bytes\""
+ " },"
+ " {"
+ " \"name\" : \"BooleanField\","
+ " \"type\" : \"boolean\""
+ " }"
+ " ]"
+ "}";
assertThrows(
"unsupported Schema type should throw an exception",
IllegalArgumentException.class,
() ->
PubsubClient.fromPubsubSchema(
new com.google.api.services.pubsub.model.Schema()
.setType("PROTOCOL_BUFFER")
.setDefinition(goodSchema)));
assertThrows(
"'notatype' Avro type should throw an exception",
SchemaParseException.class,
() ->
PubsubClient.fromPubsubSchema(
new com.google.api.services.pubsub.model.Schema()
.setType("AVRO")
.setDefinition(badSchema)));
assertEquals(
org.apache.beam.sdk.schemas.Schema.of(
org.apache.beam.sdk.schemas.Schema.Field.of(
"StringField", org.apache.beam.sdk.schemas.Schema.FieldType.STRING),
org.apache.beam.sdk.schemas.Schema.Field.of(
"FloatField", org.apache.beam.sdk.schemas.Schema.FieldType.FLOAT),
org.apache.beam.sdk.schemas.Schema.Field.of(
"IntField", org.apache.beam.sdk.schemas.Schema.FieldType.INT32),
org.apache.beam.sdk.schemas.Schema.Field.of(
"LongField", org.apache.beam.sdk.schemas.Schema.FieldType.INT64),
org.apache.beam.sdk.schemas.Schema.Field.of(
"DoubleField", org.apache.beam.sdk.schemas.Schema.FieldType.DOUBLE),
org.apache.beam.sdk.schemas.Schema.Field.of(
"BytesField", org.apache.beam.sdk.schemas.Schema.FieldType.BYTES),
org.apache.beam.sdk.schemas.Schema.Field.of(
"BooleanField", org.apache.beam.sdk.schemas.Schema.FieldType.BOOLEAN)),
PubsubClient.fromPubsubSchema(
new com.google.api.services.pubsub.model.Schema()
.setType("AVRO")
.setDefinition(goodSchema)));
}
|
public WorkflowInstanceActionResponse stop(
String workflowId, long workflowInstanceId, long workflowRunId, User caller) {
return terminate(
workflowId, workflowInstanceId, workflowRunId, Actions.WorkflowInstanceAction.STOP, caller);
}
|
@Test
public void testStop() {
when(instanceDao.tryTerminateQueuedInstance(any(), any(), any())).thenReturn(true);
when(instance.getStatus()).thenReturn(WorkflowInstance.Status.CREATED);
boolean res = actionHandler.stop("test-workflow", 1, 1, user).isCompleted();
assertTrue(res);
verify(instanceDao, times(1)).getLatestWorkflowInstanceRun("test-workflow", 1);
verify(instanceDao, times(1))
.tryTerminateQueuedInstance(any(), eq(WorkflowInstance.Status.STOPPED), anyString());
verify(actionDao, times(0)).terminate(any(), any(), any(), anyString());
when(instance.getStatus()).thenReturn(WorkflowInstance.Status.IN_PROGRESS);
when(instance.getExecutionId()).thenReturn("foo");
res = actionHandler.stop("test-workflow", 1, 1, user).isCompleted();
assertFalse(res);
verify(instanceDao, times(2)).getLatestWorkflowInstanceRun("test-workflow", 1);
verify(instanceDao, times(1))
.tryTerminateQueuedInstance(any(), eq(WorkflowInstance.Status.STOPPED), anyString());
verify(actionDao, times(1)).terminate(any(), any(), any(), anyString());
when(instance.getStatus()).thenReturn(WorkflowInstance.Status.PAUSED);
when(instance.getExecutionId()).thenReturn(null);
res = actionHandler.stop("test-workflow", 1, 1, user).isCompleted();
assertTrue(res);
verify(instanceDao, times(3)).getLatestWorkflowInstanceRun("test-workflow", 1);
verify(instanceDao, times(2))
.tryTerminateQueuedInstance(any(), eq(WorkflowInstance.Status.STOPPED), anyString());
verify(actionDao, times(1)).terminate(any(), any(), any(), anyString());
}
|
public static ConfigurableResource parseResourceConfigValue(String value)
throws AllocationConfigurationException {
return parseResourceConfigValue(value, Long.MAX_VALUE);
}
|
@Test
public void testAbsoluteVcoresNegativeFractional() throws Exception {
expectNegativeValueOfResource("vcores");
parseResourceConfigValue(" 5120.3 mb, -2.35 vcores ");
}
|
@Override
public void serialize(ModelLocalUriId value, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
gen.writeStringField("model", value.model());
gen.writeStringField("basePath", decodedPath(value.basePath()));
gen.writeStringField("fullPath", decodedPath(value.fullPath()));
gen.writeEndObject();
}
|
@Test
void serializeEncodedPath() throws IOException {
String path = "/To+decode+first+part/To+decode+second+part/To+decode+third+part/";
LocalUri parsed = LocalUri.parse(path);
ModelLocalUriId modelLocalUriId = new ModelLocalUriId(parsed);
Writer jsonWriter = new StringWriter();
JsonGenerator jsonGenerator = new JsonFactory().createGenerator(jsonWriter);
SerializerProvider serializerProvider = new ObjectMapper().getSerializerProvider();
new ModelLocalUriIdSerializer().serialize(modelLocalUriId, jsonGenerator, serializerProvider);
jsonGenerator.flush();
String expected = "{\"model\":\"To%2Bdecode%2Bfirst%2Bpart\"," +
"\"basePath\":\"/To+decode+second+part/To+decode+third+part\"," +
"\"fullPath\":\"/To+decode+first+part/To+decode+second+part/To+decode+third+part\"}";
assertThat(jsonWriter.toString()).isEqualTo(expected);
}
|
public RegistryBuilder register(Boolean register) {
this.register = register;
return getThis();
}
|
@Test
void register() {
RegistryBuilder builder = new RegistryBuilder();
builder.register(true);
Assertions.assertTrue(builder.build().isRegister());
}
|
@Override
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext,
final ShardingSphereDatabase database, final ConnectionContext connectionContext) throws SQLException {
if (!(sqlStatementContext instanceof FetchStatementContext)) {
return new TransparentMergedResult(queryResults.get(0));
}
if (1 == queryResults.size()) {
return new IteratorStreamMergedResult(queryResults);
}
FetchStatementContext fetchStatementContext = (FetchStatementContext) sqlStatementContext;
Map<String, Integer> columnLabelIndexMap = getColumnLabelIndexMap(queryResults.get(0));
fetchStatementContext.getCursorStatementContext().getSelectStatementContext().setIndexes(columnLabelIndexMap);
return new FetchStreamMergedResult(queryResults, fetchStatementContext, getSchema(sqlStatementContext, database), connectionContext);
}
|
@Test
void assertBuildTransparentMergedResult() throws SQLException {
ShardingDDLResultMerger merger = new ShardingDDLResultMerger();
assertThat(merger.merge(createMultiQueryResults(), mock(SelectStatementContext.class), mock(ShardingSphereDatabase.class), mock(ConnectionContext.class)),
instanceOf(TransparentMergedResult.class));
}
|
@Secured(resource = Commons.NACOS_CORE_CONTEXT_V2 + "/loader", action = ActionTypes.WRITE)
@GetMapping("/reloadCurrent")
public ResponseEntity<String> reloadCount(@RequestParam Integer count,
@RequestParam(value = "redirectAddress", required = false) String redirectAddress) {
connectionManager.loadCount(count, redirectAddress);
return ResponseEntity.ok().body("success");
}
|
@Test
void testReloadCount() {
ResponseEntity<String> result = serverLoaderController.reloadCount(1, "1.1.1.1");
assertEquals("success", result.getBody());
}
|
public static UUnary create(Kind unaryOp, UExpression expression) {
checkArgument(
UNARY_OP_CODES.containsKey(unaryOp), "%s is not a recognized unary operation", unaryOp);
return new AutoValue_UUnary(unaryOp, expression);
}
|
@Test
public void logicalNegation() {
assertUnifiesAndInlines(
"!false", UUnary.create(Kind.LOGICAL_COMPLEMENT, ULiteral.booleanLit(false)));
}
|
@Override
public void onClick(View v) {
final AppCompatActivity activity = (AppCompatActivity) requireActivity();
switch (v.getId()) {
case R.id.go_to_languages_action:
startActivity(
new Intent(
Intent.ACTION_VIEW,
Uri.parse(requireContext().getString(R.string.deeplink_url_keyboards)),
requireContext(),
MainSettingsActivity.class));
break;
case R.id.go_to_theme_action:
startActivity(
new Intent(
Intent.ACTION_VIEW,
Uri.parse(requireContext().getString(R.string.deeplink_url_themes)),
requireContext(),
MainSettingsActivity.class));
break;
case R.id.go_to_all_settings_action:
startActivity(new Intent(getContext(), MainSettingsActivity.class));
// not returning to this Activity any longer.
activity.finish();
break;
default:
throw new IllegalArgumentException(
"Failed to handle " + v.getId() + " in WizardPageDoneAndMoreSettingsFragment");
}
}
|
@Test
public void testGoToLanguagesOnClick() {
final WizardPageDoneAndMoreSettingsFragment fragment = startFragment();
final ShadowApplication shadowApplication =
Shadows.shadowOf((Application) getApplicationContext());
shadowApplication.clearNextStartedActivities();
final View clickView = fragment.getView().findViewById(R.id.go_to_languages_action);
View.OnClickListener clickHandler = Shadows.shadowOf(clickView).getOnClickListener();
clickHandler.onClick(clickView);
final Intent startIntent = shadowApplication.getNextStartedActivity();
Assert.assertNotNull(startIntent);
final Intent expected =
new Intent(
Intent.ACTION_VIEW,
Uri.parse(getApplicationContext().getString(R.string.deeplink_url_keyboards)),
getApplicationContext(),
MainSettingsActivity.class);
assertChauffeurIntent(expected, startIntent);
}
|
public static boolean isP2WSH(Script script) {
if (!isP2WH(script))
return false;
List<ScriptChunk> chunks = script.chunks();
if (!chunks.get(0).equalsOpCode(OP_0))
return false;
byte[] chunk1data = chunks.get(1).data;
return chunk1data != null && chunk1data.length == SegwitAddress.WITNESS_PROGRAM_LENGTH_SH;
}
|
@Test
public void testCreateP2WSHOutputScript() {
assertTrue(ScriptPattern.isP2WSH(
ScriptBuilder.createP2WSHOutputScript(new ScriptBuilder().build())
));
}
|
@Override
public void apply(IntentOperationContext<FlowObjectiveIntent> intentOperationContext) {
Objects.requireNonNull(intentOperationContext);
Optional<IntentData> toUninstall = intentOperationContext.toUninstall();
Optional<IntentData> toInstall = intentOperationContext.toInstall();
List<FlowObjectiveIntent> uninstallIntents = intentOperationContext.intentsToUninstall();
List<FlowObjectiveIntent> installIntents = intentOperationContext.intentsToInstall();
if (!toInstall.isPresent() && !toUninstall.isPresent()) {
intentInstallCoordinator.intentInstallSuccess(intentOperationContext);
return;
}
if (toUninstall.isPresent()) {
IntentData intentData = toUninstall.get();
trackerService.removeTrackedResources(intentData.key(), intentData.intent().resources());
uninstallIntents.forEach(installable ->
trackerService.removeTrackedResources(intentData.intent().key(),
installable.resources()));
}
if (toInstall.isPresent()) {
IntentData intentData = toInstall.get();
trackerService.addTrackedResources(intentData.key(), intentData.intent().resources());
installIntents.forEach(installable ->
trackerService.addTrackedResources(intentData.key(),
installable.resources()));
}
FlowObjectiveIntentInstallationContext intentInstallationContext =
new FlowObjectiveIntentInstallationContext(intentOperationContext);
uninstallIntents.stream()
.map(intent -> buildObjectiveContexts(intent, REMOVE))
.flatMap(Collection::stream)
.forEach(context -> {
context.intentInstallationContext(intentInstallationContext);
intentInstallationContext.addContext(context);
intentInstallationContext.addPendingContext(context);
});
installIntents.stream()
.map(intent -> buildObjectiveContexts(intent, ADD))
.flatMap(Collection::stream)
.forEach(context -> {
context.intentInstallationContext(intentInstallationContext);
intentInstallationContext.addContext(context);
intentInstallationContext.addNextPendingContext(context);
});
intentInstallationContext.apply();
}
|
@Test
public void testGroupChainElementMissingError() {
// group chain element missing
intentInstallCoordinator = new TestIntentInstallCoordinator();
installer.intentInstallCoordinator = intentInstallCoordinator;
errors = ImmutableList.of(GROUPMISSING);
installer.flowObjectiveService = new TestFailedFlowObjectiveService(errors);
context = createInstallContext();
installer.apply(context);
successContext = intentInstallCoordinator.successContext;
assertEquals(successContext, context);
}
|
public boolean contains(final Object value)
{
return contains((int)value);
}
|
@Test
void initiallyContainsNoBoxedElements()
{
for (int i = 0; i < 10_000; i++)
{
assertFalse(testSet.contains(Integer.valueOf(i)));
}
}
|
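Note: the Object-to-int bridge in the focal method above compiles because, since Java 7, casting an Object to int narrows the reference to Integer and then unboxes it. A quick standalone check (plain Java, independent of the focal class):

class UnboxingCastDemo {
public static void main(String[] args) {
Object boxed = Integer.valueOf(42);
int unboxed = (int) boxed; // reference cast to Integer, then auto-unboxing (JLS 5.5)
System.out.println(unboxed); // 42
// A non-Integer argument fails the same cast at runtime:
// int bad = (int) (Object) "42"; // would throw ClassCastException
}
}
|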
@Override
public boolean rejoinNeededOrPending() {
if (!subscriptions.hasAutoAssignedPartitions())
return false;
// we need to rejoin if we performed the assignment and metadata has changed;
// owned partitions that no longer exist should also be dropped as lost
if (assignmentSnapshot != null && !assignmentSnapshot.matches(metadataSnapshot)) {
final String fullReason = String.format("cached metadata has changed from %s at the beginning of the rebalance to %s",
assignmentSnapshot, metadataSnapshot);
requestRejoinIfNecessary("cached metadata has changed", fullReason);
return true;
}
// we need to join if our subscription has changed since the last join
if (joinedSubscription != null && !joinedSubscription.equals(subscriptions.subscription())) {
final String fullReason = String.format("subscription has changed from %s at the beginning of the rebalance to %s",
joinedSubscription, subscriptions.subscription());
requestRejoinIfNecessary("subscription has changed", fullReason);
return true;
}
return super.rejoinNeededOrPending();
}
|
@Test
public void testNormalJoinGroupFollower() {
final Set<String> subscription = singleton(topic1);
final List<TopicPartition> owned = Collections.emptyList();
final List<TopicPartition> assigned = singletonList(t1p);
subscriptions.subscribe(subscription, Optional.of(rebalanceListener));
client.prepareResponse(groupCoordinatorResponse(node, Errors.NONE));
coordinator.ensureCoordinatorReady(time.timer(Long.MAX_VALUE));
// normal join group
client.prepareResponse(joinGroupFollowerResponse(1, consumerId, "leader", Errors.NONE));
client.prepareResponse(body -> {
SyncGroupRequest sync = (SyncGroupRequest) body;
return sync.data().memberId().equals(consumerId) &&
sync.data().generationId() == 1 &&
sync.groupAssignments().isEmpty();
}, syncGroupResponse(assigned, Errors.NONE));
coordinator.joinGroupIfNeeded(time.timer(Long.MAX_VALUE));
assertFalse(coordinator.rejoinNeededOrPending());
assertEquals(toSet(assigned), subscriptions.assignedPartitions());
assertEquals(subscription, subscriptions.metadataTopics());
assertEquals(0, rebalanceListener.revokedCount);
assertNull(rebalanceListener.revoked);
assertEquals(1, rebalanceListener.assignedCount);
assertEquals(getAdded(owned, assigned), rebalanceListener.assigned);
}
|
public boolean isGreaterThan(Version version) {
return !version.isUnknown() && compareTo(version) > 0;
}
|
@Test
public void isGreaterThan() throws Exception {
assertTrue(V3_0.isGreaterThan(of(2, 0)));
assertFalse(V3_0.isGreaterThan(of(3, 0)));
assertFalse(V3_0.isGreaterThan(of(4, 0)));
}
|
public static Formatter forDates(@Nonnull String format) {
return new DateFormat(format);
}
|
@Test
public void testDates() {
Formatter f = forDates("FMDay, Mon FMDDth, FMYYYY");
check(LocalDate.of(2022, 9, 26), f, "Monday, Sep 26th, 2022");
f = forDates("FMDD FMMonth FMYYYY");
check(LocalDate.of(2022, 9, 26), f, "26 Eylül 2022", TR);
f = forDates("YYYY-MM-DD FMHH:MI AM OF");
check(LocalDateTime.of(2022, 9, 26, 14, 53).atOffset(ZoneOffset.ofHours(3)), f,
"2022-09-26 2:53 PM +03:00");
f = forDates("HH12:MI A.M. TZTZH:TZM");
check(LocalTime.of(14, 53).atOffset(ZoneOffset.ofHours(3)), f, "02:53 P.M. GMT+03:00");
f = forDates("HH12:MI A.M.");
check(LocalTime.of(14, 53), f, "02:53 Ö.S.", TR);
f = forDates("At HH24:MI:SS, FMSSSS(=FMSSSSS) \"seconds are passed from the midnight.\"");
check(LocalTime.of(12, 34, 56), f,
"At 12:34:56, 45296(=45296) seconds are passed from the midnight.");
f = forDates("YYYY-MM-DD \"is the\" FMDDDth \"day of\" FMYYYY.");
check(LocalDate.of(2022, 9, 26), f, "2022-09-26 is the 269th day of 2022.");
f = forDates("Y,YYY YYYY YYY YY Y - FMY,YYY FMYYYY FMYYY FMYY FMY");
check(Year.of(1), f, "0,001 0001 001 01 1 - 1 1 1 1 1");
f = forDates("FF1 FF2 FF3(=MS) FF4 FF5 FF6(=US)");
check(LocalTime.ofNanoOfDay(123456789), f, "1 12 123(=123) 1234 12345 123456(=123456)");
f = forDates("\"Quarter\" FMYYYY-\"Q\"Q \"is in the\" FMCCth \"century.\"");
check(YearMonth.of(2022, 9), f, "Quarter 2022-Q3 is in the 21st century.");
f = forDates("\"Plato founded the Academy in c.\" FMYYYY AD.");
check(Year.of(-386), f, "Plato founded the Academy in c. 387 BC.");
f = forDates("\"Plato Akademi'yi\" A.D. FMYYYY \"civarında kurdu.\"");
check(Year.of(-386), f, "Plato Akademi'yi M.Ö. 387 civarında kurdu.", TR);
f = forDates("AD(=BC) A.D.(=B.C.) ad(=bc) a.d.(=b.c.)");
check(Year.of(0), f, "BC(=BC) B.C.(=B.C.) bc(=bc) b.c.(=b.c.)");
f = forDates("\"The Halley's closest approach to the Earth was on\" FMDD FMMonth FMYYYY AD (the FMJth \"Julian day\").");
check(LocalDate.of(837, 2, 28), f,
"The Halley's closest approach to the Earth was on 28 February 837 AD (the 2026827th Julian day).");
f = forDates("FMRM.FMRD.FMRY");
check(LocalDate.of(2022, 9, 26), f, "IX.XXVI.MMXXII");
}
|
public static ValueLabel formatClippedBitRate(long bytes) {
return new ValueLabel(bytes * 8, BITS_UNIT).perSec().clipG(100.0);
}
|
@Test
public void formatClippedBitsMega() {
vl = TopoUtils.formatClippedBitRate(3_123_123);
assertEquals(AM_WL, "23.83 Mbps", vl.toString());
assertFalse(AM_CL, vl.clipped());
}
|
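A worked check of the asserted label, assuming the value label scales with binary prefixes (1 M = 1024 * 1024), which is what the 23.83 figure implies:

class BitRateCheck {
public static void main(String[] args) {
long bits = 3_123_123L * 8; // 24_984_984 bits per second
double mbps = bits / (1024.0 * 1024.0); // ~= 23.827
System.out.printf("%.2f Mbps%n", mbps); // prints 23.83 Mbps
}
}
|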
public int getInteger(HazelcastProperty property) {
return Integer.parseInt(getString(property));
}
|
@Test
public void setProperty_inheritDefaultValueOfParentProperty() {
HazelcastProperty parent = new HazelcastProperty("parent", 1);
HazelcastProperty child = new HazelcastProperty("child", parent);
assertEquals(1, defaultProperties.getInteger(child));
}
|
static void verifyAddMissingValues(final List<KiePMMLMiningField> notTargetMiningFields,
final PMMLRequestData requestData) {
logger.debug("verifyMissingValues {} {}", notTargetMiningFields, requestData);
Collection<ParameterInfo> requestParams = requestData.getRequestParams();
notTargetMiningFields
.forEach(miningField -> {
ParameterInfo parameterInfo = requestParams.stream()
.filter(paramInfo -> miningField.getName().equals(paramInfo.getName()))
.findFirst()
.orElse(null);
if (parameterInfo == null) {
manageMissingValues(miningField, requestData);
}
});
}
|
@Test
void verifyAddMissingValuesNotMissingReturnInvalid() {
List<KiePMMLMiningField> miningFields = IntStream.range(0, 3).mapToObj(i -> {
DATA_TYPE dataType = DATA_TYPE.values()[i];
return KiePMMLMiningField.builder("FIELD-" + i, null)
.withDataType(dataType)
.withMissingValueTreatmentMethod(MISSING_VALUE_TREATMENT_METHOD.RETURN_INVALID)
.build();
})
.collect(Collectors.toList());
PMMLRequestData pmmlRequestData = new PMMLRequestData("123", "modelName");
pmmlRequestData.addRequestParam("FIELD-0", "123");
pmmlRequestData.addRequestParam("FIELD-1", 123);
pmmlRequestData.addRequestParam("FIELD-2", 1.23f);
PreProcess.verifyAddMissingValues(miningFields, pmmlRequestData);
}
|
public static SparkAppResourceSpec buildResourceSpec(
final SparkApplication app,
final KubernetesClient client,
final SparkAppSubmissionWorker worker) {
Map<String, String> confOverrides = overrideDependencyConf(app);
SparkAppResourceSpec resourceSpec = worker.getResourceSpec(app, client, confOverrides);
cleanUpTempResourcesForApp(app, confOverrides);
DriverDecorator decorator = new DriverDecorator(app);
decorator.decorate(resourceSpec.getConfiguredPod());
return resourceSpec;
}
|
@Test
void testBuildResourceSpecCoversBasicOverride() {
SparkApplication app = new SparkApplication();
app.setMetadata(
new ObjectMetaBuilder().withNamespace("foo").withName("bar-app").withUid("uid").build());
KubernetesClient mockClient = mock(KubernetesClient.class);
Pod mockDriver = mock(Pod.class);
when(mockDriver.getMetadata()).thenReturn(new ObjectMeta());
SparkAppResourceSpec mockSpec = mock(SparkAppResourceSpec.class);
when(mockSpec.getConfiguredPod()).thenReturn(mockDriver);
ArgumentCaptor<Map<String, String>> captor = ArgumentCaptor.forClass(Map.class);
SparkAppSubmissionWorker mockWorker = mock(SparkAppSubmissionWorker.class);
when(mockWorker.getResourceSpec(any(), any(), captor.capture())).thenReturn(mockSpec);
SparkAppResourceSpec spec =
SparkAppResourceSpecFactory.buildResourceSpec(app, mockClient, mockWorker);
verify(mockWorker).getResourceSpec(eq(app), eq(mockClient), any());
Map<String, String> props = captor.getValue();
assertTrue(props.containsKey("spark.kubernetes.namespace"));
assertEquals("foo", props.get("spark.kubernetes.namespace"));
ArgumentCaptor<ObjectMeta> metaArgumentCaptor = ArgumentCaptor.forClass(ObjectMeta.class);
verify(mockDriver).setMetadata(metaArgumentCaptor.capture());
assertEquals(mockSpec, spec);
ObjectMeta metaOverride = metaArgumentCaptor.getValue();
assertEquals(1, metaOverride.getOwnerReferences().size());
assertEquals("bar-app", metaOverride.getOwnerReferences().get(0).getName());
assertEquals("uid", metaOverride.getOwnerReferences().get(0).getUid());
assertEquals(app.getKind(), metaOverride.getOwnerReferences().get(0).getKind());
}
|
@SuppressWarnings("checkstyle:magicnumber")
public static long idFromString(String str) {
if (str == null || !ID_PATTERN.matcher(str).matches()) {
return -1;
}
str = StringUtil.removeCharacter(str, '-');
return Long.parseUnsignedLong(str, 16);
}
|
@Test
public void when_idFromString() {
assertEquals(0, idFromString("0000-0000-0000-0000"));
assertEquals(1, idFromString("0000-0000-0000-0001"));
assertEquals(Long.MAX_VALUE, idFromString("7fff-ffff-ffff-ffff"));
assertEquals(Long.MIN_VALUE, idFromString("8000-0000-0000-0000"));
assertEquals(-1, idFromString("ffff-ffff-ffff-ffff"));
assertEquals(1234567890123456789L, idFromString("1122-10f4-7de9-8115"));
assertEquals(-1234567890123456789L, idFromString("eedd-ef0b-8216-7eeb"));
}
|
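The negative-value cases in the test work because Long.parseUnsignedLong accepts 16 hex digits at or above 2^63 and maps them onto negative longs, which Long.parseLong would reject as out of range:

class UnsignedParseDemo {
public static void main(String[] args) {
System.out.println(Long.parseUnsignedLong("ffffffffffffffff", 16)); // -1
System.out.println(Long.parseUnsignedLong("8000000000000000", 16)); // -9223372036854775808 (Long.MIN_VALUE)
// Long.parseLong("ffffffffffffffff", 16) throws NumberFormatException
}
}
|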
public static void refreshSuperUserGroupsConfiguration() {
//load server side configuration;
refreshSuperUserGroupsConfiguration(new Configuration());
}
|
@Test
public void testWildcardIP() {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
"*");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From either IP should be fine
assertAuthorized(proxyUserUgi, "1.2.3.4");
assertAuthorized(proxyUserUgi, "1.2.3.5");
// Now set up a disallowed group
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
// Neither IP should be OK
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
|
@CanIgnoreReturnValue
public final Ordered containsExactly(@Nullable Object @Nullable ... varargs) {
List<@Nullable Object> expected =
(varargs == null) ? newArrayList((@Nullable Object) null) : asList(varargs);
return containsExactlyElementsIn(
expected, varargs != null && varargs.length == 1 && varargs[0] instanceof Iterable);
}
|
@Test
public void iterableContainsExactlyWithElementsThatThrowWhenYouCallHashCodeOneMismatch() {
HashCodeThrower one = new HashCodeThrower();
HashCodeThrower two = new HashCodeThrower();
expectFailureWhenTestingThat(asList(one, one)).containsExactly(one, two);
}
|
@Override
public Optional<DevOpsProjectCreator> getDevOpsProjectCreator(DbSession dbSession, Map<String, String> characteristics) {
return delegates.stream()
.flatMap(delegate -> delegate.getDevOpsProjectCreator(dbSession, characteristics).stream())
.findFirst();
}
|
@Test
public void getDevOpsProjectDescriptor_whenNoDelegatesReturningACreator_shouldReturnEmptyOptional() {
DelegatingDevOpsProjectCreatorFactory delegates = new DelegatingDevOpsProjectCreatorFactory(Set.of(mock(), mock()));
Optional<DevOpsProjectCreator> devOpsProjectCreator = delegates.getDevOpsProjectCreator(DB_SESSION, CHARACTERISTICS);
assertThat(devOpsProjectCreator).isEmpty();
}
|
@Override
public void setMonochrome(boolean monochrome) {
formats = monochrome ? monochrome() : ansi();
}
|
@Test
void should_print_error_message_for_after_hooks() {
Feature feature = TestFeatureParser.parse("path/test.feature", "" +
"Feature: feature name\n" +
" Scenario: scenario name\n" +
" Given first step\n");
ByteArrayOutputStream out = new ByteArrayOutputStream();
Runtime.builder()
.withFeatureSupplier(new StubFeatureSupplier(feature))
.withAdditionalPlugins(new PrettyFormatter(out))
.withRuntimeOptions(new RuntimeOptionsBuilder().setMonochrome().build())
.withBackendSupplier(new StubBackendSupplier(
emptyList(),
singletonList(new StubStepDefinition("first step", "path/step_definitions.java:3")),
singletonList(new StubHookDefinition(new StubException()))))
.build()
.run();
assertThat(out, bytes(equalToCompressingWhiteSpace("" +
"Scenario: scenario name # path/test.feature:2\n" +
" Given first step # path/step_definitions.java:3\n" +
" the stack trace")));
}
|
@Override
public <T> Invoker<T> buildInvokerChain(final Invoker<T> originalInvoker, String key, String group) {
Invoker<T> last = originalInvoker;
URL url = originalInvoker.getUrl();
List<ModuleModel> moduleModels = getModuleModelsFromUrl(url);
List<Filter> filters;
if (moduleModels != null && moduleModels.size() == 1) {
filters = ScopeModelUtil.getExtensionLoader(Filter.class, moduleModels.get(0))
.getActivateExtension(url, key, group);
} else if (moduleModels != null && moduleModels.size() > 1) {
filters = new ArrayList<>();
List<ExtensionDirector> directors = new ArrayList<>();
for (ModuleModel moduleModel : moduleModels) {
List<Filter> tempFilters = ScopeModelUtil.getExtensionLoader(Filter.class, moduleModel)
.getActivateExtension(url, key, group);
filters.addAll(tempFilters);
directors.add(moduleModel.getExtensionDirector());
}
filters = sortingAndDeduplication(filters, directors);
} else {
filters = ScopeModelUtil.getExtensionLoader(Filter.class, null).getActivateExtension(url, key, group);
}
if (!CollectionUtils.isEmpty(filters)) {
for (int i = filters.size() - 1; i >= 0; i--) {
final Filter filter = filters.get(i);
final Invoker<T> next = last;
last = new CopyOfFilterChainNode<>(originalInvoker, next, filter);
}
return new CallbackRegistrationInvoker<>(last, filters);
}
return last;
}
|
@Test
void testBuildInvokerChainForLocalReference() {
DefaultFilterChainBuilder defaultFilterChainBuilder = new DefaultFilterChainBuilder();
// verify that no filter is built by default
URL urlWithoutFilter =
URL.valueOf("injvm://127.0.0.1/DemoService").addParameter(INTERFACE_KEY, DemoService.class.getName());
urlWithoutFilter = urlWithoutFilter.setScopeModel(ApplicationModel.defaultModel());
AbstractInvoker<DemoService> invokerWithoutFilter =
new AbstractInvoker<DemoService>(DemoService.class, urlWithoutFilter) {
@Override
protected Result doInvoke(Invocation invocation) {
return null;
}
};
Invoker<?> invokerAfterBuild =
defaultFilterChainBuilder.buildInvokerChain(invokerWithoutFilter, REFERENCE_FILTER_KEY, CONSUMER);
// verify that if LogFilter is configured, LogFilter should exist in the filter chain
URL urlWithFilter = URL.valueOf("injvm://127.0.0.1/DemoService")
.addParameter(INTERFACE_KEY, DemoService.class.getName())
.addParameter(REFERENCE_FILTER_KEY, "log");
urlWithFilter = urlWithFilter.setScopeModel(ApplicationModel.defaultModel());
AbstractInvoker<DemoService> invokerWithFilter =
new AbstractInvoker<DemoService>(DemoService.class, urlWithFilter) {
@Override
protected Result doInvoke(Invocation invocation) {
return null;
}
};
invokerAfterBuild =
defaultFilterChainBuilder.buildInvokerChain(invokerWithFilter, REFERENCE_FILTER_KEY, CONSUMER);
Assertions.assertTrue(invokerAfterBuild instanceof FilterChainBuilder.CallbackRegistrationInvoker);
}
|
@Override
public void setFlushBulkSize(int flushBulkSize) {
}
|
@Test
public void setFlushBulkSize() {
mSensorsAPI.setFlushBulkSize(2000);
Assert.assertEquals(100, mSensorsAPI.getFlushBulkSize());
}
|
@Override
public boolean equals(CostModel otherModel) {
boolean equality = false;
if (this.getClass().equals(otherModel.getClass())) {
equality = ((LinearBorrowingCostModel) otherModel).feePerPeriod == this.feePerPeriod;
}
return equality;
}
|
@Test
public void testEquality() {
LinearBorrowingCostModel model = new LinearBorrowingCostModel(0.1);
CostModel modelSameClass = new LinearBorrowingCostModel(0.2);
CostModel modelSameFee = new LinearBorrowingCostModel(0.1);
CostModel modelOther = new ZeroCostModel();
boolean equality = model.equals(modelSameFee);
boolean inequality1 = model.equals(modelSameClass);
boolean inequality2 = model.equals(modelOther);
assertTrue(equality);
assertFalse(inequality1);
assertFalse(inequality2);
}
|
static PythonEnvironment preparePythonEnvironment(
ReadableConfig config, String entryPointScript, String tmpDir) throws IOException {
PythonEnvironment env = new PythonEnvironment();
// 1. set the path of python interpreter.
String pythonExec =
config.getOptional(PYTHON_CLIENT_EXECUTABLE)
.orElse(System.getenv(PYFLINK_CLIENT_EXECUTABLE));
if (pythonExec != null) {
env.pythonExec = pythonExec;
}
// 2. setup temporary local directory for the user files
tmpDir = new File(tmpDir).getAbsolutePath();
Path tmpDirPath = new Path(tmpDir);
tmpDirPath.getFileSystem().mkdirs(tmpDirPath);
env.tempDirectory = tmpDir;
// 3. append the internal lib files to PYTHONPATH.
if (System.getenv(ConfigConstants.ENV_FLINK_OPT_DIR) != null) {
String pythonLibDir =
System.getenv(ConfigConstants.ENV_FLINK_OPT_DIR) + File.separator + "python";
env.pythonPath =
getLibFiles(pythonLibDir).stream()
.map(p -> p.toFile().getAbsolutePath())
.collect(Collectors.joining(File.pathSeparator));
}
// 4. copy relevant python files to tmp dir and set them in PYTHONPATH.
if (config.getOptional(PYTHON_FILES).isPresent()) {
List<Path> pythonFiles =
Arrays.stream(config.get(PYTHON_FILES).split(FILE_DELIMITER))
.map(Path::new)
.collect(Collectors.toList());
addToPythonPath(env, pythonFiles);
}
// 5. set the archives directory as the working directory, so the user can access the
// content of the archives via relative paths
if (config.getOptional(PYTHON_ARCHIVES).isPresent()
&& (config.getOptional(PYTHON_CLIENT_EXECUTABLE).isPresent()
|| !StringUtils.isNullOrWhitespaceOnly(
System.getenv(PYFLINK_CLIENT_EXECUTABLE)))) {
env.archivesDirectory = String.join(File.separator, tmpDir, PYTHON_ARCHIVES_DIR);
// extract archives to archives directory
config.getOptional(PYTHON_ARCHIVES)
.ifPresent(
pyArchives -> {
for (String archive : pyArchives.split(FILE_DELIMITER)) {
final Path archivePath;
final String targetDirName;
final String originalFileName;
if (archive.contains(PythonDependencyUtils.PARAM_DELIMITER)) {
String[] filePathAndTargetDir =
archive.split(
PythonDependencyUtils.PARAM_DELIMITER, 2);
archivePath = new Path(filePathAndTargetDir[0]);
targetDirName = filePathAndTargetDir[1];
originalFileName = archivePath.getName();
} else {
archivePath = new Path(archive);
originalFileName = archivePath.getName();
targetDirName = originalFileName;
}
Path localArchivePath = archivePath;
try {
if (archivePath.getFileSystem().isDistributedFS()) {
localArchivePath =
new Path(
env.tempDirectory,
String.join(
File.separator,
UUID.randomUUID().toString(),
originalFileName));
FileUtils.copy(archivePath, localArchivePath, false);
}
} catch (IOException e) {
String msg =
String.format(
"Error occurred when copying %s to %s.",
archivePath, localArchivePath);
throw new RuntimeException(msg, e);
}
try {
CompressionUtils.extractFile(
localArchivePath.getPath(),
String.join(
File.separator,
env.archivesDirectory,
targetDirName),
originalFileName);
} catch (IOException e) {
throw new RuntimeException(
"Extract archives to archives directory failed.",
e);
}
}
});
}
// 6. append the configured python.pythonpath to the PYTHONPATH.
if (config.getOptional(PYTHON_PATH).isPresent()) {
env.pythonPath =
String.join(
File.pathSeparator,
config.getOptional(PYTHON_PATH).get(),
env.pythonPath);
}
if (entryPointScript != null) {
addToPythonPath(env, Collections.singletonList(new Path(entryPointScript)));
}
return env;
}
|
@Test
void testPrepareEnvironmentWithEntryPointScript() throws IOException {
File entryFile = new File(tmpDirPath + File.separator + "test.py");
// The file must actually exist
entryFile.createNewFile();
String entryFilePath = entryFile.getAbsolutePath();
Configuration config = new Configuration();
PythonEnvUtils.PythonEnvironment env =
preparePythonEnvironment(config, entryFilePath, tmpDirPath);
Set<String> expectedPythonPaths = new HashSet<>();
expectedPythonPaths.add(
new Path(String.join(File.separator, replaceUUID(env.tempDirectory), "{uuid}"))
.toString());
Set<String> actualPaths =
Arrays.stream(env.pythonPath.split(File.pathSeparator))
.map(PythonEnvUtilsTest::replaceUUID)
.collect(Collectors.toSet());
assertThat(actualPaths).isEqualTo(expectedPythonPaths);
}
|
public static <T> AsIterable<T> asIterable() {
return new AsIterable<>();
}
|
@Test
public void testViewUnboundedAsIterableDirect() {
testViewUnbounded(pipeline, View.asIterable());
}
|
public static int hashToIndex(int hash, int length) {
checkPositive("length", length);
if (hash == Integer.MIN_VALUE) {
return 0;
}
return abs(hash) % length;
}
|
@Test
public void hashToIndex_whenHashPositive() {
assertEquals(20, hashToIndex(20, 100));
assertEquals(20, hashToIndex(420, 100));
}
|
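The Integer.MIN_VALUE special case in the focal method exists because Math.abs overflows there; a minimal demonstration of why the guard is needed:

class AbsOverflowDemo {
public static void main(String[] args) {
System.out.println(Math.abs(Integer.MIN_VALUE)); // -2147483648: abs has no positive counterpart
System.out.println(Math.abs(Integer.MIN_VALUE) % 100); // -48, which would be an invalid index
}
}
|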
@Override
public List<T> build(String consumer, List<T> provider) {
return provider;
}
|
@Test
void testBuild() {
NoneSelectorContextBuilder<Instance> contextBuilder = new NoneSelectorContextBuilder<>();
List<Instance> providers = Collections.emptyList();
assertEquals(providers, contextBuilder.build("1.1.1.1", providers));
}
|
static String determineOperatingSystemCompleteName() {
try {
useAgentTmpDirIfNecessary();
OperatingSystem os = newSystemInfo().getOperatingSystem();
return String.format("%s %s%s",
os.getFamily(),
os.getVersionInfo().getVersion(),
optionalFrom(os.getVersionInfo().getCodeName()).map(s -> " (" + s + ")").orElse("")
);
} catch (Exception e) {
LOG.warn("Unable to determine OS platform from native, falling back to default", e);
return new SystemEnvironment().getOperatingSystemFamilyJvmName();
}
}
|
@Test
public void shouldDisableUdevUsage() {
SystemInfo.determineOperatingSystemCompleteName();
assertThat(GlobalConfig.get(OSHI_OS_LINUX_ALLOWUDEV, true)).isFalse();
}
|
public void queryMessage(
final String addr,
final QueryMessageRequestHeader requestHeader,
final long timeoutMillis,
final InvokeCallback invokeCallback,
final Boolean isUnqiueKey
) throws RemotingException, MQBrokerException, InterruptedException {
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.QUERY_MESSAGE, requestHeader);
request.addExtField(MixAll.UNIQUE_MSG_QUERY_FLAG, isUnqiueKey.toString());
this.remotingClient.invokeAsync(MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr), request, timeoutMillis,
invokeCallback);
}
|
@Test
public void testQueryMessage() throws MQBrokerException, RemotingException, InterruptedException {
QueryMessageRequestHeader requestHeader = mock(QueryMessageRequestHeader.class);
InvokeCallback callback = mock(InvokeCallback.class);
mqClientAPI.queryMessage(defaultBrokerAddr, requestHeader, defaultTimeout, callback, false);
}
|
public PTransform<InputT, OutputT> setDisplayData(@NonNull List<ItemSpec<?>> displayData) {
this.displayData = displayData;
return this;
}
|
@Test
public void testSetDisplayData() {
PTransform<PCollection<String>, PCollection<String>> transform =
new PTransform<PCollection<String>, PCollection<String>>() {
@Override
public PCollection<String> expand(PCollection<String> begin) {
throw new IllegalArgumentException("Should never be applied");
}
};
transform.setDisplayData(
ImmutableList.of(DisplayData.item("key1", "value1"), DisplayData.item("key2", 2L)));
final DisplayData displayData = DisplayData.from(transform);
assertThat(displayData.items(), hasSize(2));
assertThat(displayData, hasDisplayItem("key1", "value1"));
assertThat(displayData, hasDisplayItem("key2", 2L));
}
|
public static List<PreparableReporter> getPreparableReporters(Map<String, Object> daemonConf) {
List<String> clazzes = (List<String>) daemonConf.get(DaemonConfig.STORM_DAEMON_METRICS_REPORTER_PLUGINS);
List<PreparableReporter> reporterList = new ArrayList<>();
if (clazzes != null) {
for (String clazz : clazzes) {
reporterList.add(getPreparableReporter(clazz));
}
}
if (reporterList.isEmpty()) {
reporterList.add(new JmxPreparableReporter());
}
return reporterList;
}
|
@Test
public void getPreparableReporters() {
Map<String, Object> daemonConf = new HashMap<>();
List<PreparableReporter> reporters = MetricsUtils.getPreparableReporters(daemonConf);
assertEquals(1, reporters.size());
assertTrue(reporters.get(0) instanceof JmxPreparableReporter);
List<String> reporterPlugins = Arrays.asList("org.apache.storm.daemon.metrics.reporters.ConsolePreparableReporter",
"org.apache.storm.daemon.metrics.reporters.CsvPreparableReporter");
daemonConf.put(DaemonConfig.STORM_DAEMON_METRICS_REPORTER_PLUGINS, reporterPlugins);
reporters = MetricsUtils.getPreparableReporters(daemonConf);
assertEquals(2, reporters.size());
assertTrue(reporters.get(0) instanceof ConsolePreparableReporter);
assertTrue(reporters.get(1) instanceof CsvPreparableReporter);
}
|
public final Sink sink(final Sink sink) {
return new Sink() {
@Override public void write(Buffer source, long byteCount) throws IOException {
boolean throwOnTimeout = false;
enter();
try {
sink.write(source, byteCount);
throwOnTimeout = true;
} catch (IOException e) {
throw exit(e);
} finally {
exit(throwOnTimeout);
}
}
@Override public void flush() throws IOException {
boolean throwOnTimeout = false;
enter();
try {
sink.flush();
throwOnTimeout = true;
} catch (IOException e) {
throw exit(e);
} finally {
exit(throwOnTimeout);
}
}
@Override public void close() throws IOException {
boolean throwOnTimeout = false;
enter();
try {
sink.close();
throwOnTimeout = true;
} catch (IOException e) {
throw exit(e);
} finally {
exit(throwOnTimeout);
}
}
@Override public Timeout timeout() {
return AsyncTimeout.this;
}
@Override public String toString() {
return "AsyncTimeout.sink(" + sink + ")";
}
};
}
|
@Test public void wrappedThrowsWithTimeout() throws Exception {
Sink sink = new ForwardingSink(new Buffer()) {
@Override public void write(Buffer source, long byteCount) throws IOException {
try {
Thread.sleep(500);
throw new IOException("exception and timeout");
} catch (InterruptedException e) {
throw new AssertionError();
}
}
};
AsyncTimeout timeout = new AsyncTimeout();
timeout.timeout(250, TimeUnit.MILLISECONDS);
Sink timeoutSink = timeout.sink(sink);
try {
timeoutSink.write(null, 0);
fail();
} catch (InterruptedIOException expected) {
assertEquals("timeout", expected.getMessage());
assertEquals("exception and timeout", expected.getCause().getMessage());
}
}
|
public static boolean areExceptionsPresentInChain(Throwable error, Class ... types) {
while (error != null) {
for (Class type : types) {
if (type.isInstance(error)) {
return true;
}
}
error = error.getCause();
}
return false;
}
|
@Test
public void testAreExceptionsPresentInChain5() {
assertFalse(Exceptions.areExceptionsPresentInChain(new IllegalArgumentException(new IllegalArgumentException()), IllegalStateException.class));
}
|
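A usage sketch for the cause-chain walk above (Exceptions is the focal utility class; the chain below is illustrative):

Throwable chain = new RuntimeException(new IllegalStateException(new java.io.IOException("root")));
Exceptions.areExceptionsPresentInChain(chain, java.io.IOException.class); // true: found two causes down
Exceptions.areExceptionsPresentInChain(chain, NullPointerException.class); // false: nowhere in the chain
Exceptions.areExceptionsPresentInChain(chain, Error.class, RuntimeException.class); // true: the head itself matches
|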
@Override
public double mean() {
return (double) m * n / N;
}
|
@Test
public void testMean() {
System.out.println("mean");
HyperGeometricDistribution instance = new HyperGeometricDistribution(100, 30, 70);
instance.rand();
assertEquals(21, instance.mean(), 1E-7);
instance = new HyperGeometricDistribution(100, 30, 80);
instance.rand();
assertEquals(24, instance.mean(), 1E-7);
instance = new HyperGeometricDistribution(100, 30, 60);
instance.rand();
assertEquals(18, instance.mean(), 1E-7);
}
|
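Worked numbers behind the three assertions, using mean = m * n / N and assuming Smile's HyperGeometricDistribution takes (N, m, n) in that order:

// (N=100, m=30, n=70): 30 * 70 / 100 = 21
// (N=100, m=30, n=80): 30 * 80 / 100 = 24
// (N=100, m=30, n=60): 30 * 60 / 100 = 18
|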
public FEELFnResult<String> invoke(@ParameterName("list") List<?> list, @ParameterName("delimiter") String delimiter) {
if ( list == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null"));
}
if (list.isEmpty()) {
return FEELFnResult.ofResult(""); // If list is empty, the result is the empty string
}
StringJoiner sj = new StringJoiner(delimiter != null ? delimiter : ""); // If delimiter is null, the string elements are joined without a separator
for (Object element : list) {
if (element == null) {
continue; // Null elements in the list parameter are ignored.
} else if (element instanceof CharSequence) {
sj.add((CharSequence) element);
} else {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "contains an element which is not a string"));
}
}
return FEELFnResult.ofResult(sj.toString());
}
|
@Test
void stringJoinFunctionEmptyList() {
FunctionTestUtil.assertResult(stringJoinFunction.invoke(Collections.emptyList()), "");
FunctionTestUtil.assertResult(stringJoinFunction.invoke(Collections.emptyList(), "X"), "");
}
|
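A minimal plain-Java sketch of the same join semantics (joinStrings is a hypothetical helper, not the FEEL API; the error path for non-string elements is omitted here): nulls are skipped, a null delimiter means no separator, and an empty list yields the empty string.

static String joinStrings(java.util.List<?> list, String delimiter) {
java.util.StringJoiner sj = new java.util.StringJoiner(delimiter != null ? delimiter : "");
for (Object e : list) {
if (e instanceof CharSequence) { // null elements fail instanceof and are skipped
sj.add((CharSequence) e);
}
}
return sj.toString();
}
// joinStrings(java.util.Arrays.asList("a", null, "b"), "X") -> "aXb"
// joinStrings(java.util.Collections.emptyList(), "X") -> ""
|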
@SuppressWarnings("unchecked")
public Chain<T> orderNodes() {
List<T> chain = new ArrayList<>();
OrderedReadyNodes readyNodes = getReadyNodes();
while (!readyNodes.isEmpty() || popAllPhase(readyNodes) ) {
Node candidate = readyNodes.pop();
candidate.removed(readyNodes);
if ( candidate instanceof ComponentNode)
chain.add(((ComponentNode<T>)candidate).getComponent());
}
if ( chain.size() != numComponents)
throw new CycleDependenciesException(nameProviders);
//prevent accidental reuse
nameProviders = null;
return new Chain<>(id, chain);
}
|
@Test
void testRegular() throws Exception {
ChainBuilder chainBuilder = createDependencyHandler();
addAtoG(chainBuilder);
Chain<ChainedComponent> res = chainBuilder.orderNodes();
Iterator<ChainedComponent> i = res.components().iterator();
for (char j = 0; j < 'G' - 'A'; ++j) {
assertEquals(String.valueOf((char) ('A' + j)), name(i.next()));
}
}
|
@Override
public ConnectorPageSource createPageSource(
ConnectorTransactionHandle transaction,
ConnectorSession session,
ConnectorSplit split,
ConnectorTableLayoutHandle layout,
List<ColumnHandle> columns,
SplitContext splitContext,
RuntimeStats runtimeStats)
{
HiveTableLayoutHandle hiveLayout = (HiveTableLayoutHandle) layout;
List<HiveColumnHandle> selectedColumns = columns.stream()
.map(HiveColumnHandle.class::cast)
.collect(toList());
HiveSplit hiveSplit = (HiveSplit) split;
Path path = new Path(hiveSplit.getFileSplit().getPath());
Configuration configuration = hdfsEnvironment.getConfiguration(
new HdfsContext(
session,
hiveSplit.getDatabase(),
hiveSplit.getTable(),
hiveLayout.getTablePath(),
false),
path);
Optional<EncryptionInformation> encryptionInformation = hiveSplit.getEncryptionInformation();
CacheQuota cacheQuota = generateCacheQuota(hiveSplit);
HiveFileContext fileContext = new HiveFileContext(
splitContext.isCacheable(),
cacheQuota,
hiveSplit.getFileSplit().getExtraFileInfo().map(BinaryExtraHiveFileInfo::new),
OptionalLong.of(hiveSplit.getFileSplit().getFileSize()),
OptionalLong.of(hiveSplit.getFileSplit().getStart()),
OptionalLong.of(hiveSplit.getFileSplit().getLength()),
hiveSplit.getFileSplit().getFileModifiedTime(),
HiveSessionProperties.isVerboseRuntimeStatsEnabled(session),
runtimeStats);
if (columns.stream().anyMatch(columnHandle -> ((HiveColumnHandle) columnHandle).getColumnType().equals(AGGREGATED))) {
checkArgument(columns.stream().allMatch(columnHandle -> ((HiveColumnHandle) columnHandle).getColumnType().equals(AGGREGATED)), "Not all columns are of 'AGGREGATED' type");
if (hiveLayout.isFooterStatsUnreliable()) {
throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, format("Partial aggregation pushdown is not supported when footer stats are unreliable. " +
"Table %s has file %s with unreliable footer stats. " +
"Set session property [catalog-name].pushdown_partial_aggregations_into_scan=false and execute query again.",
hiveLayout.getSchemaTableName(),
hiveSplit.getFileSplit().getPath()));
}
return createAggregatedPageSource(aggregatedPageSourceFactories, configuration, session, hiveSplit, hiveLayout, selectedColumns, fileContext, encryptionInformation);
}
if (hiveLayout.isPushdownFilterEnabled()) {
Optional<ConnectorPageSource> selectivePageSource = createSelectivePageSource(
selectivePageSourceFactories,
configuration,
session,
hiveSplit,
hiveLayout,
selectedColumns,
hiveStorageTimeZone,
typeManager,
optimizedRowExpressionCache,
splitContext,
fileContext,
encryptionInformation);
if (selectivePageSource.isPresent()) {
return selectivePageSource.get();
}
}
TupleDomain<HiveColumnHandle> effectivePredicate = hiveLayout.getDomainPredicate()
.transform(Subfield::getRootName)
.transform(hiveLayout.getPredicateColumns()::get);
if (shouldSkipBucket(hiveLayout, hiveSplit, splitContext, isLegacyTimestampBucketing(session))) {
return new HiveEmptySplitPageSource();
}
if (shouldSkipPartition(typeManager, hiveLayout, hiveStorageTimeZone, hiveSplit, splitContext)) {
return new HiveEmptySplitPageSource();
}
Optional<ConnectorPageSource> pageSource = createHivePageSource(
cursorProviders,
pageSourceFactories,
configuration,
session,
hiveSplit.getFileSplit(),
hiveSplit.getTableBucketNumber(),
hiveSplit.getStorage(),
splitContext.getDynamicFilterPredicate().map(filter -> filter.transform(handle -> (HiveColumnHandle) handle).intersect(effectivePredicate)).orElse(effectivePredicate),
selectedColumns,
hiveLayout.getPredicateColumns(),
hiveSplit.getPartitionKeys(),
hiveStorageTimeZone,
typeManager,
hiveLayout.getSchemaTableName(),
hiveLayout.getPartitionColumns().stream().map(HiveColumnHandle.class::cast).collect(toList()),
hiveLayout.getDataColumns(),
hiveLayout.getTableParameters(),
hiveSplit.getPartitionDataColumnCount(),
hiveSplit.getTableToPartitionMapping(),
hiveSplit.getBucketConversion(),
hiveSplit.isS3SelectPushdownEnabled(),
fileContext,
hiveLayout.getRemainingPredicate(),
hiveLayout.isPushdownFilterEnabled(),
rowExpressionService,
encryptionInformation,
hiveSplit.getRowIdPartitionComponent());
if (pageSource.isPresent()) {
return pageSource.get();
}
throw new IllegalStateException("Could not find a file reader for split " + hiveSplit);
}
|
@Test(expectedExceptions = PrestoException.class,
expectedExceptionsMessageRegExp = "Partial aggregation pushdown is not supported when footer stats are unreliable. " +
"Table testdb.table has file file://test with unreliable footer stats. " +
"Set session property \\[catalog\\-name\\].pushdown_partial_aggregations_into_scan=false and execute query again.")
public void testFailsWhenFooterStatsUnreliable()
{
HivePageSourceProvider pageSourceProvider = createPageSourceProvider();
pageSourceProvider.createPageSource(
new HiveTransactionHandle(),
SESSION,
getHiveSplit(ORC),
getHiveTableLayout(false, true, true),
ImmutableList.of(LONG_AGGREGATED_COLUMN),
new SplitContext(false),
new RuntimeStats());
}
|
public static Counter upvoteCounter(MeterRegistry registry, String name) {
return counter(registry, name, Tag.of(SCENE, UPVOTE_SCENE));
}
|
@Test
void upvoteCounter() {
MeterRegistry meterRegistry = new SimpleMeterRegistry();
MeterUtils.upvoteCounter(meterRegistry, "posts.content.halo.run/fake-post")
.increment(2);
RequiredSearch requiredSearch = meterRegistry.get("posts.content.halo.run/fake-post");
assertThat(requiredSearch.counter().count()).isEqualTo(2);
Meter.Id id = requiredSearch.counter().getId();
assertThat(id.getTag(MeterUtils.SCENE)).isEqualTo(MeterUtils.UPVOTE_SCENE);
assertThat(id.getTag(MeterUtils.METRICS_COMMON_TAG.getKey()))
.isEqualTo(MeterUtils.METRICS_COMMON_TAG.getValue());
}
|
@Udf
public String lpad(
@UdfParameter(description = "String to be padded") final String input,
@UdfParameter(description = "Target length") final Integer targetLen,
@UdfParameter(description = "Padding string") final String padding) {
if (input == null) {
return null;
}
if (padding == null || padding.isEmpty() || targetLen == null || targetLen < 0) {
return null;
}
final StringBuilder sb = new StringBuilder(targetLen + padding.length());
final int padUpTo = Math.max(targetLen - input.length(), 0);
for (int i = 0; i < padUpTo; i += padding.length()) {
sb.append(padding);
}
sb.setLength(padUpTo);
sb.append(input);
sb.setLength(targetLen);
return sb.toString();
}
|
@Test
public void shouldReturnEmptyByteBufferForZeroLength() {
final ByteBuffer result = udf.lpad(BYTES_123, 0, BYTES_45);
assertThat(result, is(EMPTY_BYTES));
}
|
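A trace of the padding arithmetic in the String lpad above, e.g. for lpad("abc", 7, "xy"):

// padUpTo = max(7 - 3, 0) = 4
// the loop appends "xy" twice -> "xyxy"
// sb.setLength(padUpTo) trims any overshoot from a multi-character pad (a no-op here)
// appending the input -> "xyxyabc"
// sb.setLength(targetLen) also truncates when the input is longer than targetLen:
// lpad("abcdef", 4, "x") -> "abcd"
|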
@Override
public void rollJournal(long newName) throws JournalException {
// Doesn't need to roll if current database contains no journals
if (currentJournalDB.getDb().count() == 0) {
return;
}
String currentDbName = currentJournalDB.getDb().getDatabaseName();
String currentIdStr = currentDbName;
if (!prefix.isEmpty()) { // remove prefix
currentIdStr = currentDbName.substring(prefix.length());
}
long currentName = Long.parseLong(currentIdStr);
long newNameVerify = currentName + currentJournalDB.getDb().count();
if (newName == newNameVerify) {
String newDbName = getFullDatabaseName(newName);
LOG.info("roll edit log. new db name is {}", newDbName);
currentJournalDB.close();
currentJournalDB = bdbEnvironment.openDatabase(newDbName);
} else {
String msg = String.format("roll journal error! journalId and db journal numbers is not match. "
+ "journal id: %d, current db: %s, expected db count: %d",
newName, currentDbName, newNameVerify);
LOG.error(msg);
throw new JournalException(msg);
}
}
|
@Test(expected = JournalException.class)
public void testRollJournal(@Mocked CloseSafeDatabase closeSafeDatabase,
@Mocked BDBEnvironment environment,
@Mocked Database database) throws Exception {
new Expectations(closeSafeDatabase) {
{
closeSafeDatabase.getDb();
minTimes = 0;
result = database;
closeSafeDatabase.close();
minTimes = 0;
}
};
BDBJEJournal journal = new BDBJEJournal(environment, closeSafeDatabase);
// 1. no data, do nothing and return
new Expectations(database) {
{
database.count();
times = 1;
result = 0;
}
};
journal.rollJournal(111);
// 2. normal case: current db is 9 and has 10 logs, so the next journal id is expected to be 19
new Expectations(database) {
{
database.count();
times = 2;
result = 10;
database.getDatabaseName();
times = 1;
result = 9;
}
};
journal.rollJournal(19);
// 3. exception case: current db is 9 and has 10 logs, so rolling to 18 mismatches the expected 19
new Expectations(database) {
{
database.count();
times = 2;
result = 10;
database.getDatabaseName();
times = 1;
result = 9;
}
};
journal.rollJournal(18);
Assert.fail();
}
|
@Override
public Optional<DispatchEvent> build(final DataChangedEvent event) {
String instanceId = ComputeNode.getInstanceIdByComputeNode(event.getKey());
if (!Strings.isNullOrEmpty(instanceId)) {
Optional<DispatchEvent> result = createInstanceDispatchEvent(event, instanceId);
if (result.isPresent()) {
return result;
}
}
if (event.getKey().startsWith(ComputeNode.getShowProcessListTriggerNodePath())) {
return createReportLocalProcessesEvent(event);
}
if (event.getKey().startsWith(ComputeNode.getKillProcessTriggerNodePath())) {
return createKillLocalProcessEvent(event);
}
return Optional.empty();
}
|
@Test
void assertCreateUpdateLabelsEvent() {
Optional<DispatchEvent> actual = new ComputeNodeStateDispatchEventBuilder()
.build(new DataChangedEvent("/nodes/compute_nodes/labels/foo_instance_id",
YamlEngine.marshal(Arrays.asList("label_1", "label_2")), Type.UPDATED));
assertTrue(actual.isPresent());
assertThat(((LabelsEvent) actual.get()).getLabels(), is(Arrays.asList("label_1", "label_2")));
assertThat(((LabelsEvent) actual.get()).getInstanceId(), is("foo_instance_id"));
}
|
@Override public long get(long key1, long key2) {
return super.get0(key1, key2);
}
|
@Test
public void testGet() {
final long key1 = randomKey();
final long key2 = randomKey();
final SlotAssignmentResult slot = insert(key1, key2);
final long valueAddress2 = hsa.get(key1, key2);
assertEquals(slot.address(), valueAddress2);
}
|
public boolean hasMaterial(MaterialConfig materialConfig) {
for (ConfigRepoConfig c : this) {
if (c.getRepo().equals(materialConfig)) {
return true;
}
}
return false;
}
|
@Test
public void shouldReturnTrueThatHasMaterialWhenAddedConfigRepo() {
repos.add(ConfigRepoConfig.createConfigRepoConfig(git("http://git"), "myplugin", "id"));
assertThat(repos.hasMaterial(git("http://git")), is(true));
}
|
static boolean isValidEncodedValue(String name) {
if (name.length() < 2 || name.startsWith("in_") || name.startsWith("backward_")
|| !isLowerLetter(name.charAt(0)) || SourceVersion.isKeyword(name))
return false;
int underscoreCount = 0;
for (int i = 1; i < name.length(); i++) {
char c = name.charAt(i);
if (c == '_') {
if (underscoreCount > 0) return false;
underscoreCount++;
} else if (!isLowerLetter(c) && !Character.isDigit(c)) {
return false;
} else {
underscoreCount = 0;
}
}
return true;
}
|
@Test
public void testEncodedValueName() {
for (String str : Arrays.asList("blup_test", "test", "test12", "car_test_test")) {
assertTrue(isValidEncodedValue(str), str);
}
for (String str : Arrays.asList("Test", "12test", "test|3", "car__test", "small_car$average_speed", "tes$0",
"blup_te.st_", "car___test", "car$$access", "test{34", "truck__average_speed", "blup.test", "test,21",
"täst", "blup.two.three", "blup..test")) {
assertFalse(isValidEncodedValue(str), str);
}
for (String str : Arrays.asList("break", "switch")) {
assertFalse(isValidEncodedValue(str), str);
}
}
|
static JdbcIO.PreparedStatementSetCaller getPreparedStatementSetCaller(
Schema.FieldType fieldType) {
switch (fieldType.getTypeName()) {
case ARRAY:
case ITERABLE:
return (element, ps, i, fieldWithIndex) -> {
Collection<Object> value = element.getArray(fieldWithIndex.getIndex());
if (value == null) {
setArrayNull(ps, i);
} else {
Schema.FieldType collectionElementType =
Preconditions.checkArgumentNotNull(fieldType.getCollectionElementType());
ps.setArray(
i + 1,
ps.getConnection()
.createArrayOf(collectionElementType.getTypeName().name(), value.toArray()));
}
};
case LOGICAL_TYPE:
{
Schema.LogicalType<?, ?> logicalType = checkArgumentNotNull(fieldType.getLogicalType());
if (Objects.equals(logicalType, LogicalTypes.JDBC_UUID_TYPE.getLogicalType())) {
return (element, ps, i, fieldWithIndex) ->
ps.setObject(
i + 1, element.getLogicalTypeValue(fieldWithIndex.getIndex(), UUID.class));
}
String logicalTypeName = logicalType.getIdentifier();
// Special case for Timestamp and Numeric, which are logical types in the portable framework
// but have their own fieldType in Java.
if (logicalTypeName.equals(MicrosInstant.IDENTIFIER)) {
// Process timestamp of MicrosInstant kind, which should only be passed from other type
// systems such as SQL and other Beam SDKs.
return (element, ps, i, fieldWithIndex) -> {
// MicrosInstant uses native java.time.Instant instead of joda.Instant.
java.time.Instant value =
element.getLogicalTypeValue(fieldWithIndex.getIndex(), java.time.Instant.class);
ps.setTimestamp(i + 1, value == null ? null : new Timestamp(value.toEpochMilli()));
};
} else if (logicalTypeName.equals(FixedPrecisionNumeric.IDENTIFIER)) {
return (element, ps, i, fieldWithIndex) -> {
ps.setBigDecimal(i + 1, element.getDecimal(fieldWithIndex.getIndex()));
};
} else if (logicalTypeName.equals("DATE")) {
return (element, ps, i, fieldWithIndex) -> {
ReadableDateTime value = element.getDateTime(fieldWithIndex.getIndex());
ps.setDate(
i + 1,
value == null
? null
: new Date(getDateOrTimeOnly(value.toDateTime(), true).getTime().getTime()));
};
} else if (logicalTypeName.equals("TIME")) {
return (element, ps, i, fieldWithIndex) -> {
ReadableDateTime value = element.getDateTime(fieldWithIndex.getIndex());
ps.setTime(
i + 1,
value == null
? null
: new Time(getDateOrTimeOnly(value.toDateTime(), false).getTime().getTime()));
};
} else if (logicalTypeName.equals("TIMESTAMP_WITH_TIMEZONE")) {
return (element, ps, i, fieldWithIndex) -> {
ReadableDateTime value = element.getDateTime(fieldWithIndex.getIndex());
if (value == null) {
ps.setTimestamp(i + 1, null);
} else {
Calendar calendar = withTimestampAndTimezone(value.toDateTime());
ps.setTimestamp(i + 1, new Timestamp(calendar.getTime().getTime()), calendar);
}
};
} else if (logicalTypeName.equals("OTHER")) {
return (element, ps, i, fieldWithIndex) ->
ps.setObject(
i + 1, element.getValue(fieldWithIndex.getIndex()), java.sql.Types.OTHER);
} else {
            // generic Beam logical type (such as a portable logical type)
return getPreparedStatementSetCaller(logicalType.getBaseType());
}
}
default:
{
JdbcIO.PreparedStatementSetCaller pssc =
typeNamePsSetCallerMap.get(fieldType.getTypeName());
if (pssc != null) {
return pssc;
} else {
throw new RuntimeException(
fieldType.getTypeName().name()
+ " in schema is not supported while writing. Please provide statement and"
+ " preparedStatementSetter");
}
}
}
}
|
@Test
public void testGetPreparedStatementSetCaller() throws Exception {
Schema wantSchema =
Schema.builder()
.addField("col1", Schema.FieldType.INT64)
.addField("col2", Schema.FieldType.INT64)
.addField("col3", Schema.FieldType.INT64)
.build();
String generatedStmt = JdbcUtil.generateStatement("test_table", wantSchema.getFields());
String expectedStmt = "INSERT INTO test_table(col1, col2, col3) VALUES(?, ?, ?)";
assertEquals(expectedStmt, generatedStmt);
}
|
@Override
public E computeIfAbsent(String key, Function<? super String, ? extends E> mappingFunction) {
try {
return cacheStore.invoke(key, new AtomicComputeProcessor<>(), mappingFunction);
} catch (EntryProcessorException e) {
throw new RuntimeException(e.getCause());
}
}
|
@Test
public void computeIfAbsent_cacheThrowsException_throwsUnwrappedEntryProcessorException() {
Function<String, Integer> mappingFunction = s -> {
throw new EntryProcessorException(new IllegalArgumentException());
};
doReturn(null).when(mutableEntryMock).getValue();
entryProcessorMock = new CacheRegistryStore.AtomicComputeProcessor<>();
entryProcessorArgMock = mappingFunction;
try {
classUnderTest.computeIfAbsent(CACHE_KEY, mappingFunction);
fail("Test should've thrown EntryProcessorException");
} catch (RuntimeException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
}
}
|
@Override
public SchemaResult getValueSchema(
final Optional<String> topicName,
final Optional<Integer> schemaId,
final FormatInfo expectedFormat,
final SerdeFeatures serdeFeatures
) {
return getSchema(topicName, schemaId, expectedFormat, serdeFeatures, false);
}
|
@Test
public void shouldReturnErrorFromGetValueSchemaIfNotFound() throws Exception {
// Given:
when(srClient.getLatestSchemaMetadata(any()))
.thenThrow(notFoundException());
// When:
final SchemaResult result = supplier.getValueSchema(Optional.of(TOPIC_NAME),
Optional.empty(), expectedFormat, SerdeFeatures.of());
// Then:
assertThat(result.schemaAndId, is(Optional.empty()));
assertThat(result.failureReason, is(not(Optional.empty())));
verifyFailureMessageForValue(result, Optional.empty());
}
|
@Override
public void audit(final QueryContext queryContext, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database, final ShardingRule rule) {
Collection<ShardingAuditStrategyConfiguration> auditStrategies = getShardingAuditStrategies(queryContext.getSqlStatementContext(), rule);
if (auditStrategies.isEmpty()) {
return;
}
Collection<String> disableAuditNames = queryContext.getHintValueContext().getDisableAuditNames();
for (ShardingAuditStrategyConfiguration auditStrategy : auditStrategies) {
for (String auditorName : auditStrategy.getAuditorNames()) {
if (!auditStrategy.isAllowHintDisable() || !disableAuditNames.contains(auditorName.toLowerCase())) {
rule.getAuditors().get(auditorName).check(queryContext.getSqlStatementContext(), queryContext.getParameters(), globalRuleMetaData, database);
}
}
}
}
|
@Test
void assertCheckFailed() {
ShardingAuditAlgorithm auditAlgorithm = rule.getAuditors().get("auditor_1");
RuleMetaData globalRuleMetaData = mock(RuleMetaData.class);
doThrow(new DMLWithoutShardingKeyException()).when(auditAlgorithm).check(sqlStatementContext, Collections.emptyList(), globalRuleMetaData, databases.get("foo_db"));
DMLWithoutShardingKeyException ex = assertThrows(DMLWithoutShardingKeyException.class, () -> new ShardingSQLAuditor().audit(
new QueryContext(sqlStatementContext, "", Collections.emptyList(), hintValueContext, mockConnectionContext(), mock(ShardingSphereMetaData.class)), globalRuleMetaData,
databases.get("foo_db"), rule));
assertThat(ex.getMessage(), is("Not allow DML operation without sharding conditions."));
verify(rule.getAuditors().get("auditor_1")).check(sqlStatementContext, Collections.emptyList(), globalRuleMetaData, databases.get("foo_db"));
}
|
public double calculateDensity(Graph graph, boolean isGraphDirected) {
    // density = multiplier * E / (N * (N - 1)); an undirected edge fills two
    // ordered node pairs, hence multiplier 2. Assumes N >= 2, otherwise the
    // denominator is zero.
    double result;
double edgesCount = graph.getEdgeCount();
double nodesCount = graph.getNodeCount();
double multiplier = 1;
if (!isGraphDirected) {
multiplier = 2;
}
result = (multiplier * edgesCount) / (nodesCount * nodesCount - nodesCount);
return result;
}
|
@Test
public void testTwoConnectedNodesDensity() {
GraphModel graphModel = GraphGenerator.generateCompleteUndirectedGraph(2);
Graph graph = graphModel.getGraph();
GraphDensity d = new GraphDensity();
double density = d.calculateDensity(graph, false);
assertEquals(density, 1.0);
}
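
The arithmetic behind the assertion above, written out as a standalone check (values chosen for illustration only):

// Undirected triangle: 3 nodes, 3 edges -> density = 2*3 / (3*3 - 3) = 1.0
double undirected = (2 * 3.0) / (3 * 3 - 3);
// Directed pair with a single edge: 2 nodes, 1 edge -> density = 1 / (2*2 - 2) = 0.5
double directed = (1 * 1.0) / (2 * 2 - 2);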
|
@Override
public void initialize(Map<String, String> props) {
this.properties = SerializableMap.copyOf(props);
this.s3FileIOProperties = new S3FileIOProperties(properties);
this.createStack =
PropertyUtil.propertyAsBoolean(props, "init-creation-stacktrace", true)
? Thread.currentThread().getStackTrace()
: null;
// Do not override s3 client if it was provided
if (s3 == null) {
Object clientFactory = S3FileIOAwsClientFactories.initialize(props);
if (clientFactory instanceof S3FileIOAwsClientFactory) {
this.s3 = ((S3FileIOAwsClientFactory) clientFactory)::s3;
}
if (clientFactory instanceof AwsClientFactory) {
this.s3 = ((AwsClientFactory) clientFactory)::s3;
}
if (clientFactory instanceof CredentialSupplier) {
this.credential = ((CredentialSupplier) clientFactory).getCredential();
}
if (s3FileIOProperties.isPreloadClientEnabled()) {
client();
}
}
initMetrics(properties);
}
|
@Test
public void testResolvingFileIOLoadWithoutConf() {
ResolvingFileIO resolvingFileIO = new ResolvingFileIO();
resolvingFileIO.initialize(ImmutableMap.of());
FileIO result =
DynMethods.builder("io")
.hiddenImpl(ResolvingFileIO.class, String.class)
.build(resolvingFileIO)
.invoke("s3://foo/bar");
assertThat(result).isInstanceOf(S3FileIO.class);
}
|
public FloatArrayAsIterable usingTolerance(double tolerance) {
return new FloatArrayAsIterable(tolerance(tolerance), iterableSubject());
}
|
@Test
public void usingTolerance_containsExactly_primitiveFloatArray_success() {
assertThat(array(1.1f, TOLERABLE_2POINT2, 3.3f))
.usingTolerance(DEFAULT_TOLERANCE)
.containsExactly(array(2.2f, 1.1f, 3.3f));
}
|
public boolean match(int left, int right) {
return left == right;
}
|
@Test
public void integerShouldEqual() {
Integer a = 334;
Integer b = 334;
boolean match = new NumberMatch().match(a, b);
assertTrue(match);
a = -123;
b = -123;
match = new NumberMatch().match(a, b);
assertTrue(match);
a = -122;
b = -123;
match = new NumberMatch().match(a, b);
assertFalse(match);
a = -123;
b = -122;
match = new NumberMatch().match(a, b);
assertFalse(match);
}
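
Note that match(int, int) unboxes its Integer arguments before the == comparison, so the assertions hold even for values outside the Integer cache; a minimal illustration:

Integer a = 334;
Integer b = 334;
// a == b is generally false here (two distinct boxed objects outside the -128..127 cache),
// but match(a, b) is true because both arguments are unboxed to primitive int first.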
|
public static void addTestNode(Class<? extends ExecNode<?>> execNodeClass) {
addToLookupMap(execNodeClass);
}
|
@Test
void testNoJsonCreator() {
assertThatThrownBy(() -> ExecNodeMetadataUtil.addTestNode(DummyNodeNoJsonCreator.class))
.isInstanceOf(IllegalStateException.class)
.hasMessage(
"ExecNode: org.apache.flink.table.planner.plan.utils."
+ "ExecNodeMetadataUtilTest.DummyNodeNoJsonCreator does not "
+ "implement @JsonCreator annotation on constructor.");
}
|
void handleTestStepFinished(TestStepFinished event) {
if (event.getTestStep() instanceof PickleStepTestStep && event.getResult().getStatus().is(Status.PASSED)) {
PickleStepTestStep testStep = (PickleStepTestStep) event.getTestStep();
addUsageEntry(event.getResult(), testStep);
}
}
|
@Test
void resultWithNullDuration() {
OutputStream out = new ByteArrayOutputStream();
UsageFormatter usageFormatter = new UsageFormatter(out);
PickleStepTestStep testStep = mockTestStep();
Result result = new Result(Status.PASSED, Duration.ZERO, null);
usageFormatter
.handleTestStepFinished(new TestStepFinished(Instant.EPOCH, mock(TestCase.class), testStep, result));
Map<String, List<UsageFormatter.StepContainer>> usageMap = usageFormatter.usageMap;
assertThat(usageMap.size(), is(equalTo(1)));
List<UsageFormatter.StepContainer> durationEntries = usageMap.get("stepDef");
assertThat(durationEntries.size(), is(equalTo(1)));
assertThat(durationEntries.get(0).getName(), is(equalTo("step")));
assertThat(durationEntries.get(0).getDurations().size(), is(equalTo(1)));
assertThat(durationEntries.get(0).getDurations().get(0).getDuration(), is(equalTo(0.0)));
}
|
@SuppressWarnings("unchecked")
@Override
public ConnectResponse<List<String>> connectors() {
try {
LOG.debug("Issuing request to Kafka Connect at URI {} to list connectors", connectUri);
final ConnectResponse<List<String>> connectResponse = withRetries(() -> Request
.get(resolveUri(CONNECTORS))
.setHeaders(requestHeaders)
.responseTimeout(Timeout.ofMilliseconds(requestTimeoutMs))
.connectTimeout(Timeout.ofMilliseconds(requestTimeoutMs))
.execute(httpClient)
.handleResponse(
createHandler(HttpStatus.SC_OK, new TypeReference<List<String>>() {},
Function.identity())));
connectResponse.error()
.ifPresent(error -> LOG.warn("Could not list connectors: {}.", error));
return connectResponse;
} catch (final Exception e) {
throw new KsqlServerException(e);
}
}
|
@Test
public void testList() throws JsonProcessingException {
// Given:
WireMock.stubFor(
WireMock.get(WireMock.urlEqualTo(pathPrefix + "/connectors"))
.withHeader(AUTHORIZATION.toString(), new EqualToPattern(AUTH_HEADER))
.withHeader(CUSTOM_HEADER_NAME, new EqualToPattern(CUSTOM_HEADER_VALUE))
.willReturn(WireMock.aResponse()
.withStatus(HttpStatus.SC_OK)
.withBody(MAPPER.writeValueAsString(ImmutableList.of("one", "two"))))
);
// When:
final ConnectResponse<List<String>> response = client.connectors();
// Then:
assertThat(response.datum(), OptionalMatchers.of(is(ImmutableList.of("one", "two"))));
assertThat("Expected no error!", !response.error().isPresent());
}
|
SubtaskCommittableManager<CommT> merge(SubtaskCommittableManager<CommT> other) {
checkArgument(other.getSubtaskId() == this.getSubtaskId());
this.numExpectedCommittables += other.numExpectedCommittables;
this.requests.addAll(other.requests);
this.numDrained += other.numDrained;
this.numFailed += other.numFailed;
return this;
}
|
@Test
void testMerge() {
final SubtaskCommittableManager<Integer> subtaskCommittableManager =
new SubtaskCommittableManager<>(
Collections.singletonList(new CommitRequestImpl<>(1, METRIC_GROUP)),
5,
1,
2,
1,
2L,
METRIC_GROUP);
subtaskCommittableManager.merge(
new SubtaskCommittableManager<>(
Arrays.asList(
new CommitRequestImpl<>(2, METRIC_GROUP),
new CommitRequestImpl<>(3, METRIC_GROUP)),
10,
2,
3,
1,
2L,
METRIC_GROUP));
assertThat(subtaskCommittableManager.getNumCommittables()).isEqualTo(11);
assertThat(subtaskCommittableManager.getNumDrained()).isEqualTo(3);
assertThat(subtaskCommittableManager.isFinished()).isFalse();
assertThat(subtaskCommittableManager.getNumFailed()).isEqualTo(5);
assertThat(subtaskCommittableManager.getPendingRequests()).hasSize(3);
}
|
List<Token> tokenize() throws ScanException {
List<Token> tokenList = new ArrayList<Token>();
StringBuilder buf = new StringBuilder();
while (pointer < patternLength) {
char c = pattern.charAt(pointer);
pointer++;
switch (state) {
case LITERAL_STATE:
handleLiteralState(c, tokenList, buf);
break;
case START_STATE:
handleStartState(c, tokenList, buf);
break;
case DEFAULT_VAL_STATE:
                handleDefaultValueState(c, tokenList, buf);
                break;
            default:
}
}
// EOS
switch (state) {
case LITERAL_STATE:
addLiteralToken(tokenList, buf);
break;
case DEFAULT_VAL_STATE:
// trailing colon. see also LOGBACK-1140
buf.append(CoreConstants.COLON_CHAR);
addLiteralToken(tokenList, buf);
break;
case START_STATE:
// trailing $. see also LOGBACK-1149
buf.append(CoreConstants.DOLLAR);
addLiteralToken(tokenList, buf);
break;
}
return tokenList;
}
|
@Test
    public void simpleVariable() throws ScanException {
String input = "${abc}";
Tokenizer tokenizer = new Tokenizer(input);
List<Token> tokenList = tokenizer.tokenize();
witnessList.add(Token.START_TOKEN);
witnessList.add(new Token(Token.Type.LITERAL, "abc"));
witnessList.add(Token.CURLY_RIGHT_TOKEN);
assertEquals(witnessList, tokenList);
}
|
@Override
public Local create(final Path file) {
return this.create(new UUIDRandomStringService().random(), file);
}
|
@Test
public void testVersion() {
final String temp = StringUtils.removeEnd(System.getProperty("java.io.tmpdir"), File.separator);
final String s = System.getProperty("file.separator");
{
final Path file = new Path("/p/f", EnumSet.of(Path.Type.file));
file.attributes().setRegion("region");
file.attributes().setVersionId("2");
assertEquals(String.format("%s%su%sp%s1744299885%sf", temp, s, s, s, s),
new DefaultTemporaryFileService().create("u", file).getAbsolute());
}
{
final Path file = new Path("/p", EnumSet.of(Path.Type.directory));
file.attributes().setRegion("region");
file.attributes().setVersionId("2");
assertEquals(String.format("%s%su%s1744299885%sp", temp, s, s, s),
new DefaultTemporaryFileService().create("u", file).getAbsolute());
}
}
|
@Override
public Schema getSourceSchema() {
return sourceSchema;
}
|
@Test
public void renameBadlyFormattedSchemaTest() throws IOException {
TypedProperties props = Helpers.setupSchemaOnDFS("streamer-config", "file_schema_provider_invalid.avsc");
props.put(SANITIZE_SCHEMA_FIELD_NAMES.key(), "true");
this.schemaProvider = new FilebasedSchemaProvider(props, jsc);
assertEquals(this.schemaProvider.getSourceSchema(), generateRenamedSchemaWithDefaultReplacement());
}
|
public VplsConfig vplsFromIface(String iface) {
for (VplsConfig vpls : vplss()) {
if (vpls.isAttached(iface)) {
return vpls;
}
}
return null;
}
|
@Test
public void getVplsFromInterface() {
assertNotNull("VPLS not found", vplsAppConfig.vplsFromIface(IF1));
assertNull("VPLS unexpectedly found",
vplsAppConfig.vplsFromIface(IF_NON_EXIST));
}
|
public static Builder builder() {
return new Builder();
}
|
@Test
public void processingHintsForbiddenInContentElementPayload() {
assertThrows(IllegalArgumentException.class,
() -> ContentElement.builder().addPayloadItem(StoreHint.INSTANCE));
}
|
@Override
public SendResult send(final Message message) {
return send(message, this.rocketmqProducer.getSendMsgTimeout());
}
|
@Test
public void testSend_Not_OK() throws InterruptedException, RemotingException, MQClientException, MQBrokerException {
SendResult sendResult = new SendResult();
sendResult.setSendStatus(SendStatus.FLUSH_DISK_TIMEOUT);
when(rocketmqProducer.send(any(Message.class), anyLong())).thenReturn(sendResult);
try {
producer.send(producer.createBytesMessage("HELLO_TOPIC", new byte[] {'a'}));
failBecauseExceptionWasNotThrown(OMSRuntimeException.class);
} catch (Exception e) {
assertThat(e).hasMessageContaining("Send message to RocketMQ broker failed.");
}
}
|
T getFunction(final List<SqlArgument> arguments) {
// first try to get the candidates without any implicit casting
Optional<T> candidate = findMatchingCandidate(arguments, false);
if (candidate.isPresent()) {
return candidate.get();
} else if (!supportsImplicitCasts) {
throw createNoMatchingFunctionException(arguments);
}
// if none were found (candidate isn't present) try again with implicit casting
candidate = findMatchingCandidate(arguments, true);
if (candidate.isPresent()) {
return candidate.get();
}
throw createNoMatchingFunctionException(arguments);
}
|
@Test
public void shouldFindPreferredOneArgWithCast() {
// Given:
final KsqlScalarFunction[] functions = new KsqlScalarFunction[]{
function(OTHER, -1, LONG),
function(EXPECTED, -1, INT),
function(OTHER, -1, DOUBLE)
};
Arrays.stream(functions).forEach(udfIndex::addFunction);
// When:
final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of(SqlArgument.of(SqlTypes.INTEGER)));
// Then:
assertThat(fun.name(), equalTo(EXPECTED));
}
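
The lookup above is deliberately two-phase; a comment-only sketch of how the test's arguments resolve:

// Phase 1 (no implicit casts): SqlArgument INTEGER matches function(EXPECTED, -1, INT)
// exactly, so the LONG and DOUBLE overloads are never considered.
// Phase 2 (implicit casts) would only run if phase 1 found no candidate,
// e.g. if only the LONG and DOUBLE overloads existed.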
|
public List<HistoryKey> getCurrentHistory() {
if (mLoadedKeys.size() == 0)
      // For an unknown reason, the history must never be empty, so seed it with the default emoji.
mLoadedKeys.add(new HistoryKey(DEFAULT_EMOJI, DEFAULT_EMOJI));
return Collections.unmodifiableList(mLoadedKeys);
}
|
@Test
public void testLoad() {
mSharedPreferences
.getString(R.string.settings_key_quick_text_history, R.string.settings_default_empty)
.set("1,2,3,4,5,6");
mUnderTest = new QuickKeyHistoryRecords(mSharedPreferences);
List<QuickKeyHistoryRecords.HistoryKey> keys = mUnderTest.getCurrentHistory();
Assert.assertEquals(3, keys.size());
Assert.assertEquals("1", keys.get(0).name);
Assert.assertEquals("2", keys.get(0).value);
Assert.assertEquals("3", keys.get(1).name);
Assert.assertEquals("4", keys.get(1).value);
Assert.assertEquals("5", keys.get(2).name);
Assert.assertEquals("6", keys.get(2).value);
}
|
@Override
public FilteredMessage apply(Message msg) {
try (var ignored = executionTime.time()) {
return doApply(msg);
}
}
|
@Test
void applyWithFilter(MessageFactory messageFactory) {
final var filterRules = List.of(
RuleDao.builder()
.id("668cff8ed9f9636b4b629c29")
.title("[668cff8ed9f9636b4b629c29] Test Filter 1")
.source("""
rule "[668cff8ed9f9636b4b629c29] Test Filter 1"
when has_field(field : "source")
then
__remove_from_stream_destination__(stream_id : "000000000000000000000001", destination_type : "indexer");
end
""")
.build()
);
final var pipeline = Pipeline.builder()
.id(defaultStream.getId()) // Must be the stream ID to make PipelineRuleOutputFilterState#getPipelinesForMessage work
.name("Test")
.stages(ImmutableSortedSet.of(
Stage.builder()
.stage(0)
.match(Stage.Match.EITHER)
.ruleReferences(filterRules.stream().map(RuleDao::title).toList())
.build()
))
.build();
final var resolver = createResolver(filterRules);
final var pipelines = resolver.resolveFunctions(Set.of(pipeline), PipelineMetricRegistry.create(new MetricRegistry(), "P", "R"));
final var filter = createFilter(pipelines, Set.of("indexer", "other"));
final var message = messageFactory.createMessage("msg", "src", Tools.nowUTC());
message.addStream(defaultStream);
final var filteredMessage = filter.apply(message);
assertThat(filteredMessage.message()).isEqualTo(ImmutableMessage.wrap(message));
        // The filter rule removes the default stream from the indexer destination, so it shouldn't be in the
        // destinations after running the filter.
assertThat(filteredMessage.destinations().keySet()).containsExactlyInAnyOrder("other");
assertThat(filteredMessage.destinations().get("indexer")).isEmpty();
assertThat(filteredMessage.destinations().get("other")).containsExactlyInAnyOrder(defaultStream);
}
|
@PUT
@Timed
@Path("{configClass}")
@Consumes(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Update configuration in database")
@RequiresPermissions({RestPermissions.CLUSTER_CONFIG_ENTRY_CREATE, RestPermissions.CLUSTER_CONFIG_ENTRY_EDIT})
@AuditEvent(type = AuditEventTypes.CLUSTER_CONFIGURATION_UPDATE)
public Response update(@ApiParam(name = "configClass", value = "The name of the cluster configuration class", required = true)
@PathParam("configClass") @NotBlank String configClass,
@ApiParam(name = "body", value = "The payload of the cluster configuration", required = true)
@NotNull InputStream body) throws IOException {
final Class<?> cls = classFromName(configClass);
if (cls == null) {
throw new NotFoundException(createNoClassMsg(configClass));
}
final Object configObject = parseConfigObject(configClass, body, cls);
validateConfigObject(configObject);
writeConfigObject(configClass, configObject);
return Response.accepted(configObject).build();
}
|
@Test
void putClassConsideredUnsafe(@TempDir Path tmpDir) throws IOException {
final Path file = tmpDir.resolve("secrets.txt");
Files.writeString(file, "secret content");
final ClusterConfigResource resource = new ClusterConfigResource(clusterConfigService,
new RestrictedChainingClassLoader(new ChainingClassLoader(this.getClass().getClassLoader()),
SafeClasses.allGraylogInternal()),
new ObjectMapperProvider().get(),
clusterConfigValidatorService
);
assertThatThrownBy(() -> resource.update("java.io.File",
new ByteArrayInputStream(f("\"%s\"", file.toAbsolutePath()).getBytes(StandardCharsets.UTF_8))))
.isInstanceOf(BadRequestException.class)
.hasMessageContaining("Prevented loading of unsafe class");
}
|
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload,
final ConnectionSession connectionSession) {
switch (commandPacketType) {
case COM_QUIT:
return new MySQLComQuitPacket();
case COM_INIT_DB:
return new MySQLComInitDbPacket(payload);
case COM_FIELD_LIST:
return new MySQLComFieldListPacket(payload);
case COM_QUERY:
return new MySQLComQueryPacket(payload);
case COM_STMT_PREPARE:
return new MySQLComStmtPreparePacket(payload);
case COM_STMT_EXECUTE:
MySQLServerPreparedStatement serverPreparedStatement =
connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex()));
return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount());
case COM_STMT_SEND_LONG_DATA:
return new MySQLComStmtSendLongDataPacket(payload);
case COM_STMT_RESET:
return new MySQLComStmtResetPacket(payload);
case COM_STMT_CLOSE:
return new MySQLComStmtClosePacket(payload);
case COM_SET_OPTION:
return new MySQLComSetOptionPacket(payload);
case COM_PING:
return new MySQLComPingPacket();
case COM_RESET_CONNECTION:
return new MySQLComResetConnectionPacket();
default:
return new MySQLUnsupportedCommandPacket(commandPacketType);
}
}
|
@Test
void assertNewInstanceWithComCreateDbPacket() {
assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_CREATE_DB, payload, connectionSession), instanceOf(MySQLUnsupportedCommandPacket.class));
}
|
Token<DelegationTokenIdentifier> delegationToken() throws IOException {
String delegation = param(DelegationParam.NAME);
if (delegation == null) {
return null;
}
final Token<DelegationTokenIdentifier> token = new
Token<DelegationTokenIdentifier>();
token.decodeFromUrlString(delegation);
URI nnUri = URI.create(HDFS_URI_SCHEME + "://" + namenodeId());
boolean isLogical = HAUtilClient.isLogicalUri(conf, nnUri);
if (isLogical) {
token.setService(
HAUtilClient.buildTokenServiceForLogicalUri(nnUri, HDFS_URI_SCHEME));
} else {
token.setService(SecurityUtil.buildTokenService(nnUri));
}
return token;
}
|
@Test
public void testDeserializeHAToken() throws IOException {
Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
final Token<DelegationTokenIdentifier> token = new
Token<DelegationTokenIdentifier>();
QueryStringDecoder decoder = new QueryStringDecoder(
WebHdfsHandler.WEBHDFS_PREFIX + "/?"
+ NamenodeAddressParam.NAME + "=" + LOGICAL_NAME + "&"
+ DelegationParam.NAME + "=" + token.encodeToUrlString());
ParameterParser testParser = new ParameterParser(decoder, conf);
final Token<DelegationTokenIdentifier> tok2 = testParser.delegationToken();
Assert.assertTrue(HAUtilClient.isTokenForLogicalUri(tok2));
}
|
public static int ipToInt(String ip) {
try {
return bytesToInt(ipToBytesByInet(ip));
} catch (Exception e) {
throw new IllegalArgumentException(ip + " is invalid IP");
}
}
|
@Test
void testIpToInt() {
assertEquals(2130706433, InternetAddressUtil.ipToInt("127.0.0.1"));
assertEquals(-1062731775, InternetAddressUtil.ipToInt("192.168.0.1"));
}
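
The expected values follow directly from packing the four octets into a big-endian 32-bit int; a standalone check:

// 127.0.0.1 -> (127 << 24) | (0 << 16) | (0 << 8) | 1 = 2130706433
int loopback = (127 << 24) | (0 << 16) | (0 << 8) | 1;
// 192.168.0.1 overflows the signed int range, hence the negative value
int privateAddr = (192 << 24) | (168 << 16) | (0 << 8) | 1; // -1062731775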
|
@Override
public void writeTo(MysqlSerializer serializer) {
MysqlCapability capability = CAPABILITY;
if (supportSSL) {
capability = new MysqlCapability(capability.getFlags()
| MysqlCapability.Flag.CLIENT_SSL.getFlagBit());
}
serializer.writeInt1(PROTOCOL_VERSION);
        // JDBC uses this version to check which protocol the server supports
serializer.writeNulTerminateString(Config.mysql_server_version);
serializer.writeInt4(connectionId);
// first 8 bytes of auth plugin data
serializer.writeBytes(authPluginData, 0, 8);
// filler
serializer.writeInt1(0);
// lower 2 bytes of capability flags
serializer.writeInt2(capability.getFlags() & 0XFFFF);
serializer.writeInt1(CHARACTER_SET);
serializer.writeInt2(STATUS_FLAGS);
        // upper 2 bytes of capability flags
serializer.writeInt2(capability.getFlags() >> 16);
if (capability.isPluginAuth()) {
            serializer.writeInt1(authPluginData.length + 1); // +1 for the trailing '\0'
} else {
serializer.writeInt1(0);
}
        // reserved: ten zero bytes
serializer.writeBytes(new byte[10]);
if (capability.isSecureConnection()) {
            // NOTE: the MySQL protocol requires writing at least 13 bytes here,
            // i.e. max(13, len(auth-plugin-data) - 8)
            serializer.writeBytes(authPluginData, 8, 12);
            // append one zero byte so the total comes to 13
            serializer.writeInt1(0);
}
if (capability.isPluginAuth()) {
serializer.writeNulTerminateString(NATIVE_AUTH_PLUGIN_NAME);
}
}
|
@Test
public void testWrite() {
MysqlHandshakePacket packet = new MysqlHandshakePacket(1090, false);
MysqlSerializer serializer = MysqlSerializer.newInstance(capability);
packet.writeTo(serializer);
ByteBuffer buffer = serializer.toByteBuffer();
// assert protocol version
Assert.assertEquals(10, MysqlProto.readInt1(buffer));
// server version
Assert.assertEquals("5.1.0", new String(MysqlProto.readNulTerminateString(buffer)));
// connection id
Assert.assertEquals(1090, MysqlProto.readInt4(buffer));
// plugin data 1
byte[] pluginData1 = MysqlProto.readFixedString(buffer, 8);
Assert.assertEquals(0, MysqlProto.readInt1(buffer));
        // lower 2 bytes of capability flags
        int flags = MysqlProto.readInt2(buffer);
// char set
Assert.assertEquals(33, MysqlProto.readInt1(buffer));
// status flags
Assert.assertEquals(0, MysqlProto.readInt2(buffer));
// capability flags
flags |= MysqlProto.readInt2(buffer) << 16;
Assert.assertEquals(MysqlCapability.DEFAULT_CAPABILITY.getFlags(), flags);
// length of plugin data
Assert.assertEquals(21, MysqlProto.readInt1(buffer));
        // reserved 10 zero bytes
byte[] toCheck = new byte[10];
byte[] reserved = MysqlProto.readFixedString(buffer, 10);
for (int i = 0; i < 10; ++i) {
Assert.assertEquals(toCheck[i], reserved[i]);
}
byte[] pluginData2 = MysqlProto.readFixedString(buffer, 12);
byte[] pluginData = Bytes.concat(pluginData1, pluginData2);
for (int i = 0; i < 20; ++i) {
Assert.assertEquals(buf[i], pluginData[i]);
}
// one byte
Assert.assertEquals(0, MysqlProto.readInt1(buffer));
Assert.assertEquals(22, buffer.remaining());
}
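
For orientation, the byte layout the assertions above walk through, per the MySQL Handshake V10 wire format:

// protocol version          int<1>   = 10
// server version            NUL-terminated string
// connection id             int<4>
// auth-plugin-data part 1   string<8>
// filler                    int<1>   = 0x00
// capability flags (low)    int<2>
// character set             int<1>
// status flags              int<2>
// capability flags (high)   int<2>
// auth plugin data length   int<1>   (21 = 20-byte scramble + '\0')
// reserved                  string<10> of zeros
// auth-plugin-data part 2   string<12>, plus one zero filler byte
// auth plugin name          NUL-terminated string (when CLIENT_PLUGIN_AUTH is set)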
|
@Override
protected int getJDBCPort() {
return PostgreSQLContainer.POSTGRESQL_PORT;
}
|
@Test
public void testGetJDBCPortReturnsCorrectValue() {
assertThat(testManager.getJDBCPort()).isEqualTo(PostgreSQLContainer.POSTGRESQL_PORT);
}
|
@Override
@Nullable
public ExecutionGraphInfo get(JobID jobId) {
try {
return executionGraphInfoCache.get(jobId);
} catch (ExecutionException e) {
LOG.debug(
"Could not load archived execution graph information for job id {}.", jobId, e);
return null;
}
}
|
@Test
public void testUnknownGet() throws IOException {
final File rootDir = temporaryFolder.newFolder();
try (final FileExecutionGraphInfoStore executionGraphStore =
createDefaultExecutionGraphInfoStore(
rootDir,
new ScheduledExecutorServiceAdapter(EXECUTOR_RESOURCE.getExecutor()))) {
assertThat(executionGraphStore.get(new JobID()), Matchers.nullValue());
}
}
|
public PolicyGenerator(Configuration conf, GPGContext context) {
setConf(conf);
init(context);
}
|
@Test
public void testPolicyGenerator() throws YarnException {
policyGenerator = new TestablePolicyGenerator();
policyGenerator.setPolicy(mock(GlobalPolicy.class));
policyGenerator.run();
verify(policyGenerator.getPolicy(), times(1))
.updatePolicy("default", clusterInfos, null);
verify(policyGenerator.getPolicy(), times(1))
.updatePolicy("default2", clusterInfos, null);
}
|
@Override
public SendResult send(
Message msg) throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
msg.setTopic(withNamespace(msg.getTopic()));
if (this.getAutoBatch() && !(msg instanceof MessageBatch)) {
return sendByAccumulator(msg, null, null);
} else {
return sendDirect(msg, null, null);
}
}
|
@Test
public void testSendMessage_NoNameSrv() throws RemotingException, InterruptedException, MQBrokerException {
when(mQClientAPIImpl.getNameServerAddressList()).thenReturn(new ArrayList<>());
try {
producer.send(message);
failBecauseExceptionWasNotThrown(MQClientException.class);
} catch (MQClientException e) {
assertThat(e).hasMessageContaining("No name server address");
}
}
|
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> fetch(final Bytes key) {
return wrapped().fetch(key);
}
|
@Test
public void shouldDelegateToUnderlyingStoreWhenFetching() {
store.fetch(bytesKey);
verify(inner).fetch(bytesKey);
}
|
@Override
public void onError(final Exception e) {
LOG.error("websocket server[{}] is error.....", getURI(), e);
}
|
@Test
public void testOnError() {
shenyuWebsocketClient = spy(shenyuWebsocketClient);
Assertions.assertDoesNotThrow(() -> shenyuWebsocketClient.onError(new ShenyuException("test")));
}
|
@Override
public EncodedMessage transform(ActiveMQMessage message) throws Exception {
if (message == null) {
return null;
}
long messageFormat = 0;
Header header = null;
Properties properties = null;
Map<Symbol, Object> daMap = null;
Map<Symbol, Object> maMap = null;
Map<String,Object> apMap = null;
Map<Object, Object> footerMap = null;
Section body = convertBody(message);
if (message.isPersistent()) {
if (header == null) {
header = new Header();
}
header.setDurable(true);
}
byte priority = message.getPriority();
if (priority != Message.DEFAULT_PRIORITY) {
if (header == null) {
header = new Header();
}
header.setPriority(UnsignedByte.valueOf(priority));
}
String type = message.getType();
if (type != null) {
if (properties == null) {
properties = new Properties();
}
properties.setSubject(type);
}
MessageId messageId = message.getMessageId();
if (messageId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setMessageId(getOriginalMessageId(message));
}
ActiveMQDestination destination = message.getDestination();
if (destination != null) {
if (properties == null) {
properties = new Properties();
}
properties.setTo(destination.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
}
ActiveMQDestination replyTo = message.getReplyTo();
if (replyTo != null) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyTo(replyTo.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
}
String correlationId = message.getCorrelationId();
if (correlationId != null) {
if (properties == null) {
properties = new Properties();
}
try {
properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
} catch (AmqpProtocolException e) {
properties.setCorrelationId(correlationId);
}
}
long expiration = message.getExpiration();
if (expiration != 0) {
long ttl = expiration - System.currentTimeMillis();
if (ttl < 0) {
ttl = 1;
}
if (header == null) {
header = new Header();
}
header.setTtl(new UnsignedInteger((int) ttl));
if (properties == null) {
properties = new Properties();
}
properties.setAbsoluteExpiryTime(new Date(expiration));
}
long timeStamp = message.getTimestamp();
if (timeStamp != 0) {
if (properties == null) {
properties = new Properties();
}
properties.setCreationTime(new Date(timeStamp));
}
// JMSX Message Properties
int deliveryCount = message.getRedeliveryCounter();
if (deliveryCount > 0) {
if (header == null) {
header = new Header();
}
header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount));
}
String userId = message.getUserID();
if (userId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8)));
}
String groupId = message.getGroupID();
if (groupId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupId(groupId);
}
int groupSequence = message.getGroupSequence();
if (groupSequence > 0) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence));
}
final Map<String, Object> entries;
try {
entries = message.getProperties();
} catch (IOException e) {
throw JMSExceptionSupport.create(e);
}
for (Map.Entry<String, Object> entry : entries.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (key.startsWith(JMS_AMQP_PREFIX)) {
if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) {
messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class);
continue;
} else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
continue;
} else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
continue;
} else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (maMap == null) {
maMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
maMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class));
continue;
} else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class));
continue;
} else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (daMap == null) {
daMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
daMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (footerMap == null) {
footerMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
footerMap.put(Symbol.valueOf(name), value);
continue;
}
            } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX)) {
// strip off the scheduled message properties
continue;
}
// The property didn't map into any other slot so we store it in the
// Application Properties section of the message.
if (apMap == null) {
apMap = new HashMap<>();
}
apMap.put(key, value);
int messageType = message.getDataStructureType();
if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) {
                // record the data structure type so advisory messages can be recognized
                Object data = message.getDataStructure();
                if (data != null) {
apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName());
}
}
}
final AmqpWritableBuffer buffer = new AmqpWritableBuffer();
encoder.setByteBuffer(buffer);
if (header != null) {
encoder.writeObject(header);
}
if (daMap != null) {
encoder.writeObject(new DeliveryAnnotations(daMap));
}
if (maMap != null) {
encoder.writeObject(new MessageAnnotations(maMap));
}
if (properties != null) {
encoder.writeObject(properties);
}
if (apMap != null) {
encoder.writeObject(new ApplicationProperties(apMap));
}
if (body != null) {
encoder.writeObject(body);
}
if (footerMap != null) {
encoder.writeObject(new Footer(footerMap));
}
return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength());
}
|
@Test
public void testConvertTextMessageToAmqpMessageWithNoBodyOriginalEncodingWasNull() throws Exception {
ActiveMQTextMessage outbound = createTextMessage();
outbound.setShortProperty(JMS_AMQP_ORIGINAL_ENCODING, AMQP_NULL);
outbound.onSend();
outbound.storeContent();
JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer();
EncodedMessage encoded = transformer.transform(outbound);
assertNotNull(encoded);
Message amqp = encoded.decode();
assertNull(amqp.getBody());
}
|
public <T> List<T> fromCurrentList(final String json, final Class<T> clazz) {
return GSON.fromJson(json, TypeToken.getParameterized(CopyOnWriteArrayList.class, clazz).getType());
}
|
@Test
public void testFromCurrentList() {
Map<String, Object> map = ImmutableMap.of("id", "123", "name", "test", "data", "测试");
List<Map<String, Object>> list = ImmutableList.of(ImmutableMap.copyOf(map), ImmutableMap.copyOf(map),
ImmutableMap.copyOf(map));
String json = "[{\"name\":\"test\",\"id\":\"123\",\"data\":\"测试\"},"
+ "{\"name\":\"test\",\"id\":\"123\",\"data\":\"测试\"},"
+ "{\"name\":\"test\",\"id\":\"123\",\"data\":\"测试\"}]";
List<? extends Map> testList = GsonUtils.getInstance().fromCurrentList(json, map.getClass());
Assertions.assertEquals(list, testList);
}
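
Because fromCurrentList materializes the array as a CopyOnWriteArrayList, the returned list can be iterated safely while other threads mutate it; a minimal usage sketch (element type chosen for illustration):

List<String> names = GsonUtils.getInstance().fromCurrentList("[\"a\",\"b\"]", String.class);
// Iteration works on a snapshot; a concurrent add/remove by another thread
// never triggers a ConcurrentModificationException.
for (String n : names) {
    System.out.println(n);
}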
|
public static Builder custom() {
return new Builder();
}
|
@Test(expected = IllegalArgumentException.class)
public void testBuildWithIllegalMaxWait() {
ThreadPoolBulkheadConfig.custom()
.keepAliveDuration(Duration.ofMillis(-1))
.build();
}
|
public static Message parse(String in)
{
Message message = new Message();
if (in.startsWith("@"))
{
String[] tags = in.substring(1)
.split(";");
for (String tag : tags)
{
int eq = tag.indexOf('=');
if (eq == -1) continue;
String key = tag.substring(0, eq);
            String value = tag.substring(eq + 1)
                // decode escaped backslashes first (via a placeholder) so that a raw
                // "\\n" (escaped backslash followed by 'n') is not misread as newline
                .replace("\\\\", "\0")
                .replace("\\:", ";")
                .replace("\\s", " ")
                .replace("\\r", "\r")
                .replace("\\n", "\n")
                .replace("\0", "\\");
message.tags.put(key, value);
}
int sp = in.indexOf(' ');
in = in.substring(sp + 1);
}
if (in.startsWith(":"))
{
int sp = in.indexOf(' ');
message.source = in.substring(1, sp);
in = in.substring(sp + 1);
}
int sp = in.indexOf(' ');
if (sp == -1)
{
message.command = in;
message.arguments = new String[0];
return message;
}
message.command = in.substring(0, sp);
String args = in.substring(sp + 1);
List<String> argList = new ArrayList<>();
do
{
String arg;
if (args.startsWith(":"))
{
arg = args.substring(1);
sp = -1;
}
else
{
sp = args.indexOf(' ');
arg = sp == -1 ? args : args.substring(0, sp);
}
args = args.substring(sp + 1);
argList.add(arg);
} while (sp != -1);
message.arguments = argList.toArray(new String[0]);
return message;
}
|
@Test
public void testParse()
{
Message message = Message.parse("@badges=subscriber/0;color=;display-name=kappa_kid_;emotes=;id=6539b42a-e945-4a83-a5b7-018149ca9fa7;mod=0;room-id=27107346;subscriber=1;tmi-sent-ts=1535926830652;turbo=0;user-id=33390095;user-type= :kappa_kid_!kappa_kid_@kappa_kid_.tmi.twitch.tv PRIVMSG #b0aty :how do u add charges to that zeah book?");
Map<String, String> messageTags = message.getTags();
assertEquals("subscriber/0", messageTags.get("badges"));
assertEquals("kappa_kid_!kappa_kid_@kappa_kid_.tmi.twitch.tv", message.getSource());
assertEquals("PRIVMSG", message.getCommand());
assertEquals("#b0aty", message.getArguments()[0]);
assertEquals("how do u add charges to that zeah book?", message.getArguments()[1]);
message = Message.parse("@badges=moderator/1,subscriber/12,bits/10000;color=#008000;display-name=Am_Sephiroth;emotes=;id=7d516b7c-de7a-4c8b-ad23-d8880b55d46b;login=am_sephiroth;mod=1;msg-id=subgift;msg-param-months=8;msg-param-recipient-display-name=IntRS;msg-param-recipient-id=189672346;msg-param-recipient-user-name=intrs;msg-param-sender-count=215;msg-param-sub-plan-name=Sick\\sNerd\\sSubscription\\s;msg-param-sub-plan=1000;room-id=49408183;subscriber=1;system-msg=Am_Sephiroth\\sgifted\\sa\\sTier\\s1\\ssub\\sto\\sIntRS!\\sThey\\shave\\sgiven\\s215\\sGift\\sSubs\\sin\\sthe\\schannel!;tmi-sent-ts=1535980032939;turbo=0;user-id=69539403;user-type=mod :tmi.twitch.tv USERNOTICE #sick_nerd");
messageTags = message.getTags();
assertEquals("Am_Sephiroth gifted a Tier 1 sub to IntRS! They have given 215 Gift Subs in the channel!", messageTags.get("system-msg"));
}
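
Escape decoding order matters in the tag parser above: the escaped-backslash sequence must be handled before the single-character escapes, otherwise a raw value containing backslash-backslash-n would be misread as a newline. A standalone illustration of the placeholder approach:

String raw = "a\\\\nb"; // wire form: a \ \ n b (escaped backslash followed by 'n')
String decoded = raw.replace("\\\\", "\0")
        .replace("\\n", "\n")
        .replace("\0", "\\");
// decoded is "a\nb" with a literal backslash, not a newline
assert decoded.equals("a\\nb");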
|
@Override
public String loopbackEthOnu(String target) {
DriverHandler handler = handler();
NetconfController controller = handler.get(NetconfController.class);
MastershipService mastershipService = handler.get(MastershipService.class);
DeviceId ncDeviceId = handler.data().deviceId();
checkNotNull(controller, "Netconf controller is null");
String reply = null;
String[] data = null;
String[] ethId = null;
if (!mastershipService.isLocalMaster(ncDeviceId)) {
log.warn("Not master for {} Use {} to execute command",
ncDeviceId,
mastershipService.getMasterFor(ncDeviceId));
return null;
}
data = target.split(COLON);
if (data.length > TWO) {
log.error("Invalid number of parameters {}", target);
return null;
}
ethId = checkIdString(data[FIRST_PART], THREE);
if (ethId == null) {
log.error("Invalid ETH port identifier {}", data[FIRST_PART]);
return null;
}
if (data.length > ONE) {
if (!LOOPBACKMODES.contains(data[SECOND_PART])) {
log.error("Unsupported parameter: {}", data[SECOND_PART]);
return null;
}
}
try {
StringBuilder request = new StringBuilder();
request.append(ANGLE_LEFT + ONU_ETHPORT_LOOPBACK + SPACE);
request.append(VOLT_NE_NAMESPACE + ANGLE_RIGHT + NEW_LINE);
request.append(buildStartTag(PONLINK_ID, false))
.append(ethId[FIRST_PART])
.append(buildEndTag(PONLINK_ID))
.append(buildStartTag(ONU_ID, false))
.append(ethId[SECOND_PART])
.append(buildEndTag(ONU_ID))
.append(buildStartTag(ETHPORT_ID, false))
.append(ethId[THIRD_PART])
.append(buildEndTag(ETHPORT_ID));
if (data.length > ONE) {
request.append(buildStartTag(LOOPBACK_MODE, false))
.append(data[SECOND_PART])
.append(buildEndTag(LOOPBACK_MODE));
}
request.append(buildEndTag(ONU_ETHPORT_LOOPBACK));
reply = controller
.getDevicesMap()
.get(ncDeviceId)
.getSession()
.doWrappedRpc(request.toString());
} catch (NetconfException e) {
log.error("Cannot communicate to device {} exception {}", ncDeviceId, e);
}
return reply;
}
|
@Test
public void testInvalidEthLoopbackOnuInput() throws Exception {
String target;
String reply;
for (int i = ZERO; i < INVALID_ETHPORT_LOOPBACK_TCS.length; i++) {
target = INVALID_ETHPORT_LOOPBACK_TCS[i];
reply = voltConfig.loopbackEthOnu(target);
assertNull("Incorrect response for INVALID_ETHPORT_LOOPBACK_TCS", reply);
}
}
|
public boolean isNumGroupsLimitReached() {
return _brokerResponse.has(NUM_GROUPS_LIMIT_REACHED) && _brokerResponse.get(NUM_GROUPS_LIMIT_REACHED).asBoolean();
}
|
@Test
public void testIsNumGroupsLimitReached() {
// Run the test
final boolean result = _executionStatsUnderTest.isNumGroupsLimitReached();
// Verify the results
assertTrue(result);
}
|
public void appendDocument(PDDocument destination, PDDocument source) throws IOException
{
if (source.getDocument().isClosed())
{
throw new IOException("Error: source PDF is closed.");
}
if (destination.getDocument().isClosed())
{
throw new IOException("Error: destination PDF is closed.");
}
PDDocumentCatalog srcCatalog = source.getDocumentCatalog();
if (isDynamicXfa(srcCatalog.getAcroForm()))
{
throw new IOException("Error: can't merge source document containing dynamic XFA form content.");
}
PDDocumentInformation destInfo = destination.getDocumentInformation();
PDDocumentInformation srcInfo = source.getDocumentInformation();
mergeInto(srcInfo.getCOSObject(), destInfo.getCOSObject(), Collections.emptySet());
// use the highest version number for the resulting pdf
float destVersion = destination.getVersion();
float srcVersion = source.getVersion();
if (destVersion < srcVersion)
{
destination.setVersion(srcVersion);
}
int pageIndexOpenActionDest = -1;
PDDocumentCatalog destCatalog = destination.getDocumentCatalog();
if (destCatalog.getOpenAction() == null)
{
// PDFBOX-3972: get local dest page index, it must be reassigned after the page cloning
PDDestinationOrAction openAction = null;
try
{
openAction = srcCatalog.getOpenAction();
}
catch (IOException ex)
{
// PDFBOX-4223
LOG.error("Invalid OpenAction ignored", ex);
}
PDDestination openActionDestination = null;
if (openAction instanceof PDActionGoTo)
{
openActionDestination = ((PDActionGoTo) openAction).getDestination();
}
else if (openAction instanceof PDDestination)
{
openActionDestination = (PDDestination) openAction;
}
// note that it can also be something else, e.g. PDActionJavaScript, then do nothing
if (openActionDestination instanceof PDPageDestination)
{
PDPage page = ((PDPageDestination) openActionDestination).getPage();
if (page != null)
{
pageIndexOpenActionDest = srcCatalog.getPages().indexOf(page);
}
}
destCatalog.setOpenAction(openAction);
}
PDFCloneUtility cloner = new PDFCloneUtility(destination);
mergeAcroForm(cloner, destCatalog, srcCatalog);
COSArray destThreads = destCatalog.getCOSObject().getCOSArray(COSName.THREADS);
COSArray srcThreads = (COSArray) cloner.cloneForNewDocument(destCatalog.getCOSObject().getDictionaryObject(
COSName.THREADS));
if (destThreads == null)
{
destCatalog.getCOSObject().setItem(COSName.THREADS, srcThreads);
}
else
{
destThreads.addAll(srcThreads);
}
PDDocumentNameDictionary destNames = destCatalog.getNames();
PDDocumentNameDictionary srcNames = srcCatalog.getNames();
if (srcNames != null)
{
if (destNames == null)
{
destCatalog.getCOSObject().setItem(COSName.NAMES,
cloner.cloneForNewDocument(srcNames.getCOSObject()));
}
else
{
cloner.cloneMerge(srcNames, destNames);
}
}
if (destNames != null && destNames.getCOSObject().containsKey(COSName.ID_TREE))
{
// found in 001031.pdf from PDFBOX-4417 and doesn't belong there
destNames.getCOSObject().removeItem(COSName.ID_TREE);
LOG.warn("Removed /IDTree from /Names dictionary, doesn't belong there");
}
PDDocumentNameDestinationDictionary srcDests = srcCatalog.getDests();
if (srcDests != null)
{
PDDocumentNameDestinationDictionary destDests = destCatalog.getDests();
if (destDests == null)
{
destCatalog.getCOSObject().setItem(COSName.DESTS,
cloner.cloneForNewDocument(srcDests.getCOSObject()));
}
else
{
cloner.cloneMerge(srcDests, destDests);
}
}
PDDocumentOutline srcOutline = srcCatalog.getDocumentOutline();
if (srcOutline != null)
{
PDDocumentOutline destOutline = destCatalog.getDocumentOutline();
if (destOutline == null || destOutline.getFirstChild() == null)
{
PDDocumentOutline cloned = new PDDocumentOutline(
cloner.cloneForNewDocument(srcOutline.getCOSObject()));
destCatalog.setDocumentOutline(cloned);
}
else
{
// search last sibling for dest, because /Last entry is sometimes wrong
PDOutlineItem destLastOutlineItem = destOutline.getFirstChild();
while (true)
{
PDOutlineItem outlineItem = destLastOutlineItem.getNextSibling();
if (outlineItem == null)
{
break;
}
destLastOutlineItem = outlineItem;
}
for (PDOutlineItem item : srcOutline.children())
{
// get each child, clone its dictionary, remove siblings info,
// append outline item created from there
COSDictionary clonedDict = cloner.cloneForNewDocument(item.getCOSObject());
clonedDict.removeItem(COSName.PREV);
clonedDict.removeItem(COSName.NEXT);
PDOutlineItem clonedItem = new PDOutlineItem(clonedDict);
destLastOutlineItem.insertSiblingAfter(clonedItem);
destLastOutlineItem = destLastOutlineItem.getNextSibling();
}
}
}
PageMode destPageMode = destCatalog.getPageMode();
if (destPageMode == null)
{
PageMode srcPageMode = srcCatalog.getPageMode();
destCatalog.setPageMode(srcPageMode);
}
COSDictionary srcLabels = srcCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS);
if (srcLabels != null)
{
int destPageCount = destination.getNumberOfPages();
COSArray destNums;
COSDictionary destLabels = destCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS);
if (destLabels == null)
{
destLabels = new COSDictionary();
destNums = new COSArray();
destLabels.setItem(COSName.NUMS, destNums);
destCatalog.getCOSObject().setItem(COSName.PAGE_LABELS, destLabels);
}
else
{
destNums = (COSArray) destLabels.getDictionaryObject(COSName.NUMS);
}
COSArray srcNums = (COSArray) srcLabels.getDictionaryObject(COSName.NUMS);
if (srcNums != null)
{
int startSize = destNums.size();
for (int i = 0; i < srcNums.size(); i += 2)
{
COSBase base = srcNums.getObject(i);
if (!(base instanceof COSNumber))
{
LOG.error("page labels ignored, index {} should be a number, but is {}", i,
base);
// remove what we added
while (destNums.size() > startSize)
{
destNums.remove(startSize);
}
break;
}
COSNumber labelIndex = (COSNumber) base;
long labelIndexValue = labelIndex.intValue();
destNums.add(COSInteger.get(labelIndexValue + destPageCount));
destNums.add(cloner.cloneForNewDocument(srcNums.getObject(i + 1)));
}
}
}
COSStream destMetadata = destCatalog.getCOSObject().getCOSStream(COSName.METADATA);
COSStream srcMetadata = srcCatalog.getCOSObject().getCOSStream(COSName.METADATA);
if (destMetadata == null && srcMetadata != null)
{
try
{
PDStream newStream = new PDStream(destination, srcMetadata.createInputStream(), (COSName) null);
mergeInto(srcMetadata, newStream.getCOSObject(),
new HashSet<>(Arrays.asList(COSName.FILTER, COSName.LENGTH)));
destCatalog.getCOSObject().setItem(COSName.METADATA, newStream);
}
catch (IOException ex)
{
// PDFBOX-4227 cleartext XMP stream with /Flate
LOG.error("Metadata skipped because it could not be read", ex);
}
}
COSDictionary destOCP = destCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES);
COSDictionary srcOCP = srcCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES);
if (destOCP == null && srcOCP != null)
{
destCatalog.getCOSObject().setItem(COSName.OCPROPERTIES, cloner.cloneForNewDocument(srcOCP));
}
else if (destOCP != null && srcOCP != null)
{
cloner.cloneMerge(srcOCP, destOCP);
}
mergeOutputIntents(cloner, srcCatalog, destCatalog);
// merge logical structure hierarchy
boolean mergeStructTree = false;
int destParentTreeNextKey = -1;
Map<Integer, COSObjectable> srcNumberTreeAsMap = null;
Map<Integer, COSObjectable> destNumberTreeAsMap = null;
PDStructureTreeRoot srcStructTree = srcCatalog.getStructureTreeRoot();
PDStructureTreeRoot destStructTree = destCatalog.getStructureTreeRoot();
if (destStructTree == null && srcStructTree != null)
{
// create a dummy structure tree in the destination, so that the source
// tree is cloned. (We can't just copy the tree reference due to PDFBOX-3999)
destStructTree = new PDStructureTreeRoot();
destCatalog.setStructureTreeRoot(destStructTree);
destStructTree.setParentTree(new PDNumberTreeNode(PDParentTreeValue.class));
// PDFBOX-4429: remove bogus StructParent(s)
for (PDPage page : destCatalog.getPages())
{
page.getCOSObject().removeItem(COSName.STRUCT_PARENTS);
for (PDAnnotation ann : page.getAnnotations())
{
ann.getCOSObject().removeItem(COSName.STRUCT_PARENT);
}
}
}
if (destStructTree != null)
{
PDNumberTreeNode destParentTree = destStructTree.getParentTree();
destParentTreeNextKey = destStructTree.getParentTreeNextKey();
if (destParentTree != null)
{
destNumberTreeAsMap = getNumberTreeAsMap(destParentTree);
if (destParentTreeNextKey < 0)
{
if (destNumberTreeAsMap.isEmpty())
{
destParentTreeNextKey = 0;
}
else
{
destParentTreeNextKey = Collections.max(destNumberTreeAsMap.keySet()) + 1;
}
}
if (destParentTreeNextKey >= 0 && srcStructTree != null)
{
PDNumberTreeNode srcParentTree = srcStructTree.getParentTree();
if (srcParentTree != null)
{
srcNumberTreeAsMap = getNumberTreeAsMap(srcParentTree);
if (!srcNumberTreeAsMap.isEmpty())
{
mergeStructTree = true;
}
}
}
}
}
Map<COSDictionary, COSDictionary> objMapping = new HashMap<>();
int pageIndex = 0;
PDPageTree destinationPageTree = destination.getPages(); // cache PageTree
for (PDPage page : srcCatalog.getPages())
{
PDPage newPage = new PDPage(cloner.cloneForNewDocument(page.getCOSObject()));
if (!mergeStructTree)
{
// PDFBOX-4429: remove bogus StructParent(s)
newPage.getCOSObject().removeItem(COSName.STRUCT_PARENTS);
for (PDAnnotation ann : newPage.getAnnotations())
{
ann.getCOSObject().removeItem(COSName.STRUCT_PARENT);
}
}
newPage.setCropBox(page.getCropBox());
newPage.setMediaBox(page.getMediaBox());
newPage.setRotation(page.getRotation());
PDResources resources = page.getResources();
if (resources != null)
{
// this is smart enough to just create references for resources that are used on multiple pages
newPage.setResources(new PDResources(
cloner.cloneForNewDocument(resources.getCOSObject())));
}
else
{
newPage.setResources(new PDResources());
}
if (mergeStructTree)
{
// add the value of the destination ParentTreeNextKey to every source element
// StructParent(s) value so that these don't overlap with the existing values
updateStructParentEntries(newPage, destParentTreeNextKey);
objMapping.put(page.getCOSObject(), newPage.getCOSObject());
List<PDAnnotation> oldAnnots = page.getAnnotations();
List<PDAnnotation> newAnnots = newPage.getAnnotations();
for (int i = 0; i < oldAnnots.size(); i++)
{
objMapping.put(oldAnnots.get(i).getCOSObject(), newAnnots.get(i).getCOSObject());
}
// TODO update mapping for XObjects
}
destinationPageTree.add(newPage);
if (pageIndex == pageIndexOpenActionDest)
{
// PDFBOX-3972: reassign the page.
// The openAction is either a PDActionGoTo or a PDPageDestination
PDDestinationOrAction openAction = destCatalog.getOpenAction();
PDPageDestination pageDestination;
if (openAction instanceof PDActionGoTo)
{
pageDestination = (PDPageDestination) ((PDActionGoTo) openAction).getDestination();
}
else
{
pageDestination = (PDPageDestination) openAction;
}
pageDestination.setPage(newPage);
}
++pageIndex;
}
if (mergeStructTree)
{
updatePageReferences(cloner, srcNumberTreeAsMap, objMapping);
int maxSrcKey = -1;
for (Map.Entry<Integer, COSObjectable> entry : srcNumberTreeAsMap.entrySet())
{
int srcKey = entry.getKey();
maxSrcKey = Math.max(srcKey, maxSrcKey);
destNumberTreeAsMap.put(destParentTreeNextKey + srcKey,
cloner.cloneForNewDocument(entry.getValue().getCOSObject()));
}
destParentTreeNextKey += maxSrcKey + 1;
PDNumberTreeNode newParentTreeNode = new PDNumberTreeNode(PDParentTreeValue.class);
// Note that all elements are stored flatly. This could become a problem for large files
// when these are opened in a viewer that uses the tagging information.
// If this happens, then PDNumberTreeNode should be improved with a convenience method that
// stores the map into a B+Tree, see https://en.wikipedia.org/wiki/B+_tree
newParentTreeNode.setNumbers(destNumberTreeAsMap);
destStructTree.setParentTree(newParentTreeNode);
destStructTree.setParentTreeNextKey(destParentTreeNextKey);
mergeKEntries(cloner, srcStructTree, destStructTree);
mergeRoleMap(srcStructTree, destStructTree);
mergeIDTree(cloner, srcStructTree, destStructTree);
mergeMarkInfo(destCatalog, srcCatalog);
mergeLanguage(destCatalog, srcCatalog);
mergeViewerPreferences(destCatalog, srcCatalog);
}
}
|
@Test
void testMergeBogusStructParents2() throws IOException
{
PDFMergerUtility pdfMergerUtility = new PDFMergerUtility();
try (PDDocument src = Loader.loadPDF(new File(TARGETPDFDIR, "PDFBOX-4408.pdf"));
PDDocument dst = Loader.loadPDF(new File(TARGETPDFDIR, "PDFBOX-4408.pdf")))
{
src.getDocumentCatalog().setStructureTreeRoot(null);
src.getPage(0).setStructParents(9999);
src.getPage(0).getAnnotations().get(0).setStructParent(9998);
pdfMergerUtility.appendDocument(dst, src);
checkWithNumberTree(dst);
checkForPageOrphans(dst);
}
}
|
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
return api.send(request);
}
|
@Test
public void copyMessages() {
MessageIdsResponse response = bot.execute(new CopyMessages(chatId, chatId, new int[]{forwardMessageId})
.messageThreadId(0)
.removeCaption(true)
.disableNotification(true)
.protectContent(true)
);
assertTrue(response.result().length > 0);
}
|
@Override
public ParameterType<?> parameterType() {
return parameterType;
}
|
@Test
void can_define_parameter_type_converters_with_one_capture_group() throws NoSuchMethodException {
Method method = JavaParameterTypeDefinitionTest.class.getMethod("convert_one_capture_group_to_string",
String.class);
JavaParameterTypeDefinition definition = new JavaParameterTypeDefinition("", "(.*)", method, false, false,
false, lookup);
registry.defineParameterType(definition.parameterType());
Expression cucumberExpression = new ExpressionFactory(registry)
.createExpression("{convert_one_capture_group_to_string}");
List<Argument<?>> test = cucumberExpression.match("test");
assertThat(test.get(0).getValue(), equalTo("convert_one_capture_group_to_string"));
}
|
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public @Nullable <InputT> TransformEvaluator<InputT> forApplication(
AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) throws IOException {
return createEvaluator((AppliedPTransform) application);
}
|
@Test
public void boundedSourceEvaluatorProducesDynamicSplits() throws Exception {
BoundedReadEvaluatorFactory factory = new BoundedReadEvaluatorFactory(context, options, 0L);
when(context.createRootBundle()).thenReturn(bundleFactory.createRootBundle());
int numElements = 10;
Long[] elems = new Long[numElements];
for (int i = 0; i < numElements; i++) {
elems[i] = (long) i;
}
PCollection<Long> read = p.apply(Read.from(new TestSource<>(VarLongCoder.of(), 5, elems)));
SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReads(p);
AppliedPTransform<?, ?, ?> transform = DirectGraphs.getProducer(read);
Collection<CommittedBundle<?>> unreadInputs =
new BoundedReadEvaluatorFactory.InputProvider(context, options)
.getInitialInputs(transform, 1);
Collection<WindowedValue<?>> outputs = new ArrayList<>();
int numIterations = 0;
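// each pass may return unprocessed residuals from dynamic splits; they become the next round's inputs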
while (!unreadInputs.isEmpty()) {
numIterations++;
UncommittedBundle<Long> outputBundle = bundleFactory.createBundle(read);
when(context.createBundle(read)).thenReturn(outputBundle);
Collection<CommittedBundle<?>> newUnreadInputs = new ArrayList<>();
for (CommittedBundle<?> shardBundle : unreadInputs) {
TransformEvaluator<Long> evaluator = factory.forApplication(transform, null);
for (WindowedValue<?> shard : shardBundle.getElements()) {
evaluator.processElement((WindowedValue) shard);
}
TransformResult<Long> result = evaluator.finishBundle();
assertThat(result.getWatermarkHold(), equalTo(BoundedWindow.TIMESTAMP_MAX_VALUE));
assertThat(
Iterables.size(result.getOutputBundles()),
equalTo(Iterables.size(shardBundle.getElements())));
for (UncommittedBundle<?> output : result.getOutputBundles()) {
CommittedBundle<?> committed = output.commit(BoundedWindow.TIMESTAMP_MAX_VALUE);
for (WindowedValue<?> val : committed.getElements()) {
outputs.add(val);
}
}
if (!Iterables.isEmpty(result.getUnprocessedElements())) {
newUnreadInputs.add(shardBundle.withElements((Iterable) result.getUnprocessedElements()));
}
}
unreadInputs = newUnreadInputs;
}
assertThat(numIterations, greaterThan(1));
WindowedValue[] expectedValues = new WindowedValue[numElements];
for (long i = 0L; i < numElements; i++) {
expectedValues[(int) i] = gw(i);
}
assertThat(outputs, Matchers.<WindowedValue<?>>containsInAnyOrder(expectedValues));
}
|
@Override
public boolean intersects(PointList pointList) {
// similar code to LocationIndexTree.checkAdjacent
int len = pointList.size();
if (len == 0)
throw new IllegalArgumentException("PointList must not be empty");
double tmpLat = pointList.getLat(0);
double tmpLon = pointList.getLon(0);
if (len == 1)
return calc.calcNormalizedDist(lat, lon, tmpLat, tmpLon) <= normedDist;
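// walk the polyline segment by segment and test the circle center's distance against each edge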
for (int pointIndex = 1; pointIndex < len; pointIndex++) {
double wayLat = pointList.getLat(pointIndex);
double wayLon = pointList.getLon(pointIndex);
if (calc.validEdgeDistance(lat, lon, tmpLat, tmpLon, wayLat, wayLon)) {
if (calc.calcNormalizedEdgeDistance(lat, lon, tmpLat, tmpLon, wayLat, wayLon) <= normedDist)
return true;
} else {
if (calc.calcNormalizedDist(lat, lon, tmpLat, tmpLon) <= normedDist
|| (pointIndex + 1 == len && calc.calcNormalizedDist(lat, lon, wayLat, wayLon) <= normedDist))
return true;
}
tmpLat = wayLat;
tmpLon = wayLon;
}
return false;
}
|
@Test
public void testIntersectCircleBBox() {
assertTrue(new Circle(10, 10, 120000).intersects(new BBox(9, 11, 8, 9)));
assertFalse(new Circle(10, 10, 110000).intersects(new BBox(9, 11, 8, 9)));
}
|
@SuppressWarnings("unchecked")
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
if (inner != null) {
log.error("Could not configure ListSerializer as the parameter has already been set -- inner: {}", inner);
throw new ConfigException("List serializer was already initialized using a non-default constructor");
}
final String innerSerdePropertyName = isKey ? CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS : CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_INNER_CLASS;
final Object innerSerdeClassOrName = configs.get(innerSerdePropertyName);
if (innerSerdeClassOrName == null) {
throw new ConfigException("Not able to determine the serializer class because it was neither passed via the constructor nor set in the config.");
}
try {
if (innerSerdeClassOrName instanceof String) {
inner = Utils.newInstance((String) innerSerdeClassOrName, Serde.class).serializer();
} else if (innerSerdeClassOrName instanceof Class) {
inner = (Serializer<Inner>) ((Serde) Utils.newInstance((Class) innerSerdeClassOrName)).serializer();
} else {
throw new KafkaException("Could not create a serializer class instance using \"" + innerSerdePropertyName + "\" property.");
}
inner.configure(configs, isKey);
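// fixed-width inner serializers allow a constant-size encoding without per-entry length prefixes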
serStrategy = FIXED_LENGTH_SERIALIZERS.contains(inner.getClass()) ? SerializationStrategy.CONSTANT_SIZE : SerializationStrategy.VARIABLE_SIZE;
} catch (final ClassNotFoundException e) {
throw new ConfigException(innerSerdePropertyName, innerSerdeClassOrName, "Serializer class " + innerSerdeClassOrName + " could not be found.");
}
}
|
@Test
public void testListKeySerializerNoArgConstructorsShouldThrowKafkaExceptionDueInvalidClass() {
props.put(CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS, new FakeObject());
final KafkaException exception = assertThrows(
KafkaException.class,
() -> listSerializer.configure(props, true)
);
assertEquals("Could not create a serializer class instance using \"" + CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS + "\" property.", exception.getMessage());
}
|
public synchronized TopologyDescription describe() {
return internalTopologyBuilder.describe();
}
|
@Test
public void tableNamedMaterializedCountShouldPreserveTopologyStructure() {
final StreamsBuilder builder = new StreamsBuilder();
builder.table("input-topic")
.groupBy((key, value) -> null)
.count(Materialized.<Object, Long, KeyValueStore<Bytes, byte[]>>as("count-store")
.withStoreType(Materialized.StoreType.IN_MEMORY));
final Topology topology = builder.build();
final TopologyDescription describe = topology.describe();
assertEquals(
"Topologies:\n" +
" Sub-topology: 0\n" +
" Source: KSTREAM-SOURCE-0000000001 (topics: [input-topic])\n" +
" --> KTABLE-SOURCE-0000000002\n" +
" Processor: KTABLE-SOURCE-0000000002 (stores: [input-topic-STATE-STORE-0000000000])\n" +
" --> KTABLE-SELECT-0000000003\n" +
" <-- KSTREAM-SOURCE-0000000001\n" +
" Processor: KTABLE-SELECT-0000000003 (stores: [])\n" +
" --> KSTREAM-SINK-0000000004\n" +
" <-- KTABLE-SOURCE-0000000002\n" +
" Sink: KSTREAM-SINK-0000000004 (topic: count-store-repartition)\n" +
" <-- KTABLE-SELECT-0000000003\n" +
"\n" +
" Sub-topology: 1\n" +
" Source: KSTREAM-SOURCE-0000000005 (topics: [count-store-repartition])\n" +
" --> KTABLE-AGGREGATE-0000000006\n" +
" Processor: KTABLE-AGGREGATE-0000000006 (stores: [count-store])\n" +
" --> none\n" +
" <-- KSTREAM-SOURCE-0000000005\n" +
"\n",
describe.toString()
);
topology.internalTopologyBuilder.setStreamsConfig(streamsConfig);
final ProcessorTopology processorTopology = topology.internalTopologyBuilder.setApplicationId("test").buildTopology();
// one store for the KTable source, one for the count operation
assertThat(processorTopology.stateStores().size(), is(2));
// the KTable store is RocksDB (the default)
assertThat(processorTopology.stateStores().get(0).persistent(), is(true));
// count store is in-memory
assertThat(processorTopology.stateStores().get(1).persistent(), is(false));
}
|
@Override
public void filter(final ContainerRequestContext request,
final ContainerResponseContext response) throws IOException {
String id = Optional.ofNullable(request.getHeaderString(REQUEST_ID))
.filter(header -> !header.isEmpty())
.orElseGet(() -> generateRandomUuid().toString());
logger.trace("method={} path={} request_id={} status={} length={}",
request.getMethod(), request.getUriInfo().getPath(), id,
response.getStatus(), response.getLength());
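// echo the id back so callers can correlate responses with their requests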
response.getHeaders().putSingle(REQUEST_ID, id);
}
|
@Test
void addsRandomRequestIdHeader() throws Exception {
requestIdFilter.filter(request, response);
String requestId = (String) headers.getFirst("X-Request-Id");
assertThat(requestId).isNotNull();
assertThat(UUID.fromString(requestId)).isNotNull();
verify(logger).trace("method={} path={} request_id={} status={} length={}",
"GET", "/some/path", requestId, 200, 2048);
}
|
public static boolean validKey(final String key) {
return VALID_KEY_CHARS.matcher(key).matches();
}
|
@Test
public void testValidKeys() throws Exception {
assertTrue(Message.validKey("foo123"));
assertTrue(Message.validKey("foo-bar123"));
assertTrue(Message.validKey("foo_bar123"));
assertTrue(Message.validKey("foo.bar123"));
assertTrue(Message.validKey("foo@bar"));
assertTrue(Message.validKey("123"));
assertTrue(Message.validKey(""));
assertFalse(Message.validKey("foo bar"));
assertFalse(Message.validKey("foo+bar"));
assertFalse(Message.validKey("foo$bar"));
assertFalse(Message.validKey(" "));
}
|
@VisibleForTesting
static Pattern convertToPattern(String scopeOrNameComponent) {
final String[] split = scopeOrNameComponent.split(LIST_DELIMITER);
final String rawPattern =
Arrays.stream(split)
.map(s -> s.replaceAll("\\.", "\\."))
.map(s -> s.replaceAll("\\*", ".*"))
.collect(Collectors.joining("|", "(", ")"));
return Pattern.compile(rawPattern);
}
|
@Test
void testConvertToPatternMultiple() {
final Pattern pattern = DefaultMetricFilter.convertToPattern("numRecords*,numBytes*");
assertThat(pattern.toString()).isEqualTo("(numRecords.*|numBytes.*)");
assertThat(pattern.matcher("numRecordsIn").matches()).isTrue();
assertThat(pattern.matcher("numBytesOut").matches()).isTrue();
assertThat(pattern.matcher("numBytes").matches()).isTrue();
assertThat(pattern.matcher("hello").matches()).isFalse();
}
|
public static ShowResultSet execute(ShowStmt statement, ConnectContext context) {
return GlobalStateMgr.getCurrentState().getShowExecutor().showExecutorVisitor.visit(statement, context);
}
|
@Test
public void testShowVariable2() throws AnalysisException, DdlException {
ShowVariablesStmt stmt = new ShowVariablesStmt(SetType.VERBOSE, null);
ShowResultSet resultSet = ShowExecutor.execute(stmt, ctx);
Assert.assertEquals(4, resultSet.getMetaData().getColumnCount());
Assert.assertEquals("Variable_name", resultSet.getMetaData().getColumn(0).getName());
Assert.assertEquals("Value", resultSet.getMetaData().getColumn(1).getName());
Assert.assertEquals("Default_value", resultSet.getMetaData().getColumn(2).getName());
Assert.assertEquals("Is_changed", resultSet.getMetaData().getColumn(3).getName());
Assert.assertTrue(resultSet.getResultRows().size() > 0);
Assert.assertEquals(4, resultSet.getResultRows().get(0).size());
ShowVariablesStmt stmt2 = new ShowVariablesStmt(SetType.VERBOSE, "query_%");
ShowResultSet resultSet2 = ShowExecutor.execute(stmt2, ctx);
Assert.assertEquals(4, resultSet2.getMetaData().getColumnCount());
Assert.assertTrue(resultSet2.getResultRows().size() > 0);
Assert.assertEquals(4, resultSet2.getResultRows().get(0).size());
}
|
public synchronized Topology addSource(final String name,
final String... topics) {
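// the nulls leave offset-reset policy, timestamp extractor, and key/value deserializers at their defaults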
internalTopologyBuilder.addSource(null, name, null, null, null, topics);
return this;
}
|
@Test
public void shouldNotAllowZeroTopicsWhenAddingSource() {
assertThrows(TopologyException.class, () -> topology.addSource("source"));
}
|
public static CreateSourceProperties from(final Map<String, Literal> literals) {
try {
return new CreateSourceProperties(literals, DurationParser::parse, false);
} catch (final ConfigException e) {
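// KSQL surfaces these to users as properties rather than Kafka configurations, so rewrite the message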
final String message = e.getMessage().replace(
"configuration",
"property"
);
throw new KsqlException(message, e);
}
}
|
@Test
public void shouldThrowIfKeyFormatAndFormatProvided() {
// When:
final Exception e = assertThrows(
KsqlException.class,
() -> CreateSourceProperties.from(
ImmutableMap.<String, Literal>builder()
.putAll(MINIMUM_VALID_PROPS)
.put(KEY_FORMAT_PROPERTY, new StringLiteral("KAFKA"))
.put(FORMAT_PROPERTY, new StringLiteral("JSON"))
.build())
);
// Then:
assertThat(e.getMessage(), containsString("Cannot supply both 'KEY_FORMAT' and 'FORMAT' properties, "
+ "as 'FORMAT' sets both key and value formats."));
assertThat(e.getMessage(), containsString("Either use just 'FORMAT', or use 'KEY_FORMAT' and 'VALUE_FORMAT'."));
}
|
public String transform() throws ScanException {
StringBuilder stringBuilder = new StringBuilder();
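// the stack tracks nodes currently being expanded so circular variable references can be detected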
compileNode(node, stringBuilder, new Stack<Node>());
return stringBuilder.toString();
}
|
@Test
public void nestedVariable() throws ScanException {
String input = "a${k${zero}}b";
Node node = makeNode(input);
NodeToStringTransformer nodeToStringTransformer = new NodeToStringTransformer(node, propertyContainer0);
assertEquals("av0b", nodeToStringTransformer.transform());
}
|