focal_method | test_case
---|---
@Override
public byte[] serialize(final String topic, final T data) {
try {
return delegate.serialize(topic, data);
} catch (final RuntimeException e) {
processingLogger.error(new SerializationError<>(e, Optional.of(data), topic, isKey));
throw e;
}
}
|
@Test
public void shouldLogOnException() {
// Given:
when(delegate.serialize(any(), any())).thenThrow(ERROR);
// When:
assertThrows(
RuntimeException.class,
() -> serializer.serialize("t", SOME_ROW)
);
// Then:
verify(processingLogger).error(new SerializationError<>(ERROR, Optional.of(SOME_ROW), "t", false));
}
|
public long availableMemory() {
lock.lock();
try {
return this.nonPooledAvailableMemory + freeSize() * (long) this.poolableSize;
} finally {
lock.unlock();
}
}
|
@Test
public void testStressfulSituation() throws Exception {
int numThreads = 10;
final int iterations = 50000;
final int poolableSize = 1024;
final long totalMemory = numThreads / 2 * poolableSize;
final BufferPool pool = new BufferPool(totalMemory, poolableSize, metrics, time, metricGroup);
List<StressTestThread> threads = new ArrayList<>();
for (int i = 0; i < numThreads; i++)
threads.add(new StressTestThread(pool, iterations));
for (StressTestThread thread : threads)
thread.start();
for (StressTestThread thread : threads)
thread.join();
for (StressTestThread thread : threads)
assertTrue(thread.success.get(), "Thread should have completed all iterations successfully.");
assertEquals(totalMemory, pool.availableMemory());
}
|
@Override
public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) {
AbstractWALEvent result;
byte[] bytes = new byte[data.remaining()];
data.get(bytes);
String dataText = new String(bytes, StandardCharsets.UTF_8);
if (decodeWithTX) {
result = decodeDataWithTX(dataText);
} else {
result = decodeDataIgnoreTX(dataText);
}
result.setLogSequenceNumber(logSequenceNumber);
return result;
}
|
@Test
void assertDecodeUnknownTableType() {
ByteBuffer data = ByteBuffer.wrap("unknown".getBytes());
assertThat(new MppdbDecodingPlugin(null, false, false).decode(data, logSequenceNumber), instanceOf(PlaceholderEvent.class));
}
|
public void close() {
close(Long.MAX_VALUE, false);
}
|
@Test
public void shouldReturnFalseOnCloseWithCloseOptionWithLeaveGroupTrueWhenThreadsHaventTerminated() throws Exception {
prepareStreams();
prepareStreamThread(streamThreadOne, 1);
prepareStreamThread(streamThreadTwo, 2);
prepareTerminableThread(streamThreadOne);
final MockClientSupplier mockClientSupplier = spy(MockClientSupplier.class);
when(mockClientSupplier.getAdmin(any())).thenReturn(adminClient);
final KafkaStreams.CloseOptions closeOptions = new KafkaStreams.CloseOptions();
closeOptions.timeout(Duration.ofMillis(10L));
closeOptions.leaveGroup(true);
try (final KafkaStreams streams = new KafkaStreams(getBuilderWithSource().build(), props, mockClientSupplier)) {
assertFalse(streams.close(closeOptions));
}
}
|
public static Descriptors.MethodDescriptor findMethodDescriptor(String base64ProtobufDescriptor, String serviceName,
String methodName) throws InvalidProtocolBufferException, Descriptors.DescriptorValidationException {
// serviceName may be a fully-qualified name; extract the short version for the later findServiceByName() call.
String shortServiceName = serviceName;
if (serviceName.contains(".")) {
shortServiceName = serviceName.substring(serviceName.lastIndexOf(".") + 1);
}
// Find descriptor with this service name as symbol.
Descriptors.FileDescriptor fd = findFileDescriptorBySymbol(base64ProtobufDescriptor, shortServiceName);
Descriptors.ServiceDescriptor sd = fd.findServiceByName(shortServiceName);
return sd.findMethodByName(methodName);
}
|
@Test
void testFindMethodDescriptor() {
// This is the simple HelloService with no dependencies.
String base64ProtobufDescriptor = "CrICCg5oZWxsby12MS5wcm90bxIgaW8uZ2l0aHViLm1pY3JvY2tzLmdycGMuaGVsbG8udjEiSAoMSGVsbG9SZXF1ZXN0EhwKCWZpcnN0bmFtZRgBIAEoCVIJZmlyc3RuYW1lEhoKCGxhc3RuYW1lGAIgASgJUghsYXN0bmFtZSIrCg1IZWxsb1Jlc3BvbnNlEhoKCGdyZWV0aW5nGAEgASgJUghncmVldGluZzJ7CgxIZWxsb1NlcnZpY2USawoIZ3JlZXRpbmcSLi5pby5naXRodWIubWljcm9ja3MuZ3JwYy5oZWxsby52MS5IZWxsb1JlcXVlc3QaLy5pby5naXRodWIubWljcm9ja3MuZ3JwYy5oZWxsby52MS5IZWxsb1Jlc3BvbnNlQgJQAWIGcHJvdG8z";
Descriptors.MethodDescriptor desc = null;
try {
desc = GrpcUtil.findMethodDescriptor(base64ProtobufDescriptor, "HelloService", "greeting");
} catch (Exception e) {
fail("No exception should be thrown while parsing protobuf descriptor and searching service");
}
assertNotNull(desc);
assertEquals("io.github.microcks.grpc.hello.v1.HelloService.greeting", desc.getFullName());
}
|
public static Range<Comparable<?>> safeClosed(final Comparable<?> lowerEndpoint, final Comparable<?> upperEndpoint) {
try {
return Range.closed(lowerEndpoint, upperEndpoint);
} catch (final ClassCastException ex) {
Optional<Class<?>> clazz = getTargetNumericType(Arrays.asList(lowerEndpoint, upperEndpoint));
if (!clazz.isPresent()) {
throw ex;
}
return Range.closed(parseNumberByClazz(lowerEndpoint.toString(), clazz.get()), parseNumberByClazz(upperEndpoint.toString(), clazz.get()));
}
}
|
@Test
void assertSafeClosedForLong() {
Range<Comparable<?>> range = SafeNumberOperationUtils.safeClosed(12, 5001L);
assertThat(range.lowerEndpoint(), is(12L));
assertThat(range.upperEndpoint(), is(5001L));
}
|
public int minValue()
{
final int initialValue = this.initialValue;
int min = 0 == size ? initialValue : Integer.MAX_VALUE;
final int[] entries = this.entries;
@DoNotSub final int length = entries.length;
for (@DoNotSub int i = 1; i < length; i += 2)
{
final int value = entries[i];
if (initialValue != value)
{
min = Math.min(min, value);
}
}
return min;
}
|
@Test
void shouldHaveNoMinValueForEmptyCollection()
{
assertEquals(INITIAL_VALUE, map.minValue());
}
|
public List<StreamPartitionWithWatermark> readStreamPartitionsWithWatermark()
throws InvalidProtocolBufferException {
LOG.debug(
"Reading stream partitions from metadata table: "
+ getFullStreamPartitionPrefix().toStringUtf8());
Filter filterForWatermark =
FILTERS
.chain()
.filter(Filters.FILTERS.limit().cellsPerColumn(1))
.filter(FILTERS.family().exactMatch(MetadataTableAdminDao.CF_WATERMARK))
.filter(FILTERS.qualifier().exactMatch(MetadataTableAdminDao.QUALIFIER_DEFAULT));
Filter filterForLock =
FILTERS
.chain()
.filter(Filters.FILTERS.limit().cellsPerColumn(1))
.filter(FILTERS.family().exactMatch(MetadataTableAdminDao.CF_LOCK))
.filter(FILTERS.qualifier().exactMatch(MetadataTableAdminDao.QUALIFIER_DEFAULT));
Query query =
Query.create(tableId)
.prefix(getFullStreamPartitionPrefix())
.filter(FILTERS.interleave().filter(filterForWatermark).filter(filterForLock));
ServerStream<Row> rows = dataClient.readRows(query);
List<StreamPartitionWithWatermark> partitions = new ArrayList<>();
for (Row row : rows) {
if (!isRowLocked(row)) {
continue;
}
Instant watermark = MetadataTableEncoder.parseWatermarkFromRow(row);
if (watermark == null) {
continue;
}
ByteStringRange partition = convertStreamPartitionRowKeyToPartition(row.getKey());
partitions.add(new StreamPartitionWithWatermark(partition, watermark));
}
return partitions;
}
|
@Test
public void testReadStreamPartitionsWithWatermark() throws InvalidProtocolBufferException {
ByteStringRange lockedPartition = ByteStringRange.create("", "a");
PartitionRecord partitionRecord =
new PartitionRecord(
lockedPartition,
Instant.now(),
UniqueIdGenerator.getNextId(),
Instant.now(),
Collections.emptyList(),
null);
metadataTableDao.lockAndRecordPartition(partitionRecord);
// Only one row both has a watermark and is locked.
List<StreamPartitionWithWatermark> streamPartitionsWithWatermark =
metadataTableDao.readStreamPartitionsWithWatermark();
assertEquals(1, streamPartitionsWithWatermark.size());
assertEquals(
partitionRecord.getParentLowWatermark(),
streamPartitionsWithWatermark.get(0).getWatermark());
assertEquals(
partitionRecord.getPartition(), streamPartitionsWithWatermark.get(0).getPartition());
Instant watermark = Instant.now();
// Update the watermark on the locked partition.
metadataTableDao.updateWatermark(lockedPartition, watermark, null);
// Watermark is updated.
streamPartitionsWithWatermark = metadataTableDao.readStreamPartitionsWithWatermark();
assertEquals(1, streamPartitionsWithWatermark.size());
assertEquals(watermark, streamPartitionsWithWatermark.get(0).getWatermark());
}
|
void deleteObsoleteFiles() {
final long rrdDiskUsage = CounterStorage.deleteObsoleteCounterFiles(getApplication());
final long serGzDiskUsage = JRobin.deleteObsoleteJRobinFiles(getApplication());
diskUsage = rrdDiskUsage + serGzDiskUsage;
// the size of the "last_shutdown.html" file is missing, but we are not going to quibble over that
LOG.debug("Obsolete files deleted. JavaMelody disk usage: " + diskUsage / 1024 + " KB");
}
|
@Test
public void testDeleteObsoleteFiles() {
final Collector collector = createCollectorWithOneCounter();
collector.deleteObsoleteFiles();
}
|
@Override
protected String convertFromString(final String value) throws ConversionException {
final Path path = Paths.get(value);
if (path.getParent() != null) {
throw new ConversionException(
String.format("%s must be a filename only (%s)", KEY, path));
}
return value;
}
|
@Test
void testConvertFromString() throws Exception {
final String expectedJarId = "test.jar";
final String jarId = jarIdPathParameter.convertFromString(expectedJarId);
assertThat(jarId).isEqualTo(expectedJarId);
}
|
@Override
public Map<String, String> responseHeaders() {
return unmodifiableMap(responseHeaders);
}
|
@Test
public void shouldReturnUnmodifiableResponseHeaders() throws Exception {
DefaultGoApiResponse response = new DefaultGoApiResponse(0);
Map<String, String> headers = response.responseHeaders();
try {
headers.put("new-key", "new-value");
fail("Should not allow modification of response headers");
} catch (UnsupportedOperationException e) {
}
try {
headers.remove("key");
fail("Should not allow modification of response headers");
} catch (UnsupportedOperationException e) {
}
}
|
public FEELFnResult<Boolean> invoke(@ParameterName( "list" ) List list) {
if ( list == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null"));
}
boolean result = true;
boolean containsNull = false;
// Spec. definition: return false if any item is false, else true if all items are true, else null
for ( final Object element : list ) {
if (element != null && !(element instanceof Boolean)) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not a Boolean"));
} else {
if (element != null) {
result &= (Boolean) element;
} else if (!containsNull) {
containsNull = true;
}
}
}
if (containsNull && result) {
return FEELFnResult.ofResult( null );
} else {
return FEELFnResult.ofResult( result );
}
}
|
@Test
void invokeArrayParamReturnNull() {
FunctionTestUtil.assertResultNull(allFunction.invoke(new Object[]{Boolean.TRUE, null, Boolean.TRUE}));
}
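A minimal sketch of the three outcomes of the spec rule above, assuming the KIE DMN AllFunction.INSTANCE singleton (class and variable names here are illustrative, not from the source):
import java.util.Arrays;
import org.kie.dmn.feel.runtime.functions.AllFunction;
import org.kie.dmn.feel.runtime.functions.FEELFnResult;
class AllFunctionSketch {
    public static void main(String[] args) {
        AllFunction all = AllFunction.INSTANCE;
        // all items true -> result wraps Boolean.TRUE
        FEELFnResult<Boolean> allTrue = all.invoke(Arrays.asList(true, true));
        // any false item -> result wraps Boolean.FALSE, even with nulls present
        FEELFnResult<Boolean> anyFalse = all.invoke(Arrays.asList(true, false, null));
        // only true and null items -> result wraps null, per the spec definition
        FEELFnResult<Boolean> containsNull = all.invoke(Arrays.asList(true, null));
    }
}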
|
@KafkaClientInternalsDependant
@VisibleForTesting
Mono<Map<TopicPartition, Long>> listOffsetsUnsafe(Collection<TopicPartition> partitions, OffsetSpec offsetSpec) {
if (partitions.isEmpty()) {
return Mono.just(Map.of());
}
Function<Collection<TopicPartition>, Mono<Map<TopicPartition, Long>>> call =
parts -> {
ListOffsetsResult r = client.listOffsets(parts.stream().collect(toMap(tp -> tp, tp -> offsetSpec)));
Map<TopicPartition, KafkaFuture<ListOffsetsResultInfo>> perPartitionResults = new HashMap<>();
parts.forEach(p -> perPartitionResults.put(p, r.partitionResult(p)));
return toMonoWithExceptionFilter(perPartitionResults, UnknownTopicOrPartitionException.class)
.map(offsets -> offsets.entrySet().stream()
// filtering partitions for which offsets were not found
.filter(e -> e.getValue().offset() >= 0)
.collect(toMap(Map.Entry::getKey, e -> e.getValue().offset())));
};
return partitionCalls(
partitions,
200,
call,
mapMerger()
);
}
|
@Test
void testListOffsetsUnsafe() {
String topic = UUID.randomUUID().toString();
createTopics(new NewTopic(topic, 2, (short) 1));
// sending messages to have non-zero offsets for tp
try (var producer = KafkaTestProducer.forKafka(kafka)) {
producer.send(new ProducerRecord<>(topic, 1, "k", "v"));
producer.send(new ProducerRecord<>(topic, 1, "k", "v"));
}
var requestedPartitions = List.of(
new TopicPartition(topic, 0),
new TopicPartition(topic, 1)
);
StepVerifier.create(reactiveAdminClient.listOffsetsUnsafe(requestedPartitions, OffsetSpec.earliest()))
.assertNext(offsets -> {
Assertions.assertThat(offsets)
.hasSize(2)
.containsEntry(new TopicPartition(topic, 0), 0L)
.containsEntry(new TopicPartition(topic, 1), 0L);
})
.verifyComplete();
StepVerifier.create(reactiveAdminClient.listOffsetsUnsafe(requestedPartitions, OffsetSpec.latest()))
.assertNext(offsets -> {
Assertions.assertThat(offsets)
.hasSize(2)
.containsEntry(new TopicPartition(topic, 0), 0L)
.containsEntry(new TopicPartition(topic, 1), 2L);
})
.verifyComplete();
}
|
public static URI parse(String featureIdentifier) {
requireNonNull(featureIdentifier, "featureIdentifier may not be null");
if (featureIdentifier.isEmpty()) {
throw new IllegalArgumentException("featureIdentifier may not be empty");
}
// Legacy from the Cucumber Eclipse plugin
// Older versions of Cucumber allowed it.
if (CLASSPATH_SCHEME_PREFIX.equals(featureIdentifier)) {
return rootPackageUri();
}
if (nonStandardPathSeparatorInUse(featureIdentifier)) {
String standardized = replaceNonStandardPathSeparator(featureIdentifier);
return parseAssumeFileScheme(standardized);
}
if (isWindowsOS() && pathContainsWindowsDrivePattern(featureIdentifier)) {
return parseAssumeFileScheme(featureIdentifier);
}
if (probablyURI(featureIdentifier)) {
return parseProbableURI(featureIdentifier);
}
return parseAssumeFileScheme(featureIdentifier);
}
|
@Test
void can_parse_empty_feature_path() {
IllegalArgumentException exception = assertThrows(
IllegalArgumentException.class,
() -> FeaturePath.parse(""));
assertThat(exception.getMessage(), is("featureIdentifier may not be empty"));
}
|
@Override
public Batch toBatch() {
return new SparkBatch(
sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
}
|
@TestTemplate
public void testUnpartitionedTruncateString() throws Exception {
createUnpartitionedTable(spark, tableName);
SparkScanBuilder builder = scanBuilder();
TruncateFunction.TruncateString function = new TruncateFunction.TruncateString();
UserDefinedScalarFunc udf = toUDF(function, expressions(intLit(4), fieldRef("data")));
Predicate predicate = new Predicate("<>", expressions(udf, stringLit("data")));
pushFilters(builder, predicate);
Batch scan = builder.build().toBatch();
assertThat(scan.planInputPartitions().length).isEqualTo(10);
// NOT NotEqual
builder = scanBuilder();
predicate = new Not(predicate);
pushFilters(builder, predicate);
scan = builder.build().toBatch();
assertThat(scan.planInputPartitions().length).isEqualTo(10);
}
|
Tracer(
Propagation.Factory propagationFactory,
SpanHandler spanHandler,
PendingSpans pendingSpans,
Sampler sampler,
CurrentTraceContext currentTraceContext,
boolean traceId128Bit,
boolean supportsJoin,
boolean alwaysSampleLocal,
AtomicBoolean noop
) {
this.propagationFactory = propagationFactory;
this.spanHandler = spanHandler;
this.pendingSpans = pendingSpans;
this.sampler = sampler;
this.currentTraceContext = currentTraceContext;
this.traceId128Bit = traceId128Bit;
this.supportsJoin = supportsJoin;
this.alwaysSampleLocal = alwaysSampleLocal;
this.noop = noop;
}
|
@Test void sampler() {
Sampler sampler = new Sampler() {
@Override public boolean isSampled(long traceId) {
return false;
}
};
tracer = Tracing.newBuilder().sampler(sampler).build().tracer();
assertThat(tracer.sampler)
.isSameAs(sampler);
}
|
public void processOnce() throws IOException {
// set status of query to OK.
ctx.getState().reset();
executor = null;
// reset sequence id of MySQL protocol
final MysqlChannel channel = ctx.getMysqlChannel();
channel.setSequenceId(0);
// read packet from channel
try {
packetBuf = channel.fetchOnePacket();
if (packetBuf == null) {
throw new RpcException(ctx.getRemoteIP(), "Error happened when receiving packet.");
}
} catch (AsynchronousCloseException e) {
// when this happens, the timeout checker has closed this channel;
// the killed flag in ctx has already been set, so just return
return;
}
// dispatch
dispatch();
// finalize
finalizeCommand();
ctx.setCommand(MysqlCommand.COM_SLEEP);
}
|
@Test
public void testUnknownCommand() throws Exception {
MysqlSerializer serializer = MysqlSerializer.newInstance();
serializer.writeInt1(101);
ByteBuffer packet = serializer.toByteBuffer();
ConnectContext ctx = initMockContext(mockChannel(packet), GlobalStateMgr.getCurrentState());
ConnectProcessor processor = new ConnectProcessor(ctx);
processor.processOnce();
Assert.assertEquals(MysqlCommand.COM_SLEEP, myContext.getCommand());
Assert.assertTrue(myContext.getState().toResponsePacket() instanceof MysqlErrPacket);
Assert.assertFalse(myContext.isKilled());
}
|
public CompletableFuture<Integer> inferSourceParallelismAsync(
int parallelismInferenceUpperBound, long dataVolumePerTask) {
return context.supplyAsync(
() -> {
if (!(source instanceof DynamicParallelismInference)) {
return ExecutionConfig.PARALLELISM_DEFAULT;
}
DynamicParallelismInference parallelismInference =
(DynamicParallelismInference) source;
try {
return parallelismInference.inferParallelism(
new DynamicParallelismInference.Context() {
@Override
public Optional<DynamicFilteringInfo>
getDynamicFilteringInfo() {
return getSourceDynamicFilteringInfo();
}
@Override
public int getParallelismInferenceUpperBound() {
return parallelismInferenceUpperBound;
}
@Override
public long getDataVolumePerTask() {
return dataVolumePerTask;
}
});
} catch (Throwable e) {
LOG.error(
"Unexpected error occurred when dynamically inferring source parallelism.",
e);
return ExecutionConfig.PARALLELISM_DEFAULT;
}
})
.thenApply(future -> (Integer) future);
}
|
@Test
public void testInferSourceParallelismAsync() throws Exception {
final String listeningID = "testListeningID";
class TestDynamicFilteringEvent implements SourceEvent, DynamicFilteringInfo {}
CoordinatorStore store = new CoordinatorStoreImpl();
store.putIfAbsent(listeningID, new SourceEventWrapper(new TestDynamicFilteringEvent()));
final SourceCoordinator<?, ?> coordinator =
new SourceCoordinator<>(
OPERATOR_NAME,
createMockSource(),
context,
store,
WatermarkAlignmentParams.WATERMARK_ALIGNMENT_DISABLED,
listeningID);
assertThat(coordinator.inferSourceParallelismAsync(2, 1).get()).isEqualTo(2);
}
|
@Override
public void run(T configuration, Environment environment) throws Exception {
final Map<String, Map<String, String>> options = getViewConfiguration(configuration);
for (ViewRenderer viewRenderer : viewRenderers) {
final Map<String, String> viewOptions = options.get(viewRenderer.getConfigurationKey());
viewRenderer.configure(viewOptions == null ? Map.of() : viewOptions);
}
environment.jersey().register(new ViewMessageBodyWriter(environment.metrics(), viewRenderers));
}
|
@Test
void addsTheViewMessageBodyWriterToTheEnvironment() throws Exception {
new ViewBundle<>().run(new MyConfiguration(), environment);
verify(jerseyEnvironment).register(any(ViewMessageBodyWriter.class));
}
|
public static Sensor processLatencySensor(final String threadId,
final String taskId,
final StreamsMetricsImpl streamsMetrics) {
return avgAndMaxSensor(
threadId,
taskId,
PROCESS_LATENCY,
PROCESS_AVG_LATENCY_DESCRIPTION,
PROCESS_MAX_LATENCY_DESCRIPTION,
RecordingLevel.DEBUG,
streamsMetrics
);
}
|
@Test
public void shouldGetProcessLatencySensor() {
final String operation = "process-latency";
when(streamsMetrics.taskLevelSensor(THREAD_ID, TASK_ID, operation, RecordingLevel.DEBUG))
.thenReturn(expectedSensor);
final String avgLatencyDescription = "The average latency of calls to process";
final String maxLatencyDescription = "The maximum latency of calls to process";
when(streamsMetrics.taskLevelTagMap(THREAD_ID, TASK_ID)).thenReturn(tagMap);
try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) {
final Sensor sensor = TaskMetrics.processLatencySensor(THREAD_ID, TASK_ID, streamsMetrics);
streamsMetricsStaticMock.verify(
() -> StreamsMetricsImpl.addAvgAndMaxToSensor(
expectedSensor,
TASK_LEVEL_GROUP,
tagMap,
operation,
avgLatencyDescription,
maxLatencyDescription
)
);
assertThat(sensor, is(expectedSensor));
}
}
|
public static RestServerConfig forPublic(Integer rebalanceTimeoutMs, Map<?, ?> props) {
return new PublicConfig(rebalanceTimeoutMs, props);
}
|
@Test
public void testAdminListenersNotAllowingBlankStrings() {
Map<String, String> props = new HashMap<>();
props.put(RestServerConfig.ADMIN_LISTENERS_CONFIG, "http://a.b:9999, ,https://a.b:9999");
assertThrows(ConfigException.class, () -> RestServerConfig.forPublic(null, props));
}
|
public TopicRouteData getAnExistTopicRouteData(final String topic) {
return this.topicRouteTable.get(topic);
}
|
@Test
public void testGetAnExistTopicRouteData() {
topicRouteTable.put(topic, createTopicRouteData());
TopicRouteData actual = mqClientInstance.getAnExistTopicRouteData(topic);
assertNotNull(actual);
assertNotNull(actual.getQueueDatas());
assertNotNull(actual.getBrokerDatas());
}
|
public static boolean getBooleanWithAltKeys(org.apache.flink.configuration.Configuration conf,
ConfigProperty<?> configProperty) {
Option<String> rawValue = getRawValueWithAltKeys(conf, configProperty);
boolean defaultValue = configProperty.hasDefaultValue()
? Boolean.parseBoolean(configProperty.defaultValue().toString()) : false;
return rawValue.map(Boolean::parseBoolean).orElse(defaultValue);
}
|
@Test
public void testGetBooleanWithAltKeys() {
Configuration flinkConf = new Configuration();
assertEquals(Boolean.parseBoolean(TEST_BOOLEAN_CONFIG_PROPERTY.defaultValue()),
FormatUtils.getBooleanWithAltKeys(flinkConf, TEST_BOOLEAN_CONFIG_PROPERTY));
boolean setValue = !Boolean.parseBoolean(TEST_BOOLEAN_CONFIG_PROPERTY.defaultValue());
flinkConf.setBoolean(TEST_BOOLEAN_CONFIG_PROPERTY.key(), setValue);
assertEquals(setValue,
FormatUtils.getBooleanWithAltKeys(flinkConf, TEST_BOOLEAN_CONFIG_PROPERTY));
flinkConf = new Configuration();
flinkConf.setBoolean(TEST_BOOLEAN_CONFIG_PROPERTY.getAlternatives().get(0), setValue);
assertEquals(setValue,
FormatUtils.getBooleanWithAltKeys(flinkConf, TEST_BOOLEAN_CONFIG_PROPERTY));
}
|
public static String validateColumnName(@Nullable String columnName) {
String name = requireNonNull(columnName, "Column name cannot be null");
checkDbIdentifierCharacters(columnName, "Column name");
return name;
}
|
@Test
public void fail_when_column_name_is_in_upper_case() {
assertThatThrownBy(() -> validateColumnName("DATE_IN_MS"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Column name must be lower case and contain only alphanumeric chars or '_', got 'DATE_IN_MS'");
}
|
static JavaType constructType(Type type) {
try {
return constructTypeInner(type);
} catch (Exception e) {
throw new InvalidDataTableTypeException(type, e);
}
}
|
@Test
void optional_is_optional_type() {
JavaType javaType = TypeFactory.constructType(OPTIONAL_NUMBER);
assertThat(javaType.getClass(), equalTo(TypeFactory.OptionalType.class));
assertThat(javaType.getOriginal(), is(OPTIONAL_NUMBER));
}
|
@Override
public void updateRouter(Router osRouter) {
checkNotNull(osRouter, ERR_NULL_ROUTER);
checkArgument(!Strings.isNullOrEmpty(osRouter.getId()), ERR_NULL_ROUTER_ID);
osRouterStore.updateRouter(osRouter);
log.info(String.format(MSG_ROUTER, osRouter.getId(), MSG_UPDATED));
}
|
@Test(expected = IllegalArgumentException.class)
public void testUpdateUnregisteredRouter() {
target.updateRouter(ROUTER);
}
|
public static Object newInstance(String name) {
try {
return forName(name).getDeclaredConstructor().newInstance();
} catch (InstantiationException
| IllegalAccessException
| InvocationTargetException
| NoSuchMethodException e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
|
@Test
void testNewInstance() {
HelloServiceImpl0 instance = (HelloServiceImpl0) ClassUtils.newInstance(HelloServiceImpl0.class.getName());
Assertions.assertEquals("Hello world!", instance.sayHello());
}
|
@Override public HashSlotCursor8byteKey cursor() {
return new Cursor();
}
|
@Test
public void testCursor_key() {
final long key = random.nextLong();
insert(key);
HashSlotCursor8byteKey cursor = hsa.cursor();
cursor.advance();
assertEquals(key, cursor.key());
}
|
static byte[] generateRandomBytes(int size) {
byte[] bytes = new byte[size];
secureRandom().nextBytes(bytes);
return bytes;
}
|
@Test
public void testGenerateRandomBytes() {
assertArrayEquals(Wallet.generateRandomBytes(0), (new byte[] {}));
assertEquals(Wallet.generateRandomBytes(10).length, (10));
}
|
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
int laneCount = 1;
if (way.hasTag("lanes")) {
String noLanes = way.getTag("lanes");
String[] noLanesTok = noLanes.split(";|\\.");
if (noLanesTok.length > 0) {
try {
int noLanesInt = Integer.parseInt(noLanesTok[0]);
// there was a proposal with negative lanes but I cannot find it
if (noLanesInt < 0)
laneCount = 1;
else if (noLanesInt > 6)
laneCount = 6;
else
laneCount = noLanesInt;
} catch (NumberFormatException ex) {
// ignore if no number
}
}
}
lanesEnc.setInt(false, edgeId, edgeIntAccess, laneCount);
}
|
@Test
void notTagged() {
ReaderWay readerWay = new ReaderWay(1);
EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
int edgeId = 0;
parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
Assertions.assertEquals(1, lanesEnc.getInt(false, edgeId, edgeIntAccess));
}
|
@Override
public Long createJobLog(Long jobId, LocalDateTime beginTime,
String jobHandlerName, String jobHandlerParam, Integer executeIndex) {
JobLogDO log = JobLogDO.builder().jobId(jobId).handlerName(jobHandlerName)
.handlerParam(jobHandlerParam).executeIndex(executeIndex)
.beginTime(beginTime).status(JobLogStatusEnum.RUNNING.getStatus()).build();
jobLogMapper.insert(log);
return log.getId();
}
|
@Test
public void testCreateJobLog() {
// prepare parameters
JobLogDO reqVO = randomPojo(JobLogDO.class, o -> o.setExecuteIndex(1));
// invoke
Long id = jobLogService.createJobLog(reqVO.getJobId(), reqVO.getBeginTime(),
reqVO.getHandlerName(), reqVO.getHandlerParam(), reqVO.getExecuteIndex());
// assert
assertNotNull(id);
// verify that the record's attributes are correct
JobLogDO job = jobLogMapper.selectById(id);
assertEquals(JobLogStatusEnum.RUNNING.getStatus(), job.getStatus());
}
|
public static long getSizeOfPhysicalMemory() {
// first check whether the JVM can directly tell us what the system memory is;
// this works only on Oracle JVMs
try {
Class<?> clazz = Class.forName("com.sun.management.OperatingSystemMXBean");
Method method = clazz.getMethod("getTotalPhysicalMemorySize");
OperatingSystemMXBean operatingSystemMXBean =
ManagementFactory.getOperatingSystemMXBean();
// someone may install different beans, so we need to check whether the bean
// is in fact the sun management bean
if (clazz.isInstance(operatingSystemMXBean)) {
return (Long) method.invoke(operatingSystemMXBean);
}
} catch (ClassNotFoundException e) {
// this happens on non-Oracle JVMs, do nothing and use the alternative code paths
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
LOG.warn(
"Access to physical memory size: "
+ "com.sun.management.OperatingSystemMXBean incompatibly changed.",
e);
}
// we now try the OS specific access paths
switch (OperatingSystem.getCurrentOperatingSystem()) {
case LINUX:
return getSizeOfPhysicalMemoryForLinux();
case WINDOWS:
return getSizeOfPhysicalMemoryForWindows();
case MAC_OS:
return getSizeOfPhysicalMemoryForMac();
case FREE_BSD:
return getSizeOfPhysicalMemoryForFreeBSD();
case UNKNOWN:
LOG.error("Cannot determine size of physical memory for unknown operating system");
return -1;
default:
LOG.error("Unrecognized OS: " + OperatingSystem.getCurrentOperatingSystem());
return -1;
}
}
|
@Test
void testPhysicalMemory() {
try {
long physMem = Hardware.getSizeOfPhysicalMemory();
assertThat(physMem).isGreaterThanOrEqualTo(-1);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
|
public int getIndex_depth() {
return index_depth;
}
|
@Test
public void testGetIndex_depth() {
assertEquals(TestParameters.VP_INDEX_DEPTH, chmItspHeader.getIndex_depth());
}
|
public String getType() {
return type;
}
|
@Test
void testDeserialize() throws JsonProcessingException {
String json = "{\"resultCode\":200,\"errorCode\":0,\"type\":\"deregisterInstance\",\"success\":true}";
InstanceResponse response = mapper.readValue(json, InstanceResponse.class);
assertEquals(NamingRemoteConstants.DE_REGISTER_INSTANCE, response.getType());
}
|
public <T> T fromXmlPartial(String partial, Class<T> o) throws Exception {
return fromXmlPartial(toInputStream(partial, UTF_8), o);
}
|
@Test
void shouldLoadIgnoresFromSvnPartial() throws Exception {
String buildXmlPartial =
"""
<svn url="file:///tmp/testSvnRepo/project1/trunk" >
<filter>
<ignore pattern="x"/>
</filter>
</svn>""";
MaterialConfig svnMaterial = xmlLoader.fromXmlPartial(buildXmlPartial, SvnMaterialConfig.class);
Filter parsedFilter = svnMaterial.filter();
Filter expectedFilter = new Filter();
expectedFilter.add(new IgnoredFiles("x"));
assertThat(parsedFilter).isEqualTo(expectedFilter);
}
|
@Override
public void put(final Bytes rawBaseKey,
final byte[] value) {
final long timestamp = baseKeySchema.segmentTimestamp(rawBaseKey);
observedStreamTime = Math.max(observedStreamTime, timestamp);
final long segmentId = segments.segmentId(timestamp);
final S segment = segments.getOrCreateSegmentIfLive(segmentId, context, observedStreamTime);
if (segment == null) {
expiredRecordSensor.record(1.0d, context.currentSystemTimeMs());
LOG.warn("Skipping record for expired segment.");
} else {
synchronized (position) {
StoreQueryUtils.updatePosition(position, stateStoreContext);
// Put to the index first so that if the put to the base store fails, iterating
// the index finds no base value. If we put to the base store first and the put
// to the index fails, iterating the index cannot find the key while iterating
// the base store can, which leads to inconsistency.
if (hasIndex()) {
final KeyValue<Bytes, byte[]> indexKeyValue = getIndexKeyValue(rawBaseKey, value);
segment.put(indexKeyValue.key, indexKeyValue.value);
}
segment.put(rawBaseKey, value);
}
}
}
|
@Test
public void shouldPutAndBackwardFetchEdgeKeyRange() {
final String keyA = "a";
final String keyB = "b";
final Bytes serializedKeyAStart = serializeKey(new Windowed<>(keyA, startEdgeWindow), false,
Integer.MAX_VALUE);
final Bytes serializedKeyAEnd = serializeKey(new Windowed<>(keyA, endEdgeWindow), false,
Integer.MAX_VALUE);
final Bytes serializedKeyBStart = serializeKey(new Windowed<>(keyB, startEdgeWindow), false,
Integer.MAX_VALUE);
final Bytes serializedKeyBEnd = serializeKey(new Windowed<>(keyB, endEdgeWindow), false,
Integer.MAX_VALUE);
bytesStore.put(serializedKeyAStart, serializeValue(10));
bytesStore.put(serializedKeyAEnd, serializeValue(50));
bytesStore.put(serializedKeyBStart, serializeValue(100));
bytesStore.put(serializedKeyBEnd, serializeValue(150));
// Can fetch from start/end for key range
try (final KeyValueIterator<Bytes, byte[]> values = bytesStore.backwardFetch(
Bytes.wrap(keyA.getBytes()), Bytes.wrap(keyB.getBytes()), startEdgeTime, endEdgeTime)) {
final List<KeyValue<Windowed<String>, Long>> expected = getIndexSchema() == null ? asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
) : asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
);
assertEquals(expected, toList(values));
}
// Can fetch from 0 to max for key range
try (final KeyValueIterator<Bytes, byte[]> values = bytesStore.backwardFetch(
Bytes.wrap(keyA.getBytes()), Bytes.wrap(keyB.getBytes()), 0L, Long.MAX_VALUE)) {
final List<KeyValue<Windowed<String>, Long>> expected = getIndexSchema() == null ? asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
) : asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
);
assertEquals(expected, toList(values));
}
// KeyB should be ignored and KeyA should be included, even though KeyB exists in storage
try (final KeyValueIterator<Bytes, byte[]> values = bytesStore.backwardFetch(
null, Bytes.wrap(keyA.getBytes()), startEdgeTime, endEdgeTime - 1L)) {
final List<KeyValue<Windowed<String>, Long>> expected = asList(
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
);
assertEquals(expected, toList(values));
}
try (final KeyValueIterator<Bytes, byte[]> values = bytesStore.backwardFetch(
Bytes.wrap(keyB.getBytes()), null, startEdgeTime + 1, endEdgeTime)) {
final List<KeyValue<Windowed<String>, Long>> expected = asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L)
);
assertEquals(expected, toList(values));
}
try (final KeyValueIterator<Bytes, byte[]> values = bytesStore.backwardFetch(
null, null, 0, Long.MAX_VALUE)) {
final List<KeyValue<Windowed<String>, Long>> expected = getIndexSchema() == null ? asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
) : asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
);
assertEquals(expected, toList(values));
}
try (final KeyValueIterator<Bytes, byte[]> values = bytesStore.backwardFetch(
null, null, startEdgeTime, endEdgeTime)) {
final List<KeyValue<Windowed<String>, Long>> expected = getIndexSchema() == null ? asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
) : asList(
KeyValue.pair(new Windowed<>(keyB, endEdgeWindow), 150L),
KeyValue.pair(new Windowed<>(keyB, startEdgeWindow), 100L),
KeyValue.pair(new Windowed<>(keyA, endEdgeWindow), 50L),
KeyValue.pair(new Windowed<>(keyA, startEdgeWindow), 10L)
);
assertEquals(expected, toList(values));
}
}
|
public static String initNamespaceForNaming(NacosClientProperties properties) {
String tmpNamespace = null;
String isUseCloudNamespaceParsing = properties.getProperty(PropertyKeyConst.IS_USE_CLOUD_NAMESPACE_PARSING,
properties.getProperty(SystemPropertyKeyConst.IS_USE_CLOUD_NAMESPACE_PARSING,
String.valueOf(Constants.DEFAULT_USE_CLOUD_NAMESPACE_PARSING)));
if (Boolean.parseBoolean(isUseCloudNamespaceParsing)) {
tmpNamespace = TenantUtil.getUserTenantForAns();
LogUtils.NAMING_LOGGER.info("initializer namespace from ans.namespace attribute : {}", tmpNamespace);
tmpNamespace = TemplateUtils.stringEmptyAndThenExecute(tmpNamespace, () -> {
String namespace = properties.getProperty(PropertyKeyConst.SystemEnv.ALIBABA_ALIWARE_NAMESPACE);
LogUtils.NAMING_LOGGER.info("initializer namespace from ALIBABA_ALIWARE_NAMESPACE attribute :" + namespace);
return namespace;
});
}
tmpNamespace = TemplateUtils.stringEmptyAndThenExecute(tmpNamespace, () -> {
String namespace = properties.getPropertyFrom(SourceType.JVM, PropertyKeyConst.NAMESPACE);
LogUtils.NAMING_LOGGER.info("initializer namespace from namespace attribute :" + namespace);
return namespace;
});
if (StringUtils.isEmpty(tmpNamespace)) {
tmpNamespace = properties.getProperty(PropertyKeyConst.NAMESPACE);
}
tmpNamespace = TemplateUtils.stringEmptyAndThenExecute(tmpNamespace, () -> UtilAndComs.DEFAULT_NAMESPACE_ID);
return tmpNamespace;
}
|
@Test
void testInitNamespaceFromAnsWithCloudParsing() {
String expect = "ans";
System.setProperty(SystemPropertyKeyConst.ANS_NAMESPACE, expect);
final NacosClientProperties properties = NacosClientProperties.PROTOTYPE.derive();
properties.setProperty(PropertyKeyConst.IS_USE_CLOUD_NAMESPACE_PARSING, "true");
String actual = InitUtils.initNamespaceForNaming(properties);
assertEquals(expect, actual);
}
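A hedged companion sketch (test name and flow assumed, not taken from the source) showing the JVM-property fallback when cloud namespace parsing is disabled:
@Test
void testInitNamespaceFromJvmPropertyWithoutCloudParsing() {
    // with cloud parsing off, resolution falls through to the JVM system property
    System.setProperty(PropertyKeyConst.NAMESPACE, "jvm-ns");
    try {
        final NacosClientProperties properties = NacosClientProperties.PROTOTYPE.derive();
        properties.setProperty(PropertyKeyConst.IS_USE_CLOUD_NAMESPACE_PARSING, "false");
        assertEquals("jvm-ns", InitUtils.initNamespaceForNaming(properties));
    } finally {
        System.clearProperty(PropertyKeyConst.NAMESPACE);
    }
}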
|
@Override
public ProtobufSystemInfo.Section toProtobuf() {
ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
protobuf.setName("System");
setAttribute(protobuf, "Server ID", server.getId());
setAttribute(protobuf, "Edition", sonarRuntime.getEdition().getLabel());
setAttribute(protobuf, NCLOC.getName(), statisticsSupport.getLinesOfCode());
setAttribute(protobuf, "Container", containerSupport.isRunningInContainer());
setAttribute(protobuf, "High Availability", true);
setAttribute(protobuf, "External Users and Groups Provisioning",
commonSystemInformation.getManagedInstanceProviderName());
setAttribute(protobuf, "External User Authentication",
commonSystemInformation.getExternalUserAuthentication());
addIfNotEmpty(protobuf, "Accepted external identity providers",
commonSystemInformation.getEnabledIdentityProviders());
addIfNotEmpty(protobuf, "External identity providers whose users are allowed to sign themselves up",
commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders());
setAttribute(protobuf, "Force authentication", commonSystemInformation.getForceAuthentication());
return protobuf.build();
}
|
@Test
public void toProtobuf_whenInstanceIsNotManaged_shouldWriteNothing() {
when(commonSystemInformation.getManagedInstanceProviderName()).thenReturn(null);
ProtobufSystemInfo.Section protobuf = underTest.toProtobuf();
assertThatAttributeDoesNotExist(protobuf, "External Users and Groups Provisioning");
}
|
@Override
public Optional<SchemaDescription> getSchema(String topic, Target type) {
String subject = schemaSubject(topic, type);
return getSchemaBySubject(subject)
.flatMap(schemaMetadata ->
// schema can be not found when schema contexts are configured improperly
getSchemaById(schemaMetadata.getId())
.map(parsedSchema ->
new SchemaDescription(
convertSchema(schemaMetadata, parsedSchema),
Map.of(
"subject", subject,
"schemaId", schemaMetadata.getId(),
"latestVersion", schemaMetadata.getVersion(),
"type", schemaMetadata.getSchemaType() // AVRO / PROTOBUF / JSON
)
)));
}
|
@Test
void returnsEmptyDescriptorIfSchemaNotRegisteredInSR() {
String topic = "test";
assertThat(serde.getSchema(topic, Serde.Target.KEY)).isEmpty();
assertThat(serde.getSchema(topic, Serde.Target.VALUE)).isEmpty();
}
|
public static boolean isChinese(CharSequence value) {
return isMatchRegex(PatternPool.CHINESES, value);
}
|
@Test
public void isChineseTest() {
assertTrue(Validator.isChinese("全都是中文"));
assertTrue(Validator.isChinese("㐓㐘"));
assertFalse(Validator.isChinese("not全都是中文"));
}
|
public void processNullMessage(Null nullMessage, Afnemersbericht afnemersbericht){
if (afnemersbericht != null && afnemersbericht.getType() == Afnemersbericht.Type.Av01){
afnemersberichtRepository.delete(afnemersbericht);
}
logger.info("Received null message");
}
|
@Test
public void testProcessNullMessageNotAv01(){
Null testNullMessage = TestDglMessagesUtil.createTestNullMessage();
when(afnemersbericht.getType()).thenReturn(Afnemersbericht.Type.Ap01);
classUnderTest.processNullMessage(testNullMessage, afnemersbericht);
verify(afnemersberichtRepository, times(0)).delete(afnemersbericht);
}
|
@Override
public Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
Object result = getCurrentQueryResult().getValue(columnIndex, type);
wasNull = getCurrentQueryResult().wasNull();
return result;
}
|
@Test
void assertGetValue() throws SQLException {
QueryResult queryResult = mock(QueryResult.class);
when(queryResult.getValue(1, Object.class)).thenReturn("1");
streamMergedResult.setCurrentQueryResult(queryResult);
assertThat(streamMergedResult.getValue(1, Object.class).toString(), is("1"));
}
|
@VisibleForTesting
public void validateSmsTemplateCodeDuplicate(Long id, String code) {
SmsTemplateDO template = smsTemplateMapper.selectByCode(code);
if (template == null) {
return;
}
// if id is null, there is no need to check whether the existing record is the one with the same id
if (id == null) {
throw exception(SMS_TEMPLATE_CODE_DUPLICATE, code);
}
if (!template.getId().equals(id)) {
throw exception(SMS_TEMPLATE_CODE_DUPLICATE, code);
}
}
|
@Test
public void testValidateSmsTemplateCodeDuplicate_valueDuplicateForCreate() {
// prepare parameters
String code = randomString();
// mock data
smsTemplateMapper.insert(randomSmsTemplateDO(o -> o.setCode(code)));
// invoke and verify the exception
assertServiceException(() -> smsTemplateService.validateSmsTemplateCodeDuplicate(null, code),
SMS_TEMPLATE_CODE_DUPLICATE, code);
}
|
@Override
public Column convert(BasicTypeDefine typeDefine) {
PhysicalColumn.PhysicalColumnBuilder builder =
PhysicalColumn.builder()
.name(typeDefine.getName())
.nullable(typeDefine.isNullable())
.defaultValue(typeDefine.getDefaultValue())
.comment(typeDefine.getComment());
String dmType = typeDefine.getDataType().toUpperCase();
switch (dmType) {
case DM_BIT:
builder.sourceType(DM_BIT);
builder.dataType(BasicType.BOOLEAN_TYPE);
break;
case DM_TINYINT:
builder.sourceType(DM_TINYINT);
builder.dataType(BasicType.BYTE_TYPE);
break;
case DM_BYTE:
builder.sourceType(DM_BYTE);
builder.dataType(BasicType.BYTE_TYPE);
break;
case DM_SMALLINT:
builder.sourceType(DM_SMALLINT);
builder.dataType(BasicType.SHORT_TYPE);
break;
case DM_INT:
builder.sourceType(DM_INT);
builder.dataType(BasicType.INT_TYPE);
break;
case DM_INTEGER:
builder.sourceType(DM_INTEGER);
builder.dataType(BasicType.INT_TYPE);
break;
case DM_PLS_INTEGER:
builder.sourceType(DM_PLS_INTEGER);
builder.dataType(BasicType.INT_TYPE);
break;
case DM_BIGINT:
builder.sourceType(DM_BIGINT);
builder.dataType(BasicType.LONG_TYPE);
break;
case DM_REAL:
builder.sourceType(DM_REAL);
builder.dataType(BasicType.FLOAT_TYPE);
break;
case DM_FLOAT:
builder.sourceType(DM_FLOAT);
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case DM_DOUBLE:
builder.sourceType(DM_DOUBLE);
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case DM_DOUBLE_PRECISION:
builder.sourceType(DM_DOUBLE_PRECISION);
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case DM_NUMERIC:
case DM_NUMBER:
case DM_DECIMAL:
case DM_DEC:
DecimalType decimalType;
if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
decimalType =
new DecimalType(
typeDefine.getPrecision().intValue(), typeDefine.getScale());
} else {
decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
}
builder.sourceType(
String.format(
"%s(%s,%s)",
DM_DECIMAL, decimalType.getPrecision(), decimalType.getScale()));
builder.dataType(decimalType);
builder.columnLength((long) decimalType.getPrecision());
builder.scale(decimalType.getScale());
break;
case DM_CHAR:
case DM_CHARACTER:
builder.sourceType(String.format("%s(%s)", DM_CHAR, typeDefine.getLength()));
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
break;
case DM_VARCHAR:
case DM_VARCHAR2:
builder.sourceType(String.format("%s(%s)", DM_VARCHAR2, typeDefine.getLength()));
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
break;
case DM_TEXT:
builder.sourceType(DM_TEXT);
builder.dataType(BasicType.STRING_TYPE);
// dm text max length is 2147483647
builder.columnLength(typeDefine.getLength());
break;
case DM_LONG:
builder.sourceType(DM_LONG);
builder.dataType(BasicType.STRING_TYPE);
// dm long max length is 2147483647
builder.columnLength(typeDefine.getLength());
break;
case DM_LONGVARCHAR:
builder.sourceType(DM_LONGVARCHAR);
builder.dataType(BasicType.STRING_TYPE);
// dm longvarchar max length is 2147483647
builder.columnLength(typeDefine.getLength());
break;
case DM_CLOB:
builder.sourceType(DM_CLOB);
builder.dataType(BasicType.STRING_TYPE);
// dm clob max length is 2147483647
builder.columnLength(typeDefine.getLength());
break;
case DM_BINARY:
builder.sourceType(String.format("%s(%s)", DM_BINARY, typeDefine.getLength()));
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(typeDefine.getLength());
break;
case DM_VARBINARY:
builder.sourceType(String.format("%s(%s)", DM_VARBINARY, typeDefine.getLength()));
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(typeDefine.getLength());
break;
case DM_LONGVARBINARY:
builder.sourceType(DM_LONGVARBINARY);
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(typeDefine.getLength());
break;
case DM_IMAGE:
builder.sourceType(DM_IMAGE);
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(typeDefine.getLength());
break;
case DM_BLOB:
builder.sourceType(DM_BLOB);
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(typeDefine.getLength());
break;
case DM_BFILE:
builder.sourceType(DM_BFILE);
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(typeDefine.getLength());
break;
case DM_DATE:
builder.sourceType(DM_DATE);
builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
break;
case DM_TIME:
if (typeDefine.getScale() == null) {
builder.sourceType(DM_TIME);
} else {
builder.sourceType(String.format("%s(%s)", DM_TIME, typeDefine.getScale()));
}
builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
builder.scale(typeDefine.getScale());
break;
case DM_TIME_WITH_TIME_ZONE:
if (typeDefine.getScale() == null) {
builder.sourceType(DM_TIME_WITH_TIME_ZONE);
} else {
builder.sourceType(
String.format("TIME(%s) WITH TIME ZONE", typeDefine.getScale()));
}
builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
builder.scale(typeDefine.getScale());
break;
case DM_TIMESTAMP:
if (typeDefine.getScale() == null) {
builder.sourceType(DM_TIMESTAMP);
} else {
builder.sourceType(
String.format("%s(%s)", DM_TIMESTAMP, typeDefine.getScale()));
}
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
builder.scale(typeDefine.getScale());
break;
case DM_DATETIME:
if (typeDefine.getScale() == null) {
builder.sourceType(DM_DATETIME);
} else {
builder.sourceType(String.format("%s(%s)", DM_DATETIME, typeDefine.getScale()));
}
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
builder.scale(typeDefine.getScale());
break;
case DM_DATETIME_WITH_TIME_ZONE:
if (typeDefine.getScale() == null) {
builder.sourceType(DM_DATETIME_WITH_TIME_ZONE);
} else {
builder.sourceType(
String.format("DATETIME(%s) WITH TIME ZONE", typeDefine.getScale()));
}
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
builder.scale(typeDefine.getScale());
break;
default:
throw CommonError.convertToSeaTunnelTypeError(
DatabaseIdentifier.DAMENG, typeDefine.getDataType(), typeDefine.getName());
}
return builder.build();
}
|
@Test
public void testConvertBfile() {
BasicTypeDefine<Object> typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("bfile")
.dataType("bfile")
.length(2147483647L)
.build();
Column column = DmdbTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
Assertions.assertEquals(2147483647L, column.getColumnLength());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase());
}
|
public static List<Date> matchedDates(String patternStr, Date start, int count, boolean isMatchSecond) {
return matchedDates(patternStr, start, DateUtil.endOfYear(start), count, isMatchSecond);
}
|
@Test
public void matchedDatesTest2() {
// test hourly execution
List<Date> matchedDates = CronPatternUtil.matchedDates("0 0 */1 * * *", DateUtil.parse("2018-10-15 14:33:22"), 5, true);
assertEquals(5, matchedDates.size());
assertEquals("2018-10-15 15:00:00", matchedDates.get(0).toString());
assertEquals("2018-10-15 16:00:00", matchedDates.get(1).toString());
assertEquals("2018-10-15 17:00:00", matchedDates.get(2).toString());
assertEquals("2018-10-15 18:00:00", matchedDates.get(3).toString());
assertEquals("2018-10-15 19:00:00", matchedDates.get(4).toString());
}
|
public static URI parse(String featureIdentifier) {
return parse(FeaturePath.parse(featureIdentifier));
}
|
@Test
void reject_directory_form() {
Executable testMethod = () -> FeatureIdentifier.parse(URI.create("classpath:/path/to"));
IllegalArgumentException actualThrown = assertThrows(IllegalArgumentException.class, testMethod);
assertThat("Unexpected exception message", actualThrown.getMessage(), is(equalTo(
"featureIdentifier does not reference a single feature file: classpath:/path/to")));
}
|
public boolean fileIsInAllowedPath(Path path) {
if (allowedPaths.isEmpty()) {
return true;
}
final Path realFilePath = resolveRealPath(path);
if (realFilePath == null) {
return false;
}
for (Path allowedPath : allowedPaths) {
final Path realAllowedPath = resolveRealPath(allowedPath);
if (realAllowedPath != null && realFilePath.startsWith(realAllowedPath)) {
return true;
}
}
return false;
}
|
@Test
public void outsideOfAllowedPath() throws IOException {
final Path permittedPath = permittedTempDir.getRoot().toPath();
final Path filePath = forbiddenTempDir.newFile(FILE).toPath();
pathChecker = new AllowedAuxiliaryPathChecker(new TreeSet<>(Collections.singleton(permittedPath)));
assertFalse(pathChecker.fileIsInAllowedPath(filePath));
}
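A hedged counterpart test (assumed, reusing the fixtures above) for the empty-set short-circuit at the top of fileIsInAllowedPath:
@Test
public void emptyAllowedPathsPermitsAnyFile() throws IOException {
    final Path filePath = forbiddenTempDir.newFile(FILE).toPath();
    // an empty allowed-path set makes fileIsInAllowedPath return true for any file
    pathChecker = new AllowedAuxiliaryPathChecker(new TreeSet<>());
    assertTrue(pathChecker.fileIsInAllowedPath(filePath));
}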
|
@Override
@MethodNotAvailable
public <T> Map<K, EntryProcessorResult<T>> invokeAll(Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor,
Object... arguments) {
throw new MethodNotAvailableException();
}
|
@Test(expected = MethodNotAvailableException.class)
public void testInvokeAll() {
Set<Integer> keys = new HashSet<>(asList(23, 65, 88));
adapter.invokeAll(keys, new ICacheReplaceEntryProcessor(), "value", "newValue");
}
|
public synchronized TopologyDescription describe() {
return internalTopologyBuilder.describe();
}
|
@Test
public void sessionWindowedCogroupedZeroArgCountShouldPreserveTopologyStructure() {
final StreamsBuilder builder = new StreamsBuilder();
builder.stream("input-topic")
.groupByKey()
.cogroup((key, value, aggregate) -> value)
.windowedBy(SessionWindows.ofInactivityGapWithNoGrace(ofMillis(1)))
.aggregate(() -> "", (aggKey, aggOne, aggTwo) -> "");
final Topology topology = builder.build();
final TopologyDescription describe = topology.describe();
assertEquals(
"Topologies:\n" +
" Sub-topology: 0\n" +
" Source: KSTREAM-SOURCE-0000000000 (topics: [input-topic])\n" +
" --> COGROUPKSTREAM-AGGREGATE-0000000002\n" +
" Processor: COGROUPKSTREAM-AGGREGATE-0000000002 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000001])\n" +
" --> COGROUPKSTREAM-MERGE-0000000003\n" +
" <-- KSTREAM-SOURCE-0000000000\n" +
" Processor: COGROUPKSTREAM-MERGE-0000000003 (stores: [])\n" +
" --> none\n" +
" <-- COGROUPKSTREAM-AGGREGATE-0000000002\n\n",
describe.toString()
);
topology.internalTopologyBuilder.setStreamsConfig(streamsConfig);
assertThat(topology.internalTopologyBuilder.setApplicationId("test").buildTopology().hasPersistentLocalStore(), is(true));
}
|
public static MySQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) {
Preconditions.checkArgument(BINARY_PROTOCOL_VALUES.containsKey(binaryColumnType), "Cannot find MySQL type '%s' in column type when process binary protocol value", binaryColumnType);
return BINARY_PROTOCOL_VALUES.get(binaryColumnType);
}
|
@Test
void assertGetBinaryProtocolValueWithMySQLTypeGeometry() {
assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.GEOMETRY), instanceOf(MySQLStringLenencBinaryProtocolValue.class));
}
|
public static String substringBefore(String s, String splitter) {
int endIndex = s.indexOf(splitter);
if (endIndex >= 0) {
return s.substring(0, endIndex);
}
return s;
}
|
@Test
void testSubstringBeforeSplitterMultiChar() {
assertThat(substringBefore("this is a test", " is ")).isEqualTo("this");
assertThat(substringBefore("this is a test", " was ")).isEqualTo("this is a test");
}
|
@Override
public void connect(
ChannelHandlerContext ctx,
SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception {
if (logger.isEnabled(internalLevel)) {
logger.log(internalLevel, format(ctx, "CONNECT", remoteAddress, localAddress));
}
ctx.connect(remoteAddress, localAddress, promise);
}
|
@Test
public void shouldLogChannelConnect() throws Exception {
EmbeddedChannel channel = new EmbeddedChannel(new LoggingHandler());
channel.connect(new InetSocketAddress(80)).await();
verify(appender).doAppend(argThat(new RegexLogMatcher(".+CONNECT: 0.0.0.0/0.0.0.0:80$")));
}
|
public boolean isSupported(final SQLStatement sqlStatement) {
for (Class<? extends SQLStatement> each : supportedSQLStatements) {
if (each.isAssignableFrom(sqlStatement.getClass())) {
return true;
}
}
for (Class<? extends SQLStatement> each : unsupportedSQLStatements) {
if (each.isAssignableFrom(sqlStatement.getClass())) {
return false;
}
}
return true;
}
|
@Test
void assertIsSupportedWithOverlappedList() {
assertTrue(new SQLSupportedJudgeEngine(Collections.singleton(SelectStatement.class), Collections.singleton(SQLStatement.class)).isSupported(mock(SelectStatement.class)));
}
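A hedged inverse sketch (not from the source): the supported list is consulted first, so the unsupported list only rejects a statement when no supported entry matches:
@Test
void assertIsNotSupportedWhenOnlyUnsupportedListMatches() {
    assertFalse(new SQLSupportedJudgeEngine(Collections.emptyList(), Collections.singleton(SQLStatement.class)).isSupported(mock(SelectStatement.class)));
}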
|
public static <T> TimeLimiterOperator<T> of(TimeLimiter timeLimiter) {
return new TimeLimiterOperator<>(timeLimiter);
}
|
@Test
public void timeoutUsingFlux() {
given(timeLimiter.getTimeLimiterConfig())
.willReturn(toConfig(Duration.ofMillis(1)));
Flux<?> flux = Flux.interval(Duration.ofSeconds(1))
.transformDeferred(TimeLimiterOperator.of(timeLimiter));
StepVerifier.create(flux)
.expectError(TimeoutException.class)
.verify(Duration.ofMinutes(1));
then(timeLimiter).should()
.onError(any(TimeoutException.class));
}
|
@Override
public synchronized boolean remove(Object o) {
int idx = originList.indexOf(o);
if (idx > -1 && rootSet.get(idx)) {
rootSet.set(idx, false);
return true;
}
if (CollectionUtils.isNotEmpty(tailList)) {
return tailList.remove(o);
}
return false;
}
|
@Test
void testRemove() {
List<String> list = Arrays.asList("A", "B", "C");
BitList<String> bitList = new BitList<>(list);
Assertions.assertTrue(bitList.remove("A"));
Assertions.assertFalse(bitList.remove("A"));
Assertions.assertTrue(bitList.removeAll(Collections.singletonList("B")));
Assertions.assertFalse(bitList.removeAll(Collections.singletonList("B")));
Assertions.assertFalse(bitList.removeAll(Collections.singletonList("D")));
bitList.add("D");
Assertions.assertTrue(bitList.removeAll(Collections.singletonList("D")));
Assertions.assertFalse(bitList.hasMoreElementInTailList());
bitList.add("A");
bitList.add("E");
bitList.add("F");
Assertions.assertEquals(4, bitList.size());
Assertions.assertFalse(bitList.removeAll(Collections.singletonList("D")));
Assertions.assertTrue(bitList.removeAll(Collections.singletonList("A")));
Assertions.assertTrue(bitList.removeAll(Collections.singletonList("C")));
Assertions.assertTrue(bitList.removeAll(Collections.singletonList("E")));
Assertions.assertTrue(bitList.removeAll(Collections.singletonList("F")));
Assertions.assertTrue(bitList.isEmpty());
}
|
public List<MappingField> resolveAndValidateFields(
List<MappingField> userFields,
Map<String, String> options,
NodeEngine nodeEngine
) {
final InternalSerializationService serializationService = (InternalSerializationService) nodeEngine
.getSerializationService();
final AbstractRelationsStorage relationsStorage = ((CalciteSqlOptimizer) nodeEngine.getSqlService().getOptimizer())
.relationsStorage();
// normalize and validate the names and external names
for (MappingField field : userFields) {
String name = field.name();
String externalName = field.externalName();
if (externalName == null) {
if (name.equals(KEY) || name.equals(VALUE)) {
externalName = name;
} else {
externalName = VALUE_PREFIX + name;
}
field.setExternalName(externalName);
}
if ((name.equals(KEY) && !externalName.equals(KEY))
|| (name.equals(VALUE) && !externalName.equals(VALUE))) {
throw QueryException.error("Cannot rename field: '" + name + '\'');
}
if (!EXT_NAME_PATTERN.matcher(externalName).matches()) {
throw QueryException.error("Invalid external name: " + externalName);
}
}
Stream<MappingField> keyFields = resolveAndValidateFields(true, userFields, options,
serializationService, relationsStorage);
Stream<MappingField> valueFields = resolveAndValidateFields(false, userFields, options,
serializationService, relationsStorage);
Map<String, MappingField> fields = Stream.concat(keyFields, valueFields)
.collect(LinkedHashMap::new, (map, field) -> map.putIfAbsent(field.name(), field), Map::putAll);
if (fields.isEmpty()) {
throw QueryException.error("The resolved field list is empty");
}
return new ArrayList<>(fields.values());
}
|
@Test
@Parameters({
"__key",
"this"
})
public void when_renamedKeyOrThis_then_throws(String fieldName) {
MappingField field = field(fieldName, QueryDataType.INT, "renamed");
assertThatThrownBy(() -> resolvers.resolveAndValidateFields(singletonList(field), emptyMap(), nodeEngine))
.hasMessage("Cannot rename field: '" + fieldName + '\'');
}
|
public FontMetrics parse() throws IOException
{
return parseFontMetric(false);
}
|
@Test
void testMalformedFloat() throws IOException
{
AFMParser parser = new AFMParser(
new FileInputStream("src/test/resources/afm/MalformedFloat.afm"));
try
{
parser.parse();
fail("The AFMParser should have thrown an IOException because of a malformed float value");
}
catch (IOException e)
{
assertTrue(e.getCause() instanceof NumberFormatException);
assertTrue(e.getMessage().contains("4,1ab"));
}
}
|
public static Set<X509Certificate> filterValid( X509Certificate... certificates )
{
final Set<X509Certificate> results = new HashSet<>();
if (certificates != null)
{
for ( X509Certificate certificate : certificates )
{
if ( certificate == null )
{
continue;
}
try
{
certificate.checkValidity();
}
catch ( CertificateExpiredException | CertificateNotYetValidException e )
{
// Not yet or no longer valid. Don't include in result.
continue;
}
results.add( certificate );
}
}
return results;
}
|
@Test
public void testFilterValidWithOneValidCert() throws Exception
{
// Setup fixture.
final X509Certificate valid = KeystoreTestUtils.generateValidCertificate().getCertificate();
final Collection<X509Certificate> input = new ArrayList<>();
input.add( valid );
// Execute system under test.
final Collection<X509Certificate> result = CertificateUtils.filterValid( input );
// Verify results.
assertEquals( 1, result.size() );
assertTrue( result.contains( valid ) );
}
|
public static void registry(NotificationListener notificationListener,
Class<? extends NotificationType> typeClass) {
if (!isEnable()) {
return;
}
List<NotificationListener> listenerList = NOTIFICATION_LISTENER_MAP.computeIfAbsent(
typeClass.getCanonicalName(), key -> new ArrayList<>());
listenerList.add(notificationListener);
}
|
@Test
public void registry() {
NotificationManager.registry(new ListenerTest(), NettyNotificationType.class);
Optional<?> mapOptional = ReflectUtils.getStaticFieldValue(NotificationManager.class, LISTENER_MAP);
Assert.assertTrue(mapOptional.isPresent());
Map<String, List<NotificationListener>> map = (Map<String, List<NotificationListener>>) mapOptional.get();
Assert.assertTrue(map.containsKey(NettyNotificationType.class.getCanonicalName()));
}
|
@Override
public JFieldVar apply(String nodeName, JsonNode node, JsonNode parent, JFieldVar field, Schema currentSchema) {
if (ruleFactory.getGenerationConfig().isIncludeJsr303Annotations() && isApplicableType(field)) {
if (node.has("minimum")) {
final Class<? extends Annotation> decimalMinClass
= ruleFactory.getGenerationConfig().isUseJakartaValidation()
? DecimalMin.class
: javax.validation.constraints.DecimalMin.class;
JAnnotationUse annotation = field.annotate(decimalMinClass);
annotation.param("value", node.get("minimum").asText());
}
if (node.has("maximum")) {
final Class<? extends Annotation> decimalMaxClass
= ruleFactory.getGenerationConfig().isUseJakartaValidation()
? DecimalMax.class
: javax.validation.constraints.DecimalMax.class;
JAnnotationUse annotation = field.annotate(decimalMaxClass);
annotation.param("value", node.get("maximum").asText());
}
}
return field;
}
|
@Test
public void testNotUsed() {
when(config.isIncludeJsr303Annotations()).thenReturn(true);
when(node.has("minimum")).thenReturn(false);
when(node.has("maximum")).thenReturn(false);
when(fieldVar.type().boxify().fullName()).thenReturn(fieldClass.getTypeName());
JFieldVar result = rule.apply("node", node, null, fieldVar, null);
assertSame(fieldVar, result);
verify(fieldVar, never()).annotate(sizeClass);
verify(annotationMin, never()).param(anyString(), anyString());
verify(annotationMax, never()).param(anyString(), anyString());
}
|
static void dissectResolve(
final MutableDirectBuffer buffer, final int offset, final StringBuilder builder)
{
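        // Wire format after the log header: re-resolution flag (byte), duration (long),
        // resolver name (length-prefixed ASCII), hostname (length-prefixed ASCII), address.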
int absoluteOffset = offset;
absoluteOffset += dissectLogHeader(CONTEXT, NAME_RESOLUTION_RESOLVE, buffer, absoluteOffset, builder);
final boolean isReResolution = 1 == buffer.getByte(absoluteOffset);
absoluteOffset += SIZE_OF_BYTE;
final long durationNs = buffer.getLong(absoluteOffset, LITTLE_ENDIAN);
absoluteOffset += SIZE_OF_LONG;
builder.append(": resolver=");
absoluteOffset += buffer.getStringAscii(absoluteOffset, builder);
absoluteOffset += SIZE_OF_INT;
builder.append(" durationNs=").append(durationNs);
builder.append(" name=");
absoluteOffset += buffer.getStringAscii(absoluteOffset, builder);
absoluteOffset += SIZE_OF_INT;
builder.append(" isReResolution=").append(isReResolution);
builder.append(" address=");
dissectInetAddress(buffer, absoluteOffset, builder);
}
|
@Test
void dissectResolve() throws UnknownHostException
{
final String resolver = "testResolver";
final long durationNs = 32167;
final String hostname = "localhost";
final boolean isReResolution = false;
final InetAddress address = InetAddress.getByName("127.0.0.1");
final int length = SIZE_OF_BOOLEAN + SIZE_OF_LONG + trailingStringLength(resolver, MAX_HOST_NAME_LENGTH) +
trailingStringLength(hostname, MAX_HOST_NAME_LENGTH) +
inetAddressLength(address);
DriverEventEncoder.encodeResolve(
buffer, 0, length, length, resolver, durationNs, hostname, isReResolution, address);
final StringBuilder builder = new StringBuilder();
DriverEventDissector.dissectResolve(buffer, 0, builder);
assertThat(builder.toString(), endsWith(
"DRIVER: NAME_RESOLUTION_RESOLVE [46/46]: " +
"resolver=testResolver durationNs=32167 name=localhost isReResolution=false address=127.0.0.1"));
}
|
static Object[] calculateActualParams(Method m, NamedParameter[] params) {
logger.trace("calculateActualParams {} {}", m, params);
List<String> names = getParametersNames(m);
Object[] actualParams = new Object[names.size()];
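    // A trailing array parameter is treated as variadic: named arguments whose names
    // share its prefix are collected into variableParams and boxed into one array slot.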
boolean isVariableParameters =
m.getParameterCount() > 0 && m.getParameterTypes()[m.getParameterCount() - 1].isArray();
String variableParamPrefix = isVariableParameters ? names.get(names.size() - 1) : null;
List<Object> variableParams = isVariableParameters ? new ArrayList<>() : null;
for (NamedParameter np : params) {
if (!calculateActualParam(np, names, actualParams, isVariableParameters, variableParamPrefix,
variableParams)) {
return null;
}
}
if (isVariableParameters) {
actualParams[actualParams.length - 1] = variableParams.toArray();
}
return actualParams;
}
|
@Test
void calculateActualParams() throws NoSuchMethodException {
// CeilingFunction.invoke(@ParameterName( "n" ) BigDecimal n)
Method m = CeilingFunction.class.getMethod("invoke", BigDecimal.class);
assertNotNull(m);
NamedParameter[] parameters = {new NamedParameter("n", BigDecimal.valueOf(1.5))};
Object[] retrieved = BaseFEELFunctionHelper.calculateActualParams(m, parameters);
assertNotNull(retrieved);
assertEquals(parameters.length, retrieved.length);
assertEquals(parameters[0].getValue(), retrieved[0]);
parameters = new NamedParameter[]{new NamedParameter("undefined", BigDecimal.class)};
retrieved = BaseFEELFunctionHelper.calculateActualParams(m, parameters);
assertNull(retrieved);
}
|
public Response put(URL url, Request request) throws IOException {
return call(HttpMethods.PUT, url, request);
}
|
@Test
public void testPut() throws IOException {
verifyCall(HttpMethods.PUT, FailoverHttpClient::put);
}
|
public static Schema convert(final org.apache.iceberg.Schema schema) {
ImmutableList.Builder<Field> fields = ImmutableList.builder();
for (NestedField f : schema.columns()) {
fields.add(convert(f));
}
return new Schema(fields.build());
}
|
@Test
public void convertPrimitive() {
Schema iceberg =
new Schema(
Types.NestedField.optional(0, INTEGER_FIELD, IntegerType.get()),
Types.NestedField.optional(1, BOOLEAN_FIELD, BooleanType.get()),
Types.NestedField.required(2, DOUBLE_FIELD, DoubleType.get()),
Types.NestedField.required(3, STRING_FIELD, StringType.get()),
Types.NestedField.optional(4, DATE_FIELD, DateType.get()),
Types.NestedField.optional(5, TIMESTAMP_FIELD, TimestampType.withZone()),
Types.NestedField.optional(6, LONG_FIELD, LongType.get()),
Types.NestedField.optional(7, FLOAT_FIELD, FloatType.get()),
Types.NestedField.optional(8, TIME_FIELD, TimeType.get()),
Types.NestedField.optional(9, BINARY_FIELD, Types.BinaryType.get()),
Types.NestedField.optional(10, DECIMAL_FIELD, Types.DecimalType.of(1, 1)),
Types.NestedField.optional(
12, LIST_FIELD, Types.ListType.ofOptional(13, Types.IntegerType.get())),
Types.NestedField.required(
14,
MAP_FIELD,
Types.MapType.ofOptional(15, 16, StringType.get(), IntegerType.get())),
Types.NestedField.optional(17, FIXED_WIDTH_BINARY_FIELD, Types.FixedType.ofLength(10)),
Types.NestedField.optional(18, UUID_FIELD, Types.UUIDType.get()));
org.apache.arrow.vector.types.pojo.Schema arrow = ArrowSchemaUtil.convert(iceberg);
validate(iceberg, arrow);
}
|
@JsonIgnore
public ValidationResult validate() {
final ValidationResult validation = new ValidationResult();
if (title().isEmpty()) {
validation.addError(FIELD_TITLE, "Notification title cannot be empty.");
}
try {
validation.addAll(config().validate());
} catch (UnsupportedOperationException e) {
validation.addError(FIELD_CONFIG, "Notification config type cannot be empty.");
}
return validation;
}
|
@Test
public void testValidEmailNotification() {
final NotificationDto validNotification = getEmailNotification();
final ValidationResult validationResult = validNotification.validate();
assertThat(validationResult.failed()).isFalse();
assertThat(validationResult.getErrors()).isEmpty();
}
|
public static NamenodeRole convert(NamenodeRoleProto role) {
switch (role) {
case NAMENODE:
return NamenodeRole.NAMENODE;
case BACKUP:
return NamenodeRole.BACKUP;
case CHECKPOINT:
return NamenodeRole.CHECKPOINT;
}
return null;
}
|
@Test
public void testConvertDatanodeID() {
DatanodeID dn = DFSTestUtil.getLocalDatanodeID();
DatanodeIDProto dnProto = PBHelperClient.convert(dn);
DatanodeID dn2 = PBHelperClient.convert(dnProto);
compare(dn, dn2);
}
|
@Override
public Set<Name> getLocations() {
if(StringUtils.isNotBlank(session.getHost().getRegion())) {
final S3Region region = new S3Region(session.getHost().getRegion());
if(log.isDebugEnabled()) {
log.debug(String.format("Return single region %s set in bookmark", region));
}
return Collections.singleton(region);
}
if(StringUtils.isNotEmpty(RequestEntityRestStorageService.findBucketInHostname(session.getHost()))) {
if(log.isDebugEnabled()) {
log.debug(String.format("Return empty set for hostname %s", session.getHost()));
}
// Connected to single bucket
return Collections.emptySet();
}
if(!S3Session.isAwsHostname(session.getHost().getHostname(), false)) {
if(new S3Protocol().getRegions().equals(session.getHost().getProtocol().getRegions())) {
// Return empty set for unknown provider
if(log.isDebugEnabled()) {
log.debug(String.format("Return empty set for unknown provider %s", session.getHost()));
}
return Collections.emptySet();
}
}
return session.getHost().getProtocol().getRegions();
}
|
@Test
public void testEmptyThirdPartyProvider() {
final Host host = new Host(new S3Protocol(), "mys3");
final S3Session session = new S3Session(host);
assertTrue(new S3LocationFeature(session).getLocations().isEmpty());
}
|
public BlobOperationResponse listBlobs(final Exchange exchange) {
final ListBlobsOptions listBlobOptions = configurationProxy.getListBlobOptions(exchange);
final Duration timeout = configurationProxy.getTimeout(exchange);
final String regex = configurationProxy.getRegex(exchange);
List<BlobItem> blobs = client.listBlobs(listBlobOptions, timeout);
if (ObjectHelper.isEmpty(regex)) {
return BlobOperationResponse.create(blobs);
}
List<BlobItem> filteredBlobs = blobs.stream()
.filter(x -> x.getName().matches(regex))
.collect(Collectors.toCollection(LinkedList<BlobItem>::new));
return BlobOperationResponse.create(filteredBlobs);
}
|
@Test
void testListBlob() {
when(client.listBlobs(any(), any())).thenReturn(listBlobsMock());
final BlobContainerOperations blobContainerOperations = new BlobContainerOperations(configuration, client);
final BlobOperationResponse response = blobContainerOperations.listBlobs(null);
assertNotNull(response);
@SuppressWarnings("unchecked")
final List<BlobItem> body = (List<BlobItem>) response.getBody();
final List<String> items = body.stream().map(BlobItem::getName).toList();
assertTrue(items.contains("item-1"));
assertTrue(items.contains("item-2"));
}
|
@GET
@Produces({MediaType.APPLICATION_JSON})
public Map<String, Object> requestFormats() {
return SUPPORTED_FORMATS;
}
|
@Test
public void testFormats() {
assertEquals(1, server.requestFormats().size());
        assertEquals("application/json", ((List) server.requestFormats().get("responseTypes")).get(0));
}
|
@VisibleForTesting
long customShuffleTransfer(WritableByteChannel target, long position)
throws IOException {
long actualCount = this.count - position;
if (actualCount < 0 || position < 0) {
throw new IllegalArgumentException(
"position out of range: " + position +
" (expected: 0 - " + (this.count - 1) + ')');
}
if (actualCount == 0) {
return 0L;
}
long trans = actualCount;
int readSize;
ByteBuffer byteBuffer = ByteBuffer.allocate(
Math.min(
this.shuffleBufferSize,
trans > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) trans));
while(trans > 0L &&
(readSize = fileChannel.read(byteBuffer, this.position+position)) > 0) {
//adjust counters and buffer limit
if(readSize < trans) {
trans -= readSize;
position += readSize;
byteBuffer.flip();
} else {
//We can read more than we need if the actualCount is not multiple
//of the byteBuffer size and file is big enough. In that case we cannot
//use flip method but we need to set buffer limit manually to trans.
byteBuffer.limit((int)trans);
byteBuffer.position(0);
position += trans;
trans = 0;
}
//write data to the target
while(byteBuffer.hasRemaining()) {
target.write(byteBuffer);
}
byteBuffer.clear();
}
return actualCount - trans;
}
|
@Test(timeout = 100000)
public void testCustomShuffleTransfer() throws IOException {
File absLogDir = new File("target",
TestFadvisedFileRegion.class.getSimpleName() +
"LocDir").getAbsoluteFile();
String testDirPath =
StringUtils.join(Path.SEPARATOR,
new String[] { absLogDir.getAbsolutePath(),
"testCustomShuffleTransfer"});
File testDir = new File(testDirPath);
testDir.mkdirs();
System.out.println(testDir.getAbsolutePath());
File inFile = new File(testDir, "fileIn.out");
File outFile = new File(testDir, "fileOut.out");
//Initialize input file
byte [] initBuff = new byte[FILE_SIZE];
Random rand = new Random();
rand.nextBytes(initBuff);
FileOutputStream out = new FileOutputStream(inFile);
try{
out.write(initBuff);
} finally {
IOUtils.cleanupWithLogger(LOG, out);
}
//define position and count to read from a file region.
int position = 2*1024*1024;
int count = 4*1024*1024 - 1;
RandomAccessFile inputFile = null;
RandomAccessFile targetFile = null;
WritableByteChannel target = null;
FadvisedFileRegion fileRegion = null;
try {
inputFile = new RandomAccessFile(inFile.getAbsolutePath(), "r");
targetFile = new RandomAccessFile(outFile.getAbsolutePath(), "rw");
target = targetFile.getChannel();
Assert.assertEquals(FILE_SIZE, inputFile.length());
//create FadvisedFileRegion
fileRegion = new FadvisedFileRegion(
inputFile, position, count, false, 0, null, null, 1024, false);
//test corner cases
customShuffleTransferCornerCases(fileRegion, target, count);
long pos = 0;
long size;
while((size = fileRegion.customShuffleTransfer(target, pos)) > 0) {
pos += size;
}
//assert size
Assert.assertEquals(count, (int)pos);
Assert.assertEquals(count, targetFile.length());
} finally {
if (fileRegion != null) {
fileRegion.deallocate();
}
IOUtils.cleanupWithLogger(LOG, target);
IOUtils.cleanupWithLogger(LOG, targetFile);
IOUtils.cleanupWithLogger(LOG, inputFile);
}
//Read the target file and verify that copy is done correctly
byte [] buff = new byte[FILE_SIZE];
FileInputStream in = new FileInputStream(outFile);
try {
int total = in.read(buff, 0, count);
Assert.assertEquals(count, total);
for(int i = 0; i < count; i++) {
Assert.assertEquals(initBuff[position+i], buff[i]);
}
} finally {
IOUtils.cleanupWithLogger(LOG, in);
}
//delete files and folders
inFile.delete();
outFile.delete();
testDir.delete();
absLogDir.delete();
}
|
public <T extends ShardingSphereRule> T getSingleRule(final Class<T> clazz) {
Collection<T> foundRules = findRules(clazz);
Preconditions.checkState(1 == foundRules.size(), "Rule `%s` should have and only have one instance.", clazz.getSimpleName());
return foundRules.iterator().next();
}
|
@Test
void assertGetSingleRule() {
assertThat(ruleMetaData.getSingleRule(ShardingSphereRuleFixture.class), instanceOf(ShardingSphereRuleFixture.class));
}
|
@Override
protected void doStop() throws Exception {
shutdownReconnectService(reconnectService);
LOG.debug("Disconnecting from: {}...", getEndpoint().getConnectionString());
super.doStop();
closeSession();
LOG.info("Disconnected from: {}", getEndpoint().getConnectionString());
}
|
@Test
public void doStopShouldNotCloseTheSMPPSessionIfItIsNull() throws Exception {
when(endpoint.getConnectionString())
.thenReturn("smpp://smppclient@localhost:2775");
consumer.doStop();
}
|
@Nullable
public byte[] getValue() {
return mValue;
}
|
@Test
public void setValue_SINT32_BE() {
final MutableData data = new MutableData(new byte[4]);
data.setValue(0x00fdfdfe, Data.FORMAT_UINT32_BE, 0);
assertArrayEquals(new byte[] { (byte) 0x00, (byte) 0xFD, (byte) 0xFD, (byte) 0xFE } , data.getValue());
}
|
@Override
public void execute(Context context) {
executeForBranch(treeRootHolder.getRoot());
}
|
@Test
public void execute_whenNoBaseMeasure_shouldNotRaiseEvent() {
when(measureRepository.getBaseMeasure(treeRootHolder.getRoot(), qualityProfileMetric)).thenReturn(Optional.empty());
underTest.execute(new TestComputationStepContext());
verifyNoMoreInteractions(eventRepository);
}
|
public static int MAXIM(@NonNull final byte[] data, final int offset, final int length) {
return CRC(0x8005, 0x0000, data, offset, length, true, true, 0xFFFF);
}
|
@Test
public void MAXIM_empty() {
final byte[] data = new byte[0];
assertEquals(0xFFFF, CRC16.MAXIM(data, 0, 0));
}
|
int parseAndConvert(String[] args) throws Exception {
Options opts = createOptions();
int retVal = 0;
try {
if (args.length == 0) {
LOG.info("Missing command line arguments");
printHelp(opts);
return 0;
}
CommandLine cliParser = new GnuParser().parse(opts, args);
if (cliParser.hasOption(CliOption.HELP.shortSwitch)) {
printHelp(opts);
return 0;
}
FSConfigToCSConfigConverter converter =
prepareAndGetConverter(cliParser);
converter.convert(converterParams);
String outputDir = converterParams.getOutputDirectory();
boolean skipVerification =
cliParser.hasOption(CliOption.SKIP_VERIFICATION.shortSwitch);
if (outputDir != null && !skipVerification) {
validator.validateConvertedConfig(
converterParams.getOutputDirectory());
}
} catch (ParseException e) {
String msg = "Options parsing failed: " + e.getMessage();
logAndStdErr(e, msg);
printHelp(opts);
retVal = -1;
} catch (PreconditionException e) {
String msg = "Cannot start FS config conversion due to the following"
+ " precondition error: " + e.getMessage();
handleException(e, msg);
retVal = -1;
} catch (UnsupportedPropertyException e) {
String msg = "Unsupported property/setting encountered during FS config "
+ "conversion: " + e.getMessage();
handleException(e, msg);
retVal = -1;
} catch (ConversionException | IllegalArgumentException e) {
String msg = "Fatal error during FS config conversion: " + e.getMessage();
handleException(e, msg);
retVal = -1;
} catch (VerificationException e) {
Throwable cause = e.getCause();
      String msg = "Verification failed: " + cause.getMessage();
conversionOptions.handleVerificationFailure(cause, msg);
retVal = -1;
}
conversionOptions.handleParsingFinished();
return retVal;
}
|
@Test
public void testValidationSkippedWhenCmdLineSwitchIsDefined()
throws Exception {
setupFSConfigConversionFiles(true);
FSConfigToCSConfigArgumentHandler argumentHandler =
new FSConfigToCSConfigArgumentHandler(conversionOptions,
mockValidator);
String[] args = getArgumentsAsArrayWithDefaults("-f",
FSConfigConverterTestCommons.FS_ALLOC_FILE, "-s");
argumentHandler.parseAndConvert(args);
verifyZeroInteractions(mockValidator);
}
|
public PrefetchableIterable<V> get(K key) {
checkState(
!isClosed,
"Multimap user state is no longer usable because it is closed for %s",
keysStateRequest.getStateKey());
Object structuralKey = mapKeyCoder.structuralValue(key);
KV<K, List<V>> pendingAddValues = pendingAdds.get(structuralKey);
PrefetchableIterable<V> pendingValues =
pendingAddValues == null
? PrefetchableIterables.fromArray()
: PrefetchableIterables.limit(
pendingAddValues.getValue(), pendingAddValues.getValue().size());
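    // Once the map has been cleared or this key removed, persisted values are no longer
    // visible; only values added afterwards (pendingAdds) are returned.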
if (isCleared || pendingRemoves.containsKey(structuralKey)) {
return pendingValues;
}
return PrefetchableIterables.concat(getPersistedValues(structuralKey, key), pendingValues);
}
|
@Test
public void testImmutableValues() throws Exception {
FakeBeamFnStateClient fakeClient =
new FakeBeamFnStateClient(
ImmutableMap.of(
createMultimapKeyStateKey(),
KV.of(ByteArrayCoder.of(), singletonList(A1)),
createMultimapValueStateKey(A1),
KV.of(StringUtf8Coder.of(), asList("V1", "V2"))));
MultimapUserState<byte[], String> userState =
new MultimapUserState<>(
Caches.noop(),
fakeClient,
"instructionId",
createMultimapKeyStateKey(),
ByteArrayCoder.of(),
StringUtf8Coder.of());
Iterable<String> values = userState.get(A1);
assertThrows(
UnsupportedOperationException.class,
() -> Iterables.removeAll(values, Arrays.asList("V1")));
}
|
public Result parse(final String string) throws DateNotParsableException {
return this.parse(string, new Date());
}
|
@Test
public void testDefaultTZ() throws Exception {
NaturalDateParser.Result today = naturalDateParser.parse("today");
assertThat(today.getFrom()).as("From should not be null").isNotNull();
assertThat(today.getTo()).as("To should not be null").isNotNull();
        assertThat(today.getDateTimeZone().getID()).as("should have Etc/UTC as the time zone").isEqualTo("Etc/UTC");
}
|
public ServiceInfo processServiceInfo(String json) {
ServiceInfo serviceInfo = JacksonUtils.toObj(json, ServiceInfo.class);
serviceInfo.setJsonFromServer(json);
return processServiceInfo(serviceInfo);
}
|
@Test
void testProcessServiceInfo() {
ServiceInfo info = new ServiceInfo("a@@b@@c");
Instance instance1 = createInstance("1.1.1.1", 1);
Instance instance2 = createInstance("1.1.1.2", 2);
List<Instance> hosts = new ArrayList<>();
hosts.add(instance1);
hosts.add(instance2);
info.setHosts(hosts);
ServiceInfo actual1 = holder.processServiceInfo(info);
assertEquals(info, actual1);
Instance newInstance1 = createInstance("1.1.1.1", 1);
newInstance1.setWeight(2.0);
Instance instance3 = createInstance("1.1.1.3", 3);
List<Instance> hosts2 = new ArrayList<>();
hosts2.add(newInstance1);
hosts2.add(instance3);
ServiceInfo info2 = new ServiceInfo("a@@b@@c");
info2.setHosts(hosts2);
ServiceInfo actual2 = holder.processServiceInfo(info2);
assertEquals(info2, actual2);
}
|
public void createNamespace(Namespace namespace, Map<String, String> metadata) {
checkNamespaceIsValid(namespace);
getRef().checkMutable();
ContentKey key = ContentKey.of(namespace.levels());
org.projectnessie.model.Namespace content =
org.projectnessie.model.Namespace.of(key.getElements(), metadata);
try {
Content existing = api.getContent().reference(getReference()).key(key).get().get(key);
if (existing != null) {
throw namespaceAlreadyExists(key, existing, null);
}
try {
commitRetry("create namespace " + key, Operation.Put.of(key, content));
} catch (NessieReferenceConflictException e) {
Optional<Conflict> conflict =
NessieUtil.extractSingleConflict(
e,
EnumSet.of(
Conflict.ConflictType.KEY_EXISTS, Conflict.ConflictType.NAMESPACE_ABSENT));
if (conflict.isPresent()) {
switch (conflict.get().conflictType()) {
case KEY_EXISTS:
Content conflicting = withReference(api.getContent()).key(key).get().get(key);
throw namespaceAlreadyExists(key, conflicting, e);
case NAMESPACE_ABSENT:
throw new NoSuchNamespaceException(
e,
"Cannot create namespace '%s': parent namespace '%s' does not exist",
namespace,
conflict.get().key());
}
}
throw new RuntimeException(
String.format("Cannot create namespace '%s': %s", namespace, e.getMessage()));
}
} catch (NessieNotFoundException e) {
throw new RuntimeException(
String.format(
"Cannot create namespace '%s': ref '%s' is no longer valid.",
namespace, getRef().getName()),
e);
} catch (BaseNessieClientServerException e) {
throw new RuntimeException(
String.format("Cannot create namespace '%s': %s", namespace, e.getMessage()), e);
}
}
|
@Test
public void testCreateNamespaceInvalid() throws NessieConflictException, NessieNotFoundException {
String branch = "createNamespaceInvalidBranch";
createBranch(branch);
NessieIcebergClient client = new NessieIcebergClient(api, branch, null, Map.of());
assertThatThrownBy(() -> client.createNamespace(Namespace.empty(), Map.of()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Invalid namespace: ");
assertThatThrownBy(() -> client.createNamespace(Namespace.of("a", "b"), Map.of()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Cannot create namespace 'a.b': parent namespace 'a' does not exist");
}
|
public RuntimeOptionsBuilder parse(Map<String, String> properties) {
return parse(properties::get);
}
|
@Test
void should_parse_rerun_file_and_remove_existing_tag_filters() throws IOException {
RuntimeOptions existing = RuntimeOptions.defaultOptions();
existing.setTagExpressions(Collections.singletonList(TagExpressionParser.parse("@example")));
Path path = mockFileResource("classpath:path/to.feature");
properties.put(Constants.FEATURES_PROPERTY_NAME, "@" + path.toString());
RuntimeOptions options = cucumberPropertiesParser.parse(properties).build();
assertAll(
() -> assertThat(options.getFeaturePaths(), contains(URI.create("classpath:path/to.feature"))),
() -> assertThat(options.getTagExpressions(), not(contains("@example"))));
}
|
public String validate(final String xml) {
final Source source = new SAXSource(reader, new InputSource(IOUtils.toInputStream(xml, Charset.defaultCharset())));
return validate(source);
}
|
@Test
public void testValidXML() throws Exception {
String payload = IOUtils.toString(ClassLoader.getSystemResourceAsStream("xml/article-1.xml"),
Charset.defaultCharset());
logger.info("Validating payload: {}", payload);
// validate
String result = getProcessor("sch/schematron-1.sch", null).validate(payload);
logger.info("Schematron Report: {}", result);
assertEquals(0, Integer.valueOf(Utils.evaluate("count(//svrl:failed-assert)", result)).intValue());
assertEquals(0, Integer.valueOf(Utils.evaluate("count(//svrl:successful-report)", result)).intValue());
}
|
@Override
protected OutputStream createObject(String key) throws IOException {
return new GCSOutputStream(mBucketName, key, mClient,
mUfsConf.getList(PropertyKey.TMP_DIRS));
}
|
@Test
public void testCreateObject() throws IOException, ServiceException {
// test successful create object
Mockito.when(mClient.putObject(ArgumentMatchers.anyString(),
ArgumentMatchers.any(GSObject.class))).thenReturn(null);
OutputStream result = mGCSUnderFileSystem.createObject(KEY);
Assert.assertTrue(result instanceof GCSOutputStream);
}
|
public boolean isValid(String value) {
if (value == null) {
return false;
}
URI uri; // ensure value is a valid URI
try {
uri = new URI(value);
} catch (URISyntaxException e) {
return false;
}
        // OK, perform additional validation
String scheme = uri.getScheme();
if (!isValidScheme(scheme)) {
return false;
}
String authority = uri.getRawAuthority();
if ("file".equals(scheme) && (authority == null || "".equals(authority))) { // Special case - file: allows an empty authority
return true; // this is a local file - nothing more to do here
} else if ("file".equals(scheme) && authority != null && authority.contains(":")) {
return false;
} else {
// Validate the authority
if (!isValidAuthority(authority)) {
return false;
}
}
if (!isValidPath(uri.getRawPath())) {
return false;
}
if (!isValidQuery(uri.getRawQuery())) {
return false;
}
if (!isValidFragment(uri.getRawFragment())) {
return false;
}
return true;
}
|
@Test
public void testValidator420() {
UrlValidator validator = new UrlValidator();
assertFalse(validator.isValid("http://example.com/serach?address=Main Avenue"));
assertTrue(validator.isValid("http://example.com/serach?address=Main%20Avenue"));
assertTrue(validator.isValid("http://example.com/serach?address=Main+Avenue"));
}
|
public Path getSegmentsDirectory(final Path file) {
return new Path(file.getParent(), String.format("%s%s", prefix, file.getName()), EnumSet.of(Path.Type.directory));
}
|
@Test
public void testGetSegmentsDirectory() {
final SwiftSegmentService service = new SwiftSegmentService(session, ".prefix/");
final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
final String name = UUID.randomUUID().toString();
final String key = UUID.randomUUID().toString() + "/" + name;
assertEquals("/test.cyberduck.ch/.prefix/" + key, service.getSegmentsDirectory(new Path(container, key, EnumSet.of(Path.Type.file))).getAbsolute());
final Path directory = new Path(container, "dir", EnumSet.of(Path.Type.directory));
assertEquals("/test.cyberduck.ch/dir/.prefix/" + key, service.getSegmentsDirectory(new Path(directory, key, EnumSet.of(Path.Type.file))).getAbsolute());
}
|
@Override
public Result invoke(Invocation invocation) throws RpcException {
Result result;
String value = getUrl().getMethodParameter(
RpcUtils.getMethodName(invocation), MOCK_KEY, Boolean.FALSE.toString())
.trim();
if (ConfigUtils.isEmpty(value)) {
// no mock
result = this.invoker.invoke(invocation);
} else if (value.startsWith(FORCE_KEY)) {
if (logger.isWarnEnabled()) {
logger.warn(
CLUSTER_FAILED_MOCK_REQUEST,
"force mock",
"",
"force-mock: " + RpcUtils.getMethodName(invocation) + " force-mock enabled , url : "
+ getUrl());
}
// force:direct mock
result = doMockInvoke(invocation, null);
} else {
// fail-mock
try {
result = this.invoker.invoke(invocation);
// fix:#4585
if (result.getException() != null && result.getException() instanceof RpcException) {
RpcException rpcException = (RpcException) result.getException();
if (rpcException.isBiz()) {
throw rpcException;
} else {
result = doMockInvoke(invocation, rpcException);
}
}
} catch (RpcException e) {
if (e.isBiz()) {
throw e;
}
if (logger.isWarnEnabled()) {
logger.warn(
CLUSTER_FAILED_MOCK_REQUEST,
"failed to mock invoke",
"",
"fail-mock: " + RpcUtils.getMethodName(invocation) + " fail-mock enabled , url : "
+ getUrl(),
e);
}
result = doMockInvoke(invocation, e);
}
}
return result;
}
|
@Test
void testMockInvokerFromOverride_Invoke_check_int() {
URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
.addParameter(
REFER_KEY,
URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
+ "getInt1.mock=force:return 1688"))
.addParameter("invoke_return_error", "true");
Invoker<IHelloService> cluster = getClusterInvoker(url);
// Configured with mock
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getInt1");
Result ret = cluster.invoke(invocation);
Assertions.assertTrue(
ret.getValue() instanceof Integer,
"result type must be integer but was : " + ret.getValue().getClass());
        Assertions.assertEquals(Integer.valueOf(1688), (Integer) ret.getValue());
}
|
public static Optional<String> maybeCreateProcessingLogTopic(
final KafkaTopicClient topicClient,
final ProcessingLogConfig config,
final KsqlConfig ksqlConfig) {
if (!config.getBoolean(ProcessingLogConfig.TOPIC_AUTO_CREATE)) {
return Optional.empty();
}
final String topicName = getTopicName(config, ksqlConfig);
final int nPartitions =
config.getInt(ProcessingLogConfig.TOPIC_PARTITIONS);
final short nReplicas =
config.getShort(ProcessingLogConfig.TOPIC_REPLICATION_FACTOR);
try {
topicClient.createTopic(topicName, nPartitions, nReplicas);
} catch (final KafkaTopicExistsException e) {
if (e.getPartitionOrReplicaMismatch()) {
LOGGER.warn(String.format("Log topic %s already exists", topicName), e);
} else {
LOGGER.info(String.format("Log topic %s already exists", topicName), e);
}
}
return Optional.of(topicName);
}
|
@Test
public void shouldNotCreateLogTopicIfNotConfigured() {
// Given:
final ProcessingLogConfig config = new ProcessingLogConfig(
ImmutableMap.of(ProcessingLogConfig.TOPIC_AUTO_CREATE, false)
);
// When:
final Optional<String> createdTopic = ProcessingLogServerUtils.maybeCreateProcessingLogTopic(
spyTopicClient,
config,
ksqlConfig);
// Then:
assertThat(createdTopic.isPresent(), is(false));
verifyNoMoreInteractions(spyTopicClient);
}
|
public static Duration parseDuration(String text) {
checkNotNull(text);
final String trimmed = text.trim();
checkArgument(!trimmed.isEmpty(), "argument is an empty- or whitespace-only string");
final int len = trimmed.length();
int pos = 0;
char current;
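        // Scan the leading digits; whatever follows is the unit label.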
while (pos < len && (current = trimmed.charAt(pos)) >= '0' && current <= '9') {
pos++;
}
final String number = trimmed.substring(0, pos);
final String unitLabel = trimmed.substring(pos).trim().toLowerCase(Locale.US);
if (number.isEmpty()) {
throw new NumberFormatException("text does not start with a number");
}
final BigInteger value;
try {
value = new BigInteger(number); // this throws a NumberFormatException
} catch (NumberFormatException e) {
throw new IllegalArgumentException(
"The value '" + number + "' cannot be represented as an integer number.", e);
}
final ChronoUnit unit;
if (unitLabel.isEmpty()) {
unit = ChronoUnit.MILLIS;
} else {
unit = LABEL_TO_UNIT_MAP.get(unitLabel);
}
if (unit == null) {
throw new IllegalArgumentException(
"Time interval unit label '"
+ unitLabel
+ "' does not match any of the recognized units: "
+ TimeUnit.getAllUnits());
}
try {
return convertBigIntToDuration(value, unit);
} catch (ArithmeticException e) {
throw new IllegalArgumentException(
"The value '"
+ number
+ "' cannot be represented as java.time.Duration (numeric overflow).",
e);
}
}
|
@Test
void testParseDurationNanos() {
assertThat(TimeUtils.parseDuration("424562ns").getNano()).isEqualTo(424562);
assertThat(TimeUtils.parseDuration("424562nano").getNano()).isEqualTo(424562);
assertThat(TimeUtils.parseDuration("424562nanos").getNano()).isEqualTo(424562);
assertThat(TimeUtils.parseDuration("424562nanosecond").getNano()).isEqualTo(424562);
assertThat(TimeUtils.parseDuration("424562nanoseconds").getNano()).isEqualTo(424562);
assertThat(TimeUtils.parseDuration("424562 ns").getNano()).isEqualTo(424562);
assertThat(TimeUtils.parseDuration("9223372036854775807000001 ns"))
.isEqualByComparingTo(Duration.ofMillis(9223372036854775807L).plusNanos(1));
}
|
public void runExtractor(Message msg) {
try(final Timer.Context ignored = completeTimer.time()) {
final String field;
try (final Timer.Context ignored2 = conditionTimer.time()) {
// We can only work on Strings.
if (!(msg.getField(sourceField) instanceof String)) {
conditionMissesCounter.inc();
return;
}
field = (String) msg.getField(sourceField);
// Decide if to extract at all.
if (conditionType.equals(ConditionType.STRING)) {
if (field.contains(conditionValue)) {
conditionHitsCounter.inc();
} else {
conditionMissesCounter.inc();
return;
}
} else if (conditionType.equals(ConditionType.REGEX)) {
if (regexConditionPattern.matcher(field).find()) {
conditionHitsCounter.inc();
} else {
conditionMissesCounter.inc();
return;
}
}
}
try (final Timer.Context ignored2 = executionTimer.time()) {
Result[] results;
try {
results = run(field);
} catch (ExtractorException e) {
final String error = "Could not apply extractor <" + getTitle() + " (" + getId() + ")>";
msg.addProcessingError(new Message.ProcessingError(
ProcessingFailureCause.ExtractorException, error, ExceptionUtils.getRootCauseMessage(e)));
return;
}
if (results == null || results.length == 0 || Arrays.stream(results).anyMatch(result -> result.getValue() == null)) {
return;
} else if (results.length == 1 && results[0].target == null) {
                // results[0].target is null if this extractor cannot produce multiple fields; use targetField in that case
msg.addField(targetField, results[0].getValue());
} else {
for (final Result result : results) {
msg.addField(result.getTarget(), result.getValue());
}
}
// Remove original from message?
if (cursorStrategy.equals(CursorStrategy.CUT) && !targetField.equals(sourceField) && !Message.RESERVED_FIELDS.contains(sourceField) && results[0].beginIndex != -1) {
final StringBuilder sb = new StringBuilder(field);
final List<Result> reverseList = Arrays.stream(results)
.sorted(Comparator.<Result>comparingInt(result -> result.endIndex).reversed())
.collect(Collectors.toList());
// remove all from reverse so that the indices still match
for (final Result result : reverseList) {
sb.delete(result.getBeginIndex(), result.getEndIndex());
}
final String builtString = sb.toString();
final String finalResult = builtString.trim().isEmpty() ? "fullyCutByExtractor" : builtString;
msg.removeField(sourceField);
// TODO don't add an empty field back, or rather don't add fullyCutByExtractor
msg.addField(sourceField, finalResult);
}
runConverters(msg);
}
}
}
|
@Test
public void testCursorStrategyCutIfBeginAndEndIndexAreDisabled() throws Exception {
final TestExtractor extractor = new TestExtractor.Builder()
.cursorStrategy(CUT)
.sourceField("msg")
.callback(new Callable<Result[]>() {
@Override
public Result[] call() throws Exception {
return new Result[]{
new Result("the", -1, -1)
};
}
})
.build();
final Message msg = createMessage("message");
msg.addField("msg", "the hello");
extractor.runExtractor(msg);
// If the begin and end index is -1, the source field should not be modified.
assertThat(msg.getField("msg")).isEqualTo("the hello");
}
|
@GwtIncompatible("java.util.regex.Pattern")
public void containsMatch(@Nullable Pattern regex) {
checkNotNull(regex);
if (actual == null) {
failWithActual("expected a string that contains a match for", regex);
} else if (!regex.matcher(actual).find()) {
failWithActual("expected to contain a match for", regex);
}
}
|
@Test
@GwtIncompatible("Pattern")
public void stringContainsMatchStringUsesFind() {
assertThat("aba").containsMatch("[b]");
assertThat("aba").containsMatch(Pattern.compile("[b]"));
}
|
public short toShort() {
int s = (mOwnerBits.ordinal() << 6) | (mGroupBits.ordinal() << 3) | mOtherBits.ordinal();
return (short) s;
}
|
@Test
public void toShort() {
Mode mode = new Mode(Mode.Bits.ALL, Mode.Bits.READ_EXECUTE, Mode.Bits.READ_EXECUTE);
assertEquals(0755, mode.toShort());
mode = Mode.defaults();
assertEquals(0777, mode.toShort());
mode = new Mode(Mode.Bits.READ_WRITE, Mode.Bits.READ, Mode.Bits.READ);
assertEquals(0644, mode.toShort());
}
|
public static Optional<String> maybeCreateProcessingLogTopic(
final KafkaTopicClient topicClient,
final ProcessingLogConfig config,
final KsqlConfig ksqlConfig) {
if (!config.getBoolean(ProcessingLogConfig.TOPIC_AUTO_CREATE)) {
return Optional.empty();
}
final String topicName = getTopicName(config, ksqlConfig);
final int nPartitions =
config.getInt(ProcessingLogConfig.TOPIC_PARTITIONS);
final short nReplicas =
config.getShort(ProcessingLogConfig.TOPIC_REPLICATION_FACTOR);
try {
topicClient.createTopic(topicName, nPartitions, nReplicas);
} catch (final KafkaTopicExistsException e) {
if (e.getPartitionOrReplicaMismatch()) {
LOGGER.warn(String.format("Log topic %s already exists", topicName), e);
} else {
LOGGER.info(String.format("Log topic %s already exists", topicName), e);
}
}
return Optional.of(topicName);
}
|
@Test
public void shouldThrowOnUnexpectedKafkaClientError() {
// Given:
doThrow(new RuntimeException("bad"))
.when(mockTopicClient)
.createTopic(anyString(), anyInt(), anyShort());
// When:
final Exception e = assertThrows(
RuntimeException.class,
() -> ProcessingLogServerUtils.maybeCreateProcessingLogTopic(
mockTopicClient, config, ksqlConfig)
);
// Then:
assertThat(e.getMessage(), containsString("bad"));
}
|
public static ProxyBackendHandler newInstance(final DatabaseType databaseType, final String sql, final SQLStatement sqlStatement,
final ConnectionSession connectionSession, final HintValueContext hintValueContext) throws SQLException {
if (sqlStatement instanceof EmptyStatement) {
return new SkipBackendHandler(sqlStatement);
}
SQLStatementContext sqlStatementContext = sqlStatement instanceof DistSQLStatement ? new DistSQLStatementContext((DistSQLStatement) sqlStatement)
: new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(), connectionSession.getCurrentDatabaseName(), hintValueContext).bind(sqlStatement,
Collections.emptyList());
QueryContext queryContext = new QueryContext(sqlStatementContext, sql, Collections.emptyList(), hintValueContext, connectionSession.getConnectionContext(),
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData());
connectionSession.setQueryContext(queryContext);
return newInstance(databaseType, queryContext, connectionSession, false);
}
|
@Test
void assertNewInstanceWithQueryableRALStatementInTransaction() throws SQLException {
when(connectionSession.getTransactionStatus().isInTransaction()).thenReturn(true);
String sql = "SHOW TRANSACTION RULE;";
SQLStatement sqlStatement = ProxySQLComQueryParser.parse(sql, databaseType, connectionSession);
ProxyBackendHandler actual = ProxyBackendHandlerFactory.newInstance(databaseType, sql, sqlStatement, connectionSession, new HintValueContext());
assertThat(actual, instanceOf(DistSQLQueryBackendHandler.class));
}
|
public static TriggerStateMachine stateMachineForTrigger(RunnerApi.Trigger trigger) {
switch (trigger.getTriggerCase()) {
case AFTER_ALL:
return AfterAllStateMachine.of(
stateMachinesForTriggers(trigger.getAfterAll().getSubtriggersList()));
case AFTER_ANY:
return AfterFirstStateMachine.of(
stateMachinesForTriggers(trigger.getAfterAny().getSubtriggersList()));
case AFTER_END_OF_WINDOW:
return stateMachineForAfterEndOfWindow(trigger.getAfterEndOfWindow());
case ELEMENT_COUNT:
return AfterPaneStateMachine.elementCountAtLeast(
trigger.getElementCount().getElementCount());
case AFTER_SYNCHRONIZED_PROCESSING_TIME:
return AfterSynchronizedProcessingTimeStateMachine.ofFirstElement();
case DEFAULT:
return DefaultTriggerStateMachine.of();
case NEVER:
return NeverStateMachine.ever();
case ALWAYS:
return ReshuffleTriggerStateMachine.create();
case OR_FINALLY:
return stateMachineForTrigger(trigger.getOrFinally().getMain())
.orFinally(stateMachineForTrigger(trigger.getOrFinally().getFinally()));
case REPEAT:
return RepeatedlyStateMachine.forever(
stateMachineForTrigger(trigger.getRepeat().getSubtrigger()));
case AFTER_EACH:
return AfterEachStateMachine.inOrder(
stateMachinesForTriggers(trigger.getAfterEach().getSubtriggersList()));
case AFTER_PROCESSING_TIME:
return stateMachineForAfterProcessingTime(trigger.getAfterProcessingTime());
case TRIGGER_NOT_SET:
throw new IllegalArgumentException(
String.format("Required field 'trigger' not set on %s", trigger));
default:
throw new IllegalArgumentException(String.format("Unknown trigger type %s", trigger));
}
}
|
@Test
public void testOrFinallyTranslation() {
RunnerApi.Trigger trigger =
RunnerApi.Trigger.newBuilder()
.setOrFinally(
RunnerApi.Trigger.OrFinally.newBuilder()
.setMain(subtrigger1)
.setFinally(subtrigger2))
.build();
OrFinallyStateMachine machine =
(OrFinallyStateMachine) TriggerStateMachines.stateMachineForTrigger(trigger);
assertThat(machine, equalTo(submachine1.orFinally(submachine2)));
}
|
@Override
public HttpResponse sendAsIs(HttpRequest httpRequest) throws IOException {
HttpURLConnection connection = connectionFactory.openConnection(httpRequest.url());
connection.setRequestMethod(httpRequest.method().toString());
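    // Copy every request header except User-Agent, which is overridden below.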
httpRequest.headers().names().stream()
.filter(headerName -> !Ascii.equalsIgnoreCase(headerName, USER_AGENT))
.forEach(
headerName ->
httpRequest
.headers()
.getAll(headerName)
.forEach(
headerValue -> connection.setRequestProperty(headerName, headerValue)));
connection.setRequestProperty(USER_AGENT, this.userAgent);
if (ImmutableSet.of(HttpMethod.POST, HttpMethod.PUT, HttpMethod.DELETE)
.contains(httpRequest.method())) {
connection.setDoOutput(true);
ByteSource.wrap(httpRequest.requestBody().orElse(ByteString.EMPTY).toByteArray())
.copyTo(connection.getOutputStream());
}
int responseCode = connection.getResponseCode();
HttpHeaders.Builder responseHeadersBuilder = HttpHeaders.builder();
for (Map.Entry<String, List<String>> headerEntry : connection.getHeaderFields().entrySet()) {
String headerName = headerEntry.getKey();
if (!isNullOrEmpty(headerName)) {
for (String headerValue : headerEntry.getValue()) {
if (!isNullOrEmpty(headerValue)) {
responseHeadersBuilder.addHeader(headerName, headerValue);
}
}
}
}
return HttpResponse.builder()
.setStatus(HttpStatus.fromCode(responseCode))
.setHeaders(responseHeadersBuilder.build())
.setBodyBytes(ByteString.readFrom(connection.getInputStream()))
.build();
}
|
@Test
public void sendAsIs_always_returnsExpectedHttpResponse()
throws IOException, InterruptedException {
mockWebServer.setDispatcher(new SendAsIsTestDispatcher());
mockWebServer.start();
String expectedResponseBody = SendAsIsTestDispatcher.buildBody("GET", "");
HttpUrl baseUrl = mockWebServer.url("/");
String requestUrl =
new URL(
baseUrl.scheme(),
baseUrl.host(),
baseUrl.port(),
"/send-as-is/%2e%2e/%2e%2e/etc/passwd")
.toString();
HttpResponse response = httpClient.sendAsIs(get(requestUrl).withEmptyHeaders().build());
assertThat(mockWebServer.takeRequest().getPath())
.isEqualTo("/send-as-is/%2e%2e/%2e%2e/etc/passwd");
assertThat(response)
.isEqualTo(
HttpResponse.builder()
.setStatus(HttpStatus.OK)
.setHeaders(
HttpHeaders.builder()
.addHeader(CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString())
// MockWebServer always adds this response header.
.addHeader(CONTENT_LENGTH, String.valueOf(expectedResponseBody.length()))
.build())
.setBodyBytes(ByteString.copyFrom(expectedResponseBody, UTF_8))
.build());
}
|
@Override
public <T> UncommittedBundle<T> createRootBundle() {
// The DirectRunner is responsible for these elements, but they need not be encodable.
return underlying.createRootBundle();
}
|
@Test
public void rootBundleSucceedsIgnoresCoder() {
WindowedValue<Record> one = WindowedValue.valueInGlobalWindow(new Record());
WindowedValue<Record> two = WindowedValue.valueInGlobalWindow(new Record());
CommittedBundle<Record> root =
factory.<Record>createRootBundle().add(one).add(two).commit(Instant.now());
assertThat(root.getElements(), containsInAnyOrder(one, two));
}
|
@Override
public PipelineProcessConfiguration swapToObject(final YamlPipelineProcessConfiguration yamlConfig) {
return null == yamlConfig
? null
: new PipelineProcessConfiguration(
readConfigSwapper.swapToObject(yamlConfig.getRead()), writeConfigSwapper.swapToObject(yamlConfig.getWrite()), algorithmSwapper.swapToObject(yamlConfig.getStreamChannel()));
}
|
@Test
void assertSwapToObjectWithNull() {
assertNull(new YamlPipelineProcessConfigurationSwapper().swapToObject(null));
}
|
public static StructType convert(Schema schema) {
return (StructType) TypeUtil.visit(schema, new TypeToSparkType());
}
|
@Test
public void testSchemaConversionWithMetaDataColumnSchema() {
StructType structType = SparkSchemaUtil.convert(TEST_SCHEMA_WITH_METADATA_COLS);
List<AttributeReference> attrRefs =
scala.collection.JavaConverters.seqAsJavaList(structType.toAttributes());
for (AttributeReference attrRef : attrRefs) {
if (MetadataColumns.isMetadataColumn(attrRef.name())) {
Assert.assertTrue(
"metadata columns should have __metadata_col in attribute metadata",
MetadataAttribute.unapply(attrRef).isDefined());
} else {
Assert.assertFalse(
"non metadata columns should not have __metadata_col in attribute metadata",
MetadataAttribute.unapply(attrRef).isDefined());
}
}
}
|
public HttpMethod method() {
return this.httpMethod;
}
|
@Test
public void methodTest() {
ShenyuRequest.HttpMethod httpMethod = retryableException.method();
Assert.assertNotNull(httpMethod);
}
|
public static boolean isUnclosedQuote(final String line) {
// CHECKSTYLE_RULES.ON: CyclomaticComplexity
int quoteStart = -1;
for (int i = 0; i < line.length(); ++i) {
if (quoteStart < 0 && isQuoteChar(line, i)) {
quoteStart = i;
} else if (quoteStart >= 0 && isTwoQuoteStart(line, i) && !isEscaped(line, i)) {
// Together, two quotes are effectively an escaped quote and don't act as a quote character.
// Skip the next quote char, since it's coupled with the first.
i++;
} else if (quoteStart >= 0 && isQuoteChar(line, i) && !isEscaped(line, i)) {
quoteStart = -1;
}
}
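    // A dangling quote only counts as unclosed if it opens before any comment marker;
    // a quote that starts inside a comment is ignored.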
final int commentInd = line.indexOf(COMMENT);
if (commentInd < 0) {
return quoteStart >= 0;
} else if (quoteStart < 0) {
return false;
} else {
return commentInd > quoteStart;
}
}
|
@Test
public void shouldNotFindUnclosedQuote_twoQuote() {
// Given:
final String line = "some line 'this is in a quote'''";
// Then:
assertThat(UnclosedQuoteChecker.isUnclosedQuote(line), is(false));
}
|