focal_method (string, 13 to 60.9k chars) | test_case (string, 25 to 109k chars)
---|---
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain)
throws IOException, ServletException {
if (bizConfig.isAdminServiceAccessControlEnabled()) {
HttpServletRequest request = (HttpServletRequest) req;
HttpServletResponse response = (HttpServletResponse) resp;
String token = request.getHeader(HttpHeaders.AUTHORIZATION);
if (!checkAccessToken(token)) {
logger.warn("Invalid access token: {} for uri: {}", token, request.getRequestURI());
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
return;
}
}
chain.doFilter(req, resp);
}
|
@Test
public void testWithAccessControlEnabledWithNoTokenSpecifiedWithNoTokenPassed() throws Exception {
String someToken = "someToken";
when(bizConfig.isAdminServiceAccessControlEnabled()).thenReturn(true);
when(bizConfig.getAdminServiceAccessTokens()).thenReturn(null);
when(servletRequest.getHeader(HttpHeaders.AUTHORIZATION)).thenReturn(null);
authenticationFilter.doFilter(servletRequest, servletResponse, filterChain);
verify(bizConfig, times(1)).isAdminServiceAccessControlEnabled();
verify(bizConfig, times(1)).getAdminServiceAccessTokens();
verify(filterChain, times(1)).doFilter(servletRequest, servletResponse);
verify(servletResponse, never()).sendError(anyInt(), anyString());
}
|
@Override
public void configure() throws Exception {
server.addEventListener(mbeans());
server.addConnector(plainConnector());
ContextHandlerCollection handlers = new ContextHandlerCollection();
deploymentManager.setContexts(handlers);
webAppContext = createWebAppContext();
JettyCustomErrorPageHandler errorHandler = new JettyCustomErrorPageHandler();
webAppContext.setErrorHandler(errorHandler);
webAppContext.insertHandler(gzipHandler());
server.addBean(errorHandler);
server.addBean(deploymentManager);
HandlerCollection serverLevelHandlers = new HandlerCollection();
serverLevelHandlers.setHandlers(new Handler[]{handlers});
server.setHandler(serverLevelHandlers);
performCustomConfiguration();
server.setStopAtShutdown(true);
}
|
@Test
public void shouldSetErrorHandlerForServer() throws Exception {
jettyServer.configure();
verify(server).addBean(any(JettyCustomErrorPageHandler.class));
}
|
@Override
public double read() {
return gaugeSource.read();
}
|
@Test
public void whenProbeThrowsException() {
metricsRegistry.registerStaticProbe(this, "foo", MANDATORY,
(DoubleProbeFunction) o -> {
throw new RuntimeException();
});
DoubleGauge gauge = metricsRegistry.newDoubleGauge("foo");
double actual = gauge.read();
assertEquals(0, actual, 0.1);
}
|
public static long now() {
return Instant.now().toEpochMilli();
}
|
@Test
public void testNow() {
assertThat(TbDate.now()).isCloseTo(Instant.now().toEpochMilli(), Offset.offset(1000L));
}
|
@Override
public ParsedSchema fromConnectSchema(final Schema schema) {
// Bug in ProtobufData means `fromConnectSchema` throws on the second invocation if using
// default naming.
return new ProtobufData(new ProtobufDataConfig(updatedConfigs))
.fromConnectSchema(injectSchemaFullName(schema));
}
|
@Test
public void shouldApplyNullableAsOptional() {
// Given:
givenNullableAsOptional();
// When:
final ParsedSchema schema = schemaTranslator.fromConnectSchema(CONNECT_SCHEMA_WITH_NULLABLE_PRIMITIVES);
// Then:
assertThat(schema.canonicalString(), is("syntax = \"proto3\";\n"
+ "\n"
+ "message ConnectDefault1 {\n"
+ " optional int32 optional_int32 = 1 [deprecated = false];\n"
+ " optional bool optional_boolean = 2 [deprecated = false];\n"
+ " optional string optional_string = 3 [deprecated = false];\n"
+ "}\n"));
}
|
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload,
final ConnectionSession connectionSession) {
switch (commandPacketType) {
case COM_QUIT:
return new MySQLComQuitPacket();
case COM_INIT_DB:
return new MySQLComInitDbPacket(payload);
case COM_FIELD_LIST:
return new MySQLComFieldListPacket(payload);
case COM_QUERY:
return new MySQLComQueryPacket(payload);
case COM_STMT_PREPARE:
return new MySQLComStmtPreparePacket(payload);
case COM_STMT_EXECUTE:
MySQLServerPreparedStatement serverPreparedStatement =
connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex()));
return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount());
case COM_STMT_SEND_LONG_DATA:
return new MySQLComStmtSendLongDataPacket(payload);
case COM_STMT_RESET:
return new MySQLComStmtResetPacket(payload);
case COM_STMT_CLOSE:
return new MySQLComStmtClosePacket(payload);
case COM_SET_OPTION:
return new MySQLComSetOptionPacket(payload);
case COM_PING:
return new MySQLComPingPacket();
case COM_RESET_CONNECTION:
return new MySQLComResetConnectionPacket();
default:
return new MySQLUnsupportedCommandPacket(commandPacketType);
}
}
|
@Test
void assertNewInstanceWithComProcessInfoPacket() {
assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_PROCESS_INFO, payload, connectionSession), instanceOf(MySQLUnsupportedCommandPacket.class));
}
|
public URLNormalizer lowerCaseSchemeHost() {
URL u = toURL();
url = Pattern.compile(u.getProtocol(),
Pattern.CASE_INSENSITIVE).matcher(url).replaceFirst(
u.getProtocol().toLowerCase());
url = Pattern.compile(u.getHost(),
Pattern.CASE_INSENSITIVE).matcher(url).replaceFirst(
u.getHost().toLowerCase());
return this;
}
|
@Test
public void testLowerCaseSchemeHost() {
s = "HTTP://www.Example.com/Hello.html";
t = "http://www.example.com/Hello.html";
assertEquals(t, n(s).lowerCaseSchemeHost().toString());
}
|
@ScalarOperator(LESS_THAN)
@SqlType(StandardTypes.BOOLEAN)
public static boolean lessThan(@SqlType(StandardTypes.BOOLEAN) boolean left, @SqlType(StandardTypes.BOOLEAN) boolean right)
{
return !left && right;
}
|
@Test
public void testLessThan()
{
assertFunction("true < true", BOOLEAN, false);
assertFunction("true < false", BOOLEAN, false);
assertFunction("false < true", BOOLEAN, true);
assertFunction("false < false", BOOLEAN, false);
}
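Booleans order with FALSE < TRUE, so `!left && right` holds for exactly one of the four input pairs. A self-contained sketch of the truth table (class and method names here are illustrative, not part of the operator above):
public class BooleanLessThanDemo {
    static boolean lessThan(boolean left, boolean right) {
        return !left && right; // true only for (false, true)
    }

    public static void main(String[] args) {
        boolean[] values = {false, true};
        for (boolean l : values) {
            for (boolean r : values) {
                System.out.printf("%b < %b = %b%n", l, r, lessThan(l, r));
            }
        }
    }
}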
|
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
super.onDataReceived(device, data);
if (data.size() == 1) {
final Integer level = data.getIntValue(Data.FORMAT_UINT8, 0);
if (level != null && level <= AlertLevelCallback.ALERT_HIGH) {
onAlertLevelChanged(device, level);
return;
}
}
onInvalidDataReceived(device, data);
}
|
@Test
public void onAlertLevelChanged_invalid() {
final DataReceivedCallback callback = new AlertLevelDataCallback() {
@Override
public void onAlertLevelChanged(@NonNull final BluetoothDevice device, final int level) {
fail("Invalid data reported as valid");
}
@Override
public void onInvalidDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
super.onInvalidDataReceived(device, data);
assertEquals("Invalid data", 2, data.size());
}
};
final Data data = Data.opCode((byte) 0x01, (byte) 0x02);
callback.onDataReceived(null, data);
assertEquals("Correct value", 0x00, AlertLevelCallback.ALERT_NONE);
}
|
@Override
public MetadataReport getMetadataReport(URL url) {
url = url.setPath(MetadataReport.class.getName()).removeParameters(EXPORT_KEY, REFER_KEY);
String key = url.toServiceString(NAMESPACE_KEY);
MetadataReport metadataReport = serviceStoreMap.get(key);
if (metadataReport != null) {
return metadataReport;
}
// Lock the metadata access process to ensure only a single metadata report instance is created per key
lock.lock();
try {
metadataReport = serviceStoreMap.get(key);
if (metadataReport != null) {
return metadataReport;
}
boolean check = url.getParameter(CHECK_KEY, true) && url.getPort() != 0;
try {
metadataReport = createMetadataReport(url);
} catch (Exception e) {
if (!check) {
logger.warn(PROXY_FAILED_EXPORT_SERVICE, "", "", "The metadata reporter failed to initialize", e);
} else {
throw e;
}
}
if (check && metadataReport == null) {
throw new IllegalStateException("Can not create metadata Report " + url);
}
if (metadataReport != null) {
serviceStoreMap.put(key, metadataReport);
}
return metadataReport;
} finally {
// Release the lock
lock.unlock();
}
}
|
@Test
void testGetForDiffService() {
URL url1 = URL.valueOf("zookeeper://" + NetUtils.getLocalAddress().getHostName()
+ ":4444/org.apache.dubbo.TestService1?version=1.0.0&application=vic");
URL url2 = URL.valueOf("zookeeper://" + NetUtils.getLocalAddress().getHostName()
+ ":4444/org.apache.dubbo.TestService2?version=1.0.0&application=vic");
MetadataReport metadataReport1 = metadataReportFactory.getMetadataReport(url1);
MetadataReport metadataReport2 = metadataReportFactory.getMetadataReport(url2);
Assertions.assertEquals(metadataReport1, metadataReport2);
}
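The focal method is a double-checked cache: a lock-free lookup first, then a second lookup under the lock, so createMetadataReport runs at most once per key. The test passes because setPath overwrites both service paths with MetadataReport's class name, collapsing the two URLs to the same cache key. A minimal standalone sketch of the same locking pattern (all names illustrative):
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;

public class DoubleCheckedCache<K, V> {
    private final Map<K, V> cache = new ConcurrentHashMap<>();
    private final ReentrantLock lock = new ReentrantLock();

    public V get(K key, Function<K, V> factory) {
        V value = cache.get(key); // fast path, no lock
        if (value != null) {
            return value;
        }
        lock.lock();
        try {
            value = cache.get(key); // re-check under the lock
            if (value == null) {
                value = factory.apply(key); // created at most once per key
                cache.put(key, value);
            }
            return value;
        } finally {
            lock.unlock();
        }
    }
}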
|
public static JibContainerBuilder create(
String baseImageReference,
Set<Platform> platforms,
CommonCliOptions commonCliOptions,
ConsoleLogger logger)
throws InvalidImageReferenceException, FileNotFoundException {
if (baseImageReference.startsWith(DOCKER_DAEMON_IMAGE_PREFIX)) {
return Jib.from(
DockerDaemonImage.named(baseImageReference.replaceFirst(DOCKER_DAEMON_IMAGE_PREFIX, "")));
}
if (baseImageReference.startsWith(TAR_IMAGE_PREFIX)) {
return Jib.from(
TarImage.at(Paths.get(baseImageReference.replaceFirst(TAR_IMAGE_PREFIX, ""))));
}
ImageReference imageReference =
ImageReference.parse(baseImageReference.replaceFirst(REGISTRY_IMAGE_PREFIX, ""));
RegistryImage registryImage = RegistryImage.named(imageReference);
DefaultCredentialRetrievers defaultCredentialRetrievers =
DefaultCredentialRetrievers.init(
CredentialRetrieverFactory.forImage(
imageReference,
logEvent -> logger.log(logEvent.getLevel(), logEvent.getMessage())));
Credentials.getFromCredentialRetrievers(commonCliOptions, defaultCredentialRetrievers)
.forEach(registryImage::addCredentialRetriever);
JibContainerBuilder containerBuilder = Jib.from(registryImage);
if (!platforms.isEmpty()) {
containerBuilder.setPlatforms(platforms);
}
return containerBuilder;
}
|
@Test
public void testCreate_tarBase()
throws IOException, InvalidImageReferenceException, CacheDirectoryCreationException {
JibContainerBuilder containerBuilder =
ContainerBuilders.create(
"tar:///path/to.tar", Collections.emptySet(), mockCommonCliOptions, mockLogger);
BuildContext buildContext =
JibContainerBuilderTestHelper.toBuildContext(
containerBuilder, Containerizer.to(RegistryImage.named("ignored")));
ImageConfiguration imageConfiguration = buildContext.getBaseImageConfiguration();
assertThat(imageConfiguration.getTarPath()).isEqualTo(Optional.of(Paths.get("/path/to.tar")));
assertThat(imageConfiguration.getDockerClient().isPresent()).isFalse();
}
|
@Override
public boolean isEmpty() {
return size() == 0;
}
|
@Test
public void shouldReturnFalseSomePartIsNotEmpty() {
PipelineConfigs group = new MergePipelineConfigs(
new BasicPipelineConfigs(PipelineConfigMother.pipelineConfig("pipeline1")),
new BasicPipelineConfigs());
assertThat(group.isEmpty(), is(false));
}
|
public static String dumpObject(Object object) {
return new Yaml(new YamlParserConstructor(), new CustomRepresenter()).dumpAsMap(object);
}
|
@Test
void testDumpObject() {
ConfigMetadata configMetadata = new ConfigMetadata();
List<ConfigMetadata.ConfigExportItem> configMetadataItems = new ArrayList<>();
configMetadataItems.add(item1);
configMetadataItems.add(item2);
configMetadata.setMetadata(configMetadataItems);
String parseString = YamlParserUtil.dumpObject(configMetadata);
assertEquals(CONFIG_METADATA_STRING, parseString);
}
|
@VisibleForTesting
void sendNotification(RuneScapeProfile profile, PatchPrediction prediction, FarmingPatch patch)
{
final RuneScapeProfileType profileType = profile.getType();
final StringBuilder stringBuilder = new StringBuilder();
// Same RS account
if (client.getGameState() == GameState.LOGGED_IN && profile.getDisplayName().equals(client.getLocalPlayer().getName()))
{
// Same RS account but different profile type
if (profileType != RuneScapeProfileType.getCurrent(client))
{
stringBuilder.append('(')
.append(Text.titleCase(profile.getType()))
.append(") ");
}
// Same RS account AND profile falls through here so no bracketed prefix is added
}
else
{
// Different RS account AND profile type
if (profileType != RuneScapeProfileType.getCurrent(client) || client.getGameState() == GameState.LOGIN_SCREEN)
{
//Don't print profile type when logged out if it is STANDARD
if (client.getGameState() == GameState.LOGIN_SCREEN && profileType == RuneScapeProfileType.STANDARD)
{
stringBuilder.append('(')
.append(profile.getDisplayName())
.append(") ");
}
else
{
stringBuilder.append('(')
.append(profile.getDisplayName())
.append(" - ")
.append(Text.titleCase(profile.getType()))
.append(") ");
}
}
// Different RS account but same profile type
else
{
stringBuilder.append('(')
.append(profile.getDisplayName())
.append(") ");
}
}
stringBuilder
.append("Your ")
.append(prediction.getProduce().getName());
switch (prediction.getCropState())
{
case HARVESTABLE:
case GROWING:
if (prediction.getProduce().getName().toLowerCase(Locale.ENGLISH).contains("compost"))
{
stringBuilder.append(" is ready to collect in ");
}
else
{
stringBuilder.append(" is ready to harvest in ");
}
break;
case DISEASED:
stringBuilder.append(" has become diseased in ");
break;
case DEAD:
stringBuilder.append(" has died in ");
break;
default:
// EMPTY and FILLING are caught above
throw new IllegalStateException();
}
stringBuilder.append(patch.getRegion().isDefinite() ? "the " : "")
.append(patch.getRegion().getName())
.append('.');
notifier.notify(stringBuilder.toString());
}
|
@Test
public void testHarvestableNotification()
{
RuneScapeProfile runeScapeProfile = new RuneScapeProfile("Adam", RuneScapeProfileType.STANDARD, -1, null);
PatchPrediction patchPrediction = new PatchPrediction(Produce.RANARR, CropState.HARVESTABLE, 0L, 0, 0);
FarmingRegion region = new FarmingRegion("Ardougne", 10548, false,
new FarmingPatch("North", Varbits.FARMING_4771, PatchImplementation.ALLOTMENT),
new FarmingPatch("South", Varbits.FARMING_4772, PatchImplementation.ALLOTMENT),
new FarmingPatch("", Varbits.FARMING_4773, PatchImplementation.FLOWER),
new FarmingPatch("", Varbits.FARMING_4774, PatchImplementation.HERB),
new FarmingPatch("", Varbits.FARMING_4775, PatchImplementation.COMPOST)
);
FarmingPatch patch = region.getPatches()[3];
patch.setRegion(region);
farmingTracker.sendNotification(runeScapeProfile, patchPrediction, patch);
verify(notifier).notify("Your Ranarr is ready to harvest in Ardougne.");
}
|
public static String prepareUrl(@NonNull String url) {
url = url.trim();
String lowerCaseUrl = url.toLowerCase(Locale.ROOT); // protocol names are case insensitive
if (lowerCaseUrl.startsWith("feed://")) {
Log.d(TAG, "Replacing feed:// with http://");
return prepareUrl(url.substring("feed://".length()));
} else if (lowerCaseUrl.startsWith("pcast://")) {
Log.d(TAG, "Removing pcast://");
return prepareUrl(url.substring("pcast://".length()));
} else if (lowerCaseUrl.startsWith("pcast:")) {
Log.d(TAG, "Removing pcast:");
return prepareUrl(url.substring("pcast:".length()));
} else if (lowerCaseUrl.startsWith("itpc")) {
Log.d(TAG, "Replacing itpc:// with http://");
return prepareUrl(url.substring("itpc://".length()));
} else if (lowerCaseUrl.startsWith(AP_SUBSCRIBE)) {
Log.d(TAG, "Removing antennapod-subscribe://");
return prepareUrl(url.substring(AP_SUBSCRIBE.length()));
} else if (lowerCaseUrl.contains(AP_SUBSCRIBE_DEEPLINK)) {
Log.d(TAG, "Removing " + AP_SUBSCRIBE_DEEPLINK);
String query = Uri.parse(url).getQueryParameter("url");
try {
return prepareUrl(URLDecoder.decode(query, "UTF-8"));
} catch (UnsupportedEncodingException e) {
return prepareUrl(query);
}
} else if (!(lowerCaseUrl.startsWith("http://") || lowerCaseUrl.startsWith("https://"))) {
Log.d(TAG, "Adding http:// at the beginning of the URL");
return "http://" + url;
} else {
return url;
}
}
|
@Test
public void testAntennaPodSubscribeDeeplink() throws UnsupportedEncodingException {
final String feed = "http://example.org/podcast.rss";
assertEquals(feed, UrlChecker.prepareUrl("https://antennapod.org/deeplink/subscribe?url=" + feed));
assertEquals(feed, UrlChecker.prepareUrl("http://antennapod.org/deeplink/subscribe?url=" + feed));
assertEquals(feed, UrlChecker.prepareUrl("http://antennapod.org/deeplink/subscribe/?url=" + feed));
assertEquals(feed, UrlChecker.prepareUrl("https://www.antennapod.org/deeplink/subscribe?url=" + feed));
assertEquals(feed, UrlChecker.prepareUrl("http://www.antennapod.org/deeplink/subscribe?url=" + feed));
assertEquals(feed, UrlChecker.prepareUrl("http://www.antennapod.org/deeplink/subscribe/?url=" + feed));
assertEquals(feed, UrlChecker.prepareUrl("http://www.antennapod.org/deeplink/subscribe?url="
+ URLEncoder.encode(feed, "UTF-8")));
assertEquals(feed, UrlChecker.prepareUrl("http://www.antennapod.org/deeplink/subscribe?url="
+ "example.org/podcast.rss"));
assertEquals(feed, UrlChecker.prepareUrl("https://antennapod.org/deeplink/subscribe?url=" + feed + "&title=a"));
assertEquals(feed, UrlChecker.prepareUrl("https://antennapod.org/deeplink/subscribe?url="
+ URLEncoder.encode(feed) + "&title=a"));
}
|
@Override
public void route(final RouteContext routeContext, final SingleRule singleRule) {
if (routeContext.getRouteUnits().isEmpty() || sqlStatement instanceof SelectStatement) {
routeStatement(routeContext, singleRule);
} else {
RouteContext newRouteContext = new RouteContext();
routeStatement(newRouteContext, singleRule);
combineRouteContext(routeContext, newRouteContext);
}
}
|
@Test
void assertRouteInSameDataSource() throws SQLException {
SingleStandardRouteEngine engine = new SingleStandardRouteEngine(mockQualifiedTables(), null);
SingleRule singleRule = new SingleRule(new SingleRuleConfiguration(), DefaultDatabase.LOGIC_NAME, new MySQLDatabaseType(), createDataSourceMap(), Collections.emptyList());
singleRule.getAttributes().getAttribute(DataNodeRuleAttribute.class).getAllDataNodes().put("t_order", Collections.singleton(mockDataNode("t_order")));
singleRule.getAttributes().getAttribute(DataNodeRuleAttribute.class).getAllDataNodes().put("t_order_item", Collections.singleton(mockDataNode("t_order_item")));
RouteContext routeContext = new RouteContext();
engine.route(routeContext, singleRule);
List<RouteUnit> routeUnits = new ArrayList<>(routeContext.getRouteUnits());
assertThat(routeContext.getRouteUnits().size(), is(1));
assertThat(routeUnits.get(0).getDataSourceMapper().getActualName(), is("ds_0"));
assertThat(routeUnits.get(0).getTableMappers().size(), is(2));
Iterator<RouteMapper> tableMappers = routeUnits.get(0).getTableMappers().iterator();
RouteMapper tableMapper0 = tableMappers.next();
assertThat(tableMapper0.getActualName(), is("t_order"));
assertThat(tableMapper0.getLogicName(), is("t_order"));
RouteMapper tableMapper1 = tableMappers.next();
assertThat(tableMapper1.getActualName(), is("t_order_item"));
assertThat(tableMapper1.getLogicName(), is("t_order_item"));
}
|
public Boolean createNamespace(String namespaceId, String namespaceName, String namespaceDesc)
throws NacosException {
// TODO: obtain the kp to use
if (namespacePersistService.tenantInfoCountByTenantId(namespaceId) > 0) {
throw new NacosApiException(HttpStatus.INTERNAL_SERVER_ERROR.value(), ErrorCode.NAMESPACE_ALREADY_EXIST,
"namespaceId [" + namespaceId + "] already exist");
}
namespacePersistService
.insertTenantInfoAtomic(DEFAULT_KP, namespaceId, namespaceName, namespaceDesc, DEFAULT_CREATE_SOURCE,
System.currentTimeMillis());
return true;
}
|
@Test
void testCreateNamespace() throws NacosException {
when(namespacePersistService.tenantInfoCountByTenantId(anyString())).thenReturn(0);
namespaceOperationService.createNamespace(TEST_NAMESPACE_ID, TEST_NAMESPACE_NAME, TEST_NAMESPACE_DESC);
verify(namespacePersistService).insertTenantInfoAtomic(eq(DEFAULT_KP), eq(TEST_NAMESPACE_ID), eq(TEST_NAMESPACE_NAME),
eq(TEST_NAMESPACE_DESC), any(), anyLong());
}
|
@Override
public Double getValue() {
return getRatio().getValue();
}
|
@Test
public void handlesDivideByZeroIssues() throws Exception {
final RatioGauge divByZero = new RatioGauge() {
@Override
protected Ratio getRatio() {
return Ratio.of(100, 0);
}
};
assertThat(divByZero.getValue())
.isNaN();
}
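Raw IEEE-754 division would give Infinity for 100/0, not NaN, so the assertion only holds because Ratio maps a zero (or non-finite) denominator to NaN before dividing. A standalone illustration; the guard shown is an assumption about Ratio's internals, inferred from the asserted behaviour:
public class RatioNaNDemo {
    public static void main(String[] args) {
        // Plain double division: only 0/0 is NaN.
        System.out.println(100.0 / 0.0); // Infinity
        System.out.println(0.0 / 0.0);   // NaN

        // A gauge cannot report Infinity meaningfully, so a zero or
        // non-finite denominator is mapped to NaN instead.
        double numerator = 100.0;
        double denominator = 0.0;
        double value = (denominator == 0.0 || Double.isNaN(denominator) || Double.isInfinite(denominator))
                ? Double.NaN
                : numerator / denominator;
        System.out.println(value); // NaN
    }
}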
|
void fetchRepositoryAndPackageMetaData(GoPluginDescriptor pluginDescriptor) {
try {
RepositoryConfiguration repositoryConfiguration = packageRepositoryExtension.getRepositoryConfiguration(pluginDescriptor.id());
com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration packageConfiguration = packageRepositoryExtension.getPackageConfiguration(pluginDescriptor.id());
if (repositoryConfiguration == null) {
throw new RuntimeException(format("Plugin[%s] returned null repository configuration", pluginDescriptor.id()));
}
if (packageConfiguration == null) {
throw new RuntimeException(format("Plugin[%s] returned null package configuration", pluginDescriptor.id()));
}
repositoryMetadataStore.addMetadataFor(pluginDescriptor.id(), new PackageConfigurations(repositoryConfiguration));
packageMetadataStore.addMetadataFor(pluginDescriptor.id(), new PackageConfigurations(packageConfiguration));
} catch (GoPluginFrameworkException e) {
LOGGER.error("Failed to fetch package metadata for plugin : {}", pluginDescriptor.id(), e);
}
}
|
@Test
public void shouldThrowExceptionWhenNullRepositoryConfigurationReturned() {
when(packageRepositoryExtension.getRepositoryConfiguration(pluginDescriptor.id())).thenReturn(null);
try {
metadataLoader.fetchRepositoryAndPackageMetaData(pluginDescriptor);
} catch (Exception e) {
assertThat(e.getMessage(), is("Plugin[plugin-id] returned null repository configuration"));
}
assertThat(RepositoryMetadataStore.getInstance().getMetadata(pluginDescriptor.id()), nullValue());
assertThat(PackageMetadataStore.getInstance().getMetadata(pluginDescriptor.id()), nullValue());
}
|
public static Index simple(String name) {
return new Index(name, false);
}
|
@Test
public void simple_index_name_must_not_contain_upper_case_char() {
assertThatThrownBy(() -> Index.simple("Issues"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Index name must be lower-case letters or '_all': Issues");
}
|
public void onNewMetadataImage(
MetadataImage newImage,
MetadataDelta delta
) {
throwIfNotRunning();
log.debug("Scheduling applying of a new metadata image with offset {}.", newImage.offset());
// Update global image.
metadataImage = newImage;
// Push an event for each coordinator.
coordinators.keySet().forEach(tp -> {
scheduleInternalOperation("UpdateImage(tp=" + tp + ", offset=" + newImage.offset() + ")", tp, () -> {
CoordinatorContext context = coordinators.get(tp);
if (context != null) {
context.lock.lock();
try {
if (context.state == CoordinatorState.ACTIVE) {
// The new image can be applied to the coordinator only if the coordinator
// exists and is in the active state.
log.debug("Applying new metadata image with offset {} to {}.", newImage.offset(), tp);
context.coordinator.onNewMetadataImage(newImage, delta);
} else {
log.debug("Ignored new metadata image with offset {} for {} because the coordinator is not active.",
newImage.offset(), tp);
}
} finally {
context.lock.unlock();
}
} else {
log.debug("Ignored new metadata image with offset {} for {} because the coordinator does not exist.",
newImage.offset(), tp);
}
});
});
}
|
@Test
public void testOnNewMetadataImage() {
TopicPartition tp0 = new TopicPartition("__consumer_offsets", 0);
TopicPartition tp1 = new TopicPartition("__consumer_offsets", 1);
MockTimer timer = new MockTimer();
MockCoordinatorLoader loader = mock(MockCoordinatorLoader.class);
MockPartitionWriter writer = mock(MockPartitionWriter.class);
MockCoordinatorShardBuilderSupplier supplier = mock(MockCoordinatorShardBuilderSupplier.class);
MockCoordinatorShardBuilder builder = mock(MockCoordinatorShardBuilder.class);
CoordinatorRuntime<MockCoordinatorShard, String> runtime =
new CoordinatorRuntime.Builder<MockCoordinatorShard, String>()
.withTime(timer.time())
.withTimer(timer)
.withLoader(loader)
.withDefaultWriteTimeOut(DEFAULT_WRITE_TIMEOUT)
.withEventProcessor(new DirectEventProcessor())
.withPartitionWriter(writer)
.withCoordinatorShardBuilderSupplier(supplier)
.withCoordinatorRuntimeMetrics(mock(CoordinatorRuntimeMetrics.class))
.withCoordinatorMetrics(mock(CoordinatorMetrics.class))
.withSerializer(new StringSerializer())
.build();
MockCoordinatorShard coordinator0 = mock(MockCoordinatorShard.class);
MockCoordinatorShard coordinator1 = mock(MockCoordinatorShard.class);
when(supplier.get()).thenReturn(builder);
when(builder.withSnapshotRegistry(any())).thenReturn(builder);
when(builder.withLogContext(any())).thenReturn(builder);
when(builder.withTime(any())).thenReturn(builder);
when(builder.withTimer(any())).thenReturn(builder);
when(builder.withCoordinatorMetrics(any())).thenReturn(builder);
when(builder.withTopicPartition(any())).thenReturn(builder);
when(builder.withTime(any())).thenReturn(builder);
when(builder.build())
.thenReturn(coordinator0)
.thenReturn(coordinator1);
CompletableFuture<CoordinatorLoader.LoadSummary> future0 = new CompletableFuture<>();
when(loader.load(eq(tp0), argThat(coordinatorMatcher(runtime, tp0)))).thenReturn(future0);
CompletableFuture<CoordinatorLoader.LoadSummary> future1 = new CompletableFuture<>();
when(loader.load(eq(tp1), argThat(coordinatorMatcher(runtime, tp1)))).thenReturn(future1);
runtime.scheduleLoadOperation(tp0, 0);
runtime.scheduleLoadOperation(tp1, 0);
assertEquals(coordinator0, runtime.contextOrThrow(tp0).coordinator.coordinator());
assertEquals(coordinator1, runtime.contextOrThrow(tp1).coordinator.coordinator());
// Coordinator 0 is loaded. It should get the current image
// that is the empty one.
future0.complete(null);
verify(coordinator0).onLoaded(MetadataImage.EMPTY);
// Publish a new image.
MetadataDelta delta = new MetadataDelta(MetadataImage.EMPTY);
MetadataImage newImage = delta.apply(MetadataProvenance.EMPTY);
runtime.onNewMetadataImage(newImage, delta);
// Coordinator 0 should be notified about it.
verify(coordinator0).onNewMetadataImage(newImage, delta);
// Coordinator 1 is loaded. It should get the current image
// that is the new image.
future1.complete(null);
verify(coordinator1).onLoaded(newImage);
}
|
public void destroy() {
synchronized (buffers) {
destroyed = true;
buffers.clear();
buffers.notifyAll();
}
}
|
@Test
void testDestroy() throws Exception {
BatchShuffleReadBufferPool bufferPool = createBufferPool();
List<MemorySegment> buffers = bufferPool.requestBuffers();
bufferPool.recycle(buffers);
assertThat(bufferPool.isDestroyed()).isFalse();
assertThat(bufferPool.getAvailableBuffers()).isEqualTo(bufferPool.getNumTotalBuffers());
buffers = bufferPool.requestBuffers();
assertThat(bufferPool.getAvailableBuffers())
.isEqualTo(bufferPool.getNumTotalBuffers() - buffers.size());
bufferPool.destroy();
assertThat(bufferPool.isDestroyed()).isTrue();
assertThat(bufferPool.getAvailableBuffers()).isZero();
}
|
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
if (executor.isShutdown()) {
return;
}
try {
executor.getQueue().put(r);
} catch (InterruptedException e) {
log.error("Adding Queue task to thread pool failed.", e);
}
}
|
@Test
public void testRejectedExecution() {
SyncPutQueuePolicy syncPutQueuePolicy = new SyncPutQueuePolicy();
ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(1, 2,
60, TimeUnit.SECONDS, new ArrayBlockingQueue<>(1), syncPutQueuePolicy);
threadPoolExecutor.prestartAllCoreThreads();
Assert.assertSame(syncPutQueuePolicy, threadPoolExecutor.getRejectedExecutionHandler());
IntStream.range(0, 4).forEach(s -> {
threadPoolExecutor.execute(() -> ThreadUtil.sleep(200L));
});
threadPoolExecutor.shutdown();
// Busy-wait until the pool has completed all submitted tasks
while (!threadPoolExecutor.isTerminated()) {
}
Assert.assertEquals(4, threadPoolExecutor.getCompletedTaskCount());
}
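Instead of throwing RejectedExecutionException, the policy applies backpressure: the submitting thread blocks in queue.put until a worker frees a slot, and submissions after shutdown are silently dropped. A minimal self-contained version of the same idea (names illustrative):
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BlockingSubmitDemo {
    public static void main(String[] args) throws InterruptedException {
        RejectedExecutionHandler blockOnFull = (task, executor) -> {
            if (executor.isShutdown()) {
                return; // dropped after shutdown, as in the policy above
            }
            try {
                executor.getQueue().put(task); // block until the queue has room
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        };
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
                1, 1, 0L, TimeUnit.SECONDS, new ArrayBlockingQueue<>(1), blockOnFull);
        for (int i = 0; i < 4; i++) {
            final int n = i;
            pool.execute(() -> System.out.println("task " + n)); // never rejected, only delayed
        }
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
    }
}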
|
public void createMapping(Mapping mapping, boolean replace, boolean ifNotExists, SqlSecurityContext securityContext) {
Mapping resolved = resolveMapping(mapping, securityContext);
String name = resolved.name();
if (ifNotExists) {
relationsStorage.putIfAbsent(name, resolved);
} else if (replace) {
relationsStorage.put(name, resolved);
listeners.forEach(TableListener::onTableChanged);
} else if (!relationsStorage.putIfAbsent(name, resolved)) {
throw QueryException.error("Mapping or view already exists: " + name);
}
}
|
@Test
public void when_replacesMapping_then_succeeds() {
// given
Mapping mapping = mapping();
given(connectorCache.forType(mapping.connectorType())).willReturn(connector);
given(connector.typeName()).willReturn(mapping.connectorType());
given(connector.defaultObjectType()).willReturn("Dummy");
given(connector.resolveAndValidateFields(nodeEngine,
new SqlExternalResource(mapping.externalName(), mapping.dataConnection(), mapping.connectorType(), null, mapping.options()),
mapping.fields()
))
.willReturn(singletonList(new MappingField("field_name", INT)));
// when
catalog.createMapping(mapping, true, false, null);
// then
verify(relationsStorage).put(eq(mapping.name()), isA(Mapping.class));
verify(listener).onTableChanged();
}
|
public static void getSemanticPropsSingleFromString(
SingleInputSemanticProperties result,
String[] forwarded,
String[] nonForwarded,
String[] readSet,
TypeInformation<?> inType,
TypeInformation<?> outType) {
getSemanticPropsSingleFromString(
result, forwarded, nonForwarded, readSet, inType, outType, false);
}
|
@Test
void testForwardedSameTargetTwice() {
String[] forwardedFields = {"f0->f2; f1->f2"};
SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
assertThatThrownBy(
() ->
SemanticPropUtil.getSemanticPropsSingleFromString(
sp,
forwardedFields,
null,
null,
fiveIntTupleType,
fiveIntTupleType))
.isInstanceOf(InvalidSemanticAnnotationException.class);
}
|
public T addFromMandatoryProperty(Props props, String propertyName) {
String value = props.nonNullValue(propertyName);
if (!value.isEmpty()) {
String splitRegex = " (?=-)";
List<String> jvmOptions = Arrays.stream(value.split(splitRegex)).map(String::trim).toList();
checkOptionFormat(propertyName, jvmOptions);
checkMandatoryOptionOverwrite(propertyName, jvmOptions);
options.addAll(jvmOptions);
}
return castThis();
}
|
@Test
public void addFromMandatoryProperty_checks_against_mandatory_options_is_case_sensitive() {
String[] optionOverrides = {
randomPrefix,
randomPrefix + randomValue.substring(1),
randomPrefix + randomValue.substring(1),
randomPrefix + randomValue.substring(2),
randomPrefix + randomValue.substring(3),
randomPrefix + randomValue.substring(3) + randomAlphanumeric(1),
randomPrefix + randomValue.substring(3) + randomAlphanumeric(2),
randomPrefix + randomValue.substring(3) + randomAlphanumeric(3),
randomPrefix + randomValue + randomAlphanumeric(1)
};
JvmOptions underTest = new JvmOptions(ImmutableMap.of(randomPrefix, randomValue));
for (String optionOverride : optionOverrides) {
properties.setProperty(randomPropertyName, optionOverride.toUpperCase(Locale.ENGLISH));
underTest.addFromMandatoryProperty(new Props(properties), randomPropertyName);
}
}
|
@Override
@Deprecated
public String toString() {
final String from = new Throwable().getStackTrace()[1].toString();
LOGGER.warning("Use of toString() on hudson.util.Secret from " + from + ". Prefer getPlainText() or getEncryptedValue() depending your needs. see https://www.jenkins.io/redirect/hudson.util.Secret/");
return value;
}
|
@Test
public void testCompatibilityFromString() {
String tagName = Foo.class.getName().replace("$", "_-");
String xml = "<" + tagName + "><password>secret</password></" + tagName + ">";
Foo foo = new Foo();
Jenkins.XSTREAM.fromXML(xml, foo);
assertEquals("secret", Secret.toString(foo.password));
}
|
public String toLoggableString(ApiMessage message) {
MetadataRecordType type = MetadataRecordType.fromId(message.apiKey());
switch (type) {
case CONFIG_RECORD: {
if (!configSchema.isSensitive((ConfigRecord) message)) {
return message.toString();
}
ConfigRecord duplicate = ((ConfigRecord) message).duplicate();
duplicate.setValue("(redacted)");
return duplicate.toString();
}
case USER_SCRAM_CREDENTIAL_RECORD: {
UserScramCredentialRecord record = (UserScramCredentialRecord) message;
return "UserScramCredentialRecord("
+ "name=" + ((record.name() == null) ? "null" : "'" + record.name() + "'")
+ ", mechanism=" + record.mechanism()
+ ", salt=(redacted)"
+ ", storedKey=(redacted)"
+ ", serverKey=(redacted)"
+ ", iterations=" + record.iterations()
+ ")";
}
default:
return message.toString();
}
}
|
@Test
public void testTopicRecordToString() {
assertEquals("TopicRecord(name='foo', topicId=UOovKkohSU6AGdYW33ZUNg)",
REDACTOR.toLoggableString(new TopicRecord().
setTopicId(Uuid.fromString("UOovKkohSU6AGdYW33ZUNg")).
setName("foo")));
}
|
@CheckForNull
@Override
public Map<Path, Set<Integer>> branchChangedLines(String targetBranchName, Path projectBaseDir, Set<Path> changedFiles) {
return branchChangedLinesWithFileMovementDetection(targetBranchName, projectBaseDir, toChangedFileByPathsMap(changedFiles));
}
|
@Test
public void branchChangedLines_should_be_correct_when_change_is_not_committed() throws GitAPIException, IOException {
String fileName = "file-in-first-commit.xoo";
git.branchCreate().setName("b1").call();
git.checkout().setName("b1").call();
// this line is committed
addLineToFile(fileName, 3);
commit(fileName);
// this line is not committed
addLineToFile(fileName, 1);
Path filePath = worktree.resolve(fileName);
Map<Path, Set<Integer>> changedLines = newScmProvider().branchChangedLines("master", worktree, Collections.singleton(filePath));
// both lines appear correctly
assertThat(changedLines).containsExactly(entry(filePath, new HashSet<>(Arrays.asList(1, 4))));
}
|
public DateTokenConverter<Object> getPrimaryDateTokenConverter() {
Converter<Object> p = headTokenConverter;
while (p != null) {
if (p instanceof DateTokenConverter) {
DateTokenConverter<Object> dtc = (DateTokenConverter<Object>) p;
// only primary converters should be returned as
if (dtc.isPrimary())
return dtc;
}
p = p.getNext();
}
return null;
}
|
@Test
public void settingTimeZoneOptionHasAnEffect() {
ZoneId tz = ZoneId.of("Australia/Perth");
FileNamePattern fnp = new FileNamePattern("%d{hh, " + tz.getId() + "}", context);
assertEquals(tz, fnp.getPrimaryDateTokenConverter().getZoneId());
}
|
public String mapToSqlQuery(OffsetBasedPageRequest pageRequest, Sql table) {
table.with("limit", pageRequest.getLimit());
table.with("offset", pageRequest.getOffset());
return orderClause(pageRequest) + " " + dialect.limitAndOffset();
}
|
@Test
void sqlOffsetBasedPageRequestMapperMapsOrder() {
OffsetBasedPageRequest offsetBasedPageRequest = Paging.OffsetBasedPage.ascOnScheduledAt(10);
String filter = offsetBasedPageRequestMapper.mapToSqlQuery(offsetBasedPageRequest, jobTable);
verify(jobTable).with("offset", 0L);
verify(jobTable).with("limit", 10);
assertThat(filter).isEqualTo(" ORDER BY scheduledAt ASC LIMIT :limit OFFSET :offset");
}
|
public static int[] invertPermutation(int... input){
int[] target = new int[input.length];
for(int i = 0 ; i < input.length ; i++){
target[input[i]] = i;
}
return target;
}
|
@Test
public void testInvertPermutationInt(){
assertArrayEquals(
new int[]{ 2, 4, 3, 0, 1 },
ArrayUtil.invertPermutation(3, 4, 0, 2, 1)
);
}
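A permutation maps index i to input[i]; the inverse maps each value back to the index it came from, so composing the two yields the identity. A standalone round-trip check (the compose helper is illustrative, not part of ArrayUtil):
import java.util.Arrays;

public class InvertPermutationDemo {
    // Same logic as the focal method: the position of value v in the
    // input becomes the value stored at index v of the output.
    static int[] invert(int... input) {
        int[] target = new int[input.length];
        for (int i = 0; i < input.length; i++) {
            target[input[i]] = i;
        }
        return target;
    }

    // result[i] = p[q[i]]
    static int[] compose(int[] p, int[] q) {
        int[] r = new int[p.length];
        for (int i = 0; i < p.length; i++) {
            r[i] = p[q[i]];
        }
        return r;
    }

    public static void main(String[] args) {
        int[] p = {3, 4, 0, 2, 1};
        int[] inv = invert(p);
        System.out.println(Arrays.toString(inv));              // [2, 4, 3, 0, 1]
        System.out.println(Arrays.toString(compose(p, inv)));  // [0, 1, 2, 3, 4]
    }
}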
|
public static String hashpw(String password, String salt) throws IllegalArgumentException {
BCrypt B;
String real_salt;
byte passwordb[], saltb[], hashed[];
char minor = (char) 0;
int rounds, off = 0;
StringBuilder rs = new StringBuilder();
if (salt == null) {
throw new IllegalArgumentException("salt cannot be null");
}
int saltLength = salt.length();
if (saltLength < 28) {
throw new IllegalArgumentException("Invalid salt");
}
if (salt.charAt(0) != '$' || salt.charAt(1) != '2') {
throw new IllegalArgumentException("Invalid salt version");
}
if (salt.charAt(2) == '$') {
off = 3;
} else {
minor = salt.charAt(2);
if (minor != 'a' || salt.charAt(3) != '$') {
throw new IllegalArgumentException("Invalid salt revision");
}
off = 4;
}
if (saltLength - off < 25) {
throw new IllegalArgumentException("Invalid salt");
}
// Extract number of rounds
if (salt.charAt(off + 2) > '$') {
throw new IllegalArgumentException("Missing salt rounds");
}
rounds = Integer.parseInt(salt.substring(off, off + 2));
real_salt = salt.substring(off + 3, off + 25);
try {
passwordb = (password + (minor >= 'a' ? "\000" : "")).getBytes("UTF-8");
} catch (UnsupportedEncodingException uee) {
throw new AssertionError("UTF-8 is not supported");
}
saltb = decode_base64(real_salt, BCRYPT_SALT_LEN);
B = new BCrypt();
hashed = B.crypt_raw(passwordb, saltb, rounds);
rs.append("$2");
if (minor >= 'a') {
rs.append(minor);
}
rs.append("$");
if (rounds < 10) {
rs.append("0");
}
rs.append(rounds);
rs.append("$");
encode_base64(saltb, saltb.length, rs);
encode_base64(hashed, bf_crypt_ciphertext.length * 4 - 1, rs);
return rs.toString();
}
|
@Test
public void testHashpwInvalidSaltVersion2() throws IllegalArgumentException {
thrown.expect(IllegalArgumentException.class);
BCrypt.hashpw("foo", "$1a$10$.....................");
}
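The salt string itself carries the version ("$2" or "$2a"), the two-digit cost factor, and 22 base64 characters of salt, which is why hashpw validates the prefix so strictly before extracting rounds. A typical round-trip, assuming the jBCrypt-style companion methods gensalt and checkpw on the same class:
public class BCryptDemo {
    public static void main(String[] args) {
        // gensalt(10) yields something like "$2a$10$" followed by 22 salt chars;
        // hashpw re-embeds version, cost and salt in its output, so the hash
        // alone suffices to verify a candidate password later.
        String salt = BCrypt.gensalt(10);
        String hash = BCrypt.hashpw("correct horse", salt);
        System.out.println(BCrypt.checkpw("correct horse", hash)); // true
        System.out.println(BCrypt.checkpw("wrong guess", hash));   // false
    }
}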
|
public static long producerRecordSizeInBytes(final ProducerRecord<byte[], byte[]> record) {
return recordSizeInBytes(
record.key() == null ? 0 : record.key().length,
record.value() == null ? 0 : record.value().length,
record.topic(),
record.headers()
);
}
|
@Test
public void shouldComputeSizeInBytesForProducerRecordWithNullKey() {
final ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(
TOPIC,
1,
0L,
null,
VALUE,
HEADERS
);
assertThat(producerRecordSizeInBytes(record), equalTo(NULL_KEY_SIZE_IN_BYTES));
}
|
public static VersionRange parse(String rangeString) {
validateRangeString(rangeString);
Inclusiveness minVersionInclusiveness =
rangeString.startsWith("[") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE;
Inclusiveness maxVersionInclusiveness =
rangeString.endsWith("]") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE;
int commaIndex = rangeString.indexOf(',');
String minVersionString = rangeString.substring(1, commaIndex).trim();
Version minVersion;
if (minVersionString.isEmpty()) {
minVersionInclusiveness = Inclusiveness.EXCLUSIVE;
minVersion = Version.minimum();
} else {
minVersion = Version.fromString(minVersionString);
}
String maxVersionString =
rangeString.substring(commaIndex + 1, rangeString.length() - 1).trim();
Version maxVersion;
if (maxVersionString.isEmpty()) {
maxVersionInclusiveness = Inclusiveness.EXCLUSIVE;
maxVersion = Version.maximum();
} else {
maxVersion = Version.fromString(maxVersionString);
}
if (!minVersion.isLessThan(maxVersion)) {
throw new IllegalArgumentException(
String.format(
"Min version in range must be less than max version in range, got '%s'",
rangeString));
}
return builder()
.setMinVersion(minVersion)
.setMinVersionInclusiveness(minVersionInclusiveness)
.setMaxVersion(maxVersion)
.setMaxVersionInclusiveness(maxVersionInclusiveness)
.build();
}
|
@Test
public void parse_withoutComma_throwsIllegalArgumentException() {
IllegalArgumentException exception =
assertThrows(IllegalArgumentException.class, () -> VersionRange.parse("[1.0]"));
assertThat(exception).hasMessageThat().isEqualTo("Invalid range of versions, got '[1.0]'");
}
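The parser accepts Maven-style interval notation: a leading "[" or "(" sets the inclusiveness of the minimum, the trailing "]" or ")" that of the maximum, and an empty side falls back to the open-ended Version.minimum()/Version.maximum() sentinel, forced exclusive so the sentinel itself never matches. A short sketch restating those rules (the semantics comments are inferences from the code above):
// "[1.0,2.0]" -> 1.0 inclusive .. 2.0 inclusive
// "(1.0,2.0)" -> both bounds exclusive
// "[1.0,)"    -> no upper bound: max = Version.maximum(), exclusive
// "(,2.0]"    -> no lower bound: min = Version.minimum(), exclusive
VersionRange range = VersionRange.parse("[1.0,2.0)"); // 1.0 <= v < 2.0
// VersionRange.parse("[2.0,1.0]") would throw: min must be strictly less than max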
|
@Override
public RandomAccessReadView createView(long startPosition, long streamLength) throws IOException
{
throw new IOException(getClass().getName() + ".createView isn't supported.");
}
|
@Test
void testCreateView() throws IOException
{
byte[] values = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20 };
try (RandomAccessReadBuffer randomAccessSource = new RandomAccessReadBuffer(
new ByteArrayInputStream(values));
RandomAccessReadView randomAccessReadView =
new RandomAccessReadView(randomAccessSource, 10, 20))
{
randomAccessReadView.createView(0, 20);
fail("CreateView() should have throw an IOException");
}
catch (IOException exception)
{
}
}
|
@Override
public Path move(final Path source, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException {
Path target;
if(source.attributes().getCustom().containsKey(KEY_DELETE_MARKER)) {
// Delete marker, copy not supported but we have to retain the delete marker at the target
target = new Path(renamed);
target.attributes().setVersionId(null);
delete.delete(Collections.singletonMap(target, status), connectionCallback, callback);
try {
// Find version id of moved delete marker
final Path bucket = containerService.getContainer(renamed);
final VersionOrDeleteMarkersChunk marker = session.getClient().listVersionedObjectsChunked(
bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(renamed),
String.valueOf(Path.DELIMITER), 1, null, null, false);
if(marker.getItems().length == 1) {
final BaseVersionOrDeleteMarker markerObject = marker.getItems()[0];
target.attributes().withVersionId(markerObject.getVersionId()).setCustom(Collections.singletonMap(KEY_DELETE_MARKER, Boolean.TRUE.toString()));
delete.delete(Collections.singletonMap(source, status), connectionCallback, callback);
}
else {
throw new NotfoundException(String.format("Unable to find delete marker %s", renamed.getName()));
}
}
catch(ServiceException e) {
throw new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, renamed);
}
}
else {
try {
target = proxy.copy(source, renamed, status.withLength(source.attributes().getSize()), connectionCallback, new DisabledStreamListener());
// Copy source path and nullify version id to add a delete marker
delete.delete(Collections.singletonMap(new Path(source).withAttributes(new PathAttributes(source.attributes()).withVersionId(null)), status),
connectionCallback, callback);
}
catch(NotfoundException e) {
if(source.getType().contains(Path.Type.placeholder)) {
// No placeholder object to copy, create a new one at the target
target = session.getFeature(Directory.class).mkdir(renamed, new TransferStatus().withRegion(source.attributes().getRegion()));
}
else {
throw e;
}
}
}
return target;
}
|
@Test
public void testMove() throws Exception {
final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
final S3AccessControlListFeature acl = new S3AccessControlListFeature(session);
final Path test = new S3TouchFeature(session, acl).touch(new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
assertNull(test.attributes().getVersionId());
assertTrue(new S3FindFeature(session, acl).find(test));
final Path renamed = new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file));
new S3MoveFeature(session, acl).move(test, renamed, new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
assertFalse(new S3FindFeature(session, acl).find(test));
assertTrue(new S3FindFeature(session, acl).find(renamed));
final PathAttributes targetAttr = new S3AttributesFinderFeature(session, acl).find(renamed);
assertEquals(Comparison.equal, session.getHost().getProtocol().getFeature(ComparisonService.class).compare(Path.Type.file, test.attributes(), targetAttr));
assertEquals(Comparison.equal, session.getHost().getProtocol().getFeature(ComparisonService.class).compare(Path.Type.file, renamed.attributes(), targetAttr));
new S3DefaultDeleteFeature(session).delete(Collections.singletonList(renamed), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
@VisibleForTesting
static Object convertAvroField(Object avroValue, Schema schema) {
if (avroValue == null) {
return null;
}
switch (schema.getType()) {
case NULL:
case INT:
case LONG:
case DOUBLE:
case FLOAT:
case BOOLEAN:
return avroValue;
case ENUM:
case STRING:
return avroValue.toString(); // can be a String or org.apache.avro.util.Utf8
case UNION:
for (Schema s : schema.getTypes()) {
if (s.getType() == Schema.Type.NULL) {
continue;
}
return convertAvroField(avroValue, s);
}
throw new IllegalArgumentException("Found UNION schema but it doesn't contain any type");
case ARRAY:
case BYTES:
case FIXED:
case RECORD:
case MAP:
default:
throw new UnsupportedOperationException("Unsupported avro schema type=" + schema.getType()
+ " for value field schema " + schema.getName());
}
}
|
@Test
public void testConvertAvroEnum() {
Object converted = BaseJdbcAutoSchemaSink.convertAvroField("e1", createFieldAndGetSchema((builder) ->
builder.name("field").type().enumeration("myenum").symbols("e1", "e2").noDefault()));
Assert.assertEquals(converted, "e1");
}
|
public static Set<String> getFieldsForRecordExtractor(@Nullable IngestionConfig ingestionConfig, Schema schema) {
Set<String> fieldsForRecordExtractor = new HashSet<>();
if (null != ingestionConfig && (null != ingestionConfig.getSchemaConformingTransformerConfig()
|| null != ingestionConfig.getSchemaConformingTransformerV2Config())) {
// The SchemaConformingTransformer requires that all fields are extracted, indicated by returning an empty set
// here. Compared to extracting the fields specified below, extracting all fields should be a superset.
return fieldsForRecordExtractor;
}
extractFieldsFromIngestionConfig(ingestionConfig, fieldsForRecordExtractor);
extractFieldsFromSchema(schema, fieldsForRecordExtractor);
fieldsForRecordExtractor = getFieldsToReadWithComplexType(fieldsForRecordExtractor, ingestionConfig);
return fieldsForRecordExtractor;
}
|
@Test
public void testExtractFieldsAggregationConfig() {
IngestionConfig ingestionConfig = new IngestionConfig();
Schema schema = new Schema();
ingestionConfig.setAggregationConfigs(Collections.singletonList(new AggregationConfig("d1", "SUM(s1)")));
Set<String> fields = IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema);
Assert.assertEquals(fields.size(), 1);
Assert.assertTrue(fields.containsAll(Sets.newHashSet("s1")));
ingestionConfig.setAggregationConfigs(Collections.singletonList(new AggregationConfig("d1", "MIN(s1)")));
fields = IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema);
Assert.assertEquals(fields.size(), 1);
Assert.assertTrue(fields.containsAll(Sets.newHashSet("s1")));
ingestionConfig.setAggregationConfigs(Collections.singletonList(new AggregationConfig("d1", "MAX(s1)")));
fields = IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema);
Assert.assertEquals(fields.size(), 1);
Assert.assertTrue(fields.containsAll(Sets.newHashSet("s1")));
}
|
public static String[] split(String splittee, String splitChar, boolean truncate) { //NOSONAR
if (splittee == null || splitChar == null) {
return new String[0];
}
final String EMPTY_ELEMENT = "";
int spot;
final int splitLength = splitChar.length();
final String adjacentSplit = splitChar + splitChar;
final int adjacentSplitLength = adjacentSplit.length();
if (truncate) {
while ((spot = splittee.indexOf(adjacentSplit)) != -1) {
splittee = splittee.substring(0, spot + splitLength)
+ splittee.substring(spot + adjacentSplitLength, splittee.length());
}
if (splittee.startsWith(splitChar)) {
splittee = splittee.substring(splitLength);
}
if (splittee.endsWith(splitChar)) { // Remove trailing splitter
splittee = splittee.substring(0, splittee.length() - splitLength);
}
}
List<String> returns = new ArrayList<>();
final int length = splittee.length(); // This is the new length
int start = 0;
spot = 0;
while (start < length && (spot = splittee.indexOf(splitChar, start)) > -1) {
if (spot > 0) {
returns.add(splittee.substring(start, spot));
} else {
returns.add(EMPTY_ELEMENT);
}
start = spot + splitLength;
}
if (start < length) {
returns.add(splittee.substring(start));
} else if (spot == length - splitLength) {// Found splitChar at end of line
returns.add(EMPTY_ELEMENT);
}
return returns.toArray(new String[returns.size()]);
}
|
@Test
public void testSplitSSSWithEmptyInput() {
String[] out = JOrphanUtils.split("", ",", "x");
assertEquals(0, out.length);
}
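The truncate flag controls whether adjacent, leading, and trailing delimiters produce empty elements: when true they are collapsed and stripped before splitting. A small demo of the difference, assuming JMeter's org.apache.jorphan.util.JOrphanUtils is on the classpath:
import java.util.Arrays;
import org.apache.jorphan.util.JOrphanUtils;

public class SplitDemo {
    public static void main(String[] args) {
        // truncate=true collapses ",," and strips the trailing ","
        System.out.println(Arrays.toString(
                JOrphanUtils.split("a,,b,", ",", true)));  // [a, b]
        // truncate=false keeps the empty elements the delimiters bound
        System.out.println(Arrays.toString(
                JOrphanUtils.split("a,,b,", ",", false))); // [a, , b, ]
    }
}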
|
public static RpcService startRemoteMetricsRpcService(
Configuration configuration,
String externalAddress,
@Nullable String bindAddress,
RpcSystem rpcSystem)
throws Exception {
final String portRange = configuration.get(MetricOptions.QUERY_SERVICE_PORT);
final RpcSystem.RpcServiceBuilder rpcServiceBuilder =
rpcSystem.remoteServiceBuilder(configuration, externalAddress, portRange);
if (bindAddress != null) {
rpcServiceBuilder.withBindAddress(bindAddress);
}
return startMetricRpcService(configuration, rpcServiceBuilder);
}
|
@Test
void testStartMetricActorSystemRespectsThreadPriority() throws Exception {
final Configuration configuration = new Configuration();
final int expectedThreadPriority = 3;
configuration.set(MetricOptions.QUERY_SERVICE_THREAD_PRIORITY, expectedThreadPriority);
final RpcService rpcService =
MetricUtils.startRemoteMetricsRpcService(
configuration, "localhost", null, RpcSystem.load());
try {
final int threadPriority =
rpcService
.getScheduledExecutor()
.schedule(
() -> Thread.currentThread().getPriority(), 0, TimeUnit.SECONDS)
.get();
assertThat(threadPriority).isEqualTo(expectedThreadPriority);
} finally {
rpcService.closeAsync().get();
}
}
|
public QueryObjectBundle rewriteQuery(@Language("SQL") String query, QueryConfiguration queryConfiguration, ClusterType clusterType)
{
return rewriteQuery(query, queryConfiguration, clusterType, false);
}
|
@Test
public void testRewriteDecimal()
{
QueryBundle queryBundle = getQueryRewriter().rewriteQuery("SELECT decimal '1.2', decimal '1.2' d", CONFIGURATION, CONTROL);
assertCreateTableAs(queryBundle.getQuery(), "SELECT\n" +
" CAST(decimal '1.2' AS double)\n" +
", CAST(decimal '1.2' AS double) d");
}
|
@Override
public Node upload(final Path file, final Local local, final BandwidthThrottle throttle, final StreamListener listener,
final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
final ThreadPool pool = ThreadPoolFactory.get("multipart", concurrency);
try {
final InputStream in;
if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(containerService.getContainer(file))) {
in = new SDSTripleCryptEncryptorFeature(session, nodeid).encrypt(file, local.getInputStream(), status);
}
else {
in = local.getInputStream();
}
final CreateFileUploadRequest createFileUploadRequest = new CreateFileUploadRequest()
.directS3Upload(true)
.timestampModification(status.getModified() != null ? new DateTime(status.getModified()) : null)
.timestampCreation(status.getCreated() != null ? new DateTime(status.getCreated()) : null)
.size(TransferStatus.UNKNOWN_LENGTH == status.getLength() ? null : status.getLength())
.parentId(Long.parseLong(nodeid.getVersionId(file.getParent())))
.name(file.getName());
final CreateFileUploadResponse createFileUploadResponse = new NodesApi(session.getClient())
.createFileUploadChannel(createFileUploadRequest, StringUtils.EMPTY);
if(log.isDebugEnabled()) {
log.debug(String.format("upload started for %s with response %s", file, createFileUploadResponse));
}
final Map<Integer, TransferStatus> etags = new HashMap<>();
final List<PresignedUrl> presignedUrls = this.retrievePresignedUrls(createFileUploadResponse, status);
final List<Future<TransferStatus>> parts = new ArrayList<>();
try {
final String random = new UUIDRandomStringService().random();
// Full size of file
final long size = status.getLength() + status.getOffset();
long offset = 0;
long remaining = status.getLength();
for(int partNumber = 1; remaining >= 0; partNumber++) {
final long length = Math.min(Math.max((size / (MAXIMUM_UPLOAD_PARTS - 1)), partsize), remaining);
final PresignedUrl presignedUrl = presignedUrls.get(partNumber - 1);
if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(containerService.getContainer(file))) {
final Local temporary = temp.create(String.format("%s-%d", random, partNumber));
if(log.isDebugEnabled()) {
log.debug(String.format("Encrypted contents for part %d to %s", partNumber, temporary));
}
final FileBuffer buffer = new FileBuffer(temporary);
new StreamCopier(status, StreamProgress.noop).withAutoclose(false).withLimit(length)
.transfer(in, new BufferOutputStream(buffer));
parts.add(this.submit(pool, file, temporary, buffer, throttle, listener, status,
presignedUrl.getUrl(), presignedUrl.getPartNumber(), 0L, length, callback));
}
else {
parts.add(this.submit(pool, file, local, Buffer.noop, throttle, listener, status,
presignedUrl.getUrl(), presignedUrl.getPartNumber(), offset, length, callback));
}
remaining -= length;
offset += length;
if(0L == remaining) {
break;
}
}
}
finally {
in.close();
}
Interruptibles.awaitAll(parts)
.forEach(part -> etags.put(part.getPart(), part));
final CompleteS3FileUploadRequest completeS3FileUploadRequest = new CompleteS3FileUploadRequest()
.keepShareLinks(new HostPreferences(session.getHost()).getBoolean("sds.upload.sharelinks.keep"))
.resolutionStrategy(CompleteS3FileUploadRequest.ResolutionStrategyEnum.OVERWRITE);
if(status.getFilekey() != null) {
final ObjectReader reader = session.getClient().getJSON().getContext(null).readerFor(FileKey.class);
final FileKey fileKey = reader.readValue(status.getFilekey().array());
final EncryptedFileKey encryptFileKey = Crypto.encryptFileKey(
TripleCryptConverter.toCryptoPlainFileKey(fileKey),
TripleCryptConverter.toCryptoUserPublicKey(session.keyPair().getPublicKeyContainer())
);
completeS3FileUploadRequest.setFileKey(TripleCryptConverter.toSwaggerFileKey(encryptFileKey));
}
etags.forEach((key, value) -> completeS3FileUploadRequest.addPartsItem(
new S3FileUploadPart().partEtag(value.getChecksum().hash).partNumber(key)));
if(log.isDebugEnabled()) {
log.debug(String.format("Complete file upload with %s for %s", completeS3FileUploadRequest, file));
}
new NodesApi(session.getClient()).completeS3FileUpload(completeS3FileUploadRequest, createFileUploadResponse.getUploadId(), StringUtils.EMPTY);
// Poll until the server has finished processing the upload
return new SDSUploadService(session, nodeid).await(file, status, createFileUploadResponse.getUploadId()).getNode();
}
catch(CryptoSystemException | InvalidFileKeyException | InvalidKeyPairException | UnknownVersionException e) {
throw new TripleCryptExceptionMappingService().map("Upload {0} failed", e, file);
}
catch(ApiException e) {
throw new SDSExceptionMappingService(nodeid).map("Upload {0} failed", e, file);
}
catch(IOException e) {
throw new DefaultIOExceptionMappingService().map("Upload {0} failed", e, file);
}
finally {
temp.shutdown();
// Cancel future tasks
pool.shutdown(false);
}
}
|
@Test
public void testTripleCryptUploadMultipleParts() throws Exception {
final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
final SDSDirectS3UploadFeature feature = new SDSDirectS3UploadFeature(session, nodeid, new SDSDirectS3WriteFeature(session, nodeid));
final Path room = new SDSDirectoryFeature(session, nodeid).createRoom(
new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), true);
final Path test = new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
final byte[] random = RandomUtils.nextBytes(21 * 1024 * 1024);
final OutputStream out = local.getOutputStream(false);
IOUtils.write(random, out);
out.close();
final TransferStatus status = new TransferStatus();
status.setFilekey(SDSTripleCryptEncryptorFeature.generateFileKey());
status.setLength(random.length);
final SDSEncryptionBulkFeature bulk = new SDSEncryptionBulkFeature(session, nodeid);
bulk.pre(Transfer.Type.upload, Collections.singletonMap(new TransferItem(test, local), status), new DisabledConnectionCallback());
final Node node = feature.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED),
new DisabledStreamListener(), status, new DisabledLoginCallback());
assertTrue(status.isComplete());
assertNotSame(PathAttributes.EMPTY, status.getResponse());
assertTrue(new SDSFindFeature(session, nodeid).find(test));
final PathAttributes attributes = new SDSAttributesFinderFeature(session, nodeid).find(test);
assertEquals(random.length, attributes.getSize());
assertEquals(new SDSAttributesAdapter(session).toAttributes(node), attributes);
final byte[] compare = new byte[random.length];
final InputStream stream = new TripleCryptReadFeature(session, nodeid, new SDSReadFeature(session, nodeid)).read(test, new TransferStatus(), new DisabledConnectionCallback() {
@Override
public Credentials prompt(final Host bookmark, final String title, final String reason, final LoginOptions options) {
return new VaultCredentials("eth[oh8uv4Eesij");
}
});
IOUtils.readFully(stream, compare);
stream.close();
assertArrayEquals(random, compare);
new SDSDeleteFeature(session, nodeid).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
local.delete();
}
|
@Override
public synchronized void write(int b) throws IOException {
checkNotClosed();
file.writeLock().lock();
try {
if (append) {
pos = file.sizeWithoutLocking();
}
file.write(pos++, (byte) b);
file.setLastModifiedTime(fileSystemState.now());
} finally {
file.writeLock().unlock();
}
}
|
@Test
public void testWrite_singleByte_overwriting() throws IOException {
JimfsOutputStream out = newOutputStream(false);
addBytesToStore(out, 9, 8, 7, 6, 5, 4, 3);
out.write(1);
out.write(2);
out.write(3);
assertStoreContains(out, 1, 2, 3, 6, 5, 4, 3);
}
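
A minimal sketch of the append branch above, written against the public Jimfs API rather than the test fixture (an assumption about how the stream is obtained): with APPEND, pos is reset to the file size before each write, so new bytes always land at the end.

import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.*;

static void appendSketch() throws IOException {
    try (FileSystem fs = Jimfs.newFileSystem(Configuration.unix())) {
        Path path = fs.getPath("/demo.bin");
        Files.write(path, new byte[]{1, 2, 3});
        try (OutputStream out = Files.newOutputStream(path, StandardOpenOption.APPEND)) {
            out.write(4); // append branch: pos = current file size, so this byte lands at index 3
        }
        // Files.readAllBytes(path) -> {1, 2, 3, 4}
    }
}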
|
@Udf(description = "Returns the INT base raised to the INT exponent.")
public Double power(
@UdfParameter(
value = "base",
description = "the base of the power."
) final Integer base,
@UdfParameter(
value = "exponent",
description = "the exponent of the power."
) final Integer exponent
) {
return power(
base == null ? null : base.doubleValue(),
exponent == null ? null : exponent.doubleValue()
);
}
|
@Test
public void shouldHandleNegativeBase() {
assertThat(udf.power(-15, 2), closeTo(225.0, 0.000000000000001));
assertThat(udf.power(-15L, 2L), closeTo(225.0, 0.000000000000001));
assertThat(udf.power(-15.0, 2.0), closeTo(225.0, 0.000000000000001));
assertThat(udf.power(-15, 3), closeTo(-3375.0, 0.000000000000001));
assertThat(udf.power(-15L, 3L), closeTo(-3375.0, 0.000000000000001));
assertThat(udf.power(-15.0, 3.0), closeTo(-3375.0, 0.000000000000001));
}
|
@Override
public EncodedMessage transform(ActiveMQMessage message) throws Exception {
if (message == null) {
return null;
}
long messageFormat = 0;
Header header = null;
Properties properties = null;
Map<Symbol, Object> daMap = null;
Map<Symbol, Object> maMap = null;
Map<String,Object> apMap = null;
Map<Object, Object> footerMap = null;
Section body = convertBody(message);
if (message.isPersistent()) {
if (header == null) {
header = new Header();
}
header.setDurable(true);
}
byte priority = message.getPriority();
if (priority != Message.DEFAULT_PRIORITY) {
if (header == null) {
header = new Header();
}
header.setPriority(UnsignedByte.valueOf(priority));
}
String type = message.getType();
if (type != null) {
if (properties == null) {
properties = new Properties();
}
properties.setSubject(type);
}
MessageId messageId = message.getMessageId();
if (messageId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setMessageId(getOriginalMessageId(message));
}
ActiveMQDestination destination = message.getDestination();
if (destination != null) {
if (properties == null) {
properties = new Properties();
}
properties.setTo(destination.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
}
ActiveMQDestination replyTo = message.getReplyTo();
if (replyTo != null) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyTo(replyTo.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
}
String correlationId = message.getCorrelationId();
if (correlationId != null) {
if (properties == null) {
properties = new Properties();
}
try {
properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
} catch (AmqpProtocolException e) {
properties.setCorrelationId(correlationId);
}
}
long expiration = message.getExpiration();
if (expiration != 0) {
long ttl = expiration - System.currentTimeMillis();
if (ttl < 0) {
ttl = 1;
}
if (header == null) {
header = new Header();
}
header.setTtl(new UnsignedInteger((int) ttl));
if (properties == null) {
properties = new Properties();
}
properties.setAbsoluteExpiryTime(new Date(expiration));
}
long timeStamp = message.getTimestamp();
if (timeStamp != 0) {
if (properties == null) {
properties = new Properties();
}
properties.setCreationTime(new Date(timeStamp));
}
// JMSX Message Properties
int deliveryCount = message.getRedeliveryCounter();
if (deliveryCount > 0) {
if (header == null) {
header = new Header();
}
header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount));
}
String userId = message.getUserID();
if (userId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8)));
}
String groupId = message.getGroupID();
if (groupId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupId(groupId);
}
int groupSequence = message.getGroupSequence();
if (groupSequence > 0) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence));
}
final Map<String, Object> entries;
try {
entries = message.getProperties();
} catch (IOException e) {
throw JMSExceptionSupport.create(e);
}
for (Map.Entry<String, Object> entry : entries.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (key.startsWith(JMS_AMQP_PREFIX)) {
if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) {
messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class);
continue;
} else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
continue;
} else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
continue;
} else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (maMap == null) {
maMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
maMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class));
continue;
} else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class));
continue;
} else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (daMap == null) {
daMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
daMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (footerMap == null) {
footerMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
footerMap.put(Symbol.valueOf(name), value);
continue;
}
            } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX)) {
// strip off the scheduled message properties
continue;
}
// The property didn't map into any other slot so we store it in the
// Application Properties section of the message.
if (apMap == null) {
apMap = new HashMap<>();
}
apMap.put(key, value);
int messageType = message.getDataStructureType();
if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) {
                // the data structure type identifies advisory messages
Object data = message.getDataStructure();
                if (data != null) {
apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName());
}
}
}
final AmqpWritableBuffer buffer = new AmqpWritableBuffer();
encoder.setByteBuffer(buffer);
if (header != null) {
encoder.writeObject(header);
}
if (daMap != null) {
encoder.writeObject(new DeliveryAnnotations(daMap));
}
if (maMap != null) {
encoder.writeObject(new MessageAnnotations(maMap));
}
if (properties != null) {
encoder.writeObject(properties);
}
if (apMap != null) {
encoder.writeObject(new ApplicationProperties(apMap));
}
if (body != null) {
encoder.writeObject(body);
}
if (footerMap != null) {
encoder.writeObject(new Footer(footerMap));
}
return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength());
}
|
@Test
public void testConvertCompressedObjectMessageToAmqpMessageUnknownEncodingGetsDataSection() throws Exception {
ActiveMQObjectMessage outbound = createObjectMessage(TEST_OBJECT_VALUE, true);
outbound.setShortProperty(JMS_AMQP_ORIGINAL_ENCODING, AMQP_UNKNOWN);
outbound.onSend();
outbound.storeContent();
JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer();
EncodedMessage encoded = transformer.transform(outbound);
assertNotNull(encoded);
Message amqp = encoded.decode();
assertNotNull(amqp.getBody());
assertTrue(amqp.getBody() instanceof Data);
assertFalse(0 == ((Data) amqp.getBody()).getValue().getLength());
Object value = deserialize(((Data) amqp.getBody()).getValue().getArray());
assertNotNull(value);
assertTrue(value instanceof UUID);
}
|
static public void addOnConsoleListenerInstance(Context context, OnConsoleStatusListener onConsoleStatusListener) {
onConsoleStatusListener.setContext(context);
boolean effectivelyAdded = context.getStatusManager().add(onConsoleStatusListener);
if (effectivelyAdded) {
onConsoleStatusListener.start();
}
}
|
@Test
public void addOnConsoleListenerInstanceShouldNotStartSecondListener() {
OnConsoleStatusListener ocl0 = new OnConsoleStatusListener();
OnConsoleStatusListener ocl1 = new OnConsoleStatusListener();
StatusListenerConfigHelper.addOnConsoleListenerInstance(context, ocl0);
{
List<StatusListener> listeners = sm.getCopyOfStatusListenerList();
assertEquals(1, listeners.size());
assertTrue(ocl0.isStarted());
}
// second listener should not have been started
StatusListenerConfigHelper.addOnConsoleListenerInstance(context, ocl1);
{
List<StatusListener> listeners = sm.getCopyOfStatusListenerList();
assertEquals(1, listeners.size());
assertFalse(ocl1.isStarted());
}
}
|
static Set<PipelineOptionSpec> getOptionSpecs(
Class<? extends PipelineOptions> optionsInterface, boolean skipHidden) {
Iterable<Method> methods = ReflectHelpers.getClosureOfMethodsOnInterface(optionsInterface);
Multimap<String, Method> propsToGetters = getPropertyNamesToGetters(methods);
ImmutableSet.Builder<PipelineOptionSpec> setBuilder = ImmutableSet.builder();
for (Map.Entry<String, Method> propAndGetter : propsToGetters.entries()) {
String prop = propAndGetter.getKey();
Method getter = propAndGetter.getValue();
@SuppressWarnings("unchecked")
Class<? extends PipelineOptions> declaringClass =
(Class<? extends PipelineOptions>) getter.getDeclaringClass();
if (!PipelineOptions.class.isAssignableFrom(declaringClass)) {
continue;
}
if (skipHidden && declaringClass.isAnnotationPresent(Hidden.class)) {
continue;
}
setBuilder.add(PipelineOptionSpec.of(declaringClass, prop, getter));
}
return setBuilder.build();
}
|
@Test
public void testGetOptionSpecs() throws NoSuchMethodException {
Set<PipelineOptionSpec> properties =
PipelineOptionsReflector.getOptionSpecs(SimpleOptions.class, true);
assertThat(
properties,
Matchers.hasItems(
PipelineOptionSpec.of(
SimpleOptions.class, "foo", SimpleOptions.class.getDeclaredMethod("getFoo"))));
}
|
@Udf
public String extractFragment(
@UdfParameter(
value = "input",
description = "a valid URL to extract a fragment from")
final String input) {
return UrlParser.extract(input, URI::getFragment);
}
|
@Test
public void shouldThrowExceptionForMalformedURL() {
// When:
final KsqlException e = assertThrows(
KsqlException.class,
() -> extractUdf.extractFragment("http://257.1/bogus/[url")
);
// Then:
assertThat(e.getMessage(), containsString("URL input has invalid syntax: http://257.1/bogus/[url"));
}
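
For contrast with the failure case above, a small sketch of successful extraction, reusing the extractUdf instance from the test (an assumption about the fixture):

// the fragment is the part after '#'; URI::getFragment yields null when absent,
// so the UDF returns null for fragment-less URLs
assertThat(extractUdf.extractFragment("http://example.com/docs#section"), is("section"));
assertThat(extractUdf.extractFragment("http://example.com/docs"), is(nullValue()));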
|
@SuppressWarnings({ "MethodLength", "DuplicatedCode" })
public String build()
{
sb.setLength(0);
if (null != prefix && !prefix.isEmpty())
{
sb.append(prefix).append(':');
}
sb.append(ChannelUri.AERON_SCHEME).append(':').append(media).append('?');
appendParameter(sb, TAGS_PARAM_NAME, tags);
appendParameter(sb, ENDPOINT_PARAM_NAME, endpoint);
appendParameter(sb, INTERFACE_PARAM_NAME, networkInterface);
appendParameter(sb, MDC_CONTROL_PARAM_NAME, controlEndpoint);
appendParameter(sb, MDC_CONTROL_MODE_PARAM_NAME, controlMode);
appendParameter(sb, MTU_LENGTH_PARAM_NAME, mtu);
appendParameter(sb, TERM_LENGTH_PARAM_NAME, termLength);
appendParameter(sb, INITIAL_TERM_ID_PARAM_NAME, initialTermId);
appendParameter(sb, TERM_ID_PARAM_NAME, termId);
appendParameter(sb, TERM_OFFSET_PARAM_NAME, termOffset);
if (null != sessionId)
{
appendParameter(sb, SESSION_ID_PARAM_NAME, prefixTag(isSessionIdTagged, sessionId));
}
appendParameter(sb, TTL_PARAM_NAME, ttl);
appendParameter(sb, RELIABLE_STREAM_PARAM_NAME, reliable);
appendParameter(sb, LINGER_PARAM_NAME, linger);
appendParameter(sb, ALIAS_PARAM_NAME, alias);
appendParameter(sb, CONGESTION_CONTROL_PARAM_NAME, cc);
appendParameter(sb, FLOW_CONTROL_PARAM_NAME, fc);
appendParameter(sb, GROUP_TAG_PARAM_NAME, groupTag);
appendParameter(sb, SPARSE_PARAM_NAME, sparse);
appendParameter(sb, EOS_PARAM_NAME, eos);
appendParameter(sb, TETHER_PARAM_NAME, tether);
appendParameter(sb, GROUP_PARAM_NAME, group);
appendParameter(sb, REJOIN_PARAM_NAME, rejoin);
appendParameter(sb, SPIES_SIMULATE_CONNECTION_PARAM_NAME, ssc);
appendParameter(sb, SOCKET_SNDBUF_PARAM_NAME, socketSndbufLength);
appendParameter(sb, SOCKET_RCVBUF_PARAM_NAME, socketRcvbufLength);
appendParameter(sb, RECEIVER_WINDOW_LENGTH_PARAM_NAME, receiverWindowLength);
appendParameter(sb, MEDIA_RCV_TIMESTAMP_OFFSET_PARAM_NAME, mediaReceiveTimestampOffset);
appendParameter(sb, CHANNEL_RECEIVE_TIMESTAMP_OFFSET_PARAM_NAME, channelReceiveTimestampOffset);
appendParameter(sb, CHANNEL_SEND_TIMESTAMP_OFFSET_PARAM_NAME, channelSendTimestampOffset);
appendParameter(sb, RESPONSE_ENDPOINT_PARAM_NAME, responseEndpoint);
appendParameter(sb, RESPONSE_CORRELATION_ID_PARAM_NAME, responseCorrelationId);
appendParameter(sb, NAK_DELAY_PARAM_NAME, nakDelay);
appendParameter(sb, UNTETHERED_WINDOW_LIMIT_TIMEOUT_PARAM_NAME, untetheredWindowLimitTimeoutNs);
appendParameter(sb, UNTETHERED_RESTING_TIMEOUT_PARAM_NAME, untetheredRestingTimeoutNs);
appendParameter(sb, MAX_RESEND_PARAM_NAME, maxResend);
final char lastChar = sb.charAt(sb.length() - 1);
if (lastChar == '|' || lastChar == '?')
{
sb.setLength(sb.length() - 1);
}
return sb.toString();
}
|
@Test
void shouldBuildChannelBuilderUsingExistingStringWithAllTheFields()
{
final String uri = "aeron-spy:aeron:udp?endpoint=127.0.0.1:0|interface=127.0.0.1|control=127.0.0.2:0|" +
"control-mode=manual|tags=2,4|alias=foo|cc=cubic|fc=min|reliable=false|ttl=16|mtu=8992|" +
"term-length=1048576|init-term-id=5|term-offset=64|term-id=4353|session-id=2314234|gtag=3|" +
"linger=100000055000001|sparse=true|eos=true|tether=false|group=false|ssc=true|so-sndbuf=8388608|" +
"so-rcvbuf=2097152|rcv-wnd=1048576|media-rcv-ts-offset=reserved|channel-rcv-ts-offset=0|" +
"channel-snd-ts-offset=8|response-endpoint=127.0.0.3:0|response-correlation-id=12345|nak-delay=100000|" +
"untethered-window-limit-timeout=1000|untethered-resting-timeout=5000";
final ChannelUri fromString = ChannelUri.parse(uri);
final ChannelUri fromBuilder = ChannelUri.parse(new ChannelUriStringBuilder(uri).build());
assertEquals(Collections.emptyMap(), fromString.diff(fromBuilder));
}
|
@Override
public double getAndIncrement() {
return getAndAdd(1);
}
|
@Test
public void testGetAndIncrement() {
RAtomicDouble al = redisson.getAtomicDouble("test");
assertThat(al.getAndIncrement()).isEqualTo(0);
assertThat(al.get()).isEqualTo(1);
}
|
@Override
public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) {
AbstractWALEvent result;
byte[] bytes = new byte[data.remaining()];
data.get(bytes);
String dataText = new String(bytes, StandardCharsets.UTF_8);
if (decodeWithTX) {
result = decodeDataWithTX(dataText);
} else {
result = decodeDataIgnoreTX(dataText);
}
result.setLogSequenceNumber(logSequenceNumber);
return result;
}
|
@Test
void assertDecodeWithTsrange() {
MppTableData tableData = new MppTableData();
tableData.setTableName("public.test");
tableData.setOpType("INSERT");
tableData.setColumnsName(new String[]{"data"});
tableData.setColumnsType(new String[]{"tsrange"});
tableData.setColumnsVal(new String[]{"'[\"2020-01-01 00:00:00\",\"2021-01-01 00:00:00\")'"});
ByteBuffer data = ByteBuffer.wrap(JsonUtils.toJsonString(tableData).getBytes());
WriteRowEvent actual = (WriteRowEvent) new MppdbDecodingPlugin(null, false, false).decode(data, logSequenceNumber);
Object byteaObj = actual.getAfterRow().get(0);
assertThat(byteaObj, instanceOf(PGobject.class));
assertThat(byteaObj.toString(), is("[\"2020-01-01 00:00:00\",\"2021-01-01 00:00:00\")"));
}
|
static Result coerceUserList(
final Collection<Expression> expressions,
final ExpressionTypeManager typeManager
) {
return coerceUserList(expressions, typeManager, Collections.emptyMap());
}
|
@Test
public void shouldCoerceToBooleans() {
// Given:
final ImmutableList<Expression> expressions = ImmutableList.of(
new BooleanLiteral(true),
new StringLiteral("FaLsE"),
BOOL_EXPRESSION
);
// When:
final Result result = CoercionUtil.coerceUserList(expressions, typeManager);
// Then:
assertThat(result.commonType(), is(Optional.of(SqlTypes.BOOLEAN)));
assertThat(result.expressions(), is(ImmutableList.of(
new BooleanLiteral(true),
new BooleanLiteral(false),
BOOL_EXPRESSION
)));
}
|
public static int decodeConsecutiveOctets(StringBuilder dest, String s, int start)
{
final int n = s.length();
if (start >= n)
{
throw new IllegalArgumentException("Cannot decode from index " + start + " of a length-" + n + " string");
}
if (s.charAt(start) != '%')
{
throw new IllegalArgumentException("Must begin decoding from a percent-escaped octet, but found '" + s.charAt(start) + "'");
}
if (start + 3 < n && s.charAt(start + 3) == '%')
{
// If there are multiple consecutive encoded octets, decode all into bytes
ByteBuffer bb = decodeConsecutiveOctets(s, start);
int numCharsConsumed = bb.limit() * 3;
// Decode the bytes into a string
decodeBytes(dest, bb);
return numCharsConsumed;
}
else if (start + 2 < n)
{
// Else, decode just one octet
byte b = decodeOctet(s, start + 1);
decodeByte(dest, b);
return 3;
}
throw new IllegalArgumentException("Malformed percent-encoded octet at index " + start);
}
|
@Test(dataProvider = "validConsecutiveOctetData")
public void testDecodeValidConsecutiveOctets(String encoded, int startIndex, String expected, int expectedCharsConsumed)
{
StringBuilder result = new StringBuilder();
int numCharsConsumed = URIDecoderUtils.decodeConsecutiveOctets(result, encoded, startIndex);
Assert.assertEquals(result.toString(), expected);
Assert.assertEquals(numCharsConsumed, expectedCharsConsumed);
}
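
A concrete sketch of the two branches above (values checked by hand, not taken from the data provider): "%E4%B8%AD" is three consecutive octets forming one UTF-8 code point, while "%41" is a single octet.

StringBuilder dest = new StringBuilder();
int consumed = URIDecoderUtils.decodeConsecutiveOctets(dest, "%E4%B8%AD", 0);
// dest = "中", consumed = 9 (three escaped octets, three chars each)
dest.setLength(0);
consumed = URIDecoderUtils.decodeConsecutiveOctets(dest, "%41", 0);
// dest = "A", consumed = 3 (single-octet branch)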
|
public FEELFnResult<String> invoke(@ParameterName("string") String string, @ParameterName("start position") Number start) {
return invoke(string, start, null);
}
|
@Test
void invokeLengthNegative() {
FunctionTestUtil.assertResultError(substringFunction.invoke("test", 1, -3), InvalidParametersEvent.class);
}
|
@Override
public Column convert(BasicTypeDefine typeDefine) {
PhysicalColumn.PhysicalColumnBuilder builder =
PhysicalColumn.builder()
.name(typeDefine.getName())
.sourceType(typeDefine.getColumnType())
.nullable(typeDefine.isNullable())
.defaultValue(typeDefine.getDefaultValue())
.comment(typeDefine.getComment());
String oracleType = typeDefine.getDataType().toUpperCase();
switch (oracleType) {
case ORACLE_INTEGER:
builder.dataType(new DecimalType(DEFAULT_PRECISION, 0));
builder.columnLength((long) DEFAULT_PRECISION);
break;
case ORACLE_NUMBER:
Long precision = typeDefine.getPrecision();
if (precision == null || precision == 0 || precision > DEFAULT_PRECISION) {
precision = Long.valueOf(DEFAULT_PRECISION);
}
Integer scale = typeDefine.getScale();
if (scale == null) {
scale = 127;
}
if (scale <= 0) {
int newPrecision = (int) (precision - scale);
if (newPrecision == 1) {
builder.dataType(BasicType.BOOLEAN_TYPE);
} else if (newPrecision <= 9) {
builder.dataType(BasicType.INT_TYPE);
} else if (newPrecision <= 18) {
builder.dataType(BasicType.LONG_TYPE);
} else if (newPrecision < 38) {
builder.dataType(new DecimalType(newPrecision, 0));
builder.columnLength((long) newPrecision);
} else {
builder.dataType(new DecimalType(DEFAULT_PRECISION, 0));
builder.columnLength((long) DEFAULT_PRECISION);
}
} else if (scale <= DEFAULT_SCALE) {
builder.dataType(new DecimalType(precision.intValue(), scale));
builder.columnLength(precision);
builder.scale(scale);
} else {
builder.dataType(new DecimalType(precision.intValue(), DEFAULT_SCALE));
builder.columnLength(precision);
builder.scale(DEFAULT_SCALE);
}
break;
case ORACLE_FLOAT:
                // Oracle FLOAT would otherwise surface as DecimalType(10, -127),
                // which loses precision in the Spark engine
DecimalType floatDecimal = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
builder.dataType(floatDecimal);
builder.columnLength((long) floatDecimal.getPrecision());
builder.scale(floatDecimal.getScale());
break;
case ORACLE_BINARY_FLOAT:
case ORACLE_REAL:
builder.dataType(BasicType.FLOAT_TYPE);
break;
case ORACLE_BINARY_DOUBLE:
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case ORACLE_CHAR:
case ORACLE_VARCHAR:
case ORACLE_VARCHAR2:
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(typeDefine.getLength());
break;
case ORACLE_NCHAR:
case ORACLE_NVARCHAR2:
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(
TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
break;
case ORACLE_ROWID:
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(MAX_ROWID_LENGTH);
break;
case ORACLE_XML:
case ORACLE_SYS_XML:
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(typeDefine.getLength());
break;
case ORACLE_LONG:
builder.dataType(BasicType.STRING_TYPE);
// The maximum length of the column is 2GB-1
builder.columnLength(BYTES_2GB - 1);
break;
case ORACLE_CLOB:
case ORACLE_NCLOB:
builder.dataType(BasicType.STRING_TYPE);
// The maximum length of the column is 4GB-1
builder.columnLength(BYTES_4GB - 1);
break;
case ORACLE_BLOB:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
// The maximum length of the column is 4GB-1
builder.columnLength(BYTES_4GB - 1);
break;
case ORACLE_RAW:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
if (typeDefine.getLength() == null || typeDefine.getLength() == 0) {
builder.columnLength(MAX_RAW_LENGTH);
} else {
builder.columnLength(typeDefine.getLength());
}
break;
case ORACLE_LONG_RAW:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
// The maximum length of the column is 2GB-1
builder.columnLength(BYTES_2GB - 1);
break;
case ORACLE_DATE:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
break;
case ORACLE_TIMESTAMP:
case ORACLE_TIMESTAMP_WITH_TIME_ZONE:
case ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
if (typeDefine.getScale() == null) {
builder.scale(TIMESTAMP_DEFAULT_SCALE);
} else {
builder.scale(typeDefine.getScale());
}
break;
default:
throw CommonError.convertToSeaTunnelTypeError(
DatabaseIdentifier.ORACLE, oracleType, typeDefine.getName());
}
return builder.build();
}
|
@Test
public void testConvertUnsupported() {
BasicTypeDefine<Object> typeDefine =
BasicTypeDefine.builder().name("test").columnType("aaa").dataType("aaa").build();
try {
OracleTypeConverter.INSTANCE.convert(typeDefine);
Assertions.fail();
} catch (SeaTunnelRuntimeException e) {
// ignore
} catch (Throwable e) {
Assertions.fail();
}
}
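
A hedged sketch of the NUMBER branch above, assuming BasicTypeDefine's builder exposes the precision and scale fields the converter reads: a positive scale within DEFAULT_SCALE keeps the declared precision/scale, while scale <= 0 collapses small precisions to integer types.

BasicTypeDefine<Object> number = BasicTypeDefine.builder()
        .name("amount").columnType("NUMBER(10,2)").dataType("NUMBER")
        .precision(10L).scale(2).build();
Column column = OracleTypeConverter.INSTANCE.convert(number);
// column.getDataType() -> DecimalType(10, 2)

BasicTypeDefine<Object> counter = BasicTypeDefine.builder()
        .name("cnt").columnType("NUMBER(5,0)").dataType("NUMBER")
        .precision(5L).scale(0).build();
// scale <= 0 and new precision 5 <= 9 -> BasicType.INT_TYPE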
|
@Override
public void deleteTopics(final Collection<String> topicsToDelete) {
if (topicsToDelete.isEmpty()) {
return;
}
final DeleteTopicsResult deleteTopicsResult = adminClient.get().deleteTopics(topicsToDelete);
final Map<String, KafkaFuture<Void>> results = deleteTopicsResult.topicNameValues();
final List<String> failList = Lists.newArrayList();
final List<Pair<String, Throwable>> exceptionList = Lists.newArrayList();
for (final Map.Entry<String, KafkaFuture<Void>> entry : results.entrySet()) {
try {
entry.getValue().get(30, TimeUnit.SECONDS);
} catch (final Exception e) {
final Throwable rootCause = ExceptionUtils.getRootCause(e);
if (rootCause instanceof TopicDeletionDisabledException) {
throw new TopicDeletionDisabledException("Topic deletion is disabled. "
+ "To delete the topic, you must set '" + DELETE_TOPIC_ENABLE + "' to true in "
+ "the Kafka broker configuration.");
} else if (rootCause instanceof TopicAuthorizationException) {
throw new KsqlTopicAuthorizationException(
AclOperation.DELETE, Collections.singleton(entry.getKey()));
} else if (!(rootCause instanceof UnknownTopicOrPartitionException)) {
LOG.error(String.format("Could not delete topic '%s'", entry.getKey()), e);
failList.add(entry.getKey());
exceptionList.add(new Pair<>(entry.getKey(), rootCause));
}
}
}
if (!failList.isEmpty()) {
throw new KafkaDeleteTopicsException("Failed to clean up topics: "
+ String.join(",", failList), exceptionList);
}
}
|
@Test
@SuppressWarnings("unchecked")
public void shouldFailToDeleteOnTopicAuthorizationException() {
// Given:
when(adminClient.deleteTopics(any(Collection.class)))
.thenAnswer(deleteTopicsResult(new TopicAuthorizationException("error")));
// When:
final Exception e = assertThrows(
KsqlTopicAuthorizationException.class,
() -> kafkaTopicClient.deleteTopics(ImmutableList.of("theTopic"))
);
// Then:
assertThat(e.getMessage(), containsString(
"Authorization denied to Delete on topic(s): [theTopic]"));
}
|
public static int age(Date birthday, Date dateToCompare) {
Assert.notNull(birthday, "Birthday can not be null !");
if (null == dateToCompare) {
dateToCompare = date();
}
return age(birthday.getTime(), dateToCompare.getTime());
}
|
@Test
public void ageTest() {
final String d1 = "2000-02-29";
final String d2 = "2018-02-28";
final int age = DateUtil.age(DateUtil.parseDate(d1), DateUtil.parseDate(d2));
        // issue#I6E6ZG: the legal birthday itself does not count toward age; counting starts the following day
assertEquals(17, age);
}
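
A small usage sketch of the overload above, following the rule cited in the test comment (the birthday itself does not count); a null comparison date falls back to date(), i.e. "now". Values are illustrative, not from the suite.

final Date birthday = DateUtil.parseDate("2000-06-15");
assertEquals(17, DateUtil.age(birthday, DateUtil.parseDate("2018-06-15"))); // on the birthday itself
assertEquals(18, DateUtil.age(birthday, DateUtil.parseDate("2018-06-16"))); // from the next day
final int currentAge = DateUtil.age(birthday, null); // compared against date()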
|
public static int tryConfigReadLock(String groupKey) {
// Lock failed by default.
int lockResult = -1;
        // Retry acquiring the lock up to TRY_GET_LOCK_TIMES (10) times.
for (int i = TRY_GET_LOCK_TIMES; i >= 0; --i) {
lockResult = ConfigCacheService.tryReadLock(groupKey);
// The data is non-existent.
if (0 == lockResult) {
break;
}
// Success
if (lockResult > 0) {
break;
}
// Retry.
if (i > 0) {
try {
Thread.sleep(1);
} catch (Exception e) {
LogUtil.PULL_CHECK_LOG.error("An Exception occurred while thread sleep", e);
}
}
}
return lockResult;
}
|
@Test
void testTryConfigReadLock() throws Exception {
String dataId = "123testTryConfigReadLock";
String group = "1234";
String tenant = "1234";
CacheItem cacheItem = Mockito.mock(CacheItem.class);
SimpleReadWriteLock lock = Mockito.mock(SimpleReadWriteLock.class);
Mockito.when(cacheItem.getRwLock()).thenReturn(lock);
String groupKey = GroupKey2.getKey(dataId, group, tenant);
Field cache1 = ConfigCacheService.class.getDeclaredField("CACHE");
cache1.setAccessible(true);
ConcurrentHashMap<String, CacheItem> cache = (ConcurrentHashMap<String, CacheItem>) cache1.get(null);
cache.put(groupKey, cacheItem);
        // lock == 0: cache item does not exist
int readLock = ConfigCacheService.tryConfigReadLock(groupKey + "3245");
assertEquals(0, readLock);
        // lock == 1: lock acquired successfully
Mockito.when(lock.tryReadLock()).thenReturn(true);
int readLockSuccess = ConfigCacheService.tryConfigReadLock(groupKey);
assertEquals(1, readLockSuccess);
        // lock == -1: fails after exhausting all retries
OngoingStubbing<Boolean> when = Mockito.when(lock.tryReadLock());
for (int i = 0; i < 10; i++) {
when = when.thenReturn(false);
}
int readLockFail = ConfigCacheService.tryConfigReadLock(groupKey);
assertEquals(-1, readLockFail);
        // lock == 1: succeeds after several retries
OngoingStubbing<Boolean> when2 = Mockito.when(lock.tryReadLock());
for (int i = 0; i < 5; i++) {
when2 = when2.thenReturn(false);
}
when2.thenReturn(true);
int readLockSuccessAfterRetry = ConfigCacheService.tryConfigReadLock(groupKey);
assertEquals(1, readLockSuccessAfterRetry);
}
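
A hedged caller-side sketch of how the three return values above are typically handled (releaseReadLock is assumed to be the matching ConfigCacheService helper):

int lock = ConfigCacheService.tryConfigReadLock(groupKey);
if (lock < 0) {
    // -1: still write-locked after all retries; fail fast
} else if (lock == 0) {
    // 0: the cache item does not exist
} else {
    try {
        // > 0: read the cached config under the read lock
    } finally {
        ConfigCacheService.releaseReadLock(groupKey);
    }
}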
|
public static List<String> mergeValues(
ExtensionDirector extensionDirector, Class<?> type, String cfg, List<String> def) {
List<String> defaults = new ArrayList<>();
if (def != null) {
for (String name : def) {
if (extensionDirector.getExtensionLoader(type).hasExtension(name)) {
defaults.add(name);
}
}
}
List<String> names = new ArrayList<>();
// add initial values
String[] configs = (cfg == null || cfg.trim().length() == 0) ? new String[0] : COMMA_SPLIT_PATTERN.split(cfg);
for (String config : configs) {
if (config != null && config.trim().length() > 0) {
names.add(config);
}
}
        // "-default" is not present, so merge in the default extensions
if (!names.contains(REMOVE_VALUE_PREFIX + DEFAULT_KEY)) {
// add default extension
int i = names.indexOf(DEFAULT_KEY);
if (i > 0) {
names.addAll(i, defaults);
} else {
names.addAll(0, defaults);
}
names.remove(DEFAULT_KEY);
} else {
names.remove(DEFAULT_KEY);
}
// merge - configuration
for (String name : new ArrayList<String>(names)) {
if (name.startsWith(REMOVE_VALUE_PREFIX)) {
names.remove(name);
names.remove(name.substring(1));
}
}
return names;
}
|
@Test
void testMergeValuesDeleteDefault() {
List<String> merged = ConfigUtils.mergeValues(
ApplicationModel.defaultModel().getExtensionDirector(),
ThreadPool.class,
"-default",
asList("fixed", "default.limited", "cached"));
assertEquals(Collections.emptyList(), merged);
}
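
A hedged trace of the removal rule above, assuming both "fixed" and "cached" are registered ThreadPool extensions:

List<String> merged = ConfigUtils.mergeValues(
        ApplicationModel.defaultModel().getExtensionDirector(),
        ThreadPool.class,
        "-fixed",                      // user config: remove "fixed"
        asList("fixed", "cached"));    // defaults
// defaults are prepended first, then "-fixed" strips both itself and
// "fixed", leaving [cached]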
|
public static StatementExecutorResponse execute(
final ConfiguredStatement<ListTopics> statement,
final SessionProperties sessionProperties,
final KsqlExecutionContext executionContext,
final ServiceContext serviceContext
) {
final KafkaTopicClient client = serviceContext.getTopicClient();
final Map<String, TopicDescription> topicDescriptions = listTopics(client, statement);
if (statement.getStatement().getShowExtended()) {
final KafkaConsumerGroupClient consumerGroupClient
= new KafkaConsumerGroupClientImpl(serviceContext::getAdminClient);
final Map<String, List<Integer>> topicConsumersAndGroupCount
= getTopicConsumerAndGroupCounts(consumerGroupClient);
final List<KafkaTopicInfoExtended> topicInfoExtendedList = topicDescriptions.values()
.stream().map(desc ->
topicDescriptionToTopicInfoExtended(desc, topicConsumersAndGroupCount))
.collect(Collectors.toList());
return StatementExecutorResponse.handled(Optional.of(
new KafkaTopicsListExtended(statement.getMaskedStatementText(), topicInfoExtendedList)));
} else {
final List<KafkaTopicInfo> topicInfoList = topicDescriptions.values()
.stream().map(ListTopicsExecutor::topicDescriptionToTopicInfo)
.collect(Collectors.toList());
return StatementExecutorResponse.handled(Optional.of(
new KafkaTopicsList(statement.getMaskedStatementText(), topicInfoList)));
}
}
|
@Test
public void shouldListKafkaTopicsIncludingInternalTopics() {
// Given:
engine.givenKafkaTopic("topic1");
engine.givenKafkaTopic("topic2");
engine.givenKafkaTopic("_confluent_any_topic");
// When:
final KafkaTopicsList topicsList =
(KafkaTopicsList) CustomExecutors.LIST_TOPICS.execute(
engine.configure("LIST ALL TOPICS;"),
mock(SessionProperties.class),
engine.getEngine(),
serviceContext
).getEntity().orElseThrow(IllegalStateException::new);
// Then:
assertThat(topicsList.getTopics(), containsInAnyOrder(
new KafkaTopicInfo("topic1", ImmutableList.of(1)),
new KafkaTopicInfo("topic2", ImmutableList.of(1)),
new KafkaTopicInfo("_confluent_any_topic", ImmutableList.of(1))
));
}
|
@Override
public Deserializer deserializer(String topic, Target type) {
return new Deserializer() {
@SneakyThrows
@Override
public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
try (var reader = new DataFileReader<>(new SeekableByteArrayInput(data), new GenericDatumReader<>())) {
if (!reader.hasNext()) {
            // edge case: only the Avro file header is present in the payload;
            // return null in this case
return new DeserializeResult(null, DeserializeResult.Type.JSON, Map.of());
}
Object avroObj = reader.next();
String jsonValue = new String(AvroSchemaUtils.toJson(avroObj));
return new DeserializeResult(jsonValue, DeserializeResult.Type.JSON, Map.of());
}
}
};
}
|
@Test
void deserializerParsesAvroDataWithEmbeddedSchema() throws Exception {
Schema schema = new Schema.Parser().parse("""
{
"type": "record",
"name": "TestAvroRecord",
"fields": [
{ "name": "field1", "type": "string" },
{ "name": "field2", "type": "int" }
]
}
"""
);
GenericRecord record = new GenericData.Record(schema);
record.put("field1", "this is test msg");
record.put("field2", 100500);
String jsonRecord = new String(AvroSchemaUtils.toJson(record));
byte[] serializedRecordBytes = serializeAvroWithEmbeddedSchema(record);
var deserializer = avroEmbeddedSerde.deserializer("anyTopic", Serde.Target.KEY);
DeserializeResult result = deserializer.deserialize(null, serializedRecordBytes);
assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
assertThat(result.getAdditionalProperties()).isEmpty();
assertJsonEquals(jsonRecord, result.getResult());
}
|
public static String convertToBitcoinURI(Address address, Coin amount,
String label, String message) {
return convertToBitcoinURI(address.network(), address.toString(), amount, label, message);
}
|
@Test
public void testConvertToBitcoinURI() {
Address goodAddress = AddressParser.getDefault(MAINNET).parseAddress(MAINNET_GOOD_ADDRESS);
// simple example
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello&message=AMessage", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("12.34"), "Hello", "AMessage"));
// example with spaces, ampersand and plus
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello%20World&message=Mess%20%26%20age%20%2B%20hope", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("12.34"), "Hello World", "Mess & age + hope"));
// no amount, label present, message present
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?label=Hello&message=glory", BitcoinURI.convertToBitcoinURI(goodAddress, null, "Hello", "glory"));
// amount present, no label, message present
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=0.1&message=glory", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("0.1"), null, "glory"));
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=0.1&message=glory", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("0.1"), "", "glory"));
// amount present, label present, no message
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("12.34"), "Hello", null));
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("12.34"), "Hello", ""));
// amount present, no label, no message
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=1000", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("1000"), null, null));
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?amount=1000", BitcoinURI.convertToBitcoinURI(goodAddress, parseCoin("1000"), "", ""));
// no amount, label present, no message
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?label=Hello", BitcoinURI.convertToBitcoinURI(goodAddress, null, "Hello", null));
// no amount, no label, message present
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?message=Agatha", BitcoinURI.convertToBitcoinURI(goodAddress, null, null, "Agatha"));
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS + "?message=Agatha", BitcoinURI.convertToBitcoinURI(goodAddress, null, "", "Agatha"));
// no amount, no label, no message
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS, BitcoinURI.convertToBitcoinURI(goodAddress, null, null, null));
assertEquals("bitcoin:" + MAINNET_GOOD_ADDRESS, BitcoinURI.convertToBitcoinURI(goodAddress, null, "", ""));
// different scheme
NetworkParameters alternativeParameters = new MockAltNetworkParams();
String mockNetGoodAddress = MockAltNetworkParams.MOCKNET_GOOD_ADDRESS;
Networks.register(alternativeParameters);
try {
assertEquals("mockcoin:" + mockNetGoodAddress + "?amount=12.34&label=Hello&message=AMessage",
BitcoinURI.convertToBitcoinURI(LegacyAddress.fromBase58(mockNetGoodAddress, alternativeParameters.network()), parseCoin("12.34"), "Hello", "AMessage"));
} finally {
Networks.unregister(alternativeParameters);
}
}
|
@SuppressWarnings("checkstyle:npathcomplexity")
public PartitionServiceState getPartitionServiceState() {
PartitionServiceState state = getPartitionTableState();
if (state != SAFE) {
return state;
}
if (!checkAndTriggerReplicaSync()) {
return REPLICA_NOT_SYNC;
}
return SAFE;
}
|
@Test
public void shouldNotBeSafe_whenReplicasAreNotSync() {
Config config = new Config();
ServiceConfig serviceConfig = TestMigrationAwareService.createServiceConfig(1);
ConfigAccessor.getServicesConfig(config).addServiceConfig(serviceConfig);
TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
HazelcastInstance hz = factory.newHazelcastInstance(config);
HazelcastInstance hz2 = factory.newHazelcastInstance(config);
InternalPartitionServiceImpl partitionService1 = getNode(hz).partitionService;
InternalPartitionServiceImpl partitionService2 = getNode(hz2).partitionService;
int maxPermits = drainAllReplicaSyncPermits(partitionService1);
int maxPermits2 = drainAllReplicaSyncPermits(partitionService2);
assertEquals(maxPermits, maxPermits2);
warmUpPartitions(hz, hz2);
setBackupPacketDropFilter(hz, 100);
setBackupPacketDropFilter(hz2, 100);
NodeEngine nodeEngine = getNode(hz).nodeEngine;
for (int i = 0; i < nodeEngine.getPartitionService().getPartitionCount(); i++) {
Operation op = new TestPutOperationWithAsyncBackup(i);
nodeEngine.getOperationService().invokeOnPartition(null, op, i).join();
}
final PartitionReplicaStateChecker replicaStateChecker1 = partitionService1.getPartitionReplicaStateChecker();
final PartitionReplicaStateChecker replicaStateChecker2 = partitionService2.getPartitionReplicaStateChecker();
assertEquals(PartitionServiceState.REPLICA_NOT_SYNC, replicaStateChecker1.getPartitionServiceState());
assertEquals(PartitionServiceState.REPLICA_NOT_SYNC, replicaStateChecker2.getPartitionServiceState());
addReplicaSyncPermits(partitionService1, maxPermits);
addReplicaSyncPermits(partitionService2, maxPermits);
assertTrueEventually(() -> {
assertEquals(PartitionServiceState.SAFE, replicaStateChecker1.getPartitionServiceState());
assertEquals(PartitionServiceState.SAFE, replicaStateChecker2.getPartitionServiceState());
// assert no leftovers of PartitionReplicaSyncRequestOffloadable
assertEquals(0, getAsyncOperationsCount(hz));
assertEquals(0, getAsyncOperationsCount(hz2));
});
}
|
public static List<Interval> normalize(List<Interval> intervals) {
if (intervals.size() <= 1) {
return intervals;
}
List<Interval> valid =
intervals.stream().filter(Interval::isValid).collect(Collectors.toList());
if (valid.size() <= 1) {
return valid;
}
// 2 or more intervals
List<Interval> result = new ArrayList<>(valid.size());
Collections.sort(valid);
long start = valid.get(0).getStartMs();
long end = valid.get(0).getEndMs();
// scan entire list from the second interval
for (int i = 1; i < valid.size(); i++) {
Interval interval = valid.get(i);
if (interval.getStartMs() <= end) {
// continue with the same interval
end = Math.max(end, interval.getEndMs());
} else {
        // These are disjoint; add the previous interval
result.add(Interval.between(start, end));
start = interval.getStartMs();
end = interval.getEndMs();
}
}
// add the last interval
result.add(Interval.between(start, end));
if (result.isEmpty()) {
return Collections.emptyList();
}
return result;
}
|
@Test
public void normalizeMerge() {
List<Interval> i;
i = IntervalUtils.normalize(Arrays.asList(Interval.between(1, 3), Interval.between(2, 4)));
Assert.assertEquals(1, i.size());
Assert.assertEquals(Interval.between(1, 4), i.get(0));
i = IntervalUtils.normalize(Arrays.asList(Interval.between(2, 4), Interval.between(1, 3)));
Assert.assertEquals(1, i.size());
Assert.assertEquals(Interval.between(1, 4), i.get(0));
i = IntervalUtils.normalize(Arrays.asList(Interval.between(2, 3), Interval.between(1, 4)));
Assert.assertEquals(1, i.size());
Assert.assertEquals(Interval.between(1, 4), i.get(0));
i = IntervalUtils.normalize(Arrays.asList(Interval.between(2, 4), Interval.between(1, 2)));
Assert.assertEquals(1, i.size());
Assert.assertEquals(Interval.between(1, 4), i.get(0));
i = IntervalUtils.normalize(Arrays.asList(Interval.ALWAYS, Interval.NEVER));
Assert.assertEquals(1, i.size());
Assert.assertEquals(Interval.ALWAYS, i.get(0));
i = IntervalUtils.normalize(Arrays.asList(Interval.ALWAYS, Interval.between(2, 4),
Interval.NEVER));
Assert.assertEquals(1, i.size());
Assert.assertEquals(Interval.ALWAYS, i.get(0));
}
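
For contrast with the merging cases above, a short sketch of the disjoint branch: non-overlapping intervals are kept separate and come back sorted by start.

List<Interval> out = IntervalUtils.normalize(
        Arrays.asList(Interval.between(5, 6), Interval.between(1, 2)));
// out = [Interval.between(1, 2), Interval.between(5, 6)]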
|
public static SourceConfig validateUpdate(SourceConfig existingConfig, SourceConfig newConfig) {
SourceConfig mergedConfig = clone(existingConfig);
if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
throw new IllegalArgumentException("Tenants differ");
}
if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
throw new IllegalArgumentException("Namespaces differ");
}
if (!existingConfig.getName().equals(newConfig.getName())) {
throw new IllegalArgumentException("Function Names differ");
}
if (!StringUtils.isEmpty(newConfig.getClassName())) {
mergedConfig.setClassName(newConfig.getClassName());
}
if (!StringUtils.isEmpty(newConfig.getTopicName())) {
mergedConfig.setTopicName(newConfig.getTopicName());
}
if (!StringUtils.isEmpty(newConfig.getSerdeClassName())) {
mergedConfig.setSerdeClassName(newConfig.getSerdeClassName());
}
if (!StringUtils.isEmpty(newConfig.getSchemaType())) {
mergedConfig.setSchemaType(newConfig.getSchemaType());
}
if (newConfig.getConfigs() != null) {
mergedConfig.setConfigs(newConfig.getConfigs());
}
if (newConfig.getSecrets() != null) {
mergedConfig.setSecrets(newConfig.getSecrets());
}
if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
mergedConfig.setLogTopic(newConfig.getLogTopic());
}
if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
.equals(existingConfig.getProcessingGuarantees())) {
throw new IllegalArgumentException("Processing Guarantees cannot be altered");
}
if (newConfig.getParallelism() != null) {
mergedConfig.setParallelism(newConfig.getParallelism());
}
if (newConfig.getResources() != null) {
mergedConfig
.setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
}
if (!StringUtils.isEmpty(newConfig.getArchive())) {
mergedConfig.setArchive(newConfig.getArchive());
}
if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
}
if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
}
if (isBatchSource(existingConfig) != isBatchSource(newConfig)) {
throw new IllegalArgumentException("Sources cannot be update between regular sources and batchsource");
}
if (newConfig.getBatchSourceConfig() != null) {
validateBatchSourceConfigUpdate(existingConfig.getBatchSourceConfig(), newConfig.getBatchSourceConfig());
mergedConfig.setBatchSourceConfig(newConfig.getBatchSourceConfig());
}
if (newConfig.getProducerConfig() != null) {
mergedConfig.setProducerConfig(newConfig.getProducerConfig());
}
return mergedConfig;
}
|
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "DiscoverTriggerer class cannot be updated for batchsources")
public void testMergeDifferentBatchTriggerer() {
SourceConfig sourceConfig = createSourceConfigWithBatch();
BatchSourceConfig batchSourceConfig = createBatchSourceConfig();
batchSourceConfig.setDiscoveryTriggererClassName("SomeOtherClassName");
SourceConfig newSourceConfig = createUpdatedSourceConfig("batchSourceConfig", batchSourceConfig);
SourceConfigUtils.validateUpdate(sourceConfig, newSourceConfig);
}
|
static boolean shouldRefresh(AwsTemporaryCredentials credentials) {
Instant expiration = Optional.ofNullable(credentials).map(AwsTemporaryCredentials::expiration).orElse(Instant.EPOCH);
return Duration.between(Instant.now(), expiration).toMinutes() < MIN_EXPIRY.toMinutes();
}
|
@Test
void refreshes_correctly() {
Clock clock = Clock.systemUTC();
// Does not require refresh when expires in 10 minutes
assertFalse(AwsCredentials.shouldRefresh(getCredentials(clock.instant().plus(Duration.ofMinutes(10)))));
// Requires refresh when expires in 3 minutes
assertTrue(AwsCredentials.shouldRefresh(getCredentials(clock.instant().plus(Duration.ofMinutes(3)))));
// Requires refresh when expired
assertTrue(AwsCredentials.shouldRefresh(getCredentials(clock.instant().minus(Duration.ofMinutes(1)))));
// Refreshes when no credentials provided
assertTrue(AwsCredentials.shouldRefresh(null));
}
|
public static String replaceAll(CharSequence content, String regex, String replacementTemplate) {
final Pattern pattern = Pattern.compile(regex, Pattern.DOTALL);
return replaceAll(content, pattern, replacementTemplate);
}
|
@Test
public void replaceAllTest2() {
        // replace 1234 with ->1234<- here
final String replaceAll = ReUtil.replaceAll(this.content, "(\\d+)", parameters -> "->" + parameters.group(1) + "<-");
assertEquals("ZZZaaabbbccc中文->1234<-", replaceAll);
}
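
A minimal sketch of the template overload above (the test exercises the functional overload instead); "$1" references the first capture group, and Pattern.DOTALL lets '.' span newlines. Values here are illustrative, not taken from the suite.

final String result = ReUtil.replaceAll("ZZZaaabbbccc中文1234", "(\\d+)", "->$1<-");
// result = "ZZZaaabbbccc中文->1234<-"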
|
@SuppressWarnings({"checkstyle:NPathComplexity", "checkstyle:CyclomaticComplexity"})
@Override
public void shutdown() {
log.info("ksqlDB shutdown called");
try {
pullQueryMetrics.ifPresent(PullQueryExecutorMetrics::close);
} catch (final Exception e) {
log.error("Exception while waiting for pull query metrics to close", e);
}
try {
scalablePushQueryMetrics.ifPresent(ScalablePushQueryMetrics::close);
} catch (final Exception e) {
log.error("Exception while waiting for scalable push query metrics to close", e);
}
localCommands.ifPresent(lc -> {
try {
lc.close();
} catch (final Exception e) {
log.error("Exception while closing local commands", e);
}
});
try {
ksqlEngine.close();
} catch (final Exception e) {
log.error("Exception while waiting for Ksql Engine to close", e);
}
try {
commandRunner.close();
} catch (final Exception e) {
log.error("Exception while waiting for CommandRunner thread to complete", e);
}
try {
serviceContext.close();
} catch (final Exception e) {
log.error("Exception while closing services", e);
}
try {
securityExtension.close();
} catch (final Exception e) {
log.error("Exception while closing security extension", e);
}
if (apiServer != null) {
apiServer.stop();
apiServer = null;
}
if (vertx != null) {
try {
final CountDownLatch latch = new CountDownLatch(1);
vertx.close(ar -> latch.countDown());
latch.await();
} catch (InterruptedException e) {
log.error("Exception while closing vertx", e);
}
}
if (oldApiWebsocketExecutor != null) {
oldApiWebsocketExecutor.shutdown();
}
shutdownAdditionalAgents();
log.info("ksqlDB shutdown complete");
}
|
@Test
public void shouldCloseServiceContextOnClose() {
// When:
app.shutdown();
// Then:
verify(serviceContext).close();
}
|
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
String handle = rule.getHandle();
RewriteHandle rewriteHandle = RewritePluginDataHandler.CACHED_HANDLE.get().obtainHandle(CacheKeyUtils.INST.getKey(rule));
if (Objects.isNull(rewriteHandle)) {
LOG.error("uri rewrite rule can not configuration:{}", handle);
return chain.execute(exchange);
}
String rewriteUri = this.getRawPath(exchange);
        // default to 100 percent for compatibility with older versions
final Integer percentage = Optional.ofNullable(rewriteHandle.getPercentage()).orElse(100);
if (StringUtils.isNoneBlank(rewriteHandle.getRegex(), rewriteHandle.getReplace())
&& ThreadLocalRandom.current().nextInt(100) < percentage) {
rewriteUri = rewriteHandle.getReplace().contains("{")
? PathMatchUtils.replaceAll(rewriteHandle.getReplace(), rewriteHandle.getRegex().substring(rewriteHandle.getRegex().indexOf("{")),
rewriteUri.substring(rewriteHandle.getRegex().indexOf("{") + 1))
: rewriteUri.replaceAll(rewriteHandle.getRegex(), rewriteHandle.getReplace());
Map<String, Object> attributes = exchange.getAttributes();
if (Optional.ofNullable(rewriteHandle.getRewriteMetaData()).orElse(false)) {
// when the rewritten uri crosses plugins, this is necessary
final String contextPath = Optional.ofNullable((String) exchange.getAttribute(Constants.REWRITE_CONTEXT_PATH))
.orElseGet(() -> exchange.getAttribute(Constants.CONTEXT_PATH));
MetaData metaData = MetaDataCache.getInstance().obtain(contextPath + rewriteUri);
Optional.ofNullable(exchange.getAttribute(Constants.META_DATA))
.ifPresent(metadata -> attributes.put(Constants.OLD_CONTEXT_PATH_META_DATA, metadata));
if (Objects.nonNull(metaData)) {
attributes.put(Constants.META_DATA, metaData);
}
ShenyuContext context = exchange.getAttribute(Constants.CONTEXT);
assert context != null;
if (Objects.nonNull(metaData) && Boolean.TRUE.equals(metaData.getEnabled())) {
context.setRpcType(metaData.getRpcType());
} else {
context.setRpcType(RpcTypeEnum.HTTP.getName());
}
}
attributes.put(Constants.REWRITE_URI, rewriteUri);
}
return chain.execute(exchange);
}
|
@Test
public void testRewritePlugin() {
RuleData data = new RuleData();
data.setHandle("{\"regex\":\"\",\"replace\":\"\"}");
RewriteHandle rewriteHandle = GsonUtils.getGson().fromJson(data.getHandle(), RewriteHandle.class);
RewritePluginDataHandler.CACHED_HANDLE.get().cachedHandle(CacheKeyUtils.INST.getKey(data), rewriteHandle);
when(chain.execute(exchange)).thenReturn(Mono.empty());
SelectorData selectorData = mock(SelectorData.class);
StepVerifier.create(rewritePlugin.doExecute(exchange, chain, selectorData, data)).expectSubscription().verifyComplete();
assertTrue(StringUtils.isBlank((String) exchange.getAttributes().get(Constants.REWRITE_URI)));
}
|
@Override
public void expireConnectorTableColumnStatistics(Table table, List<String> columns) {
if (table == null || columns == null) {
return;
}
List<ConnectorTableColumnKey> allKeys = Lists.newArrayList();
for (String column : columns) {
ConnectorTableColumnKey key = new ConnectorTableColumnKey(table.getUUID(), column);
allKeys.add(key);
}
connectorTableCachedStatistics.synchronous().invalidateAll(allKeys);
}
|
@Test
public void testExpireConnectorTableColumnStatistics() {
Table table = connectContext.getGlobalStateMgr().getMetadataMgr().getTable("hive0", "partitioned_db", "t1");
CachedStatisticStorage cachedStatisticStorage = new CachedStatisticStorage();
try {
cachedStatisticStorage.expireConnectorTableColumnStatistics(table, ImmutableList.of("c1", "c2"));
} catch (Exception e) {
Assert.fail();
}
}
|
@Override
public boolean supportsSchemasInIndexDefinitions() {
return false;
}
|
@Test
void assertSupportsSchemasInIndexDefinitions() {
assertFalse(metaData.supportsSchemasInIndexDefinitions());
}
|
public static long download(String url, OutputStream out, boolean isCloseOut) {
return download(url, out, isCloseOut, null);
}
|
@Test
@Disabled
public void getTest5() {
String url2 = "http://storage.chancecloud.com.cn/20200413_%E7%B2%A4B12313_386.pdf";
final ByteArrayOutputStream os2 = new ByteArrayOutputStream();
HttpUtil.download(url2, os2, false);
url2 = "http://storage.chancecloud.com.cn/20200413_粤B12313_386.pdf";
HttpUtil.download(url2, os2, false);
}
|
@Override
public TaskConfig convertJsonToTaskConfig(String configJson) {
final TaskConfig taskConfig = new TaskConfig();
ArrayList<String> exceptions = new ArrayList<>();
try {
Map<String, Object> configMap = (Map) GSON.fromJson(configJson, Object.class);
if (configMap.isEmpty()) {
exceptions.add("The Json for Task Config cannot be empty");
}
for (Map.Entry<String, Object> entry : configMap.entrySet()) {
TaskConfigProperty property = new TaskConfigProperty(entry.getKey(), null);
property.with(Property.REQUIRED, true);
Map propertyValue = (Map) entry.getValue();
if (propertyValue != null) {
if (propertyValue.containsKey("default-value")) {
if (!(propertyValue.get("default-value") instanceof String)) {
exceptions.add(String.format("Key: '%s' - The Json for Task Config should contain a not-null 'default-value' of type String", entry.getKey()));
} else {
property.withDefault((String) propertyValue.get("default-value"));
}
}
if (propertyValue.containsKey("display-name")) {
if (!(propertyValue.get("display-name") instanceof String)) {
exceptions.add(String.format("Key: '%s' - 'display-name' should be of type String", entry.getKey()));
} else {
property.with(Property.DISPLAY_NAME, (String) propertyValue.get("display-name"));
}
}
if (propertyValue.containsKey("display-order")) {
if (!(propertyValue.get("display-order") instanceof String && StringUtil.isInteger((String) propertyValue.get("display-order")))) {
exceptions.add(String.format("Key: '%s' - 'display-order' should be a String containing a numerical value", entry.getKey()));
} else {
property.with(Property.DISPLAY_ORDER, Integer.parseInt((String) propertyValue.get("display-order")));
}
}
if (propertyValue.containsKey("secure")) {
if (!(propertyValue.get("secure") instanceof Boolean)) {
exceptions.add(String.format("Key: '%s' - The Json for Task Config should contain a 'secure' field of type Boolean", entry.getKey()));
} else {
property.with(Property.SECURE, (Boolean) propertyValue.get("secure"));
}
}
if (propertyValue.containsKey("required")) {
if (!(propertyValue.get("required") instanceof Boolean)) {
exceptions.add(String.format("Key: '%s' - The Json for Task Config should contain a 'required' field of type Boolean", entry.getKey()));
} else {
property.with(Property.REQUIRED, (Boolean) propertyValue.get("required"));
}
}
}
taskConfig.add(property);
}
if (!exceptions.isEmpty()) {
throw new RuntimeException(StringUtils.join(exceptions, ", "));
}
return taskConfig;
} catch (Exception e) {
LOGGER.error("Error occurred while converting the Json to Task Config. Error: {}. The Json received was '{}'.", e.getMessage(), configJson);
throw new RuntimeException(String.format("Error occurred while converting the Json to Task Config. Error: %s.", e.getMessage()));
}
}
|
@Test
    public void shouldKeepTheConfigInTheOrderOfDisplayOrder() {
String json = "{\"URL\":{\"default-value\":\"\",\"secure\":false,\"required\":true,\"display-name\":\"Url\",\"display-order\":\"0\"}," +
"\"PASSWORD\":{\"display-order\":\"2\"}," +
"\"USER\":{\"default-value\":\"foo\",\"secure\":true,\"required\":false,\"display-order\":\"1\"}" +
"}";
TaskConfig config = new JsonBasedTaskExtensionHandler_V1().convertJsonToTaskConfig(json);
assertThat(config.list().get(0).getKey(), is("URL"));
assertThat(config.list().get(1).getKey(), is("USER"));
assertThat(config.list().get(2).getKey(), is("PASSWORD"));
}
|
@Override
public String getDataSource() {
return DataSourceConstant.MYSQL;
}
|
@Test
void testGetDataSource() {
String sql = configInfoMapperByMySql.getDataSource();
assertEquals(DataSourceConstant.MYSQL, sql);
}
|
@Override
public Result invoke(Invoker<?> invoker, Invocation inv) throws RpcException {
String token = invoker.getUrl().getParameter(TOKEN_KEY);
if (ConfigUtils.isNotEmpty(token)) {
Class<?> serviceType = invoker.getInterface();
String remoteToken = (String) inv.getObjectAttachmentWithoutConvert(TOKEN_KEY);
if (!token.equals(remoteToken)) {
throw new RpcException("Invalid token! Forbid invoke remote service " + serviceType + " method "
+ RpcUtils.getMethodName(inv) + "() from consumer "
+ RpcContext.getServiceContext().getRemoteHost() + " to provider "
+ RpcContext.getServiceContext().getLocalHost()
+ ", consumer incorrect token is " + remoteToken);
}
}
return invoker.invoke(inv);
}
|
@Test
void testInvokeWithWrongToken() throws Exception {
Assertions.assertThrows(RpcException.class, () -> {
String token = "token";
Invoker invoker = Mockito.mock(Invoker.class);
URL url = URL.valueOf("test://test:11/test?accesslog=true&group=dubbo&version=1.1&token=" + token);
when(invoker.getUrl()).thenReturn(url);
when(invoker.invoke(any(Invocation.class))).thenReturn(new AppResponse("result"));
Map<String, Object> attachments = new HashMap<>();
attachments.put(TOKEN_KEY, "wrongToken");
Invocation invocation = Mockito.mock(Invocation.class);
when(invocation.getObjectAttachments()).thenReturn(attachments);
tokenFilter.invoke(invoker, invocation);
});
}
|
public static RemoteFileInputFormat getHdfsFileFormat(FileFormat format) {
switch (format) {
case ORC:
return RemoteFileInputFormat.ORC;
case PARQUET:
return RemoteFileInputFormat.PARQUET;
default:
throw new StarRocksConnectorException("Unexpected file format: " + format);
}
}
|
@Test
public void testGetHdfsFileFormat() {
RemoteFileInputFormat fileFormat = IcebergApiConverter.getHdfsFileFormat(FileFormat.PARQUET);
        Assert.assertEquals(RemoteFileInputFormat.PARQUET, fileFormat);
        Assert.assertThrows("Expected StarRocksConnectorException for an unsupported format",
                StarRocksConnectorException.class, () -> {
                    IcebergApiConverter.getHdfsFileFormat(FileFormat.AVRO);
                });
}
|
public static ILogger getLogger(@Nonnull Class<?> clazz) {
checkNotNull(clazz, "class must not be null");
return getLoggerInternal(clazz.getName());
}
|
@Test
    public void getLogger_whenLog4j2_thenReturnLog4j2Logger() {
isolatedLoggingRule.setLoggingType(LOGGING_TYPE_LOG4J2);
assertInstanceOf(Log4j2Factory.Log4j2Logger.class, Logger.getLogger(getClass()));
}
|
public void resetReadTimeout() {
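        // Refresh the last-read timestamp only when a reader-idle or all-idle timeout is configured.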
if (readerIdleTimeNanos > 0 || allIdleTimeNanos > 0) {
lastReadTime = ticksInNanos();
reading = false;
}
}
|
@Test
public void testResetReader() throws Exception {
final TestableIdleStateHandler idleStateHandler = new TestableIdleStateHandler(
false, 1L, 0L, 0L, TimeUnit.SECONDS);
Action action = new Action() {
@Override
public void run(EmbeddedChannel channel) throws Exception {
idleStateHandler.resetReadTimeout();
}
};
anyNotIdle(idleStateHandler, action, IdleStateEvent.FIRST_READER_IDLE_STATE_EVENT);
}
|
@Override
public Cursor<Tuple> zScan(byte[] key, ScanOptions options) {
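        // Adapts Redis ZSCAN to Spring Data's Cursor API: each doScan call fetches one page
        // and remembers the node that served it, so the next page is read from the same node.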
return new KeyBoundCursor<Tuple>(key, 0, options) {
private RedisClient client;
@Override
protected ScanIteration<Tuple> doScan(byte[] key, long cursorId, ScanOptions options) {
if (isQueueing() || isPipelined()) {
throw new UnsupportedOperationException("'ZSCAN' cannot be called in pipeline / transaction mode.");
}
List<Object> args = new ArrayList<Object>();
args.add(key);
args.add(Long.toUnsignedString(cursorId));
if (options.getPattern() != null) {
args.add("MATCH");
args.add(options.getPattern());
}
if (options.getCount() != null) {
args.add("COUNT");
args.add(options.getCount());
}
RFuture<ListScanResult<Tuple>> f = executorService.readAsync(client, key, ByteArrayCodec.INSTANCE, ZSCAN, args.toArray());
ListScanResult<Tuple> res = syncFuture(f);
client = res.getRedisClient();
return new ScanIteration<Tuple>(Long.parseUnsignedLong(res.getPos()), res.getValues());
}
}.open();
}
|
@Test
public void testZScan() {
connection.zAdd("key".getBytes(), 1, "value1".getBytes());
connection.zAdd("key".getBytes(), 2, "value2".getBytes());
Cursor<RedisZSetCommands.Tuple> t = connection.zScan("key".getBytes(), ScanOptions.scanOptions().build());
assertThat(t.hasNext()).isTrue();
assertThat(t.next().getValue()).isEqualTo("value1".getBytes());
assertThat(t.hasNext()).isTrue();
assertThat(t.next().getValue()).isEqualTo("value2".getBytes());
}
|
public Integer doCall() throws Exception {
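        // Build one row per Integration resource, deriving readiness from the "Ready"
        // condition, then print either bare names or an ASCII table.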
List<Row> rows = new ArrayList<>();
List<Integration> integrations = client(Integration.class).list().getItems();
integrations
.forEach(integration -> {
Row row = new Row();
row.name = integration.getMetadata().getName();
row.ready = "0/1";
if (integration.getStatus() != null) {
row.phase = integration.getStatus().getPhase();
if (integration.getStatus().getConditions() != null) {
row.ready
= integration.getStatus().getConditions().stream().filter(c -> c.getType().equals("Ready"))
.anyMatch(c -> c.getStatus().equals("True")) ? "1/1" : "0/1";
}
row.kit = integration.getStatus().getIntegrationKit() != null
? integration.getStatus().getIntegrationKit().getName() : "";
} else {
row.phase = "Unknown";
}
rows.add(row);
});
if (!rows.isEmpty()) {
if (name) {
rows.forEach(r -> printer().println(r.name));
} else {
printer().println(AsciiTable.getTable(AsciiTable.NO_BORDERS, rows, Arrays.asList(
new Column().header("NAME").dataAlign(HorizontalAlign.LEFT)
.maxWidth(40, OverflowBehaviour.ELLIPSIS_RIGHT)
.with(r -> r.name),
new Column().header("PHASE").headerAlign(HorizontalAlign.LEFT)
.with(r -> r.phase),
new Column().header("KIT").headerAlign(HorizontalAlign.LEFT).with(r -> r.kit),
new Column().header("READY").dataAlign(HorizontalAlign.CENTER).with(r -> r.ready))));
}
}
return 0;
}
|
@Test
public void shouldListIntegrationsEmpty() throws Exception {
createCommand().doCall();
Assertions.assertEquals("", printer.getOutput());
}
|
@GET
@Path("/entity-uid/{uid}/")
@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8)
public TimelineEntity getEntity(
@Context HttpServletRequest req,
@Context HttpServletResponse res,
@PathParam("uid") String uId,
@QueryParam("confstoretrieve") String confsToRetrieve,
@QueryParam("metricstoretrieve") String metricsToRetrieve,
@QueryParam("fields") String fields,
@QueryParam("metricslimit") String metricsLimit,
@QueryParam("metricstimestart") String metricsTimeStart,
@QueryParam("metricstimeend") String metricsTimeEnd) {
String url = req.getRequestURI() +
(req.getQueryString() == null ? "" :
QUERY_STRING_SEP + req.getQueryString());
UserGroupInformation callerUGI =
TimelineReaderWebServicesUtils.getUser(req);
LOG.info("Received URL {} from user {}",
url, TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
TimelineEntity entity = null;
try {
TimelineReaderContext context =
TimelineUIDConverter.GENERIC_ENTITY_UID.decodeUID(uId);
if (context == null) {
throw new BadRequestException("Incorrect UID " + uId);
}
entity = timelineReaderManager.getEntity(context,
TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForGenericEntity(entity, callerUGI);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "Either metricslimit or metricstime"
+ " start/end");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL {} (Took {} ms.)", url, latency);
}
if (entity == null) {
LOG.info("Processed URL {} but entity not found" + " (Took {} ms.)",
url, (Time.monotonicNow() - startTime));
throw new NotFoundException("Timeline entity with uid: " + uId +
"is not found");
}
return entity;
}
|
@Test
void testGetEntitiesByEventFilters() throws Exception {
Client client = createClient();
try {
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/app1/entities/app?" +
"eventfilters=event_2,event_4");
ClientResponse resp = getResponse(client, uri);
Set<TimelineEntity> entities =
resp.getEntity(new GenericType<Set<TimelineEntity>>(){
});
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
resp.getType().toString());
assertNotNull(entities);
assertEquals(1, entities.size());
assertTrue(entities.contains(newEntity("app", "id_3")),
"Entity with id_3 should have been present in response.");
} finally {
client.destroy();
}
}
|
@Deprecated
@Override
public void init(final ProcessorContext context,
final StateStore root) {
this.context = context instanceof InternalProcessorContext ? (InternalProcessorContext) context : null;
taskId = context.taskId();
initStoreSerde(context);
streamsMetrics = (StreamsMetricsImpl) context.metrics();
registerMetrics();
final Sensor restoreSensor =
StateStoreMetrics.restoreSensor(taskId.toString(), metricsScope, name(), streamsMetrics);
// register and possibly restore the state from the logs
maybeMeasureLatency(() -> super.init(context, root), time, restoreSensor);
}
|
@SuppressWarnings("deprecation")
@Test
public void shouldDelegateDeprecatedInit() {
setUp();
final MeteredKeyValueStore<String, String> outer = new MeteredKeyValueStore<>(
inner,
STORE_TYPE,
new MockTime(),
Serdes.String(),
Serdes.String()
);
doNothing().when(inner).init((ProcessorContext) context, outer);
outer.init((ProcessorContext) context, outer);
}
|
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer)
throws Py4JException, IOException {
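        // Dispatch on the single-character sub-command: unknown-member lookup,
        // java.lang.Class lookup, or regular member lookup, then write the reply.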
char subCommand = safeReadLine(reader).charAt(0);
String returnCommand = null;
if (subCommand == GET_UNKNOWN_SUB_COMMAND_NAME) {
returnCommand = getUnknownMember(reader);
} else if (subCommand == GET_JAVA_LANG_CLASS_SUB_COMMAND_NAME) {
returnCommand = getJavaLangClass(reader);
} else {
returnCommand = getMember(reader);
}
logger.finest("Returning command: " + returnCommand);
writer.write(returnCommand);
writer.flush();
}
|
@Test
public void testMember() {
String inputCommand1 = ReflectionCommand.GET_MEMBER_SUB_COMMAND_NAME + "\n" + "java.lang.String\n" + "valueOf"
+ "\ne\n";
String inputCommand2 = ReflectionCommand.GET_MEMBER_SUB_COMMAND_NAME + "\n" + "java.lang.String\n" + "length"
+ "\ne\n";
String inputCommand3 = ReflectionCommand.GET_MEMBER_SUB_COMMAND_NAME + "\n" + "p1.Cat\n" + "meow" + "\ne\n";
String inputCommand4 = ReflectionCommand.GET_MEMBER_SUB_COMMAND_NAME + "\n" + "p1.Cat\n" + "meow20" + "\ne\n"; // does not exist
String inputCommand5 = ReflectionCommand.GET_MEMBER_SUB_COMMAND_NAME + "\n" + "p1.Cat\n" + "meow15" + "\ne\n";
String inputCommand6 = ReflectionCommand.GET_MEMBER_SUB_COMMAND_NAME + "\n" + "p1.Cat\n" + "CONSTANT" + "\ne\n";
try {
command.execute("r", new BufferedReader(new StringReader(inputCommand1)), writer);
assertEquals("!ym\n", sWriter.toString());
command.execute("r", new BufferedReader(new StringReader(inputCommand2)), writer);
assertEquals("!ym\n!xsTrying to access a non-static member from a " + "static context.\n",
sWriter.toString());
command.execute("r", new BufferedReader(new StringReader(inputCommand3)), writer);
assertEquals(
"!ym\n!xsTrying to access a non-static member from a static context.\n!xsTrying to access a non-static member from a static context.\n",
sWriter.toString());
command.execute("r", new BufferedReader(new StringReader(inputCommand4)), writer);
assertEquals(
"!ym\n!xsTrying to access a non-static member from a static context.\n!xsTrying to access a non-static member from a static context.\n!x\n",
sWriter.toString());
command.execute("r", new BufferedReader(new StringReader(inputCommand5)), writer);
assertEquals(
"!ym\n!xsTrying to access a non-static member from a static context.\n!xsTrying to access a non-static member from a static context.\n!x\n!ym\n",
sWriter.toString());
command.execute("r", new BufferedReader(new StringReader(inputCommand6)), writer);
assertEquals(
"!ym\n!xsTrying to access a non-static member from a static context.\n!xsTrying to access a non-static member from a static context.\n!x\n!ym\n!ysSalut!\n",
sWriter.toString());
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
|
@Override
public double score(int[] truth, int[] prediction) {
return of(truth, prediction, strategy);
}
|
@Test
public void testWeighted() {
System.out.println("Weighted-Recall");
int[] truth = {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 5, 5, 5
};
int[] prediction = {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 2, 3, 2, 2, 2, 3, 1, 3, 3, 3, 4, 5, 4, 4, 4, 4, 1, 5, 5
};
Recall instance = new Recall(Averaging.Weighted);
double expResult = 0.89;
double result = instance.score(truth, prediction);
assertEquals(expResult, result, 1E-4);
}
|
protected synchronized void update(final String md5, final long lastModifyTime) {
this.md5 = md5;
this.lastModifyTime = lastModifyTime;
}
|
@Test
public void testUpdate() {
String group = "default";
String json = "{\"name\":\"shenyu\"}";
String md51 = "8e8a3a2fdbd4368f169aa88c5fdce5a1";
ConfigDataCache cache = new ConfigDataCache(group, json, md51, 0);
assertEquals(cache.getMd5(), md51);
assertEquals(cache.getJson(), json);
assertEquals(cache.getGroup(), group);
String md52 = "8e8a3a2fdbd4368f169aa88c5fdce5au";
cache.update(md52, 1);
assertEquals(cache.getMd5(), md52);
assertEquals(cache.getLastModifyTime(), 1);
}
|
@Override
public TransactionRule build(final TransactionRuleConfiguration ruleConfig, final Map<String, ShardingSphereDatabase> databases, final ConfigurationProperties props) {
return new TransactionRule(ruleConfig, databases);
}
|
@Test
void assertBuild() {
TransactionRuleConfiguration ruleConfig = new TransactionRuleConfiguration("LOCAL", "provider", new Properties());
ShardingSphereDatabase database = new ShardingSphereDatabase("logic_db", null, new ResourceMetaData(createDataSourceMap()),
new RuleMetaData(Collections.singletonList(mock(ShardingSphereRule.class))), Collections.singletonMap("test", mock(ShardingSphereSchema.class)));
try (TransactionRule rule = new TransactionRuleBuilder().build(ruleConfig, Collections.singletonMap(DefaultDatabase.LOGIC_NAME, database), mock(ConfigurationProperties.class))) {
assertNotNull(rule.getConfiguration());
}
}
|
@Override
public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException {
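        // Delegate to the segment-aware copy, passing the source's large-object segments.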
return copy(source, segmentService.list(source), target, status, callback, listener);
}
|
@Test
public void testCopyManifestSameBucket() throws Exception {
final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
container.attributes().setRegion("IAD");
final Path originFolder = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory));
final Path sourceFile = new Path(originFolder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
final SwiftRegionService regionService = new SwiftRegionService(session);
final SwiftSegmentService segmentService = new SwiftSegmentService(session, ".segments-test/");
prepareFile(sourceFile, regionService, segmentService);
final SwiftFindFeature findFeature = new SwiftFindFeature(session);
assertTrue(findFeature.find(sourceFile));
final List<Path> sourceSegments = segmentService.list(sourceFile);
final Path targetFolder = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory));
final Path targetFile = new Path(targetFolder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
final Path copiedFile = new SwiftDefaultCopyFeature(session, regionService)
.copy(sourceFile, targetFile, new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener());
// copied file exists
assertTrue(findFeature.find(copiedFile));
final List<Path> targetSegments = segmentService.list(targetFile);
assertTrue(sourceSegments.containsAll(targetSegments) && targetSegments.containsAll(sourceSegments));
// delete source, without deleting segments
new SwiftDeleteFeature(session, segmentService, regionService).delete(
Collections.singletonMap(sourceFile, new TransferStatus()),
new DisabledPasswordCallback(), new Delete.DisabledCallback(), false);
assertFalse(findFeature.find(sourceFile));
assertTrue(targetSegments.stream().allMatch(p -> {
try {
return findFeature.find(p);
}
catch(BackgroundException e) {
return false;
}
}));
new SwiftDeleteFeature(session, segmentService, regionService).delete(
Collections.singletonMap(copiedFile, new TransferStatus()),
new DisabledPasswordCallback(), new Delete.DisabledCallback(), true);
assertFalse(findFeature.find(copiedFile));
}
|
@Override
public void verify(String value) {
long l = Long.parseLong(value);
if (l < min || l > max) {
throw new RuntimeException(format("value is not in range(%d, %d)", min, max));
}
}
|
@Test
public void verify_MinValue_NoExceptionThrown() {
longRangeAttribute.verify("0");
}
|
public void setClusters(Map<String, ClusterMetadata> clusters) {
this.clusters = clusters;
}
|
@Test
void testSetClusters() {
Map<String, ClusterMetadata> clusters = new HashMap<>();
clusters.put("key", clusterMetadata);
serviceMetadata.setClusters(clusters);
Map<String, ClusterMetadata> map = serviceMetadata.getClusters();
assertNotNull(map);
assertEquals(1, map.size());
}
|
@Override
public byte[] decompress(byte[] src) throws IOException {
        // Stream-decompress the payload through ZstdInputStream, buffering the
        // decompressed bytes in memory until the stream is exhausted.
        byte[] uncompressData = new byte[src.length];
        ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(src);
        ZstdInputStream zstdInputStream = new ZstdInputStream(byteArrayInputStream);
        ByteArrayOutputStream resultOutputStream = new ByteArrayOutputStream(src.length);
        try {
            while (true) {
                int len = zstdInputStream.read(uncompressData, 0, uncompressData.length);
                if (len <= 0) {
                    break;
                }
                resultOutputStream.write(uncompressData, 0, len);
            }
            resultOutputStream.flush();
            resultOutputStream.close();
            return resultOutputStream.toByteArray();
        } finally {
            try {
                zstdInputStream.close();
                byteArrayInputStream.close();
            } catch (IOException e) {
                log.warn("Failed to close the zstd decompress stream", e);
            }
        }
}
|
@Test(expected = IOException.class)
public void testDecompressWithInvalidData() throws IOException {
byte[] invalidData = new byte[] {-1, -1, -1, -1};
ZstdCompressor compressor = new ZstdCompressor();
compressor.decompress(invalidData);
}
|
public static synchronized YangXmlUtils getInstance() {
if (instance == null) {
instance = new YangXmlUtils();
}
return instance;
}
|
@Test
public void testGetXmlUtilsInstance() throws ConfigurationException {
YangXmlUtils instance1 = YangXmlUtils.getInstance();
YangXmlUtils instance2 = YangXmlUtils.getInstance();
assertEquals("Duplicate instance", instance1, instance2);
}
|
public static List<String> readLines(String path, Charset charset) {
List<String> strList = new ArrayList<>();
ClassPathResource classPathResource = new ClassPathResource(path);
try (
InputStreamReader in = new InputStreamReader(classPathResource.getInputStream(), charset);
BufferedReader reader = new BufferedReader(in)) {
String line;
while ((line = reader.readLine()) != null) {
strList.add(line);
}
} catch (IOException e) {
throw new RuntimeException("file read error", e);
}
return strList;
}
|
@Test
public void assertReadLines() {
String testFilePath = "test/test_utf8.txt";
List<String> readLines = FileUtil.readLines(testFilePath, StandardCharsets.UTF_8);
Assert.assertEquals(3, readLines.size());
}
|
public static void setField(
final Object object, final String fieldName, final Object fieldNewValue) {
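        // Walk up the class hierarchy (retrying on NoSuchFieldException) until a class
        // declares the field, then force accessibility and write the new value.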
try {
traverseClassHierarchy(
object.getClass(),
NoSuchFieldException.class,
(InsideTraversal<Void>)
traversalClass -> {
Field field = traversalClass.getDeclaredField(fieldName);
field.setAccessible(true);
field.set(object, fieldNewValue);
return null;
});
} catch (Exception e) {
throw new RuntimeException(e);
}
}
|
@Test
public void setFieldReflectively_givesHelpfulExceptions() {
ExampleDescendant example = new ExampleDescendant();
try {
ReflectionHelpers.setField(example, "nonExistent", 6);
fail("Expected exception not thrown");
} catch (RuntimeException e) {
if (!e.getMessage().contains("nonExistent")) {
throw new RuntimeException("Incorrect exception thrown", e);
}
}
}
|
public ASN1Sequence signedPipFromPplist(List<PolymorphicPseudonymType> response) {
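        // Return the first entry that parses as an ASN.1 sequence whose leading object
        // identifier is the signed-PIP OID; malformed entries are logged and skipped.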
for (PolymorphicPseudonymType polymorphicPseudonymType : response) {
ASN1Sequence sequence;
try {
sequence = (ASN1Sequence) ASN1Sequence.fromByteArray(polymorphicPseudonymType.getValue());
} catch (Exception e) {
logger.error(String.format("PolymorphicPseudonymType not a valid ASN1 Sequence. Exception: '%s'",
e.getMessage()));
continue;
}
if (sequence.getObjectAt(0) instanceof ASN1ObjectIdentifier) {
ASN1ObjectIdentifier objectIdentifier = (ASN1ObjectIdentifier) sequence.getObjectAt(0);
if (objectIdentifier.getId().equals(SIGNED_PIP_OID)) {
return sequence;
}
}
}
throw new IllegalArgumentException("No signed pip found in PolymorphicPseudonymType list");
}
|
@Test
public void signedPipFromPplistNoPipTest() throws IOException {
List<PolymorphicPseudonymType> pplist = new ArrayList<>();
pplist.add(new PolymorphicPseudonymType() {
{
value = pp.getEncoded();
}
});
IllegalArgumentException ex = assertThrows(IllegalArgumentException.class,
() -> bsnkUtil.signedPipFromPplist(pplist));
assertEquals("No signed pip found in PolymorphicPseudonymType list", ex.getMessage());
}
|
public boolean isComplete() {
return !ruleMetaData.getRules().isEmpty() && !resourceMetaData.getStorageUnits().isEmpty();
}
|
@Test
void assertIsComplete() {
ResourceMetaData resourceMetaData = new ResourceMetaData(Collections.singletonMap("ds", new MockedDataSource()));
RuleMetaData ruleMetaData = new RuleMetaData(Collections.singleton(mock(ShardingSphereRule.class)));
assertTrue(new ShardingSphereDatabase("foo_db", mock(DatabaseType.class), resourceMetaData, ruleMetaData, Collections.emptyMap()).isComplete());
}
|
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
return helper.interpret(session, st, context);
}
|
@Test
void should_fail_when_executing_a_removed_prepared_statement() {
// Given
String prepareFirst = "@prepare[to_be_removed]=INSERT INTO zeppelin.users(login,deceased) " +
"VALUES(?,?)";
interpreter.interpret(prepareFirst, intrContext);
String removePrepared = "@remove_prepare[to_be_removed]\n" +
"@bind[to_be_removed]='bind_bool'";
// When
final InterpreterResult actual = interpreter.interpret(removePrepared, intrContext);
// Then
assertEquals(Code.ERROR, actual.code());
assertEquals("The statement 'to_be_removed' can " +
"not be bound to values. Are you sure you did prepare it with " +
"@prepare[to_be_removed] ?", actual.message().get(0).getData());
}
|
@Override
public RouteContext route(final ShardingRule shardingRule) {
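        // Map each matched data node to a route unit pairing its data source with the
        // logical-to-actual table mapping.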
RouteContext result = new RouteContext();
Collection<DataNode> dataNodes = getDataNodes(shardingRule, shardingRule.getShardingTable(logicTableName));
result.getOriginalDataNodes().addAll(originalDataNodes);
for (DataNode each : dataNodes) {
result.getRouteUnits().add(
new RouteUnit(new RouteMapper(each.getDataSourceName(), each.getDataSourceName()), Collections.singleton(new RouteMapper(logicTableName, each.getTableName()))));
}
return result;
}
|
@Test
void assertRouteByMixedWithHintTable() {
SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, withSettings().extraInterfaces(TableAvailable.class).defaultAnswer(RETURNS_DEEP_STUBS));
when(((TableAvailable) sqlStatementContext).getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_hint_table_test"));
ShardingStandardRoutingEngine standardRoutingEngine = createShardingStandardRoutingEngine("t_hint_table_test",
ShardingRoutingEngineFixtureBuilder.createShardingConditions("t_hint_table_test"), sqlStatementContext, new HintValueContext());
HintManager hintManager = HintManager.getInstance();
hintManager.addTableShardingValue("t_hint_table_test", 1);
RouteContext routeContext = standardRoutingEngine.route(ShardingRoutingEngineFixtureBuilder.createMixedShardingRule());
List<RouteUnit> routeUnits = new ArrayList<>(routeContext.getRouteUnits());
assertThat(routeContext.getRouteUnits().size(), is(1));
assertThat(routeUnits.get(0).getDataSourceMapper().getActualName(), is("ds_1"));
assertThat(routeUnits.get(0).getTableMappers().size(), is(1));
assertThat(routeUnits.get(0).getTableMappers().iterator().next().getActualName(), is("t_hint_table_test_1"));
assertThat(routeUnits.get(0).getTableMappers().iterator().next().getLogicName(), is("t_hint_table_test"));
}
|
public static String prepareUrl(@NonNull String url) {
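        // Normalize podcast URL schemes (feed://, pcast://, itpc://, subscribe deep links)
        // by stripping them recursively, then fall back to prefixing http:// if no scheme remains.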
url = url.trim();
String lowerCaseUrl = url.toLowerCase(Locale.ROOT); // protocol names are case insensitive
if (lowerCaseUrl.startsWith("feed://")) {
Log.d(TAG, "Replacing feed:// with http://");
return prepareUrl(url.substring("feed://".length()));
} else if (lowerCaseUrl.startsWith("pcast://")) {
Log.d(TAG, "Removing pcast://");
return prepareUrl(url.substring("pcast://".length()));
} else if (lowerCaseUrl.startsWith("pcast:")) {
Log.d(TAG, "Removing pcast:");
return prepareUrl(url.substring("pcast:".length()));
} else if (lowerCaseUrl.startsWith("itpc")) {
Log.d(TAG, "Replacing itpc:// with http://");
return prepareUrl(url.substring("itpc://".length()));
} else if (lowerCaseUrl.startsWith(AP_SUBSCRIBE)) {
Log.d(TAG, "Removing antennapod-subscribe://");
return prepareUrl(url.substring(AP_SUBSCRIBE.length()));
} else if (lowerCaseUrl.contains(AP_SUBSCRIBE_DEEPLINK)) {
Log.d(TAG, "Removing " + AP_SUBSCRIBE_DEEPLINK);
String query = Uri.parse(url).getQueryParameter("url");
try {
return prepareUrl(URLDecoder.decode(query, "UTF-8"));
} catch (UnsupportedEncodingException e) {
return prepareUrl(query);
}
} else if (!(lowerCaseUrl.startsWith("http://") || lowerCaseUrl.startsWith("https://"))) {
Log.d(TAG, "Adding http:// at the beginning of the URL");
return "http://" + url;
} else {
return url;
}
}
|
@Test
public void testProtocolRelativeUrlIsRelativeHttps() {
final String in = "//example.com";
final String inBase = "https://examplebase.com";
final String out = UrlChecker.prepareUrl(in, inBase);
assertEquals("https://example.com", out);
}
|
public static String getTypeName(final int type) {
switch (type) {
case START_EVENT_V3:
return "Start_v3";
case STOP_EVENT:
return "Stop";
case QUERY_EVENT:
return "Query";
case ROTATE_EVENT:
return "Rotate";
case INTVAR_EVENT:
return "Intvar";
case LOAD_EVENT:
return "Load";
case NEW_LOAD_EVENT:
return "New_load";
case SLAVE_EVENT:
return "Slave";
case CREATE_FILE_EVENT:
return "Create_file";
case APPEND_BLOCK_EVENT:
return "Append_block";
case DELETE_FILE_EVENT:
return "Delete_file";
case EXEC_LOAD_EVENT:
return "Exec_load";
case RAND_EVENT:
return "RAND";
case XID_EVENT:
return "Xid";
case USER_VAR_EVENT:
return "User var";
case FORMAT_DESCRIPTION_EVENT:
return "Format_desc";
case TABLE_MAP_EVENT:
return "Table_map";
case PRE_GA_WRITE_ROWS_EVENT:
return "Write_rows_event_old";
case PRE_GA_UPDATE_ROWS_EVENT:
return "Update_rows_event_old";
case PRE_GA_DELETE_ROWS_EVENT:
return "Delete_rows_event_old";
case WRITE_ROWS_EVENT_V1:
return "Write_rows_v1";
case UPDATE_ROWS_EVENT_V1:
return "Update_rows_v1";
case DELETE_ROWS_EVENT_V1:
return "Delete_rows_v1";
case BEGIN_LOAD_QUERY_EVENT:
return "Begin_load_query";
case EXECUTE_LOAD_QUERY_EVENT:
return "Execute_load_query";
case INCIDENT_EVENT:
return "Incident";
case HEARTBEAT_LOG_EVENT:
case HEARTBEAT_LOG_EVENT_V2:
return "Heartbeat";
case IGNORABLE_LOG_EVENT:
return "Ignorable";
case ROWS_QUERY_LOG_EVENT:
return "Rows_query";
case WRITE_ROWS_EVENT:
return "Write_rows";
case UPDATE_ROWS_EVENT:
return "Update_rows";
case DELETE_ROWS_EVENT:
return "Delete_rows";
case GTID_LOG_EVENT:
return "Gtid";
case ANONYMOUS_GTID_LOG_EVENT:
return "Anonymous_Gtid";
case PREVIOUS_GTIDS_LOG_EVENT:
return "Previous_gtids";
case PARTIAL_UPDATE_ROWS_EVENT:
return "Update_rows_partial";
            case TRANSACTION_CONTEXT_EVENT:
                return "Transaction_context";
            case VIEW_CHANGE_EVENT:
                return "view_change";
            case XA_PREPARE_LOG_EVENT:
                return "Xa_prepare";
            case TRANSACTION_PAYLOAD_EVENT:
                return "transaction_payload";
default:
return "Unknown type:" + type;
}
}
|
@Test
public void getTypeNameInputPositiveOutputNotNull35() {
// Arrange
final int type = 2;
// Act
final String actual = LogEvent.getTypeName(type);
// Assert result
Assert.assertEquals("Query", actual);
}
|