| focal_method (string, lengths 13–60.9k) | test_case (string, lengths 25–109k) |
|---|---|
protected final <T> void convertAndProcessManyJobs(Function<List<T>, List<T>> itemSupplier, Function<T, Job> toJobFunction, Consumer<Integer> amountOfProcessedJobsConsumer) {
int amountOfProcessedJobs = 0;
List<T> items = getItemsToProcess(itemSupplier, null);
while (!items.isEmpty()) {
convertAndProcessJobs(items, toJobFunction);
amountOfProcessedJobs += items.size();
items = getItemsToProcess(itemSupplier, items);
}
amountOfProcessedJobsConsumer.accept(amountOfProcessedJobs);
}
|
@Test
void convertAndProcessManyJobsDoesNotSaveNullItems() {
// GIVEN
List<Object> items = asList(null, null);
Function<Object, List<Job>> toJobFunction = x -> asList(null, null);
// WHEN
task.convertAndProcessManyJobs(items, toJobFunction, System.out::println);
// THEN
verify(storageProvider, never()).save(anyList());
}
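A minimal standalone sketch of the batch-draining pattern used by convertAndProcessManyJobs above: the supplier receives the previous batch (null on the first call) and returns the next one, and an empty batch ends the loop. All names below are illustrative, not part of the original code.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;

public class BatchLoopSketch {
    public static void main(String[] args) {
        // Toy source of items, drained two at a time.
        List<Integer> source = new ArrayList<>(List.of(1, 2, 3, 4, 5));
        Function<List<Integer>, List<Integer>> itemSupplier = previous -> {
            List<Integer> batch = new ArrayList<>(source.subList(0, Math.min(2, source.size())));
            source.removeAll(batch);
            return batch;
        };
        Consumer<Integer> amountConsumer = n -> System.out.println("processed " + n);

        int amountOfProcessedJobs = 0;
        List<Integer> items = itemSupplier.apply(null);
        while (!items.isEmpty()) {
            amountOfProcessedJobs += items.size(); // the real code converts and processes the batch here
            items = itemSupplier.apply(items);
        }
        amountConsumer.accept(amountOfProcessedJobs); // prints "processed 5"
    }
}
```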
|
public void tryResetPopRetryTopic(final List<MessageExt> msgs, String consumerGroup) {
String popRetryPrefix = MixAll.RETRY_GROUP_TOPIC_PREFIX + consumerGroup + "_";
for (MessageExt msg : msgs) {
if (msg.getTopic().startsWith(popRetryPrefix)) {
String normalTopic = KeyBuilder.parseNormalTopic(msg.getTopic(), consumerGroup);
if (normalTopic != null && !normalTopic.isEmpty()) {
msg.setTopic(normalTopic);
}
}
}
}
|
@Test
public void testTryResetPopRetryTopic() {
TopicRouteData topicRouteData = new TopicRouteData();
topicRouteData.getBrokerDatas().add(createBrokerData());
MessageExt messageExt = createMessageExt();
List<MessageExt> msgs = new ArrayList<>();
messageExt.setTopic(MixAll.RETRY_GROUP_TOPIC_PREFIX + defaultGroup + "_" + defaultTopic);
msgs.add(messageExt);
defaultMQPushConsumerImpl.tryResetPopRetryTopic(msgs, defaultGroup);
assertEquals(defaultTopic, msgs.get(0).getTopic());
}
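A minimal sketch of the topic rewrite exercised above, assuming RocketMQ's MixAll.RETRY_GROUP_TOPIC_PREFIX is "%RETRY%" (group and topic names are illustrative): a pop-retry topic of the form %RETRY%&lt;group&gt;_&lt;topic&gt; is rewritten back to the plain topic.

```java
public class PopRetryTopicSketch {
    public static void main(String[] args) {
        String retryGroupTopicPrefix = "%RETRY%"; // assumed value of MixAll.RETRY_GROUP_TOPIC_PREFIX
        String consumerGroup = "defaultGroup";
        String topic = retryGroupTopicPrefix + consumerGroup + "_defaultTopic";

        String popRetryPrefix = retryGroupTopicPrefix + consumerGroup + "_";
        if (topic.startsWith(popRetryPrefix)) {
            // KeyBuilder.parseNormalTopic effectively strips this prefix.
            System.out.println(topic.substring(popRetryPrefix.length())); // prints "defaultTopic"
        }
    }
}
```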
|
boolean eosEnabled() {
return StreamsConfigUtils.eosEnabled(processingMode);
}
|
@Test
public void shouldNotHaveEosEnabledIfEosDisabled() {
assertThat(nonEosStreamsProducer.eosEnabled(), is(false));
}
|
public CreateStreamCommand createStreamCommand(final KsqlStructuredDataOutputNode outputNode) {
return new CreateStreamCommand(
outputNode.getSinkName().get(),
outputNode.getSchema(),
outputNode.getTimestampColumn(),
outputNode.getKsqlTopic().getKafkaTopicName(),
Formats.from(outputNode.getKsqlTopic()),
outputNode.getKsqlTopic().getKeyFormat().getWindowInfo(),
Optional.of(outputNode.getOrReplace()),
Optional.of(false)
);
}
|
@Test
public void shouldAllowNonStringKeyColumn() {
// Given:
final CreateStream statement = new CreateStream(
SOME_NAME,
TableElements.of(tableElement("k", new Type(SqlTypes.INTEGER), KEY_CONSTRAINT)),
false,
true,
withProperties,
false
);
// When:
final CreateStreamCommand cmd = createSourceFactory
.createStreamCommand(statement, ksqlConfig);
// Then:
assertThat(cmd.getSchema().key(), contains(
keyColumn(ColumnName.of("k"), SqlTypes.INTEGER)
));
}
|
public static boolean shutdownThread(Thread thread) {
return shutdownThread(thread, SHUTDOWN_WAIT_MS);
}
|
@Test (timeout = 3000)
public void testShutdownThread() {
Thread thread = new Thread(sampleRunnable);
thread.start();
boolean ret = ShutdownThreadsHelper.shutdownThread(thread);
boolean isTerminated = !thread.isAlive();
assertEquals("Incorrect return value", ret, isTerminated);
assertTrue("Thread is not shutdown", isTerminated);
}
|
public void write(CruiseConfig configForEdit, OutputStream output, boolean skipPreprocessingAndValidation) throws Exception {
LOGGER.debug("[Serializing Config] Starting to write. Validation skipped? {}", skipPreprocessingAndValidation);
MagicalGoConfigXmlLoader loader = new MagicalGoConfigXmlLoader(configCache, registry);
if (!configForEdit.getOrigin().isLocal()) {
throw new GoConfigInvalidException(configForEdit, "Attempted to save merged configuration with partials");
}
if (!skipPreprocessingAndValidation) {
loader.preprocessAndValidate(configForEdit);
LOGGER.debug("[Serializing Config] Done with cruise config validators.");
}
Document document = createEmptyCruiseConfigDocument();
write(configForEdit, document.getRootElement(), configCache, registry);
LOGGER.debug("[Serializing Config] XSD and DOM validation.");
verifyXsdValid(document);
MagicalGoConfigXmlLoader.validateDom(document.getRootElement(), registry);
LOGGER.info("[Serializing Config] Generating config partial.");
XmlUtils.writeXml(document, output);
LOGGER.debug("[Serializing Config] Finished writing config partial.");
}
|
@Test
public void shouldSerialize_CaseInsensitiveString_whenUsedInConfigAttributeValue() {//for instance FetchTask uses PathFromAncestor which has CaseInsensitiveString
CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("uppest", "upper", "downer", "downest");
cruiseConfig.initializeServer();
setDependencyOn(cruiseConfig, "upper", "uppest", "stage");
setDependencyOn(cruiseConfig, "downer", "upper", "stage");
setDependencyOn(cruiseConfig, "downest", "downer", "stage");
PipelineConfig downest = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("downest"));
FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("uppest/upper/downer"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "src", "dest");
downest.add(com.thoughtworks.go.helper.StageConfigMother.stageConfig("stage-2", new JobConfigs(new JobConfig(new CaseInsensitiveString("downloader"), new ResourceConfigs(), new ArtifactTypeConfigs(), new Tasks(fetchTask)))));
try {
xmlWriter.write(cruiseConfig, output, false);
} catch (Exception e) {
fail("should not fail as workspace name is not mandatory anymore " + e);
}
assertThat(new String(output.toByteArray()), containsString("<fetchartifact artifactOrigin=\"gocd\" srcfile=\"src\" dest=\"dest\" pipeline=\"uppest/upper/downer\" stage=\"stage\" job=\"job\" />"));
}
|
public Component buildProject(ScannerReport.Component project, String scmBasePath) {
this.rootComponent = project;
this.scmBasePath = trimToNull(scmBasePath);
Node root = createProjectHierarchy(project);
return buildComponent(root, "", "");
}
|
@Test
void project_description_is_loaded_from_db_if_not_on_main_branch() {
String reportDescription = randomAlphabetic(5);
ScannerReport.Component reportProject = newBuilder()
.setType(PROJECT)
.setDescription(reportDescription)
.build();
Component root = newUnderTest(SOME_PROJECT_ATTRIBUTES, false).buildProject(reportProject, NO_SCM_BASE_PATH);
assertThat(root.getDescription()).isEqualTo(projectInDb.getDescription());
}
|
public String getUserName() {
return "cruise";
}
|
@Test
void shouldReturnCruiseAsUser() {
assertThat(dependencyMaterial.getUserName()).isEqualTo("cruise");
}
|
public static IpAddress valueOf(int value) {
byte[] bytes =
ByteBuffer.allocate(INET_BYTE_LENGTH).putInt(value).array();
return new IpAddress(Version.INET, bytes);
}
|
@Test
public void testEqualityIPv6() {
new EqualsTester()
.addEqualityGroup(
IpAddress.valueOf("1111:2222:3333:4444:5555:6666:7777:8888"),
IpAddress.valueOf("1111:2222:3333:4444:5555:6666:7777:8888"))
.addEqualityGroup(
IpAddress.valueOf("1111:2222:3333:4444:5555:6666:7777:888a"),
IpAddress.valueOf("1111:2222:3333:4444:5555:6666:7777:888a"))
.addEqualityGroup(
IpAddress.valueOf("::"),
IpAddress.valueOf("::"))
.addEqualityGroup(
IpAddress.valueOf("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"),
IpAddress.valueOf("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"))
.testEquals();
}
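For the valueOf(int) overload above, a small self-contained sketch of the int-to-bytes step, assuming INET_BYTE_LENGTH is 4: ByteBuffer writes the int big-endian, so 0x0A000001 maps to 10.0.0.1.

```java
import java.nio.ByteBuffer;

public class IpBytesSketch {
    public static void main(String[] args) {
        byte[] bytes = ByteBuffer.allocate(4).putInt(0x0A000001).array(); // big-endian by default
        System.out.printf("%d.%d.%d.%d%n",
                bytes[0] & 0xFF, bytes[1] & 0xFF, bytes[2] & 0xFF, bytes[3] & 0xFF); // prints 10.0.0.1
    }
}
```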
|
public void watchDataChange(final String key,
final BiConsumer<String, String> updateHandler,
final Consumer<String> deleteHandler) {
Watch.Listener listener = watch(updateHandler, deleteHandler);
if (!watchCache.containsKey(key)) {
Watch.Watcher watch = client.getWatchClient().watch(ByteSequence.from(key, UTF_8), listener);
watchCache.put(key, watch);
}
}
|
@Test
public void testWatchDataChange() {
BiConsumer<String, String> updateHandler = mock(BiConsumer.class);
Consumer<String> deleteHandler = mock(Consumer.class);
etcdClient.watchDataChange(WATCH_DATA_CHANGE_KEY, updateHandler, deleteHandler);
etcdClient.watchClose(WATCH_DATA_CHANGE_KEY);
etcdClient.watchClose("not hit");
verify(watcher).close();
}
|
@Override
public <T> Map<K, EntryProcessorResult<T>> invokeAll(Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor,
Object... arguments) {
return cache.invokeAll(keys, entryProcessor, arguments);
}
|
@Test
public void testInvokeAll() {
cache.put(23, "value-23");
cache.put(42, "value-42");
cache.put(65, "value-65");
Set<Integer> keys = new HashSet<>(asList(23, 65, 88));
Map<Integer, EntryProcessorResult<String>> resultMap = adapter.invokeAll(keys, new ICacheReplaceEntryProcessor(),
"value", "newValue");
assertEquals(2, resultMap.size());
assertEquals("newValue-23", resultMap.get(23).get());
assertEquals("newValue-65", resultMap.get(65).get());
assertEquals("newValue-23", cache.get(23));
assertEquals("value-42", cache.get(42));
assertEquals("newValue-65", cache.get(65));
assertNull(cache.get(88));
}
|
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException {
String path = ((HttpServletRequest) req).getRequestURI().replaceFirst(((HttpServletRequest) req).getContextPath(), "");
MAX_AGE_BY_PATH.entrySet().stream()
.filter(m -> path.startsWith(m.getKey()))
.map(Map.Entry::getValue)
.findFirst()
.ifPresent(maxAge -> ((HttpServletResponse) resp).addHeader(CACHE_CONTROL_HEADER, format(MAX_AGE_TEMPLATE, maxAge)));
chain.doFilter(req, resp);
}
|
@Test
public void max_age_is_set_to_one_year_on_css() throws Exception {
HttpServletRequest request = newRequest("/css/sonar.css");
underTest.doFilter(request, response, chain);
verify(response).addHeader("Cache-Control", format("max-age=%s", 31_536_000));
}
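The magic number asserted above is one year expressed in seconds; a quick check of the arithmetic and the resulting header value:

```java
public class MaxAgeSketch {
    public static void main(String[] args) {
        int oneYearInSeconds = 365 * 24 * 60 * 60;
        System.out.println(oneYearInSeconds);                               // prints 31536000
        System.out.println(String.format("max-age=%s", oneYearInSeconds)); // prints max-age=31536000
    }
}
```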
|
@Override
public void handlerDiscoveryUpstreamData(final DiscoverySyncData discoverySyncData) {
super.getWasmExtern(METHOD_NAME)
.map(handlerDiscoveryUpstreamData -> {
// WASI cannot pass Java objects directly the way JNI can, so we pass a Long id as the arg
// and look the real argument up by that id
final Long argumentId = getArgumentId(discoverySyncData);
ARGUMENTS.put(argumentId, discoverySyncData);
// call WASI function
WasmFunctions.consumer(super.getStore(), handlerDiscoveryUpstreamData.func(), WasmValType.I64)
.accept(argumentId);
ARGUMENTS.remove(argumentId);
return argumentId;
}).orElseThrow(() -> new ShenyuWasmInitException(METHOD_NAME + " function not found in wasm file: " + getWasmName()));
}
|
@Test
public void handlerDiscoveryUpstreamDataTest() {
discoveryUpstreamDataHandler = mock(DiscoveryUpstreamDataHandler.class);
discoveryUpstreamDataHandler.handlerDiscoveryUpstreamData(discoverySyncData);
testWasmPluginDiscoveryHandler.handlerDiscoveryUpstreamData(discoverySyncData);
verify(discoveryUpstreamDataHandler).handlerDiscoveryUpstreamData(discoverySyncData);
}
|
public void write(byte[] b, int off, int len) throws IOException {
requireOpened();
if (len > 0) {
stream.write(b, off, len);
}
}
|
@Test
void writeAfterCloseShouldThrowException() {
assertThatExceptionOfType(IOException.class)
.isThrownBy(
() -> {
final RefCountedFileWithStream fileUnderTest =
getClosedRefCountedFileWithContent(
"hello world",
TempDirUtils.newFolder(tempFolder).toPath());
byte[] content = bytesOf("Hello Again");
fileUnderTest.write(content, 0, content.length);
});
}
|
@Udf(description = "Splits a string into an array of substrings based on a delimiter.")
public List<String> split(
@UdfParameter(
description = "The string to be split. If NULL, then function returns NULL.")
final String string,
@UdfParameter(
description = "The delimiter to split a string by. If NULL, then function returns NULL.")
final String delimiter) {
if (string == null || delimiter == null) {
return null;
}
// Java split() accepts regular expressions as a delimiter, but this UDF split()
// accepts only literal strings. It therefore uses Guava Splitter, which does not
// accept regex patterns, to avoid confusing users when they split by regex
// special characters such as '.' and '|'.
try {
// Guava Splitter does not accept empty delimiters. Use the Java split() method instead.
if (delimiter.isEmpty()) {
return Arrays.asList(EMPTY_DELIMITER.split(string));
} else {
return Splitter.on(delimiter).splitToList(string);
}
} catch (final Exception e) {
throw new KsqlFunctionException(
String.format("Invalid delimiter '%s' in the split() function.", delimiter), e);
}
}
|
@Test
public void shouldReturnOriginalStringOnNotFoundDelimiter() {
assertThat(splitUdf.split("", "."), contains(""));
assertThat(splitUdf.split("x-y", "."), contains("x-y"));
}
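A small sketch of why the UDF avoids String.split(): Java's split() treats the delimiter as a regex, so an unescaped '.' matches every character; treating the delimiter literally (Pattern.quote here, Guava Splitter in the UDF) leaves the string intact.

```java
import java.util.Arrays;
import java.util.regex.Pattern;

public class SplitDelimiterSketch {
    public static void main(String[] args) {
        System.out.println(Arrays.toString("x-y".split(".")));                // prints [] - regex '.' matches everything
        System.out.println(Arrays.toString("x-y".split(Pattern.quote(".")))); // prints [x-y] - literal '.', no match
    }
}
```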
|
public static String[] splitString( String string, String separator ) {
/*
* 0123456 Example a;b;c;d --> new String[] { a, b, c, d }
*/
// System.out.println("splitString ["+path+"] using ["+separator+"]");
List<String> list = new ArrayList<>();
if ( string == null || string.length() == 0 ) {
return new String[] {};
}
int sepLen = separator.length();
int from = 0;
int end = string.length() - sepLen + 1;
for ( int i = from; i < end; i += sepLen ) {
if ( string.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) {
// OK, we found a separator, the string to add to the list
// is [from, i[
list.add( nullToEmpty( string.substring( from, i ) ) );
from = i + sepLen;
}
}
// Wait, if the string didn't end with a separator, we still have information at the end of the string...
// In our example that would be "d"...
if ( from + sepLen <= string.length() ) {
list.add( nullToEmpty( string.substring( from, string.length() ) ) );
}
return list.toArray( new String[list.size()] );
}
|
@Test
public void testSplitStringWithDelimiterAndEnclosureNullRemoveEnclosure() {
String mask = "Hello%s world";
String[] chunks = {"Hello", " world"};
String stringToSplit = String.format( mask, DELIMITER1 );
String[] result = Const.splitString( stringToSplit, DELIMITER1, null, true );
assertSplit( result, chunks );
}
|
public static Builder in(Table table) {
return new Builder(table);
}
|
@TestTemplate
public void testNoSnapshot() {
// a table has no snapshot when it just gets created and no data is loaded yet
// if not handled properly, NPE will be thrown in collect()
Iterable<DataFile> files = FindFiles.in(table).collect();
// verify an empty collection of data file is returned
assertThat(files).hasSize(0);
}
|
SSLContext build() throws IOException {
try {
KeyStore keystore = KeyStore.getInstance("PKCS12");
keystore.load(null);
if (hasCertificateFile()) {
keystore.setKeyEntry("cert", privateKey(privateKeyFile), new char[0], certificates(certificateFile));
} else if (hasCertificateInstance()) {
keystore.setKeyEntry("cert", privateKey, new char[0], certificate.toArray(new Certificate[0]));
}
if (hasCaCertificateFile()) {
addCaCertificates(keystore, List.of(certificates(caCertificatesFile)));
} else if (hasCaCertificateInstance()) {
addCaCertificates(keystore, caCertificates);
}
// Protocol version must be equal to TlsContext.SSL_CONTEXT_VERSION or higher
SSLContext sslContext = SSLContext.getInstance("TLSv1.3");
sslContext.init(
createKeyManagers(keystore).orElse(null),
createTrustManagers(keystore).orElse(null),
/*Default secure random algorithm*/null);
return sslContext;
} catch (GeneralSecurityException e) {
throw new IOException(e);
}
}
|
@Test
void successfully_constructs_sslcontext_when_no_builder_parameter_given() {
SSLContext sslContext = Assertions.assertDoesNotThrow(() -> new SslContextBuilder().build());
assertEquals("TLSv1.3", sslContext.getProtocol());
}
|
@Override
public UseDefaultInsertColumnsToken generateSQLToken(final InsertStatementContext insertStatementContext) {
String tableName = Optional.ofNullable(insertStatementContext.getSqlStatement().getTable()).map(optional -> optional.getTableName().getIdentifier().getValue()).orElse("");
Optional<UseDefaultInsertColumnsToken> previousSQLToken = findInsertColumnsToken();
if (previousSQLToken.isPresent()) {
processPreviousSQLToken(previousSQLToken.get(), insertStatementContext, tableName);
return previousSQLToken.get();
}
return generateNewSQLToken(insertStatementContext, tableName);
}
|
@Test
void assertGenerateSQLTokenFromPreviousSQLTokens() {
generator.setPreviousSQLTokens(EncryptGeneratorFixtureBuilder.getPreviousSQLTokens());
assertThat(generator.generateSQLToken(EncryptGeneratorFixtureBuilder.createInsertStatementContext(Collections.emptyList())).toString(),
is("(id, name, status, pwd_cipher, pwd_assist, pwd_like)"));
}
|
@Override
public String getName() {
return FUNCTION_NAME;
}
|
@Test
public void testMultiplicationTransformFunction() {
ExpressionContext expression = RequestContextUtils.getExpression(
String.format("mult(%s,%s,%s,%s,%s)", INT_SV_COLUMN, LONG_SV_COLUMN, FLOAT_SV_COLUMN, DOUBLE_SV_COLUMN,
STRING_SV_COLUMN));
TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
Assert.assertTrue(transformFunction instanceof MultiplicationTransformFunction);
Assert.assertEquals(transformFunction.getName(), MultiplicationTransformFunction.FUNCTION_NAME);
double[] expectedValues = new double[NUM_ROWS];
for (int i = 0; i < NUM_ROWS; i++) {
expectedValues[i] =
(double) _intSVValues[i] * (double) _longSVValues[i] * (double) _floatSVValues[i] * _doubleSVValues[i]
* Double.parseDouble(_stringSVValues[i]);
}
testTransformFunction(transformFunction, expectedValues);
expression = RequestContextUtils.getExpression(
String.format("mult(%s,%s,%s)", INT_SV_COLUMN, FLOAT_SV_COLUMN, BIG_DECIMAL_SV_COLUMN));
transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
Assert.assertTrue(transformFunction instanceof MultiplicationTransformFunction);
Assert.assertEquals(transformFunction.getName(), MultiplicationTransformFunction.FUNCTION_NAME);
BigDecimal[] expectedBigDecimalValues = new BigDecimal[NUM_ROWS];
for (int i = 0; i < NUM_ROWS; i++) {
expectedBigDecimalValues[i] = BigDecimal.valueOf(_intSVValues[i]).multiply(BigDecimal.valueOf(_floatSVValues[i]))
.multiply(_bigDecimalSVValues[i]);
}
testTransformFunction(transformFunction, expectedBigDecimalValues);
expression = RequestContextUtils.getExpression(
String.format("mult(mult(%s,%s,%s),%s,%s,%s)", INT_SV_COLUMN, LONG_SV_COLUMN, FLOAT_SV_COLUMN, DOUBLE_SV_COLUMN,
STRING_SV_COLUMN, BIG_DECIMAL_SV_COLUMN));
transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
Assert.assertTrue(transformFunction instanceof MultiplicationTransformFunction);
Assert.assertEquals(transformFunction.getName(), MultiplicationTransformFunction.FUNCTION_NAME);
expectedBigDecimalValues = new BigDecimal[NUM_ROWS];
for (int i = 0; i < NUM_ROWS; i++) {
expectedBigDecimalValues[i] =
BigDecimal.valueOf((double) _intSVValues[i] * (double) _longSVValues[i] * (double) _floatSVValues[i])
.multiply(BigDecimal.valueOf(_doubleSVValues[i])).multiply(new BigDecimal(_stringSVValues[i]))
.multiply(_bigDecimalSVValues[i]);
}
testTransformFunction(transformFunction, expectedBigDecimalValues);
expression = RequestContextUtils.getExpression(
String.format("mult(mult(12,%s),%s,mult(mult(%s,%s),0.34,%s),%s)", STRING_SV_COLUMN, DOUBLE_SV_COLUMN,
FLOAT_SV_COLUMN, LONG_SV_COLUMN, INT_SV_COLUMN, DOUBLE_SV_COLUMN));
transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
Assert.assertTrue(transformFunction instanceof MultiplicationTransformFunction);
for (int i = 0; i < NUM_ROWS; i++) {
expectedValues[i] = ((12d * Double.parseDouble(_stringSVValues[i])) * _doubleSVValues[i] * (
((double) _floatSVValues[i] * (double) _longSVValues[i]) * 0.34 * (double) _intSVValues[i])
* _doubleSVValues[i]);
}
testTransformFunction(transformFunction, expectedValues);
}
|
static BlockStmt getTextIndexNormalizationVariableDeclaration(final String variableName,
final TextIndexNormalization textIndexNormalization) {
if (textIndexNormalization.getInlineTable() == null && textIndexNormalization.getTableLocator() != null) {
throw new UnsupportedOperationException("TableLocator not supported, yet");
}
final MethodDeclaration methodDeclaration =
TEXTINDEXNORMALIZATION_TEMPLATE.getMethodsByName(GETKIEPMMLTEXTINDEXNORMALIZATION).get(0).clone();
final BlockStmt textIndexNormalizationBody =
methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
final VariableDeclarator variableDeclarator =
getVariableDeclarator(textIndexNormalizationBody, TEXTINDEXNORMALIZATION).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, TEXTINDEXNORMALIZATION, textIndexNormalizationBody)));
variableDeclarator.setName(variableName);
String inlineTableVariableName = String.format("%s_InlineTable", variableName);
final BlockStmt toReturn = new BlockStmt();
BlockStmt toAdd = getInlineTableVariableDeclaration(inlineTableVariableName,
textIndexNormalization.getInlineTable());
toAdd.getStatements().forEach(toReturn::addStatement);
final MethodCallExpr initializer = variableDeclarator.getInitializer()
.orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE,
TEXTINDEXNORMALIZATION, textIndexNormalizationBody)))
.asMethodCallExpr();
final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer);
final StringLiteralExpr nameExpr = new StringLiteralExpr(variableName);
builder.setArgument(0, nameExpr);
getChainedMethodCallExprFrom("withInField", initializer).setArgument(0,
getExpressionForObject(textIndexNormalization.getInField()));
getChainedMethodCallExprFrom("withOutField", initializer).setArgument(0,
getExpressionForObject(textIndexNormalization.getOutField()));
getChainedMethodCallExprFrom("withKiePMMLInlineTable", initializer).setArgument(0,
new NameExpr(inlineTableVariableName));
getChainedMethodCallExprFrom("withRegexField", initializer).setArgument(0,
getExpressionForObject(textIndexNormalization.getRegexField()));
getChainedMethodCallExprFrom("withRecursive", initializer).setArgument(0,
getExpressionForObject(textIndexNormalization.isRecursive()));
BooleanLiteralExpr isCaseSensitiveExpression = textIndexNormalization.isCaseSensitive() != null ? (BooleanLiteralExpr) getExpressionForObject(textIndexNormalization.isCaseSensitive()) : new BooleanLiteralExpr(false);
getChainedMethodCallExprFrom("withIsCaseSensitive", initializer).setArgument(0, isCaseSensitiveExpression);
getChainedMethodCallExprFrom("withMaxLevenshteinDistance", initializer).setArgument(0,
getExpressionForObject(textIndexNormalization.getMaxLevenshteinDistance()));
Expression wordSeparatorCharacterREExpression;
if (textIndexNormalization.getWordSeparatorCharacterRE() != null) {
String wordSeparatorCharacterRE = StringEscapeUtils.escapeJava(textIndexNormalization.getWordSeparatorCharacterRE());
wordSeparatorCharacterREExpression = new StringLiteralExpr(wordSeparatorCharacterRE);
} else {
wordSeparatorCharacterREExpression = new NullLiteralExpr();
}
getChainedMethodCallExprFrom("withWordSeparatorCharacterRE", initializer).setArgument(0, wordSeparatorCharacterREExpression);
BooleanLiteralExpr tokenizeExpression = textIndexNormalization.isTokenize() != null ? (BooleanLiteralExpr) getExpressionForObject(textIndexNormalization.isTokenize()) : new BooleanLiteralExpr(false);
getChainedMethodCallExprFrom("withTokenize", initializer).setArgument(0, tokenizeExpression);
textIndexNormalizationBody.getStatements().forEach(toReturn::addStatement);
return toReturn;
}
|
@Test
void getTextIndexNormalizationVariableDeclaration() throws IOException {
String variableName = "variableName";
BlockStmt retrieved =
KiePMMLTextIndexNormalizationFactory.getTextIndexNormalizationVariableDeclaration(variableName,
TEXTINDEXNORMALIZATION);
String text = getFileContent(TEST_01_SOURCE);
Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName));
assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
List<Class<?>> imports = Arrays.asList(Arrays.class, Collections.class, Collectors.class,
KiePMMLInlineTable.class, KiePMMLTextIndexNormalization.class,
KiePMMLRow.class, Map.class, Stream.class);
commonValidateCompilationWithImports(retrieved, imports);
}
|
@Override
public FileConfigDO getFileConfig(Long id) {
return fileConfigMapper.selectById(id);
}
|
@Test
public void testGetFileConfig() {
// mock data
FileConfigDO dbFileConfig = randomFileConfigDO().setMaster(false);
fileConfigMapper.insert(dbFileConfig);// @Sql: insert an existing row first
// prepare parameters
Long id = dbFileConfig.getId();
// invoke and assert
assertPojoEquals(dbFileConfig, fileConfigService.getFileConfig(id));
}
|
@Override
public Set<SystemScope> getAll() {
return repository.getAll();
}
|
@Test
public void getAll() {
assertThat(service.getAll(), equalTo(allScopes));
}
|
@Override
public Integer doCall() throws Exception {
CommandLineHelper
.loadProperties(p -> {
for (String k : p.stringPropertyNames()) {
String v = p.getProperty(k);
printer().printf("%s = %s%n", k, v);
}
});
return 0;
}
|
@Test
public void shouldListUserConfig() throws Exception {
UserConfigHelper.createUserConfig("""
camel-version=latest
kamelets-version=greatest
foo=bar
""");
ConfigList command = new ConfigList(new CamelJBangMain().withPrinter(printer));
command.doCall();
List<String> lines = printer.getLines();
Assertions.assertEquals(3, lines.size());
Assertions.assertEquals("camel-version = latest", lines.get(0));
Assertions.assertEquals("kamelets-version = greatest", lines.get(1));
Assertions.assertEquals("foo = bar", lines.get(2));
}
|
public static boolean matchesSelector(LabelSelector labelSelector, HasMetadata cr) {
if (labelSelector != null && labelSelector.getMatchLabels() != null) {
if (cr.getMetadata().getLabels() != null) {
return cr.getMetadata().getLabels().entrySet().containsAll(labelSelector.getMatchLabels().entrySet());
} else {
return labelSelector.getMatchLabels().isEmpty();
}
}
return true;
}
|
@Test
public void testMatchesSelector() {
Pod testResource = new PodBuilder()
.withNewMetadata()
.withName("test-pod")
.endMetadata()
.withNewSpec()
.endSpec()
.build();
// Resources without any labels
LabelSelector selector = null;
assertThat(matchesSelector(selector, testResource), is(true));
selector = new LabelSelectorBuilder().withMatchLabels(emptyMap()).build();
assertThat(matchesSelector(selector, testResource), is(true));
selector = new LabelSelectorBuilder().withMatchLabels(Map.of("label2", "value2")).build();
assertThat(matchesSelector(selector, testResource), is(false));
// Resources with Labels
testResource.getMetadata().setLabels(Map.of("label1", "value1", "label2", "value2"));
selector = null;
assertThat(matchesSelector(selector, testResource), is(true));
selector = new LabelSelectorBuilder().withMatchLabels(emptyMap()).build();
assertThat(matchesSelector(selector, testResource), is(true));
selector = new LabelSelectorBuilder().withMatchLabels(Map.of("label2", "value2")).build();
assertThat(matchesSelector(selector, testResource), is(true));
selector = new LabelSelectorBuilder().withMatchLabels(Map.of("label2", "value2", "label1", "value1")).build();
assertThat(matchesSelector(selector, testResource), is(true));
selector = new LabelSelectorBuilder().withMatchLabels(Map.of("label2", "value1")).build();
assertThat(matchesSelector(selector, testResource), is(false));
selector = new LabelSelectorBuilder().withMatchLabels(Map.of("label3", "value3")).build();
assertThat(matchesSelector(selector, testResource), is(false));
selector = new LabelSelectorBuilder().withMatchLabels(Map.of("label2", "value2", "label1", "value1", "label3", "value3")).build();
assertThat(matchesSelector(selector, testResource), is(false));
}
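The matching rule above reduces to a subset check on entry sets: a resource matches when its labels contain every selector entry. A minimal sketch:

```java
import java.util.Map;

public class SelectorSketch {
    public static void main(String[] args) {
        Map<String, String> resourceLabels = Map.of("label1", "value1", "label2", "value2");
        Map<String, String> matchLabels = Map.of("label2", "value2");
        // true: every selector entry appears in the resource labels
        System.out.println(resourceLabels.entrySet().containsAll(matchLabels.entrySet()));
    }
}
```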
|
public Statement buildStatement(final ParserRuleContext parseTree) {
return build(Optional.of(getSources(parseTree)), parseTree);
}
|
@Test
public void shouldFailOnPersistentQueryLimitClauseTable() {
// Given:
final SingleStatementContext stmt
= givenQuery("CREATE TABLE X AS SELECT * FROM TEST1 LIMIT 5;");
// Then:
Exception exception = assertThrows(KsqlException.class, () -> {
builder.buildStatement(stmt);
});
String expectedMessage = "CREATE TABLE AS SELECT statements don't support LIMIT clause.";
String actualMessage = exception.getMessage();
assertEquals(expectedMessage, actualMessage);
}
|
@Override
public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) {
IdentityProvider provider = resolveProviderOrHandleResponse(request, response, CALLBACK_PATH);
if (provider != null) {
handleProvider(request, response, provider);
}
}
|
@Test
public void do_filter_with_context() {
when(request.getContextPath()).thenReturn("/sonarqube");
when(request.getRequestURI()).thenReturn("/sonarqube/oauth2/callback/" + OAUTH2_PROVIDER_KEY);
identityProviderRepository.addIdentityProvider(oAuth2IdentityProvider);
when(threadLocalUserSession.hasSession()).thenReturn(true);
when(threadLocalUserSession.getLogin()).thenReturn(LOGIN);
underTest.doFilter(request, response, chain);
assertCallbackCalled(oAuth2IdentityProvider);
verify(authenticationEvent).loginSuccess(request, LOGIN, Source.oauth2(oAuth2IdentityProvider));
}
|
@Override
public void deregisterInstance(String serviceName, String ip, int port) throws NacosException {
deregisterInstance(serviceName, ip, port, Constants.DEFAULT_CLUSTER_NAME);
}
|
@Test
void testDeregisterInstance2() throws NacosException {
//given
String serviceName = "service1";
String groupName = "group1";
String ip = "1.1.1.1";
int port = 10000;
//when
client.deregisterInstance(serviceName, groupName, ip, port);
//then
verify(proxy, times(1)).deregisterService(eq(serviceName), eq(groupName),
argThat(instance -> instance.getIp().equals(ip) && instance.getPort() == port
&& Math.abs(instance.getWeight() - 1.0) < 0.01f && instance.getClusterName()
.equals(Constants.DEFAULT_CLUSTER_NAME)));
}
|
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
boolean addFieldName, boolean addCr ) {
String retval = "";
String fieldname = v.getName();
int length = v.getLength();
int precision = v.getPrecision();
if ( addFieldName ) {
retval += fieldname + " ";
}
int type = v.getType();
switch ( type ) {
case ValueMetaInterface.TYPE_TIMESTAMP:
case ValueMetaInterface.TYPE_DATE:
retval += "TIMESTAMP";
break;
case ValueMetaInterface.TYPE_BOOLEAN:
retval += "CHAR(1)";
break;
case ValueMetaInterface.TYPE_NUMBER:
case ValueMetaInterface.TYPE_INTEGER:
case ValueMetaInterface.TYPE_BIGNUMBER:
if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
fieldname.equalsIgnoreCase( pk ) // Primary key
) {
retval += "INTEGER NOT NULL PRIMARY KEY";
} else {
// Integer values...
if ( precision == 0 ) {
if ( length > 9 ) {
if ( length <= 18 ) { // can hold max. 18
retval += "NUMERIC(" + length + ")";
} else {
retval += "FLOAT";
}
} else {
retval += "INTEGER";
}
} else {
// Floating point values...
// A double-precision floating-point number is accurate to approximately 15 decimal places.
// +/- 2.2250738585072014e-308 through +/-1.7976931348623157e+308; stored in 8 byte
// NUMERIC values are stored in less bytes, so we try to use them instead of a FLOAT:
// 1 to 4 digits in 2 bytes, 5 to 9 digits in 4 bytes, 10 to 18 digits in 8 bytes
if ( length <= 18 ) {
retval += "NUMERIC(" + length;
if ( precision > 0 ) {
retval += ", " + precision;
}
retval += ")";
} else {
retval += "FLOAT";
}
}
}
break;
case ValueMetaInterface.TYPE_STRING:
// for LOB support see Neoview_JDBC_T4_Driver_Prog_Ref_2.2.pdf
if ( length > 0 ) {
if ( length <= 4028 ) {
retval += "VARCHAR(" + length + ")";
} else if ( length <= 4036 ) {
retval += "CHAR(" + length + ")"; // squeezing 8 bytes ;-)
} else {
retval += "CLOB"; // before we go to CLOB
}
} else {
retval += "CHAR(1)";
}
break;
case ValueMetaInterface.TYPE_BINARY:
retval += "BLOB";
break;
default:
retval += " UNKNOWN";
break;
}
if ( addCr ) {
retval += Const.CR;
}
return retval;
}
|
@Test
public void testGetFieldDefinition() {
assertEquals( "FOO TIMESTAMP",
nativeMeta.getFieldDefinition( new ValueMetaDate( "FOO" ), "", "", false, true, false ) );
assertEquals( "TIMESTAMP",
nativeMeta.getFieldDefinition( new ValueMetaTimestamp( "FOO" ), "", "", false, false, false ) );
assertEquals( "CHAR(1)",
nativeMeta.getFieldDefinition( new ValueMetaBoolean( "FOO" ), "", "", false, false, false ) );
// Primary/Tech Keys
assertEquals( "INTEGER NOT NULL PRIMARY KEY",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 8, 0 ), "", "FOO", true, false, false ) );
assertEquals( "INTEGER NOT NULL PRIMARY KEY",
nativeMeta.getFieldDefinition( new ValueMetaNumber( "FOO", 10, 0 ), "FOO", "", false, false, false ) );
assertEquals( "INTEGER NOT NULL PRIMARY KEY",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 8, 0 ), "", "FOO", false, false, false ) );
// Regular Integers
assertEquals( "NUMERIC(10)",
nativeMeta.getFieldDefinition( new ValueMetaNumber( "FOO", 10, 0 ), "", "", false, false, false ) );
assertEquals( "NUMERIC(18)",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 18, 0 ), "", "", false, false, false ) );
assertEquals( "INTEGER",
nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 9, 0 ), "", "", false, false, false ) );
assertEquals( "FLOAT",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 19, 0 ), "", "", false, false, false ) );
assertEquals( "NUMERIC(10, 5)",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 10, 5 ), "", "", false, false, false ) );
assertEquals( "FLOAT",
nativeMeta.getFieldDefinition( new ValueMetaNumber( "FOO", 19, 5 ), "", "", false, false, false ) );
assertEquals( "NUMERIC(-7)",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", -7, -2 ), "", "", false, false, false ) ); // This is a bug...
assertEquals( "NUMERIC(-7, 2)",
nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", -7, 2 ), "", "", false, false, false ) ); // This is a bug ...
// String Types
assertEquals( "VARCHAR(15)",
nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 15, 0 ), "", "", false, false, false ) );
assertEquals( "VARCHAR(4028)",
nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", nativeMeta.getMaxVARCHARLength(), 0 ), "", "", false, false, false ) );
assertEquals( "CHAR(4029)",
nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 4029, 0 ), "", "", false, false, false ) );
assertEquals( "CHAR(4036)",
nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 4036, 0 ), "", "", false, false, false ) );
assertEquals( "CLOB",
nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 4037, 0 ), "", "", false, false, false ) );
// Binary
assertEquals( "BLOB",
nativeMeta.getFieldDefinition( new ValueMetaBinary( "FOO", 4037, 0 ), "", "", false, false, false ) );
assertEquals( " UNKNOWN",
nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, false ) );
assertEquals( " UNKNOWN" + System.getProperty( "line.separator" ),
nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, true ) );
}
|
@Override public Span annotate(String value) {
return annotate(clock.currentTimeMicroseconds(), value);
}
|
@Test void annotate() {
span.annotate("foo");
span.flush();
assertThat(spans.get(0).containsAnnotation("foo"))
.isTrue();
}
|
public static boolean matricesSame(int[][] m1, int[][] m2) {
if (m1.length != m2.length) {
return false;
}
// Return early on the first mismatching row; two empty matrices compare equal.
for (var i = 0; i < m1.length; i++) {
if (!arraysSame(m1[i], m2[i])) {
return false;
}
}
return true;
}
|
@Test
void matricesSameTest() {
var matrix1 = new int[][]{{1, 4, 2, 6}, {5, 8, 6, 7}};
var matrix2 = new int[][]{{1, 4, 2, 6}, {5, 8, 6, 7}};
assertTrue(ArrayUtilityMethods.matricesSame(matrix1, matrix2));
}
|
@Override
public V get(K key) {
return map.get(key);
}
|
@Test
public void testGet() {
map.put(42, "foobar");
String result = adapter.get(42);
assertEquals("foobar", result);
}
|
public String getFilepath() {
return filepath;
}
|
@Test
public void testConstructorMessage() {
try {
throw new KettleFileNotFoundException( errorMessage );
} catch ( KettleFileNotFoundException e ) {
assertEquals( null, e.getCause() );
assertTrue( e.getMessage().contains( errorMessage ) );
assertEquals( null, e.getFilepath() );
}
}
|
public static <E> Set<E> createHashSet(int expectedMapSize) {
final int initialCapacity = (int) (expectedMapSize / HASHSET_DEFAULT_LOAD_FACTOR) + 1;
return new HashSet<>(initialCapacity, HASHSET_DEFAULT_LOAD_FACTOR);
}
|
@Test
public void testCreateHashSet() {
Set set = createHashSet(5);
assertInstanceOf(HashSet.class, set);
}
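A sketch of the sizing arithmetic in createHashSet, assuming HASHSET_DEFAULT_LOAD_FACTOR is the usual 0.75: for 5 expected elements the initial capacity works out to 7, so all 5 fit without a resize.

```java
import java.util.HashSet;
import java.util.Set;

public class HashSetSizingSketch {
    public static void main(String[] args) {
        float loadFactor = 0.75f; // assumed value of HASHSET_DEFAULT_LOAD_FACTOR
        int expectedSize = 5;
        int initialCapacity = (int) (expectedSize / loadFactor) + 1; // (int) 6.66f + 1 = 7
        System.out.println(initialCapacity); // prints 7
        Set<String> set = new HashSet<>(initialCapacity, loadFactor);
    }
}
```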
|
public String getProgress(final boolean running, final long size, final long transferred) {
return this.getProgress(System.currentTimeMillis(), running, size, transferred);
}
|
@Test
public void testProgressRemaining3() {
final long start = System.currentTimeMillis();
Speedometer m = new Speedometer(start, true);
assertEquals("900.0 KB (900,000 bytes) of 1.0 MB (90%, 450.0 KB/sec, 1 seconds remaining)",
m.getProgress(start + 2000L, true, 1000000L, 900000L));
}
|
@PostMapping("/save.json")
@AuthAction(AuthService.PrivilegeType.WRITE_RULE)
public Result<ApiDefinitionEntity> updateApi(@RequestBody UpdateApiReqVo reqVo) {
String app = reqVo.getApp();
if (StringUtil.isBlank(app)) {
return Result.ofFail(-1, "app can't be null or empty");
}
Long id = reqVo.getId();
if (id == null) {
return Result.ofFail(-1, "id can't be null");
}
ApiDefinitionEntity entity = repository.findById(id);
if (entity == null) {
return Result.ofFail(-1, "api does not exist, id=" + id);
}
// List of match rules
List<ApiPredicateItemVo> predicateItems = reqVo.getPredicateItems();
if (CollectionUtils.isEmpty(predicateItems)) {
return Result.ofFail(-1, "predicateItems can't empty");
}
List<ApiPredicateItemEntity> predicateItemEntities = new ArrayList<>();
for (ApiPredicateItemVo predicateItem : predicateItems) {
ApiPredicateItemEntity predicateItemEntity = new ApiPredicateItemEntity();
// Match strategy
int matchStrategy = predicateItem.getMatchStrategy();
if (!Arrays.asList(URL_MATCH_STRATEGY_EXACT, URL_MATCH_STRATEGY_PREFIX, URL_MATCH_STRATEGY_REGEX).contains(matchStrategy)) {
return Result.ofFail(-1, "Invalid matchStrategy: " + matchStrategy);
}
predicateItemEntity.setMatchStrategy(matchStrategy);
// Match pattern
String pattern = predicateItem.getPattern();
if (StringUtil.isBlank(pattern)) {
return Result.ofFail(-1, "pattern can't be null or empty");
}
predicateItemEntity.setPattern(pattern);
predicateItemEntities.add(predicateItemEntity);
}
entity.setPredicateItems(new LinkedHashSet<>(predicateItemEntities));
Date date = new Date();
entity.setGmtModified(date);
try {
entity = repository.save(entity);
} catch (Throwable throwable) {
logger.error("update gateway api error:", throwable);
return Result.ofThrowable(-1, throwable);
}
if (!publishApis(app, entity.getIp(), entity.getPort())) {
logger.warn("publish gateway apis fail after update");
}
return Result.ofSuccess(entity);
}
|
@Test
public void testUpdateApi() throws Exception {
String path = "/gateway/api/save.json";
// Add one entity to the in-memory repository for update
ApiDefinitionEntity addEntity = new ApiDefinitionEntity();
addEntity.setApp(TEST_APP);
addEntity.setIp(TEST_IP);
addEntity.setPort(TEST_PORT);
addEntity.setApiName("bbb");
Date date = new Date();
// To make the gmtModified different when doing the update
date = DateUtils.addSeconds(date, -1);
addEntity.setGmtCreate(date);
addEntity.setGmtModified(date);
Set<ApiPredicateItemEntity> addPredicateItemEntities = new HashSet<>();
addEntity.setPredicateItems(addPredicateItemEntities);
ApiPredicateItemEntity addPredicateItemEntity = new ApiPredicateItemEntity();
addPredicateItemEntity.setMatchStrategy(URL_MATCH_STRATEGY_EXACT);
addPredicateItemEntity.setPattern("/order");
addPredicateItemEntities.add(addPredicateItemEntity);
addEntity = repository.save(addEntity);
UpdateApiReqVo reqVo = new UpdateApiReqVo();
reqVo.setId(addEntity.getId());
reqVo.setApp(TEST_APP);
List<ApiPredicateItemVo> itemVos = new ArrayList<>();
ApiPredicateItemVo itemVo = new ApiPredicateItemVo();
itemVo.setMatchStrategy(URL_MATCH_STRATEGY_PREFIX);
itemVo.setPattern("/my_order");
itemVos.add(itemVo);
reqVo.setPredicateItems(itemVos);
given(sentinelApiClient.modifyApis(eq(TEST_APP), eq(TEST_IP), eq(TEST_PORT), any())).willReturn(true);
MockHttpServletRequestBuilder requestBuilder = MockMvcRequestBuilders.post(path);
requestBuilder.content(JSON.toJSONString(reqVo)).contentType(MediaType.APPLICATION_JSON);
// Do controller logic
MvcResult mvcResult = mockMvc.perform(requestBuilder)
.andExpect(MockMvcResultMatchers.status().isOk())
.andDo(MockMvcResultHandlers.print()).andReturn();
// Verify the modifyApis method has been called
verify(sentinelApiClient).modifyApis(eq(TEST_APP), eq(TEST_IP), eq(TEST_PORT), any());
Result<ApiDefinitionEntity> result = JSONObject.parseObject(mvcResult.getResponse().getContentAsString(), new TypeReference<Result<ApiDefinitionEntity>>() {});
assertTrue(result.isSuccess());
ApiDefinitionEntity entity = result.getData();
assertNotNull(entity);
assertEquals("bbb", entity.getApiName());
assertEquals(date, entity.getGmtCreate());
// To make sure gmtModified has been set and it's different from gmtCreate
assertNotNull(entity.getGmtModified());
assertNotEquals(entity.getGmtCreate(), entity.getGmtModified());
Set<ApiPredicateItemEntity> predicateItemEntities = entity.getPredicateItems();
assertEquals(1, predicateItemEntities.size());
ApiPredicateItemEntity predicateItemEntity = predicateItemEntities.iterator().next();
assertEquals(URL_MATCH_STRATEGY_PREFIX, predicateItemEntity.getMatchStrategy().intValue());
assertEquals("/my_order", predicateItemEntity.getPattern());
// Verify the entity which was updated in the in-memory repository
List<ApiDefinitionEntity> entitiesInMem = repository.findAllByApp(TEST_APP);
assertEquals(1, entitiesInMem.size());
assertEquals(entity, entitiesInMem.get(0));
}
|
public static Date parse(String date, ParsePosition pos) throws ParseException {
Exception fail = null;
try {
int offset = pos.getIndex();
// extract year
int year = parseInt(date, offset, offset += 4);
if (checkOffset(date, offset, '-')) {
offset += 1;
}
// extract month
int month = parseInt(date, offset, offset += 2);
if (checkOffset(date, offset, '-')) {
offset += 1;
}
// extract day
int day = parseInt(date, offset, offset += 2);
// default time value
int hour = 0;
int minutes = 0;
int seconds = 0;
// always use 0, otherwise the returned date will include the millis of the current time
int milliseconds = 0;
// if the value has no time component (and no time zone), we are done
boolean hasT = checkOffset(date, offset, 'T');
if (!hasT && (date.length() <= offset)) {
Calendar calendar = new GregorianCalendar(year, month - 1, day);
calendar.setLenient(false);
pos.setIndex(offset);
return calendar.getTime();
}
if (hasT) {
// extract hours, minutes, seconds and milliseconds
hour = parseInt(date, offset += 1, offset += 2);
if (checkOffset(date, offset, ':')) {
offset += 1;
}
minutes = parseInt(date, offset, offset += 2);
if (checkOffset(date, offset, ':')) {
offset += 1;
}
// second and milliseconds can be optional
if (date.length() > offset) {
char c = date.charAt(offset);
if (c != 'Z' && c != '+' && c != '-') {
seconds = parseInt(date, offset, offset += 2);
if (seconds > 59 && seconds < 63) {
seconds = 59; // truncate up to 3 leap seconds
}
// milliseconds can be optional in the format
if (checkOffset(date, offset, '.')) {
offset += 1;
int endOffset = indexOfNonDigit(date, offset + 1); // assume at least one digit
int parseEndOffset = Math.min(endOffset, offset + 3); // parse up to 3 digits
int fraction = parseInt(date, offset, parseEndOffset);
// compensate for "missing" digits
switch (parseEndOffset - offset) { // number of digits parsed
case 2:
milliseconds = fraction * 10;
break;
case 1:
milliseconds = fraction * 100;
break;
default:
milliseconds = fraction;
}
offset = endOffset;
}
}
}
}
// extract timezone
if (date.length() <= offset) {
throw new IllegalArgumentException("No time zone indicator");
}
TimeZone timezone = null;
char timezoneIndicator = date.charAt(offset);
if (timezoneIndicator == 'Z') {
timezone = TIMEZONE_UTC;
offset += 1;
} else if (timezoneIndicator == '+' || timezoneIndicator == '-') {
String timezoneOffset = date.substring(offset);
// When the timezone has no minutes, append them; valid timezones are, for example:
// +00:00, +0000 and +00
timezoneOffset = timezoneOffset.length() >= 5 ? timezoneOffset : timezoneOffset + "00";
offset += timezoneOffset.length();
// 18-Jun-2015, tatu: Minor simplification, skip offset of "+0000"/"+00:00"
if (timezoneOffset.equals("+0000") || timezoneOffset.equals("+00:00")) {
timezone = TIMEZONE_UTC;
} else {
// 18-Jun-2015, tatu: Looks like offsets only work from GMT, not UTC...
// not sure why, but that's the way it looks. Further, Javadocs for
// `java.util.TimeZone` specifically instruct use of GMT as base for
// custom timezones... odd.
String timezoneId = "GMT" + timezoneOffset;
// String timezoneId = "UTC" + timezoneOffset;
timezone = TimeZone.getTimeZone(timezoneId);
String act = timezone.getID();
if (!act.equals(timezoneId)) {
/* 22-Jan-2015, tatu: Looks like canonical version has colons, but we may be given
* one without. If so, don't sweat.
* Yes, very inefficient. Hopefully not hit often.
* If it becomes a perf problem, add 'loose' comparison instead.
*/
String cleaned = act.replace(":", "");
if (!cleaned.equals(timezoneId)) {
throw new IndexOutOfBoundsException(
"Mismatching time zone indicator: "
+ timezoneId
+ " given, resolves to "
+ timezone.getID());
}
}
}
} else {
throw new IndexOutOfBoundsException(
"Invalid time zone indicator '" + timezoneIndicator + "'");
}
Calendar calendar = new GregorianCalendar(timezone);
calendar.setLenient(false);
calendar.set(Calendar.YEAR, year);
calendar.set(Calendar.MONTH, month - 1);
calendar.set(Calendar.DAY_OF_MONTH, day);
calendar.set(Calendar.HOUR_OF_DAY, hour);
calendar.set(Calendar.MINUTE, minutes);
calendar.set(Calendar.SECOND, seconds);
calendar.set(Calendar.MILLISECOND, milliseconds);
pos.setIndex(offset);
return calendar.getTime();
// If we get a ParseException it'll already have the right message/offset.
// Other exception types can convert here.
} catch (IndexOutOfBoundsException | IllegalArgumentException e) {
fail = e;
}
String input = (date == null) ? null : ('"' + date + '"');
String msg = fail.getMessage();
if (msg == null || msg.isEmpty()) {
msg = "(" + fail.getClass().getName() + ")";
}
ParseException ex =
new ParseException("Failed to parse date [" + input + "]: " + msg, pos.getIndex());
ex.initCause(fail);
throw ex;
}
|
@Test
@SuppressWarnings("UndefinedEquals")
public void testDateParseSpecialTimezone() throws ParseException {
String dateStr = "2018-06-25T00:02:00-02:58";
Date date = ISO8601Utils.parse(dateStr, new ParsePosition(0));
GregorianCalendar calendar = createUtcCalendar();
calendar.set(2018, Calendar.JUNE, 25, 3, 0);
Date expectedDate = calendar.getTime();
assertThat(date).isEqualTo(expectedDate);
}
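A java.time cross-check of the expected instant in the test above: local time 00:02 at offset -02:58 lags UTC by 2h58m, so the same instant is 03:00 UTC.

```java
import java.time.Instant;
import java.time.OffsetDateTime;

public class OffsetCrossCheck {
    public static void main(String[] args) {
        Instant instant = OffsetDateTime.parse("2018-06-25T00:02:00-02:58").toInstant();
        System.out.println(instant); // prints 2018-06-25T03:00:00Z
    }
}
```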
|
@Override protected void propagationField(String keyName, String value) {
attachments.put(keyName, value);
}
|
@Test void propagationField() {
request.propagationField("b3", "d");
assertThat(attachments).containsEntry("b3", "d");
}
|
public void run(String[] args) {
if (!parseArguments(args)) {
showOptions();
return;
}
if (command == null) {
System.out.println("Error: Command is empty");
System.out.println();
showOptions();
return;
}
if (password == null) {
System.out.println("Error: Password is empty");
System.out.println();
showOptions();
return;
}
if (input == null) {
System.out.println("Error: Input is empty");
System.out.println();
showOptions();
return;
}
encryptor.setPassword(password);
if (algorithm != null) {
encryptor.setAlgorithm(algorithm);
}
if (randomSaltGeneratorAlgorithm != null) {
encryptor.setSaltGenerator(new RandomSaltGenerator(randomSaltGeneratorAlgorithm));
}
if (randomIvGeneratorAlgorithm != null) {
encryptor.setIvGenerator(new RandomIvGenerator(randomIvGeneratorAlgorithm));
}
if ("encrypt".equals(command)) {
System.out.println("Encrypted text: " + encryptor.encrypt(input));
} else {
System.out.println("Decrypted text: " + encryptor.decrypt(input));
}
}
|
@Test
public void testUnknownOption() {
Main main = new Main();
assertDoesNotThrow(() -> main.run("-c encrypt -xxx foo".split(" ")));
}
|
@Override
public Result apply(ApplyNode applyNode, Captures captures, Context context)
{
if (applyNode.getMayParticipateInAntiJoin()) {
return Result.empty();
}
Assignments subqueryAssignments = applyNode.getSubqueryAssignments();
if (subqueryAssignments.size() != 1) {
return Result.empty();
}
RowExpression expression = getOnlyElement(subqueryAssignments.getExpressions());
if (!(expression instanceof InSubqueryExpression)) {
return Result.empty();
}
InSubqueryExpression inPredicate = (InSubqueryExpression) expression;
VariableReferenceExpression inPredicateOutputVariable = getOnlyElement(subqueryAssignments.getVariables());
PlanNode leftInput = applyNode.getInput();
// Add unique id column if the set of columns do not form a unique key already
if (!((GroupReference) leftInput).getLogicalProperties().isPresent() ||
!((GroupReference) leftInput).getLogicalProperties().get().isDistinct(ImmutableSet.copyOf(leftInput.getOutputVariables()))) {
VariableReferenceExpression uniqueKeyVariable = context.getVariableAllocator().newVariable("unique", BIGINT);
leftInput = new AssignUniqueId(
applyNode.getSourceLocation(),
context.getIdAllocator().getNextId(),
leftInput,
uniqueKeyVariable);
}
VariableReferenceExpression leftVariableReference = inPredicate.getValue();
VariableReferenceExpression rightVariableReference = inPredicate.getSubquery();
JoinNode innerJoin = new JoinNode(
applyNode.getSourceLocation(),
context.getIdAllocator().getNextId(),
JoinType.INNER,
leftInput,
applyNode.getSubquery(),
ImmutableList.of(new EquiJoinClause(
leftVariableReference,
rightVariableReference)),
ImmutableList.<VariableReferenceExpression>builder()
.addAll(leftInput.getOutputVariables())
.build(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
Optional.empty(),
ImmutableMap.of());
AggregationNode distinctNode = new AggregationNode(
innerJoin.getSourceLocation(),
context.getIdAllocator().getNextId(),
innerJoin,
ImmutableMap.of(),
singleGroupingSet(ImmutableList.<VariableReferenceExpression>builder()
.addAll(innerJoin.getOutputVariables())
.build()),
ImmutableList.of(),
SINGLE,
Optional.empty(),
Optional.empty(),
Optional.empty());
ImmutableList<VariableReferenceExpression> referencedOutputs = ImmutableList.<VariableReferenceExpression>builder()
.addAll(applyNode.getInput().getOutputVariables())
.add(inPredicateOutputVariable)
.build();
ProjectNode finalProjectNode = new ProjectNode(
context.getIdAllocator().getNextId(),
distinctNode,
Assignments.builder()
.putAll(identityAssignments(distinctNode.getOutputVariables()))
.put(inPredicateOutputVariable, TRUE_CONSTANT)
.build()
.filter(referencedOutputs));
return Result.ofPlanNode(finalProjectNode);
}
|
@Test
public void testFiresForInPredicate()
{
tester().assertThat(new TransformUncorrelatedInPredicateSubqueryToDistinctInnerJoin())
.on(p -> p.apply(
assignment(
p.variable("x"),
inSubquery(p.variable("y"), p.variable("z"))),
emptyList(),
p.values(p.variable("y")),
p.values(p.variable("z"))))
.matches(project(
aggregation(ImmutableMap.of(),
SINGLE,
join(INNER,
ImmutableList.of(equiJoinClause("y", "z")),
assignUniqueId("unique", values("y")),
values("z")))));
}
|
@Override
protected int command() {
if (!validateConfigFilePresent()) {
return 1;
}
final MigrationConfig config;
try {
config = MigrationConfig.load(getConfigFile());
} catch (KsqlException | MigrationException e) {
LOGGER.error(e.getMessage());
return 1;
}
return command(
config,
MigrationsUtil::getKsqlClient,
getMigrationsDir(getConfigFile(), config),
Clock.systemDefaultZone()
);
}
|
@Test
public void shouldApplyFirstMigration() throws Exception {
// Given:
command = PARSER.parse("-n");
createMigrationFile(1, NAME, migrationsDir, COMMAND);
// extra migration to ensure only the first is applied
createMigrationFile(3, NAME, migrationsDir, COMMAND);
when(versionQueryResult.get()).thenReturn(ImmutableList.of());
// When:
final int result = command.command(config, (cfg, headers) -> ksqlClient, migrationsDir, Clock.fixed(
Instant.ofEpochMilli(1000), ZoneId.systemDefault()));
// Then:
assertThat(result, is(0));
final InOrder inOrder = inOrder(ksqlClient);
verifyMigratedVersion(inOrder, 1, "<none>", MigrationState.MIGRATED);
inOrder.verify(ksqlClient).close();
inOrder.verifyNoMoreInteractions();
}
|
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
final ConfiguredStatement<T> statement
) {
return inject(statement, new TopicProperties.Builder());
}
|
@Test
public void shouldThrowIfCleanupPolicyConfigPresentInCreateStreamAs() {
// Given:
givenStatement("CREATE STREAM x WITH (kafka_topic='topic', partitions=1, cleanup_policy='whatever') AS SELECT * FROM SOURCE;");
// When:
final Exception e = assertThrows(
KsqlException.class,
() -> injector.inject(statement, builder)
);
// Then:
assertThat(
e.getMessage(),
containsString("Invalid config variable in the WITH clause: CLEANUP_POLICY.\n"
+ "The CLEANUP_POLICY config is automatically inferred based on the type of source (STREAM or TABLE).\n"
+ "Users can't set the CLEANUP_POLICY config manually."));
}
|
@Override
public void process(ConfigChangedEvent event) {
if (event.getChangeType() == ConfigChangeType.DELETED) {
receiveConfigInfo("");
return;
}
receiveConfigInfo(event.getContent());
}
|
@Test
void process() {
MeshAppRuleListener meshAppRuleListener = new MeshAppRuleListener("demo-route");
StandardMeshRuleRouter standardMeshRuleRouter = Mockito.spy(new StandardMeshRuleRouter(URL.valueOf("")));
meshAppRuleListener.register(standardMeshRuleRouter);
ConfigChangedEvent configChangedEvent = new ConfigChangedEvent(
"demo-route" + MESH_RULE_DATA_ID_SUFFIX,
DynamicConfiguration.DEFAULT_GROUP,
rule1 + "---\n" + rule2,
ConfigChangeType.ADDED);
meshAppRuleListener.process(configChangedEvent);
ArgumentCaptor<String> appCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<List<Map<String, Object>>> ruleCaptor = ArgumentCaptor.forClass(List.class);
verify(standardMeshRuleRouter, times(1)).onRuleChange(appCaptor.capture(), ruleCaptor.capture());
List<Map<String, Object>> rulesReceived = ruleCaptor.getValue();
assertEquals(2, rulesReceived.size());
Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()));
Assertions.assertTrue(rulesReceived.contains(yaml.load(rule1)));
Assertions.assertTrue(rulesReceived.contains(yaml.load(rule2)));
configChangedEvent = new ConfigChangedEvent(
"demo-route" + MESH_RULE_DATA_ID_SUFFIX,
DynamicConfiguration.DEFAULT_GROUP,
rule1 + "---\n" + rule2,
ConfigChangeType.MODIFIED);
meshAppRuleListener.process(configChangedEvent);
verify(standardMeshRuleRouter, times(2)).onRuleChange(appCaptor.capture(), ruleCaptor.capture());
rulesReceived = ruleCaptor.getValue();
assertEquals(2, rulesReceived.size());
Assertions.assertTrue(rulesReceived.contains(yaml.load(rule1)));
Assertions.assertTrue(rulesReceived.contains(yaml.load(rule2)));
configChangedEvent = new ConfigChangedEvent(
"demo-route" + MESH_RULE_DATA_ID_SUFFIX,
DynamicConfiguration.DEFAULT_GROUP,
"",
ConfigChangeType.DELETED);
meshAppRuleListener.process(configChangedEvent);
verify(standardMeshRuleRouter, times(1)).clearRule("demo-route");
}
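The test above concatenates rule1 + "---\n" + rule2 because "---" is the YAML document separator; a standalone SnakeYAML sketch (toy keys, same Yaml construction as the test) of how one string yields two parsed documents:

```java
import org.yaml.snakeyaml.LoaderOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;

public class YamlMultiDocSketch {
    public static void main(String[] args) {
        Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()));
        int count = 0;
        for (Object doc : yaml.loadAll("a: 1\n---\nb: 2\n")) {
            count++; // each "---"-separated document is parsed separately
        }
        System.out.println(count); // prints 2
    }
}
```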
|
@VisibleForTesting
static long fromDuration(Duration duration, TimeUnit unit) {
return unit.convert(duration.toNanos(), TimeUnit.NANOSECONDS);
}
|
@Test
public void fromDuration() {
assertThat(TimerResultParser.fromDuration(Duration.ofNanos(5L), TimeUnit.NANOSECONDS))
.isEqualTo(5L);
assertThat(
TimerResultParser.fromDuration(
Duration.of(5L, ChronoUnit.MICROS), TimeUnit.MICROSECONDS))
.isEqualTo(5L);
assertThat(TimerResultParser.fromDuration(Duration.ofMillis(5L), TimeUnit.MILLISECONDS))
.isEqualTo(5L);
assertThat(TimerResultParser.fromDuration(Duration.ofSeconds(5L), TimeUnit.SECONDS))
.isEqualTo(5L);
assertThat(TimerResultParser.fromDuration(Duration.ofMinutes(5L), TimeUnit.MINUTES))
.isEqualTo(5L);
assertThat(TimerResultParser.fromDuration(Duration.ofHours(5L), TimeUnit.HOURS)).isEqualTo(5L);
assertThat(TimerResultParser.fromDuration(Duration.ofDays(5L), TimeUnit.DAYS)).isEqualTo(5L);
}
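One behavior the test above doesn't cover: TimeUnit.convert truncates toward zero, so sub-unit remainders are dropped. A minimal sketch:

```java
import java.time.Duration;
import java.util.concurrent.TimeUnit;

public class DurationTruncationSketch {
    public static void main(String[] args) {
        long seconds = TimeUnit.SECONDS.convert(Duration.ofMillis(1500).toNanos(), TimeUnit.NANOSECONDS);
        System.out.println(seconds); // prints 1, not 2 - the 500 ms remainder is truncated
    }
}
```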
|
@VisibleForTesting
protected RunnerApi.Pipeline resolveArtifacts(RunnerApi.Pipeline pipeline) {
RunnerApi.Pipeline.Builder pipelineBuilder = pipeline.toBuilder();
RunnerApi.Components.Builder componentsBuilder = pipelineBuilder.getComponentsBuilder();
componentsBuilder.clearEnvironments();
for (Map.Entry<String, RunnerApi.Environment> entry :
pipeline.getComponents().getEnvironmentsMap().entrySet()) {
RunnerApi.Environment.Builder environmentBuilder = entry.getValue().toBuilder();
environmentBuilder.clearDependencies();
for (RunnerApi.ArtifactInformation info : entry.getValue().getDependenciesList()) {
if (!BeamUrns.getUrn(RunnerApi.StandardArtifacts.Types.FILE).equals(info.getTypeUrn())) {
throw new RuntimeException(
String.format("unsupported artifact type %s", info.getTypeUrn()));
}
RunnerApi.ArtifactFilePayload filePayload;
try {
filePayload = RunnerApi.ArtifactFilePayload.parseFrom(info.getTypePayload());
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException("Error parsing artifact file payload.", e);
}
String stagedName;
if (BeamUrns.getUrn(RunnerApi.StandardArtifacts.Roles.STAGING_TO)
.equals(info.getRoleUrn())) {
try {
RunnerApi.ArtifactStagingToRolePayload stagingPayload =
RunnerApi.ArtifactStagingToRolePayload.parseFrom(info.getRolePayload());
stagedName = stagingPayload.getStagedName();
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException("Error parsing artifact staging_to role payload.", e);
}
} else {
try {
File source = new File(filePayload.getPath());
HashCode hashCode = Files.asByteSource(source).hash(Hashing.sha256());
stagedName = Environments.createStagingFileName(source, hashCode);
} catch (IOException e) {
throw new RuntimeException(
String.format("Error creating staged name for artifact %s", filePayload.getPath()),
e);
}
}
environmentBuilder.addDependencies(
info.toBuilder()
.setTypeUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Types.URL))
.setTypePayload(
RunnerApi.ArtifactUrlPayload.newBuilder()
.setUrl(
FileSystems.matchNewResource(options.getStagingLocation(), true)
.resolve(
stagedName, ResolveOptions.StandardResolveOptions.RESOLVE_FILE)
.toString())
.setSha256(filePayload.getSha256())
.build()
.toByteString()));
}
componentsBuilder.putEnvironments(entry.getKey(), environmentBuilder.build());
}
return pipelineBuilder.build();
}
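// A minimal sketch (assumptions: Guava on the classpath; the exact naming
// scheme of Environments.createStagingFileName is approximated, not the real
// implementation) of how a content-addressed staged name can be derived for
// a local artifact, as done in the non-STAGING_TO branch above.
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;

public class StagedNameSketch {
    public static void main(String[] args) throws IOException {
        File source = new File("/tmp/foo.jar"); // hypothetical artifact path
        HashCode hash = Files.asByteSource(source).hash(Hashing.sha256());
        // Stand-in for Environments.createStagingFileName(source, hash):
        String stagedName = source.getName() + "-" + hash;
        System.out.println(stagedName);
    }
}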
|
@Test
public void testResolveArtifacts() throws IOException {
DataflowPipelineOptions options = buildPipelineOptions();
DataflowRunner runner = DataflowRunner.fromOptions(options);
String stagingLocation = options.getStagingLocation().replaceFirst("/$", "");
RunnerApi.ArtifactInformation fooLocalArtifact =
RunnerApi.ArtifactInformation.newBuilder()
.setTypeUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Types.FILE))
.setTypePayload(
RunnerApi.ArtifactFilePayload.newBuilder()
.setPath("/tmp/foo.jar")
.build()
.toByteString())
.setRoleUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Roles.STAGING_TO))
.setRolePayload(
RunnerApi.ArtifactStagingToRolePayload.newBuilder()
.setStagedName("foo_staged.jar")
.build()
.toByteString())
.build();
RunnerApi.ArtifactInformation barLocalArtifact =
RunnerApi.ArtifactInformation.newBuilder()
.setTypeUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Types.FILE))
.setTypePayload(
RunnerApi.ArtifactFilePayload.newBuilder()
.setPath("/tmp/bar.jar")
.build()
.toByteString())
.setRoleUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Roles.STAGING_TO))
.setRolePayload(
RunnerApi.ArtifactStagingToRolePayload.newBuilder()
.setStagedName("bar_staged.jar")
.build()
.toByteString())
.build();
RunnerApi.Pipeline pipeline =
RunnerApi.Pipeline.newBuilder()
.setComponents(
RunnerApi.Components.newBuilder()
.putEnvironments(
"env",
RunnerApi.Environment.newBuilder()
.addAllDependencies(
ImmutableList.of(fooLocalArtifact, barLocalArtifact))
.build()))
.build();
RunnerApi.ArtifactInformation fooStagedArtifact =
RunnerApi.ArtifactInformation.newBuilder()
.setTypeUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Types.URL))
.setTypePayload(
RunnerApi.ArtifactUrlPayload.newBuilder()
.setUrl(stagingLocation + "/foo_staged.jar")
.build()
.toByteString())
.setRoleUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Roles.STAGING_TO))
.setRolePayload(
RunnerApi.ArtifactStagingToRolePayload.newBuilder()
.setStagedName("foo_staged.jar")
.build()
.toByteString())
.build();
RunnerApi.ArtifactInformation barStagedArtifact =
RunnerApi.ArtifactInformation.newBuilder()
.setTypeUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Types.URL))
.setTypePayload(
RunnerApi.ArtifactUrlPayload.newBuilder()
.setUrl(stagingLocation + "/bar_staged.jar")
.build()
.toByteString())
.setRoleUrn(BeamUrns.getUrn(RunnerApi.StandardArtifacts.Roles.STAGING_TO))
.setRolePayload(
RunnerApi.ArtifactStagingToRolePayload.newBuilder()
.setStagedName("bar_staged.jar")
.build()
.toByteString())
.build();
RunnerApi.Pipeline expectedPipeline =
RunnerApi.Pipeline.newBuilder()
.setComponents(
RunnerApi.Components.newBuilder()
.putEnvironments(
"env",
RunnerApi.Environment.newBuilder()
.addAllDependencies(
ImmutableList.of(fooStagedArtifact, barStagedArtifact))
.build()))
.build();
assertThat(runner.resolveArtifacts(pipeline), equalTo(expectedPipeline));
}
|
public static <T extends Enum<T>> Validator enumValues(final Class<T> enumClass) {
final String[] enumValues = EnumSet.allOf(enumClass)
.stream()
.map(Object::toString)
.toArray(String[]::new);
final String[] validValues = Arrays.copyOf(enumValues, enumValues.length + 1);
validValues[enumValues.length] = null;
return ValidCaseInsensitiveString.in(validValues);
}
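// A minimal sketch (not from the source) of the array produced above: the
// enum constants plus one trailing null slot, so an unset config value also
// validates.
import java.util.Arrays;
import java.util.EnumSet;

public class EnumValuesSketch {
    enum TestEnum { FOO, BAR } // hypothetical enum mirroring the test's

    public static void main(String[] args) {
        String[] enumValues = EnumSet.allOf(TestEnum.class).stream()
                .map(Object::toString)
                .toArray(String[]::new);
        String[] validValues = Arrays.copyOf(enumValues, enumValues.length + 1);
        System.out.println(Arrays.toString(validValues)); // [FOO, BAR, null]
    }
}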
|
@Test
public void shouldFailIfValueNotInEnum() {
// Given:
final Validator validator = ConfigValidators.enumValues(TestEnum.class);
// When:
final Exception e = assertThrows(
ConfigException.class,
() -> validator.ensureValid("propName", "NotValid")
);
// Then:
assertThat(e.getMessage(), containsString("String must be one of: FOO, BAR"));
}
|
@Override
public int hashCode() {
int result = major;
result = 31 * result + minor;
return result;
}
|
@Test
public void hashCodeTest() {
assertEquals(Version.UNKNOWN.hashCode(), Version.UNKNOWN.hashCode());
assertTrue(Version.UNKNOWN.hashCode() != Version.of(4, 0).hashCode());
}
|
public static List<Event> computeEventDiff(final Params params) {
final List<Event> events = new ArrayList<>();
emitPerNodeDiffEvents(createBaselineParams(params), events);
emitWholeClusterDiffEvent(createBaselineParams(params), events);
emitDerivedBucketSpaceStatesDiffEvents(params, events);
return events;
}
|
@Test
void cluster_down_event_without_reason_annotation_emits_generic_down_event() {
final EventFixture fixture = EventFixture.createForNodes(3)
.clusterStateBefore("distributor:3 storage:3")
.clusterStateAfter("cluster:d distributor:3 storage:3");
final List<Event> events = fixture.computeEventDiff();
assertThat(events.size(), equalTo(1));
assertThat(events, hasItem(
clusterEventWithDescription("Cluster is down")));
}
|
@Override
public CompletableFuture<Void> heartbeatFromJobManager(
ResourceID resourceID, AllocatedSlotReport allocatedSlotReport) {
return jobManagerHeartbeatManager.requestHeartbeat(resourceID, allocatedSlotReport);
}
|
@Test
void testJobManagerBecomesUnreachableTriggersDisconnect() throws Exception {
final ResourceID jmResourceId = ResourceID.generate();
runJobManagerHeartbeatTest(
jmResourceId,
failedRpcEnabledHeartbeatServices,
jobMasterGatewayBuilder ->
jobMasterGatewayBuilder.setTaskManagerHeartbeatFunction(
(resourceID, taskExecutorToJobManagerHeartbeatPayload) ->
FutureUtils.completedExceptionally(
new RecipientUnreachableException(
"sender",
"recipient",
"job manager is unreachable."))),
(ignoredTaskExecutorResourceId, taskExecutorGateway, allocationId) ->
taskExecutorGateway.heartbeatFromJobManager(
jmResourceId,
new AllocatedSlotReport(
jobId,
Collections.singleton(
new AllocatedSlotInfo(0, allocationId)))));
}
|
@Override
public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
if (args.isEmpty()) {
printHelp(out);
return 0;
}
OutputStream output = out;
if (args.size() > 1) {
output = Util.fileOrStdout(args.get(args.size() - 1), out);
args = args.subList(0, args.size() - 1);
}
DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>());
Schema schema = null;
Map<String, byte[]> metadata = new TreeMap<>();
String inputCodec = null;
for (String inFile : expandsInputFiles(args)) {
InputStream input = Util.fileOrStdin(inFile, in);
DataFileStream<GenericRecord> reader = new DataFileStream<>(input, new GenericDatumReader<>());
if (schema == null) {
// this is the first file - set up the writer, and store the
// Schema & metadata we'll use.
schema = reader.getSchema();
for (String key : reader.getMetaKeys()) {
if (!DataFileWriter.isReservedMeta(key)) {
byte[] metadatum = reader.getMeta(key);
metadata.put(key, metadatum);
writer.setMeta(key, metadatum);
}
}
inputCodec = reader.getMetaString(DataFileConstants.CODEC);
if (inputCodec == null) {
inputCodec = DataFileConstants.NULL_CODEC;
}
writer.setCodec(CodecFactory.fromString(inputCodec));
writer.create(schema, output);
} else {
// check that we're appending to the same schema & metadata.
if (!schema.equals(reader.getSchema())) {
err.println("input files have different schemas");
reader.close();
return 1;
}
for (String key : reader.getMetaKeys()) {
if (!DataFileWriter.isReservedMeta(key)) {
byte[] metadatum = reader.getMeta(key);
byte[] writersMetadatum = metadata.get(key);
if (!Arrays.equals(metadatum, writersMetadatum)) {
err.println("input files have different non-reserved metadata");
reader.close();
return 2;
}
}
}
String thisCodec = reader.getMetaString(DataFileConstants.CODEC);
if (thisCodec == null) {
thisCodec = DataFileConstants.NULL_CODEC;
}
if (!inputCodec.equals(thisCodec)) {
err.println("input files have different codecs");
reader.close();
return 3;
}
}
writer.appendAllFrom(reader, /* recompress */ false);
reader.close();
}
writer.close();
return 0;
}
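// A minimal standalone sketch (hypothetical file paths) of the same
// concatenation idea: the first input fixes the schema, and appendAllFrom
// copies data blocks without re-encoding individual records.
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class AvroConcatSketch {
    public static void main(String[] args) throws IOException {
        File out = new File("/tmp/combined.avro");
        File[] inputs = {new File("/tmp/a.avro"), new File("/tmp/b.avro")};
        try (DataFileWriter<GenericRecord> writer =
                new DataFileWriter<>(new GenericDatumWriter<>())) {
            boolean first = true;
            for (File in : inputs) {
                try (DataFileStream<GenericRecord> reader = new DataFileStream<>(
                        new FileInputStream(in), new GenericDatumReader<>())) {
                    if (first) {
                        // The first input fixes the output schema.
                        writer.create(reader.getSchema(), out);
                        first = false;
                    }
                    writer.appendAllFrom(reader, /* recompress */ false);
                }
            }
        }
    }
}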
|
@Test
void fileDoesNotExist() throws Exception {
assertThrows(FileNotFoundException.class, () -> {
File output = new File(INPUT_DIR, name.getMethodName() + ".avro");
List<String> args = asList(new File(INPUT_DIR, "/doNotExist").getAbsolutePath(), output.getAbsolutePath());
new ConcatTool().run(System.in, System.out, System.err, args);
});
}
|
private int transitionToObserver(final CommandLine cmd)
throws IOException {
String[] argv = cmd.getArgs();
if (argv.length != 1) {
errOut.println("transitionToObserver: incorrect number of arguments");
printUsage(errOut, "-transitionToObserver", USAGE_DFS_MERGED);
return -1;
}
HAServiceTarget target = resolveTarget(argv[0]);
if (!checkSupportObserver(target)) {
return -1;
}
if (!checkManualStateManagementOK(target)) {
return -1;
}
try {
HAServiceProtocol proto = target.getProxy(getConf(), 0);
HAServiceProtocolHelper.transitionToObserver(proto, createReqInfo());
} catch (ServiceFailedException e) {
errOut.println("transitionToObserver failed! " + e.getLocalizedMessage());
return -1;
}
return 0;
}
|
@Test
public void testTransitionToObserver() throws Exception {
assertEquals(0, runTool("-transitionToObserver", "nn1"));
Mockito.verify(mockProtocol).transitionToObserver(anyReqInfo());
}
|
@Override
public Collection<CommittableWithLineage<CommT>> commit(
boolean fullyReceived, Committer<CommT> committer)
throws IOException, InterruptedException {
Collection<CommitRequestImpl<CommT>> requests = getPendingRequests(fullyReceived);
requests.forEach(CommitRequestImpl::setSelected);
committer.commit(new ArrayList<>(requests));
requests.forEach(CommitRequestImpl::setCommittedIfNoError);
Collection<CommittableWithLineage<CommT>> committed = drainFinished();
metricGroup.setCurrentPendingCommittablesGauge(() -> getPendingRequests(false).size());
return committed;
}
|
@Test
void testCommit() throws IOException, InterruptedException {
final CheckpointCommittableManagerImpl<Integer> checkpointCommittables =
new CheckpointCommittableManagerImpl<>(1, 1, 1L, METRIC_GROUP);
checkpointCommittables.upsertSummary(new CommittableSummary<>(1, 1, 1L, 1, 0, 0));
checkpointCommittables.upsertSummary(new CommittableSummary<>(2, 1, 1L, 2, 0, 0));
checkpointCommittables.addCommittable(new CommittableWithLineage<>(3, 1L, 1));
checkpointCommittables.addCommittable(new CommittableWithLineage<>(4, 1L, 2));
final Committer<Integer> committer = new NoOpCommitter();
// Only commit fully received committables
Collection<CommittableWithLineage<Integer>> commitRequests =
checkpointCommittables.commit(true, committer);
assertThat(commitRequests)
.hasSize(1)
.satisfiesExactly(c -> assertThat(c.getCommittable()).isEqualTo(3));
// Commit all committables
commitRequests = checkpointCommittables.commit(false, committer);
assertThat(commitRequests)
.hasSize(1)
.satisfiesExactly(c -> assertThat(c.getCommittable()).isEqualTo(4));
}
|
public List<ErasureCodingPolicy> loadPolicy(String policyFilePath) {
try {
File policyFile = getPolicyFile(policyFilePath);
if (!policyFile.exists()) {
LOG.warn("No EC policy file found");
return Collections.emptyList();
}
return loadECPolicies(policyFile);
} catch (ParserConfigurationException | IOException | SAXException e) {
throw new RuntimeException("Failed to load EC policy file: "
+ policyFilePath, e);
}
}
|
@Test
public void testNullECSchemaOptionValue() throws Exception {
PrintWriter out = new PrintWriter(new FileWriter(POLICY_FILE));
out.println("<?xml version=\"1.0\"?>");
out.println("<configuration>");
out.println("<layoutversion>1</layoutversion>");
out.println("<schemas>");
out.println(" <schema id=\"RSk12m4\">");
out.println(" <codec>RS</codec>");
out.println(" <k>12</k>");
out.println(" <m>4</m>");
out.println(" </schema>");
out.println(" <schema id=\"RS-legacyk12m4\">");
out.println(" <codec>RS-legacy</codec>");
out.println(" <k>12</k>");
out.println(" <m>4</m>");
out.println(" <option></option>");
out.println(" </schema>");
out.println("</schemas>");
out.println("<policies>");
out.println(" <policy>");
out.println(" <schema>RS-legacyk12m4</schema>");
out.println(" <cellsize>1024</cellsize>");
out.println(" </policy>");
out.println(" <policy>");
out.println(" <schema>RSk12m4</schema>");
out.println(" <cellsize>20480</cellsize>");
out.println(" </policy>");
out.println("</policies>");
out.println("</configuration>");
out.close();
ECPolicyLoader ecPolicyLoader = new ECPolicyLoader();
try {
ecPolicyLoader.loadPolicy(POLICY_FILE);
fail("IllegalArgumentException should be thrown for null value");
} catch (IllegalArgumentException e) {
assertExceptionContains("Value of <option> is null", e);
}
}
|
public String doLayout(ILoggingEvent event) {
if (!isStarted()) {
return CoreConstants.EMPTY_STRING;
}
return writeLoopOnConverters(event);
}
|
@Test
public void testNopExeptionHandler() {
public void testNopExceptionHandler() {
pl.setPattern("%nopex %m%n");
pl.start();
String val = pl.doLayout(le);
assertThat(val, not(containsString("java.lang.Exception: Bogus exception")));
}
|
public static boolean checkOtherParams(String otherParams) {
return !StringUtils.isEmpty(otherParams) && !JSONUtils.checkJsonValid(otherParams);
}
|
@Test
public void testCheckOtherParams() {
Assertions.assertFalse(CheckUtils.checkOtherParams(null));
Assertions.assertFalse(CheckUtils.checkOtherParams(""));
Assertions.assertTrue(CheckUtils.checkOtherParams("xxx"));
Assertions.assertFalse(CheckUtils.checkOtherParams("{}"));
Assertions.assertFalse(CheckUtils.checkOtherParams("{\"key1\":111}"));
}
|
@Override
public DescribeShareGroupsResult describeShareGroups(final Collection<String> groupIds,
final DescribeShareGroupsOptions options) {
SimpleAdminApiFuture<CoordinatorKey, ShareGroupDescription> future =
DescribeShareGroupsHandler.newFuture(groupIds);
DescribeShareGroupsHandler handler = new DescribeShareGroupsHandler(options.includeAuthorizedOperations(), logContext);
invokeDriver(handler, future, options.timeoutMs);
return new DescribeShareGroupsResult(future.all().entrySet().stream()
.collect(Collectors.toMap(entry -> entry.getKey().idValue, Map.Entry::getValue)));
}
|
@Test
public void testDescribeMultipleShareGroups() {
try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) {
env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
env.kafkaClient().prepareResponse(prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller()));
ShareGroupDescribeResponseData.TopicPartitions topicPartitions = new ShareGroupDescribeResponseData.TopicPartitions()
.setTopicName("my_topic")
.setPartitions(asList(0, 1, 2));
final ShareGroupDescribeResponseData.Assignment memberAssignment = new ShareGroupDescribeResponseData.Assignment()
.setTopicPartitions(asList(topicPartitions));
ShareGroupDescribeResponseData group0Data = new ShareGroupDescribeResponseData();
group0Data.groups().add(new ShareGroupDescribeResponseData.DescribedGroup()
.setGroupId(GROUP_ID)
.setGroupState(ShareGroupState.STABLE.toString())
.setMembers(asList(
new ShareGroupDescribeResponseData.Member()
.setMemberId("0")
.setClientId("clientId0")
.setClientHost("clientHost")
.setAssignment(memberAssignment),
new ShareGroupDescribeResponseData.Member()
.setMemberId("1")
.setClientId("clientId1")
.setClientHost("clientHost")
.setAssignment(memberAssignment))));
ShareGroupDescribeResponseData group1Data = new ShareGroupDescribeResponseData();
group1Data.groups().add(new ShareGroupDescribeResponseData.DescribedGroup()
.setGroupId("group-1")
.setGroupState(ShareGroupState.STABLE.toString())
.setMembers(asList(
new ShareGroupDescribeResponseData.Member()
.setMemberId("0")
.setClientId("clientId0")
.setClientHost("clientHost")
.setAssignment(memberAssignment),
new ShareGroupDescribeResponseData.Member()
.setMemberId("1")
.setClientId("clientId1")
.setClientHost("clientHost")
.setAssignment(memberAssignment))));
env.kafkaClient().prepareResponse(new ShareGroupDescribeResponse(group0Data));
env.kafkaClient().prepareResponse(new ShareGroupDescribeResponse(group1Data));
Collection<String> groups = new HashSet<>();
groups.add(GROUP_ID);
groups.add("group-1");
final DescribeShareGroupsResult result = env.adminClient().describeShareGroups(groups);
assertEquals(2, result.describedGroups().size());
assertEquals(groups, result.describedGroups().keySet());
}
}
|
@Override
public double read() {
return gaugeSource.read();
}
|
@Test
public void whenLongProbe() {
metricsRegistry.registerStaticProbe(this, "foo", MANDATORY,
(LongProbeFunction) o -> 10);
DoubleGauge gauge = metricsRegistry.newDoubleGauge("foo");
double actual = gauge.read();
assertEquals(10, actual, 0.1);
}
|
public static ScheduledTaskHandler of(UUID uuid, String schedulerName, String taskName) {
return new ScheduledTaskHandlerImpl(uuid, -1, schedulerName, taskName);
}
|
@Test(expected = IllegalArgumentException.class)
public void of_withWrongParts() {
ScheduledTaskHandler.of("urn:hzScheduledTaskHandler:-\u00000\u0000Scheduler");
}
|
public String nextNonCliCommand() {
String line;
do {
line = terminal.readLine();
} while (maybeHandleCliSpecificCommands(line));
return line;
}
|
@Test
public void shouldExecuteCliCommands() {
// Given:
when(lineSupplier.get())
.thenReturn(CLI_CMD_NAME)
.thenReturn("not a CLI command;");
// When:
console.nextNonCliCommand();
// Then:
verify(cliCommand).execute(eq(ImmutableList.of()), any());
}
|
public Value parse(String json) {
return this.delegate.parse(json);
}
|
@Test
public void testOrdinaryInteger() throws Exception {
final JsonParser parser = new JsonParser();
final Value msgpackValue = parser.parse("12345");
assertTrue(msgpackValue.getValueType().isNumberType());
assertTrue(msgpackValue.getValueType().isIntegerType());
assertFalse(msgpackValue.getValueType().isFloatType());
assertFalse(msgpackValue.getValueType().isStringType());
assertEquals(12345, msgpackValue.asIntegerValue().asInt());
}
|
public MessageType convert(Schema avroSchema) {
if (!avroSchema.getType().equals(Schema.Type.RECORD)) {
throw new IllegalArgumentException("Avro schema must be a record.");
}
return new MessageType(avroSchema.getFullName(), convertFields(avroSchema.getFields(), ""));
}
|
@Test
public void testParquetInt96DefaultFail() throws Exception {
Schema schema = Schema.createRecord("myrecord", null, null, false);
MessageType parquetSchemaWithInt96 =
MessageTypeParser.parseMessageType("message myrecord {\n required int96 int96_field;\n}\n");
assertThrows(
"INT96 is deprecated. As interim enable READ_INT96_AS_FIXED flag to read as byte array.",
IllegalArgumentException.class,
() -> new AvroSchemaConverter().convert(parquetSchemaWithInt96));
}
|
public String retrieveBucketStats(ClientParameters.SelectionType type, String id, BucketId bucketId, String bucketSpace) throws BucketStatsException {
String documentSelection = createDocumentSelection(type, id);
StatBucketMessage msg = new StatBucketMessage(bucketId, bucketSpace, documentSelection);
StatBucketReply statBucketReply = sendMessage(msg, StatBucketReply.class);
return statBucketReply.getResults();
}
|
@Test
void testRetrieveBucketStats() throws BucketStatsException {
String docId = "id:ns:type::another";
String bucketInfo = "I like turtles!";
BucketId bucketId = bucketIdFactory.getBucketId(new DocumentId(docId));
StatBucketReply reply = new StatBucketReply();
reply.setResults(bucketInfo);
when(mockedSession.syncSend(any())).thenReturn(reply);
String result = createRetriever().retrieveBucketStats(ClientParameters.SelectionType.DOCUMENT, docId, bucketId, bucketSpace);
verify(mockedSession, times(1)).syncSend(any());
assertEquals(bucketInfo, result);
}
|
@Override
public void deletePost(Long id) {
// Verify the post exists
validatePostExists(id);
// Delete the post
postMapper.deleteById(id);
}
|
@Test
public void testValidatePost_notFoundForDelete() {
// Prepare parameters
Long id = randomLongId();
// Invoke and assert the exception
assertServiceException(() -> postService.deletePost(id), POST_NOT_FOUND);
}
|
@Override
@Transactional(rollbackFor = Exception.class)
public void updateDiscountActivity(DiscountActivityUpdateReqVO updateReqVO) {
// Verify the activity exists
DiscountActivityDO discountActivity = validateDiscountActivityExists(updateReqVO.getId());
if (discountActivity.getStatus().equals(CommonStatusEnum.DISABLE.getStatus())) { // a closed activity cannot be modified
throw exception(DISCOUNT_ACTIVITY_UPDATE_FAIL_STATUS_CLOSED);
}
// Verify the products do not conflict
validateDiscountActivityProductConflicts(updateReqVO.getId(), updateReqVO.getProducts());
// Update the activity
DiscountActivityDO updateObj = DiscountActivityConvert.INSTANCE.convert(updateReqVO)
.setStatus(PromotionUtils.calculateActivityStatus(updateReqVO.getEndTime()));
discountActivityMapper.updateById(updateObj);
// Update the products
updateDiscountProduct(updateReqVO);
}
|
@Test
public void testUpdateDiscountActivity_success() {
// mock data (discount activity)
DiscountActivityDO dbDiscountActivity = randomPojo(DiscountActivityDO.class);
discountActivityMapper.insert(dbDiscountActivity); // @Sql: first insert an existing record
// mock data (discount products)
DiscountProductDO dbDiscountProduct01 = randomPojo(DiscountProductDO.class, o -> o.setActivityId(dbDiscountActivity.getId())
.setSpuId(1L).setSkuId(2L).setDiscountType(PromotionDiscountTypeEnum.PRICE.getType()).setDiscountPrice(3).setDiscountPercent(null));
DiscountProductDO dbDiscountProduct02 = randomPojo(DiscountProductDO.class, o -> o.setActivityId(dbDiscountActivity.getId())
.setSpuId(10L).setSkuId(20L).setDiscountType(PromotionDiscountTypeEnum.PERCENT.getType()).setDiscountPercent(30).setDiscountPrice(null));
discountProductMapper.insert(dbDiscountProduct01);
discountProductMapper.insert(dbDiscountProduct02);
// Prepare parameters
DiscountActivityUpdateReqVO reqVO = randomPojo(DiscountActivityUpdateReqVO.class, o -> {
o.setId(dbDiscountActivity.getId()); // set the ID to update
// a future time window, so the activity ends up in WAIT status
o.setStartTime(addTime(Duration.ofDays(1))).setEndTime(addTime(Duration.ofDays(2)));
// set the products
o.setProducts(asList(new DiscountActivityBaseVO.Product().setSpuId(1L).setSkuId(2L)
.setDiscountType(PromotionDiscountTypeEnum.PRICE.getType()).setDiscountPrice(3).setDiscountPercent(null),
new DiscountActivityBaseVO.Product().setSpuId(100L).setSkuId(200L)
.setDiscountType(PromotionDiscountTypeEnum.PERCENT.getType()).setDiscountPercent(30).setDiscountPrice(null)));
});
// Invoke
discountActivityService.updateDiscountActivity(reqVO);
// Verify the activity
DiscountActivityDO discountActivity = discountActivityMapper.selectById(reqVO.getId()); // fetch the latest record
assertPojoEquals(reqVO, discountActivity);
assertEquals(discountActivity.getStatus(), PromotionActivityStatusEnum.WAIT.getStatus());
// Verify the products
List<DiscountProductDO> discountProducts = discountProductMapper.selectList(DiscountProductDO::getActivityId, discountActivity.getId());
assertEquals(discountProducts.size(), reqVO.getProducts().size());
for (int i = 0; i < reqVO.getProducts().size(); i++) {
DiscountActivityBaseVO.Product product = reqVO.getProducts().get(i);
DiscountProductDO discountProduct = discountProducts.get(i);
assertEquals(discountProduct.getActivityId(), discountActivity.getId());
assertEquals(discountProduct.getSpuId(), product.getSpuId());
assertEquals(discountProduct.getSkuId(), product.getSkuId());
assertEquals(discountProduct.getDiscountType(), product.getDiscountType());
assertEquals(discountProduct.getDiscountPrice(), product.getDiscountPrice());
assertEquals(discountProduct.getDiscountPercent(), product.getDiscountPercent());
}
}
|
public String getString(HazelcastProperty property) {
String value = properties.getProperty(property.getName());
if (value != null) {
return value;
}
value = property.getSystemProperty();
if (value != null) {
return value;
}
HazelcastProperty parent = property.getParent();
if (parent != null) {
return getString(parent);
}
String deprecatedName = property.getDeprecatedName();
if (deprecatedName != null) {
value = get(deprecatedName);
if (value == null) {
value = System.getProperty(deprecatedName);
}
if (value != null) {
// we don't have a logger available, and the Logging service is constructed after the Properties are created.
System.err.print("Don't use deprecated '" + deprecatedName + "' "
+ "but use '" + property.getName() + "' instead. "
+ "The former name will be removed in the next Hazelcast release.");
return value;
}
}
Function<HazelcastProperties, ?> function = property.getFunction();
if (function != null) {
return "" + function.apply(this);
}
return property.getDefaultValue();
}
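// A minimal sketch (assuming the same Hazelcast API exercised by the test
// below) of the resolution order implemented above: configured value, then
// system property, then parent property, then deprecated name, then the
// derived-value function, then the declared default.
import com.hazelcast.config.Config;
import com.hazelcast.spi.properties.ClusterProperty;
import com.hazelcast.spi.properties.HazelcastProperties;

public class PropertyResolutionSketch {
    public static void main(String[] args) {
        Config config = new Config();
        config.setProperty(ClusterProperty.IO_THREAD_COUNT.getName(), "4");
        HazelcastProperties props = new HazelcastProperties(config);
        // No explicit value is set for the child property, so it falls back
        // to its parent property's configured value.
        System.out.println(props.getString(ClusterProperty.IO_INPUT_THREAD_COUNT)); // 4
    }
}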
|
@Test
public void setProperty_inheritActualValueOfParentProperty() {
config.setProperty(ClusterProperty.IO_THREAD_COUNT.getName(), "1");
HazelcastProperties properties = new HazelcastProperties(config);
String inputIOThreadCount = properties.getString(ClusterProperty.IO_INPUT_THREAD_COUNT);
assertEquals("1", inputIOThreadCount);
assertNotEquals(ClusterProperty.IO_THREAD_COUNT.getDefaultValue(), inputIOThreadCount);
}
|
public static Area getArea(Integer id) {
return areas.get(id);
}
|
@Test
public void testGetArea() {
// Invoke: Beijing
Area area = AreaUtils.getArea(110100);
// Assert
assertEquals(area.getId(), 110100);
assertEquals(area.getName(), "北京市");
assertEquals(area.getType(), AreaTypeEnum.CITY.getType());
assertEquals(area.getParent().getId(), 110000);
assertEquals(area.getChildren().size(), 16);
}
|
@Override
public void start() {
boolean isTelemetryActivated = config.getBoolean(SONAR_TELEMETRY_ENABLE.getKey())
.orElseThrow(() -> new IllegalStateException(String.format("Setting '%s' must be provided.", SONAR_TELEMETRY_ENABLE.getKey())));
boolean hasOptOut = internalProperties.read(I_PROP_OPT_OUT).isPresent();
if (!isTelemetryActivated && !hasOptOut) {
optOut();
internalProperties.write(I_PROP_OPT_OUT, String.valueOf(system2.now()));
LOG.info("Sharing of SonarQube statistics is disabled.");
}
if (isTelemetryActivated && hasOptOut) {
internalProperties.write(I_PROP_OPT_OUT, null);
}
if (!isTelemetryActivated) {
return;
}
LOG.info("Sharing of SonarQube statistics is enabled.");
int frequencyInSeconds = frequency();
scheduleWithFixedDelay(telemetryCommand(), frequencyInSeconds, frequencyInSeconds, TimeUnit.SECONDS);
}
|
@Test
void write_sequence_as_one_if_not_previously_present() {
initTelemetrySettingsToDefaultValues();
when(lockManager.tryLock(any(), anyInt())).thenReturn(true);
settings.setProperty("sonar.telemetry.frequencyInSeconds", "1");
mockDataJsonWriterDoingSomething();
underTest.start();
verify(internalProperties, timeout(4_000)).write("telemetry.messageSeq", "1");
}
|
public static String v6ipProcess(String netAddress) {
int part;
String subAddress;
boolean isAsterisk = isAsterisk(netAddress);
boolean isMinus = isMinus(netAddress);
if (isAsterisk && isMinus) {
part = 6;
int lastColon = netAddress.lastIndexOf(':');
int secondLastColon = netAddress.substring(0, lastColon).lastIndexOf(':');
subAddress = netAddress.substring(0, secondLastColon);
} else if (!isAsterisk && !isMinus) {
part = 8;
subAddress = netAddress;
} else {
part = 7;
subAddress = netAddress.substring(0, netAddress.lastIndexOf(':'));
}
return expandIP(subAddress, part);
}
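// A minimal standalone sketch (not from the source) of the prefix extraction
// above for an address containing both a range group and a trailing "*": the
// last two colon groups are stripped, and expandIP later pads the remaining
// prefix out to 6 of the 8 groups.
public class V6PrefixSketch {
    public static void main(String[] args) {
        String addr = "5::7:6:1-200:*";
        int lastColon = addr.lastIndexOf(':');
        int secondLastColon = addr.substring(0, lastColon).lastIndexOf(':');
        System.out.println(addr.substring(0, secondLastColon)); // 5::7:6
    }
}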
|
@Test
public void testV6ipProcess() {
String remoteAddr = "5::7:6:1-200:*";
Assert.assertEquals(AclUtils.v6ipProcess(remoteAddr), "0005:0000:0000:0000:0007:0006");
remoteAddr = "5::7:6:1-200";
Assert.assertEquals(AclUtils.v6ipProcess(remoteAddr), "0005:0000:0000:0000:0000:0007:0006");
remoteAddr = "5::7:6:*";
Assert.assertEquals(AclUtils.v6ipProcess(remoteAddr), "0005:0000:0000:0000:0000:0007:0006");
remoteAddr = "5:7:6:*";
Assert.assertEquals(AclUtils.v6ipProcess(remoteAddr), "0005:0007:0006");
}
|
@Override
@NonNull public CharSequence getKeyboardName() {
return mKeyboardName;
}
|
@Test
public void testKeyboardPopupCharacterStringConstructor() throws Exception {
AnyPopupKeyboard keyboard =
new AnyPopupKeyboard(
new DefaultAddOn(getApplicationContext(), getApplicationContext()),
getApplicationContext(),
"ûūùú",
SIMPLE_KeyboardDimens,
"POP_KEYBOARD");
Assert.assertEquals("POP_KEYBOARD", keyboard.getKeyboardName());
Assert.assertEquals(4, keyboard.getKeys().size());
int row0Y = (int) SIMPLE_KeyboardDimens.getRowVerticalGap();
assertKeyValues(keyboard, 'û', row0Y);
assertKeyValues(keyboard, 'ū', row0Y);
assertKeyValues(keyboard, 'ù', row0Y);
assertKeyValues(keyboard, 'ú', row0Y);
Assert.assertEquals(
1, // one row
keyboard.getKeys().stream().map(k -> k.y).distinct().count());
}
|
public static Row toBeamRow(GenericRecord record, Schema schema, ConversionOptions options) {
List<Object> valuesInOrder =
schema.getFields().stream()
.map(
field -> {
try {
org.apache.avro.Schema.Field avroField =
record.getSchema().getField(field.getName());
Object value = avroField != null ? record.get(avroField.pos()) : null;
return convertAvroFormat(field.getType(), value, options);
} catch (Exception cause) {
throw new IllegalArgumentException(
"Error converting field " + field + ": " + cause.getMessage(), cause);
}
})
.collect(toList());
return Row.withSchema(schema).addValues(valuesInOrder).build();
}
|
@Test
public void testToBeamRow_projection() {
long testId = 123L;
// recordSchema is a projection of FLAT_TYPE schema
org.apache.avro.Schema recordSchema =
org.apache.avro.SchemaBuilder.record("__root__").fields().optionalLong("id").endRecord();
GenericData.Record record = new GenericData.Record(recordSchema);
record.put("id", testId);
Row expected = Row.withSchema(FLAT_TYPE).withFieldValue("id", testId).build();
Row actual =
BigQueryUtils.toBeamRow(
record, FLAT_TYPE, BigQueryUtils.ConversionOptions.builder().build());
assertEquals(expected, actual);
}
|
@Override
public void add(T item) {
final int sizeAtTimeOfAdd;
synchronized (items) {
items.add(item);
sizeAtTimeOfAdd = items.size();
}
/*
WARNING: It is possible that the item that was just added to the list
has been processed by an existing idle task at this point.
By rescheduling the following timers, it is possible that a
superfluous maxTask is generated now OR that the idle task and max
task are scheduled at their specified delays. This could result in
calls to processItems sooner than expected.
*/
// Did we hit the max item threshold?
if (sizeAtTimeOfAdd >= maxItems) {
if (maxIdleMillis < maxBatchMillis) {
cancelTask(idleTask);
}
rescheduleTask(maxTask, 0 /* now! */);
} else {
// Otherwise, schedule idle task and if this is a first item
// also schedule the max batch age task.
if (maxIdleMillis < maxBatchMillis) {
rescheduleTask(idleTask, maxIdleMillis);
}
if (sizeAtTimeOfAdd == 1) {
rescheduleTask(maxTask, maxBatchMillis);
}
}
}
|
@Test
public void eventTrigger() {
TestAccumulator accumulator = new TestAccumulator();
accumulator.add(new TestItem("a"));
accumulator.add(new TestItem("b"));
accumulator.add(new TestItem("c"));
accumulator.add(new TestItem("d"));
assertTrue("should not have fired yet", accumulator.batch.isEmpty());
accumulator.add(new TestItem("e"));
timer.advanceTimeMillis(20, LONG_REAL_TIME_DELAY);
assertFalse("should have fired", accumulator.batch.isEmpty());
assertEquals("incorrect batch", "abcde", accumulator.batch);
}
|
Collection<AzureAddress> getAddresses() {
LOGGER.finest("Fetching OAuth Access Token");
final String accessToken = fetchAccessToken();
LOGGER.finest("Fetching instances for subscription '%s' and resourceGroup '%s'",
subscriptionId, resourceGroup);
Collection<AzureAddress> addresses = azureComputeApi.instances(subscriptionId, resourceGroup,
scaleSet, tag, accessToken);
LOGGER.finest("Found the following instances for subscription '%s' and resourceGroup '%s': %s",
subscriptionId, resourceGroup,
addresses);
return addresses;
}
|
@Test
public void getAddressesCurrentSubscriptionCurrentResourceGroupCurrentScaleSetNoTag() {
// given
given(azureComputeApi.instances(SUBSCRIPTION_ID, RESOURCE_GROUP, SCALE_SET, null, ACCESS_TOKEN)).willReturn(ADDRESSES);
AzureConfig azureConfig = AzureConfig.builder().setInstanceMetadataAvailable(true).build();
AzureClient azureClient = new AzureClient(azureMetadataApi, azureComputeApi, azureAuthenticator, azureConfig);
// when
Collection<AzureAddress> result = azureClient.getAddresses();
// then
assertEquals(ADDRESSES, result);
}
|
public void setContract(@Nullable Produce contract)
{
this.contract = contract;
setStoredContract(contract);
handleContractState();
}
|
@Test
public void cabbageContractCabbageDeadAndCabbageDiseased()
{
// Get the two allotment patches
final FarmingPatch patch1 = farmingGuildPatches.get(Varbits.FARMING_4773);
final FarmingPatch patch2 = farmingGuildPatches.get(Varbits.FARMING_4774);
assertNotNull(patch1);
assertNotNull(patch2);
// Specify the two allotment patches
when(farmingTracker.predictPatch(patch1))
.thenReturn(new PatchPrediction(Produce.CABBAGE, CropState.DISEASED, 0, 2, 3));
when(farmingTracker.predictPatch(patch2))
.thenReturn(new PatchPrediction(Produce.CABBAGE, CropState.DEAD, 0, 2, 3));
farmingContractManager.setContract(Produce.CABBAGE);
assertEquals(SummaryState.IN_PROGRESS, farmingContractManager.getSummary());
// Prefer diseased cabbages
assertEquals(CropState.DISEASED, farmingContractManager.getContractCropState());
}
|
public static ParsedCommand parse(
// CHECKSTYLE_RULES.ON: CyclomaticComplexity
final String sql, final Map<String, String> variables) {
validateSupportedStatementType(sql);
final String substituted;
try {
substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
} catch (ParseFailedException e) {
throw new MigrationException(String.format(
"Failed to parse the statement. Statement: %s. Reason: %s",
sql, e.getMessage()));
}
final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
.get(0).getStatement();
final boolean isStatement = StatementType.get(statementContext.statement().getClass())
== StatementType.STATEMENT;
return new ParsedCommand(substituted,
isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
.buildStatement(statementContext)));
}
|
@Test
public void shouldParseInsertIntoStatement() {
// When:
List<CommandParser.ParsedCommand> commands = parse("INSERT INTO FOO SELECT * FROM BAR;");
// Then:
assertThat(commands.size(), is(1));
assertThat(commands.get(0).getStatement().isPresent(), is (false));
assertThat(commands.get(0).getCommand(), is("INSERT INTO FOO SELECT * FROM BAR;"));
}
|
public Searcher searcher() {
return new Searcher();
}
|
@Test
void require_that_not_works_when_k_is_2() {
ConjunctionIndexBuilder builder = new ConjunctionIndexBuilder();
IndexableFeatureConjunction c1 = indexableConj(
conj(
feature("a").inSet("1"),
feature("b").inSet("1"),
not(feature("c").inSet("1"))));
IndexableFeatureConjunction c2 = indexableConj(
conj(
feature("a").inSet("1"),
feature("b").inSet("1"),
not(feature("c").inSet("1")),
not(feature("d").inSet("1"))));
IndexableFeatureConjunction c3 = indexableConj(
conj(
feature("a").inSet("1"),
feature("b").inSet("1"),
not(feature("c").inSet("1")),
not(feature("d").inSet("1")),
not(feature("e").inSet("1"))));
builder.indexConjunction(c1);
builder.indexConjunction(c2);
builder.indexConjunction(c3);
ConjunctionIndex index = builder.build();
ConjunctionIndex.Searcher searcher = index.searcher();
PredicateQuery query = new PredicateQuery();
query.addFeature("a", "1");
query.addFeature("b", "1");
assertHitsEquals(searcher.search(query), c1, c2, c3);
query.addFeature("c", "1");
assertTrue(searcher.search(query).isEmpty());
query.addFeature("d", "1");
assertTrue(searcher.search(query).isEmpty());
query.addFeature("e", "1");
assertTrue(searcher.search(query).isEmpty());
}
|
public boolean setNewAuthor(DefaultIssue issue, @Nullable String newAuthorLogin, IssueChangeContext context) {
if (isNullOrEmpty(newAuthorLogin)) {
return false;
}
checkState(issue.authorLogin() == null, "It's not possible to update the author with this method, please use setAuthorLogin()");
issue.setFieldChange(context, AUTHOR, null, newAuthorLogin);
issue.setAuthorLogin(newAuthorLogin);
issue.setUpdateDate(context.date());
issue.setChanged(true);
// do not send notifications to prevent spam when installing the developer cockpit plugin
return true;
}
|
@Test
void set_new_author() {
boolean updated = underTest.setNewAuthor(issue, "simon", context);
assertThat(updated).isTrue();
FieldDiffs.Diff diff = issue.currentChange().get("author");
assertThat(diff.oldValue()).isNull();
assertThat(diff.newValue()).isEqualTo("simon");
assertThat(issue.mustSendNotifications()).isFalse();
}
|
boolean isModified(Namespace namespace) {
Release release = releaseService.findLatestActiveRelease(namespace);
List<Item> items = itemService.findItemsWithoutOrdered(namespace.getId());
if (release == null) {
return hasNormalItems(items);
}
Map<String, String> releasedConfiguration = GSON.fromJson(release.getConfigurations(), GsonType.CONFIG);
Map<String, String> configurationFromItems = generateConfigurationFromItems(namespace, items);
MapDifference<String, String> difference = Maps.difference(releasedConfiguration, configurationFromItems);
return !difference.areEqual();
}
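// A minimal standalone sketch (not from the source) of the Guava
// Maps.difference call used above to decide whether a namespace is modified.
import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;
import java.util.Map;

public class MapsDifferenceSketch {
    public static void main(String[] args) {
        Map<String, String> released = Map.of("k1", "v1", "k2", "v2");
        Map<String, String> current = Map.of("k1", "v1", "k2", "v9", "k3", "v3");
        MapDifference<String, String> diff = Maps.difference(released, current);
        System.out.println(diff.areEqual());           // false -> modified
        System.out.println(diff.entriesDiffering());   // {k2=(v2, v9)}
        System.out.println(diff.entriesOnlyOnRight()); // {k3=v3}
    }
}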
|
@Test
public void testParentNamespaceNotReleased() {
long childNamespaceId = 1, parentNamespaceId = 2;
Namespace childNamespace = createNamespace(childNamespaceId);
Namespace parentNamespace = createNamespace(parentNamespaceId);
Release childRelease = createRelease("{\"k1\":\"v3\", \"k2\":\"v2\"}");
List<Item> childItems = Arrays.asList(createItem("k1", "v2"), createItem("k2", "v2"));
when(releaseService.findLatestActiveRelease(childNamespace)).thenReturn(childRelease);
when(releaseService.findLatestActiveRelease(parentNamespace)).thenReturn(null);
when(itemService.findItemsWithoutOrdered(childNamespaceId)).thenReturn(childItems);
when(namespaceService.findParentNamespace(childNamespace)).thenReturn(parentNamespace);
boolean isModified = namespaceUnlockAspect.isModified(childNamespace);
Assert.assertTrue(isModified);
}
|
@Override
public BasicTypeDefine reconvert(Column column) {
BasicTypeDefine.BasicTypeDefineBuilder builder =
BasicTypeDefine.builder()
.name(column.getName())
.nullable(column.isNullable())
.comment(column.getComment())
.defaultValue(column.getDefaultValue());
switch (column.getDataType().getSqlType()) {
case BOOLEAN:
builder.columnType(DM_BIT);
builder.dataType(DM_BIT);
break;
case TINYINT:
builder.columnType(DM_TINYINT);
builder.dataType(DM_TINYINT);
break;
case SMALLINT:
builder.columnType(DM_SMALLINT);
builder.dataType(DM_SMALLINT);
break;
case INT:
builder.columnType(DM_INT);
builder.dataType(DM_INT);
break;
case BIGINT:
builder.columnType(DM_BIGINT);
builder.dataType(DM_BIGINT);
break;
case FLOAT:
builder.columnType(DM_REAL);
builder.dataType(DM_REAL);
break;
case DOUBLE:
builder.columnType(DM_DOUBLE);
builder.dataType(DM_DOUBLE);
break;
case DECIMAL:
DecimalType decimalType = (DecimalType) column.getDataType();
long precision = decimalType.getPrecision();
int scale = decimalType.getScale();
if (precision <= 0) {
precision = DEFAULT_PRECISION;
scale = DEFAULT_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is precision less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (precision > MAX_PRECISION) {
scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
precision = MAX_PRECISION;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum precision of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_PRECISION,
precision,
scale);
}
if (scale < 0) {
scale = 0;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is scale less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (scale > MAX_SCALE) {
scale = MAX_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_SCALE,
precision,
scale);
}
builder.columnType(String.format("%s(%s,%s)", DM_DECIMAL, precision, scale));
builder.dataType(DM_DECIMAL);
builder.precision(precision);
builder.scale(scale);
break;
case STRING:
builder.length(column.getColumnLength());
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(DM_TEXT);
builder.dataType(DM_TEXT);
} else if (column.getColumnLength() <= MAX_CHAR_LENGTH_FOR_PAGE_4K) {
builder.columnType(
String.format("%s(%s)", DM_VARCHAR2, column.getColumnLength()));
builder.dataType(DM_VARCHAR2);
} else {
builder.columnType(DM_TEXT);
builder.dataType(DM_TEXT);
}
break;
case BYTES:
builder.length(column.getColumnLength());
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(DM_LONGVARBINARY);
builder.dataType(DM_LONGVARBINARY);
} else if (column.getColumnLength() <= MAX_BINARY_LENGTH_FOR_PAGE_4K) {
builder.columnType(
String.format("%s(%s)", DM_VARBINARY, column.getColumnLength()));
builder.dataType(DM_VARBINARY);
} else {
builder.columnType(DM_LONGVARBINARY);
builder.dataType(DM_LONGVARBINARY);
}
break;
case DATE:
builder.columnType(DM_DATE);
builder.dataType(DM_DATE);
break;
case TIME:
builder.dataType(DM_TIME);
if (column.getScale() != null && column.getScale() > 0) {
Integer timeScale = column.getScale();
if (timeScale > MAX_TIME_SCALE) {
timeScale = MAX_TIME_SCALE;
log.warn(
"The time column {} type time({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to time({})",
column.getName(),
column.getScale(),
MAX_TIME_SCALE,
timeScale);
}
builder.columnType(String.format("%s(%s)", DM_TIME, timeScale));
builder.scale(timeScale);
} else {
builder.columnType(DM_TIME);
}
break;
case TIMESTAMP:
builder.dataType(DM_TIMESTAMP);
if (column.getScale() != null && column.getScale() > 0) {
Integer timestampScale = column.getScale();
if (timestampScale > MAX_TIMESTAMP_SCALE) {
timestampScale = MAX_TIMESTAMP_SCALE;
log.warn(
"The timestamp column {} type timestamp({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to timestamp({})",
column.getName(),
column.getScale(),
MAX_TIMESTAMP_SCALE,
timestampScale);
}
builder.columnType(String.format("%s(%s)", DM_TIMESTAMP, timestampScale));
builder.scale(timestampScale);
} else {
builder.columnType(DM_TIMESTAMP);
}
break;
default:
throw CommonError.convertToConnectorTypeError(
DatabaseIdentifier.DAMENG,
column.getDataType().toString(),
column.getName());
}
return builder.build();
}
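// A worked sketch (hypothetical limit: MAX_PRECISION = 38; the real DM
// constants live in the converter) of the decimal clamping rule above: when
// precision overflows, the scale is reduced by the same amount, floored at 0.
public class DecimalClampSketch {
    public static void main(String[] args) {
        final long MAX_PRECISION = 38;
        long precision = 45;
        int scale = 10;
        if (precision > MAX_PRECISION) {
            scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
            precision = MAX_PRECISION;
        }
        System.out.printf("decimal(45,10) -> decimal(%d,%d)%n", precision, scale);
        // decimal(45,10) -> decimal(38,3)
    }
}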
|
@Test
public void testReconvertDate() {
Column column =
PhysicalColumn.builder()
.name("test")
.dataType(LocalTimeType.LOCAL_DATE_TYPE)
.build();
BasicTypeDefine typeDefine = DmdbTypeConverter.INSTANCE.reconvert(column);
Assertions.assertEquals(column.getName(), typeDefine.getName());
Assertions.assertEquals(DmdbTypeConverter.DM_DATE, typeDefine.getColumnType());
Assertions.assertEquals(DmdbTypeConverter.DM_DATE, typeDefine.getDataType());
}
|
@SuppressWarnings("deprecation")
static Object[] buildArgs(final Object[] positionalArguments,
final ResourceMethodDescriptor resourceMethod,
final ServerResourceContext context,
final DynamicRecordTemplate template,
final ResourceMethodConfig resourceMethodConfig)
{
List<Parameter<?>> parameters = resourceMethod.getParameters();
Object[] arguments = Arrays.copyOf(positionalArguments, parameters.size());
fixUpComplexKeySingletonArraysInArguments(arguments);
boolean attachmentsDesired = false;
for (int i = positionalArguments.length; i < parameters.size(); ++i)
{
Parameter<?> param = parameters.get(i);
try
{
if (param.getParamType() == Parameter.ParamType.KEY || param.getParamType() == Parameter.ParamType.ASSOC_KEY_PARAM)
{
Object value = context.getPathKeys().get(param.getName());
if (value != null)
{
arguments[i] = value;
continue;
}
}
else if (param.getParamType() == Parameter.ParamType.CALLBACK)
{
continue;
}
else if (param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT_PARAM || param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT)
{
continue; // don't know what to fill in yet
}
else if (param.getParamType() == Parameter.ParamType.HEADER)
{
HeaderParam headerParam = param.getAnnotations().get(HeaderParam.class);
String value = context.getRequestHeaders().get(headerParam.value());
arguments[i] = value;
continue;
}
//Since we have multiple different types of MaskTrees that can be passed into resource methods,
//we must evaluate based on the param type (annotation used)
else if (param.getParamType() == Parameter.ParamType.PROJECTION || param.getParamType() == Parameter.ParamType.PROJECTION_PARAM)
{
arguments[i] = context.getProjectionMask();
continue;
}
else if (param.getParamType() == Parameter.ParamType.METADATA_PROJECTION_PARAM)
{
arguments[i] = context.getMetadataProjectionMask();
continue;
}
else if (param.getParamType() == Parameter.ParamType.PAGING_PROJECTION_PARAM)
{
arguments[i] = context.getPagingProjectionMask();
continue;
}
else if (param.getParamType() == Parameter.ParamType.CONTEXT || param.getParamType() == Parameter.ParamType.PAGING_CONTEXT_PARAM)
{
PagingContext ctx = RestUtils.getPagingContext(context, (PagingContext) param.getDefaultValue());
arguments[i] = ctx;
continue;
}
else if (param.getParamType() == Parameter.ParamType.PATH_KEYS || param.getParamType() == Parameter.ParamType.PATH_KEYS_PARAM)
{
arguments[i] = context.getPathKeys();
continue;
}
else if (param.getParamType() == Parameter.ParamType.PATH_KEY_PARAM) {
Object value = context.getPathKeys().get(param.getName());
if (value != null)
{
arguments[i] = value;
continue;
}
}
else if (param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT || param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT_PARAM)
{
arguments[i] = context;
continue;
}
else if (param.getParamType() == Parameter.ParamType.VALIDATOR_PARAM)
{
RestLiDataValidator validator = new RestLiDataValidator(resourceMethod.getResourceModel().getResourceClass().getAnnotations(),
resourceMethod.getResourceModel().getValueClass(), resourceMethod.getMethodType());
arguments[i] = validator;
continue;
}
else if (param.getParamType() == Parameter.ParamType.RESTLI_ATTACHMENTS_PARAM)
{
arguments[i] = context.getRequestAttachmentReader();
attachmentsDesired = true;
continue;
}
else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_WRITER_PARAM)
{
// The OutputStream is passed to the resource implementation in a synchronous call. Upon return of the
// resource method, all the bytes will have been written to the OutputStream. The EntityStream will
// contain all the bytes by the time data is requested. The ownership of the OutputStream is passed to
// the ByteArrayOutputStreamWriter, which is responsible for closing the OutputStream if necessary.
ByteArrayOutputStream out = new ByteArrayOutputStream();
context.setResponseEntityStream(EntityStreams.newEntityStream(new ByteArrayOutputStreamWriter(out)));
arguments[i] = new UnstructuredDataWriter(out, context);
continue;
}
else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_REACTIVE_READER_PARAM)
{
arguments[i] = new UnstructuredDataReactiveReader(context.getRequestEntityStream(), context.getRawRequest().getHeader(RestConstants.HEADER_CONTENT_TYPE));
continue;
}
else if (param.getParamType() == Parameter.ParamType.POST)
{
// handle action parameters
if (template != null)
{
DataMap data = template.data();
if (data.containsKey(param.getName()))
{
arguments[i] = template.getValue(param);
continue;
}
}
}
else if (param.getParamType() == Parameter.ParamType.QUERY)
{
Object value;
if (DataTemplate.class.isAssignableFrom(param.getType()))
{
value = buildDataTemplateArgument(context.getStructuredParameter(param.getName()), param,
resourceMethodConfig.shouldValidateQueryParams());
}
else
{
value = buildRegularArgument(context, param, resourceMethodConfig.shouldValidateQueryParams());
}
if (value != null)
{
arguments[i] = value;
continue;
}
}
else if (param.getParamType() == Parameter.ParamType.BATCH || param.getParamType() == Parameter.ParamType.RESOURCE_KEY)
{
// should not come to this routine since it should be handled by passing in positionalArguments
throw new RoutingException("Parameter '" + param.getName() + "' should be passed in as a positional argument",
HttpStatus.S_400_BAD_REQUEST.getCode());
}
else
{
// unknown param type
throw new RoutingException(
"Parameter '" + param.getName() + "' has an unknown parameter type '" + param.getParamType().name() + "'",
HttpStatus.S_400_BAD_REQUEST.getCode());
}
}
catch (TemplateRuntimeException e)
{
throw new RoutingException("Parameter '" + param.getName() + "' is invalid", HttpStatus.S_400_BAD_REQUEST.getCode());
}
try
{
// Handling null-valued parameters not provided in resource context or entity body
// check if it is optional parameter
if (param.isOptional() && param.hasDefaultValue())
{
arguments[i] = param.getDefaultValue();
}
else if (param.isOptional() && !param.getType().isPrimitive())
{
// an optional non-primitive parameter without a default is passed as null;
// an optional primitive must have a default value or be provided
arguments[i] = null;
}
else
{
throw new RoutingException("Parameter '" + param.getName() + "' is required", HttpStatus.S_400_BAD_REQUEST.getCode());
}
}
catch (ResourceConfigException e)
{
// Parameter default value format exception should result in server error code 500.
throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
"Parameter '" + param.getName() + "' default value is invalid", e);
}
}
//Verify that if the resource method did not expect attachments, and attachments were present, that we drain all
//incoming attachments and send back a bad request. We must take precaution here since simply ignoring the request
//attachments is not correct behavior here. Ignoring other request level constructs such as headers or query parameters
//that were not needed is safe, but not for request attachments.
if (!attachmentsDesired && context.getRequestAttachmentReader() != null)
{
throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST,
"Resource method endpoint invoked does not accept any request attachments.");
}
return arguments;
}
|
@Test
public void testRestLiAttachmentsParamResourceExpectNotPresent()
{
//This test makes sure that a resource method that expects attachments, but none are present in the request,
//is given a null for the RestLiAttachmentReader.
ServerResourceContext mockResourceContext = EasyMock.createMock(ServerResourceContext.class);
EasyMock.expect(mockResourceContext.getRequestAttachmentReader()).andReturn(null);
EasyMock.replay(mockResourceContext);
@SuppressWarnings({"unchecked","rawtypes"})
final Parameter<RestLiAttachmentReader> param = new Parameter("RestLi Attachment Reader",
RestLiAttachmentReader.class, null, false, null,
Parameter.ParamType.RESTLI_ATTACHMENTS_PARAM, false,
AnnotationSet.EMPTY);
List<Parameter<?>> parameters = Collections.singletonList(param);
Object[] results = ArgumentBuilder.buildArgs(new Object[0], getMockResourceMethod(parameters), mockResourceContext, null, getMockResourceMethodConfig(false));
Assert.assertEquals(results[0], null);
}
|
@Override
public void parse(InputStream stream, ContentHandler handler, Metadata metadata,
ParseContext context) throws IOException, SAXException, TikaException {
if (!ExternalParser.check("gdalinfo")) {
return;
}
// first set up and run GDAL
// process the command
TemporaryResources tmp = new TemporaryResources();
TikaInputStream tis = TikaInputStream.get(stream, tmp, metadata);
String runCommand = processCommand(tis);
String output = execCommand(new String[]{runCommand});
// now extract the actual metadata params
// from the GDAL output in the content stream
// to do this, we need to literally process the output
// from the invoked command b/c we can't read metadata and
// output text from the handler in ExternalParser
// at the same time, so for now, we can't use the
// ExternalParser to do this and I've had to bring some of
// that functionality directly into this class
// TODO: investigate a way to do both using ExternalParser
extractMetFromOutput(output, metadata);
applyPatternsToOutput(output, metadata, getPatterns());
// make the content handler and provide output there
// now that we have metadata
processOutput(handler, metadata, output);
}
|
@Test
public void testParseMetadata() {
assumeTrue(canRun());
final String expectedNcInst =
"NCAR (National Center for Atmospheric Research, Boulder, CO, USA)";
final String expectedModelNameEnglish = "NCAR CCSM";
final String expectedProgramId = "Source file unknown Version unknown Date unknown";
final String expectedProjectId = "IPCC Fourth Assessment";
final String expectedRealization = "1";
final String expectedTitle = "model output prepared for IPCC AR4";
final String expectedSub8Name = "\":ua";
final String expectedSub8Desc = "[1x17x128x256] eastward_wind (32-bit floating-point)";
GDALParser parser = new GDALParser();
InputStream stream = TestGDALParser.class
.getResourceAsStream("/test-documents/sresa1b_ncar_ccsm3_0_run1_200001.nc");
Metadata met = new Metadata();
BodyContentHandler handler = new BodyContentHandler();
try {
parser.parse(stream, handler, met, new ParseContext());
assertNotNull(met);
assertNotNull(met.get("NC_GLOBAL#institution"));
assertEquals(expectedNcInst, met.get("NC_GLOBAL#institution"));
assertNotNull(met.get("NC_GLOBAL#model_name_english"));
assertEquals(expectedModelNameEnglish, met.get("NC_GLOBAL#model_name_english"));
assertNotNull(met.get("NC_GLOBAL#prg_ID"));
assertEquals(expectedProgramId, met.get("NC_GLOBAL#prg_ID"));
assertNotNull(met.get("NC_GLOBAL#prg_ID"));
assertEquals(expectedProgramId, met.get("NC_GLOBAL#prg_ID"));
assertNotNull(met.get("NC_GLOBAL#project_id"));
assertEquals(expectedProjectId, met.get("NC_GLOBAL#project_id"));
assertNotNull(met.get("NC_GLOBAL#realization"));
assertEquals(expectedRealization, met.get("NC_GLOBAL#realization"));
assertNotNull(met.get("NC_GLOBAL#title"));
assertEquals(expectedTitle, met.get("NC_GLOBAL#title"));
assertNotNull(met.get("SUBDATASET_8_NAME"));
assertTrue(met.get("SUBDATASET_8_NAME").endsWith(expectedSub8Name));
assertNotNull(met.get("SUBDATASET_8_DESC"));
assertEquals(expectedSub8Desc, met.get("SUBDATASET_8_DESC"));
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
|
private static Schema optional(Schema original) {
// null is first in the union because Parquet's default is always null
return Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), original));
}
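// A minimal sketch (not from the source) showing the resulting union schema;
// null comes first, matching Parquet's default of null for optional fields.
import java.util.Arrays;
import org.apache.avro.Schema;

public class OptionalUnionSketch {
    public static void main(String[] args) {
        Schema optionalInt = Schema.createUnion(Arrays.asList(
                Schema.create(Schema.Type.NULL),
                Schema.create(Schema.Type.INT)));
        System.out.println(optionalInt); // ["null","int"]
    }
}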
|
@Test
public void testUnknownTwoLevelListOfLists() throws Exception {
// This tests the case where we don't detect a 2-level list by the repeated
// group's name, but it must be 2-level because the repeated group doesn't
// contain an optional or repeated element as required for 3-level lists
Schema listOfLists = optional(Schema.createArray(Schema.createArray(Schema.create(INT))));
Schema schema = Schema.createRecord("UnknownTwoLevelListInList", null, null, false);
schema.setFields(
Lists.newArrayList(new Schema.Field("listOfLists", listOfLists, null, JsonProperties.NULL_VALUE)));
System.err.println("Avro schema: " + schema.toString(true));
// Cannot use round-trip assertion because repeated group names differ
testParquetToAvroConversion(
schema,
"message UnknownTwoLevelListInList {\n" + " optional group listOfLists (LIST) {\n"
+ " repeated group mylist (LIST) {\n"
+ " repeated int32 innerlist;\n"
+ " }\n"
+ " }\n"
+ "}");
// Cannot use round-trip assertion because 3-level representation is used
testParquetToAvroConversion(
NEW_BEHAVIOR,
schema,
"message UnknownTwoLevelListInList {\n" + " optional group listOfLists (LIST) {\n"
+ " repeated group mylist (LIST) {\n"
+ " repeated int32 innerlist;\n"
+ " }\n"
+ " }\n"
+ "}");
}
|
@Udf(description = "Converts a TIMESTAMP value into the"
+ " string representation of the timestamp in the given format. Single quotes in the"
+ " timestamp format can be escaped with '', for example: 'yyyy-MM-dd''T''HH:mm:ssX'"
+ " The system default time zone is used when no time zone is explicitly provided."
+ " The format pattern should be in the format expected"
+ " by java.time.format.DateTimeFormatter")
public String formatTimestamp(
@UdfParameter(
description = "TIMESTAMP value.") final Timestamp timestamp,
@UdfParameter(
description = "The format pattern should be in the format expected by"
+ " java.time.format.DateTimeFormatter.") final String formatPattern) {
return formatTimestamp(timestamp, formatPattern, ZoneId.of("GMT").getId());
}
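// Minimal usage sketch (values taken from the test below; GMT applies because no zone is given):
//   formatTimestamp(new Timestamp(1534353043000L), "yyyy-MM-dd HH:mm:ss")  ->  "2018-08-15 17:10:43"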
|
@Test
public void testTimeZoneInUniversalTime() {
// Given:
final Timestamp timestamp = new Timestamp(1534353043000L);
// When:
final String universalTime = udf.formatTimestamp(timestamp,
"yyyy-MM-dd HH:mm:ss zz", "UTC");
// Then:
assertThat(universalTime, is("2018-08-15 17:10:43 UTC"));
}
|
@Override
public void configure(ResourceInfo resourceInfo, FeatureContext context) {
final Method resourceMethod = resourceInfo.getResourceMethod();
    final Class<?> resourceClass = resourceInfo.getResourceClass();
if ((resourceMethod != null && (resourceMethod.isAnnotationPresent(SupportedSearchVersion.class)
|| resourceMethod.isAnnotationPresent(SupportedSearchVersions.class)))
|| (resourceClass != null && (resourceClass.isAnnotationPresent(SupportedSearchVersion.class)
|| resourceClass.isAnnotationPresent(SupportedSearchVersions.class)))) {
context.register(SupportedSearchVersionFilter.class);
}
}
|
@Test
public void configureRegistersResponseFilterIfAnnotationIsPresentOnBoth() throws Exception {
final Class clazz = TestResourceWithClassAnnotation.class;
when(resourceInfo.getResourceClass()).thenReturn(clazz);
final Method method = TestResourceWithMethodAnnotation.class.getMethod("methodWithAnnotation");
when(resourceInfo.getResourceMethod()).thenReturn(method);
supportedSearchVersionDynamicFeature.configure(resourceInfo, featureContext);
verify(featureContext, only()).register(SupportedSearchVersionFilter.class);
}
|
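/** Creates a UArrayType whose elements have the given component type, e.g. int[], or int[][] when nested. */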
public static UArrayType create(UType componentType) {
return new AutoValue_UArrayType(componentType);
}
|
@Test
public void serialization() {
SerializableTester.reserializeAndAssert(
UArrayType.create(UClassType.create("java.lang.String")));
SerializableTester.reserializeAndAssert(UArrayType.create(UPrimitiveType.INT));
SerializableTester.reserializeAndAssert(
UArrayType.create(UArrayType.create(UPrimitiveType.INT)));
}
|
public static Read read() {
return new Read(null, "", new Scan());
}
|
@Test
public void testReadingSplitAtFractionExhaustive() throws Exception {
final String table = tmpTable.getName();
final int numRows = 7;
createAndWriteData(table, numRows);
HBaseIO.Read read = HBaseIO.read().withConfiguration(conf).withTableId(table);
HBaseSource source =
new HBaseSource(read, null /* estimatedSizeBytes */)
.withStartKey(ByteKey.of(48))
.withEndKey(ByteKey.of(58));
assertSplitAtFractionExhaustive(source, null);
}
|
public static <InputT, OutputT> DoFnInvoker<InputT, OutputT> invokerFor(
DoFn<InputT, OutputT> fn) {
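    // Generate and instantiate a ByteBuddy-backed invoker bound to this DoFn's annotated methods.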
return ByteBuddyDoFnInvokerFactory.only().newByteBuddyInvoker(fn);
}
|
@Test
public void testSplittableDoFnWithAllMethods() throws Exception {
MockFn fn = mock(MockFn.class);
DoFnInvoker<String, String> invoker = DoFnInvokers.invokerFor(fn);
final SomeRestrictionTracker tracker = mock(SomeRestrictionTracker.class);
final SomeRestrictionCoder coder = mock(SomeRestrictionCoder.class);
final InstantCoder watermarkEstimatorStateCoder = InstantCoder.of();
final Instant watermarkEstimatorState = Instant.now();
final WatermarkEstimator<Instant> watermarkEstimator =
new WatermarkEstimators.Manual(watermarkEstimatorState);
SomeRestriction restriction = new SomeRestriction();
final SomeRestriction part1 = new SomeRestriction();
final SomeRestriction part2 = new SomeRestriction();
final SomeRestriction part3 = new SomeRestriction();
when(fn.getRestrictionCoder()).thenReturn(coder);
when(fn.getWatermarkEstimatorStateCoder()).thenReturn(watermarkEstimatorStateCoder);
when(fn.getInitialRestriction(mockElement)).thenReturn(restriction);
doAnswer(
AdditionalAnswers.delegatesTo(
new MockFn() {
@DoFn.SplitRestriction
@Override
public void splitRestriction(
@Element String element,
@Restriction SomeRestriction restriction,
DoFn.OutputReceiver<SomeRestriction> receiver) {
receiver.output(part1);
receiver.output(part2);
receiver.output(part3);
}
}))
.when(fn)
.splitRestriction(eq(mockElement), same(restriction), any());
when(fn.getInitialWatermarkEstimatorState()).thenReturn(watermarkEstimatorState);
when(fn.newTracker(restriction)).thenReturn(tracker);
when(fn.newWatermarkEstimator(watermarkEstimatorState)).thenReturn(watermarkEstimator);
when(fn.processElement(mockProcessContext, tracker, watermarkEstimator)).thenReturn(resume());
when(fn.getSize()).thenReturn(2.0);
assertEquals(coder, invoker.invokeGetRestrictionCoder(CoderRegistry.createDefault()));
assertEquals(
watermarkEstimatorStateCoder,
invoker.invokeGetWatermarkEstimatorStateCoder(CoderRegistry.createDefault()));
assertEquals(
restriction,
invoker.invokeGetInitialRestriction(
new FakeArgumentProvider<String, String>() {
@Override
public String element(DoFn<String, String> doFn) {
return mockElement;
}
}));
List<SomeRestriction> outputs = new ArrayList<>();
invoker.invokeSplitRestriction(
new FakeArgumentProvider<String, String>() {
@Override
public String element(DoFn<String, String> doFn) {
return mockElement;
}
@Override
public Object restriction() {
return restriction;
}
@Override
public OutputReceiver outputReceiver(DoFn doFn) {
return new OutputReceiver<SomeRestriction>() {
@Override
public void output(SomeRestriction output) {
outputs.add(output);
}
@Override
public void outputWithTimestamp(SomeRestriction output, Instant timestamp) {
fail("Unexpected output with timestamp");
}
@Override
public void outputWindowedValue(
SomeRestriction output,
Instant timestamp,
Collection<? extends BoundedWindow> windows,
PaneInfo paneInfo) {
fail("Unexpected outputWindowedValue");
}
};
}
});
assertEquals(Arrays.asList(part1, part2, part3), outputs);
assertEquals(
watermarkEstimatorState,
invoker.invokeGetInitialWatermarkEstimatorState(new FakeArgumentProvider<>()));
assertEquals(
tracker,
invoker.invokeNewTracker(
new FakeArgumentProvider<String, String>() {
@Override
public String element(DoFn<String, String> doFn) {
return mockElement;
}
@Override
public Object restriction() {
return restriction;
}
}));
assertEquals(
watermarkEstimator,
invoker.invokeNewWatermarkEstimator(
new FakeArgumentProvider<String, String>() {
@Override
public Object watermarkEstimatorState() {
return watermarkEstimatorState;
}
}));
assertEquals(
resume(),
invoker.invokeProcessElement(
new FakeArgumentProvider<String, String>() {
@Override
public DoFn<String, String>.ProcessContext processContext(DoFn<String, String> fn) {
return mockProcessContext;
}
@Override
public RestrictionTracker<?, ?> restrictionTracker() {
return tracker;
}
@Override
public WatermarkEstimator<?> watermarkEstimator() {
return watermarkEstimator;
}
}));
assertEquals(2.0, invoker.invokeGetSize(mockArgumentProvider), 0.0001);
}
|
public static @CheckForNull String getActionUrl(String itUrl, Action action) {
String urlName = action.getUrlName();
if (urlName == null) return null; // Should not be displayed
try {
if (new URI(urlName).isAbsolute()) {
return urlName;
}
} catch (URISyntaxException x) {
Logger.getLogger(Functions.class.getName()).log(Level.WARNING, "Failed to parse URL for {0}: {1}", new Object[] {action, x});
return null;
}
if (urlName.startsWith("/"))
return joinPath(Stapler.getCurrentRequest().getContextPath(), urlName);
else
// relative URL name
return joinPath(Stapler.getCurrentRequest().getContextPath() + '/' + itUrl, urlName);
}
|
@Test
public void testGetActionUrl_unparseable() {
assertNull(Functions.getActionUrl(null, createMockAction("http://example.net/stuff?something=^woohoo")));
}
|
@VisibleForTesting
int persistNextQueues(final Instant currentTime) {
final int slot = messagesCache.getNextSlotToPersist();
List<String> queuesToPersist;
int queuesPersisted = 0;
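    // Drain the slot in batches; a full batch signals that more queues may remain to persist.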
do {
queuesToPersist = getQueuesTimer.record(
() -> messagesCache.getQueuesToPersist(slot, currentTime.minus(persistDelay), QUEUE_BATCH_LIMIT));
for (final String queue : queuesToPersist) {
final UUID accountUuid = MessagesCache.getAccountUuidFromQueueName(queue);
final byte deviceId = MessagesCache.getDeviceIdFromQueueName(queue);
final Optional<Account> maybeAccount = accountsManager.getByAccountIdentifier(accountUuid);
if (maybeAccount.isEmpty()) {
logger.error("No account record found for account {}", accountUuid);
continue;
}
final Optional<Device> maybeDevice = maybeAccount.flatMap(account -> account.getDevice(deviceId));
if (maybeDevice.isEmpty()) {
logger.error("Account {} does not have a device with id {}", accountUuid, deviceId);
continue;
}
try {
persistQueue(maybeAccount.get(), maybeDevice.get());
} catch (final Exception e) {
persistQueueExceptionMeter.increment();
logger.warn("Failed to persist queue {}::{}; will schedule for retry", accountUuid, deviceId, e);
messagesCache.addQueueToPersist(accountUuid, deviceId);
Util.sleep(EXCEPTION_PAUSE_MILLIS);
}
}
queuesPersisted += queuesToPersist.size();
} while (queuesToPersist.size() >= QUEUE_BATCH_LIMIT);
return queuesPersisted;
}
|
@Test
void testPersistNextQueuesNoQueues() {
messagePersister.persistNextQueues(Instant.now());
verify(accountsManager, never()).getByAccountIdentifier(any(UUID.class));
}
|
@Override
public String buildContext() {
final PluginHandleDO after = (PluginHandleDO) getAfter();
if (Objects.isNull(getBefore())) {
return String.format("the plugin-handle [%s] is %s", after.getField(), StringUtils.lowerCase(getType().getType().toString()));
}
return String.format("the plugin-handle [%s] is %s : %s", after.getField(), StringUtils.lowerCase(getType().getType().toString()), contrast());
}
|
@Test
public void createPluginHandleBuildContextTest() {
PluginHandleChangedEvent pluginChangedEvent = new PluginHandleChangedEvent(pluginHandleDO, null,
EventTypeEnum.PLUGIN_HANDLE_CREATE, "test-operator");
String context = String.format("the plugin-handle [%s] is %s", pluginHandleDO.getField(),
StringUtils.lowerCase(EventTypeEnum.PLUGIN_HANDLE_CREATE.getType().toString()));
assertEquals(context, pluginChangedEvent.buildContext());
}
|
@Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
return this.send(record, null);
}
|
@Test void should_add_parent_trace_when_context_injected_on_headers() {
ProducerRecord<String, String> record = new ProducerRecord<>(TEST_TOPIC, TEST_KEY, TEST_VALUE);
tracingProducer.injector.inject(parent, new KafkaProducerRequest(record));
tracingProducer.send(record);
mockProducer.completeNext();
MutableSpan producerSpan = spans.get(0);
assertThat(producerSpan.kind()).isEqualTo(PRODUCER);
assertChildOf(producerSpan, parent);
assertThat(lastHeaders(mockProducer))
.containsEntry("b3", producerSpan.traceId() + "-" + producerSpan.id() + "-1");
}
|
@Override
public List<FileEntriesLayer> createLayers() throws IOException {
// Add dependencies layers.
List<FileEntriesLayer> layers =
JarLayers.getDependenciesLayers(jarPath, ProcessingMode.packaged);
// Add layer for jar.
FileEntriesLayer jarLayer =
FileEntriesLayer.builder()
.setName(JarLayers.JAR)
.addEntry(jarPath, JarLayers.APP_ROOT.resolve(jarPath.getFileName()))
.build();
layers.add(jarLayer);
return layers;
}
|
@Test
public void testCreateLayers_dependencyDoesNotExist() throws URISyntaxException {
Path standardJar = Paths.get(Resources.getResource(STANDARD_SINGLE_DEPENDENCY_JAR).toURI());
StandardPackagedProcessor standardPackagedModeProcessor =
new StandardPackagedProcessor(standardJar, JAR_JAVA_VERSION);
IllegalArgumentException exception =
assertThrows(
IllegalArgumentException.class, () -> standardPackagedModeProcessor.createLayers());
assertThat(exception)
.hasMessageThat()
.isEqualTo(
"Dependency required by the JAR (as specified in `Class-Path` in the JAR manifest) doesn't exist: "
+ standardJar.getParent().resolve("dependency.jar"));
}
|
public static ReadRows readRows() {
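    // Defaults: standard fetch size, output parallelization enabled, and a no-op statement preparator.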
return new AutoValue_JdbcIO_ReadRows.Builder()
.setFetchSize(DEFAULT_FETCH_SIZE)
.setOutputParallelization(true)
.setStatementPreparator(ignored -> {})
.build();
}
|
@Test
public void testReadRowsWithDataSourceConfiguration() {
PCollection<Row> rows =
pipeline.apply(
JdbcIO.readRows()
.withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
.withQuery(String.format("select name,id from %s where name = ?", READ_TABLE_NAME))
.withStatementPreparator(
preparedStatement ->
preparedStatement.setString(1, TestRow.getNameForSeed(1))));
Schema expectedSchema =
Schema.of(
Schema.Field.of("NAME", LogicalTypes.variableLengthString(JDBCType.VARCHAR, 500))
.withNullable(true),
Schema.Field.of("ID", Schema.FieldType.INT32).withNullable(true));
assertEquals(expectedSchema, rows.getSchema());
PCollection<Row> output = rows.apply(Select.fieldNames("NAME", "ID"));
PAssert.that(output)
.containsInAnyOrder(
ImmutableList.of(Row.withSchema(expectedSchema).addValues("Testval1", 1).build()));
pipeline.run();
}
|
@Override
public Executor getExecutor(URL url) {
String name =
url.getParameter(THREAD_NAME_KEY, (String) url.getAttribute(THREAD_NAME_KEY, DEFAULT_THREAD_NAME));
int cores = url.getParameter(CORE_THREADS_KEY, DEFAULT_CORE_THREADS);
int threads = url.getParameter(THREADS_KEY, Integer.MAX_VALUE);
int queues = url.getParameter(QUEUES_KEY, DEFAULT_QUEUES);
int alive = url.getParameter(ALIVE_KEY, DEFAULT_ALIVE);
// init queue and executor
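        // TaskQueue makes this pool "eager": offers are rejected while more workers can still be
        // created, so the pool grows to its maximum size before tasks start queueing.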
TaskQueue<Runnable> taskQueue = new TaskQueue<>(queues <= 0 ? 1 : queues);
EagerThreadPoolExecutor executor = new EagerThreadPoolExecutor(
cores,
threads,
alive,
TimeUnit.MILLISECONDS,
taskQueue,
new NamedInternalThreadFactory(name, true),
new AbortPolicyWithReport(name, url));
taskQueue.setExecutor(executor);
return executor;
}
|
@Test
void getExecutor1() throws Exception {
URL url = URL.valueOf("dubbo://10.20.130.230:20880/context/path?" + THREAD_NAME_KEY
+ "=demo&" + CORE_THREADS_KEY
+ "=1&" + THREADS_KEY
+ "=2&" + ALIVE_KEY
+ "=1000&" + QUEUES_KEY
+ "=0");
ThreadPool threadPool = new EagerThreadPool();
ThreadPoolExecutor executor = (ThreadPoolExecutor) threadPool.getExecutor(url);
assertThat(executor, instanceOf(EagerThreadPoolExecutor.class));
assertThat(executor.getCorePoolSize(), is(1));
assertThat(executor.getMaximumPoolSize(), is(2));
assertThat(executor.getKeepAliveTime(TimeUnit.MILLISECONDS), is(1000L));
assertThat(executor.getQueue().remainingCapacity(), is(1));
assertThat(executor.getQueue(), Matchers.<BlockingQueue<Runnable>>instanceOf(TaskQueue.class));
assertThat(
executor.getRejectedExecutionHandler(),
Matchers.<RejectedExecutionHandler>instanceOf(AbortPolicyWithReport.class));
final CountDownLatch latch = new CountDownLatch(1);
executor.execute(() -> {
Thread thread = Thread.currentThread();
assertThat(thread, instanceOf(InternalThread.class));
assertThat(thread.getName(), startsWith("demo"));
latch.countDown();
});
latch.await();
assertThat(latch.getCount(), is(0L));
}
|
public static Optional<ScalablePushRegistry> create(
final LogicalSchema logicalSchema,
final Supplier<List<PersistentQueryMetadata>> allPersistentQueries,
final boolean isTable,
final Map<String, Object> streamsProperties,
final Map<String, Object> consumerProperties,
final String sourceApplicationId,
final KsqlTopic ksqlTopic,
final ServiceContext serviceContext,
final KsqlConfig ksqlConfig
) {
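    // Scalable push queries need an advertised application server so peer hosts can be located;
    // without one, opt out of creating a registry.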
final Object appServer = streamsProperties.get(StreamsConfig.APPLICATION_SERVER_CONFIG);
if (appServer == null) {
return Optional.empty();
}
if (!(appServer instanceof String)) {
throw new IllegalArgumentException(StreamsConfig.APPLICATION_SERVER_CONFIG + " not String");
}
final URL localhost;
try {
localhost = new URL((String) appServer);
} catch (final MalformedURLException e) {
throw new IllegalArgumentException(StreamsConfig.APPLICATION_SERVER_CONFIG + " malformed: "
+ "'" + appServer + "'");
}
final PushLocator pushLocator = new AllHostsLocator(allPersistentQueries, localhost);
return Optional.of(new ScalablePushRegistry(
pushLocator, logicalSchema, isTable,
consumerProperties, ksqlTopic, serviceContext, ksqlConfig, sourceApplicationId,
KafkaConsumerFactory::create, LatestConsumer::new, CatchupConsumer::new,
Executors.newSingleThreadExecutor(),
Executors.newScheduledThreadPool(
ksqlConfig.getInt(KsqlConfig.KSQL_QUERY_PUSH_V2_MAX_CATCHUP_CONSUMERS))));
}
|
@Test
public void shouldCreate() {
// When:
final Optional<ScalablePushRegistry> registry =
ScalablePushRegistry.create(SCHEMA, Collections::emptyList, false,
ImmutableMap.of(StreamsConfig.APPLICATION_SERVER_CONFIG, "http://localhost:8088"),
ImmutableMap.of(), SOURCE_APP_ID, ksqlTopic, serviceContext, ksqlConfig);
// Then:
assertThat(registry.isPresent(), is(true));
}
|
static DateTime determineRotationPeriodAnchor(@Nullable DateTime lastAnchor, Period period) {
final Period normalized = period.normalizedStandard();
int years = normalized.getYears();
int months = normalized.getMonths();
int weeks = normalized.getWeeks();
int days = normalized.getDays();
int hours = normalized.getHours();
int minutes = normalized.getMinutes();
int seconds = normalized.getSeconds();
if (years == 0 && months == 0 && weeks == 0 && days == 0 && hours == 0 && minutes == 0 && seconds == 0) {
throw new IllegalArgumentException("Invalid rotation period specified");
}
// find the largest non-zero stride in the period. that's our anchor type. statement order matters here!
DateTimeFieldType largestStrideType = null;
if (seconds > 0) {
largestStrideType = secondOfMinute();
}
if (minutes > 0) {
largestStrideType = minuteOfHour();
}
if (hours > 0) {
largestStrideType = hourOfDay();
}
if (days > 0) {
largestStrideType = dayOfMonth();
}
if (weeks > 0) {
largestStrideType = weekOfWeekyear();
}
if (months > 0) {
largestStrideType = monthOfYear();
}
if (years > 0) {
largestStrideType = year();
}
if (largestStrideType == null) {
throw new IllegalArgumentException("Could not determine rotation stride length.");
}
final DateTime anchorTime = anchorTimeFrom(lastAnchor);
final DateTimeField field = largestStrideType.getField(anchorTime.getChronology());
// use normalized here to make sure we actually have the largestStride type available! see https://github.com/Graylog2/graylog2-server/issues/836
int periodValue = normalized.get(largestStrideType.getDurationType());
final long fieldValue = field.roundFloor(anchorTime.getMillis());
final int fieldValueInUnit = field.get(fieldValue);
if (periodValue == 0) {
// https://github.com/Graylog2/graylog2-server/issues/836
log.warn("Determining stride length failed because of a 0 period. Defaulting back to 1 period to avoid crashing, but this is a bug!");
periodValue = 1;
}
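        // Worked example: Period.months(1) with an anchor in July -> monthOfYear rounds down to July 1,
        // 7 % 1 == 0, so the anchor stays at the start of the current month (see the test below).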
final long difference = fieldValueInUnit % periodValue;
final long newValue = field.add(fieldValue, -1 * difference);
return new DateTime(newValue, DateTimeZone.UTC);
}
|
@Test
public void anchorCalculationShouldWorkWhenLastAnchorIsNotUTC() {
final DateTime initialTime = new DateTime(2020, 7, 31, 14, 48, 35, 0, DateTimeZone.UTC);
final InstantMillisProvider clock = new InstantMillisProvider(initialTime);
DateTimeUtils.setCurrentMillisProvider(clock);
Period period = Period.months(1);
DateTime lastAnchor = initialTime.minusHours(1).withZone(DateTimeZone.forOffsetHours(2));
final DateTime monthAnchor = TimeBasedRotationStrategy.determineRotationPeriodAnchor(lastAnchor, period);
assertThat(monthAnchor).isEqualTo(DateTime.parse("2020-07-01T00:00:00.000Z"));
}
|
@Override
public boolean applyFilterToCamelHeaders(String headerName, Object headerValue, Exchange exchange) {
boolean answer = super.applyFilterToCamelHeaders(headerName, headerValue, exchange);
        // when using the rest producer, headers are mapped to URI and query parameters using the {key} syntax;
        // if a Camel Message header matches such a token, we should filter it out (=true), as it has already
        // been mapped by the RestProducer from camel-core, and we do not want the header to be included
        // as an HTTP header as well (e.g. as a duplicate value)
if (!answer) {
if (templateUri != null) {
String token = "{" + headerName + "}";
if (templateUri.contains(token)) {
answer = true;
}
}
if (!answer && queryParameters != null) {
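            // check both the raw and the percent-encoded forms of the {key} and {key?} placeholders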
String[] tokens = new String[4];
tokens[0] = "={" + headerName + "}";
tokens[1] = "={" + headerName + "?}";
tokens[2] = "=%7B" + headerName + "%7D";
tokens[3] = "=%7B" + headerName + "%3F%7D";
for (String token : tokens) {
if (queryParameters.contains(token)) {
answer = true;
break;
}
}
}
}
return answer;
}
|
@Test
public void shouldDecideOnApplyingHeaderFilterToTemplateTokens() {
final HttpRestHeaderFilterStrategy strategy = new HttpRestHeaderFilterStrategy(
"{uriToken1}{uriToken2}",
"q1=%7BqueryToken1%7D%26q2=%7BqueryToken2%3F%7D%26");
assertTrue(strategy.applyFilterToCamelHeaders("uriToken1", "value", NOT_USED));
assertTrue(strategy.applyFilterToCamelHeaders("uriToken2", "value", NOT_USED));
assertTrue(strategy.applyFilterToCamelHeaders("queryToken1", "value", NOT_USED));
assertTrue(strategy.applyFilterToCamelHeaders("queryToken2", "value", NOT_USED));
assertFalse(strategy.applyFilterToCamelHeaders("unknown", "value", NOT_USED));
}
|
public static RetryRegistry of(Configuration configuration, CompositeCustomizer<RetryConfigCustomizer> customizer) {
CommonRetryConfigurationProperties retryConfiguration = CommonsConfigurationRetryConfiguration.of(configuration);
Map<String, RetryConfig> retryConfigMap = retryConfiguration.getInstances()
.entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> retryConfiguration.createRetryConfig(entry.getValue(), customizer, entry.getKey())));
return RetryRegistry.of(retryConfigMap);
}
|
@Test
public void testRetryRegistryFromPropertiesFile() throws ConfigurationException {
Configuration config = CommonsConfigurationUtil.getConfiguration(PropertiesConfiguration.class, TestConstants.RESILIENCE_CONFIG_PROPERTIES_FILE_NAME);
RetryRegistry registry = CommonsConfigurationRetryRegistry.of(config, new CompositeCustomizer<>(List.of()));
Assertions.assertThat(registry.retry(TestConstants.BACKEND_A).getName()).isEqualTo(TestConstants.BACKEND_A);
Assertions.assertThat(registry.retry(TestConstants.BACKEND_B).getName()).isEqualTo(TestConstants.BACKEND_B);
}
|
public void writeUshort(int value) throws IOException {
if (value < 0 || value > 0xFFFF) {
throw new ExceptionWithContext("Unsigned short value out of range: %d", value);
}
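    // dex stores ushorts little-endian: emit the low byte before the high byte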
write(value);
write(value >> 8);
}
|
@Test
public void testWriteUshort() throws IOException {
writer.writeUshort(0);
writer.writeUshort(0x1122);
writer.writeUshort(0x8899);
writer.writeUshort(0xFFFF);
expectData(0x00, 0x00,
0x22, 0x11,
0x99, 0x88,
0xFF, 0xFF);
}
|
@Override
protected void initChannel(Channel ch) throws Exception {
final ChannelPipeline pipeline = ch.pipeline();
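    // decode bytes into HTTP messages, then aggregate chunked content into full requests (up to 64 KiB)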
pipeline.addLast(new HttpServerCodec());
pipeline.addLast(new HttpObjectAggregator(65536));
addPushMessageHandlers(pipeline);
}
|
@Test
void testInitChannel() throws Exception {
initializer.initChannel(channel);
assertNotNull(channel.pipeline().context(HttpServerCodec.class));
assertNotNull(channel.pipeline().context(HttpObjectAggregator.class));
assertNotNull(channel.pipeline().get("mockHandler"));
}
|
public void awaitFutures(Collection<TopicPartition> topicPartitions) {
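        // Block until every pending errant-record report for the given partitions has completed,
        // surfacing any failure as a ConnectException.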
futuresFor(topicPartitions).forEach(future -> {
try {
future.get();
} catch (InterruptedException | ExecutionException e) {
log.error("Encountered an error while awaiting an errant record future's completion.", e);
throw new ConnectException(e);
}
});
}
|
@Test
public void testGetFutures() {
initializeReporter(true);
Collection<TopicPartition> topicPartitions = new ArrayList<>();
assertTrue(reporter.futures.isEmpty());
for (int i = 0; i < 4; i++) {
TopicPartition topicPartition = new TopicPartition("topic", i);
topicPartitions.add(topicPartition);
reporter.futures.put(topicPartition, Collections.singletonList(CompletableFuture.completedFuture(null)));
}
assertFalse(reporter.futures.isEmpty());
reporter.awaitFutures(topicPartitions);
assertTrue(reporter.futures.isEmpty());
}
|