focal_method | test_case
---|---|
public static synchronized @Nonnull Map<String, Object> loadYamlFile(File file)
throws Exception {
try (FileInputStream inputStream = new FileInputStream(file)) {
Map<String, Object> yamlResult =
(Map<String, Object>) loader.loadFromInputStream(inputStream);
return yamlResult == null ? new HashMap<>() : yamlResult;
} catch (FileNotFoundException e) {
LOG.error("Failed to find YAML file", e);
throw e;
} catch (IOException | YamlEngineException e) {
if (e instanceof MarkedYamlEngineException) {
YamlEngineException exception =
wrapExceptionToHiddenSensitiveData((MarkedYamlEngineException) e);
LOG.error("Failed to parse YAML configuration", exception);
throw exception;
} else {
throw e;
}
}
}
|
@Test
void testLoadEmptyYamlFile() throws Exception {
File confFile = new File(tmpDir, "test.yaml");
confFile.createNewFile();
assertThat(YamlParserUtils.loadYamlFile(confFile)).isEmpty();
}
|
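The pair above relies on the convention that an empty YAML document parses to null and is normalized to an empty map. A minimal standalone sketch of the same pattern, assuming the snakeyaml-engine library (org.snakeyaml.engine.v2) is on the classpath; the class name and file path are illustrative:

import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.snakeyaml.engine.v2.api.Load;
import org.snakeyaml.engine.v2.api.LoadSettings;

public class YamlLoadSketch {
    @SuppressWarnings("unchecked")
    static Map<String, Object> loadOrEmpty(String path) throws IOException {
        Load load = new Load(LoadSettings.builder().build());
        try (FileInputStream in = new FileInputStream(path)) {
            Object parsed = load.loadFromInputStream(in);
            // An empty document parses to null; normalize to an empty map.
            return parsed == null ? new HashMap<>() : (Map<String, Object>) parsed;
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(loadOrEmpty("test.yaml")); // prints {} for an empty file
    }
}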
@Override
public QuoteCharacter getQuoteCharacter() {
return QuoteCharacter.QUOTE;
}
|
@Test
void assertGetQuoteCharacter() {
assertThat(dialectDatabaseMetaData.getQuoteCharacter(), is(QuoteCharacter.QUOTE));
}
|
@Override
public void isEqualTo(@Nullable Object expected) {
@SuppressWarnings("UndefinedEquals") // method contract requires testing iterables for equality
boolean equal = Objects.equal(actual, expected);
if (equal) {
return;
}
// Fail but with a more descriptive message:
if (actual instanceof List && expected instanceof List) {
containsExactlyElementsIn((List<?>) expected).inOrder();
} else if ((actual instanceof Set && expected instanceof Set)
|| (actual instanceof Multiset && expected instanceof Multiset)) {
containsExactlyElementsIn((Collection<?>) expected);
} else {
/*
* TODO(b/18430105): Consider a special message if comparing incompatible collection types
* (similar to what MultimapSubject has).
*/
super.isEqualTo(expected);
}
}
|
@Test
public void nullEqualToSomething() {
expectFailureWhenTestingThat(null).isEqualTo(ImmutableList.of());
}
|
public static <T extends Message> ProtoCoder<T> of(Class<T> protoMessageClass) {
return new ProtoCoder<>(protoMessageClass, ImmutableSet.of());
}
|
@Test
public void testCoderEncodeDecodeEqual() throws Exception {
MessageA value =
MessageA.newBuilder()
.setField1("hello")
.addField2(MessageB.newBuilder().setField1(true).build())
.addField2(MessageB.newBuilder().setField1(false).build())
.build();
CoderProperties.coderDecodeEncodeEqual(ProtoCoder.of(MessageA.class), value);
}
|
@Override
public boolean trySetPermits(int permits) {
return get(trySetPermitsAsync(permits));
}
|
@Test
public void testTrySetPermits() throws InterruptedException {
RSemaphore s = redisson.getSemaphore("test");
assertThat(s.trySetPermits(10)).isTrue();
assertThat(s.availablePermits()).isEqualTo(10);
assertThat(s.trySetPermits(15)).isFalse();
assertThat(s.availablePermits()).isEqualTo(10);
s.delete();
assertThat(s.isExists()).isFalse();
assertThat(s.trySetPermits(1, Duration.ofSeconds(2))).isTrue();
Thread.sleep(1000);
assertThat(s.availablePermits()).isEqualTo(1);
Thread.sleep(1000);
assertThat(s.availablePermits()).isZero();
assertThat(s.isExists()).isFalse();
}
|
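trySetPermits only succeeds when the semaphore has not been initialized yet (the second call returns false), and deleting the key resets that state. Redisson enforces this atomically on the Redis server; a rough in-memory analogy of the one-shot semantics using a compare-and-set, purely as a sketch:

import java.util.concurrent.atomic.AtomicInteger;

public class TrySetPermitsSketch {
    // 0 stands in for "never initialized", mirroring a missing Redis key.
    private final AtomicInteger permits = new AtomicInteger(0);

    // Returns true only on the first successful initialization.
    boolean trySetPermits(int value) {
        return permits.compareAndSet(0, value);
    }

    public static void main(String[] args) {
        TrySetPermitsSketch s = new TrySetPermitsSketch();
        System.out.println(s.trySetPermits(10)); // true
        System.out.println(s.trySetPermits(15)); // false: already set
        System.out.println(s.permits.get());     // 10
    }
}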
@ScalarOperator(MULTIPLY)
@SqlType(StandardTypes.INTEGER)
public static long multiply(@SqlType(StandardTypes.INTEGER) long left, @SqlType(StandardTypes.INTEGER) long right)
{
try {
return Math.multiplyExact((int) left, (int) right);
}
catch (ArithmeticException e) {
throw new PrestoException(NUMERIC_VALUE_OUT_OF_RANGE, format("integer multiplication overflow: %s * %s", left, right), e);
}
}
|
@Test
public void testMultiply()
{
assertFunction("INTEGER'37' * INTEGER'37'", INTEGER, 37 * 37);
assertFunction("INTEGER'37' * INTEGER'17'", INTEGER, 37 * 17);
assertFunction("INTEGER'17' * INTEGER'37'", INTEGER, 17 * 37);
assertFunction("INTEGER'17' * INTEGER'17'", INTEGER, 17 * 17);
assertNumericOverflow(format("INTEGER'%s' * INTEGER'2'", Integer.MAX_VALUE), "integer multiplication overflow: 2147483647 * 2");
}
|
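The overflow check leans entirely on Math.multiplyExact throwing ArithmeticException once the int range is exceeded; a self-contained illustration of the behavior the test asserts:

public class MultiplyOverflowSketch {
    public static void main(String[] args) {
        // Within the int range the result is exact.
        System.out.println(Math.multiplyExact(37, 37)); // 1369
        try {
            // 2147483647 * 2 does not fit in an int.
            Math.multiplyExact(Integer.MAX_VALUE, 2);
        } catch (ArithmeticException e) {
            System.out.println("overflow: " + e.getMessage());
        }
    }
}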
@Nullable
public static TNetworkAddress getComputeNodeHost(ImmutableMap<Long, ComputeNode> computeNodes,
Reference<Long> computeNodeIdRef) {
ComputeNode node = getComputeNode(computeNodes);
if (node != null) {
computeNodeIdRef.setRef(node.getId());
return new TNetworkAddress(node.getHost(), node.getBePort());
}
return null;
}
|
@Test
public void testNoAliveComputeNode() {
ImmutableMap.Builder<Long, ComputeNode> builder = ImmutableMap.builder();
for (int i = 0; i < 6; i++) {
ComputeNode node = new ComputeNode(i, "address" + i, 0);
node.setAlive(false);
builder.put(node.getId(), node);
}
ImmutableMap<Long, ComputeNode> nodes = builder.build();
Reference<Long> idRef = new Reference<>();
TNetworkAddress address = SimpleScheduler.getComputeNodeHost(nodes, idRef);
Assert.assertNull(address);
}
|
public FEELFnResult<TemporalAmount> invoke(@ParameterName( "from" ) String val) {
if ( val == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
}
try {
// try to parse as days/hours/minutes/seconds
return FEELFnResult.ofResult( Duration.parse( val ) );
} catch( DateTimeParseException e ) {
// if it failed, try to parse as years/months
try {
return FEELFnResult.ofResult(ComparablePeriod.parse(val).normalized());
} catch( DateTimeParseException e2 ) {
// failed to parse, so return null according to the spec
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "date-parsing exception",
new RuntimeException(new Throwable() { public final List<Throwable> causes = Arrays.asList( new Throwable[]{e, e2} ); } )));
}
}
}
|
@Test
void invokeParamStringPeriod() {
FunctionTestUtil.assertResult(durationFunction.invoke("P2Y3M"), ComparablePeriod.of(2, 3, 0));
FunctionTestUtil.assertResult(durationFunction.invoke("P2Y3M4D"), ComparablePeriod.of(2, 3, 4));
}
|
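The fallback chain, Duration first and a year/month period second, can be reproduced with the JDK alone; ComparablePeriod is Drools-specific, so java.time.Period stands in here as a sketch:

import java.time.Duration;
import java.time.Period;
import java.time.format.DateTimeParseException;

public class DurationParseSketch {
    static Object parseTemporalAmount(String val) {
        try {
            // Days/hours/minutes/seconds, e.g. "P2DT3H" or "PT2H30M".
            return Duration.parse(val);
        } catch (DateTimeParseException e) {
            // Years/months, e.g. "P2Y3M"; Duration.parse rejects these.
            return Period.parse(val).normalized();
        }
    }

    public static void main(String[] args) {
        System.out.println(parseTemporalAmount("PT2H30M")); // Duration PT2H30M
        System.out.println(parseTemporalAmount("P2Y3M"));   // Period P2Y3M
    }
}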
public int doWork()
{
final long nowNs = nanoClock.nanoTime();
cachedNanoClock.update(nowNs);
dutyCycleTracker.measureAndUpdate(nowNs);
final int workCount = commandQueue.drain(CommandProxy.RUN_TASK, Configuration.COMMAND_DRAIN_LIMIT);
final long shortSendsBefore = shortSends.get();
final int bytesSent = doSend(nowNs);
int bytesReceived = 0;
if (0 == bytesSent ||
++dutyCycleCounter >= dutyCycleRatio ||
(controlPollDeadlineNs - nowNs < 0) ||
shortSendsBefore < shortSends.get())
{
bytesReceived = controlTransportPoller.pollTransports();
dutyCycleCounter = 0;
controlPollDeadlineNs = nowNs + statusMessageReadTimeoutNs;
}
if (reResolutionCheckIntervalNs > 0 && (reResolutionDeadlineNs - nowNs) < 0)
{
reResolutionDeadlineNs = nowNs + reResolutionCheckIntervalNs;
controlTransportPoller.checkForReResolutions(nowNs, conductorProxy);
}
return workCount + bytesSent + bytesReceived;
}
|
@Test
void shouldSendMultipleSetupFramesOnChannelWhenTimeoutWithoutStatusMessage()
{
sender.doWork();
assertThat(receivedFrames.size(), is(1));
nanoClock.advance(Configuration.PUBLICATION_SETUP_TIMEOUT_NS - 1);
sender.doWork();
nanoClock.advance(10);
sender.doWork();
assertThat(receivedFrames.size(), is(2));
}
|
@Override
public double[] smoothDerivative(double[] input) {
if (input.length < weights.length) {
return averageDerivativeForVeryShortTrack(input);
}
double[] smoothed = new double[input.length];
int halfWindowFloored = weights.length / 2; // we want to exclude the center point
for (int i = halfWindowFloored; i < input.length - halfWindowFloored; i++) {
for (int windowIndex = 0; windowIndex < smoothCoeff.length; windowIndex++) {
smoothed[i] += derivCoeff[windowIndex] * input[i + windowIndex + offsetFromWindowCenter];
}
smoothed[i] = smoothed[i] / timeStep;
}
fillSmoothDerivativeLeftSide(smoothed, halfWindowFloored);
fillSmoothDerivativeRightSide(smoothed, halfWindowFloored);
return smoothed;
}
|
@Test
public void Derivative_FromFakeTrackWithSymmetricOutliers_RemoveBumps() {
SavitzkyGolayFilter test = new SavitzkyGolayFilter(1.0);
double[] input = new double[]{
10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0,
13.0, 16.0, 13.0, // <-- outlier points make a symmetric "triangle", center at index = 8
10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0
};
double[] actual = test.smoothDerivative(input);
assertThat(actual.length, equalTo(input.length));
assertThat(actual[8], closeTo(0.0, 1E-3));
}
|
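The smoothing is a windowed convolution with precomputed derivative coefficients. A minimal sketch using the standard 5-point Savitzky-Golay first-derivative kernel (-2,-1,0,1,2)/10 for a quadratic fit, leaving the edges at zero; the real filter also handles very short tracks and fills in the edges:

public class SgDerivativeSketch {
    static double[] smoothDerivative(double[] input, double timeStep) {
        // 5-point Savitzky-Golay first-derivative coefficients (quadratic fit).
        double[] kernel = {-0.2, -0.1, 0.0, 0.1, 0.2};
        int half = kernel.length / 2;
        double[] out = new double[input.length];
        for (int i = half; i < input.length - half; i++) {
            double acc = 0.0;
            for (int k = 0; k < kernel.length; k++) {
                acc += kernel[k] * input[i + k - half];
            }
            out[i] = acc / timeStep;
        }
        return out;
    }

    public static void main(String[] args) {
        double[] ramp = {0, 1, 2, 3, 4, 5, 6, 7};
        // Interior values come out as ~1.0, the exact slope of the ramp.
        System.out.println(java.util.Arrays.toString(smoothDerivative(ramp, 1.0)));
    }
}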
@Override
public Object execute(String command, byte[]... args) {
for (Method method : this.getClass().getDeclaredMethods()) {
if (method.getName().equalsIgnoreCase(command)
&& Modifier.isPublic(method.getModifiers())
&& (method.getParameterTypes().length == args.length)) {
try {
Object t = execute(method, args);
if (t instanceof String) {
return ((String) t).getBytes();
}
return t;
} catch (IllegalArgumentException e) {
if (isPipelined()) {
throw new RedisPipelineException(e);
}
throw new InvalidDataAccessApiUsageException(e.getMessage(), e);
}
}
}
throw new UnsupportedOperationException();
}
|
@Test
public void testExecute() {
Long s = (Long) connection.execute("ttl", "key".getBytes());
assertThat(s).isEqualTo(-2);
connection.execute("flushDb");
}
|
static int getStringDisplayWidth(String str) {
int numOfFullWidthCh = (int) str.codePoints().filter(TableauStyle::isFullWidth).count();
return str.length() + numOfFullWidthCh;
}
|
@Test
void testStringDisplayWidth() {
List<String> data =
Arrays.asList(
"abcdefg,12345,ABC",
"to be or not to be that's a question.",
"这是一段中文",
"これは日本語をテストするための文です");
int[] expected = new int[] {17, 37, 12, 36};
for (int i = 0; i < data.size(); i++) {
assertThat(TableauStyle.getStringDisplayWidth(data.get(i))).isEqualTo(expected[i]);
}
}
|
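The width rule is: every full-width code point takes two display cells, everything else one. A rough standalone version; the range check below is a simplification and the real isFullWidth predicate may cover more blocks:

public class DisplayWidthSketch {
    // Simplified "full width" heuristic: common CJK and full-width ranges only.
    static boolean isFullWidth(int cp) {
        return (cp >= 0x3040 && cp <= 0x30FF)   // Hiragana + Katakana
            || (cp >= 0x4E00 && cp <= 0x9FFF)   // CJK Unified Ideographs
            || (cp >= 0xAC00 && cp <= 0xD7A3)   // Hangul syllables
            || (cp >= 0xFF00 && cp <= 0xFF60);  // Full-width forms
    }

    static int displayWidth(String s) {
        int fullWidth = (int) s.codePoints().filter(DisplayWidthSketch::isFullWidth).count();
        return s.length() + fullWidth; // full-width chars count twice
    }

    public static void main(String[] args) {
        System.out.println(displayWidth("abcdefg,12345,ABC")); // 17
        System.out.println(displayWidth("这是一段中文"));           // 12
    }
}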
@Override
public String getOperationName(Exchange exchange, Endpoint endpoint) {
Map<String, String> queryParameters = toQueryParameters(endpoint.getEndpointUri());
return queryParameters.containsKey("operation")
? queryParameters.get("operation")
: super.getOperationName(exchange, endpoint);
}
|
@Test
public void testOperationName() {
String opName = "INDEX";
Endpoint endpoint = Mockito.mock(Endpoint.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("elasticsearch://local?operation="
+ opName + "&indexName=twitter&indexType=tweet");
SpanDecorator decorator = new ElasticsearchSpanDecorator();
assertEquals(opName, decorator.getOperationName(null, endpoint));
}
|
public static boolean isOrHasCause(Throwable t, Class<?> classToFind) {
while (t != null && t.getCause() != t && !classToFind.isAssignableFrom(t.getClass())) {
t = t.getCause();
}
return t != null && classToFind.isAssignableFrom(t.getClass());
}
|
@Test
public void test_isOrHasCause_when_expectedTypeADeepCause_then_true() {
Throwable throwable = new TargetNotMemberException("");
for (int i = 0; i < 10; i++) {
throwable = new Exception(throwable);
}
assertTrue(isOrHasCause(throwable, TargetNotMemberException.class));
}
|
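The `t.getCause() != t` guard stops the walk on a self-referential cause, so a malformed chain cannot loop forever. A runnable demo of the same walk on a ten-deep chain, mirroring the test but standalone:

public class CauseChainSketch {
    static boolean isOrHasCause(Throwable t, Class<?> classToFind) {
        // Stop on null, on a self-referential cause, or on the first match.
        while (t != null && t.getCause() != t && !classToFind.isAssignableFrom(t.getClass())) {
            t = t.getCause();
        }
        return t != null && classToFind.isAssignableFrom(t.getClass());
    }

    public static void main(String[] args) {
        Throwable deep = new IllegalStateException("root");
        for (int i = 0; i < 10; i++) {
            deep = new Exception(deep);
        }
        System.out.println(isOrHasCause(deep, IllegalStateException.class));  // true
        System.out.println(isOrHasCause(deep, NumberFormatException.class));  // false
    }
}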
@Override
public Optional<AuthProperty> inferAuth(String registry) throws InferredAuthException {
Server server = getServerFromMavenSettings(registry);
if (server == null) {
return Optional.empty();
}
SettingsDecryptionRequest request = new DefaultSettingsDecryptionRequest(server);
SettingsDecryptionResult result = decrypter.decrypt(request);
// Un-encrypted passwords are passed through, so a problem indicates a real issue.
// If there are any ERROR or FATAL problems reported, then decryption failed.
for (SettingsProblem problem : result.getProblems()) {
if (problem.getSeverity() == SettingsProblem.Severity.ERROR
|| problem.getSeverity() == SettingsProblem.Severity.FATAL) {
throw new InferredAuthException(
"Unable to decrypt server(" + registry + ") info from settings.xml: " + problem);
}
}
Server resultServer = result.getServer();
String username = resultServer.getUsername();
String password = resultServer.getPassword();
return Optional.of(
new AuthProperty() {
@Override
public String getUsername() {
return username;
}
@Override
public String getPassword() {
return password;
}
@Override
public String getAuthDescriptor() {
return CREDENTIAL_SOURCE;
}
@Override
public String getUsernameDescriptor() {
return CREDENTIAL_SOURCE;
}
@Override
public String getPasswordDescriptor() {
return CREDENTIAL_SOURCE;
}
});
}
|
@Test
public void testInferredAuth_decrypterFailure() {
try {
mavenSettingsServerCredentials.inferAuth("badServer");
Assert.fail();
} catch (InferredAuthException ex) {
MatcherAssert.assertThat(
ex.getMessage(),
CoreMatchers.startsWith("Unable to decrypt server(badServer) info from settings.xml:"));
}
}
|
@VisibleForTesting
ExportResult<MusicContainerResource> exportPlaylistItems(
TokensAndUrlAuthData authData,
IdOnlyContainerResource playlistData,
Optional<PaginationData> paginationData, UUID jobId)
throws IOException, InvalidTokenException, PermissionDeniedException, ParseException {
String playlistId = playlistData.getId();
Optional<String> paginationToken =
paginationData.map((PaginationData value) -> ((StringPaginationToken) value).getToken());
PlaylistItemExportResponse playlistItemExportResponse =
getOrCreateMusicHttpApi(authData).exportPlaylistItems(playlistId, paginationToken);
PaginationData nextPageData = null;
if (!Strings.isNullOrEmpty(playlistItemExportResponse.getNextPageToken())) {
nextPageData = new StringPaginationToken(playlistItemExportResponse.getNextPageToken());
}
ContinuationData continuationData = new ContinuationData(nextPageData);
MusicContainerResource containerResource = null;
GooglePlaylistItem[] googlePlaylistItems = playlistItemExportResponse.getPlaylistItems();
List<MusicPlaylistItem> playlistItems = new ArrayList<>();
if (googlePlaylistItems != null && googlePlaylistItems.length > 0) {
for (GooglePlaylistItem googlePlaylistItem : googlePlaylistItems) {
playlistItems.add(convertPlaylistItem(playlistId, googlePlaylistItem));
monitor.debug(
() ->
String.format(
"%s: Google Music exporting playlist item in %s : [track title: %s, track isrc: %s]",
jobId, playlistId,
googlePlaylistItem.getTrack().getTitle(),
googlePlaylistItem.getTrack().getIsrc()));
}
containerResource = new MusicContainerResource(null, playlistItems, null, null);
}
return new ExportResult<>(ResultType.CONTINUE, containerResource, continuationData);
}
|
@Test
public void exportPlaylistItemSubsequentSet()
throws IOException, InvalidTokenException, PermissionDeniedException, ParseException {
GooglePlaylistItem playlistItem = setUpSinglePlaylistItem("t1_isrc", "r1_icpn");
when(playlistItemExportResponse.getPlaylistItems())
.thenReturn(new GooglePlaylistItem[]{playlistItem});
when(playlistItemExportResponse.getNextPageToken()).thenReturn(null);
StringPaginationToken inputPaginationToken = new StringPaginationToken(PLAYLIST_ITEM_TOKEN);
IdOnlyContainerResource idOnlyContainerResource = new IdOnlyContainerResource("p1_id");
// Run test
ExportResult<MusicContainerResource> result =
googleMusicExporter.exportPlaylistItems(
null, idOnlyContainerResource, Optional.of(inputPaginationToken), uuid);
// Check results
// Verify correct methods were called
verify(musicHttpApi).exportPlaylistItems("p1_id", Optional.of(PLAYLIST_ITEM_TOKEN));
verify(playlistItemExportResponse).getPlaylistItems();
// Check pagination token
ContinuationData continuationData = result.getContinuationData();
PaginationData paginationToken = continuationData.getPaginationData();
assertThat(paginationToken).isNull();
}
|
@Override
public Iterator<E> iterator() {
return Iterators.transform(map.entrySet().iterator(), Map.Entry::getValue);
}
|
@Test
public void testIterator() {
ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
TestValue val = new TestValue("foo", 1);
assertTrue(set.add(val));
TestValue nextval = new TestValue("goo", 2);
assertTrue(set.add(nextval));
assertTrue(set.contains(nextval));
Iterator<TestValue> iterator = set.iterator();
assertEquals(val, iterator.next());
assertTrue(iterator.hasNext());
assertEquals(nextval, iterator.next());
}
|
@Override
public <R> RFuture<R> evalShaAsync(Mode mode, String shaDigest, ReturnType returnType, List<Object> keys, Object... values) {
return evalShaAsync(null, mode, codec, shaDigest, returnType, keys, values);
}
|
@Test
public void testEvalshaAsync() {
RScript s = redisson.getScript();
String res = s.scriptLoad("return redis.call('get', 'foo')");
Assertions.assertEquals("282297a0228f48cd3fc6a55de6316f31422f5d17", res);
redisson.getBucket("foo").set("bar");
String r = redisson.getScript().eval(Mode.READ_ONLY, "return redis.call('get', 'foo')", RScript.ReturnType.VALUE);
Assertions.assertEquals("bar", r);
RFuture<Object> r1 = redisson.getScript().evalShaAsync(Mode.READ_ONLY, "282297a0228f48cd3fc6a55de6316f31422f5d17", RScript.ReturnType.VALUE, Collections.emptyList());
Assertions.assertEquals("bar", r1.toCompletableFuture().join());
}
|
@PatchMapping
public ResponseEntity<?> updateProduct(@PathVariable("productId") int productId,
@Valid @RequestBody UpdateProductPayload payload,
BindingResult bindingResult) throws BindException {
if (bindingResult.hasErrors()) {
if (bindingResult instanceof BindException exception) {
throw exception;
} else {
throw new BindException(bindingResult);
}
} else {
this.productService.updateProduct(productId, payload.title(), payload.details());
return ResponseEntity.noContent()
.build();
}
}
|
@Test
void updateProduct_RequestIsValid_ReturnsNoContent() throws BindException {
// given
var payload = new UpdateProductPayload("New title", "New description");
var bindingResult = new MapBindingResult(Map.of(), "payload");
// when
var result = this.controller.updateProduct(1, payload, bindingResult);
// then
assertNotNull(result);
assertEquals(HttpStatus.NO_CONTENT, result.getStatusCode());
verify(this.productService).updateProduct(1, "New title", "New description");
}
|
public <InputT, OutputT> DoFn<InputT, OutputT> get() throws Exception {
Thread currentThread = Thread.currentThread();
return (DoFn<InputT, OutputT>) outstanding.get(currentThread);
}
|
@Test
public void getMultipleCallsSingleSetupCall() throws Exception {
TestFn obtained = (TestFn) mgr.get();
TestFn secondObtained = (TestFn) mgr.get();
assertThat(obtained, theInstance(secondObtained));
assertThat(obtained.setupCalled, is(true));
assertThat(obtained.teardownCalled, is(false));
}
|
@Override
public void validateDictDataList(String dictType, Collection<String> values) {
if (CollUtil.isEmpty(values)) {
return;
}
Map<String, DictDataDO> dictDataMap = CollectionUtils.convertMap(
dictDataMapper.selectByDictTypeAndValues(dictType, values), DictDataDO::getValue);
// validate
values.forEach(value -> {
DictDataDO dictData = dictDataMap.get(value);
if (dictData == null) {
throw exception(DICT_DATA_NOT_EXISTS);
}
if (!CommonStatusEnum.ENABLE.getStatus().equals(dictData.getStatus())) {
throw exception(DICT_DATA_NOT_ENABLE, dictData.getLabel());
}
});
}
|
@Test
public void testValidateDictDataList_success() {
// mock data
DictDataDO dictDataDO = randomDictDataDO().setStatus(CommonStatusEnum.ENABLE.getStatus());
dictDataMapper.insert(dictDataDO);
// prepare parameters
String dictType = dictDataDO.getDictType();
List<String> values = singletonList(dictDataDO.getValue());
// invoke; no assertion needed
dictDataService.validateDictDataList(dictType, values);
}
|
public Optional<JobTriggerDto> cancelTriggerByQuery(Bson query) {
final var update = set(FIELD_IS_CANCELLED, true);
return Optional.ofNullable(collection.findOneAndUpdate(query, update));
}
|
@Test
@MongoDBFixtures("locked-job-triggers.json")
public void cancelTriggerByQuery() {
// Must return an empty Optional if the query didn't match any trigger
assertThat(dbJobTriggerService.cancelTriggerByQuery(DBQuery.is("foo", "bar"))).isEmpty();
final JobTriggerDto lockedTrigger = dbJobTriggerService.get("54e3deadbeefdeadbeef0001").orElseThrow(AssertionError::new);
assertThat(lockedTrigger.isCancelled()).isFalse();
assertThat(dbJobTriggerService.cancelTriggerByQuery(DBQuery.is("_id", "54e3deadbeefdeadbeef0001"))).isPresent();
final JobTriggerDto cancelledTrigger = dbJobTriggerService.get(lockedTrigger.id()).orElseThrow(AssertionError::new);
assertThat(cancelledTrigger.isCancelled()).isTrue();
}
|
@Override
public byte[] fromConnectData(String topic, Schema schema, Object value) {
if (schema != null && schema.type() != Schema.Type.BYTES)
throw new DataException("Invalid schema type for ByteArrayConverter: " + schema.type().toString());
if (value != null && !(value instanceof byte[]) && !(value instanceof ByteBuffer))
throw new DataException("ByteArrayConverter is not compatible with objects of type " + value.getClass());
return value instanceof ByteBuffer ? getBytesFromByteBuffer((ByteBuffer) value) : (byte[]) value;
}
|
@Test
public void testFromConnectBadSchema() {
assertThrows(DataException.class,
() -> converter.fromConnectData(TOPIC, Schema.INT32_SCHEMA, SAMPLE_BYTES));
}
|
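getBytesFromByteBuffer is not shown above; a common way to copy a buffer's remaining bytes without disturbing the caller's position, which is presumably (an assumption) what it does:

import java.nio.ByteBuffer;
import java.util.Arrays;

public class ByteBufferBytesSketch {
    static byte[] toBytes(ByteBuffer buffer) {
        // Read from a duplicate so the caller's position/limit stay untouched.
        ByteBuffer dup = buffer.duplicate();
        byte[] out = new byte[dup.remaining()];
        dup.get(out);
        return out;
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap(new byte[]{1, 2, 3});
        System.out.println(Arrays.toString(toBytes(buf))); // [1, 2, 3]
        System.out.println(buf.remaining());               // still 3
    }
}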
static <T extends CompoundPredicate> T flattenCompound(Predicate predicateLeft, Predicate predicateRight, Class<T> klass) {
// The following could have been achieved with {@link com.hazelcast.query.impl.predicates.FlatteningVisitor},
// however since we only care for 2-argument flattening, we can avoid constructing a visitor and its internals
// for each token pass at the cost of the following explicit code.
Predicate[] predicates;
if (klass.isInstance(predicateLeft) || klass.isInstance(predicateRight)) {
Predicate[] left = getSubPredicatesIfClass(predicateLeft, klass);
Predicate[] right = getSubPredicatesIfClass(predicateRight, klass);
predicates = new Predicate[left.length + right.length];
ArrayUtils.concat(left, right, predicates);
} else {
predicates = new Predicate[]{predicateLeft, predicateRight};
}
try {
T compoundPredicate = klass.getDeclaredConstructor().newInstance();
compoundPredicate.setPredicates(predicates);
return compoundPredicate;
} catch (ReflectiveOperationException e) {
throw new IllegalArgumentException(String.format("%s must have a public default constructor", klass.getName()));
}
}
|
@Test
public void testOr_whenBothPredicatesOr() {
OrPredicate predicate1 = new OrPredicate(new SqlPredicate("a == 1"), new SqlPredicate("a == 2"));
OrPredicate predicate2 = new OrPredicate(new SqlPredicate("a == 3"));
OrPredicate concatenatedOr = SqlPredicate.flattenCompound(predicate1, predicate2, OrPredicate.class);
assertEquals(3, concatenatedOr.getPredicates().length);
}
|
public static ChannelBuffer directBuffer(int capacity) {
if (capacity == 0) {
return EMPTY_BUFFER;
}
ChannelBuffer buffer = new ByteBufferBackedChannelBuffer(ByteBuffer.allocateDirect(capacity));
buffer.clear();
return buffer;
}
|
@Test
void testDirectBuffer() {
ChannelBuffer channelBuffer = ChannelBuffers.directBuffer(0);
Assertions.assertEquals(channelBuffer, EMPTY_BUFFER);
channelBuffer = ChannelBuffers.directBuffer(16);
Assertions.assertTrue(channelBuffer instanceof ByteBufferBackedChannelBuffer);
}
|
public static String[] split(String splittee, String splitChar, boolean truncate) { //NOSONAR
if (splittee == null || splitChar == null) {
return new String[0];
}
final String EMPTY_ELEMENT = "";
int spot;
final int splitLength = splitChar.length();
final String adjacentSplit = splitChar + splitChar;
final int adjacentSplitLength = adjacentSplit.length();
if (truncate) {
while ((spot = splittee.indexOf(adjacentSplit)) != -1) {
splittee = splittee.substring(0, spot + splitLength)
+ splittee.substring(spot + adjacentSplitLength, splittee.length());
}
if (splittee.startsWith(splitChar)) {
splittee = splittee.substring(splitLength);
}
if (splittee.endsWith(splitChar)) { // Remove trailing splitter
splittee = splittee.substring(0, splittee.length() - splitLength);
}
}
List<String> returns = new ArrayList<>();
final int length = splittee.length(); // This is the new length
int start = 0;
spot = 0;
while (start < length && (spot = splittee.indexOf(splitChar, start)) > -1) {
if (spot > 0) {
returns.add(splittee.substring(start, spot));
} else {
returns.add(EMPTY_ELEMENT);
}
start = spot + splitLength;
}
if (start < length) {
returns.add(splittee.substring(start));
} else if (spot == length - splitLength) {// Found splitChar at end of line
returns.add(EMPTY_ELEMENT);
}
return returns.toArray(new String[returns.size()]);
}
|
@Test
public void testSplitStringStringNullWithMultipleDelimiter() {
assertThat(JOrphanUtils.split("a,;bc,;,", ",;", null), CoreMatchers.equalTo(new String[]{"a", "bc"}));
}
|
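Following the logic above, truncate=true first collapses adjacent delimiters and then trims a leading and a trailing one, while truncate=false keeps the empty tokens. A usage sketch of the boolean overload, assuming the jorphan jar is on the classpath (the test's three-argument String overload is a different method):

import java.util.Arrays;
import org.apache.jorphan.util.JOrphanUtils;

public class SplitSketch {
    public static void main(String[] args) {
        // Adjacent and trailing delimiters collapse with truncate=true.
        System.out.println(Arrays.toString(JOrphanUtils.split("a,,b,", ",", true)));  // [a, b]
        // With truncate=false the empty tokens survive.
        System.out.println(Arrays.toString(JOrphanUtils.split("a,,b,", ",", false))); // [a, , b, ]
    }
}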
@Nonnull
static String extractJavaScriptUrlWithIframeResource() throws ParsingException {
final String iframeUrl;
final String iframeContent;
try {
iframeUrl = "https://www.youtube.com/iframe_api";
iframeContent = NewPipe.getDownloader()
.get(iframeUrl, Localization.DEFAULT)
.responseBody();
} catch (final Exception e) {
throw new ParsingException("Could not fetch IFrame resource", e);
}
try {
final String hash = Parser.matchGroup1(
IFRAME_RES_JS_BASE_PLAYER_HASH_PATTERN, iframeContent);
return String.format(BASE_JS_PLAYER_URL_FORMAT, hash);
} catch (final Parser.RegexException e) {
throw new ParsingException(
"IFrame resource didn't provide JavaScript base player's hash", e);
}
}
|
@Test
public void testExtractJavaScriptUrlIframe() throws ParsingException {
assertTrue(YoutubeJavaScriptExtractor.extractJavaScriptUrlWithIframeResource()
.endsWith("base.js"));
}
|
@Override
public SplitWeight weightForSplitSizeInBytes(long splitSizeInBytes)
{
// Clamp the value to be between the minimum weight and 1.0 (standard weight)
return SplitWeight.fromProportion(Math.min(Math.max(splitSizeInBytes / targetSplitSizeInBytes, minimumWeight), 1.0));
}
|
@Test
public void testMinimumAndMaximumSplitWeightHandling()
{
DataSize targetSplitSize = DataSize.succinctBytes(megabytesToBytes(64));
SizeBasedSplitWeightProvider provider = new SizeBasedSplitWeightProvider(0.05, targetSplitSize);
assertEquals(provider.weightForSplitSizeInBytes(1), SplitWeight.fromRawValue(5));
DataSize largerThanTarget = DataSize.succinctBytes(megabytesToBytes(128));
assertEquals(provider.weightForSplitSizeInBytes(largerThanTarget.toBytes()), SplitWeight.fromRawValue(STANDARD_SPLIT_WEIGHT));
}
|
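The weight is just the size-to-target proportion clamped between the configured floor and 1.0. Worked numbers for the test's 64MB target, assuming the division happens in floating point:

public class SplitWeightSketch {
    static double clampedProportion(long splitSizeInBytes, long targetSplitSizeInBytes, double minimumWeight) {
        double proportion = (double) splitSizeInBytes / targetSplitSizeInBytes;
        return Math.min(Math.max(proportion, minimumWeight), 1.0);
    }

    public static void main(String[] args) {
        long target = 64L * 1024 * 1024;
        // 1 byte is far below the floor, so the minimum weight applies.
        System.out.println(clampedProportion(1, target, 0.05));          // 0.05
        // 128MB is twice the target, so the proportion clamps down to 1.0.
        System.out.println(clampedProportion(2 * target, target, 0.05)); // 1.0
    }
}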
public ProvidePPPPCAOptimizedRequest createPpPpcaRequest(String bsn) throws BsnkException {
ProvidePPPPCAOptimizedRequest request = new ProvidePPPPCAOptimizedRequest();
request.setDateTime(getDateTime());
request.setRequestID("DGD-" + UUID.randomUUID().toString());
request.setRequester(digidMuOin);
request.setRequesterKeySetVersion(digidMuKsv);
request.setBSN(bsn);
return request;
}
|
@Test
public void createPpPpcaRequest() throws IOException, BsnkException {
String bsn = "PPPPPPPPP";
ProvidePPPPCAOptimizedRequest result = bsnkUtil.createPpPpcaRequest(bsn);
assertNotNull(result.getDateTime());
assertEquals(DatatypeConstants.FIELD_UNDEFINED, result.getDateTime().getMillisecond());
assertTrue(result.getRequestID().startsWith("DGD-"));
assertNotNull(UUID.fromString(result.getRequestID().replaceAll("DGD-", "")));
assertEquals(ReflectionTestUtils.getField(bsnkUtil, "digidMuKsv"), result.getRequesterKeySetVersion());
assertEquals(ReflectionTestUtils.getField(bsnkUtil, "digidMuOin"), result.getRequester());
assertEquals(bsn, result.getBSN());
}
|
public Person getPerson(int key) {
// Try to find person in the identity map
Person person = this.identityMap.getPerson(key);
if (person != null) {
LOGGER.info("Person found in the Map");
return person;
} else {
// Try to find person in the database
person = this.db.find(key);
if (person != null) {
this.identityMap.addPerson(person);
LOGGER.info("Person found in DB.");
return person;
}
LOGGER.info("Person with this ID does not exist.");
return null;
}
}
|
@Test
void personFoundInIdMap(){
// personFinderInstance
PersonFinder personFinder = new PersonFinder();
// init database for our personFinder
PersonDbSimulatorImplementation db = new PersonDbSimulatorImplementation();
// Dummy persons
Person person1 = new Person(1, "John", 27304159);
Person person2 = new Person(2, "Thomas", 42273631);
Person person3 = new Person(3, "Arthur", 27489171);
Person person4 = new Person(4, "Finn", 20499078);
Person person5 = new Person(5, "Michael", 40599078);
// Add data to the database.
db.insert(person1);
db.insert(person2);
db.insert(person3);
db.insert(person4);
db.insert(person5);
personFinder.setDb(db);
// Assure key is not in the ID map.
Assertions.assertFalse(personFinder.getIdentityMap().getPersonMap().containsKey(3));
// Assure key is in the database.
Assertions.assertEquals(person3,personFinder.getPerson(3),"Finder returns incorrect record.");
// Assure that the record for this key is cached in the Map now.
Assertions.assertTrue(personFinder.getIdentityMap().getPersonMap().containsKey(3));
// Find the record again. This time it will be found in the map.
Assertions.assertEquals(person3,personFinder.getPerson(3),"Finder returns incorrect record.");
}
|
@ConstantFunction(name = "str2date", argTypes = {VARCHAR, VARCHAR}, returnType = DATE)
public static ConstantOperator str2Date(ConstantOperator date, ConstantOperator fmtLiteral) {
DateTimeFormatterBuilder builder = DateUtils.unixDatetimeFormatBuilder(fmtLiteral.getVarchar(), false);
LocalDate ld = LocalDate.from(builder.toFormatter().withResolverStyle(ResolverStyle.STRICT).parse(
StringUtils.strip(date.getVarchar(), "\r\n\t ")));
return ConstantOperator.createDatetime(ld.atTime(0, 0, 0), Type.DATE);
}
|
@Test
public void str2Date() {
assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
.str2Date(ConstantOperator.createVarchar("2013,05,10"), ConstantOperator.createVarchar("%Y,%m,%d"))
.getDate().toString());
assertEquals("2013-05-10T00:00", ScalarOperatorFunctions
.str2Date(ConstantOperator.createVarchar(" 2013,05,10 "), ConstantOperator.createVarchar("%Y,%m,%d"))
.getDate().toString());
assertEquals("2013-05-17T00:00", ScalarOperatorFunctions
.str2Date(ConstantOperator.createVarchar("2013-05-17 12:35:10"),
ConstantOperator.createVarchar("%Y-%m-%d %H:%i:%s")).getDate().toString());
assertEquals("2013-05-17T00:00", ScalarOperatorFunctions
.str2Date(ConstantOperator.createVarchar("13-05-17 12:35:10"),
ConstantOperator.createVarchar("%y-%m-%d %H:%i:%s")).getDate().toString());
assertEquals("1998-05-17T00:00", ScalarOperatorFunctions
.str2Date(ConstantOperator.createVarchar("98-05-17 12:35:10"),
ConstantOperator.createVarchar("%y-%m-%d %H:%i:%s")).getDate().toString());
Assert.assertThrows(DateTimeParseException.class, () -> ScalarOperatorFunctions
.str2Date(ConstantOperator.createVarchar("2019-02-29"),
ConstantOperator.createVarchar("%Y-%m-%d")).getDatetime());
}
|
public TemplateResponse mapToTemplateResponse(ReviewGroup reviewGroup, Template template) {
List<SectionResponse> sectionResponses = template.getSectionIds()
.stream()
.map(templateSection -> mapToSectionResponse(templateSection, reviewGroup))
.toList();
return new TemplateResponse(
template.getId(),
reviewGroup.getReviewee(),
reviewGroup.getProjectName(),
sectionResponses
);
}
|
@Test
void 가이드라인이_없는_경우_가이드_라인을_제공하지_않는다() {
// given
Question question = new Question(true, QuestionType.TEXT, "question", null, 1);
questionRepository.save(question);
OptionGroup optionGroup = new OptionGroup(question.getId(), 1, 2);
optionGroupRepository.save(optionGroup);
OptionItem optionItem = new OptionItem("option", optionGroup.getId(), 1, OptionType.CATEGORY);
optionItemRepository.save(optionItem);
Section section = new Section(VisibleType.ALWAYS, List.of(question.getId()), null, "section name", "header", 1);
sectionRepository.save(section);
Template template = new Template(List.of(section.getId()));
templateRepository.save(template);
ReviewGroup reviewGroup = new ReviewGroup("reviewee name", "project name", "reviewRequestCode", "groupAccessCode");
reviewGroupRepository.save(reviewGroup);
// when
TemplateResponse templateResponse = templateMapper.mapToTemplateResponse(reviewGroup, template);
// then
QuestionResponse questionResponse = templateResponse.sections().get(0).questions().get(0);
assertAll(
() -> assertThat(questionResponse.hasGuideline()).isFalse(),
() -> assertThat(questionResponse.guideline()).isNull()
);
}
|
@Override
public Expression getExpression(String tableName, Alias tableAlias) {
// Only process data permissions when there is a logged-in user
LoginUser loginUser = SecurityFrameworkUtils.getLoginUser();
if (loginUser == null) {
return null;
}
// Only process data permissions for admin-type users
if (ObjectUtil.notEqual(loginUser.getUserType(), UserTypeEnum.ADMIN.getValue())) {
return null;
}
// Obtain the data permission
DeptDataPermissionRespDTO deptDataPermission = loginUser.getContext(CONTEXT_KEY, DeptDataPermissionRespDTO.class);
// Not found in the context, so fetch it via the permission API
if (deptDataPermission == null) {
deptDataPermission = permissionApi.getDeptDataPermission(loginUser.getId()).getCheckedData();
if (deptDataPermission == null) {
log.error("[getExpression][LoginUser({}) returned a null data permission]", JsonUtils.toJsonString(loginUser));
throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) returned no data permission",
loginUser.getId(), tableName, tableAlias.getName()));
}
// Cache it in the context to avoid recomputation
loginUser.setContext(CONTEXT_KEY, deptDataPermission);
}
// Case 1: ALL means everything is visible, so no condition needs to be appended
if (deptDataPermission.getAll()) {
return null;
}
// Case 2: can view neither departments nor self, which means no permission at all
if (CollUtil.isEmpty(deptDataPermission.getDeptIds())
&& Boolean.FALSE.equals(deptDataPermission.getSelf())) {
return new EqualsTo(null, null); // WHERE null = null, which guarantees an empty result set
}
// Case 3: build the Dept and User conditions, then combine them
Expression deptExpression = buildDeptExpression(tableName, tableAlias, deptDataPermission.getDeptIds());
Expression userExpression = buildUserExpression(tableName, tableAlias, deptDataPermission.getSelf(), loginUser.getId());
if (deptExpression == null && userExpression == null) {
// TODO 芋艿: when no condition can be built, do not throw for now; return no data instead
log.warn("[getExpression][LoginUser({}) Table({}/{}) DeptDataPermission({}) built an empty condition]",
JsonUtils.toJsonString(loginUser), tableName, tableAlias, JsonUtils.toJsonString(deptDataPermission));
// throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) built an empty condition",
// loginUser.getId(), tableName, tableAlias.getName()));
return EXPRESSION_NULL;
}
if (deptExpression == null) {
return userExpression;
}
if (userExpression == null) {
return deptExpression;
}
// Currently, with specified departments plus self-visibility, the conditions are OR'ed: WHERE (dept_id IN ? OR user_id = ?)
return new Parenthesis(new OrExpression(deptExpression, userExpression));
}
|
@Test // build the Dept and User conditions (neither column matches)
public void testGetExpression_noDeptColumn_noSelfColumn() {
try (MockedStatic<SecurityFrameworkUtils> securityFrameworkUtilsMock
= mockStatic(SecurityFrameworkUtils.class)) {
// prepare parameters
String tableName = "t_user";
Alias tableAlias = new Alias("u");
// mock method (LoginUser)
LoginUser loginUser = randomPojo(LoginUser.class, o -> o.setId(1L)
.setUserType(UserTypeEnum.ADMIN.getValue()));
securityFrameworkUtilsMock.when(SecurityFrameworkUtils::getLoginUser).thenReturn(loginUser);
// mock method (DeptDataPermissionRespDTO)
DeptDataPermissionRespDTO deptDataPermission = new DeptDataPermissionRespDTO()
.setDeptIds(SetUtils.asSet(10L, 20L)).setSelf(true);
when(permissionApi.getDeptDataPermission(same(1L))).thenReturn(success(deptDataPermission));
// invoke
Expression expression = rule.getExpression(tableName, tableAlias);
// assert
assertSame(EXPRESSION_NULL, expression);
assertSame(deptDataPermission, loginUser.getContext(DeptDataPermissionRule.CONTEXT_KEY, DeptDataPermissionRespDTO.class));
}
}
|
@Override
public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) {
return sqlStatementContext instanceof InsertStatementContext && ((InsertStatementContext) sqlStatementContext).containsInsertColumns();
}
|
@Test
void assertIsGenerateSQLTokenWithInsertColumns() {
InsertStatementContext insertStatementContext = mock(InsertStatementContext.class, RETURNS_DEEP_STUBS);
when(insertStatementContext.containsInsertColumns()).thenReturn(true);
assertTrue(new EncryptInsertDerivedColumnsTokenGenerator(mock(EncryptRule.class)).isGenerateSQLToken(insertStatementContext));
}
|
@Override
public ComponentCreationData createProjectAndBindToDevOpsPlatform(DbSession dbSession, CreationMethod creationMethod, Boolean monorepo, @Nullable String projectKey,
@Nullable String projectName) {
String pat = findPersonalAccessTokenOrThrow(dbSession, almSettingDto);
String workspace = ofNullable(almSettingDto.getAppId())
.orElseThrow(() -> new IllegalArgumentException(String.format("workspace for alm setting %s is missing", almSettingDto.getKey())));
Repository repo = bitbucketCloudRestClient.getRepo(pat, workspace, devOpsProjectDescriptor.repositoryIdentifier());
ComponentCreationData componentCreationData = projectCreator.createProject(
dbSession,
getProjectKey(workspace, projectKey, repo),
getProjectName(projectName, repo),
repo.getMainBranch().getName(),
creationMethod);
ProjectDto projectDto = Optional.ofNullable(componentCreationData.projectDto()).orElseThrow();
createProjectAlmSettingDto(dbSession, repo.getSlug(), projectDto, almSettingDto, monorepo);
return componentCreationData;
}
|
@Test
void createProjectAndBindToDevOpsPlatform_whenNoKeyAndNameSpecified_generatesKeyAndUsersBitbucketRepositoryName() {
mockPatForUser();
when(almSettingDto.getAppId()).thenReturn(WORKSPACE);
mockBitbucketCloudRepository();
String generatedProjectKey = "generatedProjectKey";
when(projectKeyGenerator.generateUniqueProjectKey(WORKSPACE, REPOSITORY_SLUG)).thenReturn(generatedProjectKey);
mockProjectCreation(generatedProjectKey, REPOSITORY_NAME);
underTest.createProjectAndBindToDevOpsPlatform(mock(DbSession.class), CreationMethod.ALM_IMPORT_API, true, null, null);
ArgumentCaptor<ProjectAlmSettingDto> projectAlmSettingCaptor = ArgumentCaptor.forClass(ProjectAlmSettingDto.class);
verify(dbClient.projectAlmSettingDao()).insertOrUpdate(any(), projectAlmSettingCaptor.capture(), eq(ALM_SETTING_KEY), eq(REPOSITORY_NAME), eq(generatedProjectKey));
ProjectAlmSettingDto createdProjectAlmSettingDto = projectAlmSettingCaptor.getValue();
assertThat(createdProjectAlmSettingDto.getAlmSettingUuid()).isEqualTo(ALM_SETTING_UUID);
assertThat(createdProjectAlmSettingDto.getAlmRepo()).isEqualTo(REPOSITORY_SLUG);
assertThat(createdProjectAlmSettingDto.getProjectUuid()).isEqualTo(PROJECT_UUID);
assertThat(createdProjectAlmSettingDto.getMonorepo()).isTrue();
}
|
@Override
public Boolean exists(final String key) {
try {
List<Instance> instances = namingService.selectInstances(key, groupName, true);
return !instances.isEmpty();
} catch (NacosException e) {
LOGGER.error("Error checking Nacos service existence: {}", e.getMessage(), e);
throw new ShenyuException(e);
}
}
|
@Test
void testExists() throws NacosException {
List<Instance> mockInstances = new ArrayList<>();
mockInstances.add(mock(Instance.class));
// Mock this service exists
when(namingService.selectInstances(anyString(), anyString(), anyBoolean())).thenReturn(mockInstances);
assertTrue(nacosDiscoveryServiceUnderTest.exists("key"));
// Mock the service does not exist
when(namingService.selectInstances(anyString(), anyString(), anyBoolean())).thenReturn(Collections.emptyList());
assertFalse(nacosDiscoveryServiceUnderTest.exists("key"));
// Mock the throwing of NacosException
when(namingService.selectInstances(anyString(), anyString(), anyBoolean())).thenThrow(new NacosException());
assertThrows(ShenyuException.class, () -> nacosDiscoveryServiceUnderTest.exists("key"));
}
|
@Override
public void addProducer(ConnectionContext context, ProducerInfo info) throws Exception {
// JMS allows producers to be created without first specifying a destination. In these cases, every send
// operation must specify a destination. Because of this, we only authorize 'addProducer' if a destination is
// specified. If not specified, the authz check in the 'send' method below will ensure authorization.
if (info.getDestination() != null) {
DestinationAction action = new DestinationAction(context, info.getDestination(), "write");
assertAuthorized(action, "write to");
}
super.addProducer(context, info);
}
|
@Test
public void testAddProducerWithoutDestination() throws Exception {
Subject subject = new PermsSubject();
ConnectionContext context = createContext(subject);
ProducerInfo info = new ProducerInfo(null);
filter.addProducer(context, info);
}
|
static Properties readProps(List<String> producerProps, String producerConfig) throws IOException {
Properties props = new Properties();
if (producerConfig != null) {
props.putAll(Utils.loadProps(producerConfig));
}
if (producerProps != null)
for (String prop : producerProps) {
String[] pieces = prop.split("=");
if (pieces.length != 2)
throw new IllegalArgumentException("Invalid property: " + prop);
props.put(pieces[0], pieces[1]);
}
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
if (props.getProperty(ProducerConfig.CLIENT_ID_CONFIG) == null) {
props.put(ProducerConfig.CLIENT_ID_CONFIG, "perf-producer-client");
}
return props;
}
|
@Test
public void testClientIdOverride() throws Exception {
List<String> producerProps = Collections.singletonList("client.id=producer-1");
Properties prop = ProducerPerformance.readProps(producerProps, null);
assertNotNull(prop);
assertEquals("producer-1", prop.getProperty("client.id"));
}
|
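readProps splits each override on "=" and rejects anything that does not yield exactly two pieces. A standalone sketch of that parsing; the limit-2 split here is a deliberate variation that tolerates '=' inside values, which the stricter split("=") above does not:

import java.util.List;
import java.util.Properties;

public class PropsParseSketch {
    static Properties parse(List<String> pairs) {
        Properties props = new Properties();
        for (String pair : pairs) {
            // Limit of 2: everything after the first '=' belongs to the value.
            String[] pieces = pair.split("=", 2);
            if (pieces.length != 2) {
                throw new IllegalArgumentException("Invalid property: " + pair);
            }
            props.put(pieces[0], pieces[1]);
        }
        return props;
    }

    public static void main(String[] args) {
        Properties p = parse(List.of("client.id=producer-1", "acks=all"));
        System.out.println(p.getProperty("client.id")); // producer-1
        System.out.println(p.getProperty("acks"));      // all
    }
}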
static Optional<String> globalResponseError(Optional<ClientResponse> response) {
if (!response.isPresent()) {
return Optional.of("Timeout");
}
if (response.get().authenticationException() != null) {
return Optional.of("AuthenticationException");
}
if (response.get().wasTimedOut()) {
return Optional.of("Disonnected[Timeout]");
}
if (response.get().wasDisconnected()) {
return Optional.of("Disconnected");
}
if (response.get().versionMismatch() != null) {
return Optional.of("UnsupportedVersionException");
}
if (response.get().responseBody() == null) {
return Optional.of("EmptyResponse");
}
if (!(response.get().responseBody() instanceof AssignReplicasToDirsResponse)) {
return Optional.of("ClassCastException");
}
AssignReplicasToDirsResponseData data = ((AssignReplicasToDirsResponse)
response.get().responseBody()).data();
Errors error = Errors.forCode(data.errorCode());
if (error != Errors.NONE) {
return Optional.of("Response-level error: " + error.name());
}
return Optional.empty();
}
|
@Test
public void testGlobalResponseErrorClassCastException() {
assertEquals(Optional.of("ClassCastException"),
AssignmentsManager.globalResponseError(Optional.of(
new ClientResponse(null, null, "", 0, 0, false, false,
null, null, new ApiVersionsResponse(new ApiVersionsResponseData())))));
}
|
@Override
public String toString() {
Map<String, Object> map = new HashMap<>();
map.put(NUM_SERVERS_QUERIED, getNumServersQueried());
map.put(NUM_SERVERS_RESPONDED, getNumServersResponded());
map.put(NUM_DOCS_SCANNED, getNumDocsScanned());
map.put(NUM_ENTRIES_SCANNED_IN_FILTER, getNumEntriesScannedInFilter());
map.put(NUM_ENTRIES_SCANNED_POST_FILTER, getNumEntriesScannedPostFilter());
map.put(NUM_SEGMENTS_QUERIED, getNumSegmentsQueried());
map.put(NUM_SEGMENTS_PROCESSED, getNumSegmentsProcessed());
map.put(NUM_SEGMENTS_MATCHED, getNumSegmentsMatched());
map.put(NUM_CONSUMING_SEGMENTS_QUERIED, getNumConsumingSegmentsQueried());
map.put(MIN_CONSUMING_FRESHNESS_TIME_MS, getMinConsumingFreshnessTimeMs() + "ms");
map.put(TOTAL_DOCS, getTotalDocs());
map.put(NUM_GROUPS_LIMIT_REACHED, isNumGroupsLimitReached());
map.put(BROKER_REDUCE_TIME_MS, getBrokerReduceTimeMs() + "ms");
map.put(TIME_USED_MS, getTimeUsedMs() + "ms");
map.put(PARTIAL_RESULT, isPartialResult());
return map.toString();
}
|
@Test
public void testToString() {
// Run the test
final String result = _executionStatsUnderTest.toString();
// Verify the results
assertNotEquals("", result);
}
|
public static <T extends PipelineOptions> T validate(Class<T> klass, PipelineOptions options) {
return validate(klass, options, false);
}
|
@Test
public void testWhenOptionIsDefinedOnOtherOptionsClassMeetsGroupRequirement() {
RightOptions rightOpts = PipelineOptionsFactory.as(RightOptions.class);
rightOpts.setFoo("true");
rightOpts.setBoth("bar");
LeftOptions leftOpts = PipelineOptionsFactory.as(LeftOptions.class);
leftOpts.setFoo("Untrue");
leftOpts.setBoth("Raise the");
rightOpts.setRunner(CrashingRunner.class);
leftOpts.setRunner(CrashingRunner.class);
PipelineOptionsValidator.validate(RightOptions.class, leftOpts);
PipelineOptionsValidator.validate(LeftOptions.class, rightOpts);
}
|
private void decodeSofaResponse(AbstractByteBuf data, SofaResponse sofaResponse, Map<String, String> head) {
if (head == null) {
throw buildDeserializeError("head is null!");
}
String targetService = head.remove(RemotingConstants.HEAD_TARGET_SERVICE);
if (targetService == null) {
throw buildDeserializeError("HEAD_TARGET_SERVICE is null");
}
String methodName = head.remove(RemotingConstants.HEAD_METHOD_NAME);
if (methodName == null) {
throw buildDeserializeError("HEAD_METHOD_NAME is null");
}
boolean isError = false;
if (StringUtils.TRUE.equals(head.remove(RemotingConstants.HEAD_RESPONSE_ERROR))) {
isError = true;
}
if (!head.isEmpty()) {
sofaResponse.setResponseProps(head);
}
if (isError) {
String errorMessage = (String) decode(data, String.class, head);
if (errorMessage == null) {
errorMessage = "";
}
sofaResponse.setErrorMsg(errorMessage);
} else {
// use the interface and method name to find the parameter types
JavaType respType = jacksonHelper.getResClass(targetService, methodName);
Object result;
try {
result = mapper.readValue(data.array(), respType);
} catch (IOException e) {
throw buildDeserializeError(e.getMessage());
}
sofaResponse.setAppResponse(result);
}
}
|
@Test
public void testDecodeSofaResponse() {
AbstractByteBuf nullByteBuf = serializer.encode(null, null);
JacksonSerializer jacksonSerializer = new JacksonSerializer();
SofaResponse sofaResponse = new SofaResponse();
Map<String, String> ctx = new HashMap<>();
ctx.put(RemotingConstants.HEAD_TARGET_SERVICE, "xxx");
ctx.put(RemotingConstants.HEAD_METHOD_NAME, "xxx");
ctx.put(RemotingConstants.HEAD_RESPONSE_ERROR, "true");
jacksonSerializer.decode(nullByteBuf, sofaResponse, ctx);
Assert.assertTrue(sofaResponse.isError());
Assert.assertEquals("", sofaResponse.getErrorMsg());
}
|
public TermsAggregationBuilder buildTermsAggregation(String name,
TopAggregationDefinition<?> topAggregation, @Nullable Integer numberOfTerms) {
TermsAggregationBuilder termsAggregation = AggregationBuilders.terms(name)
.field(topAggregation.getFilterScope().getFieldName())
.order(order)
.minDocCount(TERM_AGGREGATION_MIN_DOC_COUNT);
if (numberOfTerms != null) {
termsAggregation.size(numberOfTerms);
}
if (subAggregation != null) {
termsAggregation = termsAggregation.subAggregation(subAggregation);
}
return termsAggregation;
}
|
@Test
public void buildTermsAggregation_adds_custom_sub_agg_from_constructor() {
String aggName = randomAlphabetic(10);
SimpleFieldTopAggregationDefinition topAggregation = new SimpleFieldTopAggregationDefinition("bar", false);
Stream.of(
underTestWithCustomSubAgg,
underTestWithCustomsSubAggAndOrder)
.forEach(t -> {
TermsAggregationBuilder agg = t.buildTermsAggregation(aggName, topAggregation, null);
assertThat(agg.getName()).isEqualTo(aggName);
assertThat(agg.field()).isEqualTo(topAggregation.getFilterScope().getFieldName());
assertThat(agg.getSubAggregations()).hasSize(1);
assertThat(agg.getSubAggregations().iterator().next()).isSameAs(customSubAgg);
});
}
|
public static String buildErrorMessage(final Throwable throwable) {
if (throwable == null) {
return "";
}
final List<String> messages = dedup(getErrorMessages(throwable));
final String msg = messages.remove(0);
final String causeMsg = messages.stream()
.filter(s -> !s.isEmpty())
.map(cause -> WordUtils.wrap(PREFIX + cause, 80, "\n\t", true))
.collect(Collectors.joining(System.lineSeparator()));
return causeMsg.isEmpty() ? msg : msg + System.lineSeparator() + causeMsg;
}
|
@Test
public void shouldBuildErrorMessageFromExceptionWithNoMessage() {
assertThat(
buildErrorMessage(new NullPointerException()),
is("java.lang.NullPointerException")
);
}
|
@Override
public long estimate() {
final double raw = (1 / computeE()) * alpha() * m * m;
return applyRangeCorrection(raw);
}
|
@Test
public void testAlpha_withMemoryFootprintOf64() {
DenseHyperLogLogEncoder encoder = new DenseHyperLogLogEncoder(6);
encoder.estimate();
}
|
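Reading the estimate as raw = alpha * m^2 / Z suggests computeE() returns Z, the sum over registers of 2^(-register). A minimal sketch of the raw dense estimate without the range corrections; the alpha constant below uses the standard formula for m >= 128 and is an assumption about this encoder's internals:

public class HllEstimateSketch {
    static double rawEstimate(int[] registers) {
        int m = registers.length;
        double z = 0.0;
        for (int register : registers) {
            z += Math.pow(2.0, -register); // harmonic-mean accumulator
        }
        double alpha = 0.7213 / (1 + 1.079 / m); // standard constant for m >= 128
        return alpha * m * m / z;
    }

    public static void main(String[] args) {
        int[] empty = new int[1 << 6]; // p=6 -> m=64 registers, all zero
        // All-zero registers give z = m, so the raw estimate is alpha * m (~45.4);
        // real implementations replace this with linear counting at small ranges.
        System.out.println(rawEstimate(empty));
    }
}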
private Set<TimelineEntity> getEntities(Path dir, String entityType,
TimelineEntityFilters filters, TimelineDataToRetrieve dataToRetrieve)
throws IOException {
// First sort the selected entities based on created/start time.
Map<Long, Set<TimelineEntity>> sortedEntities =
new TreeMap<>(
new Comparator<Long>() {
@Override
public int compare(Long l1, Long l2) {
return l2.compareTo(l1);
}
}
);
dir = getNormalPath(dir);
if (dir != null) {
RemoteIterator<LocatedFileStatus> fileStatuses = fs.listFiles(dir,
false);
if (fileStatuses != null) {
while (fileStatuses.hasNext()) {
LocatedFileStatus locatedFileStatus = fileStatuses.next();
Path entityFile = locatedFileStatus.getPath();
if (!entityFile.getName()
.contains(TIMELINE_SERVICE_STORAGE_EXTENSION)) {
continue;
}
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(fs.open(entityFile),
StandardCharsets.UTF_8))) {
TimelineEntity entity = readEntityFromFile(reader);
if (!entity.getType().equals(entityType)) {
continue;
}
if (!isTimeInRange(entity.getCreatedTime(),
filters.getCreatedTimeBegin(),
filters.getCreatedTimeEnd())) {
continue;
}
if (filters.getRelatesTo() != null &&
!filters.getRelatesTo().getFilterList().isEmpty() &&
!TimelineStorageUtils.matchRelatesTo(entity,
filters.getRelatesTo())) {
continue;
}
if (filters.getIsRelatedTo() != null &&
!filters.getIsRelatedTo().getFilterList().isEmpty() &&
!TimelineStorageUtils.matchIsRelatedTo(entity,
filters.getIsRelatedTo())) {
continue;
}
if (filters.getInfoFilters() != null &&
!filters.getInfoFilters().getFilterList().isEmpty() &&
!TimelineStorageUtils.matchInfoFilters(entity,
filters.getInfoFilters())) {
continue;
}
if (filters.getConfigFilters() != null &&
!filters.getConfigFilters().getFilterList().isEmpty() &&
!TimelineStorageUtils.matchConfigFilters(entity,
filters.getConfigFilters())) {
continue;
}
if (filters.getMetricFilters() != null &&
!filters.getMetricFilters().getFilterList().isEmpty() &&
!TimelineStorageUtils.matchMetricFilters(entity,
filters.getMetricFilters())) {
continue;
}
if (filters.getEventFilters() != null &&
!filters.getEventFilters().getFilterList().isEmpty() &&
!TimelineStorageUtils.matchEventFilters(entity,
filters.getEventFilters())) {
continue;
}
TimelineEntity entityToBeReturned = createEntityToBeReturned(
entity, dataToRetrieve.getFieldsToRetrieve());
Set<TimelineEntity> entitiesCreatedAtSameTime =
sortedEntities.get(entityToBeReturned.getCreatedTime());
if (entitiesCreatedAtSameTime == null) {
entitiesCreatedAtSameTime = new HashSet<TimelineEntity>();
}
entitiesCreatedAtSameTime.add(entityToBeReturned);
sortedEntities.put(entityToBeReturned.getCreatedTime(),
entitiesCreatedAtSameTime);
}
}
}
}
Set<TimelineEntity> entities = new HashSet<TimelineEntity>();
long entitiesAdded = 0;
for (Set<TimelineEntity> entitySet : sortedEntities.values()) {
for (TimelineEntity entity : entitySet) {
entities.add(entity);
++entitiesAdded;
if (entitiesAdded >= filters.getLimit()) {
return entities;
}
}
}
return entities;
}
|
@Test
void testGetFilteredEntities() throws Exception {
// Get entities based on info filters.
TimelineFilterList infoFilterList = new TimelineFilterList();
infoFilterList.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
Set<TimelineEntity> result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList).build(),
new TimelineDataToRetrieve());
assertEquals(1, result.size());
// Only one entity with ID id_3 should be returned.
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_3")) {
fail("Incorrect filtering based on info filters");
}
}
// Get entities based on config filters.
TimelineFilterList confFilterList = new TimelineFilterList();
confFilterList.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "123"));
confFilterList.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList)
.build(),
new TimelineDataToRetrieve());
assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_3")) {
fail("Incorrect filtering based on config filters");
}
}
// Get entities based on event filters.
TimelineFilterList eventFilters = new TimelineFilterList();
eventFilters.addFilter(
new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_2"));
eventFilters.addFilter(
new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().eventFilters(eventFilters).build(),
new TimelineDataToRetrieve());
assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_3")) {
fail("Incorrect filtering based on event filters");
}
}
// Get entities based on metric filters.
TimelineFilterList metricFilterList = new TimelineFilterList();
metricFilterList.addFilter(new TimelineCompareFilter(
TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList)
.build(),
new TimelineDataToRetrieve());
assertEquals(2, result.size());
// Two entities with IDs' id_1 and id_2 should be returned.
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
fail("Incorrect filtering based on metric filters");
}
}
// Get entities based on complex config filters.
TimelineFilterList list1 = new TimelineFilterList();
list1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "129"));
list1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "def"));
TimelineFilterList list2 = new TimelineFilterList();
list2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
list2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
TimelineFilterList confFilterList1 =
new TimelineFilterList(Operator.OR, list1, list2);
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList1)
.build(),
new TimelineDataToRetrieve());
assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
fail("Incorrect filtering based on config filters");
}
}
TimelineFilterList list3 = new TimelineFilterList();
list3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_1", "123"));
list3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
TimelineFilterList list4 = new TimelineFilterList();
list4.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
TimelineFilterList confFilterList2 =
new TimelineFilterList(Operator.OR, list3, list4);
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList2)
.build(),
new TimelineDataToRetrieve());
assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
fail("Incorrect filtering based on config filters");
}
}
TimelineFilterList confFilterList3 = new TimelineFilterList();
confFilterList3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_1", "127"));
confFilterList3.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList3)
.build(),
new TimelineDataToRetrieve());
assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2")) {
fail("Incorrect filtering based on config filters");
}
}
TimelineFilterList confFilterList4 = new TimelineFilterList();
confFilterList4.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
confFilterList4.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_3", "def"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList4)
.build(),
new TimelineDataToRetrieve());
assertEquals(0, result.size());
TimelineFilterList confFilterList5 = new TimelineFilterList(Operator.OR);
confFilterList5.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
confFilterList5.addFilter(new TimelineKeyValueFilter(
TimelineCompareOp.EQUAL, "config_3", "def"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().configFilters(confFilterList5)
.build(),
new TimelineDataToRetrieve());
assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2")) {
fail("Incorrect filtering based on config filters");
}
}
// Get entities based on complex metric filters.
TimelineFilterList list6 = new TimelineFilterList();
list6.addFilter(new TimelineCompareFilter(
TimelineCompareOp.GREATER_THAN, "metric1", 200));
list6.addFilter(new TimelineCompareFilter(
TimelineCompareOp.EQUAL, "metric3", 23));
TimelineFilterList list7 = new TimelineFilterList();
list7.addFilter(new TimelineCompareFilter(
TimelineCompareOp.GREATER_OR_EQUAL, "metric2", 74));
TimelineFilterList metricFilterList1 =
new TimelineFilterList(Operator.OR, list6, list7);
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList1)
.build(),
new TimelineDataToRetrieve());
assertEquals(2, result.size());
// Two entities with IDs id_2 and id_3 should be returned.
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_2") && !entity.getId().equals("id_3")) {
fail("Incorrect filtering based on metric filters");
}
}
TimelineFilterList metricFilterList2 = new TimelineFilterList();
metricFilterList2.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, "metric2", 70));
metricFilterList2.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList2)
.build(),
new TimelineDataToRetrieve());
assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1")) {
fail("Incorrect filtering based on metric filters");
}
}
TimelineFilterList metricFilterList3 = new TimelineFilterList();
metricFilterList3.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
metricFilterList3.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList3)
.build(),
new TimelineDataToRetrieve());
assertEquals(0, result.size());
TimelineFilterList metricFilterList4 = new TimelineFilterList(Operator.OR);
metricFilterList4.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
metricFilterList4.addFilter(new TimelineCompareFilter(
TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList4)
.build(),
new TimelineDataToRetrieve());
assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
fail("Incorrect filtering based on metric filters");
}
}
TimelineFilterList metricFilterList5 =
new TimelineFilterList(new TimelineCompareFilter(
TimelineCompareOp.NOT_EQUAL, "metric2", 74));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().metricFilters(metricFilterList5)
.build(),
new TimelineDataToRetrieve());
assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
fail("Incorrect filtering based on metric filters");
}
}
TimelineFilterList infoFilterList1 = new TimelineFilterList();
infoFilterList1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
infoFilterList1.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "info4", 20));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList1)
.build(),
new TimelineDataToRetrieve());
assertEquals(0, result.size());
TimelineFilterList infoFilterList2 = new TimelineFilterList(Operator.OR);
infoFilterList2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
infoFilterList2.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info1", "val1"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList2)
.build(),
new TimelineDataToRetrieve());
assertEquals(2, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
fail("Incorrect filtering based on info filters");
}
}
TimelineFilterList infoFilterList3 = new TimelineFilterList();
infoFilterList3.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
infoFilterList3.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList3)
.build(),
new TimelineDataToRetrieve());
assertEquals(0, result.size());
TimelineFilterList infoFilterList4 = new TimelineFilterList(Operator.OR);
infoFilterList4.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
infoFilterList4.addFilter(
new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
result = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
"app", null),
new TimelineEntityFilters.Builder().infoFilters(infoFilterList4)
.build(),
new TimelineDataToRetrieve());
assertEquals(1, result.size());
for (TimelineEntity entity : result) {
if (!entity.getId().equals("id_1")) {
fail("Incorrect filtering based on info filters");
}
}
}
|
@Override
public void update(List<V> values) {
throw MODIFICATION_ATTEMPT_ERROR;
}
|
@Test
void testUpdate() throws Exception {
List<Long> list = getStateContents();
assertThat(list).containsExactly(42L);
assertThatThrownBy(() -> listState.add(54L))
.isInstanceOf(UnsupportedOperationException.class);
}
|
public List<MappingField> resolveAndValidateFields(
List<MappingField> userFields,
Map<String, String> options,
NodeEngine nodeEngine
) {
final InternalSerializationService serializationService = (InternalSerializationService) nodeEngine
.getSerializationService();
final AbstractRelationsStorage relationsStorage = ((CalciteSqlOptimizer) nodeEngine.getSqlService().getOptimizer())
.relationsStorage();
// normalize and validate the names and external names
for (MappingField field : userFields) {
String name = field.name();
String externalName = field.externalName();
if (externalName == null) {
if (name.equals(KEY) || name.equals(VALUE)) {
externalName = name;
} else {
externalName = VALUE_PREFIX + name;
}
field.setExternalName(externalName);
}
if ((name.equals(KEY) && !externalName.equals(KEY))
|| (name.equals(VALUE) && !externalName.equals(VALUE))) {
throw QueryException.error("Cannot rename field: '" + name + '\'');
}
if (!EXT_NAME_PATTERN.matcher(externalName).matches()) {
throw QueryException.error("Invalid external name: " + externalName);
}
}
Stream<MappingField> keyFields = resolveAndValidateFields(true, userFields, options,
serializationService, relationsStorage);
Stream<MappingField> valueFields = resolveAndValidateFields(false, userFields, options,
serializationService, relationsStorage);
Map<String, MappingField> fields = Stream.concat(keyFields, valueFields)
.collect(LinkedHashMap::new, (map, field) -> map.putIfAbsent(field.name(), field), Map::putAll);
if (fields.isEmpty()) {
throw QueryException.error("The resolved field list is empty");
}
return new ArrayList<>(fields.values());
}
|
@Test
public void when_keyFieldsEmpty_then_doesNotFail() {
Map<String, String> options = ImmutableMap.of(
OPTION_KEY_FORMAT, JAVA_FORMAT,
OPTION_VALUE_FORMAT, JAVA_FORMAT
);
given(resolver.resolveAndValidateFields(eq(true), eq(emptyList()), eq(options), eq(ss)))
.willReturn(Stream.empty());
given(resolver.resolveAndValidateFields(eq(false), eq(emptyList()), eq(options), eq(ss)))
.willReturn(Stream.of(field("this", QueryDataType.INT)));
List<MappingField> fields = resolvers.resolveAndValidateFields(emptyList(), options, nodeEngine);
assertThat(fields).containsExactly(field("this", QueryDataType.INT));
}
|
public void resolveAssertionConsumerService(AuthenticationRequest authenticationRequest) throws SamlValidationException {
// set URL if set in authnRequest
final String authnAcsURL = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceURL();
if (authnAcsURL != null) {
authenticationRequest.setAssertionConsumerURL(authnAcsURL);
return;
}
// search url from metadata endpoints
final Integer authnAcsIdx = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceIndex();
List<Endpoint> endpoints = authenticationRequest.getConnectionEntity().getRoleDescriptors().get(0).getEndpoints(AssertionConsumerService.DEFAULT_ELEMENT_NAME);
if (endpoints.isEmpty()) {
throw new SamlValidationException("Authentication: Assertion Consumer Service not found in metadata");
}
if (authnAcsIdx != null && endpoints.size() <= authnAcsIdx) {
throw new SamlValidationException("Authentication: Assertion Consumer Index is out of bounds");
}
// TODO: check if this statement is correct
if (endpoints.size() == 1) {
authenticationRequest.setAssertionConsumerURL(endpoints.get(0).getLocation());
return;
}
if (authnAcsIdx == null) {
AssertionConsumerService defaultAcs = endpoints.stream()
.filter(e -> e instanceof AssertionConsumerService)
.map(acs -> (AssertionConsumerService) acs)
.filter(IndexedEndpoint::isDefault)
.findAny()
.orElse(null);
if (defaultAcs == null) {
throw new SamlValidationException("Authentication: There is no default AssertionConsumerService");
}
authenticationRequest.setAssertionConsumerURL(defaultAcs.getLocation());
return;
}
authenticationRequest.setAssertionConsumerURL(endpoints.get(authnAcsIdx).getLocation());
}
|
@Test
void resolveAcsUrlWithIndex0InMultiAcsMetadata() throws SamlValidationException {
AuthnRequest authnRequest = OpenSAMLUtils.buildSAMLObject(AuthnRequest.class);
authnRequest.setAssertionConsumerServiceIndex(0);
AuthenticationRequest authenticationRequest = new AuthenticationRequest();
authenticationRequest.setAuthnRequest(authnRequest);
authenticationRequest.setConnectionEntity(MetadataParser.readMetadata(stubsMultiAcsMetadataFile, CONNECTION_ENTITY_ID));
assertionConsumerServiceUrlService.resolveAssertionConsumerService(authenticationRequest);
assertEquals("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS", authenticationRequest.getAssertionConsumerURL());
}
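// Reading aid for the resolution order implemented above (no extra logic implied):
//   1. an AssertionConsumerServiceURL carried in the AuthnRequest wins outright;
//   2. otherwise metadata endpoints are consulted, and a single endpoint is used as-is;
//   3. with no index supplied, the endpoint flagged isDefault is used (error if none);
//   4. with an index supplied, endpoints.get(index) is used after the bounds check.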
|
public Vector3 toVector() {
return position;
}
|
@Test
public void testToVector() throws Exception {
World world = mock(World.class);
Vector3 position = Vector3.at(1, 1, 1);
Location location = new Location(world, position);
assertEquals(position, location.toVector());
}
|
public static long getNextScheduledTime(final String cronEntry, long currentTime) throws MessageFormatException {
long result = 0;
if (cronEntry == null || cronEntry.length() == 0) {
return result;
}
// Handle the once per minute case "* * * * *"
// starting the next event at the top of the minute.
if (cronEntry.equals("* * * * *")) {
result = currentTime + 60 * 1000;
result = result / 60000 * 60000;
return result;
}
List<String> list = tokenize(cronEntry);
List<CronEntry> entries = buildCronEntries(list);
Calendar working = Calendar.getInstance();
working.setTimeInMillis(currentTime);
working.set(Calendar.SECOND, 0);
CronEntry minutes = entries.get(MINUTES);
CronEntry hours = entries.get(HOURS);
CronEntry dayOfMonth = entries.get(DAY_OF_MONTH);
CronEntry month = entries.get(MONTH);
CronEntry dayOfWeek = entries.get(DAY_OF_WEEK);
// Start at the top of the next minute, cron is only guaranteed to be
// run on the minute.
int timeToNextMinute = 60 - working.get(Calendar.SECOND);
working.add(Calendar.SECOND, timeToNextMinute);
// If it's already too late in the day this will roll us over to tomorrow
// so we'll need to check again when done updating month and day.
int currentMinutes = working.get(Calendar.MINUTE);
if (!isCurrent(minutes, currentMinutes)) {
int nextMinutes = getNext(minutes, currentMinutes, working);
working.add(Calendar.MINUTE, nextMinutes);
}
int currentHours = working.get(Calendar.HOUR_OF_DAY);
if (!isCurrent(hours, currentHours)) {
int nextHour = getNext(hours, currentHours, working);
working.add(Calendar.HOUR_OF_DAY, nextHour);
}
// We can roll into the next month here which might violate the cron setting
// rules so we check once then recheck again after applying the month settings.
doUpdateCurrentDay(working, dayOfMonth, dayOfWeek);
// Start by checking if we are in the right month, if not then calculations
// need to start from the beginning of the month to ensure that we don't end
// up on the wrong day. (Can happen when DAY_OF_WEEK is set and current time
// is ahead of the day of the week to execute on).
doUpdateCurrentMonth(working, month);
// Now Check day of week and day of month together since they can be specified
// together in one entry, if both "day of month" and "day of week" are restricted
// (not "*"), then either the "day of month" field (3) or the "day of week" field
// (5) must match the current day or the Calendar must be advanced.
doUpdateCurrentDay(working, dayOfMonth, dayOfWeek);
// Now we can choose the correct hour and minute of the day in question.
currentHours = working.get(Calendar.HOUR_OF_DAY);
if (!isCurrent(hours, currentHours)) {
int nextHour = getNext(hours, currentHours, working);
working.add(Calendar.HOUR_OF_DAY, nextHour);
}
currentMinutes = working.get(Calendar.MINUTE);
if (!isCurrent(minutes, currentMinutes)) {
int nextMinutes = getNext(minutes, currentMinutes, working);
working.add(Calendar.MINUTE, nextMinutes);
}
result = working.getTimeInMillis();
if (result <= currentTime) {
throw new ArithmeticException("Unable to compute next scheduled execution time.");
}
return result;
}
|
@Test
public void testGetNextTimeHours() throws MessageFormatException {
String test = "* 1 * * *";
Calendar calendar = Calendar.getInstance();
calendar.set(1972, 2, 2, 17, 10, 0);
long current = calendar.getTimeInMillis();
long next = CronParser.getNextScheduledTime(test, current);
calendar.setTimeInMillis(next);
long result = next - current;
long expected = 60 * 1000 * 60 * 8 + 60 * 1000;
assertEquals(expected, result);
}
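// Hedged sketch: the "* * * * *" fast path above finds the top of the next minute by
// adding one minute and truncating with integer division. Extracted for illustration:
static long nextTopOfMinute(long currentTimeMillis) {
    // always minute-aligned and strictly after currentTimeMillis
    return (currentTimeMillis + 60_000L) / 60_000L * 60_000L;
}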
|
public static <T> Flattened<T> flattenedSchema() {
return new AutoValue_Select_Flattened.Builder<T>()
.setNameFn(CONCAT_FIELD_NAMES)
.setNameOverrides(Collections.emptyMap())
.build();
}
|
@Test
@Category(NeedsRunner.class)
public void testFlatSchemaWith2DArrayNestedField() {
Schema banksSchema = Schema.builder().addStringField("name").addStringField("address").build();
Schema transactionSchema =
Schema.builder()
.addArrayField("banks", Schema.FieldType.row(banksSchema))
.addDoubleField("purchaseAmount")
.build();
Schema nestedSchema =
Schema.builder()
.addArrayField("transactions", Schema.FieldType.row(transactionSchema))
.build();
String bankName1 = "bank1_1";
String bankName2 = "bank1_2";
String bankName3 = "bank2_1";
String bankName4 = "bank2_2";
String bankAddress1 = "address1_1";
String bankAddress2 = "address1_2";
String bankAddress3 = "address2_1";
String bankAddress4 = "address2_2";
double purchaseAmount1 = 1.0;
double purchaseAmount2 = 2.0;
Row bank1 = Row.withSchema(banksSchema).addValues(bankName1, bankAddress1).build();
Row bank2 = Row.withSchema(banksSchema).addValues(bankName2, bankAddress2).build();
Row bank3 = Row.withSchema(banksSchema).addValues(bankName3, bankAddress3).build();
Row bank4 = Row.withSchema(banksSchema).addValues(bankName4, bankAddress4).build();
Row transactionOne =
Row.withSchema(transactionSchema).addArray(bank1, bank2).addValue(purchaseAmount1).build();
Row transactionTwo =
Row.withSchema(transactionSchema).addArray(bank3, bank4).addValue(purchaseAmount2).build();
Row row = Row.withSchema(nestedSchema).addArray(transactionOne, transactionTwo).build();
PCollection<Row> unnested =
pipeline.apply(Create.of(row).withRowSchema(nestedSchema)).apply(Select.flattenedSchema());
Schema expectedUnnestedSchema =
Schema.builder()
.addArrayField("transactions_purchaseAmount", FieldType.DOUBLE)
.addArrayField("transactions_banks_name", FieldType.array(FieldType.STRING))
.addArrayField("transactions_banks_address", FieldType.array(FieldType.STRING))
.build();
assertEquals(expectedUnnestedSchema, unnested.getSchema());
Row expectedUnnestedRow =
Row.withSchema(unnested.getSchema())
.addArray(purchaseAmount1, purchaseAmount2)
.addArray(Arrays.asList(bankName1, bankName2), Arrays.asList(bankName3, bankName4))
.addArray(
Arrays.asList(bankAddress1, bankAddress2),
Arrays.asList(bankAddress3, bankAddress4))
.build();
PAssert.that(unnested).containsInAnyOrder(expectedUnnestedRow);
pipeline.run();
}
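// Hedged sketch of the naming policy implied by the expected schema above: the default
// CONCAT_FIELD_NAMES joins the path to a nested field with '_', overridable through the
// name overrides seen in flattenedSchema()'s builder. A minimal stand-in:
static String concatFieldNames(java.util.List<String> fieldPath) {
    return String.join("_", fieldPath); // e.g. [transactions, banks, name] -> "transactions_banks_name"
}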
|
public static String getTpContentMd5(ThreadPoolParameter config) {
return Md5Util.md5Hex(ContentUtil.getPoolContent(config), "UTF-8");
}
|
@Test
public void assertGetTpContentMd5() {
final ThreadPoolParameterInfo threadPoolParameterInfo = new ThreadPoolParameterInfo();
final String mockContent = "mockContent";
final String mockContentMd5 = "34cf17bc632ece6e4c81a4ce8aa97d5e";
try (final MockedStatic<ContentUtil> mockedContentUtil = mockStatic(ContentUtil.class)) {
mockedContentUtil.when(() -> ContentUtil.getPoolContent(threadPoolParameterInfo)).thenReturn(mockContent);
final String result = Md5Util.getTpContentMd5(threadPoolParameterInfo);
Assert.isTrue(result.equals(mockContentMd5));
mockedContentUtil.verify(() -> ContentUtil.getPoolContent(threadPoolParameterInfo), times(1));
}
}
|
public void add() {
add(1L, defaultPosition);
}
|
@Test
final void testAdd() {
final String metricName = "unitTestCounter";
Counter c = receiver.declareCounter(metricName);
c.add();
Bucket b = receiver.getSnapshot();
final Map<String, List<Entry<Point, UntypedMetric>>> valuesByMetricName = b.getValuesByMetricName();
assertEquals(1, valuesByMetricName.size());
List<Entry<Point, UntypedMetric>> x = valuesByMetricName.get(metricName);
assertEquals(1, x.size());
assertEquals(Point.emptyPoint(), x.get(0).getKey());
assertEquals(1L, x.get(0).getValue().getCount());
}
|
public static Instant parseDateTime(String s) throws DateTimeParseException {
ValidationUtils.checkArgument(Objects.nonNull(s), "Input String cannot be null.");
try {
return Instant.ofEpochMilli(Long.parseLong(s));
} catch (NumberFormatException e) {
return Instant.parse(s);
}
}
|
@Test
public void testParseDateTimeWithNull() {
assertThrows(IllegalArgumentException.class, () -> {
DateTimeUtils.parseDateTime(null);
});
}
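// Hedged usage sketch: parseDateTime accepts either epoch milliseconds or an ISO-8601
// instant, because Long.parseLong is attempted first and Instant.parse is the fallback.
static void parseDateTimeExamples() {
    java.time.Instant a = DateTimeUtils.parseDateTime("1700000000000");        // epoch-millis path
    java.time.Instant b = DateTimeUtils.parseDateTime("2023-11-14T22:13:20Z"); // Instant.parse fallback
    assert a.equals(b); // 1_700_000_000_000 ms is exactly 2023-11-14T22:13:20Z
}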
|
public static void addCompactionPendingMetric(final StreamsMetricsImpl streamsMetrics,
final RocksDBMetricContext metricContext,
final Gauge<BigInteger> valueProvider) {
addMutableMetric(
streamsMetrics,
metricContext,
valueProvider,
COMPACTION_PENDING,
COMPACTION_PENDING_DESCRIPTION
);
}
|
@Test
public void shouldAddCompactionPendingMetric() {
final String name = "compaction-pending";
final String description = "Reports 1 if at least one compaction is pending, otherwise it reports 0";
runAndVerifyMutableMetric(
name,
description,
() -> RocksDBMetrics.addCompactionPendingMetric(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER)
);
}
|
public void displayGiant(GiantModel giant) {
LOGGER.info(giant.toString());
}
|
@Test
void testDisplayGiant() {
GiantModel giantModel = new GiantModel("giant1", Health.HEALTHY, Fatigue.ALERT,
Nourishment.SATURATED);
GiantView giantView = new GiantView();
assertDoesNotThrow(() -> giantView.displayGiant(giantModel));
}
|
public CompletableFuture<WorkerStatusResponse> getWorkerStatus() {
WorkerStatusRequest request =
WorkerStatusRequest.newBuilder().setId(idGenerator.getId()).build();
return getWorkerStatus(request);
}
|
@Test
@SuppressWarnings("FutureReturnValueIgnored")
public void testGetWorkerStatusRequestSent() {
client.getWorkerStatus();
verify(mockObserver).onNext(any(WorkerStatusRequest.class));
}
|
public static Response executeRequest(String requestUrl, OAuth20Service scribe, OAuth2AccessToken accessToken) throws IOException {
OAuthRequest request = new OAuthRequest(Verb.GET, requestUrl);
scribe.signRequest(accessToken, request);
try {
Response response = scribe.execute(request);
if (!response.isSuccessful()) {
throw unexpectedResponseCode(requestUrl, response);
}
return response;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IllegalStateException(e);
} catch (ExecutionException e) {
throw new IllegalStateException(e);
}
}
|
@Test
public void fail_to_execute_request() throws IOException {
mockWebServer.enqueue(new MockResponse().setResponseCode(404).setBody("Error!"));
assertThatThrownBy(() -> executeRequest(serverUrl + "/test", oAuth20Service, auth2AccessToken))
.isInstanceOf(IllegalStateException.class)
.hasMessage(format("Fail to execute request '%s/test'. HTTP code: 404, response: Error!", serverUrl));
}
|
@Override
public byte[] serialize(final String topic, final T data) {
try {
return delegate.serialize(topic, data);
} catch (final RuntimeException e) {
processingLogger.error(new SerializationError<>(e, Optional.of(data), topic, isKey));
throw e;
}
}
|
@Test
public void shouldThrowIfDelegateThrows() {
// Given:
when(delegate.serialize(any(), any())).thenThrow(ERROR);
// When:
final RuntimeException e = assertThrows(
RuntimeException.class,
() -> serializer.serialize("t", SOME_ROW)
);
// Then:
assertThat(e, is(ERROR));
}
|
LetterComposite messageFromOrcs() {
var words = List.of(
new Word('W', 'h', 'e', 'r', 'e'),
new Word('t', 'h', 'e', 'r', 'e'),
new Word('i', 's'),
new Word('a'),
new Word('w', 'h', 'i', 'p'),
new Word('t', 'h', 'e', 'r', 'e'),
new Word('i', 's'),
new Word('a'),
new Word('w', 'a', 'y')
);
return new Sentence(words);
}
|
@Test
void testMessageFromOrcs() {
final var messenger = new Messenger();
testMessage(
messenger.messageFromOrcs(),
"Where there is a whip there is a way."
);
}
|
public static Map<String, String> toMap(List<String> queryString) {
return queryString == null ? null : queryString
.stream()
.map(s -> {
String[] split = s.split("[: ]+");
if (split.length < 2 || split[0] == null || split[0].isEmpty()) {
throw new HttpStatusException(HttpStatus.UNPROCESSABLE_ENTITY, "Invalid queryString parameter");
}
return new AbstractMap.SimpleEntry<>(
split[0],
s.substring(s.indexOf(":") + 1).trim()
);
})
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
|
@Test
void toMap() {
final Map<String, String> resultMap = RequestUtils.toMap(List.of("timestamp:2023-12-18T14:32:14Z"));
assertThat(resultMap.get("timestamp"), is("2023-12-18T14:32:14Z"));
}
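// Hedged usage sketch: the regex "[: ]+" above only locates the key; the value is
// re-extracted from the first ':' onward and trimmed, so values may themselves contain
// colons, as the timestamp in the test shows:
static void toMapExamples() {
    java.util.Map<String, String> m = RequestUtils.toMap(java.util.List.of(
        "level: INFO",                      // key "level", value "INFO" (leading space trimmed)
        "timestamp:2023-12-18T14:32:14Z")); // colons inside the value are preserved
    assert "INFO".equals(m.get("level"));
    assert "2023-12-18T14:32:14Z".equals(m.get("timestamp"));
}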
|
@Override
public StageBundleFactory forStage(ExecutableStage executableStage) {
return new SimpleStageBundleFactory(executableStage);
}
|
@Test
public void doesNotCacheDifferentEnvironments() throws Exception {
Environment envFoo = Environment.newBuilder().setUrn("dummy:urn:another").build();
RemoteEnvironment remoteEnvFoo = mock(RemoteEnvironment.class);
InstructionRequestHandler fooInstructionHandler = mock(InstructionRequestHandler.class);
Map<String, EnvironmentFactory.Provider> envFactoryProviderMapFoo =
ImmutableMap.of(
environment.getUrn(), envFactoryProvider, envFoo.getUrn(), envFactoryProvider);
when(envFactory.createEnvironment(eq(envFoo), any())).thenReturn(remoteEnvFoo);
when(remoteEnvFoo.getInstructionRequestHandler()).thenReturn(fooInstructionHandler);
// Don't bother creating a distinct instruction response because we don't use it here.
when(fooInstructionHandler.handle(any()))
.thenReturn(CompletableFuture.completedFuture(instructionResponse));
try (DefaultJobBundleFactory bundleFactory =
createDefaultJobBundleFactory(envFactoryProviderMapFoo)) {
bundleFactory.forStage(getExecutableStage(environment));
bundleFactory.forStage(getExecutableStage(envFoo));
verify(envFactory).createEnvironment(eq(environment), any());
verify(envFactory).createEnvironment(eq(envFoo), any());
verifyNoMoreInteractions(envFactory);
}
}
|
@Override
public Map<String, Validator> getValidations() {
return ImmutableMap.<String, Validator>builder()
.put(USERNAME, new LimitedStringValidator(1, MAX_USERNAME_LENGTH))
.put(PASSWORD, new FilledStringValidator())
.put(EMAIL, new LimitedStringValidator(1, MAX_EMAIL_LENGTH))
.put(FIRST_NAME, new LimitedOptionalStringValidator(MAX_FIRST_LAST_NAME_LENGTH))
.put(LAST_NAME, new LimitedOptionalStringValidator(MAX_FIRST_LAST_NAME_LENGTH))
.put(FULL_NAME, new LimitedOptionalStringValidator(MAX_FULL_NAME_LENGTH))
.put(PERMISSIONS, new ListValidator())
.put(ROLES, new ListValidator(true))
.build();
}
|
@Test
public void testLastNameLengthValidation() {
user = createUserImpl(null, null, null);
ValidationResult result = user.getValidations().get(UserImpl.LAST_NAME)
.validate(StringUtils.repeat("*", 10));
assertTrue(result.passed());
result = user.getValidations().get(UserImpl.LAST_NAME)
.validate(StringUtils.repeat("*", 210));
assertFalse(result.passed());
}
|
@Override
public synchronized void editSchedule() {
updateConfigIfNeeded();
long startTs = clock.getTime();
CSQueue root = scheduler.getRootQueue();
Resource clusterResources = Resources.clone(scheduler.getClusterResource());
containerBasedPreemptOrKill(root, clusterResources);
if (LOG.isDebugEnabled()) {
LOG.debug("Total time used=" + (clock.getTime() - startTs) + " ms.");
}
}
|
@Test
public void testNaturalTermination() {
int[][] qData = new int[][]{
// / A B C
{ 100, 40, 40, 20 }, // abs
{ 100, 100, 100, 100 }, // maxCap
{ 100, 55, 45, 0 }, // used
{ 20, 10, 10, 0 }, // pending
{ 0, 0, 0, 0 }, // reserved
{ 2, 1, 1, 0 }, // apps
{ -1, 1, 1, 0 }, // req granularity
{ 3, 0, 0, 0 }, // subqueues
};
conf.setFloat(
CapacitySchedulerConfiguration.PREEMPTION_NATURAL_TERMINATION_FACTOR,
(float) 0.1);
ProportionalCapacityPreemptionPolicy policy = buildPolicy(qData);
policy.editSchedule();
// ignore 10% imbalance between over-capacity queues
verify(mDisp, never()).handle(isA(ContainerPreemptEvent.class));
}
|
public Optional<Long> validateAndGetTimestamp(final ExternalServiceCredentials credentials) {
final String[] parts = requireNonNull(credentials).password().split(DELIMITER);
final String timestampSeconds;
final String actualSignature;
// making sure password format matches our expectations based on the generator configuration
if (parts.length == 3 && prependUsername) {
final String username = usernameIsTimestamp() ? parts[0] + DELIMITER + parts[1] : parts[0];
// username has to match the one from `credentials`
if (!credentials.username().equals(username)) {
return Optional.empty();
}
timestampSeconds = parts[1];
actualSignature = parts[2];
} else if (parts.length == 2 && !prependUsername) {
timestampSeconds = parts[0];
actualSignature = parts[1];
} else {
// unexpected password format
return Optional.empty();
}
final String signedData = usernameIsTimestamp() ? credentials.username() : credentials.username() + DELIMITER + timestampSeconds;
final String expectedSignature = truncateSignature
? hmac256TruncatedToHexString(key, signedData, TRUNCATED_SIGNATURE_LENGTH)
: hmac256ToHexString(key, signedData);
// if the signature is valid it's safe to parse the `timestampSeconds` string into Long
return hmacHexStringsEqual(expectedSignature, actualSignature)
? Optional.of(Long.valueOf(timestampSeconds))
: Optional.empty();
}
|
@Test
public void testValidateValidWithUsernameIsTimestamp() {
final long expectedTimestamp = Instant.ofEpochSecond(TIME_SECONDS).truncatedTo(ChronoUnit.DAYS).getEpochSecond();
assertEquals(expectedTimestamp, usernameIsTimestampGenerator.validateAndGetTimestamp(usernameIsTimestampCredentials).orElseThrow());
}
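// Hedged sketch of the password layouts accepted above, with DELIMITER assumed to be
// ":" (the real constant may differ); signature verification and the usernameIsTimestamp
// variant are deliberately omitted here:
//   prependUsername == true  -> "<username>:<timestampSeconds>:<signature>"  (3 parts)
//   prependUsername == false -> "<timestampSeconds>:<signature>"             (2 parts)
static java.util.Optional<Long> timestampOfSketch(String password, boolean prependUsername) {
    String[] parts = password.split(":");
    if (prependUsername && parts.length == 3) {
        return java.util.Optional.of(Long.valueOf(parts[1]));
    }
    if (!prependUsername && parts.length == 2) {
        return java.util.Optional.of(Long.valueOf(parts[0]));
    }
    return java.util.Optional.empty(); // unexpected format, as in the real method
}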
|
public void decrementIndex(int taskIndex) {
moveTask(taskIndex, DECREMENT_INDEX);
}
|
@Test
public void shouldErrorOutWhenTaskIsNotFoundWhileDecrementing() {
try {
new Tasks().decrementIndex(1);
fail("Should have thrown up");
} catch (Exception e) {
assertThat(e.getMessage(), is("There is not valid task at position 1."));
}
}
|
public boolean containsValue(final long value) {
final long[] entries = this.entries;
for (int i = 1; i < entries.length; i += 2) {
final long entryValue = entries[i];
if (entryValue == value) {
return true;
}
}
return false;
}
|
@Test
public void shouldNotContainValueForAMissingEntry() {
assertFalse(map.containsValue(1L));
}
|
public String getGtidSetStr() {
return gtidMap.get(GTID_SET_STRING);
}
|
@Test
public void getGtidSetStrOutputNull() {
// Arrange
final LogHeader objectUnderTest = new LogHeader(0);
// Act
final String actual = objectUnderTest.getGtidSetStr();
// Assert result
Assert.assertNull(actual);
}
|
public static Optional<Expression> convert(
org.apache.flink.table.expressions.Expression flinkExpression) {
if (!(flinkExpression instanceof CallExpression)) {
return Optional.empty();
}
CallExpression call = (CallExpression) flinkExpression;
Operation op = FILTERS.get(call.getFunctionDefinition());
if (op != null) {
switch (op) {
case IS_NULL:
return onlyChildAs(call, FieldReferenceExpression.class)
.map(FieldReferenceExpression::getName)
.map(Expressions::isNull);
case NOT_NULL:
return onlyChildAs(call, FieldReferenceExpression.class)
.map(FieldReferenceExpression::getName)
.map(Expressions::notNull);
case LT:
return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call);
case LT_EQ:
return convertFieldAndLiteral(
Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call);
case GT:
return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call);
case GT_EQ:
return convertFieldAndLiteral(
Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call);
case EQ:
return convertFieldAndLiteral(
(ref, lit) -> {
if (NaNUtil.isNaN(lit)) {
return Expressions.isNaN(ref);
} else {
return Expressions.equal(ref, lit);
}
},
call);
case NOT_EQ:
return convertFieldAndLiteral(
(ref, lit) -> {
if (NaNUtil.isNaN(lit)) {
return Expressions.notNaN(ref);
} else {
return Expressions.notEqual(ref, lit);
}
},
call);
case NOT:
return onlyChildAs(call, CallExpression.class)
.flatMap(FlinkFilters::convert)
.map(Expressions::not);
case AND:
return convertLogicExpression(Expressions::and, call);
case OR:
return convertLogicExpression(Expressions::or, call);
case STARTS_WITH:
return convertLike(call);
}
}
return Optional.empty();
}
|
@Test
public void testIsNull() {
Expression expr = resolve(Expressions.$("field1").isNull());
Optional<org.apache.iceberg.expressions.Expression> actual = FlinkFilters.convert(expr);
assertThat(actual).isPresent();
UnboundPredicate<Object> expected = org.apache.iceberg.expressions.Expressions.isNull("field1");
assertPredicatesMatch(expected, actual.get());
}
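// Note on the EQ/NOT_EQ branches above: comparisons against NaN are rewritten to
// isNaN/notNaN rather than equal/notEqual, since NaN never compares equal to itself:
//   resolve($("f").isEqual(Float.NaN))    -> Expressions.isNaN("f")
//   resolve($("f").isNotEqual(Float.NaN)) -> Expressions.notNaN("f")
// (resolve(...) is the test helper used in testIsNull; the mapping follows the
// NaNUtil.isNaN checks in convertFieldAndLiteral's lambdas.)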
|
@Override
public String ping(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<String> f = executorService.readAsync(entry, LongCodec.INSTANCE, RedisCommands.PING);
return syncFuture(f);
}
|
@Test
public void testClusterPing() {
RedisClusterNode master = getFirstMaster();
String res = connection.ping(master);
assertThat(res).isEqualTo("PONG");
}
|
@Override
protected void doExecute() {
if (vpls == null) {
vpls = get(Vpls.class);
}
if (interfaceService == null) {
interfaceService = get(InterfaceService.class);
}
VplsCommandEnum enumCommand = VplsCommandEnum.enumFromString(command);
if (enumCommand != null) {
switch (enumCommand) {
case ADD_IFACE:
addIface(vplsName, optArg);
break;
case CREATE:
create(vplsName);
break;
case DELETE:
delete(vplsName);
break;
case LIST:
list();
break;
case REMOVE_IFACE:
removeIface(vplsName, optArg);
break;
case SET_ENCAP:
setEncap(vplsName, optArg);
break;
case SHOW:
show(vplsName);
break;
case CLEAN:
cleanVpls();
break;
default:
print(VPLS_COMMAND_NOT_FOUND, command);
}
} else {
print(VPLS_COMMAND_NOT_FOUND, command);
}
}
|
@Test
public void testShowAll() {
((TestVpls) vplsCommand.vpls).initSampleData();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
System.setOut(ps);
vplsCommand.command = VplsCommandEnum.SHOW.toString();
vplsCommand.doExecute();
String result = baos.toString();
assertEquals(SHOW_ALL_RES, result);
}
|
public static AbstractProtocolNegotiatorBuilderSingleton getSingleton() {
return SINGLETON;
}
|
@Test
void testSingletonInstance() {
AbstractProtocolNegotiatorBuilderSingleton singleton1 = SdkProtocolNegotiatorBuilderSingleton.getSingleton();
AbstractProtocolNegotiatorBuilderSingleton singleton2 = SdkProtocolNegotiatorBuilderSingleton.getSingleton();
assertSame(singleton1, singleton2);
}
|
@Override
public String toString() {
return "ByteArrayObjectDataOutput{"
+ "size=" + (buffer != null ? buffer.length : 0)
+ ", pos=" + pos
+ '}';
}
|
@Test
public void testToString() {
assertNotNull(out.toString());
}
|
public static CsvIOParse<Row> parseRows(Schema schema, CSVFormat csvFormat) {
CsvIOParseHelpers.validateCsvFormat(csvFormat);
CsvIOParseHelpers.validateCsvFormatWithSchema(csvFormat, schema);
RowCoder coder = RowCoder.of(schema);
CsvIOParseConfiguration.Builder<Row> builder = CsvIOParseConfiguration.builder();
builder.setCsvFormat(csvFormat).setSchema(schema).setCoder(coder).setFromRowFn(row -> row);
return CsvIOParse.<Row>builder().setConfigBuilder(builder).build();
}
|
@Test
public void parseRows() {
Pipeline pipeline = Pipeline.create();
PCollection<String> input =
csvRecords(
pipeline,
"# This is a comment",
"aBoolean,aDouble,aFloat,anInteger,aLong,aString",
"true,1.0,2.0,3,4,foo",
"N/A,6.0,7.0,8,9,bar",
"false,12.0,14.0,8,24,\"foo\nbar\"",
"true,1.0,2.0,3,4,foo$,bar");
List<Row> want =
Arrays.asList(
Row.withSchema(NULLABLE_ALL_PRIMITIVE_DATA_TYPES_SCHEMA)
.withFieldValue("aBoolean", true)
.withFieldValue("aDouble", 1.0)
.withFieldValue("aFloat", 2.0f)
.withFieldValue("anInteger", 3)
.withFieldValue("aLong", 4L)
.withFieldValue("aString", "foo")
.build(),
Row.withSchema(NULLABLE_ALL_PRIMITIVE_DATA_TYPES_SCHEMA)
.withFieldValue("aBoolean", null)
.withFieldValue("aDouble", 6.0)
.withFieldValue("aFloat", 7.0f)
.withFieldValue("anInteger", 8)
.withFieldValue("aLong", 9L)
.withFieldValue("aString", "bar")
.build(),
Row.withSchema(NULLABLE_ALL_PRIMITIVE_DATA_TYPES_SCHEMA)
.withFieldValue("aBoolean", false)
.withFieldValue("aDouble", 12.0)
.withFieldValue("aFloat", 14.0f)
.withFieldValue("anInteger", 8)
.withFieldValue("aLong", 24L)
.withFieldValue("aString", "foo\nbar")
.build(),
Row.withSchema(NULLABLE_ALL_PRIMITIVE_DATA_TYPES_SCHEMA)
.withFieldValue("aBoolean", true)
.withFieldValue("aDouble", 1.0)
.withFieldValue("aFloat", 2.0f)
.withFieldValue("anInteger", 3)
.withFieldValue("aLong", 4L)
.withFieldValue("aString", "foo,bar")
.build());
CsvIOParse<Row> underTest =
CsvIO.parseRows(NULLABLE_ALL_PRIMITIVE_DATA_TYPES_SCHEMA, csvFormat());
CsvIOParseResult<Row> result = input.apply(underTest);
PAssert.that(result.getOutput()).containsInAnyOrder(want);
PAssert.that(result.getErrors()).empty();
pipeline.run();
}
|
public boolean record(final Throwable observation)
{
final long timestampMs;
DistinctObservation distinctObservation;
timestampMs = clock.time();
synchronized (this)
{
distinctObservation = find(distinctObservations, observation);
if (null == distinctObservation)
{
distinctObservation = newObservation(timestampMs, observation);
if (INSUFFICIENT_SPACE == distinctObservation)
{
return false;
}
}
}
final int offset = distinctObservation.offset;
buffer.getAndAddInt(offset + OBSERVATION_COUNT_OFFSET, 1);
buffer.putLongOrdered(offset + LAST_OBSERVATION_TIMESTAMP_OFFSET, timestampMs);
return true;
}
|
@Test
void shouldTestRecordingWithoutStackTrace()
{
final CachedEpochClock clock = new CachedEpochClock();
final DistinctErrorLog log = new DistinctErrorLog(DIRECT_BUFFER, clock);
clock.advance(10);
log.record(new TestEvent("event one"));
clock.advance(10);
log.record(new TestEvent("event one"));
clock.advance(10);
log.record(new TestEvent("event two"));
assertTrue(ErrorLogReader.hasErrors(DIRECT_BUFFER));
final StringBuilder sb = new StringBuilder();
final int errorCount = ErrorLogReader.read(
DIRECT_BUFFER,
(observationCount, firstObservationTimestamp, lastObservationTimestamp, encodedException) ->
{
sb
.append(observationCount)
.append(',')
.append(firstObservationTimestamp)
.append(',')
.append(lastObservationTimestamp)
.append(',')
.append(encodedException);
});
assertEquals(2, errorCount);
final String expectedOutput =
"2,10,20,org.agrona.concurrent.errors.DistinctErrorLogTest$TestEvent: event one" + System.lineSeparator() +
"1,30,30,org.agrona.concurrent.errors.DistinctErrorLogTest$TestEvent: event two" + System.lineSeparator();
assertEquals(expectedOutput, sb.toString());
}
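// Note on the semantics exercised above: an observation that find(...) deems equal to
// an existing one (here, the same TestEvent message with no stack trace) allocates no
// new record; record(...) only bumps the count and the last-observation timestamp,
// which is why the reader reports "2,10,20" for event one. record(...) returns false
// only when INSUFFICIENT_SPACE prevents storing a new distinct observation.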
|
public ConfigCenterBuilder configFile(String configFile) {
this.configFile = configFile;
return getThis();
}
|
@Test
void configFile() {
ConfigCenterBuilder builder = ConfigCenterBuilder.newBuilder();
builder.configFile("configFile");
Assertions.assertEquals("configFile", builder.build().getConfigFile());
}
|
public static Write write() {
return new AutoValue_InfluxDbIO_Write.Builder()
.setRetentionPolicy(DEFAULT_RETENTION_POLICY)
.setDisableCertificateValidation(false)
.setBatchSize(DEFAULT_BUFFER_LIMIT)
.setConsistencyLevel(ConsistencyLevel.QUORUM)
.build();
}
|
@Test
public void validateWriteTest() {
InfluxDB influxDb = Mockito.mock(InfluxDB.class);
PowerMockito.when(
InfluxDBFactory.connect(
anyString(), anyString(), anyString(), any(OkHttpClient.Builder.class)))
.thenReturn(influxDb);
PowerMockito.when(InfluxDBFactory.connect(anyString(), anyString(), anyString()))
.thenReturn(influxDb);
String influxHost = "http://localhost";
String userName = "admin";
String password = "admin";
String influxDatabaseName = "testDataBase";
AtomicInteger countInvocation = new AtomicInteger();
Mockito.doAnswer(invocation -> countInvocation.getAndIncrement())
.when(influxDb)
.write(any(List.class));
doReturn(getDatabase(influxDatabaseName)).when(influxDb).query(new Query("SHOW DATABASES"));
final int numOfElementsToWrite = 1000;
pipeline
.apply("Generate data", Create.of(GenerateData.getMetric("test_m", numOfElementsToWrite)))
.apply(
"Write data to InfluxDB",
InfluxDbIO.write()
.withDataSourceConfiguration(
DataSourceConfiguration.create(
StaticValueProvider.of(influxHost),
StaticValueProvider.of(userName),
StaticValueProvider.of(password)))
.withDatabase(influxDatabaseName));
PipelineResult result = pipeline.run();
Assert.assertEquals(State.DONE, result.waitUntilFinish());
Assert.assertTrue(countInvocation.get() > 0);
}
|
public String getMysqlType() {
switch (type) {
case INLINE_VIEW:
case VIEW:
case MATERIALIZED_VIEW:
case CLOUD_NATIVE_MATERIALIZED_VIEW:
return "VIEW";
case SCHEMA:
return "SYSTEM VIEW";
default:
// external table also returns "BASE TABLE" for BI compatibility
return "BASE TABLE";
}
}
|
@Test
public void testGetMysqlType() {
Assert.assertEquals("BASE TABLE", new Table(TableType.OLAP).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.OLAP_EXTERNAL).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.CLOUD_NATIVE).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.MYSQL).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.BROKER).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.ELASTICSEARCH).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.HIVE).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.ICEBERG).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.HUDI).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.JDBC).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.DELTALAKE).getMysqlType());
Assert.assertEquals("BASE TABLE", new Table(TableType.FILE).getMysqlType());
Assert.assertEquals("VIEW", new Table(TableType.INLINE_VIEW).getMysqlType());
Assert.assertEquals("VIEW", new Table(TableType.VIEW).getMysqlType());
Assert.assertEquals("VIEW", new Table(TableType.MATERIALIZED_VIEW).getMysqlType());
Assert.assertEquals("VIEW", new Table(TableType.CLOUD_NATIVE_MATERIALIZED_VIEW).getMysqlType());
Assert.assertEquals("SYSTEM VIEW", new Table(TableType.SCHEMA).getMysqlType());
}
|
public static int toInt(String val) {
return toInt(val, 0);
}
|
@Test
void testToInt() {
// ConvertUtils.toInt(String)
assertEquals(0, ConvertUtils.toInt("0"));
assertEquals(-1, ConvertUtils.toInt("-1"));
assertEquals(10, ConvertUtils.toInt("10"));
assertEquals(Integer.MAX_VALUE, ConvertUtils.toInt(String.valueOf(Integer.MAX_VALUE)));
assertEquals(Integer.MIN_VALUE, ConvertUtils.toInt(String.valueOf(Integer.MIN_VALUE)));
assertEquals(0, ConvertUtils.toInt("notIntValue"));
// ConvertUtils.toInt(String, Integer)
assertEquals(0, ConvertUtils.toInt("0", 100));
assertEquals(100, ConvertUtils.toInt(null, 100));
assertEquals(100, ConvertUtils.toInt("null", 100));
assertEquals(100, ConvertUtils.toInt("notIntValue", 100));
}
|
public void initialize(ConnectorContext ctx) {
context = ctx;
}
|
@Test
public void shouldInitializeContext() {
connector.initialize(context);
assertableConnector.assertInitialized();
assertableConnector.assertContext(context);
assertableConnector.assertTaskConfigs(null);
}
|
@Override
public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
throws IOException, ServletException
{
HttpServletRequest request = (HttpServletRequest)req;
HttpServletResponse response = (HttpServletResponse)res;
// Do not allow framing; OF-997
response.setHeader("X-Frame-Options", JiveGlobals.getProperty("adminConsole.frame-options", "SAMEORIGIN"));
// Reset the defaultLoginPage variable
String loginPage = defaultLoginPage;
if (loginPage == null) {
loginPage = request.getContextPath() + (AuthFactory.isOneTimeAccessTokenEnabled() ? "/loginToken.jsp" : "/login.jsp" );
}
// Get the page we're on:
String url = request.getRequestURI().substring(1);
if (url.startsWith("plugins/")) {
url = url.substring("plugins/".length());
}
// See if it's contained in the exclude list. If so, skip filter execution
boolean doExclude = false;
for (String exclude : excludes) {
if (testURLPassesExclude(url, exclude)) {
doExclude = true;
break;
}
}
if (!doExclude || IP_ACCESS_IGNORE_EXCLUDES.getValue()) {
if (!passesBlocklist(req) || !passesAllowList(req)) {
response.sendError(HttpServletResponse.SC_FORBIDDEN);
return;
}
}
if (!doExclude) {
WebManager manager = new WebManager();
manager.init(request, response, request.getSession(), context);
boolean haveOneTimeToken = manager.getAuthToken() instanceof AuthToken.OneTimeAuthToken;
User loggedUser = manager.getUser();
boolean loggedAdmin = loggedUser == null ? false : adminManager.isUserAdmin(loggedUser.getUsername(), true);
if (!haveOneTimeToken && !loggedAdmin && !authUserFromRequest(request)) {
response.sendRedirect(getRedirectURL(request, loginPage, null));
return;
}
}
chain.doFilter(req, res);
}
|
@Test
public void nonExcludedUrlWillErrorWhenOnBlocklist() throws Exception {
AuthCheckFilter.SERVLET_REQUEST_AUTHENTICATOR.setValue(AdminUserServletAuthenticatorClass.class);
final AuthCheckFilter filter = new AuthCheckFilter(adminManager, loginLimitManager);
AuthCheckFilter.IP_ACCESS_BLOCKLIST.setValue(Collections.singleton(request.getRemoteAddr()));
filter.doFilter(request, response, filterChain);
verify(response, atLeastOnce()).sendError(anyInt());
verify(filterChain, never()).doFilter(any(), any());
}
|
@Override
public <T> @Nullable Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
checkForDynamicType(typeDescriptor);
return ProtoSchemaTranslator.getSchema((Class<Message>) typeDescriptor.getRawType());
}
|
@Test
public void testMapSchema() {
Schema schema = new ProtoMessageSchema().schemaFor(TypeDescriptor.of(MapPrimitive.class));
assertEquals(MAP_PRIMITIVE_SCHEMA, schema);
}
|
@Override
public Long clusterCountKeysInSlot(int slot) {
RedisClusterNode node = clusterGetNodeForSlot(slot);
MasterSlaveEntry entry = executorService.getConnectionManager().getEntry(new InetSocketAddress(node.getHost(), node.getPort()));
RFuture<Long> f = executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_COUNTKEYSINSLOT, slot);
return syncFuture(f);
}
|
@Test
public void testClusterCountKeysInSlot() {
Long t = connection.clusterCountKeysInSlot(1);
assertThat(t).isZero();
}
|
@Override
public YamlShardingStrategyConfiguration swapToYamlConfiguration(final ShardingStrategyConfiguration data) {
YamlShardingStrategyConfiguration result = new YamlShardingStrategyConfiguration();
if (data instanceof StandardShardingStrategyConfiguration) {
result.setStandard(createYamlStandardShardingStrategyConfiguration((StandardShardingStrategyConfiguration) data));
}
if (data instanceof ComplexShardingStrategyConfiguration) {
result.setComplex(createYamlComplexShardingStrategyConfiguration((ComplexShardingStrategyConfiguration) data));
}
if (data instanceof HintShardingStrategyConfiguration) {
result.setHint(createYamlHintShardingStrategyConfiguration((HintShardingStrategyConfiguration) data));
}
if (data instanceof NoneShardingStrategyConfiguration) {
result.setNone(new YamlNoneShardingStrategyConfiguration());
}
return result;
}
|
@Test
void assertSwapToYamlConfigurationForComplexShardingStrategy() {
ShardingStrategyConfiguration data = new ComplexShardingStrategyConfiguration("region_id, user_id", "core_complex_fixture");
YamlShardingStrategyConfigurationSwapper swapper = new YamlShardingStrategyConfigurationSwapper();
YamlShardingStrategyConfiguration actual = swapper.swapToYamlConfiguration(data);
assertThat(actual.getComplex().getShardingColumns(), is("region_id, user_id"));
assertThat(actual.getComplex().getShardingAlgorithmName(), is("core_complex_fixture"));
}
|
@Override
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext,
final ShardingSphereDatabase database, final ConnectionContext connectionContext) throws SQLException {
if (1 == queryResults.size() && !isNeedAggregateRewrite(sqlStatementContext)) {
return new IteratorStreamMergedResult(queryResults);
}
Map<String, Integer> columnLabelIndexMap = getColumnLabelIndexMap(queryResults.get(0));
SelectStatementContext selectStatementContext = (SelectStatementContext) sqlStatementContext;
selectStatementContext.setIndexes(columnLabelIndexMap);
MergedResult mergedResult = build(queryResults, selectStatementContext, columnLabelIndexMap, database);
return decorate(queryResults, selectStatementContext, mergedResult);
}
|
@Test
void assertBuildGroupByStreamMergedResultWithSQLServerLimit() throws SQLException {
final ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(TypedSPILoader.getService(DatabaseType.class, "SQLServer"));
ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
when(database.getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(mock(ShardingSphereSchema.class));
SQLServerSelectStatement selectStatement = (SQLServerSelectStatement) buildSelectStatement(new SQLServerSelectStatement());
selectStatement.setGroupBy(new GroupBySegment(0, 0, Collections.singletonList(new IndexOrderByItemSegment(0, 0, 1, OrderDirection.DESC, NullsOrderType.FIRST))));
selectStatement.setOrderBy(new OrderBySegment(0, 0, Collections.singletonList(new IndexOrderByItemSegment(0, 0, 1, OrderDirection.DESC, NullsOrderType.FIRST))));
selectStatement.setProjections(new ProjectionsSegment(0, 0));
selectStatement.setLimit(new LimitSegment(0, 0, new NumberLiteralRowNumberValueSegment(0, 0, 1L, true), null));
SelectStatementContext selectStatementContext = new SelectStatementContext(createShardingSphereMetaData(database), Collections.emptyList(),
selectStatement, DefaultDatabase.LOGIC_NAME, Collections.emptyList());
MergedResult actual = resultMerger.merge(createQueryResults(), selectStatementContext, createSQLServerDatabase(), mock(ConnectionContext.class));
assertThat(actual, instanceOf(TopAndRowNumberDecoratorMergedResult.class));
assertThat(((TopAndRowNumberDecoratorMergedResult) actual).getMergedResult(), instanceOf(GroupByStreamMergedResult.class));
}
|
public synchronized void start() throws IllegalStateException, StreamsException {
if (setState(State.REBALANCING)) {
log.debug("Starting Streams client");
if (globalStreamThread != null) {
globalStreamThread.start();
}
final int numThreads = processStreamThread(StreamThread::start);
log.info("Started {} stream threads", numThreads);
final Long cleanupDelay = applicationConfigs.getLong(StreamsConfig.STATE_CLEANUP_DELAY_MS_CONFIG);
stateDirCleaner.scheduleAtFixedRate(() -> {
// we do not use a lock here since we only read the value and act on it
if (state == State.RUNNING) {
stateDirectory.cleanRemovedTasks(cleanupDelay);
}
}, cleanupDelay, cleanupDelay, TimeUnit.MILLISECONDS);
final long recordingDelay = 0;
final long recordingInterval = 1;
if (rocksDBMetricsRecordingService != null) {
rocksDBMetricsRecordingService.scheduleAtFixedRate(
streamsMetrics.rocksDBMetricsRecordingTrigger(),
recordingDelay,
recordingInterval,
TimeUnit.MINUTES
);
}
} else {
throw new IllegalStateException("The client is either already started or already stopped, cannot re-start");
}
}
|
@Test
public void shouldCleanupOldStateDirs() {
prepareStreams();
prepareStreamThread(streamThreadOne, 1);
prepareStreamThread(streamThreadTwo, 2);
try (final MockedStatic<Executors> executorsMockedStatic = mockStatic(Executors.class)) {
final ScheduledExecutorService cleanupSchedule = mock(ScheduledExecutorService.class);
executorsMockedStatic.when(() -> Executors.newSingleThreadScheduledExecutor(
any(ThreadFactory.class)
)).thenReturn(cleanupSchedule);
try (MockedConstruction<StateDirectory> ignored = mockConstruction(StateDirectory.class,
(mock, context) -> when(mock.initializeProcessId()).thenReturn(UUID.randomUUID()))) {
props.setProperty(StreamsConfig.STATE_CLEANUP_DELAY_MS_CONFIG, "1");
final StreamsBuilder builder = new StreamsBuilder();
builder.table("topic", Materialized.as("store"));
try (final KafkaStreams streams = new KafkaStreams(builder.build(), props, supplier, time)) {
streams.start();
}
}
verify(cleanupSchedule).scheduleAtFixedRate(any(Runnable.class), eq(1L), eq(1L), eq(TimeUnit.MILLISECONDS));
verify(cleanupSchedule).shutdownNow();
}
}
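// Hedged usage sketch: the state-dir cleaner above uses STATE_CLEANUP_DELAY_MS_CONFIG
// as both the initial delay and the period of the scheduled task, so lowering the
// property tightens the cleanup cadence (the test sets it to 1 ms to observe this):
static java.util.Properties withTightCleanup(java.util.Properties base) {
    java.util.Properties props = new java.util.Properties();
    props.putAll(base);
    props.setProperty(StreamsConfig.STATE_CLEANUP_DELAY_MS_CONFIG, "60000"); // clean once a minute
    return props;
}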
|
public static Ip4Prefix valueOf(int address, int prefixLength) {
return new Ip4Prefix(Ip4Address.valueOf(address), prefixLength);
}
|
@Test
public void testValueOfByteArrayIPv4() {
Ip4Prefix ipPrefix;
byte[] value;
value = new byte[] {1, 2, 3, 4};
ipPrefix = Ip4Prefix.valueOf(value, 24);
assertThat(ipPrefix.toString(), is("1.2.3.0/24"));
ipPrefix = Ip4Prefix.valueOf(value, 32);
assertThat(ipPrefix.toString(), is("1.2.3.4/32"));
value = new byte[] {1, 2, 3, 5};
ipPrefix = Ip4Prefix.valueOf(value, 32);
assertThat(ipPrefix.toString(), is("1.2.3.5/32"));
value = new byte[] {0, 0, 0, 0};
ipPrefix = Ip4Prefix.valueOf(value, 0);
assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
ipPrefix = Ip4Prefix.valueOf(value, 32);
assertThat(ipPrefix.toString(), is("0.0.0.0/32"));
value = new byte[] {(byte) 0xff, (byte) 0xff,
(byte) 0xff, (byte) 0xff};
ipPrefix = Ip4Prefix.valueOf(value, 0);
assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
ipPrefix = Ip4Prefix.valueOf(value, 16);
assertThat(ipPrefix.toString(), is("255.255.0.0/16"));
ipPrefix = Ip4Prefix.valueOf(value, 32);
assertThat(ipPrefix.toString(), is("255.255.255.255/32"));
}
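// Hedged sketch for the int-based overload above: the 32-bit address is interpreted in
// network byte order and, matching the byte[] behaviour exercised in the test, host
// bits beyond the prefix length are masked off:
static void ip4PrefixIntExample() {
    Ip4Prefix p = Ip4Prefix.valueOf(0x01020304, 24);
    assert "1.2.3.0/24".equals(p.toString()); // assumes the same masking as the byte[] overload
}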
|
public int remap(int var, int size) {
if ((var & REMAP_FLAG) != 0) {
return unmask(var);
}
int offset = var - argsSize;
if (offset < 0) {
// self projection for method arguments
return var;
}
if (offset >= mapping.length) {
mapping = Arrays.copyOf(mapping, Math.max(mapping.length * 2, offset + 1));
}
int mappedVar = mapping[offset];
int unmasked = unmask(mappedVar);
boolean isRemapped = ((mappedVar & REMAP_FLAG) != 0);
if (size == 2) {
if ((mappedVar & DOUBLE_SLOT_FLAG) == 0) {
// no double slot mapping over an int slot;
// must re-map unless the int slot is the last used one or there is a free double-ext slot
isRemapped = false;
}
} else {
// size == 1
if ((mappedVar & DOUBLE_SLOT_FLAG_2) != 0) {
// no mapping over a previously 2-slot value
isRemapped = false;
} else if ((mappedVar & DOUBLE_SLOT_FLAG) != 0) {
// the previously second part of the double slot is free to reuse
mapping[unmasked + 1] = (unmasked + 1) | REMAP_FLAG;
}
}
if (!isRemapped) {
mappedVar = remapVar(newVarIdxInternal(size), size);
setMapping(offset, mappedVar, size);
}
unmasked = unmask(mappedVar);
// adjust the mapping pointer when remapping a variable occupying 2 slots
nextMappedVar = Math.max(unmasked + size, nextMappedVar);
return unmasked;
}
|
@Test
public void remapOverflow() {
// default mapping array size is 8, so remapping offset 16 should trigger the
// overflow handling (growing the mapping array)
assertEquals(0, instance.remap(16, 1));
}
|
public SchemaMapping fromArrow(Schema arrowSchema) {
List<Field> fields = arrowSchema.getFields();
List<TypeMapping> parquetFields = fromArrow(fields);
MessageType parquetType =
addToBuilder(parquetFields, Types.buildMessage()).named("root");
return new SchemaMapping(arrowSchema, parquetType, parquetFields);
}
|
@Test(expected = UnsupportedOperationException.class)
public void testArrowTimestampSecondToParquet() {
converter
.fromArrow(new Schema(asList(field("a", new ArrowType.Timestamp(TimeUnit.SECOND, "UTC")))))
.getParquetSchema();
}
|
@Override
public RLock readLock() {
return new RedissonReadLock(commandExecutor, getName());
}
|
@Test
public void testIsHeldByCurrentThread() {
RReadWriteLock rwlock = redisson.getReadWriteLock("lock");
RLock lock = rwlock.readLock();
Assertions.assertFalse(lock.isHeldByCurrentThread());
lock.lock();
Assertions.assertTrue(lock.isHeldByCurrentThread());
lock.unlock();
Assertions.assertFalse(lock.isHeldByCurrentThread());
}
|
@Override
public void forward(DeviceId deviceId, ForwardingObjective forwardingObjective) {
checkPermission(FLOWRULE_WRITE);
if (forwardingObjective.nextId() == null ||
flowObjectiveStore.getNextGroup(forwardingObjective.nextId()) != null ||
!queueFwdObjective(deviceId, forwardingObjective)) {
// fast path
installerExecutor.execute(new ObjectiveProcessor(deviceId, forwardingObjective, installerExecutor));
}
}
|
@Test
public void pendingForwardingObjective() throws TestUtilsException {
TrafficSelector selector = DefaultTrafficSelector.emptySelector();
TrafficTreatment treatment = DefaultTrafficTreatment.emptyTreatment();
ForwardingObjective forward4 =
DefaultForwardingObjective.builder()
.fromApp(NetTestTools.APP_ID)
.withFlag(ForwardingObjective.Flag.SPECIFIC)
.withSelector(selector)
.withTreatment(treatment)
.makePermanent()
.nextStep(4)
.add();
ForwardingObjective forward5 =
DefaultForwardingObjective.builder()
.fromApp(NetTestTools.APP_ID)
.withFlag(ForwardingObjective.Flag.SPECIFIC)
.withSelector(selector)
.withTreatment(treatment)
.makePermanent()
.nextStep(5)
.add();
// multiple pending forwards should be combined
manager.forward(id1, forward4);
manager.forward(id1, forward4);
manager.forward(id1, forward5);
// 1 should be complete, 1 pending
TestTools.assertAfter(RETRY_MS, () ->
assertThat(forwardingObjectives, hasSize(1)));
assertThat(forwardingObjectives, hasItem("of:d1"));
assertThat(filteringObjectives, hasSize(0));
assertThat(nextObjectives, hasSize(0));
// Now send events to trigger the objective still in the queue
ObjectiveEvent event1 = new ObjectiveEvent(ObjectiveEvent.Type.ADD, 4);
FlowObjectiveStoreDelegate delegate = TestUtils.getField(manager, "delegate");
delegate.notify(event1);
// all should be processed now
TestTools.assertAfter(RETRY_MS, () ->
assertThat(forwardingObjectives, hasSize(2)));
assertThat(forwardingObjectives, hasItem("of:d1"));
assertThat(filteringObjectives, hasSize(0));
assertThat(nextObjectives, hasSize(0));
}
|
@Override
public Object[] toArray() {
return underlying().toArray();
}
|
@Test
public void testDelegationOfToArrayIntoGivenDestination() {
Object[] destinationArray = new Object[0];
new PCollectionsTreeSetWrapperDelegationChecker<>()
.defineMockConfigurationForFunctionInvocation(mock -> mock.toArray(eq(destinationArray)), new Object[0])
.defineWrapperFunctionInvocationAndMockReturnValueTransformation(wrapper -> wrapper.toArray(destinationArray), identity())
.doFunctionDelegationCheck();
}
|
public void hasValue(@Nullable Object expected) {
if (expected == null) {
throw new NullPointerException("Optional cannot have a null value.");
}
if (actual == null) {
failWithActual("expected an optional with value", expected);
} else if (!actual.isPresent()) {
failWithoutActual(fact("expected to have value", expected), simpleFact("but was absent"));
} else {
checkNoNeedToDisplayBothValues("get()").that(actual.get()).isEqualTo(expected);
}
}
|
@Test
public void hasValue() {
assertThat(Optional.of("foo")).hasValue("foo");
}
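// Hedged companion sketch: the focal method above explicitly throws
// NullPointerException for a null expected value, so the guard can be
// exercised directly.
@Test
public void hasValueNull() {
    try {
        assertThat(Optional.of("foo")).hasValue(null);
        fail("expected NullPointerException for a null expected value");
    } catch (NullPointerException expected) {
        // "Optional cannot have a null value."
    }
}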
|
public HollowHashIndexResult findMatches(Object... query) {
if (hashStateVolatile == null) {
throw new IllegalStateException(this + " wasn't initialized");
}
int hashCode = 0;
for (int i = 0; i < query.length; i++) {
    if (query[i] == null) {
        throw new IllegalArgumentException("querying by null unsupported; i=" + i);
    }
    hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i));
}
HollowHashIndexResult result;
HollowHashIndexState hashState;
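// Re-read loop: if a concurrent delta swap replaces hashStateVolatile
// mid-lookup, the do/while below retries against the fresh state.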
do {
result = null;
hashState = hashStateVolatile;
long bucket = hashCode & hashState.getMatchHashMask();
long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
while (!bucketIsEmpty) {
if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) {
int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize());
long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), hashState.getBitsPerSelectTablePointer());
result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize);
break;
}
bucket = (bucket + 1) & hashState.getMatchHashMask();
hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
}
} while (hashState != hashStateVolatile);
return result;
}
|
@Test
public void testIndexingStringTypeFieldsWithNullValues() throws Exception {
mapper.add(new TypeTwoStrings(null, "onez:"));
mapper.add(new TypeTwoStrings("onez:", "onez:"));
mapper.add(new TypeTwoStrings(null, null));
roundTripSnapshot();
HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeTwoStrings", "", "b1.value", "b2.value");
Assert.assertNull(index.findMatches("one"));
Assert.assertNull(index.findMatches("one", "onez:"));
assertIteratorContainsAll(index.findMatches("onez:", "onez:").iterator(), 1);
}
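// Hedged sketch, derived from the focal method's null guard: a null query
// argument should raise IllegalArgumentException before any bucket lookup.
@Test
public void testNullQueryArgumentIsRejected() throws Exception {
    mapper.add(new TypeTwoStrings("onez:", "onez:"));
    roundTripSnapshot();
    HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeTwoStrings", "", "b1.value", "b2.value");
    try {
        index.findMatches((Object) null);
        Assert.fail("expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
        // "querying by null unsupported; i=0"
    }
}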
|
public static int parseIntAscii(final CharSequence cs, final int index, final int length)
{
if (length <= 0)
{
throw new AsciiNumberFormatException("empty string: index=" + index + " length=" + length);
}
final boolean negative = MINUS_SIGN == cs.charAt(index);
int i = index;
if (negative)
{
i++;
if (1 == length)
{
throwParseIntError(cs, index, length);
}
}
final int end = index + length;
if (end - i < INT_MAX_DIGITS)
{
final int tally = parsePositiveIntAscii(cs, index, length, i, end);
return negative ? -tally : tally;
}
else
{
final long tally = parsePositiveIntAsciiOverflowCheck(cs, index, length, i, end);
if (tally > INTEGER_ABSOLUTE_MIN_VALUE || INTEGER_ABSOLUTE_MIN_VALUE == tally && !negative)
{
throwParseIntOverflowError(cs, index, length);
}
return (int)(negative ? -tally : tally);
}
}
|
@Test
void shouldParseInt()
{
assertEquals(0, parseIntAscii("0", 0, 1));
assertEquals(0, parseIntAscii("-0", 0, 2));
assertEquals(7, parseIntAscii("7", 0, 1));
assertEquals(-7, parseIntAscii("-7", 0, 2));
assertEquals(33, parseIntAscii("3333", 1, 2));
assertEquals(-123456789, parseIntAscii("-123456789", 0, 10));
final String maxValueMinusOne = String.valueOf(Integer.MAX_VALUE - 1);
assertEquals(Integer.MAX_VALUE - 1, parseIntAscii(maxValueMinusOne, 0, maxValueMinusOne.length()));
final String maxValue = String.valueOf(Integer.MAX_VALUE);
assertEquals(Integer.MAX_VALUE, parseIntAscii(maxValue, 0, maxValue.length()));
final String minValuePlusOne = String.valueOf(Integer.MIN_VALUE + 1);
assertEquals(Integer.MIN_VALUE + 1, parseIntAscii(minValuePlusOne, 0, minValuePlusOne.length()));
final String minValue = String.valueOf(Integer.MIN_VALUE);
assertEquals(Integer.MIN_VALUE, parseIntAscii(minValue, 0, minValue.length()));
}
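// Hedged sketch: per the overflow branch above, a value one digit past
// Integer.MAX_VALUE should be rejected; AsciiNumberFormatException is assumed
// to be what throwParseIntOverflowError raises.
@Test
void shouldRejectIntOverflow()
{
    final String tooBig = Integer.MAX_VALUE + "0"; // "21474836470"
    assertThrows(AsciiNumberFormatException.class, () -> parseIntAscii(tooBig, 0, tooBig.length()));
}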
|
@Override
public Integer clusterGetSlotForKey(byte[] key) {
RFuture<Integer> f = executorService.readAsync((String)null, StringCodec.INSTANCE, RedisCommands.KEYSLOT, key);
return syncFuture(f);
}
|
@Test
public void testClusterGetSlotForKey() {
Integer slot = connection.clusterGetSlotForKey("123".getBytes());
assertThat(slot).isNotNull();
}
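// Hedged follow-up sketch: Redis Cluster hashes keys into 16384 slots, so any
// returned slot should fall within [0, 16383].
@Test
public void testClusterGetSlotForKeyRange() {
    Integer slot = connection.clusterGetSlotForKey("123".getBytes());
    assertThat(slot).isBetween(0, 16383);
}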
|
public static NettySourceConfig load(Map<String, Object> map) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(mapper.writeValueAsString(map), NettySourceConfig.class);
}
|
@Test(expectedExceptions = UnrecognizedPropertyException.class)
public void testNettyTcpConfigLoadWithMapWhenInvalidPropertyIsSet() throws IOException {
Map<String, Object> map = new HashMap<>();
map.put("invalidProperty", 1);
NettySourceConfig.load(map);
}
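// Hedged happy-path sketch: "host" and "port" are assumed NettySourceConfig
// property names used purely for illustration; the authoritative field list
// lives on NettySourceConfig itself.
@Test
public void testNettyTcpConfigLoadWithMapWhenValidPropertiesAreSet() throws IOException {
    Map<String, Object> map = new HashMap<>();
    map.put("host", "127.0.0.1");
    map.put("port", 10999);
    NettySourceConfig config = NettySourceConfig.load(map);
    assertNotNull(config);
}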
|
@Override
public void flush() throws IOException {
mLocalOutputStream.flush();
}
|
@Test
@PrepareForTest(GCSOutputStream.class)
public void testFlush() throws Exception {
PowerMockito.whenNew(BufferedOutputStream.class)
.withArguments(any(DigestOutputStream.class)).thenReturn(mLocalOutputStream);
GCSOutputStream stream = new GCSOutputStream("testBucketName", "testKey", mClient, sConf
.getList(PropertyKey.TMP_DIRS));
stream.flush();
stream.close();
assertEquals(mMd5Hash, stream.getContentHash().get());
Mockito.verify(mLocalOutputStream).flush();
}
|
@ConstantFunction.List(list = {
        @ConstantFunction(name = "months_add", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true),
        @ConstantFunction(name = "add_months", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true),
        @ConstantFunction(name = "months_add", argTypes = {DATE, INT}, returnType = DATE, isMonotonic = true),
        @ConstantFunction(name = "add_months", argTypes = {DATE, INT}, returnType = DATE, isMonotonic = true)
})
public static ConstantOperator monthsAdd(ConstantOperator date, ConstantOperator month) {
if (date.getType().isDate()) {
return ConstantOperator.createDateOrNull(date.getDate().plusMonths(month.getInt()));
} else {
return ConstantOperator.createDatetimeOrNull(date.getDatetime().plusMonths(month.getInt()));
}
}
|
@Test
public void monthsAdd() {
assertEquals("2016-01-23T09:23:55",
ScalarOperatorFunctions.monthsAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}
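// Hedged sketch exercising the DATE branch of monthsAdd; createDate and
// createInt are assumed ConstantOperator factory methods, mirroring the
// datetime case above.
@Test
public void monthsAddOnDate() {
    ConstantOperator date = ConstantOperator.createDate(LocalDateTime.of(2015, 3, 23, 0, 0));
    assertEquals("2015-04-23T00:00",
            ScalarOperatorFunctions.monthsAdd(date, ConstantOperator.createInt(1)).getDate().toString());
}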
|