Columns: focal_method (string, lengths 13 to 60.9k) · test_case (string, lengths 25 to 109k)
public Optional<String> getErrorMessage() { return error != null ? Optional.ofNullable(getRootCause(error).getMessage()) : Optional.empty(); }
@Test public void getErrorMessage_returns_root_cause_message_if_error() { Exception rootCause = new IOException("fail to connect"); Exception cause = new IOException("nested", rootCause); WebhookDelivery delivery = newBuilderTemplate() .setError(cause) .build(); assertThat(delivery.getErrorMessage()).contains("fail to connect"); }
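The root-cause unwrapping above is what lets the test assert on the innermost message. A minimal sketch of that walk (assuming a Commons-Lang-style getRootCause; the class's actual helper may differ):

Exception root = new IOException("fail to connect");
Exception wrapper = new IOException("nested", root);
Throwable t = wrapper;
while (t.getCause() != null) { t = t.getCause(); } // descend to the innermost cause
System.out.println(t.getMessage()); // prints "fail to connect", matching the assertion above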
public LogAggregationFileController getFileControllerForWrite() { return controllers.getFirst(); }
@Test void testDefaultConfUsed() { Configuration conf = getConf(); conf.unset(YarnConfiguration.NM_REMOTE_APP_LOG_DIR); conf.unset(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX); conf.set(LOG_AGGREGATION_FILE_FORMATS, "TFile"); LogAggregationFileControllerFactory factory = new LogAggregationFileControllerFactory(getConf()); LogAggregationFileController fc = factory.getFileControllerForWrite(); assertEquals("/tmp/logs", fc.getRemoteRootLogDir().toString(), WRONG_ROOT_LOG_DIR_MSG); assertEquals("logs-tfile", fc.getRemoteRootLogDirSuffix(), WRONG_ROOT_LOG_DIR_SUFFIX_MSG); }
public static SignatureData signatureDataFromHex(String hexSignature) throws SignatureException { byte[] sigBytes = Numeric.hexStringToByteArray(hexSignature); byte v; byte[] r, s; if (sigBytes.length == 64) { // EIP-2098; pull the v from the top bit of s and clear it (mask before shifting so the signed byte does not sign-extend) v = (byte) (27 + ((sigBytes[32] & 0xff) >> 7)); sigBytes[32] &= 0x7f; r = Arrays.copyOfRange(sigBytes, 0, 32); s = Arrays.copyOfRange(sigBytes, 32, 64); } else if (sigBytes.length == 65) { r = Arrays.copyOfRange(sigBytes, 0, 32); s = Arrays.copyOfRange(sigBytes, 32, 64); v = sigBytes[64]; } else { throw new SignatureException("invalid signature string"); } // Allow a recid to be used as the v if (v < 27) { if (v == 0 || v == 1) { v = (byte) (v + 27); } else { throw new SignatureException("signature invalid v byte"); } } return new Sign.SignatureData(v, r, s); }
@Test public void testFromHex() throws SignatureException { Sign.SignatureData signatureData = Sign.signatureDataFromHex( "0x0464eee9e2fe1a10ffe48c78b80de1ed8dcf996f3f60955cb2e03cb21903d93006624da478b3f862582e85b31c6a21c6cae2eee2bd50f55c93c4faad9d9c8d7f1c"); Sign.SignatureData expected = new Sign.SignatureData( (byte) 28, Numeric.hexStringToByteArray( "0x0464eee9e2fe1a10ffe48c78b80de1ed8dcf996f3f60955cb2e03cb21903d930"), Numeric.hexStringToByteArray( "0x06624da478b3f862582e85b31c6a21c6cae2eee2bd50f55c93c4faad9d9c8d7f")); assertEquals(signatureData, (expected)); }
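The test exercises the 65-byte branch; the 64-byte EIP-2098 branch packs the recovery bit into the top bit of s. A sketch of that bit math (illustrative values, not taken from the test), which is also why the masked shift above matters in Java:

byte sTop = (byte) 0x80;              // first byte of s with the yParity bit set
int yParity = (sTop & 0xff) >> 7;     // 1; an unmasked (sTop >> 7) would sign-extend to -1
byte v = (byte) (27 + yParity);       // 28
byte cleared = (byte) (sTop & 0x7f);  // 0x00, the byte that actually belongs to s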
public static boolean useCrossRepositoryBlobMounts() { return System.getProperty(CROSS_REPOSITORY_BLOB_MOUNTS) == null || Boolean.getBoolean(CROSS_REPOSITORY_BLOB_MOUNTS); }
@Test public void testUseBlobMounts_true() { System.setProperty(JibSystemProperties.CROSS_REPOSITORY_BLOB_MOUNTS, "true"); Assert.assertTrue(JibSystemProperties.useCrossRepositoryBlobMounts()); }
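The null check matters because Boolean.getBoolean re-reads the same system property and returns true only for the (case-insensitive) string "true". The resulting behavior, sketched:

// System.getProperty(CROSS_REPOSITORY_BLOB_MOUNTS) == null -> true  (first operand short-circuits; mounts on by default)
// property set to "true" (any case)                        -> true  (Boolean.getBoolean parses it)
// property set to "false" or any other value               -> false (Boolean.getBoolean is false for anything but "true")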
@Override public T deserialize(final String topic, final byte[] bytes) { try { if (bytes == null) { return null; } // don't use the JsonSchemaConverter to read this data because // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS, // which is not currently available in the standard converters final JsonNode value = isJsonSchema ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class) : MAPPER.readTree(bytes); final Object coerced = enforceFieldType( "$", new JsonValueContext(value, schema) ); if (LOG.isTraceEnabled()) { LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced); } return SerdeUtils.castToTargetType(coerced, targetType); } catch (final Exception e) { // Clear location in order to avoid logging data, for security reasons if (e instanceof JsonParseException) { ((JsonParseException) e).clearLocation(); } throw new SerializationException( "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e); } }
@Test public void shouldThrowIfCanNotCoerceToBigDecimal() { // Given: final KsqlJsonDeserializer<BigDecimal> deserializer = givenDeserializerForSchema(DecimalUtil.builder(20, 19).build(), BigDecimal.class); final byte[] bytes = serializeJson(BooleanNode.valueOf(true)); // When: final Exception e = assertThrows( SerializationException.class, () -> deserializer.deserialize(SOME_TOPIC, bytes) ); // Then: assertThat(e.getCause(), (hasMessage(startsWith( "Can't convert type. sourceType: BooleanNode, requiredType: DECIMAL(20, 19)")))); }
public double distanceToAsDouble(final IGeoPoint other) { final double lat1 = DEG2RAD * getLatitude(); final double lat2 = DEG2RAD * other.getLatitude(); final double lon1 = DEG2RAD * getLongitude(); final double lon2 = DEG2RAD * other.getLongitude(); return RADIUS_EARTH_METERS * 2 * Math.asin(Math.min(1, Math.sqrt( Math.pow(Math.sin((lat2 - lat1) / 2), 2) + Math.cos(lat1) * Math.cos(lat2) * Math.pow(Math.sin((lon2 - lon1) / 2), 2) ))); }
@Test public void test_distanceTo_Equator() { final double ratioDelta = 1E-10; final int iterations = 100; final double latitude = 0; for (int i = 0; i < iterations; i++) { final double longitude1 = getRandomLongitude(); final double longitude2 = getRandomLongitude(); final GeoPoint target = new GeoPoint(latitude, longitude1); final GeoPoint other = new GeoPoint(latitude, longitude2); final double diff = getCleanLongitudeDiff(longitude1, longitude2); final double expected = GeoConstants.RADIUS_EARTH_METERS * diff * MathConstants.DEG2RAD; if (expected < minimumDistance) { continue; } final double delta = expected * ratioDelta; assertEquals("distance between " + target + " and " + other, expected, target.distanceToAsDouble(other), delta); } }
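The expected value in the equator test falls out of the haversine formula above: with both latitudes zero the cosine factors are 1 and the latitude sine term vanishes, so the distance collapses to R * Δλ in radians. A quick numeric check of that identity (6378137 m is assumed here for RADIUS_EARTH_METERS):

double R = 6378137;                                        // assumed equatorial radius in meters
double dLon = Math.toRadians(90);                          // 90 degrees along the equator
double haversine = R * 2 * Math.asin(Math.sin(dLon / 2));  // the lat terms drop out at latitude 0
System.out.println(haversine);                             // ~1.0018754e7
System.out.println(R * dLon);                              // same value: R times the radian longitude difference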
public static <T> Iterable<T> loadServicesOrdered(Class<T> iface, ClassLoader classLoader) { ServiceLoader<T> loader = ServiceLoader.load(iface, classLoader); ImmutableSortedSet.Builder<T> builder = new ImmutableSortedSet.Builder<>(ObjectsClassComparator.INSTANCE); builder.addAll(loader); return builder.build(); }
@Test public void testLoadServicesOrderedReordersClassesByName() { List<String> names = new ArrayList<>(); for (FakeService service : ReflectHelpers.loadServicesOrdered(FakeService.class)) { names.add(service.getName()); } assertThat(names, contains("Alpha", "Zeta")); }
public void writeNumDecreasing(long value) { // Values are encoded with a complemented single byte length prefix, // followed by the complement of the actual value in big-endian format with // leading 0xff bytes dropped. byte[] buffer = new byte[9]; // 8 bytes for value plus one byte for length int len = 0; while (value != 0) { len++; buffer[9 - len] = (byte) ~(value & 0xff); value >>>= 8; } buffer[9 - len - 1] = (byte) ~len; len++; byte[] encodedArray = new byte[len]; System.arraycopy(buffer, 9 - len, encodedArray, 0, len); encodedArrays.add(encodedArray); }
@Test public void testWriteNumDecreasing() { OrderedCode orderedCode = new OrderedCode(); orderedCode.writeNumDecreasing(0); orderedCode.writeNumDecreasing(1); orderedCode.writeNumDecreasing(Long.MIN_VALUE); orderedCode.writeNumDecreasing(Long.MAX_VALUE); assertEquals(0, orderedCode.readNumDecreasing()); assertEquals(1, orderedCode.readNumDecreasing()); assertEquals(Long.MIN_VALUE, orderedCode.readNumDecreasing()); assertEquals(Long.MAX_VALUE, orderedCode.readNumDecreasing()); }
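Tracing writeNumDecreasing(1) through the method above makes the complement scheme concrete, and shows why larger values sort first (a worked byte trace, not library output):

// writeNumDecreasing(1): one significant byte, so len = 1
//   value byte:  ~(1 & 0xff) = 0xfe
//   length byte: ~1          = 0xfe   -> encoded as { 0xfe, 0xfe }
// writeNumDecreasing(2) encodes to { 0xfe, 0xfd }, which sorts lexicographically *before*
// { 0xfe, 0xfe }: complementing flips the order, giving the decreasing sort the class promises.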
@Override public void read(ChannelHandlerContext ctx) throws Exception { if (dequeue(ctx, 1) == 0) { // It seems no messages were consumed. We need to read() some // messages from upstream and once one arrives it needs to be // relayed downstream to keep the flow going. shouldConsume = true; ctx.read(); } else if (config.isAutoRead()) { ctx.read(); } }
@Test public void testSwallowedReadComplete() throws Exception { final long delayMillis = 100; final Queue<IdleStateEvent> userEvents = new LinkedBlockingQueue<IdleStateEvent>(); final EmbeddedChannel channel = new EmbeddedChannel(false, false, new FlowControlHandler(), new IdleStateHandler(delayMillis, 0, 0, MILLISECONDS), new ChannelInboundHandlerAdapter() { @Override public void channelActive(ChannelHandlerContext ctx) { ctx.fireChannelActive(); ctx.read(); } @Override public void channelRead(ChannelHandlerContext ctx, Object msg) { ctx.fireChannelRead(msg); ctx.read(); } @Override public void channelReadComplete(ChannelHandlerContext ctx) { ctx.fireChannelReadComplete(); ctx.read(); } @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) { if (evt instanceof IdleStateEvent) { userEvents.add((IdleStateEvent) evt); } ctx.fireUserEventTriggered(evt); } } ); channel.config().setAutoRead(false); assertFalse(channel.config().isAutoRead()); channel.register(); // Reset read timeout by some message assertTrue(channel.writeInbound(Unpooled.EMPTY_BUFFER)); channel.flushInbound(); assertEquals(Unpooled.EMPTY_BUFFER, channel.readInbound()); // Emulate 'no more messages in NIO channel' on the next read attempt. channel.flushInbound(); assertNull(channel.readInbound()); Thread.sleep(delayMillis + 20L); channel.runPendingTasks(); assertEquals(IdleStateEvent.FIRST_READER_IDLE_STATE_EVENT, userEvents.poll()); assertFalse(channel.finish()); }
@Nullable public static Map<String, Set<FieldConfig.IndexType>> getSkipIndexes(Map<String, String> queryOptions) { // Example config: skipIndexes='col1=inverted,range&col2=inverted' String skipIndexesStr = queryOptions.get(QueryOptionKey.SKIP_INDEXES); if (skipIndexesStr == null) { return null; } String[] perColumnIndexSkip = StringUtils.split(skipIndexesStr, '&'); Map<String, Set<FieldConfig.IndexType>> skipIndexes = new HashMap<>(); for (String columnConf : perColumnIndexSkip) { String[] conf = StringUtils.split(columnConf, '='); if (conf.length != 2) { throw new RuntimeException("Invalid format for " + QueryOptionKey.SKIP_INDEXES + ". Example of valid format: SET skipIndexes='col1=inverted,range&col2=inverted'"); } String columnName = conf[0]; String[] indexTypes = StringUtils.split(conf[1], ','); for (String indexType : indexTypes) { skipIndexes.computeIfAbsent(columnName, k -> new HashSet<>()) .add(FieldConfig.IndexType.valueOf(indexType.toUpperCase())); } } return skipIndexes; }
@Test(expectedExceptions = RuntimeException.class) public void testSkipIndexesParsingInvalid() { String skipIndexesStr = "col1=inverted,range&col2"; Map<String, String> queryOptions = Map.of(CommonConstants.Broker.Request.QueryOptionKey.SKIP_INDEXES, skipIndexesStr); QueryOptionsUtils.getSkipIndexes(queryOptions); }
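For a well-formed option string, the loop above builds a column-to-index-types map; roughly (a sketch using the method's own example config):

Map<String, String> opts = Map.of(QueryOptionKey.SKIP_INDEXES, "col1=inverted,range&col2=inverted");
Map<String, Set<FieldConfig.IndexType>> skip = QueryOptionsUtils.getSkipIndexes(opts);
// skip -> { col1 = [INVERTED, RANGE], col2 = [INVERTED] }
// The test's "col1=inverted,range&col2" throws because "col2" splits on '=' into 1 token, not 2.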
public static boolean equals(FlatRecordTraversalObjectNode left, FlatRecordTraversalObjectNode right) { if (left == null && right == null) { return true; } if (left == null || right == null) { return false; } if (!left.getSchema().getName().equals(right.getSchema().getName())) { return false; } extractCommonObjectSchema(left, right); return compare(left, right); }
@Test public void shouldFindRecordsUnequalOnTheSameDataModelWithAnObjectFieldNotSetOnOne() { HollowSchemaIdentifierMapper schemaMapper = new FakeHollowIdentifierMapper(); HollowObjectMapper objectMapper = new HollowObjectMapper(new HollowWriteStateEngine()); objectMapper.initializeTypeState(RecordWithSubObject1.class); objectMapper.initializeTypeState(RecordSubObject.class); FlatRecordWriter flatRecordWriter = new FlatRecordWriter(objectMapper.getStateEngine(), schemaMapper); RecordWithSubObject1 left = new RecordWithSubObject1(); left.id = "ID"; left.intField = 1; left.subObject = new RecordSubObject(); left.subObject.stringField = "A"; left.subObject.intField = 1; flatRecordWriter.reset(); objectMapper.writeFlat(left, flatRecordWriter); FlatRecord leftRec = flatRecordWriter.generateFlatRecord(); RecordWithSubObject1 right = new RecordWithSubObject1(); right.id = "ID"; right.intField = 1; flatRecordWriter.reset(); objectMapper.writeFlat(right, flatRecordWriter); FlatRecord rightRec = flatRecordWriter.generateFlatRecord(); // Even with fuzzy matching, these records are not equal because "right" does not set `subObject`, // which is defined in both schemas assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(leftRec), new FlatRecordTraversalObjectNode(rightRec))).isFalse(); assertThat(FlatRecordTraversalObjectNodeEquality.equals(new FlatRecordTraversalObjectNode(rightRec), new FlatRecordTraversalObjectNode(leftRec))).isFalse(); }
public static String maskJson(String input, String key) { if(input == null) return null; DocumentContext ctx = JsonPath.parse(input); return maskJson(ctx, key); }
@Test public void testMaskResponseBody() { String input = "{\"name\":\"Steve\",\n" + "\"list\":[\n" + " {\"name\": \"Nick\"},\n" + " {\n" + " \"name\": \"Wen\",\n" + " \"accounts\": [\"1\", \"2\", \"3\"]\n" + " },\n" + " {\n" + " \"name\": \"Steve\",\n" + " \"accounts\": [\"4\", \"5\", \"666666\"]\n" + " },\n" + " \"secret1\", \"secret2\"],\n" + "\"list1\": [\"1\", \"333\", \"55555\"],\n" + "\"password\":\"secret\"}"; // String input = "{\"name\":\"Steve\",\"list\":[\"secret1\", \"secret2\"],\"password\":\"secret\"}"; String output = Mask.maskJson(input, "test2"); System.out.println(output); Assert.assertEquals(JsonPath.parse(output).read("$.list[2].accounts[2]"), "******"); Assert.assertEquals(JsonPath.parse(output).read("$.list[1].name"), "***"); Assert.assertEquals(JsonPath.parse(output).read("$.list1[2]"), "*****"); Assert.assertEquals(JsonPath.parse(output).read("$.password"), "******"); }
@Override public AppResponse process(Flow flow, ActivateWithCodeRequest request) throws SharedServiceClientException { digidClient.remoteLog("1092", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); if (appAuthenticator.getCreatedAt().isBefore(ZonedDateTime.now().minusDays(Integer.parseInt(appAuthenticator.getGeldigheidstermijn())))) { digidClient.remoteLog("90", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); return new EnterActivationResponse("expired", Map.of(DAYS_VALID, Integer.valueOf(appAuthenticator.getGeldigheidstermijn()))); } if (correctActivationCode(request.getActivationCode()) && digidClient.activateAccount(appSession.getAccountId(), appAuthenticator.getIssuerType()).get(lowerUnderscore(STATUS)).equals("OK")) { ((ActivationFlow) flow).activateApp(appAuthenticator, appSession); attemptService.removeAttemptsForAppAuthenticator(appAuthenticator, "activation"); return new OkResponse(); } else if (attemptService.registerFailedAttempt(appAuthenticator, "activation")) { digidClient.remoteLog("87", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); if(appAuthenticator.getStatus().equals("pending")) appAuthenticatorService.destroyExistingAppsByInstanceId(appAuthenticator.getInstanceId()); appSession.setState("CANCELLED"); appSessionService.save(appSession); setValid(false); return new StatusResponse(BLOCKED); } else { digidClient.remoteLog("88", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); var letterSent = digidClient.letterSendDate((appSession.getRegistrationId())); return new EnterActivationResponse(INVALID, Map.of(REMAINING_ATTEMPTS, attemptService.remainingAttempts(appAuthenticator, "activation"), DATE_LETTER_SENT, letterSent.get("date"))); } }
@Test void activationCodeBlockedResponse() throws SharedServiceClientException { //given mockedAppAuthenticator.setCreatedAt(ZonedDateTime.now()); mockedAppAuthenticator.setActivationCode("3"); when(attemptService.registerFailedAttempt(mockedAppAuthenticator, "activation")).thenReturn(true); mockedAppAuthenticator.setStatus("none"); //when AppResponse appResponse = enterActivationCode.process(mockedFlow, activateWithCodeRequest); //then assertTrue(appResponse instanceof StatusResponse); assertEquals("BLOCKED", ((StatusResponse) appResponse).getStatus()); }
int getCollectionSize( BeanInjectionInfo.Property property, Object obj ) { BeanLevelInfo beanLevelInfo = getFinalPath( property ); return getCollectionSize( beanLevelInfo, obj ); }
@Test public void getCollectionSize_Property_List() throws Exception { BeanInjector bi = new BeanInjector(null ); BeanInjectionInfo bii = new BeanInjectionInfo( MetaBeanLevel1.class ); MetaBeanLevel1 mbl1 = new MetaBeanLevel1(); mbl1.setSub( new MetaBeanLevel2() ); mbl1.getSub().setAscending( Arrays.asList( new Boolean[] { true, false, false, true} ) ); BeanInjectionInfo.Property property = bii.getProperties().values().stream() .filter( p -> p.getName().equals( "ASCENDING_LIST" ) ).findFirst().orElse( null ); assertEquals(4, bi.getCollectionSize( property, mbl1.getSub() ) ); }
public static MemberLookup switchLookup(String name, ServerMemberManager memberManager) throws NacosException { LookupType lookupType = LookupType.sourceOf(name); if (Objects.isNull(lookupType)) { throw new IllegalArgumentException( "The addressing mode does not exist: " + name + ", only supports: [" + Arrays.toString(LookupType.values()) + "]"); } if (Objects.equals(currentLookupType, lookupType)) { return LOOK_UP; } MemberLookup newLookup = find(lookupType); currentLookupType = lookupType; if (Objects.nonNull(LOOK_UP)) { LOOK_UP.destroy(); } LOOK_UP = newLookup; LOOK_UP.injectMemberManager(memberManager); Loggers.CLUSTER.info("Current addressing mode selection : {}", LOOK_UP.getClass().getSimpleName()); return LOOK_UP; }
@Test void testSwitchLookup() throws Exception { EnvUtil.setIsStandalone(false); createLookUpFileConfigMemberLookup(); EnvUtil.setIsStandalone(false); String name1 = "file"; MemberLookup memberLookup = LookupFactory.switchLookup(name1, memberManager); assertEquals(FileConfigMemberLookup.class, memberLookup.getClass()); createLookUpAddressServerMemberLookup(); String name2 = "address-server"; memberLookup = LookupFactory.switchLookup(name2, memberManager); assertEquals(AddressServerMemberLookup.class, memberLookup.getClass()); createLookUpStandaloneMemberLookup(); String name3 = "address-server"; memberLookup = LookupFactory.switchLookup(name3, memberManager); assertEquals(StandaloneMemberLookup.class, memberLookup.getClass()); }
public Page<Certificate> searchAll(CertSearchRequest request, int pageIndex, int pageSize) { return certificateRepository.searchAll(request, PageRequest.of(pageIndex, pageSize)); }
@Test public void searchAll() { CertSearchRequest csr = new CertSearchRequest(); when(certificateRepositoryMock.searchAll(csr, PageRequest.of(1, 10))).thenReturn(getPageCertificates()); Page<Certificate> result = certificateServiceMock.searchAll(csr, 1, 10); assertNotNull(result); }
public static Range<Comparable<?>> safeIntersection(final Range<Comparable<?>> range, final Range<Comparable<?>> connectedRange) { try { return range.intersection(connectedRange); } catch (final ClassCastException ex) { Class<?> clazz = getRangeTargetNumericType(range, connectedRange); if (null == clazz) { throw ex; } Range<Comparable<?>> newRange = createTargetNumericTypeRange(range, clazz); Range<Comparable<?>> newConnectedRange = createTargetNumericTypeRange(connectedRange, clazz); return newRange.intersection(newConnectedRange); } }
@Test void assertSafeIntersectionForFloat() { Range<Comparable<?>> range = Range.closed(5.5F, 13.8F); Range<Comparable<?>> connectedRange = Range.closed(7.14F, 11.3F); Range<Comparable<?>> newRange = SafeNumberOperationUtils.safeIntersection(range, connectedRange); assertThat(newRange.lowerEndpoint(), is(7.14F)); assertThat(newRange.lowerBoundType(), is(BoundType.CLOSED)); assertThat(newRange.upperEndpoint(), is(11.3F)); assertThat(newRange.upperBoundType(), is(BoundType.CLOSED)); }
static KafkaNodePoolTemplate convertTemplate(KafkaClusterTemplate template) { if (template != null) { return new KafkaNodePoolTemplateBuilder() .withPodSet(template.getPodSet()) .withPod(template.getPod()) .withPerPodService(template.getPerPodService()) .withPerPodRoute(template.getPerPodRoute()) .withPerPodIngress(template.getPerPodIngress()) .withPersistentVolumeClaim(template.getPersistentVolumeClaim()) .withKafkaContainer(template.getKafkaContainer()) .withInitContainer(template.getInitContainer()) .build(); } else { return null; } }
@Test public void testConvertTemplateWithAllValues() { KafkaClusterTemplate kafkaTemplate = new KafkaClusterTemplateBuilder() .withNewInitContainer() .addToEnv(new ContainerEnvVarBuilder().withName("MY_INIT_ENV_VAR").withValue("my-init-env-var-value").build()) .endInitContainer() .withNewKafkaContainer() .addToEnv(new ContainerEnvVarBuilder().withName("MY_ENV_VAR").withValue("my-env-var-value").build()) .endKafkaContainer() .withNewPod() .withTmpDirSizeLimit("100Mi") .endPod() .withNewPodSet() .withNewMetadata() .addToAnnotations(Map.of("custom-podset-anno", "custom-podset-anno-value")) .endMetadata() .endPodSet() .withNewPerPodService() .withNewMetadata() .addToAnnotations(Map.of("custom-service-anno", "custom-service-anno-value")) .endMetadata() .endPerPodService() .withNewPerPodIngress() .withNewMetadata() .addToAnnotations(Map.of("custom-ingress-anno", "custom-ingress-anno-value")) .endMetadata() .endPerPodIngress() .withNewPerPodRoute() .withNewMetadata() .addToAnnotations(Map.of("custom-route-anno", "custom-route-anno-value")) .endMetadata() .endPerPodRoute() .withNewPersistentVolumeClaim() .withNewMetadata() .addToAnnotations(Map.of("custom-pvc-anno", "custom-pvc-anno-value")) .endMetadata() .endPersistentVolumeClaim() .build(); KafkaNodePoolTemplate template = VirtualNodePoolConverter.convertTemplate(kafkaTemplate); assertThat(template, is(notNullValue())); assertThat(template.getInitContainer(), is(notNullValue())); assertThat(template.getInitContainer().getEnv().size(), is(1)); assertThat(template.getInitContainer().getEnv().get(0).getName(), is("MY_INIT_ENV_VAR")); assertThat(template.getInitContainer().getEnv().get(0).getValue(), is("my-init-env-var-value")); assertThat(template.getPodSet(), is(notNullValue())); assertThat(template.getPodSet().getMetadata().getAnnotations(), is(Map.of("custom-podset-anno", "custom-podset-anno-value"))); assertThat(template.getPod(), is(notNullValue())); assertThat(template.getPod().getTmpDirSizeLimit(), is("100Mi")); assertThat(template.getPerPodService(), is(notNullValue())); assertThat(template.getPerPodService().getMetadata().getAnnotations(), is(Map.of("custom-service-anno", "custom-service-anno-value"))); assertThat(template.getPerPodRoute(), is(notNullValue())); assertThat(template.getPerPodRoute().getMetadata().getAnnotations(), is(Map.of("custom-route-anno", "custom-route-anno-value"))); assertThat(template.getPerPodIngress(), is(notNullValue())); assertThat(template.getPerPodIngress().getMetadata().getAnnotations(), is(Map.of("custom-ingress-anno", "custom-ingress-anno-value"))); assertThat(template.getKafkaContainer(), is(notNullValue())); assertThat(template.getKafkaContainer().getEnv().size(), is(1)); assertThat(template.getKafkaContainer().getEnv().get(0).getName(), is("MY_ENV_VAR")); assertThat(template.getKafkaContainer().getEnv().get(0).getValue(), is("my-env-var-value")); assertThat(template.getPersistentVolumeClaim(), is(notNullValue())); assertThat(template.getPersistentVolumeClaim().getMetadata().getAnnotations(), is(Map.of("custom-pvc-anno", "custom-pvc-anno-value"))); }
@Override public Object evaluate(final ProcessingDTO processingDTO) { Number input = (Number) getFromPossibleSources(name, processingDTO) .orElse(mapMissingTo); if (input == null) { throw new KiePMMLException("Failed to retrieve input number for " + name); } return evaluate(input); }
@Test void evaluateWithExpectedValue() { KiePMMLNormContinuous kiePMMLNormContinuous = getKiePMMLNormContinuous(null, null, null); Number input = 24; Number retrieved = kiePMMLNormContinuous.evaluate(input); Number expected = kiePMMLNormContinuous.linearNorms.get(0).getNorm() + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(0).getOrig()) / (kiePMMLNormContinuous.linearNorms.get(1).getOrig() - kiePMMLNormContinuous.linearNorms.get(0).getOrig())) * (kiePMMLNormContinuous.linearNorms.get(1).getNorm() - kiePMMLNormContinuous.linearNorms.get(0).getNorm()); assertThat(retrieved).isEqualTo(expected); input = 28; expected = kiePMMLNormContinuous.linearNorms.get(0).getNorm() + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(0).getOrig()) / (kiePMMLNormContinuous.linearNorms.get(1).getOrig() - kiePMMLNormContinuous.linearNorms.get(0).getOrig())) * (kiePMMLNormContinuous.linearNorms.get(1).getNorm() - kiePMMLNormContinuous.linearNorms.get(0).getNorm()); retrieved = kiePMMLNormContinuous.evaluate(input); assertThat(retrieved).isEqualTo(expected); input = 30; retrieved = kiePMMLNormContinuous.evaluate(input); expected = kiePMMLNormContinuous.linearNorms.get(1).getNorm() + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(1).getOrig()) / (kiePMMLNormContinuous.linearNorms.get(2).getOrig() - kiePMMLNormContinuous.linearNorms.get(1).getOrig())) * (kiePMMLNormContinuous.linearNorms.get(2).getNorm() - kiePMMLNormContinuous.linearNorms.get(1).getNorm()); assertThat(retrieved).isEqualTo(expected); input = 31; retrieved = kiePMMLNormContinuous.evaluate(input); expected = kiePMMLNormContinuous.linearNorms.get(1).getNorm() + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(1).getOrig()) / (kiePMMLNormContinuous.linearNorms.get(2).getOrig() - kiePMMLNormContinuous.linearNorms.get(1).getOrig())) * (kiePMMLNormContinuous.linearNorms.get(2).getNorm() - kiePMMLNormContinuous.linearNorms.get(1).getNorm()); assertThat(retrieved).isEqualTo(expected); input = 36; retrieved = kiePMMLNormContinuous.evaluate(input); expected = kiePMMLNormContinuous.linearNorms.get(2).getNorm() + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(2).getOrig()) / (kiePMMLNormContinuous.linearNorms.get(3).getOrig() - kiePMMLNormContinuous.linearNorms.get(2).getOrig())) * (kiePMMLNormContinuous.linearNorms.get(3).getNorm() - kiePMMLNormContinuous.linearNorms.get(2).getNorm()); assertThat(retrieved).isEqualTo(expected); input = 37; retrieved = kiePMMLNormContinuous.evaluate(input); expected = kiePMMLNormContinuous.linearNorms.get(2).getNorm() + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(2).getOrig()) / (kiePMMLNormContinuous.linearNorms.get(3).getOrig() - kiePMMLNormContinuous.linearNorms.get(2).getOrig())) * (kiePMMLNormContinuous.linearNorms.get(3).getNorm() - kiePMMLNormContinuous.linearNorms.get(2).getNorm()); assertThat(retrieved).isEqualTo(expected); input = 40; retrieved = kiePMMLNormContinuous.evaluate(input); expected = kiePMMLNormContinuous.linearNorms.get(2).getNorm() + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(2).getOrig()) / (kiePMMLNormContinuous.linearNorms.get(3).getOrig() - kiePMMLNormContinuous.linearNorms.get(2).getOrig())) * (kiePMMLNormContinuous.linearNorms.get(3).getNorm() - kiePMMLNormContinuous.linearNorms.get(2).getNorm()); assertThat(retrieved).isEqualTo(expected); }
protected void addService(Service service) { if (LOG.isDebugEnabled()) { LOG.debug("Adding service " + service.getName()); } synchronized (serviceList) { serviceList.add(service); } }
@Test(timeout = 10000) public void testAddInitedSiblingInStop() throws Throwable { CompositeService parent = new CompositeService("parent"); BreakableService sibling = new BreakableService(); sibling.init(new Configuration()); parent.addService(new AddSiblingService(parent, sibling, STATE.STOPPED)); parent.init(new Configuration()); }
public static DefaultProcessCommands main(File directory, int processNumber) { return new DefaultProcessCommands(directory, processNumber, true); }
@Test public void fail_to_init_if_dir_does_not_exist() throws Exception { File dir = temp.newFolder(); FileUtils.deleteQuietly(dir); try { DefaultProcessCommands.main(dir, PROCESS_NUMBER); fail(); } catch (IllegalArgumentException e) { assertThat(e).hasMessage("Not a valid directory: " + dir.getAbsolutePath()); } }
public static int setBytes(byte[] bytes, int index, byte[] values, int offset, int length) { requireNonNull(bytes, "bytes is null"); requireNonNull(values, "values is null"); checkValidRange(index, length, bytes.length); checkValidRange(offset, length, values.length); // The performance of one copy and two copies (one big chunk at 8 bytes boundary + rest) are about the same. unsafe.copyMemory(values, (long) offset + ARRAY_BYTE_BASE_OFFSET, bytes, (long) index + ARRAY_BYTE_BASE_OFFSET, length); return index + length; }
@Test public static void testSetBytes() { byte[] destination = new byte[POSITIONS_PER_PAGE]; byte[] source = new byte[POSITIONS_PER_PAGE]; ThreadLocalRandom.current().nextBytes(source); int setBytes = setBytes(destination, 0, source, 0, POSITIONS_PER_PAGE); assertEquals(setBytes, POSITIONS_PER_PAGE); assertCopied(Bytes.asList(source), destination, ARRAY_BYTE_INDEX_SCALE, MoreByteArrays::getByte); }
@Override public SuspensionReasons verifyGroupGoingDownIsFine(ClusterApi clusterApi) throws HostStateChangeDeniedException { return verifyGroupGoingDownIsFine(clusterApi, false); }
@Test public void verifyGroupGoingDownIsFine_noServicesInGroupIsUp() throws HostStateChangeDeniedException { var reasons = new SuspensionReasons().addReason(new HostName("host1"), "suspension reason 1"); verifyGroupGoingDownIsFine(false, Optional.of(reasons), 13, true); }
@Override public void delete(City domain) { cityRepository.delete(domain); }
@Test void delete() throws ElementNotFoundException { City expected = createCity(); cityService.delete(expected); Mockito.verify(cityRepository).delete(expected); }
@Override public boolean isInSameDatabaseInstance(final ConnectionProperties connectionProps) { return hostname.equals(connectionProps.getHostname()) && port == connectionProps.getPort(); }
@Test void assertIsNotInSameDatabaseInstanceWithDifferentPort() { assertFalse(new StandardConnectionProperties("127.0.0.1", 9999, "foo", "foo") .isInSameDatabaseInstance(new StandardConnectionProperties("127.0.0.1", 8888, "foo", "foo"))); }
@Override public PageResult<FileDO> getFilePage(FilePageReqVO pageReqVO) { return fileMapper.selectPage(pageReqVO); }
@Test public void testGetFilePage() { // mock data FileDO dbFile = randomPojo(FileDO.class, o -> { // matched by the query below o.setPath("yunai"); o.setType("image/jpg"); o.setCreateTime(buildTime(2021, 1, 15)); }); fileMapper.insert(dbFile); // test: path does not match fileMapper.insert(ObjectUtils.cloneIgnoreId(dbFile, o -> o.setPath("tudou"))); // test: type does not match fileMapper.insert(ObjectUtils.cloneIgnoreId(dbFile, o -> { o.setType("image/png"); })); // test: createTime does not match fileMapper.insert(ObjectUtils.cloneIgnoreId(dbFile, o -> { o.setCreateTime(buildTime(2020, 1, 15)); })); // prepare parameters FilePageReqVO reqVO = new FilePageReqVO(); reqVO.setPath("yunai"); reqVO.setType("jp"); reqVO.setCreateTime((new LocalDateTime[]{buildTime(2021, 1, 10), buildTime(2021, 1, 20)})); // invoke PageResult<FileDO> pageResult = fileService.getFilePage(reqVO); // assert assertEquals(1, pageResult.getTotal()); assertEquals(1, pageResult.getList().size()); AssertUtils.assertPojoEquals(dbFile, pageResult.getList().get(0)); }
public static UReturn create(UExpression expression) { return new AutoValue_UReturn(expression); }
@Test public void equality() { new EqualsTester() .addEqualityGroup(UReturn.create(ULiteral.stringLit("foo"))) .addEqualityGroup(UReturn.create(ULiteral.intLit(5))) .addEqualityGroup(UReturn.create(null)) .testEquals(); }
public boolean isPercentageCoupon() { return this.policy.isDiscountPercentage(); }
@Test void 할인율_쿠폰이면_true를_반환한다() { // given Coupon coupon = 쿠픈_생성_독자_사용_할인율_10_퍼센트(); // when boolean result = coupon.isPercentageCoupon(); // then assertThat(result).isTrue(); }
@Override public long until(Temporal endExclusive, TemporalUnit unit) { return offsetTime.until(endExclusive, unit); }
@Test void until() { ZoneTime endExclusive = ZoneTime.of(DateTimeFormatter.ISO_TIME.parse("09:34:31", LocalTime::from), zoneId, false); long expected = offsetTime.until(endExclusive, ChronoUnit.SECONDS); long retrieved = zoneTime.until(endExclusive, ChronoUnit.SECONDS); assertEquals(expected, retrieved); }
public Long getEndTime() { return endTime; }
@Test public void testGetEndTime() { // Test the getEndTime method assertEquals(1234567890L, event.getEndTime().longValue()); }
public static byte[] uncompress(final byte[] src) throws IOException { byte[] result; byte[] uncompressData = new byte[src.length]; ByteArrayInputStream bis = new ByteArrayInputStream(src); GZIPInputStream iis = new GZIPInputStream(bis); ByteArrayOutputStream bos = new ByteArrayOutputStream(src.length); try { while (true) { int len = iis.read(uncompressData, 0, uncompressData.length); if (len <= 0) { break; } bos.write(uncompressData, 0, len); } bos.flush(); result = bos.toByteArray(); } finally { IOUtil.close(bis, iis, bos); } return result; }
@Test public void testUncompress() throws IOException { Assertions.assertArrayEquals(originBytes, CompressUtil.uncompress(compressedBytes1)); Assertions.assertArrayEquals(originBytes, CompressUtil.uncompress(compressedBytes2)); }
public static boolean isSameDay(long date1, long date2) { Calendar cal1 = Calendar.getInstance(); Calendar cal2 = Calendar.getInstance(); cal1.setTimeInMillis(date1); cal2.setTimeInMillis(date2); return cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR) && cal1.get(Calendar.DAY_OF_YEAR) == cal2.get (Calendar.DAY_OF_YEAR); }
@Test public void isSameDay() { long today = Calendar.getInstance().getTimeInMillis(); long tomorrow = today + (1000 * 60 * 60 * 24); assertTrue(DateUtils.isSameDay(today, today)); assertFalse(DateUtils.isSameDay(today, tomorrow)); }
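isSameDay compares calendar fields in the JVM default zone, so it is a calendar-day check, not a 24-hour-window check. An equivalent java.time sketch (a behavioral rewrite, not the library's code):

import java.time.Instant;
import java.time.ZoneId;

static boolean isSameDay(long t1, long t2) {
    ZoneId zone = ZoneId.systemDefault(); // mirrors Calendar.getInstance()
    return Instant.ofEpochMilli(t1).atZone(zone).toLocalDate()
            .equals(Instant.ofEpochMilli(t2).atZone(zone).toLocalDate());
}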
@Override public AnalysisPhase getAnalysisPhase() { return ANALYSIS_PHASE; }
@Test public void testGetAnalysisPhase() { JarAnalyzer instance = new JarAnalyzer(); AnalysisPhase expResult = AnalysisPhase.INFORMATION_COLLECTION; AnalysisPhase result = instance.getAnalysisPhase(); assertEquals(expResult, result); }
@Override public Map<String, Integer> getIdPositionMap() { readLock.lock(); try { // asMap is sorted by key var keyObjectMap = getKeyObjectMap(); int i = 0; var idPositionMap = new HashMap<String, Integer>(); for (var valueIdsEntry : keyObjectMap.entrySet()) { var ids = valueIdsEntry.getValue(); for (String id : ids) { idPositionMap.put(id, i); } i++; } return idPositionMap; } finally { readLock.unlock(); } }
@Test void getIdPositionMapTest() { var indexView = createCommentIndexView(); var topIndexEntry = prepareForPositionMapTest(indexView, "spec.top"); var topIndexEntryFromView = indexView.getIndexEntry("spec.top"); assertThat(topIndexEntry.getIdPositionMap()) .isEqualTo(topIndexEntryFromView.getIdPositionMap()); var creationTimeIndexEntry = prepareForPositionMapTest(indexView, "spec.creationTime"); var creationTimeIndexEntryFromView = indexView.getIndexEntry("spec.creationTime"); assertThat(creationTimeIndexEntry.getIdPositionMap()) .isEqualTo(creationTimeIndexEntryFromView.getIdPositionMap()); var priorityIndexEntry = prepareForPositionMapTest(indexView, "spec.priority"); var priorityIndexEntryFromView = indexView.getIndexEntry("spec.priority"); assertThat(priorityIndexEntry.getIdPositionMap()) .isEqualTo(priorityIndexEntryFromView.getIdPositionMap()); }
@Override public <VR> KTable<Windowed<K>, VR> aggregate(final Initializer<VR> initializer, final Aggregator<? super K, ? super V, VR> aggregator) { return aggregate(initializer, aggregator, Materialized.with(keySerde, null)); }
@Test public void shouldDropWindowsOutsideOfRetention() { final WindowBytesStoreSupplier storeSupplier = Stores.inMemoryWindowStore("aggregated", ofMillis(1200L), ofMillis(100L), false); windowedStream.aggregate( MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.<String, String>as(storeSupplier) .withKeySerde(Serdes.String()) .withValueSerde(Serdes.String()) .withCachingDisabled()); try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) { final TestInputTopic<String, String> inputTopic = driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer()); inputTopic.pipeInput("1", "2", 100L); inputTopic.pipeInput("1", "3", 500L); inputTopic.pipeInput("1", "4", 799L); inputTopic.pipeInput("1", "4", 1000L); inputTopic.pipeInput("1", "5", 2000L); { final WindowStore<String, String> windowStore = driver.getWindowStore("aggregated"); final List<KeyValue<Windowed<String>, String>> data = StreamsTestUtils.toList(windowStore.fetch("1", "1", ofEpochMilli(0), ofEpochMilli(10000L))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(900, 1000)), "0+4"), KeyValue.pair(new Windowed<>("1", new TimeWindow(1900, 2000)), "0+5")))); } { final WindowStore<String, ValueAndTimestamp<Long>> windowStore = driver.getTimestampedWindowStore("aggregated"); final List<KeyValue<Windowed<String>, ValueAndTimestamp<Long>>> data = StreamsTestUtils.toList(windowStore.fetch("1", "1", ofEpochMilli(0), ofEpochMilli(2000L))); assertThat(data, equalTo(Arrays.asList( KeyValue.pair(new Windowed<>("1", new TimeWindow(900, 1000)), ValueAndTimestamp.make("0+4", 1000L)), KeyValue.pair(new Windowed<>("1", new TimeWindow(1900, 2000)), ValueAndTimestamp.make("0+5", 2000L))))); } } }
public String getValue() { return value; }
@Test public void shouldHandleBooleanValueAsAString() throws Exception { final ConfigurationValue configurationValue = new ConfigurationValue(true); assertThat(configurationValue.getValue(), is("true")); }
public String format() { StringBuilder builder = new StringBuilder(); for (RefeedActions.Entry entry : actions.getEntries()) { builder.append(entry.name() + ": Consider removing data and re-feed document type '" + entry.getDocumentType() + "' in cluster '" + entry.getClusterName() + "' because:\n"); int counter = 1; for (String message : entry.getMessages()) { builder.append(" " + (counter++) + ") " + message + "\n"); } } return builder.toString(); }
@Test public void formatting_of_multiple_actions() { RefeedActions actions = new ConfigChangeActionsBuilder(). refeed(CHANGE_ID, CHANGE_MSG, DOC_TYPE, CLUSTER, SERVICE_NAME). refeed(CHANGE_ID, CHANGE_MSG_2, DOC_TYPE, CLUSTER, SERVICE_NAME). refeed(CHANGE_ID_2, CHANGE_MSG_2, DOC_TYPE, CLUSTER, SERVICE_NAME). refeed(CHANGE_ID_2, CHANGE_MSG_2, DOC_TYPE, CLUSTER, SERVICE_NAME). refeed(CHANGE_ID, CHANGE_MSG_2, DOC_TYPE_2, CLUSTER, SERVICE_NAME). build().getRefeedActions(); assertEquals("field-type-change: Consider removing data and re-feed document type 'book' in cluster 'foo' because:\n" + " 1) other change\n" + "field-type-change: Consider removing data and re-feed document type 'music' in cluster 'foo' because:\n" + " 1) change\n" + " 2) other change\n" + "indexing-change: Consider removing data and re-feed document type 'music' in cluster 'foo' because:\n" + " 1) other change\n", new RefeedActionsFormatter(actions).format()); }
@Override public Object convert(String value) { if (value == null || value.isEmpty()) { return value; } final CSVParser parser = getCsvParser(); final Map<String, String> fields = Maps.newHashMap(); try { final String[] strings = parser.parseLine(value); if (strings.length != fieldNames.length) { LOG.error("Different number of columns in CSV data ({}) and configured field names ({}). Discarding input.", strings.length, fieldNames.length); return null; } for (int i = 0; i < strings.length; i++) { fields.put(fieldNames[i], strings[i]); } } catch (IOException e) { LOG.error("Invalid CSV input, discarding input", e); return null; } return fields; }
@Test public void testSuccessfulConversion() throws ConfigurationException { Map<String, Object> configMap = Maps.newHashMap(); configMap.put("column_header", "f1,f2"); CsvConverter csvConverter = new CsvConverter(configMap); @SuppressWarnings("unchecked") Map<String, String> result = (Map<String, String>) csvConverter.convert("\"content1\",\"cont\\\\ent3\""); assertEquals("content1", result.get("f1")); assertEquals("cont\\ent3", result.get("f2")); }
@Override public Executor getExecutor(URL url) { String name = url.getParameter(THREAD_NAME_KEY, (String) url.getAttribute(THREAD_NAME_KEY, DEFAULT_THREAD_NAME)); int cores = url.getParameter(CORE_THREADS_KEY, DEFAULT_CORE_THREADS); int threads = url.getParameter(THREADS_KEY, Integer.MAX_VALUE); int queues = url.getParameter(QUEUES_KEY, DEFAULT_QUEUES); int alive = url.getParameter(ALIVE_KEY, DEFAULT_ALIVE); BlockingQueue<Runnable> blockingQueue; if (queues == 0) { blockingQueue = new SynchronousQueue<>(); } else if (queues < 0) { blockingQueue = new MemorySafeLinkedBlockingQueue<>(); } else { blockingQueue = new LinkedBlockingQueue<>(queues); } return new ThreadPoolExecutor( cores, threads, alive, TimeUnit.MILLISECONDS, blockingQueue, new NamedInternalThreadFactory(name, true), new AbortPolicyWithReport(name, url)); }
@Test void getExecutor1() throws Exception { URL url = URL.valueOf("dubbo://10.20.130.230:20880/context/path?" + THREAD_NAME_KEY + "=demo&" + CORE_THREADS_KEY + "=1&" + THREADS_KEY + "=2&" + ALIVE_KEY + "=1000&" + QUEUES_KEY + "=0"); ThreadPool threadPool = new CachedThreadPool(); ThreadPoolExecutor executor = (ThreadPoolExecutor) threadPool.getExecutor(url); assertThat(executor.getCorePoolSize(), is(1)); assertThat(executor.getMaximumPoolSize(), is(2)); assertThat(executor.getQueue(), Matchers.<BlockingQueue<Runnable>>instanceOf(SynchronousQueue.class)); assertThat( executor.getRejectedExecutionHandler(), Matchers.<RejectedExecutionHandler>instanceOf(AbortPolicyWithReport.class)); final CountDownLatch latch = new CountDownLatch(1); executor.execute(() -> { Thread thread = Thread.currentThread(); assertThat(thread, instanceOf(InternalThread.class)); assertThat(thread.getName(), startsWith("demo")); latch.countDown(); }); latch.await(); assertThat(latch.getCount(), is(0L)); }
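The queues parameter in getExecutor selects among three queue shapes; the test pins the queues=0 branch. The mapping, sketched:

// queues == 0 -> SynchronousQueue               (direct handoff; what the test asserts)
// queues <  0 -> MemorySafeLinkedBlockingQueue  (effectively unbounded, OOM-guarded)
// queues >  0 -> LinkedBlockingQueue(queues)    (bounded backlog of that capacity)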
public Map<String, Object> getAllLocalProperties() { Map<String, Object> result = new LinkedHashMap<>( connectionPropertySynonyms.getLocalProperties().size() + poolPropertySynonyms.getLocalProperties().size() + customProperties.getProperties().size(), 1F); result.putAll(connectionPropertySynonyms.getLocalProperties()); result.putAll(poolPropertySynonyms.getLocalProperties()); result.putAll(customProperties.getProperties()); return result; }
@Test void assertGetAllLocalProperties() { DataSourcePoolProperties originalProps = new DataSourcePoolProperties(MockedDataSource.class.getName(), getProperties()); Map<String, Object> actualAllProps = originalProps.getAllLocalProperties(); assertThat(actualAllProps.size(), is(7)); assertTrue(actualAllProps.containsKey("driverClassName")); assertTrue(actualAllProps.containsValue(MockedDataSource.class.getName())); assertTrue(actualAllProps.containsKey("jdbcUrl")); assertTrue(actualAllProps.containsValue("jdbc:mock://127.0.0.1/foo_ds")); assertTrue(actualAllProps.containsKey("username")); assertTrue(actualAllProps.containsValue("root")); assertTrue(actualAllProps.containsKey("password")); assertTrue(actualAllProps.containsValue("root")); assertTrue(actualAllProps.containsKey("loginTimeout")); assertTrue(actualAllProps.containsValue("5000")); assertTrue(actualAllProps.containsKey("maximumPoolSize")); assertTrue(actualAllProps.containsValue("30")); assertTrue(actualAllProps.containsKey("idleTimeout")); assertTrue(actualAllProps.containsValue("30000")); }
public static String formatSql(final AstNode root) { final StringBuilder builder = new StringBuilder(); new Formatter(builder).process(root, 0); return StringUtils.stripEnd(builder.toString(), "\n"); }
@Test public void shouldFormatCreateOrReplaceStreamStatement() { // Given: final CreateSourceProperties props = CreateSourceProperties.from( new ImmutableMap.Builder<String, Literal>() .putAll(SOME_WITH_PROPS.copyOfOriginalLiterals()) .build() ); final CreateStream createStream = new CreateStream( TEST, ELEMENTS_WITHOUT_KEY, true, false, props, false); // When: final String sql = SqlFormatter.formatSql(createStream); // Then: assertThat(sql, is("CREATE OR REPLACE STREAM TEST (`Foo` STRING, `Bar` STRING) " + "WITH (KAFKA_TOPIC='topic_test', VALUE_FORMAT='JSON');")); }
@VisibleForTesting static long roundTo(long x, int multiple) { return ((x + multiple - 1) / multiple) * multiple; }
@Test public void testComplexObject() { ComplexObject<Object> l = new ComplexObject<Object>(); l.add(new Object()); l.add(new Object()); l.add(new Object()); long expectedSize = 0; // The complex object itself plus first and last refs. expectedSize += roundTo(mObjectHeaderSize + 2 * mReferenceHeaderSize, 8); // 3 Nodes - each with 3 object references. expectedSize += roundTo(mObjectHeaderSize + 3 * mReferenceHeaderSize, 8) * 3; // 3 vanilla objects contained in the node values. expectedSize += roundTo(mObjectHeaderSize, 8) * 3; assertSizeIs(expectedSize, l); }
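roundTo rounds x up to the next multiple, which is how the test models the JVM's 8-byte object alignment. Worked values:

roundTo(13, 8); // ((13 + 7) / 8) * 8 = (20 / 8) * 8 = 16
roundTo(16, 8); // ((16 + 7) / 8) * 8 = (23 / 8) * 8 = 16  (already aligned, unchanged)
roundTo(17, 8); // 24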
public void undelete() { // make a copy because the selected trash items change as soon as trashService.undelete is called List<UIDeletedObject> selectedTrashFileItemsSnapshot = new ArrayList<UIDeletedObject>( selectedTrashFileItems ); if ( selectedTrashFileItemsSnapshot != null && selectedTrashFileItemsSnapshot.size() > 0 ) { List<ObjectId> ids = new ArrayList<ObjectId>(); for ( UIDeletedObject uiObj : selectedTrashFileItemsSnapshot ) { ids.add( uiObj.getId() ); } try { trashService.undelete( ids ); setTrash( trashService.getTrash() ); for ( UIDeletedObject uiObj : selectedTrashFileItemsSnapshot ) { // find the closest UIRepositoryDirectory that is in the dirMap RepositoryDirectoryInterface dir = repository.findDirectory( uiObj.getOriginalParentPath() ); while ( dir != null && dirMap.get( dir.getObjectId() ) == null ) { dir = dir.getParent(); } // now refresh that UIRepositoryDirectory so that the file/folders deck instantly refreshes on undelete if ( dir != null ) { dirMap.get( dir.getObjectId() ).refresh(); } // if it is a transformation, or a directory containing transformations, call the extension point to restore data service references if ( RepositoryObjectType.TRANSFORMATION.name().equals( uiObj.getType() ) ) { TransMeta transMeta = repository.loadTransformation( uiObj.getId(), null ); ExtensionPointHandler .callExtensionPoint( LogChannel.GENERAL, KettleExtensionPoint.TransAfterOpen.id, transMeta ); transMeta.clearChanged(); } else if ( !RepositoryObjectType.JOB.name().equals( uiObj.getType() ) ) { // if it is not a transformation and not a job, then it is a directory RepositoryDirectoryInterface actualDir = repository.findDirectory( uiObj.getOriginalParentPath() + RepositoryDirectory.DIRECTORY_SEPARATOR + uiObj.getName() ); if ( actualDir != null ) { List<RepositoryElementMetaInterface> transformations = new ArrayList<>(); getAllTransformations( actualDir, transformations ); for ( RepositoryElementMetaInterface repositoryElementMetaInterface : transformations ) { TransMeta transMeta = repository.loadTransformation( repositoryElementMetaInterface.getObjectId(), null ); ExtensionPointHandler .callExtensionPoint( LogChannel.GENERAL, KettleExtensionPoint.TransAfterOpen.id, transMeta ); transMeta.clearChanged(); } } else { displayExceptionMessage( BaseMessages.getString( PKG, "TrashBrowseController.UnableToRestoreDirectory", uiObj.getOriginalParentPath() + RepositoryDirectory.DIRECTORY_SEPARATOR + uiObj.getName() ) ); } } } deck.setSelectedIndex( 1 ); } catch ( Throwable th ) { if ( mainController == null || !mainController.handleLostRepository( th ) ) { displayExceptionMessage( BaseMessages.getString( PKG, "TrashBrowseController.UnableToRestoreFile", th.getLocalizedMessage() ) ); //$NON-NLS-1$ } } } else { // the UI probably allowed the button to be enabled when it shouldn't have been throw new RuntimeException(); } }
@Test public void testExceptionHandle() throws Exception { RuntimeException runtimeException = new RuntimeException( "Exception handle" ); when( selectedTrashFileItemsMock.toArray() ) .thenReturn( new TrashBrowseController.UIDeletedObject[] { uiDirectoryObjectMock } ); doThrow( runtimeException ).when( trashServiceMock ).undelete( anyList() ); doReturn( false ).when( mainControllerMock ).handleLostRepository( any( Throwable.class ) ); trBrController.undelete(); verify( messageBoxMock ).setTitle( "Error" ); verify( messageBoxMock ).setAcceptLabel( "OK" ); verify( messageBoxMock ).setMessage( contains( "Exception handle" ) ); verify( messageBoxMock, times( 1 ) ).open(); verify( deckMock, never() ).setSelectedIndex( 1 ); }
static Supplier supplier( QueryPath[] paths, QueryDataType[] types, UpsertTargetDescriptor descriptor, List<Expression<?>> projection ) { return new Supplier(paths, types, descriptor, projection); }
@Test public void test_supplierSerialization() { InternalSerializationService serializationService = new DefaultSerializationServiceBuilder().build(); Projector.Supplier original = Projector.supplier( new QueryPath[]{QueryPath.create("this.field")}, new QueryDataType[]{QueryDataType.INT}, PrimitiveUpsertTargetDescriptor.INSTANCE, singletonList(ColumnExpression.create(0, QueryDataType.INT)) ); Projector.Supplier serialized = serializationService.toObject(serializationService.toData(original)); assertThat(serialized).isEqualToComparingFieldByField(original); }
public static String extractCharset(String line, String defaultValue) { if (line == null) { return defaultValue; } final String[] parts = line.split(" "); String charsetInfo = ""; for (var part : parts) { if (part.startsWith("charset")) { charsetInfo = part; break; } } final String charset = charsetInfo.replace("charset=", "").replace(";", ""); if (charset.isBlank()) { return defaultValue; } return charset; }
@DisplayName("default charset information") @Test void testMissingCharset() { assertEquals("ISO-8859-1", TelegramAsyncHandler.extractCharset("Content-Type: text/plain", StandardCharsets.ISO_8859_1.name())); }
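Because extractCharset tokenizes on single spaces, the charset parameter is only found when it forms its own space-separated token; "text/plain;charset=UTF-8" with no space would fall through to the default. Illustrative calls (hypothetical inputs beyond the test's single case):

extractCharset("Content-Type: text/html; charset=UTF-8", "ISO-8859-1"); // "UTF-8"
extractCharset("Content-Type: text/plain", "ISO-8859-1");               // default: no charset token
extractCharset(null, "ISO-8859-1");                                     // default: null line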
@Override public void schedulePendingRequestBulkTimeoutCheck( final PhysicalSlotRequestBulk bulk, Time timeout) { PhysicalSlotRequestBulkWithTimestamp bulkWithTimestamp = new PhysicalSlotRequestBulkWithTimestamp(bulk); bulkWithTimestamp.markUnfulfillable(clock.relativeTimeMillis()); schedulePendingRequestBulkWithTimestampCheck(bulkWithTimestamp, timeout); }
@Test void testFulfilledBulkIsNotCancelled() throws InterruptedException, ExecutionException { final CompletableFuture<SlotRequestId> cancellationFuture = new CompletableFuture<>(); final PhysicalSlotRequestBulk bulk = createPhysicalSlotRequestBulkWithCancellationFuture( cancellationFuture, new SlotRequestId()); bulkChecker.schedulePendingRequestBulkTimeoutCheck(bulk, TIMEOUT); checkNotCancelledAfter(cancellationFuture, 2 * TIMEOUT.toMilliseconds()); }
public static void executeIgnore(Runnable runnable) { DataPermission dataPermission = getDisableDataPermissionDisable(); DataPermissionContextHolder.add(dataPermission); try { // execute the runnable runnable.run(); } finally { DataPermissionContextHolder.remove(); } }
@Test public void testExecuteIgnore() { DataPermissionUtils.executeIgnore(() -> assertFalse(DataPermissionContextHolder.get().enable())); }
@Override public URIStatus getStatus(AlluxioURI path, GetStatusPOptions options) throws FileDoesNotExistException, IOException, AlluxioException { URIStatus status = mMetadataCache.get(path); if (status == null || !status.isCompleted()) { try { status = mDelegatedFileSystem.getStatus(path, options); mMetadataCache.put(path, status); } catch (FileDoesNotExistException e) { mMetadataCache.put(path, NOT_FOUND_STATUS); throw e; } } else if (status == NOT_FOUND_STATUS) { throw new FileDoesNotExistException("Path \"" + path.getPath() + "\" does not exist."); } else if (options.getUpdateTimestamps()) { // Asynchronously send an RPC to master to update the access time. // Otherwise, if we need to synchronously send RPC to master to do this, // caching the status does not bring any benefit. asyncUpdateFileAccessTime(path); } return status; }
@Test public void getNoneExistStatus() throws Exception { try { mFs.getStatus(NOT_EXIST_FILE); Assert.fail("Failed while getStatus for a non-exist path."); } catch (FileDoesNotExistException e) { // expected exception thrown. test passes } assertEquals(1, mRpcCountingFs.getStatusRpcCount(NOT_EXIST_FILE)); // The following getStatus gets from cache, so no RPC will be made. try { mFs.getStatus(NOT_EXIST_FILE); Assert.fail("Failed while getStatus for a non-exist path."); } catch (FileDoesNotExistException e) { // expected exception thrown. test passes } assertEquals(1, mRpcCountingFs.getStatusRpcCount(NOT_EXIST_FILE)); }
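The NOT_FOUND_STATUS sentinel is what keeps the second getStatus in the test RPC-free: a miss is cached as a shared instance and detected by reference equality, so a real status can never collide with it. A generic sketch of that negative-caching shape (names here are illustrative, not Alluxio's):

private static final Object NOT_FOUND = new Object(); // shared sentinel, compared with ==

Object cached = cache.get(key);
if (cached == null) {
    try {
        cached = lookup(key);                          // the expensive RPC
        cache.put(key, cached);
    } catch (NoSuchElementException e) {
        cache.put(key, NOT_FOUND);                     // cache the miss too
        throw e;
    }
} else if (cached == NOT_FOUND) {
    throw new NoSuchElementException(key.toString()); // served from cache, no RPC
}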
public <T> void addStoreLevelMutableMetric(final String taskId, final String metricsScope, final String storeName, final String name, final String description, final RecordingLevel recordingLevel, final Gauge<T> valueProvider) { final MetricName metricName = metrics.metricName( name, STATE_STORE_LEVEL_GROUP, description, storeLevelTagMap(taskId, metricsScope, storeName) ); if (metrics.metric(metricName) == null) { metrics.addMetricIfAbsent(metricName, new MetricConfig().recordLevel(recordingLevel), valueProvider); final String key = storeSensorPrefix(Thread.currentThread().getName(), taskId, storeName); storeLevelMetrics.computeIfAbsent(key, ignored -> new LinkedList<>()).push(metricName); } }
@Test public void shouldAddNewStoreLevelMutableMetric() { final Metrics metrics = mock(Metrics.class); final MetricName metricName = new MetricName(METRIC_NAME1, STATE_STORE_LEVEL_GROUP, DESCRIPTION1, STORE_LEVEL_TAG_MAP); final MetricConfig metricConfig = new MetricConfig().recordLevel(INFO_RECORDING_LEVEL); when(metrics.metricName(METRIC_NAME1, STATE_STORE_LEVEL_GROUP, DESCRIPTION1, STORE_LEVEL_TAG_MAP)) .thenReturn(metricName); when(metrics.metric(metricName)).thenReturn(null); when(metrics.addMetricIfAbsent(eq(metricName), eqMetricConfig(metricConfig), eq(VALUE_PROVIDER))).thenReturn(null); final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, CLIENT_ID, VERSION, time); streamsMetrics.addStoreLevelMutableMetric( TASK_ID1, SCOPE_NAME, STORE_NAME1, METRIC_NAME1, DESCRIPTION1, INFO_RECORDING_LEVEL, VALUE_PROVIDER ); }
public ConnectionDetails createConnectionDetails( String scheme ) { try { ConnectionProvider<? extends ConnectionDetails> provider = connectionProviders.get( scheme ); return provider.getClassType().newInstance(); } catch ( Exception e ) { logger.error( "Error in createConnectionDetails {}", scheme, e ); return null; } }
@Test public void testCreateConnectionDetailsNull() { addProvider(); Assert.assertNull( connectionManager.createConnectionDetails( DOES_NOT_EXIST ) ); }
public static <T> T objectFromCommandLineArgument(String argument, Class<T> clazz) throws Exception { if (openBraceComesFirst(argument)) { return JSON_SERDE.readValue(argument, clazz); } else { return JSON_SERDE.readValue(new File(argument), clazz); } }
@Test public void testObjectFromCommandLineArgument() throws Exception { assertEquals(123, JsonUtil.objectFromCommandLineArgument("{\"bar\":123}", Foo.class).bar); assertEquals(1, JsonUtil.objectFromCommandLineArgument(" {\"bar\": 1} ", Foo.class).bar); File tempFile = TestUtils.tempFile(); try { Files.write(tempFile.toPath(), "{\"bar\": 456}".getBytes(StandardCharsets.UTF_8)); assertEquals(456, JsonUtil.objectFromCommandLineArgument(tempFile.getAbsolutePath(), Foo.class).bar); } finally { Files.delete(tempFile.toPath()); } }
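The leading-space case in the test implies openBraceComesFirst skips whitespace before testing for '{'. A plausible sketch of that helper (an assumption; the real implementation may differ):

static boolean openBraceComesFirst(String s) {
    for (int i = 0; i < s.length(); i++) {
        char c = s.charAt(i);
        if (!Character.isWhitespace(c)) {
            return c == '{'; // first non-whitespace char decides: inline JSON vs file path
        }
    }
    return false;            // all whitespace -> treat as a file path
}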
public void execute(final PrioritizableRunnable runnable) { _queue.add(runnable); // Guarantees that the execution loop is scheduled to the underlying executor only once. // Also makes sure that all memory effects of the last Runnable are visible to the next Runnable // when the value returned by decrementAndGet is 0. if (_pendingCount.getAndIncrement() == 0) { tryExecuteLoop(); } }
@Test(dataProvider = "draining") public void testExecuteOneStepPlan(boolean draining) throws InterruptedException { final LatchedRunnable runnable = new LatchedRunnable(); _serialExecutor.execute(runnable); assertTrue(runnable.await(5, TimeUnit.SECONDS)); assertFalse(_rejectionHandler.wasExecuted()); assertTrue(_capturingDeactivationListener.await(5, TimeUnit.SECONDS)); assertEquals(_capturingDeactivationListener.getDeactivatedCount(), 1); }
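The getAndIncrement() == 0 guard means only the submitter that takes the count from 0 to 1 schedules the drain loop; everyone else just enqueues. A minimal sketch of the full pattern, including the decrementAndGet the comment above refers to (not the library's code):

final AtomicInteger pending = new AtomicInteger();
final Queue<Runnable> queue = new ConcurrentLinkedQueue<>();

void execute(Runnable task, Executor underlying) {
    queue.add(task);
    if (pending.getAndIncrement() == 0) {            // only the 0 -> 1 transition schedules
        underlying.execute(() -> {
            do {
                queue.poll().run();                  // count > 0 guarantees a queued task
            } while (pending.decrementAndGet() > 0); // also publishes memory effects to the next run
        });
    }
}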
@Override public List<String> assignSegment(String segmentName, Map<String, Map<String, String>> currentAssignment, Map<InstancePartitionsType, InstancePartitions> instancePartitionsMap) { Preconditions.checkState(instancePartitionsMap.size() == 1, "One instance partition type should be provided"); InstancePartitions instancePartitions = instancePartitionsMap.get(InstancePartitionsType.CONSUMING); Preconditions.checkState(instancePartitions != null, "Failed to find CONSUMING instance partitions for table: %s", _tableNameWithType); _logger.info("Assigning segment: {} with instance partitions: {} for table: {}", segmentName, instancePartitions, _tableNameWithType); int partitionId = getPartitionId(segmentName); List<String> instancesAssigned = assignConsumingSegment(partitionId, instancePartitions); Set<String> existingAssignment = getExistingAssignment(partitionId, currentAssignment); // Check if the candidate assignment is consistent with existing assignment. Use existing assignment if not. if (existingAssignment == null) { _logger.info("No existing assignment from idealState, using the one decided by instancePartitions"); } else if (!isSameAssignment(existingAssignment, instancesAssigned)) { _logger.warn("Assignment: {} is inconsistent with idealState: {}, using the one from idealState", instancesAssigned, existingAssignment); instancesAssigned = new ArrayList<>(existingAssignment); if (_controllerMetrics != null) { _controllerMetrics.addMeteredTableValue(_tableNameWithType, ControllerMeter.CONTROLLER_REALTIME_TABLE_SEGMENT_ASSIGNMENT_MISMATCH, 1L); } } _logger.info("Assigned segment: {} to instances: {} for table: {}", segmentName, instancesAssigned, _tableNameWithType); return instancesAssigned; }
@Test(expectedExceptions = IllegalStateException.class)
public void testAssignSegmentToCompletedServers() {
  _segmentAssignment.assignSegment("seg01", new TreeMap<>(), new TreeMap<>());
}
public static void writeString(final @NotNull ByteBuf buf, final @NotNull CharSequence string) {
    writeString(buf, string, Short.MAX_VALUE);
}
@Test
void testWriteStringWithNullValue() {
    assertThrows(EncoderException.class, () -> BufUtil.writeString(this.buf, null));
}
@Override
public void onMsg(TbContext ctx, TbMsg msg) throws TbNodeException {
    ctx.tellNext(msg, checkMatches(msg) ? TbNodeConnectionType.TRUE : TbNodeConnectionType.FALSE);
}
@Test
void givenTypePolygonAndConfigWithoutPerimeterKeyName_whenOnMsg_thenFalse() throws TbNodeException {
    // GIVEN
    var config = new TbGpsGeofencingFilterNodeConfiguration().defaultConfiguration();
    config.setPerimeterKeyName(null);
    node.init(ctx, new TbNodeConfiguration(JacksonUtil.valueToTree(config)));

    DeviceId deviceId = new DeviceId(UUID.randomUUID());
    TbMsgMetaData metadata = getMetadataForOldVersionPolygonPerimeter();
    TbMsg msg = getTbMsg(deviceId, metadata,
            GeoUtilTest.POINT_OUTSIDE_SIMPLE_RECT.getLatitude(),
            GeoUtilTest.POINT_OUTSIDE_SIMPLE_RECT.getLongitude());

    // WHEN
    node.onMsg(ctx, msg);

    // THEN
    ArgumentCaptor<TbMsg> newMsgCaptor = ArgumentCaptor.forClass(TbMsg.class);
    verify(ctx, times(1)).tellNext(newMsgCaptor.capture(), eq(TbNodeConnectionType.FALSE));
    verify(ctx, never()).tellFailure(any(), any());

    TbMsg newMsg = newMsgCaptor.getValue();
    assertThat(newMsg).isNotNull();
    assertThat(newMsg).isSameAs(msg);
}
public static <@NonNull E> CompletableSource resolveScopeFromLifecycle(
    final LifecycleScopeProvider<E> provider) throws OutsideScopeException {
  return resolveScopeFromLifecycle(provider, true);
}
@Test
public void resolveScopeFromLifecycle_normal() {
  PublishSubject<Integer> lifecycle = PublishSubject.create();
  TestObserver<?> o = testSource(resolveScopeFromLifecycle(lifecycle, 3));

  lifecycle.onNext(0);
  o.assertNoErrors().assertNotComplete();

  lifecycle.onNext(1);
  o.assertNoErrors().assertNotComplete();

  lifecycle.onNext(0);
  o.assertNoErrors().assertNotComplete();

  lifecycle.onNext(2);
  o.assertNoErrors().assertNotComplete();

  // Now we end
  lifecycle.onNext(3);
  o.assertComplete();
}
public Future<Collection<Integer>> resizeAndReconcilePvcs(KafkaStatus kafkaStatus, List<PersistentVolumeClaim> pvcs) {
    Set<Integer> podIdsToRestart = new HashSet<>();
    List<Future<Void>> futures = new ArrayList<>(pvcs.size());

    for (PersistentVolumeClaim desiredPvc : pvcs) {
        Future<Void> perPvcFuture = pvcOperator.getAsync(reconciliation.namespace(), desiredPvc.getMetadata().getName())
                .compose(currentPvc -> {
                    if (currentPvc == null || currentPvc.getStatus() == null || !"Bound".equals(currentPvc.getStatus().getPhase())) {
                        // This branch handles the following conditions:
                        // * The PVC doesn't exist yet, we should create it
                        // * The PVC is not Bound, we should reconcile it
                        return pvcOperator.reconcile(reconciliation, reconciliation.namespace(), desiredPvc.getMetadata().getName(), desiredPvc)
                                .map((Void) null);
                    } else if (currentPvc.getStatus().getConditions().stream().anyMatch(cond -> "Resizing".equals(cond.getType()) && "true".equals(cond.getStatus().toLowerCase(Locale.ENGLISH)))) {
                        // The PVC is Bound, but it is already resizing => Nothing to do, we should let it resize
                        LOGGER.debugCr(reconciliation, "The PVC {} is resizing, nothing to do", desiredPvc.getMetadata().getName());
                        return Future.succeededFuture();
                    } else if (currentPvc.getStatus().getConditions().stream().anyMatch(cond -> "FileSystemResizePending".equals(cond.getType()) && "true".equals(cond.getStatus().toLowerCase(Locale.ENGLISH)))) {
                        // The PVC is Bound and resized but waiting for FS resizing => We need to restart the pod which is using it
                        podIdsToRestart.add(getPodIndexFromPvcName(desiredPvc.getMetadata().getName()));
                        LOGGER.infoCr(reconciliation, "The PVC {} is waiting for file system resizing and the pod using it might need to be restarted.", desiredPvc.getMetadata().getName());
                        return Future.succeededFuture();
                    } else {
                        // The PVC is Bound and resizing is not in progress => We should check if the SC supports resizing and check if size changed
                        Long currentSize = StorageUtils.convertToMillibytes(currentPvc.getSpec().getResources().getRequests().get("storage"));
                        Long desiredSize = StorageUtils.convertToMillibytes(desiredPvc.getSpec().getResources().getRequests().get("storage"));

                        if (!currentSize.equals(desiredSize)) {
                            // The sizes are different => we should resize (shrinking will be handled in StorageDiff, so we do not need to check that)
                            return resizePvc(kafkaStatus, currentPvc, desiredPvc);
                        } else {
                            // size didn't change, just reconcile
                            return pvcOperator.reconcile(reconciliation, reconciliation.namespace(), desiredPvc.getMetadata().getName(), desiredPvc)
                                    .map((Void) null);
                        }
                    }
                });

        futures.add(perPvcFuture);
    }

    return Future.all(futures)
            .map(podIdsToRestart);
}
@Test
public void testNoExistingVolumes(VertxTestContext context) {
    List<PersistentVolumeClaim> pvcs = List.of(
            createPvc("data-pod-0"),
            createPvc("data-pod-1"),
            createPvc("data-pod-2")
    );

    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(false);

    // Mock the PVC Operator
    PvcOperator mockPvcOps = supplier.pvcOperations;
    when(mockPvcOps.getAsync(eq(NAMESPACE), ArgumentMatchers.startsWith("data-"))).thenReturn(Future.succeededFuture());
    ArgumentCaptor<PersistentVolumeClaim> pvcCaptor = ArgumentCaptor.forClass(PersistentVolumeClaim.class);
    when(mockPvcOps.reconcile(any(), anyString(), anyString(), pvcCaptor.capture())).thenReturn(Future.succeededFuture());

    // Mock the StorageClass Operator
    StorageClassOperator mockSco = supplier.storageClassOperations;
    when(mockSco.getAsync(eq(STORAGE_CLASS_NAME))).thenReturn(Future.succeededFuture(RESIZABLE_STORAGE_CLASS));

    // Reconcile the PVCs
    PvcReconciler reconciler = new PvcReconciler(
            new Reconciliation("test-trigger", Kafka.RESOURCE_KIND, NAMESPACE, CLUSTER_NAME),
            mockPvcOps,
            mockSco
    );

    Checkpoint async = context.checkpoint();
    reconciler.resizeAndReconcilePvcs(new KafkaStatus(), pvcs)
            .onComplete(res -> {
                assertThat(res.succeeded(), is(true));
                assertThat(res.result().size(), is(0));
                assertThat(pvcCaptor.getAllValues().size(), is(3));
                assertThat(pvcCaptor.getAllValues(), is(pvcs));
                async.flag();
            });
}
public void addTask(Task task) {
    tasks.add(requireNonNull(task));
}
@Test
void addsATaskServlet() throws Exception {
    final Task task = new Task("thing") {
        @Override
        public void execute(Map<String, List<String>> parameters, PrintWriter output) throws Exception {
        }
    };

    env.addTask(task);
    handler.setServer(new Server());
    handler.start();

    final ServletRegistration registration = handler.getServletHandler()
            .getServletContext()
            .getServletRegistration("tasks");
    assertThat(registration.getMappings())
            .containsOnly("/tasks/*");
}
@Override
public void run() {
    // top-level command, do nothing
}
@Test
public void test_resumeJob_jobNotSuspended() {
    // Given
    Job job = newJob();
    assertThat(job).eventuallyHasStatus(JobStatus.RUNNING);

    // When
    // Then
    exception.expectMessage("is not suspended");
    run("resume", job.getName());
}
public String toBaseMessageIdString(Object messageId) {
    if (messageId == null) {
        return null;
    } else if (messageId instanceof String) {
        String stringId = (String) messageId;

        // If the given string has a type encoding prefix,
        // we need to escape it as an encoded string (even if
        // the existing encoding prefix was also for string)
        if (hasTypeEncodingPrefix(stringId)) {
            return AMQP_STRING_PREFIX + stringId;
        } else {
            return stringId;
        }
    } else if (messageId instanceof UUID) {
        return AMQP_UUID_PREFIX + messageId.toString();
    } else if (messageId instanceof UnsignedLong) {
        return AMQP_ULONG_PREFIX + messageId.toString();
    } else if (messageId instanceof Binary) {
        ByteBuffer dup = ((Binary) messageId).asByteBuffer();

        byte[] bytes = new byte[dup.remaining()];
        dup.get(bytes);

        String hex = convertBinaryToHexString(bytes);

        return AMQP_BINARY_PREFIX + hex;
    } else {
        throw new IllegalArgumentException("Unsupported type provided: " + messageId.getClass());
    }
}
@Test
public void testToBaseMessageIdStringWithUUID() {
    UUID uuidMessageId = UUID.randomUUID();
    String expected = AMQPMessageIdHelper.AMQP_UUID_PREFIX + uuidMessageId.toString();

    String baseMessageIdString = messageIdHelper.toBaseMessageIdString(uuidMessageId);
    assertNotNull("null string should not have been returned", baseMessageIdString);
    assertEquals("expected base id string was not returned", expected, baseMessageIdString);
}
public static short translateBucketAcl(GSAccessControlList acl, String userId) {
    short mode = (short) 0;
    for (GrantAndPermission gp : acl.getGrantAndPermissions()) {
        Permission perm = gp.getPermission();
        GranteeInterface grantee = gp.getGrantee();
        if (perm.equals(Permission.PERMISSION_READ)) {
            if (isUserIdInGrantee(grantee, userId)) {
                // If the bucket is readable by the user, add r and x to the owner mode.
                mode |= (short) 0500;
            }
        } else if (perm.equals(Permission.PERMISSION_WRITE)) {
            if (isUserIdInGrantee(grantee, userId)) {
                // If the bucket is writable by the user, +w to the owner mode.
                mode |= (short) 0200;
            }
        } else if (perm.equals(Permission.PERMISSION_FULL_CONTROL)) {
            if (isUserIdInGrantee(grantee, userId)) {
                // If the user has full control to the bucket, +rwx to the owner mode.
                mode |= (short) 0700;
            }
        }
    }
    return mode;
}
@Test
public void translateAuthenticatedUserFullPermission() {
    GroupGrantee authenticatedUsersGrantee = GroupGrantee.AUTHENTICATED_USERS;
    mAcl.grantPermission(authenticatedUsersGrantee, Permission.PERMISSION_FULL_CONTROL);
    assertEquals((short) 0700, GCSUtils.translateBucketAcl(mAcl, ID));
    assertEquals((short) 0700, GCSUtils.translateBucketAcl(mAcl, OTHER_ID));
}
public Job toEnqueuedJob() {
    return toJob(new EnqueuedState());
}
@Test
void testToEnqueuedJob() {
    final RecurringJob recurringJob = aDefaultRecurringJob()
            .withId("the-recurring-job")
            .withName("the recurring job")
            .withAmountOfRetries(3)
            .withLabels("some label")
            .build();

    final Job job = recurringJob.toEnqueuedJob();
    assertThat(job)
            .hasRecurringJobId("the-recurring-job")
            .hasJobName("the recurring job")
            .hasState(ENQUEUED)
            .hasAmountOfRetries(3)
            .hasLabels(Set.of("some label"));
}
public void convertOffset(String brokerList, String topic, Map<String, String> properties, long warehouseId)
        throws UserException {
    List<Integer> beginningPartitions = Lists.newArrayList();
    List<Integer> endPartitions = Lists.newArrayList();
    for (Map.Entry<Integer, Long> entry : partitionIdToOffset.entrySet()) {
        Integer part = entry.getKey();
        Long offset = entry.getValue();
        if (offset == -2L) {
            beginningPartitions.add(part);
        }
        if (offset == -1L) {
            endPartitions.add(part);
        }
    }

    if (beginningPartitions.size() > 0) {
        Map<Integer, Long> partOffsets = KafkaUtil
                .getBeginningOffsets(brokerList, topic, ImmutableMap.copyOf(properties), beginningPartitions, warehouseId);
        partitionIdToOffset.putAll(partOffsets);
    }
    if (endPartitions.size() > 0) {
        Map<Integer, Long> partOffsets = KafkaUtil.getLatestOffsets(brokerList, topic, ImmutableMap.copyOf(properties),
                endPartitions, warehouseId);
        partitionIdToOffset.putAll(partOffsets);
    }
}
@Test
public void testConvertOffset() throws Exception {
    new MockUp<KafkaUtil>() {
        @Mock
        public Map<Integer, Long> getLatestOffsets(String brokerList, String topic,
                                                   ImmutableMap<String, String> properties,
                                                   List<Integer> partitions, long warehouseId) throws UserException {
            Map<Integer, Long> result = Maps.newHashMap();
            result.put(0, 100L);
            return result;
        }

        @Mock
        public Map<Integer, Long> getBeginningOffsets(String brokerList, String topic,
                                                      ImmutableMap<String, String> properties,
                                                      List<Integer> partitions, long warehouseId) throws UserException {
            Map<Integer, Long> result = Maps.newHashMap();
            result.put(1, 1L);
            return result;
        }
    };

    KafkaProgress progress = new KafkaProgress();
    // modify offset while paused when partition is not ready
    try {
        List<Pair<Integer, Long>> partitionToOffset = new ArrayList<>();
        partitionToOffset.add(new Pair<>(3, 20L));
        progress.modifyOffset(partitionToOffset);
    } catch (DdlException e) {
        Assert.assertEquals("The specified partition 3 is not in the consumed partitions", e.getMessage());
    }

    progress.addPartitionOffset(new Pair<>(0, -1L));
    progress.addPartitionOffset(new Pair<>(1, -2L));
    progress.addPartitionOffset(new Pair<>(2, 10L));
    progress.addPartitionOffset(new Pair<>(3, 10L));
    progress.convertOffset("127.0.0.1:9020", "topic", Maps.newHashMap(), WarehouseManager.DEFAULT_WAREHOUSE_ID);

    List<Pair<Integer, Long>> partitionToOffset = new ArrayList<>();
    partitionToOffset.add(new Pair<>(3, 20L));
    progress.modifyOffset(partitionToOffset);
    Assert.assertEquals(4, partitionToOffset.size());

    Assert.assertEquals(100L, (long) progress.getOffsetByPartition(0));
    Assert.assertEquals(1L, (long) progress.getOffsetByPartition(1));
    Assert.assertEquals(10L, (long) progress.getOffsetByPartition(2));
    Assert.assertEquals(20L, (long) progress.getOffsetByPartition(3));
}
@Override
public void close() {
    connectionPool.close();
}
@Test
public void testClose() {
    snowflakeClient.close();
    verify(mockClientPool).close();
}
@Override
public <R> R queryOne(String sql, Class<R> cls) {
    return queryOne(jdbcTemplate, sql, cls);
}
@Test
void testQueryOne5() {
    final String sql = "SELECT * FROM config_info WHERE id = ? AND data_id = ? AND group_id = ?";
    MockConfigInfo configInfo = new MockConfigInfo();
    configInfo.setId(1L);
    configInfo.setDataId("test");
    configInfo.setGroup("test");
    Object[] args = new Object[] {configInfo.getId(), configInfo.getDataId(), configInfo.getGroup()};
    when(tempJdbcTemplate.queryForObject(sql, args, MockConfigInfo.class)).thenReturn(configInfo);
    assertEquals(operate.queryOne(tempJdbcTemplate, sql, args, MockConfigInfo.class), configInfo);
}
public static Method[] getMethods(Class<?> clazz, Filter<Method> filter) throws SecurityException {
    if (null == clazz) {
        return null;
    }
    return ArrayUtil.filter(getMethods(clazz), filter);
}
@Test
public void getMethodsTest() {
    Method[] methods = ReflectUtil.getMethods(ExamInfoDict.class);
    assertEquals(20, methods.length);

    // filter test
    methods = ReflectUtil.getMethods(ExamInfoDict.class, t -> Integer.class.equals(t.getReturnType()));
    assertEquals(4, methods.length);

    final Method method = methods[0];
    assertNotNull(method);

    // null filter test
    methods = ReflectUtil.getMethods(ExamInfoDict.class, null);
    assertEquals(20, methods.length);

    final Method method2 = methods[0];
    assertNotNull(method2);
}
@SuppressWarnings("unchecked")
public <T extends Expression> T rewrite(final T expression, final C context) {
    return (T) rewriter.process(expression, context);
}
@Test
public void shouldRewriteArithmeticBinary() {
    // Given:
    final ArithmeticBinaryExpression parsed = parseExpression("1 + 2");
    when(processor.apply(parsed.getLeft(), context)).thenReturn(expr1);
    when(processor.apply(parsed.getRight(), context)).thenReturn(expr2);

    // When
    final Expression rewritten = expressionRewriter.rewrite(parsed, context);

    // Then:
    assertThat(
        rewritten,
        equalTo(
            new ArithmeticBinaryExpression(
                parsed.getLocation(),
                parsed.getOperator(),
                expr1,
                expr2
            )
        )
    );
}
public Collection<ViewParameterSummaryDTO> forValue() {
    final Set<String> searches = viewService.streamAll()
            .map(ViewDTO::searchId)
            .collect(Collectors.toSet());
    final Map<String, Search> qualifyingSearches = this.searchDbService.findByIds(searches).stream()
            .filter(search -> !search.parameters().isEmpty())
            .collect(Collectors.toMap(Search::id, Functions.identity()));

    return viewService.streamAll()
            .filter(view -> qualifyingSearches.keySet().contains(view.searchId()))
            .map(view -> ViewParameterSummaryDTO.create(view, qualifyingSearches.get(view.searchId())))
            .collect(Collectors.toSet());
}
@Test
public void returnViewWhenSearchWithParametersIsPresent() {
    final Search search = Search.builder()
            .id("searchWithParameter")
            .parameters(ImmutableSet.of(ValueParameter.any("foobar")))
            .build();
    final ViewDTO view1 = createView("searchWithParameter", "viewWithParameter");
    final QualifyingViewsService service = new QualifyingViewsService(mockSearchService(search), mockViewService(view1));

    final Collection<ViewParameterSummaryDTO> result = service.forValue();

    assertThat(result)
            .hasOnlyOneElementSatisfying(summary -> {
                assertThat(summary.id()).isEqualTo("viewWithParameter");
                assertThat(summary.title()).isEqualTo("My View");
                assertThat(summary.summary()).isEqualTo("My Summary");
                assertThat(summary.description()).isEqualTo("My Description");
            });
}
@Override
public RepositoryConfiguration responseMessageForRepositoryConfiguration(String responseBody) {
    try {
        RepositoryConfiguration repositoryConfiguration = new RepositoryConfiguration();
        Map<String, Map> configurations;
        try {
            configurations = parseResponseToMap(responseBody);
        } catch (Exception e) {
            throw new RuntimeException("Repository configuration should be returned as a map");
        }
        if (configurations == null || configurations.isEmpty()) {
            throw new RuntimeException("Empty response body");
        }
        for (String key : configurations.keySet()) {
            if (isEmpty(key)) {
                throw new RuntimeException("Repository configuration key cannot be empty");
            }
            if (!(configurations.get(key) instanceof Map)) {
                throw new RuntimeException(format("Repository configuration properties for key '%s' should be represented as a Map", key));
            }
            repositoryConfiguration.add(toPackageMaterialProperty(key, configurations.get(key)));
        }
        return repositoryConfiguration;
    } catch (Exception e) {
        throw new RuntimeException(format("Unable to de-serialize json response. %s", e.getMessage()));
    }
}
@Test
public void shouldBuildRepositoryConfigurationFromResponseBody() throws Exception {
    String responseBody = "{" +
            "\"key-one\":{}," +
            "\"key-two\":{\"default-value\":\"two\",\"part-of-identity\":true,\"secure\":true,\"required\":true,\"display-name\":\"display-two\",\"display-order\":\"1\"}," +
            "\"key-three\":{\"default-value\":\"three\",\"part-of-identity\":false,\"secure\":false,\"required\":false,\"display-name\":\"display-three\",\"display-order\":\"2\"}" +
            "}";

    RepositoryConfiguration repositoryConfiguration = messageHandler.responseMessageForRepositoryConfiguration(responseBody);
    assertPropertyConfiguration((PackageMaterialProperty) repositoryConfiguration.get("key-one"), "key-one", "", true, true, false, "", 0);
    assertPropertyConfiguration((PackageMaterialProperty) repositoryConfiguration.get("key-two"), "key-two", "two", true, true, true, "display-two", 1);
    assertPropertyConfiguration((PackageMaterialProperty) repositoryConfiguration.get("key-three"), "key-three", "three", false, false, false, "display-three", 2);
}
@VisibleForTesting
Map<Object, Long> getCallCostSnapshot() {
    HashMap<Object, Long> snapshot = new HashMap<Object, Long>();

    for (Map.Entry<Object, List<AtomicLong>> entry : callCosts.entrySet()) {
        snapshot.put(entry.getKey(), entry.getValue().get(0).get());
    }

    return Collections.unmodifiableMap(snapshot);
}
@Test
@SuppressWarnings("deprecation")
public void testAccumulate() {
    Configuration conf = new Configuration();
    conf.set("ipc.10." + DecayRpcScheduler.IPC_FCQ_DECAYSCHEDULER_PERIOD_KEY, "99999999"); // Never flush
    scheduler = new DecayRpcScheduler(1, "ipc.10", conf);

    assertEquals(0, scheduler.getCallCostSnapshot().size()); // empty first

    getPriorityIncrementCallCount("A");
    assertEquals(1, scheduler.getCallCostSnapshot().get("A").longValue());
    assertEquals(1, scheduler.getCallCostSnapshot().get("A").longValue());

    getPriorityIncrementCallCount("A");
    getPriorityIncrementCallCount("B");
    getPriorityIncrementCallCount("A");

    assertEquals(3, scheduler.getCallCostSnapshot().get("A").longValue());
    assertEquals(1, scheduler.getCallCostSnapshot().get("B").longValue());
}
public static int[] createBackwardCompatibleStyleable(
        @NonNull int[] localStyleableArray,
        @NonNull Context localContext,
        @NonNull Context remoteContext,
        @NonNull SparseIntArray attributeIdMap) {
    final String remotePackageName = remoteContext.getPackageName();
    if (localContext.getPackageName().equals(remotePackageName)) {
        Logger.d(
                TAG,
                "This is a local context (" + remotePackageName + "), optimization will be done.");
        // optimization
        for (int attrId : localStyleableArray) {
            attributeIdMap.put(attrId, attrId);
        }
        return localStyleableArray;
    }

    final Resources localRes = localContext.getResources();
    final Resources remoteRes = remoteContext.getResources();
    List<Integer> styleableIdList = new ArrayList<>(localStyleableArray.length);
    for (int attrId : localStyleableArray) {
        final boolean isAndroidAttribute = localRes.getResourcePackageName(attrId).equals("android");
        final int remoteAttrId;

        if (isAndroidAttribute) {
            // android attribute IDs are the same always. So, I can optimize.
            remoteAttrId = attrId;
        } else {
            final String attributeName = localRes.getResourceEntryName(attrId);
            remoteAttrId = remoteRes.getIdentifier(attributeName, "attr", remotePackageName);
            Logger.d(
                    TAG,
                    "attr " + attributeName + ", local id " + attrId + ", remote id " + remoteAttrId);
        }

        if (remoteAttrId != 0) {
            attributeIdMap.put(remoteAttrId, attrId);
            styleableIdList.add(remoteAttrId);
        }
    }
    final int[] remoteMappedStyleable = new int[styleableIdList.size()];
    for (int i = 0; i < remoteMappedStyleable.length; i++) {
        remoteMappedStyleable[i] = styleableIdList.get(i);
    }

    return remoteMappedStyleable;
}
@Test
public void testSamePackageSameValues() {
    SparseIntArray sparseIntArray = new SparseIntArray();
    int[] backwardCompatibleStyleable =
            Support.createBackwardCompatibleStyleable(
                    R.styleable.KeyboardLayout,
                    getApplicationContext(),
                    getApplicationContext(),
                    sparseIntArray);

    Assert.assertSame(backwardCompatibleStyleable, R.styleable.KeyboardLayout);
    Assert.assertEquals(backwardCompatibleStyleable.length, sparseIntArray.size());
    for (int attrId : backwardCompatibleStyleable) {
        Assert.assertEquals(attrId, sparseIntArray.get(attrId));
    }
}
public List<Job> toScheduledJobs(Instant from, Instant upTo) {
    List<Job> jobs = new ArrayList<>();
    Instant nextRun = getNextRun(from);
    while (nextRun.isBefore(upTo)) {
        jobs.add(toJob(new ScheduledState(nextRun, this)));
        nextRun = getNextRun(nextRun);
    }
    return jobs;
}
@Test
void testToScheduledJobsGetsAllJobsBetweenStartAndEndMultipleResults() {
    final RecurringJob recurringJob = aDefaultRecurringJob()
            .withCronExpression("*/5 * * * * *")
            .build();

    final List<Job> jobs = recurringJob.toScheduledJobs(now().minusSeconds(15), now().plusSeconds(5));

    assertThat(jobs).hasSize(4);
}
@VisibleForTesting
void startKsql(final KsqlConfig ksqlConfigWithPort) {
    cleanupOldState();
    initialize(ksqlConfigWithPort);
}
@Test
public void shouldStartCommandStoreAndCommandRunnerBeforeCreatingLogStream() {
    // When:
    app.startKsql(ksqlConfig);

    // Then:
    final InOrder inOrder = Mockito.inOrder(commandQueue, commandRunner, ksqlResource);
    inOrder.verify(commandQueue).start();
    inOrder.verify(commandRunner).processPriorCommands(queryCleanupArgumentCaptor.capture());
    inOrder.verify(commandRunner).start();
    inOrder.verify(ksqlResource).handleKsqlStatements(
        securityContextArgumentCaptor.capture(),
        eq(new KsqlRequest(logCreateStatement, Collections.emptyMap(), Collections.emptyMap(), null))
    );

    assertThat(securityContextArgumentCaptor.getValue().getUserPrincipal(), is(Optional.empty()));
    assertThat(securityContextArgumentCaptor.getValue().getServiceContext(), is(serviceContext));
}
public static GeneratedResources getGeneratedResourcesObject(String generatedResourcesString) throws JsonProcessingException {
    return objectMapper.readValue(generatedResourcesString, GeneratedResources.class);
}
@Test
void getGeneratedResourcesObjectFromString() throws JsonProcessingException {
    String generatedResourcesString = "[{\"step-type\":\"executable\"," +
            "\"modelLocalUriId\":{\"model\":\"foo\",\"basePath\":\"/this/is/fri\",\"fullPath\":\"/foo/this/is/fri\"}}," +
            "{\"step-type\":\"class\",\"fullClassName\":\"full.class.Name\"}]";
    GeneratedResources retrieved = JSONUtils.getGeneratedResourcesObject(generatedResourcesString);
    assertThat(retrieved).isNotNull();

    String fullClassName = "full.class.Name";
    GeneratedResource expected1 = new GeneratedClassResource(fullClassName);
    LocalUri modelLocalUriId = new ReflectiveAppRoot("test")
            .get(ComponentFoo.class)
            .get("this", "is", "fri")
            .asLocalUri();
    ModelLocalUriId localUriId = new ModelLocalUriId(modelLocalUriId);
    GeneratedResource expected2 = new GeneratedExecutableResource(localUriId, Collections.singletonList(fullClassName));
    assertThat(retrieved).contains(expected1);
    assertThat(retrieved).contains(expected2);
}
@Override
protected int poll() throws Exception {
    // must reset for each poll
    shutdownRunningTask = null;
    pendingExchanges = 0;

    List<software.amazon.awssdk.services.sqs.model.Message> messages = pollingTask.call();

    // okay we have some response from aws so lets mark the consumer as ready
    forceConsumerAsReady();

    Queue<Exchange> exchanges = createExchanges(messages);
    return processBatch(CastUtils.cast(exchanges));
}
@Test
void shouldIgnoreNullAttributeNames() throws Exception {
    // given
    configuration.setAttributeNames(null);
    configuration.setMessageAttributeNames(null);
    configuration.setSortAttributeName(null);
    try (var tested = createConsumer(-1)) {
        // when
        var polledMessagesCount = tested.poll();

        // then
        var expectedRequest = expectedReceiveRequestBuilder()
                .messageSystemAttributeNames((List<MessageSystemAttributeName>) null)
                .messageAttributeNames((List<String>) null)
                .maxNumberOfMessages(1)
                .build();
        assertThat(polledMessagesCount).isZero();
        assertThat(receivedExchanges).isEmpty();
        assertThat(sqsClientMock.getReceiveRequests()).containsExactlyInAnyOrder(expectedRequest);
        assertThat(sqsClientMock.getQueues()).isEmpty();
    }
}
@Udf(description = "Returns the inverse (arc) tangent of an INT value")
public Double atan(
    @UdfParameter(
        value = "value",
        description = "The value to get the inverse tangent of."
    ) final Integer value
) {
    return atan(value == null ? null : value.doubleValue());
}
@Test
public void shouldHandleNull() {
    assertThat(udf.atan((Integer) null), is(nullValue()));
    assertThat(udf.atan((Long) null), is(nullValue()));
    assertThat(udf.atan((Double) null), is(nullValue()));
}
@Override
public double getAndSet(double newValue) {
    return get(getAndSetAsync(newValue));
}
@Test
public void testGetAndSet() {
    RAtomicDouble al = redisson.getAtomicDouble("test");
    assertThat(al.getAndSet(12)).isEqualTo(0);
}
public static <T> RetryTransformer<T> of(Retry retry) {
    return new RetryTransformer<>(retry);
}
@Test
public void returnOnErrorUsingFlowable() throws InterruptedException {
    RetryConfig config = retryConfig();
    Retry retry = Retry.of("testName", config);
    RetryTransformer<Object> retryTransformer = RetryTransformer.of(retry);
    given(helloWorldService.returnHelloWorld())
        .willThrow(new HelloWorldException());

    Flowable.fromCallable(helloWorldService::returnHelloWorld)
        .compose(retryTransformer)
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete()
        .assertSubscribed();
    Flowable.fromCallable(helloWorldService::returnHelloWorld)
        .compose(retryTransformer)
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete()
        .assertSubscribed();

    then(helloWorldService).should(times(6)).returnHelloWorld();
    Retry.Metrics metrics = retry.getMetrics();
    assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isEqualTo(2);
    assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
}
public static ExtensibleLoadManagerImpl get(LoadManager loadManager) {
    if (!(loadManager instanceof ExtensibleLoadManagerWrapper loadManagerWrapper)) {
        throw new IllegalArgumentException("The load manager should be 'ExtensibleLoadManagerWrapper'.");
    }
    return loadManagerWrapper.get();
}
@Test(timeOut = 30 * 1000)
public void testSplitBundleAdminAPI() throws Exception {
    final String namespace = "public/testSplitBundleAdminAPI";
    admin.namespaces().createNamespace(namespace, 1);
    Pair<TopicName, NamespaceBundle> topicAndBundle = getBundleIsNotOwnByChangeEventTopic("test-split");
    TopicName topicName = topicAndBundle.getLeft();
    admin.topics().createPartitionedTopic(topicName.toString(), 10);
    BundlesData bundles = admin.namespaces().getBundles(namespace);
    int numBundles = bundles.getNumBundles();
    var bundleRanges = bundles.getBoundaries().stream().map(Long::decode).sorted().toList();

    String firstBundle = bundleRanges.get(0) + "_" + bundleRanges.get(1);

    AtomicInteger splitCount = new AtomicInteger(0);
    NamespaceBundleSplitListener namespaceBundleSplitListener = new NamespaceBundleSplitListener() {
        @Override
        public void onSplit(NamespaceBundle bundle) {
            splitCount.incrementAndGet();
        }

        @Override
        public boolean test(NamespaceBundle namespaceBundle) {
            return namespaceBundle
                    .toString()
                    .equals(String.format(namespace + "/0x%08x_0x%08x", bundleRanges.get(0), bundleRanges.get(1)));
        }
    };
    pulsar1.getNamespaceService().addNamespaceBundleSplitListener(namespaceBundleSplitListener);
    pulsar2.getNamespaceService().addNamespaceBundleSplitListener(namespaceBundleSplitListener);

    long mid = bundleRanges.get(0) + (bundleRanges.get(1) - bundleRanges.get(0)) / 2;

    admin.namespaces().splitNamespaceBundle(namespace, firstBundle, true, null);

    Awaitility.await()
            .atMost(5, TimeUnit.SECONDS)
            .untilAsserted(() -> {
                BundlesData bundlesData = admin.namespaces().getBundles(namespace);
                assertEquals(bundlesData.getNumBundles(), numBundles + 1);
                String lowBundle = String.format("0x%08x", bundleRanges.get(0));
                String midBundle = String.format("0x%08x", mid);
                String highBundle = String.format("0x%08x", bundleRanges.get(1));
                assertTrue(bundlesData.getBoundaries().contains(lowBundle));
                assertTrue(bundlesData.getBoundaries().contains(midBundle));
                assertTrue(bundlesData.getBoundaries().contains(highBundle));
                assertEquals(splitCount.get(), 1);
            });

    // Test split bundle with invalid bundle range.
    try {
        admin.namespaces().splitNamespaceBundle(namespace, "invalid", true, null);
        fail();
    } catch (PulsarAdminException ex) {
        assertTrue(ex.getMessage().contains("Invalid bundle range"));
    }

    // delete and retry
    Awaitility.await()
            .atMost(5, TimeUnit.SECONDS)
            .untilAsserted(() -> {
                admin.namespaces().deleteNamespace(namespace);
            });
    admin.namespaces().createNamespace(namespace, 1);
    admin.namespaces().splitNamespaceBundle(namespace, firstBundle, true, null);

    Awaitility.await()
            .atMost(5, TimeUnit.SECONDS)
            .untilAsserted(() -> {
                BundlesData bundlesData = admin.namespaces().getBundles(namespace);
                assertEquals(bundlesData.getNumBundles(), numBundles + 1);
                String lowBundle = String.format("0x%08x", bundleRanges.get(0));
                String midBundle = String.format("0x%08x", mid);
                String highBundle = String.format("0x%08x", bundleRanges.get(1));
                assertTrue(bundlesData.getBoundaries().contains(lowBundle));
                assertTrue(bundlesData.getBoundaries().contains(midBundle));
                assertTrue(bundlesData.getBoundaries().contains(highBundle));
                assertEquals(splitCount.get(), 2);
            });
}
@Override
public void removeSelector(final SelectorData selectorData) {
    UpstreamCacheManager.getInstance().removeByKey(selectorData.getId());
    MetaDataCache.getInstance().clean();
    CACHED_HANDLE.get().removeHandle(CacheKeyUtils.INST.getKey(selectorData.getId(), Constants.DEFAULT_RULE));
}
@Test
public void removeSelectorTest() {
    dividePluginDataHandler.handlerSelector(selectorData);
    dividePluginDataHandler.removeSelector(selectorData);
    List<Upstream> result = UpstreamCacheManager.getInstance().findUpstreamListBySelectorId("handler");
    assertNull(result);
}
@Override
public boolean equals(Object o) {
    if (!super.equals(o)) {
        return false;
    }
    DataRecordWithStats that = (DataRecordWithStats) o;
    return value.equals(that.value);
}
@Test
public void testEquals() {
    assertEquals(record, record);
    assertEquals(record, recordSameAttributes);

    assertNotEquals(null, record);
    assertNotEquals(new Object(), record);
    assertNotEquals(record, objectRecord);
    assertNotEquals(record, recordOtherKeyAndValue);
}
public static String required(final HttpServletRequest req, final String key) {
    String value = req.getParameter(key);
    if (StringUtils.isEmpty(value)) {
        throw new IllegalArgumentException("Param '" + key + "' is required.");
    }
    String encoding = req.getParameter(ENCODING_KEY);
    return resolveValue(value, encoding);
}
@Test
void testRequired() {
    final String key = "key";
    MockHttpServletRequest servletRequest = new MockHttpServletRequest();
    try {
        WebUtils.required(servletRequest, key);
    } catch (Exception e) {
        assertTrue(e instanceof IllegalArgumentException);
    }
    servletRequest.addParameter(key, "value");
    String val = WebUtils.required(servletRequest, key);
    assertEquals("value", val);
}
@Override
public Number nextId(Object entity) {
    return idGenerator.nextId();
}
@Test
void nextId() {
    for (int i = 0; i < 10; i++) {
        System.out.println(generator.nextId(null));
        System.out.println(generator.nextUUID(null));
    }
}
@Override
public String getMountTable() {
    final List<Map<String, Object>> info = new LinkedList<>();

    if (mountTableStore == null) {
        return "[]";
    }

    try {
        // Get all the mount points in order
        GetMountTableEntriesRequest request = GetMountTableEntriesRequest.newInstance("/");
        GetMountTableEntriesResponse response = mountTableStore.getMountTableEntries(request);
        final List<MountTable> mounts = response.getEntries();
        List<MountTable> orderedMounts = new ArrayList<>(mounts);
        Collections.sort(orderedMounts, MountTable.SOURCE_COMPARATOR);

        // Dump mount table entries information into JSON
        for (MountTable entry : orderedMounts) {
            // Summarize destinations
            Set<String> nameservices = new LinkedHashSet<>();
            Set<String> paths = new LinkedHashSet<>();
            for (RemoteLocation location : entry.getDestinations()) {
                nameservices.add(location.getNameserviceId());
                paths.add(location.getDest());
            }

            Map<String, Object> map = getJson(entry);
            // We add some values with a cleaner format
            map.put("dateCreated", getDateString(entry.getDateCreated()));
            map.put("dateModified", getDateString(entry.getDateModified()));

            Map<String, Object> innerInfo = new HashMap<>();
            innerInfo.putAll(map);
            innerInfo.put("nameserviceId", StringUtils.join(",", nameservices));
            innerInfo.put("path", StringUtils.join(",", paths));
            if (nameservices.size() > 1) {
                innerInfo.put("order", entry.getDestOrder().toString());
            } else {
                innerInfo.put("order", "");
            }
            innerInfo.put("readonly", entry.isReadOnly());
            innerInfo.put("faulttolerant", entry.isFaultTolerant());
            info.add(Collections.unmodifiableMap(innerInfo));
        }
    } catch (IOException e) {
        LOG.error("Cannot generate JSON of mount table from store: {}", e.getMessage());
        return "[]";
    }

    return JSON.toString(info);
}
@Test
public void testMountTableStatsDataSource() throws IOException, JSONException {
    RBFMetrics metrics = getRouter().getMetrics();
    String jsonString = metrics.getMountTable();
    JSONArray jsonArray = new JSONArray(jsonString);
    assertEquals(jsonArray.length(), getMockMountTable().size());
    int match = 0;
    for (int i = 0; i < jsonArray.length(); i++) {
        JSONObject json = jsonArray.getJSONObject(i);
        String src = json.getString("sourcePath");
        for (MountTable entry : getMockMountTable()) {
            if (entry.getSourcePath().equals(src)) {
                assertEquals(entry.getDefaultLocation().getNameserviceId(), json.getString("nameserviceId"));
                assertEquals(entry.getDefaultLocation().getDest(), json.getString("path"));
                assertEquals(entry.getOwnerName(), json.getString("ownerName"));
                assertEquals(entry.getGroupName(), json.getString("groupName"));
                assertEquals(entry.getMode().toString(), json.getString("mode"));
                assertEquals(entry.getQuota().toString(), json.getString("quota"));
                assertNotNullAndNotEmpty(json.getString("dateCreated"));
                assertNotNullAndNotEmpty(json.getString("dateModified"));
                match++;
            }
        }
    }
    assertEquals(match, getMockMountTable().size());
}
public static void setDeferredDeepLinkCallback(SensorsDataDeferredDeepLinkCallback callback) {
    mDeferredDeepLinkCallback = callback;
}
@Test
public void setDeferredDeepLinkCallback() {
    DeepLinkManager.setDeferredDeepLinkCallback(null);
}
public boolean isHeadersOnly() {
    return headersOnly;
}
@Test
public void testDefaultHeadersOnly() {
    SplunkHECConfiguration config = new SplunkHECConfiguration();
    assertFalse(config.isHeadersOnly());
}
public static String parentOf(String path) throws PathNotFoundException {
    List<String> elements = split(path);

    int size = elements.size();
    if (size == 0) {
        throw new PathNotFoundException("No parent of " + path);
    }
    if (size == 1) {
        return "/";
    }
    elements.remove(size - 1);
    StringBuilder parent = new StringBuilder(path.length());
    for (String element : elements) {
        parent.append("/");
        parent.append(element);
    }
    return parent.toString();
}
@Test(expected = PathNotFoundException.class)
public void testParentOfRoot() throws Throwable {
    parentOf("/");
}
@Override
public List<UsbSerialPort> getPorts() {
    return mPorts;
}
@Test
public void invalidSingleInterfaceDevice() throws Exception {
    UsbDeviceConnection usbDeviceConnection = mock(UsbDeviceConnection.class);
    UsbDevice usbDevice = mock(UsbDevice.class);
    UsbInterface usbInterface = mock(UsbInterface.class);
    UsbEndpoint controlEndpoint = mock(UsbEndpoint.class);
    UsbEndpoint readEndpoint = mock(UsbEndpoint.class);
    //UsbEndpoint writeEndpoint = mock(UsbEndpoint.class);

    when(usbDeviceConnection.claimInterface(usbInterface, true)).thenReturn(true);
    when(usbDevice.getInterfaceCount()).thenReturn(1);
    when(usbDevice.getInterface(0)).thenReturn(usbInterface);
    when(usbInterface.getEndpointCount()).thenReturn(2);
    when(usbInterface.getEndpoint(0)).thenReturn(controlEndpoint);
    when(usbInterface.getEndpoint(1)).thenReturn(readEndpoint);
    //when(usbInterface.getEndpoint(2)).thenReturn(writeEndpoint);
    when(controlEndpoint.getDirection()).thenReturn(UsbConstants.USB_DIR_IN);
    when(controlEndpoint.getType()).thenReturn(UsbConstants.USB_ENDPOINT_XFER_INT);
    when(readEndpoint.getDirection()).thenReturn(UsbConstants.USB_DIR_IN);
    when(readEndpoint.getType()).thenReturn(UsbConstants.USB_ENDPOINT_XFER_BULK);
    //when(writeEndpoint.getDirection()).thenReturn(UsbConstants.USB_DIR_OUT);
    //when(writeEndpoint.getType()).thenReturn(UsbConstants.USB_ENDPOINT_XFER_BULK);

    CdcAcmSerialDriver driver = new CdcAcmSerialDriver(usbDevice);
    CdcAcmSerialDriver.CdcAcmSerialPort port = (CdcAcmSerialDriver.CdcAcmSerialPort) driver.getPorts().get(0);
    port.mConnection = usbDeviceConnection;
    port.openInt();
    assertNull(port.mWriteEndpoint);
}
String format() {
    StringBuilder builder = new StringBuilder();
    for (ReindexActions.Entry entry : actions.getEntries()) {
        builder.append(entry.name() + ": Consider re-indexing document type '" + entry.getDocumentType() +
                "' in cluster '" + entry.getClusterName() + "' because:\n");
        int counter = 1;
        for (String message : entry.getMessages()) {
            builder.append(" " + (counter++) + ") " + message + "\n");
        }
    }
    return builder.toString();
}
@Test
public void formatting_of_single_action() {
    ReindexActions actions = new ConfigChangeActionsBuilder().
            reindex(CHANGE_ID, CHANGE_MSG, DOC_TYPE, CLUSTER, SERVICE_NAME).
            build().getReindexActions();
    assertEquals("field-type-change: Consider re-indexing document type 'music' in cluster 'foo' because:\n" +
                 " 1) change\n",
                 new ReindexActionsFormatter(actions).format());
}
public static CsvIOParse<Row> parseRows(Schema schema, CSVFormat csvFormat) {
    CsvIOParseHelpers.validateCsvFormat(csvFormat);
    CsvIOParseHelpers.validateCsvFormatWithSchema(csvFormat, schema);
    RowCoder coder = RowCoder.of(schema);
    CsvIOParseConfiguration.Builder<Row> builder = CsvIOParseConfiguration.builder();
    builder.setCsvFormat(csvFormat).setSchema(schema).setCoder(coder).setFromRowFn(row -> row);
    return CsvIOParse.<Row>builder().setConfigBuilder(builder).build();
}
@Test
public void givenRecordToObjectError_emits() {
    Pipeline pipeline = Pipeline.create();
    PCollection<String> input = pipeline.apply(Create.of("true,1.1,3.141592,this_is_an_error,5,foo"));
    Schema schema =
        Schema.builder()
            .addBooleanField("aBoolean")
            .addDoubleField("aDouble")
            .addFloatField("aFloat")
            .addInt32Field("anInteger")
            .addInt64Field("aLong")
            .addStringField("aString")
            .build();
    CsvIOParse<Row> underTest = CsvIO.parseRows(schema, csvFormat().withQuote('"'));
    CsvIOParseResult<Row> result = input.apply(underTest);
    PAssert.thatSingleton(result.getErrors().apply(Count.globally())).isEqualTo(1L);
    PAssert.thatSingleton(
            stackTraceContains(result.getErrors(), CsvIORecordToObjects.class.getName()))
        .isEqualTo(1L);
    pipeline.run();
}
@SuppressWarnings("unchecked")
public Output run(RunContext runContext) throws Exception {
    Logger logger = runContext.logger();

    try (HttpClient client = this.client(runContext, this.method)) {
        HttpRequest<String> request = this.request(runContext);

        HttpResponse<String> response;
        try {
            response = client
                .toBlocking()
                .exchange(request, Argument.STRING, Argument.STRING);
            // check that the string is a valid Unicode string
            if (response.getBody().isPresent()) {
                OptionalInt illegalChar = response.body().chars().filter(c -> !Character.isDefined(c)).findFirst();
                if (illegalChar.isPresent()) {
                    throw new IllegalArgumentException("Illegal unicode code point in request body: " + illegalChar.getAsInt() +
                        ", the Request task only support valid Unicode strings as body.\n" +
                        "You can try using the Download task instead.");
                }
            }
        } catch (HttpClientResponseException e) {
            if (!allowFailed) {
                throw e;
            }
            //noinspection unchecked
            response = (HttpResponse<String>) e.getResponse();
        }

        logger.debug("Request '{}' with the response code '{}'", request.getUri(), response.getStatus().getCode());

        return this.output(runContext, request, response);
    }
}
@Test
void encrypted() throws Exception {
    try (
        ApplicationContext applicationContext = ApplicationContext.run();
        EmbeddedServer server = applicationContext.getBean(EmbeddedServer.class).start();
    ) {
        Request task = Request.builder()
            .id(RequestTest.class.getSimpleName())
            .type(RequestTest.class.getName())
            .uri(server.getURL().toString() + "/hello")
            .encryptBody(true)
            .build();

        RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, task, ImmutableMap.of());

        Request.Output output = task.run(runContext);

        // when encrypted, this must not be the plaintext value
        assertThat(output.getBody(), nullValue());
        assertThat(output.getEncryptedBody(), not("{ \"hello\": \"world\" }"));
        assertThat(output.getCode(), is(200));
    }
}
@VisibleForTesting
boolean isReceiverLoaded(String serviceName) {
    return delegationTokenReceivers.containsKey(serviceName);
}
@Test
public void testAllReceiversLoaded() {
    Configuration configuration = new Configuration();
    configuration.setBoolean(CONFIG_PREFIX + ".throw.enabled", false);
    DelegationTokenReceiverRepository delegationTokenReceiverRepository =
            new DelegationTokenReceiverRepository(configuration, null);

    assertEquals(3, delegationTokenReceiverRepository.delegationTokenReceivers.size());
    assertTrue(delegationTokenReceiverRepository.isReceiverLoaded("hadoopfs"));
    assertTrue(delegationTokenReceiverRepository.isReceiverLoaded("hbase"));
    assertTrue(delegationTokenReceiverRepository.isReceiverLoaded("test"));
    assertTrue(ExceptionThrowingDelegationTokenReceiver.constructed.get());
    assertFalse(delegationTokenReceiverRepository.isReceiverLoaded("throw"));
}
static Double convertToDouble(Object toConvert) {
    if (!(toConvert instanceof Number)) {
        throw new IllegalArgumentException("Input data must be declared and sent as Number, received " + toConvert);
    }
    return (Double) DATA_TYPE.DOUBLE.getActualValue(toConvert);
}
@Test
void convertToDouble_validValues() {
    Double expected = 3.0;
    List<Object> inputs = Arrays.asList(3, 3.0, 3.0f);
    inputs.forEach(number -> {
        Double retrieved = KiePMMLClusteringModel.convertToDouble(number);
        assertThat(retrieved).isEqualTo(expected);
    });
}
public static boolean validateMorePermissive(FsAction first, FsAction second) {
    if ((first == FsAction.ALL) || (second == FsAction.NONE) || (first == second)) {
        return true;
    }
    switch (first) {
    case READ_EXECUTE:
        return ((second == FsAction.READ) || (second == FsAction.EXECUTE));
    case READ_WRITE:
        return ((second == FsAction.READ) || (second == FsAction.WRITE));
    case WRITE_EXECUTE:
        return ((second == FsAction.WRITE) || (second == FsAction.EXECUTE));
    }
    return false;
}
@Test
public void testValidateMorePermissive() {
    assertConsistentFsPermissionBehaviour(FsAction.ALL, true, true, true, true, true, true, true, true);
    assertConsistentFsPermissionBehaviour(FsAction.READ, false, true, false, true, false, false, false, false);
    assertConsistentFsPermissionBehaviour(FsAction.WRITE, false, true, false, false, true, false, false, false);
    assertConsistentFsPermissionBehaviour(FsAction.EXECUTE, false, true, true, false, false, false, false, false);
    assertConsistentFsPermissionBehaviour(FsAction.READ_EXECUTE, false, true, true, true, false, true, false, false);
    assertConsistentFsPermissionBehaviour(FsAction.READ_WRITE, false, true, false, true, true, false, true, false);
    assertConsistentFsPermissionBehaviour(FsAction.WRITE_EXECUTE, false, true, true, false, true, false, false, true);
    assertConsistentFsPermissionBehaviour(FsAction.NONE, false, true, false, false, false, false, false, false);
}
public static boolean compare(Object source, Object target) {
    if (source == target) {
        return true;
    }
    if (source == null || target == null) {
        return false;
    }
    if (source.equals(target)) {
        return true;
    }
    if (source instanceof Boolean) {
        return compare(((Boolean) source), target);
    }
    if (source instanceof Number) {
        return compare(((Number) source), target);
    }
    if (target instanceof Number) {
        return compare(((Number) target), source);
    }
    if (source instanceof Date) {
        return compare(((Date) source), target);
    }
    if (target instanceof Date) {
        return compare(((Date) target), source);
    }
    if (source instanceof String) {
        return compare(((String) source), target);
    }
    if (target instanceof String) {
        return compare(((String) target), source);
    }
    if (source instanceof Collection) {
        return compare(((Collection) source), target);
    }
    if (target instanceof Collection) {
        return compare(((Collection) target), source);
    }
    if (source instanceof Map) {
        return compare(((Map) source), target);
    }
    if (target instanceof Map) {
        return compare(((Map) target), source);
    }
    if (source.getClass().isEnum() || source instanceof Enum) {
        return compare(((Enum) source), target);
    }
    if (target.getClass().isEnum() || source instanceof Enum) {
        return compare(((Enum) target), source);
    }
    if (source.getClass().isArray()) {
        return compare(((Object[]) source), target);
    }
    if (target.getClass().isArray()) {
        return compare(((Object[]) target), source);
    }
    return compare(FastBeanCopier.copy(source, HashMap.class), FastBeanCopier.copy(target, HashMap.class));
}
@Test
public void dateTest() {
    Date date = new Date();

    Assert.assertTrue(CompareUtils.compare(date, new Date(date.getTime())));
    Assert.assertTrue(CompareUtils.compare(date, DateFormatter.toString(date, "yyyy-MM-dd")));
    Assert.assertTrue(CompareUtils.compare(date, DateFormatter.toString(date, "yyyy-MM-dd HH:mm:ss")));
    Assert.assertTrue(CompareUtils.compare(date, date.getTime()));
    Assert.assertTrue(CompareUtils.compare(date.getTime(), date));
}
@Override
public boolean evaluate(Map<String, Object> values) {
    boolean toReturn = false;
    if (values.containsKey(name)) {
        logger.debug("found matching parameter, evaluating... ");
        toReturn = evaluation(values.get(name));
    }
    return toReturn;
}
@Test
void evaluateStringEqual() {
    Object value = "43";
    KiePMMLSimplePredicate kiePMMLSimplePredicate = getKiePMMLSimplePredicate(OPERATOR.EQUAL, value);

    Map<String, Object> inputData = new HashMap<>();
    inputData.put("FAKE", "NOT");
    assertThat(kiePMMLSimplePredicate.evaluate(inputData)).isFalse();

    inputData = new HashMap<>();
    inputData.put(SIMPLE_PREDICATE_NAME, "NOT");
    assertThat(kiePMMLSimplePredicate.evaluate(inputData)).isFalse();

    inputData = new HashMap<>();
    inputData.put(SIMPLE_PREDICATE_NAME, value);
    assertThat(kiePMMLSimplePredicate.evaluate(inputData)).isTrue();
}
@VisibleForTesting
Optional<Xpp3Dom> getSpringBootRepackageConfiguration() {
    Plugin springBootPlugin = project.getPlugin("org.springframework.boot:spring-boot-maven-plugin");
    if (springBootPlugin != null) {
        for (PluginExecution execution : springBootPlugin.getExecutions()) {
            if (execution.getGoals().contains("repackage")) {
                Xpp3Dom configuration = (Xpp3Dom) execution.getConfiguration();
                if (configuration == null) {
                    return Optional.of(new Xpp3Dom("configuration"));
                }

                boolean skip = Boolean.parseBoolean(getChildValue(configuration, "skip").orElse("false"));
                return skip ? Optional.empty() : Optional.of(configuration);
            }
        }
    }
    return Optional.empty();
}
@Test
public void testGetSpringBootRepackageConfiguration_skipped() {
    when(mockMavenProject.getPlugin("org.springframework.boot:spring-boot-maven-plugin"))
        .thenReturn(mockPlugin);
    when(mockPlugin.getExecutions()).thenReturn(Arrays.asList(mockPluginExecution));
    when(mockPluginExecution.getGoals()).thenReturn(Arrays.asList("repackage"));
    when(mockPluginExecution.getConfiguration()).thenReturn(pluginConfiguration);
    addXpp3DomChild(pluginConfiguration, "skip", "true");
    assertThat(mavenProjectProperties.getSpringBootRepackageConfiguration()).isEmpty();
}
public boolean isUnqualifiedShorthandProjection() {
    if (1 != projections.size()) {
        return false;
    }
    Projection projection = projections.iterator().next();
    return projection instanceof ShorthandProjection && !((ShorthandProjection) projection).getOwner().isPresent();
}
@Test
void assertUnqualifiedShorthandProjectionWithWrongProjection() {
    ProjectionsContext projectionsContext = new ProjectionsContext(0, 0, true, Collections.singleton(getColumnProjection()));
    assertFalse(projectionsContext.isUnqualifiedShorthandProjection());
}
public void isAssignableTo(Class<?> clazz) {
    if (!clazz.isAssignableFrom(checkNotNull(actual))) {
        failWithActual("expected to be assignable to", clazz.getName());
    }
}
@Test
public void testIsAssignableTo_same() {
    assertThat(String.class).isAssignableTo(String.class);
}
public Expression resolveSelect(final int idx, final Expression expression) {
    return expression;
}
@Test
public void shouldResolvingSelectAsNoOp() {
    // Given:
    final Expression exp = mock(Expression.class);

    // Then:
    assertThat(planNode.resolveSelect(10, exp), is(exp));
}