focal_method / test_case
@Override public int diff(String... names) { return get(diffAsync(names)); }
@Test public void testDiff() throws InterruptedException { redisson.getKeys().flushall(); RSetCache<Integer> cache1 = redisson.getSetCache("cache1", IntegerCodec.INSTANCE); cache1.add(1); cache1.add(2, 1, TimeUnit.SECONDS); cache1.add(5, 1, TimeUnit.SECONDS); cache1.add(3, 1, TimeUnit.SECONDS); RSetCache<Integer> cache2 = redisson.getSetCache("cache2", IntegerCodec.INSTANCE); cache2.add(4); cache2.add(2, 1, TimeUnit.SECONDS); cache2.add(5, 1, TimeUnit.SECONDS); cache2.add(7); RSetCache<Integer> cache3 = redisson.getSetCache("cache3", IntegerCodec.INSTANCE); assertThat(cache3.diff("cache1", "cache2")).isEqualTo(2); assertThat(cache3).containsExactlyInAnyOrder(1, 3); cache3.clear(); Thread.sleep(1500); assertThat(cache3.diff("cache1", "cache2")).isEqualTo(1); assertThat(cache3).containsExactlyInAnyOrder(1); assertThat(redisson.getKeys().getKeys()).containsExactlyInAnyOrder("cache1", "cache2", "cache3"); }
@Override public CompletableFuture<T> toCompletableFuture() { return _task.toCompletionStage().toCompletableFuture(); }
@Test public void testCreateStageFromSupplierAsync() throws Exception { String testResult = "testCreateStageFromCompletableFuture"; ParSeqBasedCompletionStage<String> stageFromCompletionStage = _parSeqBasedCompletionStageFactory.buildStageFromSupplierAsync(() -> testResult); Assert.assertEquals(testResult, stageFromCompletionStage.toCompletableFuture().get()); }
public static Read<String> readStrings() { return Read.newBuilder( (PubsubMessage message) -> new String(message.getPayload(), StandardCharsets.UTF_8)) .setCoder(StringUtf8Coder.of()) .build(); }
@Test public void testNullSubscription() { String topic = "projects/project/topics/topic"; PubsubIO.Read<String> read = PubsubIO.readStrings().fromTopic(StaticValueProvider.of(topic)); assertNotNull(read.getTopicProvider()); assertNull(read.getSubscriptionProvider()); assertNotNull(DisplayData.from(read)); }
public static KeyStore loadKeyStore(File certificateChainFile, File privateKeyFile, String keyPassword) throws IOException, GeneralSecurityException { PrivateKey key; try { key = createPrivateKey(privateKeyFile, keyPassword); } catch (OperatorCreationException | IOException | GeneralSecurityException | PKCSException e) { throw new GeneralSecurityException("Private Key issues", e); } List<X509Certificate> certificateChain = readCertificateChain(certificateChainFile); if (certificateChain.isEmpty()) { throw new CertificateException("Certificate file does not contain any certificates: " + certificateChainFile); } KeyStore keyStore = KeyStore.getInstance("JKS"); keyStore.load(null, null); keyStore.setKeyEntry("key", key, keyPassword.toCharArray(), certificateChain.stream().toArray(Certificate[]::new)); return keyStore; }
@Test void testParsingPKCS1WithoutPassword() throws IOException, GeneralSecurityException { KeyStore keystore = PEMImporter.loadKeyStore(pemCert, privkeyWithoutPasswordPKCS1, ""); assertEquals(1, keystore.size()); assertTrue(keystore.containsAlias("key")); assertEquals(1, keystore.getCertificateChain("key").length); }
public void start() { taskExecutor = Executors.newSingleThreadScheduledExecutor( new ThreadFactory() { @Override public Thread newThread(Runnable r) { Thread thread = new Thread(r, "Eureka-PeerNodesUpdater"); thread.setDaemon(true); return thread; } } ); try { updatePeerEurekaNodes(resolvePeerUrls()); Runnable peersUpdateTask = new Runnable() { @Override public void run() { try { updatePeerEurekaNodes(resolvePeerUrls()); } catch (Throwable e) { logger.error("Cannot update the replica Nodes", e); } } }; taskExecutor.scheduleWithFixedDelay( peersUpdateTask, serverConfig.getPeerEurekaNodesUpdateIntervalMs(), serverConfig.getPeerEurekaNodesUpdateIntervalMs(), TimeUnit.MILLISECONDS ); } catch (Exception e) { throw new IllegalStateException(e); } for (PeerEurekaNode node : peerEurekaNodes) { logger.info("Replica node URL: {}", node.getServiceUrl()); } }
@Test public void testReloadWithNoPeerChange() throws Exception { // Start peerEurekaNodes.withPeerUrls(PEER_EUREKA_URL_A); peerEurekaNodes.start(); PeerEurekaNode peerNode = getPeerNode(PEER_EUREKA_URL_A); assertThat(peerEurekaNodes.awaitNextReload(60, TimeUnit.SECONDS), is(true)); assertThat(getPeerNode(PEER_EUREKA_URL_A), is(equalTo(peerNode))); }
@Override public String toString() { return translations.toString(); }
@Test public void testToString() { Translation enMap = SINGLETON.getWithFallBack(Locale.UK); assertEquals("continue onto blp street", enMap.tr("continue_onto", "blp street")); Translation trMap = SINGLETON.getWithFallBack(Locale.GERMANY); assertEquals("Zu Fuß", trMap.tr("web.FOOT")); Translation ruMap = SINGLETON.getWithFallBack(new Locale("ru")); assertEquals("Пешком", ruMap.tr("web.FOOT")); Translation viMap = SINGLETON.getWithFallBack(new Locale("vi", "VI")); assertEquals("Đi bộ", viMap.tr("web.FOOT")); trMap = SINGLETON.get("de_DE"); assertEquals("Zu Fuß", trMap.tr("web.FOOT")); trMap = SINGLETON.get("de"); assertEquals("Zu Fuß", trMap.tr("web.FOOT")); trMap = SINGLETON.get("de_AT"); assertEquals("Zu Fuß", trMap.tr("web.FOOT")); trMap = SINGLETON.get("he"); assertEquals("רגל", trMap.tr("web.FOOT")); trMap = SINGLETON.get("iw"); assertEquals("רגל", trMap.tr("web.FOOT")); // Indonesian: JDK 17 and later return "id"; earlier JDKs returned "in" String lang = SINGLETON.get("id").getLanguage(); assertTrue(Arrays.asList("id", "in").contains(lang)); assertEquals(lang, SINGLETON.get("in").getLanguage()); assertEquals(lang, SINGLETON.get("in_ID").getLanguage()); // Vietnamese assertEquals("vi", SINGLETON.get("vi").getLanguage()); assertEquals("vi", SINGLETON.get("vi_VN").getLanguage()); }
@Override public List<URI> get() { return resultsCachingSupplier.get(); }
@Test void testSkippedConfigWithDefaultIndexer() { final IndexerDiscoveryProvider provider = new IndexerDiscoveryProvider( Collections.emptyList(), 1, Duration.seconds(1), preflightConfig(PreflightConfigResult.SKIPPED), nodes(), NOOP_CERT_PROVISIONER ); Assertions.assertThat(provider.get()) .hasSize(1) .extracting(URI::toString) .contains("http://127.0.0.1:9200"); }
@Override public CharSequence get(CharSequence name) { return get0(name); }
@Test public void testGet() { Http2Headers headers = newClientHeaders(); assertTrue(AsciiString.contentEqualsIgnoreCase("value1", headers.get("Name1"))); assertTrue(AsciiString.contentEqualsIgnoreCase("/foo", headers.get(Http2Headers.PseudoHeaderName.PATH.value()))); assertNull(headers.get(Http2Headers.PseudoHeaderName.STATUS.value())); assertNull(headers.get("a missing header")); }
public static InetSocketAddress toInetSocketAddress(String address) { String[] ipPortStr = splitIPPortStr(address); String host; int port; if (null != ipPortStr) { host = ipPortStr[0]; port = Integer.parseInt(ipPortStr[1]); } else { host = address; port = 0; } return new InetSocketAddress(host, port); }
@Test public void testToInetSocketAddress() { try { NetUtil.toInetSocketAddress("23939:ks"); } catch (Exception e) { assertThat(e).isInstanceOf(NumberFormatException.class); } }
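The try/catch pattern above passes silently if no exception is thrown. A stricter variant is sketched below; it assumes JUnit's assertThrows (4.13+/Jupiter) is available to this suite, which the snippet does not confirm.

@Test
public void testToInetSocketAddressRejectsNonNumericPort() {
    // assertThrows fails the test outright if Integer.parseInt never
    // throws for the non-numeric "ks" port segment.
    assertThrows(NumberFormatException.class,
            () -> NetUtil.toInetSocketAddress("23939:ks"));
}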
@Override public Num calculate(BarSeries series, Position position) { return series.one(); }
@Test public void calculateWithNoPositions() { MockBarSeries series = new MockBarSeries(numFunction, 100, 105, 110, 100, 95, 105); AnalysisCriterion buyAndHold = getCriterion(); assertNumEquals(0, buyAndHold.calculate(series, new BaseTradingRecord())); }
@Override public boolean isSatisfied(int index, TradingRecord tradingRecord) { if (tradingRecord.getCurrentPosition().isOpened()) { final int entryIndex = tradingRecord.getLastEntry().getIndex(); final int currentBarCount = index - entryIndex; return currentBarCount >= barCount; } return false; }
@Test public void testAtLeastOneBarRuleForOpenedTrade() { final OpenedPositionMinimumBarCountRule rule = new OpenedPositionMinimumBarCountRule(1); final BarSeries series = new MockBarSeries(DecimalNum::valueOf, 1, 2, 3, 4); final TradingRecord tradingRecord = new BaseTradingRecord(Trade.buyAt(0, series)); assertFalse(rule.isSatisfied(0, tradingRecord)); assertTrue(rule.isSatisfied(1, tradingRecord)); assertTrue(rule.isSatisfied(2, tradingRecord)); assertTrue(rule.isSatisfied(3, tradingRecord)); }
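The guard in the focal method implies a complementary case: with no open position, isSatisfied returns false at every index. A minimal sketch reusing the fixtures above:

@Test
public void testRuleNotSatisfiedWithoutOpenPosition() {
    // getCurrentPosition().isOpened() is false for an empty record,
    // so the rule never reaches the bar-count comparison.
    final OpenedPositionMinimumBarCountRule rule = new OpenedPositionMinimumBarCountRule(1);
    assertFalse(rule.isSatisfied(0, new BaseTradingRecord()));
}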
public String getConstraintsAsString() { return constraintsString; }
@Test void getConstraintsAsString() { KiePMMLFieldOperatorValue kiePMMLFieldOperatorValue = getKiePMMLFieldOperatorValueWithName(); String expected = "value < 35 surrogate value > 85"; String retrieved = kiePMMLFieldOperatorValue.getConstraintsAsString(); assertThat(retrieved).isEqualTo(expected); kiePMMLFieldOperatorValue = getKiePMMLFieldOperatorValueWithoutName(); expected = "value < 35 surrogate value > 85"; retrieved = kiePMMLFieldOperatorValue.buildConstraintsString(); assertThat(retrieved).isEqualTo(expected); }
protected String resolveOffchain( String lookupData, OffchainResolverContract resolver, int lookupCounter) throws Exception { if (EnsUtils.isEIP3668(lookupData)) { OffchainLookup offchainLookup = OffchainLookup.build(Numeric.hexStringToByteArray(lookupData.substring(10))); if (!resolver.getContractAddress().equals(offchainLookup.getSender())) { throw new EnsResolutionException( "Cannot handle OffchainLookup raised inside nested call"); } String gatewayResult = ccipReadFetch( offchainLookup.getUrls(), offchainLookup.getSender(), Numeric.toHexString(offchainLookup.getCallData())); if (gatewayResult == null) { throw new EnsResolutionException("CCIP Read disabled or provided no URLs."); } ObjectMapper objectMapper = ObjectMapperFactory.getObjectMapper(); EnsGatewayResponseDTO gatewayResponseDTO = objectMapper.readValue(gatewayResult, EnsGatewayResponseDTO.class); String resolvedNameHex = resolver.resolveWithProof( Numeric.hexStringToByteArray(gatewayResponseDTO.getData()), offchainLookup.getExtraData()) .send(); // This protocol can result in multiple lookups being requested by the same contract. if (EnsUtils.isEIP3668(resolvedNameHex)) { if (lookupCounter <= 0) { throw new EnsResolutionException("Lookup calls is out of limit."); } return resolveOffchain(lookupData, resolver, --lookupCounter); } else { byte[] resolvedNameBytes = DefaultFunctionReturnDecoder.decodeDynamicBytes(resolvedNameHex); return DefaultFunctionReturnDecoder.decodeAddress( Numeric.toHexString(resolvedNameBytes)); } } return lookupData; }
@Test void resolveOffchainNotEIP() throws Exception { String lookupData = "some data"; String resolveResponse = ensResolver.resolveOffchain(lookupData, null, 4); assertEquals(lookupData, resolveResponse); }
@Override public void initialize(ServiceConfiguration config) throws IOException { this.allowedAudiences = validateAllowedAudiences(getConfigValueAsSet(config, ALLOWED_AUDIENCES)); this.roleClaim = getConfigValueAsString(config, ROLE_CLAIM, ROLE_CLAIM_DEFAULT); this.isRoleClaimNotSubject = !ROLE_CLAIM_DEFAULT.equals(roleClaim); this.acceptedTimeLeewaySeconds = getConfigValueAsInt(config, ACCEPTED_TIME_LEEWAY_SECONDS, ACCEPTED_TIME_LEEWAY_SECONDS_DEFAULT); boolean requireHttps = getConfigValueAsBoolean(config, REQUIRE_HTTPS, REQUIRE_HTTPS_DEFAULT); this.fallbackDiscoveryMode = FallbackDiscoveryMode.valueOf(getConfigValueAsString(config, FALLBACK_DISCOVERY_MODE, FallbackDiscoveryMode.DISABLED.name())); this.issuers = validateIssuers(getConfigValueAsSet(config, ALLOWED_TOKEN_ISSUERS), requireHttps, fallbackDiscoveryMode != FallbackDiscoveryMode.DISABLED); int connectionTimeout = getConfigValueAsInt(config, HTTP_CONNECTION_TIMEOUT_MILLIS, HTTP_CONNECTION_TIMEOUT_MILLIS_DEFAULT); int readTimeout = getConfigValueAsInt(config, HTTP_READ_TIMEOUT_MILLIS, HTTP_READ_TIMEOUT_MILLIS_DEFAULT); String trustCertsFilePath = getConfigValueAsString(config, ISSUER_TRUST_CERTS_FILE_PATH, null); SslContext sslContext = null; // When config is in the conf file but is empty, it defaults to the empty string, which is not meaningful and // should be ignored. if (StringUtils.isNotBlank(trustCertsFilePath)) { // Use default settings for everything but the trust store. sslContext = SslContextBuilder.forClient() .trustManager(new File(trustCertsFilePath)) .build(); } AsyncHttpClientConfig clientConfig = new DefaultAsyncHttpClientConfig.Builder() .setConnectTimeout(connectionTimeout) .setReadTimeout(readTimeout) .setSslContext(sslContext) .build(); httpClient = new DefaultAsyncHttpClient(clientConfig); k8sApiClient = fallbackDiscoveryMode != FallbackDiscoveryMode.DISABLED ? Config.defaultClient() : null; this.openIDProviderMetadataCache = new OpenIDProviderMetadataCache(config, httpClient, k8sApiClient); this.jwksCache = new JwksCache(config, httpClient, k8sApiClient); }
@Test public void ensureInsecureIssuerFailsInitialization() throws IOException { @Cleanup AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID(); Properties props = new Properties(); props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, "https://myissuer.com,http://myissuer.com"); ServiceConfiguration config = new ServiceConfiguration(); config.setProperties(props); Assert.assertThrows(IllegalArgumentException.class, () -> provider.initialize(config)); }
public SerializableFunction<Row, T> getFromRowFunction() { return fromRowFunction; }
@Test public void testMapRowToProto() { ProtoDynamicMessageSchema schemaProvider = schemaFromDescriptor(MapPrimitive.getDescriptor()); SerializableFunction<Row, DynamicMessage> fromRow = schemaProvider.getFromRowFunction(); MapPrimitive proto = parseFrom(fromRow.apply(MAP_PRIMITIVE_ROW).toString(), MapPrimitive.newBuilder()).build(); assertEquals(MAP_PRIMITIVE_PROTO, proto); }
public static Map<String, String> parseParams(HttpRequest request) { if (request instanceof HttpGet) { return parseParamsForGet(request); } if (request instanceof HttpEntityEnclosingRequestBase) { return parseParamsForRequestWithEntity((HttpEntityEnclosingRequestBase) request); } return new LinkedHashMap<>(); }
@Test public void parseParams_returnsEmptyForUnsupportedOperations() throws Exception { HttpDelete httpDelete = new HttpDelete("http://example.com/deleteme"); assertThat(ParamsParser.parseParams(httpDelete)).isEmpty(); }
public static int getMaxTileNumber(byte zoomLevel) { if (zoomLevel < 0) { throw new IllegalArgumentException("zoomLevel must not be negative: " + zoomLevel); } else if (zoomLevel == 0) { return 0; } return (2 << zoomLevel - 1) - 1; }
@Test public void getMaxTileNumberTest() { Assert.assertEquals(0, Tile.getMaxTileNumber((byte) 0)); Assert.assertEquals(1, Tile.getMaxTileNumber((byte) 1)); Assert.assertEquals(3, Tile.getMaxTileNumber((byte) 2)); Assert.assertEquals(7, Tile.getMaxTileNumber((byte) 3)); Assert.assertEquals(1023, Tile.getMaxTileNumber((byte) 10)); Assert.assertEquals(1048575, Tile.getMaxTileNumber((byte) 20)); Assert.assertEquals(1073741823, Tile.getMaxTileNumber((byte) 30)); verifyInvalidMaxTileNumber((byte) -1); verifyInvalidMaxTileNumber(Byte.MIN_VALUE); }
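The helper verifyInvalidMaxTileNumber is referenced but not shown. A plausible shape, hypothetical rather than taken from the source, is:

private static void verifyInvalidMaxTileNumber(byte zoomLevel) {
    try {
        Tile.getMaxTileNumber(zoomLevel);
        Assert.fail("IllegalArgumentException expected for zoomLevel " + zoomLevel);
    } catch (IllegalArgumentException e) {
        // expected: the focal method rejects negative zoom levels
    }
}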
List<String> decorateTextWithHtml(String text, DecorationDataHolder decorationDataHolder) { return decorateTextWithHtml(text, decorationDataHolder, null, null); }
@Test public void should_escape_ampersand_char() { String javadocWithAmpersandChar = "/**\n" + " * Definition of a dashboard.\n" + " * <p/>\n" + " * Its name and description can be retrieved using the i18n mechanism, using the keys \"dashboard.&lt;id&gt;.name\" and\n" + " * \"dashboard.&lt;id&gt;.description\".\n" + " *\n" + " * @since 2.13\n" + " */\n"; DecorationDataHolder decorationData = new DecorationDataHolder(); decorationData.loadSyntaxHighlightingData("0,220,cppd;"); HtmlTextDecorator htmlTextDecorator = new HtmlTextDecorator(); List<String> htmlOutput = htmlTextDecorator.decorateTextWithHtml(javadocWithAmpersandChar, decorationData); assertThat(htmlOutput).containsExactly( "<span class=\"cppd\">/**</span>", "<span class=\"cppd\"> * Definition of a dashboard.</span>", "<span class=\"cppd\"> * &lt;p/&gt;</span>", "<span class=\"cppd\"> * Its name and description can be retrieved using the i18n mechanism, using the keys \"dashboard.&amp;lt;id&amp;gt;.name\" and</span>", "<span class=\"cppd\"> * \"dashboard.&amp;lt;id&amp;gt;.description\".</span>", "<span class=\"cppd\"> *</span>", "<span class=\"cppd\"> * @since 2.13</span>", "<span class=\"cppd\"> */</span>", ""); }
@Override public Map<String, List<TopicPartition>> assignPartitions(Map<String, List<PartitionInfo>> partitionsPerTopic, Map<String, Subscription> subscriptions) { Map<String, List<TopicPartition>> consumerToOwnedPartitions = new HashMap<>(); Set<TopicPartition> partitionsWithMultiplePreviousOwners = new HashSet<>(); List<PartitionInfo> allPartitions = new ArrayList<>(); partitionsPerTopic.values().forEach(allPartitions::addAll); RackInfo rackInfo = new RackInfo(allPartitions, subscriptions); AbstractAssignmentBuilder assignmentBuilder; if (allSubscriptionsEqual(partitionsPerTopic.keySet(), subscriptions, consumerToOwnedPartitions, partitionsWithMultiplePreviousOwners)) { log.debug("Detected that all consumers were subscribed to same set of topics, invoking the " + "optimized assignment algorithm"); partitionsTransferringOwnership = new HashMap<>(); assignmentBuilder = new ConstrainedAssignmentBuilder(partitionsPerTopic, rackInfo, consumerToOwnedPartitions, partitionsWithMultiplePreviousOwners); } else { log.debug("Detected that not all consumers were subscribed to same set of topics, falling back to the " + "general case assignment algorithm"); // we must set this to null for the general case so the cooperative assignor knows to compute it from scratch partitionsTransferringOwnership = null; assignmentBuilder = new GeneralAssignmentBuilder(partitionsPerTopic, rackInfo, consumerToOwnedPartitions, subscriptions); } return assignmentBuilder.build(); }
@Test public void testSubscriptionNotEqualAndAssignSamePartitionWith3Generation() { Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>(); partitionsPerTopic.put(topic, partitionInfos(topic, 6)); partitionsPerTopic.put(topic1, partitionInfos(topic1, 1)); int[][] sequence = new int[][]{{1, 2, 3}, {1, 3, 2}, {2, 1, 3}, {2, 3, 1}, {3, 1, 2}, {3, 2, 1}}; for (int[] ints : sequence) { subscriptions.put( consumer1, buildSubscriptionV2Above(topics(topic), partitions(tp(topic, 0), tp(topic, 2)), ints[0], 0) ); subscriptions.put( consumer2, buildSubscriptionV2Above(topics(topic), partitions(tp(topic, 1), tp(topic, 2), tp(topic, 3)), ints[1], 1) ); subscriptions.put( consumer3, buildSubscriptionV2Above(topics(topic), partitions(tp(topic, 2), tp(topic, 4), tp(topic, 5)), ints[2], 2) ); subscriptions.put( consumer4, buildSubscriptionV2Above(topics(topic1), partitions(tp(topic1, 0)), 2, 3) ); Map<String, List<TopicPartition>> assign = assignor.assignPartitions(partitionsPerTopic, subscriptions); assertEquals(assign.values().stream().mapToInt(List::size).sum(), assign.values().stream().flatMap(List::stream).collect(Collectors.toSet()).size()); for (List<TopicPartition> list: assign.values()) { assertTrue(list.size() >= 1 && list.size() <= 2); } } }
public EndpointResponse checkHealth() { return EndpointResponse.ok(getResponse()); }
@Test public void shouldRecheckHealthIfCachedResponseExpired() throws Exception { // Given: healthCheckResource = new HealthCheckResource(healthCheckAgent, Duration.ofMillis(10)); healthCheckResource.checkHealth(); // When / Then: assertThatEventually( "Should receive response2 once response1 expires.", () -> healthCheckResource.checkHealth().getEntity(), is(response2), 1000, TimeUnit.MILLISECONDS ); }
public static <T> T toObj(byte[] json, Class<T> cls) { try { return mapper.readValue(json, cls); } catch (Exception e) { throw new NacosDeserializationException(cls, e); } }
@Test void testToObject11() { assertEquals(Collections.singletonMap("key", "value"), JacksonUtils.toObj(new ByteArrayInputStream("{\"key\":\"value\"}".getBytes()), TypeUtils.parameterize(Map.class, String.class, String.class))); assertEquals(Collections.singletonList(Collections.singletonMap("key", "value")), JacksonUtils.toObj(new ByteArrayInputStream("[{\"key\":\"value\"}]".getBytes()), TypeUtils.parameterize(List.class, TypeUtils.parameterize(Map.class, String.class, String.class)))); }
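The paired test exercises InputStream/Type overloads rather than the byte[] overload shown as the focal method. A direct sketch for the latter, assuming the same Jackson-backed mapper:

@Test
void testToObjFromByteArray() {
    // The byte[] overload delegates to mapper.readValue(byte[], Class);
    // a raw Map target deserializes to {key=value}.
    assertEquals(Collections.singletonMap("key", "value"),
            JacksonUtils.toObj("{\"key\":\"value\"}".getBytes(), Map.class));
}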
public synchronized void rewind() { this.readPosition = 0; this.curReadBufferIndex = 0; this.readPosInCurBuffer = 0; if (CollectionUtils.isNotEmpty(bufferList)) { this.curBuffer = bufferList.get(0); for (ByteBuffer buffer : bufferList) { buffer.rewind(); } } }
@Test public void testCommitLogTypeInputStream() { List<ByteBuffer> uploadBufferList = new ArrayList<>(); int bufferSize = 0; for (int i = 0; i < MSG_NUM; i++) { ByteBuffer byteBuffer = MessageFormatUtilTest.buildMockedMessageBuffer(); uploadBufferList.add(byteBuffer); bufferSize += byteBuffer.remaining(); } // build expected byte buffer for verifying the FileSegmentInputStream ByteBuffer expectedByteBuffer = ByteBuffer.allocate(bufferSize); for (ByteBuffer byteBuffer : uploadBufferList) { expectedByteBuffer.put(byteBuffer); byteBuffer.rewind(); } // set real physical offset for (int i = 0; i < MSG_NUM; i++) { long physicalOffset = COMMIT_LOG_START_OFFSET + i * MSG_LEN; int position = i * MSG_LEN + MessageFormatUtil.PHYSICAL_OFFSET_POSITION; expectedByteBuffer.putLong(position, physicalOffset); } int finalBufferSize = bufferSize; int[] batchReadSizeTestSet = { MessageFormatUtil.PHYSICAL_OFFSET_POSITION - 1, MessageFormatUtil.PHYSICAL_OFFSET_POSITION, MessageFormatUtil.PHYSICAL_OFFSET_POSITION + 1, MSG_LEN - 1, MSG_LEN, MSG_LEN + 1 }; verifyReadAndReset(expectedByteBuffer, () -> FileSegmentInputStreamFactory.build( FileSegmentType.COMMIT_LOG, COMMIT_LOG_START_OFFSET, uploadBufferList, null, finalBufferSize), finalBufferSize, batchReadSizeTestSet); }
@Override public void open() throws Exception { super.open(); inputActivityClock = new PausableRelativeClock(getProcessingTimeService().getClock()); getContainingTask() .getEnvironment() .getMetricGroup() .getIOMetricGroup() .registerBackPressureListener(inputActivityClock); // watermark and timestamp should start from 0 this.currentWatermark = 0; this.watermarkInterval = getExecutionConfig().getAutoWatermarkInterval(); long now = getProcessingTimeService().getCurrentProcessingTime(); this.lastWatermarkPeriodicEmitTime = now; this.timeSinceLastIdleCheck = now; if (watermarkInterval > 0 || idleTimeout > 0) { this.timerInterval = calculateProcessingTimeTimerInterval(watermarkInterval, idleTimeout); getProcessingTimeService().registerTimer(now + timerInterval, this); } FunctionUtils.setFunctionRuntimeContext(watermarkGenerator, getRuntimeContext()); FunctionUtils.openFunction(watermarkGenerator, DefaultOpenContext.INSTANCE); }
@Test public void testIdleTimeoutUnderBackpressure() throws Exception { long idleTimeout = 100; OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(0, WATERMARK_GENERATOR, idleTimeout); testHarness.getExecutionConfig().setAutoWatermarkInterval(idleTimeout); testHarness.open(); TaskIOMetricGroup taskIOMetricGroup = testHarness.getEnvironment().getMetricGroup().getIOMetricGroup(); taskIOMetricGroup.getHardBackPressuredTimePerSecond().markStart(); stepProcessingTime(testHarness, 0, idleTimeout * 10, idleTimeout / 10); assertThat(testHarness.getOutput()).isEmpty(); taskIOMetricGroup.getHardBackPressuredTimePerSecond().markEnd(); taskIOMetricGroup.getSoftBackPressuredTimePerSecond().markStart(); stepProcessingTime(testHarness, idleTimeout * 10, idleTimeout * 20, idleTimeout / 10); assertThat(testHarness.getOutput()).isEmpty(); taskIOMetricGroup.getSoftBackPressuredTimePerSecond().markEnd(); stepProcessingTime(testHarness, idleTimeout * 20, idleTimeout * 30, idleTimeout / 10); assertThat(testHarness.getOutput()).containsExactly(WatermarkStatus.IDLE); }
public Set<FactIdentifier> getFactIdentifiers() { return factMappings.stream().map(FactMapping::getFactIdentifier).collect(Collectors.toSet()); }
@Test public void getFactIdentifiers() { modelDescriptor.addFactMapping(factIdentifier, expressionIdentifier); assertThat(modelDescriptor.getFactIdentifiers()).isNotNull().hasSize(1).containsExactly(factIdentifier); }
int lookupResourceValue(Ref<Res_value> value) { byte resolvedType = DataType.REFERENCE.code(); Res_value inValue = value.get(); DataType dataType; try { dataType = DataType.fromCode(inValue.dataType); } catch (IllegalArgumentException e) { return BAD_TYPE; } switch (dataType) { case ATTRIBUTE: resolvedType = DataType.ATTRIBUTE.code(); // fallthrough case REFERENCE: if (!mAppAsLib) { return NO_ERROR; } // If the package is loaded as shared library, the resource reference // also need to be fixed. break; case DYNAMIC_ATTRIBUTE: resolvedType = DataType.ATTRIBUTE.code(); // fallthrough case DYNAMIC_REFERENCE: break; default: return NO_ERROR; } final Ref<Integer> resIdRef = new Ref<>(inValue.data); int err = lookupResourceId(resIdRef); value.set(inValue.withData(resIdRef.get())); if (err != NO_ERROR) { return err; } value.set(new Res_value(resolvedType, resIdRef.get())); return NO_ERROR; }
@Test public void lookupResourceValue_returnsBadTypeIfTypeOutOfEnumRange() { DynamicRefTable pseudoRefTable = new DynamicRefTable(/* packageId= */ (byte) 0, /* appAsLib= */ true); assertThat(pseudoRefTable.lookupResourceValue(RES_VALUE_OF_BAD_TYPE)).isEqualTo(BAD_TYPE); }
@Operation(summary = "Resolve SAML artifact") @PostMapping(value = {"/backchannel/saml/v4/entrance/resolve_artifact", "/backchannel/saml/v4/idp/resolve_artifact"}) public ResponseEntity resolveArtifact(HttpServletRequest request, HttpServletResponse response) throws SamlParseException { try { final var artifactResolveRequest = artifactResolveService.startArtifactResolveProcess(request); artifactResponseService.generateResponse(response, artifactResolveRequest); return new ResponseEntity(HttpStatus.OK); } catch (ClassCastException ex) { return new ResponseEntity(HttpStatus.BAD_REQUEST); } }
@Test void failedResolveArtifactTest() throws SamlParseException { when(artifactResolveServiceMock.startArtifactResolveProcess(any(HttpServletRequest.class))).thenThrow(ClassCastException.class); ResponseEntity response = artifactController.resolveArtifact(httpServletRequestMock, httpServletResponseMock); assertEquals(response.getStatusCode(), HttpStatus.BAD_REQUEST); verify(artifactResolveServiceMock, times(1)).startArtifactResolveProcess(any(HttpServletRequest.class)); }
public Optional<UfsStatus[]> listFromUfs(String path, boolean isRecursive) throws IOException { ListOptions ufsListOptions = ListOptions.defaults().setRecursive(isRecursive); UnderFileSystem ufs = getUfsInstance(path); try { UfsStatus[] listResults = ufs.listStatus(path, ufsListOptions); if (listResults != null) { return Optional.of(listResults); } } catch (IOException e) { if (!(e instanceof FileNotFoundException)) { throw e; } } // TODO(yimin) put the ufs status into the metastore // If list does not give a result, // the request path might either be a regular file/object or not exist. // Try getStatus() instead. try { UfsStatus status = ufs.getStatus(path); if (status == null) { return Optional.empty(); } // Success. Create an array with only one element. status.setName(""); // listStatus() expects relative name to the @path. return Optional.of(new UfsStatus[] {status}); } catch (FileNotFoundException e) { return Optional.empty(); } }
@Test public void listFromUfsWhenGetFail() throws IOException { UnderFileSystem system = mock(UnderFileSystem.class); when(system.listStatus(anyString())).thenReturn(null); when(system.getStatus(anyString())).thenThrow(new FileNotFoundException()); doReturn(system).when(mDoraUfsManager).getOrAdd(any(), any()); Optional<UfsStatus[]> status = mManager.listFromUfs("/test", false); assertEquals(status, Optional.empty()); }
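The fallback branch of the focal method, where getStatus succeeds after listStatus yields nothing, can be covered in the same Mockito style. A sketch reusing the fixtures above (verify and assertTrue are assumed to be imported):

@Test
public void listFromUfsFallsBackToGetStatus() throws IOException {
    UnderFileSystem system = mock(UnderFileSystem.class);
    // listStatus is left unstubbed, so the mock returns null and the
    // focal method falls through to getStatus.
    UfsStatus fileStatus = mock(UfsStatus.class);
    when(system.getStatus(anyString())).thenReturn(fileStatus);
    doReturn(system).when(mDoraUfsManager).getOrAdd(any(), any());
    Optional<UfsStatus[]> status = mManager.listFromUfs("/test", false);
    assertTrue(status.isPresent());
    assertEquals(1, status.get().length);
    // The name is reset to "" so it is relative to the listed path.
    verify(fileStatus).setName("");
}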
public int get(final K key) { final int initialValue = this.initialValue; final K[] keys = this.keys; final int[] values = this.values; @DoNotSub final int mask = values.length - 1; @DoNotSub int index = Hashing.hash(key, mask); int value; while (initialValue != (value = values[index])) { if (Objects.equals(keys[index], key)) { break; } index = ++index & mask; } return value; }
@Test void getShouldReturnInitialValueWhenEmpty() { assertEquals(INITIAL_VALUE, map.get(1)); }
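The probe loop exits either at the initial value (key absent) or when keys[index] equals the query key. A sketch of the found case, assuming the fixture's map accepts Integer keys (as the existing test implies) and exposes the usual put(K, int):

@Test
void getShouldReturnStoredValueAfterPut() {
    // 42 is assumed distinct from the map's configured missing value.
    map.put(1, 42);
    // The while loop terminates via the Objects.equals branch,
    // returning the stored value instead of INITIAL_VALUE.
    assertEquals(42, map.get(1));
}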
static Object parseValue( String key, String value, Class<?> valueType ) throws IllegalArgumentException { return parseValue( key, value, valueType, null, v -> v, Collections.emptyList() ); }
@Test void parseValueWithJavaType() { assertEquals( null, UIDefaultsLoader.parseValue( "dummy", "null", String.class ) ); assertEquals( false, UIDefaultsLoader.parseValue( "dummy", "false", boolean.class ) ); assertEquals( true, UIDefaultsLoader.parseValue( "dummy", "true", Boolean.class ) ); assertEquals( "hello", UIDefaultsLoader.parseValue( "dummy", "hello", String.class ) ); assertEquals( "hello", UIDefaultsLoader.parseValue( "dummy", "\"hello\"", String.class ) ); assertEquals( "null", UIDefaultsLoader.parseValue( "dummy", "\"null\"", String.class ) ); assertEquals( null, UIDefaultsLoader.parseValue( "dummy", "null", String.class ) ); assertEquals( 'a', UIDefaultsLoader.parseValue( "dummy", "a", char.class ) ); assertEquals( 'a', UIDefaultsLoader.parseValue( "dummy", "a", Character.class ) ); assertEquals( 123, UIDefaultsLoader.parseValue( "dummy", "123", int.class ) ); assertEquals( 123, UIDefaultsLoader.parseValue( "dummy", "123", Integer.class ) ); assertEquals( 1.23f, UIDefaultsLoader.parseValue( "dummy", "1.23", float.class ) ); assertEquals( 1.23f, UIDefaultsLoader.parseValue( "dummy", "1.23", Float.class ) ); assertEquals( new Insets( 1,2,3,4 ), UIDefaultsLoader.parseValue( "dummy", "1,2,3,4", Insets.class ) ); assertEquals( new Dimension( 1,2 ), UIDefaultsLoader.parseValue( "dummy", "1,2", Dimension.class ) ); assertEquals( new Color( 0xff0000 ), UIDefaultsLoader.parseValue( "dummy", "#f00", Color.class ) ); }
@Override public XAConnection wrap(final XADataSource xaDataSource, final Connection connection) throws SQLException { return createXAConnection(connection.unwrap(jdbcConnectionClass)); }
@Test void assertWrap() throws SQLException { XAConnection actual = DatabaseTypedSPILoader.getService(XAConnectionWrapper.class, databaseType).wrap(createXADataSource(), mockConnection()); assertThat(actual.getXAResource(), instanceOf(PGXAConnection.class)); }
private KafkaRebalanceStatus updateStatus(KafkaRebalance kafkaRebalance, KafkaRebalanceStatus desiredStatus, Throwable e) { // Leave the current status when the desired state is null if (desiredStatus != null) { Condition cond = KafkaRebalanceUtils.rebalanceStateCondition(desiredStatus); List<Condition> previous = Collections.emptyList(); if (desiredStatus.getConditions() != null) { previous = desiredStatus.getConditions().stream().filter(condition -> condition != cond).collect(Collectors.toList()); } // If a throwable is supplied, it is set in the status with priority if (e != null) { StatusUtils.setStatusConditionAndObservedGeneration(kafkaRebalance, desiredStatus, KafkaRebalanceState.NotReady.toString(), e); desiredStatus.setConditions(Stream.concat(desiredStatus.getConditions().stream(), previous.stream()).collect(Collectors.toList())); } else if (cond != null) { StatusUtils.setStatusConditionAndObservedGeneration(kafkaRebalance, desiredStatus, cond); desiredStatus.setConditions(Stream.concat(desiredStatus.getConditions().stream(), previous.stream()).collect(Collectors.toList())); } else { throw new IllegalArgumentException("Status related exception and the Status condition's type cannot both be null"); } return desiredStatus; } return kafkaRebalance.getStatus(); }
@Test public void testCruiseControlDisabledToEnabledBehaviour(VertxTestContext context) { // build a Kafka cluster without the cruiseControl definition Kafka kafka = new KafkaBuilder(KAFKA) .editSpec() .withCruiseControl(null) .endSpec() .withNewStatus() .withObservedGeneration(1L) .withConditions(new ConditionBuilder() .withType("Ready") .withStatus("True") .build()) .endStatus() .build(); KafkaRebalance kr = createKafkaRebalance(namespace, CLUSTER_NAME, RESOURCE_NAME, EMPTY_KAFKA_REBALANCE_SPEC, false); Crds.kafkaRebalanceOperation(client).inNamespace(namespace).resource(kr).create(); Crds.kafkaOperation(client).inNamespace(namespace).resource(kafka).create(); Crds.kafkaOperation(client).inNamespace(namespace).resource(kafka).updateStatus(); Checkpoint checkpoint = context.checkpoint(); krao.reconcile(new Reconciliation("test-trigger", KafkaRebalance.RESOURCE_KIND, namespace, RESOURCE_NAME)) .onComplete(context.succeeding(v -> context.verify(() -> { // the resource moved from New to NotReady due to the error assertState(context, client, namespace, RESOURCE_NAME, KafkaRebalanceState.NotReady, InvalidResourceException.class, "Kafka resource lacks 'cruiseControl' declaration"); }))) .compose(v -> { try { // Set up the rebalance endpoint with the number of pending calls before a response is received. cruiseControlServer.setupCCRebalanceResponse(0, CruiseControlEndpoints.REBALANCE); } catch (IOException | URISyntaxException e) { context.failNow(e); } Kafka kafkaPatch = new KafkaBuilder(Crds.kafkaOperation(client).inNamespace(namespace).withName(CLUSTER_NAME).get()) .editSpec() .withNewCruiseControl() .endCruiseControl() .endSpec() .build(); Crds.kafkaOperation(client).inNamespace(namespace).resource(kafkaPatch).update(); crdCreateCruiseControlSecrets(); // trigger another reconcile to process the NotReady state return krao.reconcile(new Reconciliation("test-trigger", KafkaRebalance.RESOURCE_KIND, namespace, RESOURCE_NAME)); }) .onComplete(context.succeeding(v -> { // the resource transitioned from 'NotReady' to 'ProposalReady' assertState(context, client, namespace, RESOURCE_NAME, KafkaRebalanceState.ProposalReady); checkpoint.flag(); })); }
public static <T extends RecordTemplate> PatchRequest<T> applyProjection(PatchRequest<T> patch, MaskTree projection) { try { /** * Implementation consists of 3 steps: * 1) move data conveyed by patch out of meta-commands (expose method) * 2) apply projection on it * 3) trim original patch with result of projection * In order to limit the amount of generated garbage, operations are * performed in-place */ DataMap forProjecting = patch.getPatchDocument().copy(); expose(forProjecting); DataMap projected = project(forProjecting, projection); DataMap forTrimming = patch.getPatchDocument().copy(); trim(forTrimming, projected); return PatchRequest.createFromPatchDocument(forTrimming); } catch (CloneNotSupportedException e) { throw new IllegalArgumentException("Patch must be cloneable in order to apply projection to it", e); } }
@Test(dataProvider = "data") public void testProjectionOnPatch(String[] patchAndProjection, String expectedResult) throws IOException { DataMap patch = dataMapFromString(patchAndProjection[0].replace('\'', '"')); DataMap projection = dataMapFromString(patchAndProjection[1].replace('\'', '"')); DataMap expected = dataMapFromString(expectedResult.replace('\'', '"')); assertEquals(PatchHelper.applyProjection(PatchRequest.<RecordTemplate>createFromPatchDocument(patch), new MaskTree(projection)).getPatchDocument(), expected); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { return this.list(directory, listener, new HostPreferences(session.getHost()).getInteger("eue.listing.chunksize")); }
@Test public void testListForSharedFile() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); final Path sourceFolder = new EueDirectoryFeature(session, fileid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path file = new Path(sourceFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); createFile(fileid, file, RandomUtils.nextBytes(0)); assertTrue(new EueFindFeature(session, fileid).find(file)); final ShareCreationResponseEntry shareCreationResponseEntry = createShare(fileid, file); final String shareName = shareCreationResponseEntry.getEntity().getName(); final PathAttributes attr = new EueListService(session, fileid).list(sourceFolder, new DisabledListProgressListener()).get(file).attributes(); assertNotNull(attr.getLink()); assertEquals(attr.getLink(), new EueShareUrlProvider(session.getHost(), session.userShares()).toUrl(file).find(DescriptiveUrl.Type.signed)); new EueDeleteFeature(session, fileid).delete(Collections.singletonList(sourceFolder), new DisabledPasswordCallback(), new Delete.DisabledCallback()); }
@Override public final Optional<IdentifierValue> getAlias() { return Optional.ofNullable(alias); }
@Test void assertGetAlias() { Projection projection = new AggregationProjection(AggregationType.COUNT, "COUNT( A.\"DIRECTION\" )", new IdentifierValue("AVG_DERIVED_COUNT_0"), mock(DatabaseType.class)); Optional<IdentifierValue> actual = projection.getAlias(); assertTrue(actual.isPresent()); assertThat(actual.get().getValue(), is("AVG_DERIVED_COUNT_0")); assertThat(actual.get().getQuoteCharacter(), is(QuoteCharacter.NONE)); }
@Override public Schema getSourceSchema() { if (schema == null) { try { Schema.Parser parser = new Schema.Parser(); schema = parser.parse(schemaString); } catch (Exception e) { throw new HoodieSchemaException("Failed to parse schema: " + schemaString, e); } } return schema; }
@Test public void validateRecursiveSchemaGeneration_depth2() throws IOException { TypedProperties properties = new TypedProperties(); properties.setProperty(ProtoClassBasedSchemaProviderConfig.PROTO_SCHEMA_CLASS_NAME.key(), Parent.class.getName()); properties.setProperty(ProtoClassBasedSchemaProviderConfig.PROTO_SCHEMA_MAX_RECURSION_DEPTH.key(), String.valueOf(2)); ProtoClassBasedSchemaProvider protoToAvroSchemaProvider = new ProtoClassBasedSchemaProvider(properties, null); Schema convertedSchema = protoToAvroSchemaProvider.getSourceSchema(); Schema.Parser parser = new Schema.Parser(); Schema expectedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/parent_schema_recursive_depth_2.avsc")); Assertions.assertEquals(expectedSchema, convertedSchema); }
public static byte[] generateFor(byte[] unidentifiedAccessKey) { try { if (unidentifiedAccessKey.length != UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH) { throw new IllegalArgumentException("Invalid UAK length: " + unidentifiedAccessKey.length); } Mac mac = Mac.getInstance("HmacSHA256"); mac.init(new SecretKeySpec(unidentifiedAccessKey, "HmacSHA256")); return mac.doFinal(new byte[32]); } catch (NoSuchAlgorithmException | InvalidKeyException e) { throw new AssertionError(e); } }
@Test public void generateForIllegalArgument() { final byte[] invalidLengthUnidentifiedAccessKey = new byte[15]; new SecureRandom().nextBytes(invalidLengthUnidentifiedAccessKey); assertThrows(IllegalArgumentException.class, () -> UnidentifiedAccessChecksum.generateFor(invalidLengthUnidentifiedAccessKey)); }
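A happy-path sketch for contrast; it assumes UNIDENTIFIED_ACCESS_KEY_LENGTH is 16 (the invalid case above deliberately uses 15), which this snippet does not confirm:

@Test
public void generateForValidKeyReturnsHmacSha256Mac() {
    // Assumed valid key length: 16 bytes.
    final byte[] unidentifiedAccessKey = new byte[16];
    new SecureRandom().nextBytes(unidentifiedAccessKey);
    final byte[] checksum = UnidentifiedAccessChecksum.generateFor(unidentifiedAccessKey);
    // HmacSHA256 always produces a 32-byte MAC.
    assertEquals(32, checksum.length);
}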
@PublicEvolving public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes( MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) { return getMapReturnTypes(mapInterface, inType, null, false); }
@SuppressWarnings({"rawtypes", "unchecked"}) @Test void testFunctionWithNoGenericSuperclass() { RichMapFunction<?, ?> function = new Mapper2(); TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, (TypeInformation) Types.STRING); assertThat(ti.isBasicType()).isTrue(); assertThat(ti).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO); }
@Override public List<Intent> compile(PointToPointIntent intent, List<Intent> installable) { log.trace("compiling {} {}", intent, installable); ConnectPoint ingressPoint = intent.filteredIngressPoint().connectPoint(); ConnectPoint egressPoint = intent.filteredEgressPoint().connectPoint(); //TODO: handle protected path case with suggested path!! //Idea: use suggested path as primary and another path from path service as protection if (intent.suggestedPath() != null && intent.suggestedPath().size() > 0) { Path path = new DefaultPath(PID, intent.suggestedPath(), new ScalarWeight(1)); //Check intent constraints against suggested path and suggested path availability if (checkPath(path, intent.constraints()) && pathAvailable(intent)) { allocateIntentBandwidth(intent, path); return asList(createLinkCollectionIntent(ImmutableSet.copyOf(intent.suggestedPath()), DEFAULT_COST, intent)); } } if (ingressPoint.deviceId().equals(egressPoint.deviceId())) { return createZeroHopLinkCollectionIntent(intent); } // proceed with no protected paths if (!ProtectionConstraint.requireProtectedPath(intent)) { return createUnprotectedLinkCollectionIntent(intent); } try { // attempt to compute and implement backup path return createProtectedIntent(ingressPoint, egressPoint, intent, installable); } catch (PathNotFoundException e) { log.warn("Could not find disjoint Path for {}", intent); // no disjoint path extant -- maximum one path exists between devices return createSinglePathIntent(ingressPoint, egressPoint, intent, installable); } }
@Test public void testBandwidthConstrainedIntentAllocation() { final double bpsTotal = 1000.0; String[] hops = {S1, S2, S3}; final ResourceService resourceService = MockResourceService.makeCustomBandwidthResourceService(bpsTotal); final List<Constraint> constraints = Collections.singletonList(new BandwidthConstraint(Bandwidth.bps(BPS_TO_RESERVE))); final PointToPointIntent intent = makeIntent(new ConnectPoint(DID_1, PORT_1), new ConnectPoint(DID_3, PORT_2), constraints); PointToPointIntentCompiler compiler = makeCompiler(hops, resourceService); compiler.compile(intent, null); Key intentKey = intent.key(); ResourceAllocation rAOne = new ResourceAllocation(RESOURCE_SW1_P1, intentKey); ResourceAllocation rATwo = new ResourceAllocation(RESOURCE_SW1_P2, intentKey); ResourceAllocation rAThree = new ResourceAllocation(RESOURCE_SW2_P1, intentKey); ResourceAllocation rAFour = new ResourceAllocation(RESOURCE_SW2_P2, intentKey); ResourceAllocation rAFive = new ResourceAllocation(RESOURCE_SW3_P1, intentKey); ResourceAllocation rASix = new ResourceAllocation(RESOURCE_SW3_P2, intentKey); Set<ResourceAllocation> expectedResourceAllocations = ImmutableSet.of(rAOne, rATwo, rAThree, rAFour, rAFive, rASix); Set<ResourceAllocation> resourceAllocations = ImmutableSet.copyOf(resourceService.getResourceAllocations(intentKey)); assertThat(resourceAllocations, hasSize(6)); assertEquals(expectedResourceAllocations, resourceAllocations); }
List<Token> tokenize() throws ScanException { List<Token> tokenList = new ArrayList<Token>(); StringBuilder buf = new StringBuilder(); while (pointer < patternLength) { char c = pattern.charAt(pointer); pointer++; switch (state) { case LITERAL_STATE: handleLiteralState(c, tokenList, buf); break; case START_STATE: handleStartState(c, tokenList, buf); break; case DEFAULT_VAL_STATE: handleDefaultValueState(c, tokenList, buf); default: } } // EOS switch (state) { case LITERAL_STATE: addLiteralToken(tokenList, buf); break; case DEFAULT_VAL_STATE: // trailing colon. see also LOGBACK-1140 buf.append(CoreConstants.COLON_CHAR); addLiteralToken(tokenList, buf); break; case START_STATE: // trailing $. see also LOGBACK-1149 buf.append(CoreConstants.DOLLAR); addLiteralToken(tokenList, buf); break; } return tokenList; }
@Test public void LOGBACK_1101() throws ScanException { String input = "a:{y}"; Tokenizer tokenizer = new Tokenizer(input); List<Token> tokenList = tokenizer.tokenize(); witnessList.add(new Token(Token.Type.LITERAL, "a")); witnessList.add(new Token(Token.Type.LITERAL, ":")); witnessList.add(Token.CURLY_LEFT_TOKEN); witnessList.add(new Token(Token.Type.LITERAL, "y")); witnessList.add(Token.CURLY_RIGHT_TOKEN); assertEquals(witnessList, tokenList); }
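The end-of-stream DEFAULT_VAL_STATE branch (the LOGBACK-1140 comment in the focal method) can be exercised with a trailing colon. A sketch reusing the witnessList fixture, under the assumption that ':' moves the tokenizer into DEFAULT_VAL_STATE as the test above suggests:

@Test
public void trailingColonIsEmittedAsLiteral() throws ScanException {
    String input = "a:";
    Tokenizer tokenizer = new Tokenizer(input);
    List<Token> tokenList = tokenizer.tokenize();
    // At EOS the focal method appends the pending colon back to the
    // buffer and emits it as a literal token.
    witnessList.add(new Token(Token.Type.LITERAL, "a"));
    witnessList.add(new Token(Token.Type.LITERAL, ":"));
    assertEquals(witnessList, tokenList);
}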
UuidGenerator loadUuidGenerator() { Class<? extends UuidGenerator> objectFactoryClass = options.getUuidGeneratorClass(); ClassLoader classLoader = classLoaderSupplier.get(); ServiceLoader<UuidGenerator> loader = ServiceLoader.load(UuidGenerator.class, classLoader); if (objectFactoryClass == null) { return loadSingleUuidGeneratorOrDefault(loader); } return loadSelectedUuidGenerator(loader, objectFactoryClass); }
@Test void test_case_2() { Options options = () -> null; UuidGeneratorServiceLoader loader = new UuidGeneratorServiceLoader( UuidGeneratorServiceLoaderTest.class::getClassLoader, options); assertThat(loader.loadUuidGenerator(), instanceOf(RandomUuidGenerator.class)); }
public final void sort(long startIndex, long length) { quickSort(startIndex, length - 1); }
@Test public void testQuickSortInt() { final int[] array = intArrayWithRandomElements(); final long baseAddr = memMgr.getAllocator().allocate(ARRAY_LENGTH * INT_SIZE_IN_BYTES); final MemoryAccessor mem = memMgr.getAccessor(); for (int i = 0; i < ARRAY_LENGTH; i++) { mem.putInt(baseAddr + INT_SIZE_IN_BYTES * i, array[i]); } Arrays.sort(array); new IntMemArrayQuickSorter(mem, baseAddr).sort(0, ARRAY_LENGTH); for (int i = 0; i < ARRAY_LENGTH; i++) { assertEquals("Mismatch at " + i, array[i], mem.getInt(baseAddr + INT_SIZE_IN_BYTES * i)); } }
@Override public void processWatermark(org.apache.flink.streaming.api.watermark.Watermark mark) throws Exception { // if we receive a Long.MAX_VALUE watermark we forward it since it is used // to signal the end of input and to not block watermark progress downstream if (mark.getTimestamp() == Long.MAX_VALUE) { wmOutput.emitWatermark(Watermark.MAX_WATERMARK); } }
@Test void longMaxInputWatermarkIsForwarded() throws Exception { OneInputStreamOperatorTestHarness<Long, Long> testHarness = createTestHarness( WatermarkStrategy.forGenerator((ctx) -> new PeriodicWatermarkGenerator()) .withTimestampAssigner((ctx) -> new LongExtractor())); testHarness.processWatermark(createLegacyWatermark(Long.MAX_VALUE)); assertThat(pollNextLegacyWatermark(testHarness)) .is(matching(legacyWatermark(Long.MAX_VALUE))); }
@Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (response instanceof HttpServletResponse) { final HttpServletResponse resp = (HttpServletResponse) response; resp.setHeader("Cache-Control", CACHE_SETTINGS); } chain.doFilter(request, response); }
@Test void passesThroughNonHttpRequests() throws Exception { final ServletRequest req = mock(ServletRequest.class); final ServletResponse res = mock(ServletResponse.class); filter.doFilter(req, res, chain); verify(chain).doFilter(req, res); verifyNoInteractions(res); }
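The HTTP branch of the focal method can be covered in the same Mockito style; a sketch assuming the fixture's filter and chain, with the exact CACHE_SETTINGS value left unasserted since the constant is internal:

@Test
void setsCacheControlHeaderOnHttpResponses() throws Exception {
    final HttpServletRequest req = mock(HttpServletRequest.class);
    final HttpServletResponse res = mock(HttpServletResponse.class);
    filter.doFilter(req, res, chain);
    // The instanceof branch applies the header before delegating.
    verify(res).setHeader(eq("Cache-Control"), anyString());
    verify(chain).doFilter(req, res);
}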
static TimelineFilterList parseKVFilters(String expr, boolean valueAsString) throws TimelineParseException { return parseFilters(new TimelineParserForKVFilters(expr, valueAsString)); }
@Test void testConfigFiltersParsing() throws Exception { String expr = "(((key11 ne 234 AND key12 eq val12) AND " + "(key13 ene val13 OR key14 eq 567)) OR (key21 eq val_21 OR key22 eq " + "val.22))"; TimelineFilterList expectedList = new TimelineFilterList( Operator.OR, new TimelineFilterList( Operator.AND, new TimelineFilterList( Operator.AND, new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "key11", "234", false), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key12", "val12", true) ), new TimelineFilterList( Operator.OR, new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "key13", "val13", true), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key14", "567", true) ) ), new TimelineFilterList( Operator.OR, new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key21", "val_21", true), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key22", "val.22", true) ) ); verifyFilterList(expr, TimelineReaderWebServicesUtils. parseKVFilters(expr, true), expectedList); expr = "abc ne 234 AND def eq 23 OR rst ene 24 OR xyz eq 456 AND pqr eq 2"; expectedList = new TimelineFilterList( new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "abc", "234", false), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "def", "23", true) ), new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "rst", "24", true), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "xyz", "456", true) ), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "pqr", "2", true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils. parseKVFilters(expr, true), expectedList); // Test with unnecessary spaces. expr = " abc ne 234 AND def eq 23 OR rst ene " + " 24 OR xyz eq 456 AND pqr eq 2 "; expectedList = new TimelineFilterList( new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "abc", "234", false), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "def", "23", true) ), new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "rst", "24", true), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "xyz", "456", true) ), new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "pqr", "2", true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils. parseKVFilters(expr, true), expectedList); expr = "abc gt 234 AND def eq 23 OR rst ene 24 OR xyz eq 456 AND pqr eq 2"; try { TimelineReaderWebServicesUtils.parseKVFilters(expr, true); fail("Invalid compareop specified for config filters. Should be either" + " eq,ne or ene and exception should have been thrown."); } catch (TimelineParseException e) { } }
@Override public boolean isFetchSizeSupported() { return false; }
@Test public void testIsFetchSizeSupported() throws Exception { assertFalse( dbMeta.isFetchSizeSupported() ); }
public static Node build(final List<JoinInfo> joins) { Node root = null; for (final JoinInfo join : joins) { if (root == null) { root = new Leaf(join.getLeftSource()); } if (root.containsSource(join.getRightSource()) && root.containsSource(join.getLeftSource())) { throw new KsqlException("Cannot perform circular join - both " + join.getRightSource() + " and " + join.getLeftJoinExpression() + " are already included in the current join tree: " + root.debugString(0)); } else if (root.containsSource(join.getLeftSource())) { root = new Join(root, new Leaf(join.getRightSource()), join); } else if (root.containsSource(join.getRightSource())) { root = new Join(root, new Leaf(join.getLeftSource()), join.flip()); } else { throw new KsqlException( "Cannot build JOIN tree; neither source in the join is the FROM source or included " + "in a previous JOIN: " + join + ". The current join tree is " + root.debugString(0) ); } } return root; }
@Test public void handlesLeftThreeWayJoin() { // Given: when(j1.getLeftSource()).thenReturn(a); when(j1.getRightSource()).thenReturn(b); when(j2.getLeftSource()).thenReturn(a); when(j2.getRightSource()).thenReturn(c); final List<JoinInfo> joins = ImmutableList.of(j1, j2); // When: final Node root = JoinTree.build(joins); // Then: assertThat(root, instanceOf(Join.class)); assertThat(root, is( new Join( new Join( new Leaf(a), new Leaf(b), j1 ), new Leaf(c), j2 ) )); }
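The circular-join guard in the focal method can be triggered by repeating both sources; a sketch in the same Given/When style, assuming assertThrows (JUnit 4.13+/Jupiter) is available:

@Test
public void rejectsCircularJoin() {
    // Given:
    when(j1.getLeftSource()).thenReturn(a);
    when(j1.getRightSource()).thenReturn(b);
    when(j2.getLeftSource()).thenReturn(a);
    when(j2.getRightSource()).thenReturn(b);
    final List<JoinInfo> joins = ImmutableList.of(j1, j2);

    // When / Then: after j1, the tree contains both a and b,
    // so j2 trips the containsSource guard.
    assertThrows(KsqlException.class, () -> JoinTree.build(joins));
}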
public static NameStep newBuilder() { return new CharacterSteps(); }
@Test void testBuildWizard() { final var character = CharacterStepBuilder.newBuilder() .name("Merlin") .wizardClass("alchemist") .withSpell("poison") .withAbility("invisibility") .withAbility("wisdom") .noMoreAbilities() .build(); assertEquals("Merlin", character.getName()); assertEquals("alchemist", character.getWizardClass()); assertEquals("poison", character.getSpell()); assertNotNull(character.toString()); final var abilities = character.getAbilities(); assertNotNull(abilities); assertEquals(2, abilities.size()); assertTrue(abilities.contains("invisibility")); assertTrue(abilities.contains("wisdom")); }
public static String escapeHtml4(CharSequence html) { Html4Escape escape = new Html4Escape(); return escape.replace(html).toString(); }
@Test public void escapeSingleQuotesTest(){ // 单引号不做转义 String str = "'some text with single quotes'"; final String s = EscapeUtil.escapeHtml4(str); assertEquals("'some text with single quotes'", s); }
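By contrast with the single-quote case, standard HTML4 entities are replaced. A small sketch, assuming the usual entity table behind Html4Escape:

@Test
public void escapeAngleBracketsTest() {
    // '<' and '>' are HTML4 entities and get escaped, unlike the single quote.
    assertEquals("&lt;b&gt;bold&lt;/b&gt;", EscapeUtil.escapeHtml4("<b>bold</b>"));
}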
public static int[] colMin(int[][] matrix) { int[] x = new int[matrix[0].length]; Arrays.fill(x, Integer.MAX_VALUE); for (int[] row : matrix) { for (int j = 0; j < x.length; j++) { if (x[j] > row[j]) { x[j] = row[j]; } } } return x; }
@Test public void testColMin() { System.out.println("colMin"); double[][] A = { {0.7220180, 0.07121225, 0.6881997}, {-0.2648886, -0.89044952, 0.3700456}, {-0.6391588, 0.44947578, 0.6240573} }; double[] r = {-0.6391588, -0.89044952, 0.3700456}; double[] result = MathEx.colMin(A); for (int i = 0; i < r.length; i++) { assertEquals(result[i], r[i], 1E-7); } }
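The existing test targets a double[][] variant; the int[][] overload shown as the focal method can be exercised directly. A minimal sketch:

@Test
public void testColMinInt() {
    int[][] matrix = {
        {3, 7},
        {1, 9},
        {5, 2}
    };
    // Column minima: min(3,1,5) = 1 and min(7,9,2) = 2.
    assertArrayEquals(new int[]{1, 2}, MathEx.colMin(matrix));
}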
public static JavaBeanDescriptor serialize(Object obj) { return serialize(obj, JavaBeanAccessor.FIELD); }
@Test void test_Circular_Reference() { Parent parent = new Parent(); parent.setAge(Integer.MAX_VALUE); parent.setEmail("a@b"); parent.setName("zhangsan"); Child child = new Child(); child.setAge(100); child.setName("lisi"); child.setParent(parent); parent.setChild(child); JavaBeanDescriptor descriptor = JavaBeanSerializeUtil.serialize(parent, JavaBeanAccessor.METHOD); Assertions.assertTrue(descriptor.isBeanType()); assertEqualsPrimitive(parent.getAge(), descriptor.getProperty("age")); assertEqualsPrimitive(parent.getName(), descriptor.getProperty("name")); assertEqualsPrimitive(parent.getEmail(), descriptor.getProperty("email")); JavaBeanDescriptor childDescriptor = (JavaBeanDescriptor) descriptor.getProperty("child"); Assertions.assertSame(descriptor, childDescriptor.getProperty("parent")); assertEqualsPrimitive(child.getName(), childDescriptor.getProperty("name")); assertEqualsPrimitive(child.getAge(), childDescriptor.getProperty("age")); }
public static GenericRecord rewriteRecord(GenericRecord oldRecord, Schema newSchema) { GenericRecord newRecord = new GenericData.Record(newSchema); boolean isSpecificRecord = oldRecord instanceof SpecificRecordBase; for (Schema.Field f : newSchema.getFields()) { if (!(isSpecificRecord && isMetadataField(f.name()))) { copyOldValueOrSetDefault(oldRecord, newRecord, f); } } return newRecord; }
@Test public void testJsonNodeNullWithDefaultValues() { List<Schema.Field> fields = new ArrayList<>(); Schema initialSchema = Schema.createRecord("test_record", "test record", "org.test.namespace", false); Schema.Field field1 = new Schema.Field("key", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); Schema.Field field2 = new Schema.Field("key1", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); Schema.Field field3 = new Schema.Field("key2", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); fields.add(field1); fields.add(field2); fields.add(field3); initialSchema.setFields(fields); GenericRecord rec = new GenericData.Record(initialSchema); rec.put("key", "val"); rec.put("key1", "val1"); rec.put("key2", "val2"); List<Schema.Field> evolvedFields = new ArrayList<>(); Schema evolvedSchema = Schema.createRecord("evolved_record", "evolved record", "org.evolved.namespace", false); Schema.Field evolvedField1 = new Schema.Field("key", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); Schema.Field evolvedField2 = new Schema.Field("key1", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); Schema.Field evolvedField3 = new Schema.Field("key2", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); Schema.Field evolvedField4 = new Schema.Field("evolved_field", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); Schema.Field evolvedField5 = new Schema.Field("evolved_field1", HoodieAvroUtils.METADATA_FIELD_SCHEMA, "", JsonProperties.NULL_VALUE); evolvedFields.add(evolvedField1); evolvedFields.add(evolvedField2); evolvedFields.add(evolvedField3); evolvedFields.add(evolvedField4); evolvedFields.add(evolvedField5); evolvedSchema.setFields(evolvedFields); GenericRecord rec1 = HoodieAvroUtils.rewriteRecord(rec, evolvedSchema); //evolvedField4.defaultVal() returns a JsonProperties.Null instance. assertNull(rec1.get("evolved_field")); //evolvedField5.defaultVal() returns null. assertNull(rec1.get("evolved_field1")); }
public static <KLeftT, KRightT> KTableHolder<KLeftT> build( final KTableHolder<KLeftT> left, final KTableHolder<KRightT> right, final ForeignKeyTableTableJoin<KLeftT, KRightT> join, final RuntimeBuildContext buildContext ) { final LogicalSchema leftSchema = left.getSchema(); final LogicalSchema rightSchema = right.getSchema(); final ProcessingLogger logger = buildContext.getProcessingLogger( join.getProperties().getQueryContext() ); final ExpressionEvaluator expressionEvaluator; final CodeGenRunner codeGenRunner = new CodeGenRunner( leftSchema, buildContext.getKsqlConfig(), buildContext.getFunctionRegistry() ); final Optional<ColumnName> leftColumnName = join.getLeftJoinColumnName(); final Optional<Expression> leftJoinExpression = join.getLeftJoinExpression(); if (leftColumnName.isPresent()) { expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree( new UnqualifiedColumnReferenceExp(leftColumnName.get()), "Left Join Expression" ); } else if (leftJoinExpression.isPresent()) { expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree( leftJoinExpression.get(), "Left Join Expression" ); } else { throw new IllegalStateException("Both leftColumnName and leftJoinExpression are empty."); } final ForeignKeyJoinParams<KRightT> joinParams = ForeignKeyJoinParamsFactory .create(expressionEvaluator, leftSchema, rightSchema, logger); final Formats formats = join.getFormats(); final PhysicalSchema physicalSchema = PhysicalSchema.from( joinParams.getSchema(), formats.getKeyFeatures(), formats.getValueFeatures() ); final Serde<KLeftT> keySerde = left.getExecutionKeyFactory().buildKeySerde( formats.getKeyFormat(), physicalSchema, join.getProperties().getQueryContext() ); final Serde<GenericRow> valSerde = buildContext.buildValueSerde( formats.getValueFormat(), physicalSchema, join.getProperties().getQueryContext() ); final KTable<KLeftT, GenericRow> result; switch (join.getJoinType()) { case INNER: result = left.getTable().join( right.getTable(), joinParams.getKeyExtractor(), joinParams.getJoiner(), buildContext.getMaterializedFactory().create(keySerde, valSerde) ); break; case LEFT: result = left.getTable().leftJoin( right.getTable(), joinParams.getKeyExtractor(), joinParams.getJoiner(), buildContext.getMaterializedFactory().create(keySerde, valSerde) ); break; default: throw new IllegalStateException("invalid join type: " + join.getJoinType()); } return KTableHolder.unmaterialized( result, joinParams.getSchema(), left.getExecutionKeyFactory() ); }
@Test @SuppressWarnings({"unchecked", "rawtypes"}) public void shouldDoLeftJoinOnSubKey() { // Given: givenLeftJoin(leftMultiKey, L_KEY_2); // When: final KTableHolder<Struct> result = join.build(planBuilder, planInfo); // Then: final ArgumentCaptor<KsqlKeyExtractor> ksqlKeyExtractor = ArgumentCaptor.forClass(KsqlKeyExtractor.class); verify(leftKTableMultiKey).leftJoin( same(rightKTable), ksqlKeyExtractor.capture(), eq(new KsqlValueJoiner(LEFT_SCHEMA_MULTI_KEY.value().size(), RIGHT_SCHEMA.value().size(), 0)), any(Materialized.class) ); verifyNoMoreInteractions(leftKTable, rightKTable, resultKTable); final GenericKey extractedKey = GenericKey.genericKey(LEFT_KEY_2); assertThat(ksqlKeyExtractor.getValue().apply(LEFT_ROW_MULTI), is(extractedKey)); assertThat(result.getTable(), is(resultKTable)); assertThat(result.getExecutionKeyFactory(), is(executionKeyFactory)); }
public Pet status(StatusEnum status) { this.status = status; return this; }
@Test
public void statusTest() {
    // minimal sketch, assuming the generated Pet.StatusEnum.AVAILABLE constant
    // and a matching getStatus() accessor on the model
    Pet pet = new Pet();
    Pet returned = pet.status(Pet.StatusEnum.AVAILABLE);
    assertSame(pet, returned); // fluent setter returns this
    assertEquals(Pet.StatusEnum.AVAILABLE, pet.getStatus());
}
public List<CompactionTask> produce() { // get all CF files sorted by key range start (L1+) List<SstFileMetaData> sstSortedByCfAndStartingKeys = metadataSupplier.get().stream() .filter(l -> l.level() > 0) // let RocksDB deal with L0 .sorted(SST_COMPARATOR) .collect(Collectors.toList()); LOG.trace("Input files: {}", sstSortedByCfAndStartingKeys.size()); List<CompactionTask> tasks = groupIntoTasks(sstSortedByCfAndStartingKeys); tasks.sort(Comparator.<CompactionTask>comparingInt(t -> t.files.size()).reversed()); return tasks.subList(0, Math.min(tasks.size(), settings.maxManualCompactions)); }
@Test void testSkipBeingCompacted() { assertThat(produce(configBuilder().build(), sstBuilder().setBeingCompacted(true).build())) .isEmpty(); }
public static String[] getCheckProcessIsAliveCommand(String pid) { return getSignalKillCommand(0, pid); }
@Test public void testGetCheckProcessIsAliveCommand() throws Exception { String anyPid = "9999"; String[] checkProcessAliveCommand = getCheckProcessIsAliveCommand( anyPid); String[] expectedCommand; if (Shell.WINDOWS) { expectedCommand = new String[]{getWinUtilsPath(), "task", "isAlive", anyPid }; } else if (Shell.isSetsidAvailable) { expectedCommand = new String[] { "bash", "-c", "kill -0 -- -'" + anyPid + "'"}; } else { expectedCommand = new String[] {"bash", "-c", "kill -0 '" + anyPid + "'" }; } Assert.assertArrayEquals(expectedCommand, checkProcessAliveCommand); }
public SortedSet<Long> validWindows(Cluster cluster, double minMonitoredPartitionsPercentage) { AggregationOptions<String, PartitionEntity> options = new AggregationOptions<>(minMonitoredPartitionsPercentage, 0.0, 1, _maxAllowedExtrapolationsPerPartition, allPartitions(cluster), AggregationOptions.Granularity.ENTITY, true); MetricSampleCompleteness<String, PartitionEntity> completeness = completeness(-1, Long.MAX_VALUE, options); return windowIndicesToWindows(completeness.validWindowIndices(), _windowMs); }
@Test public void testValidWindows() { TestContext ctx = setupScenario1(); KafkaPartitionMetricSampleAggregator aggregator = ctx.aggregator(); MetadataClient.ClusterAndGeneration clusterAndGeneration = ctx.clusterAndGeneration(0); SortedSet<Long> validWindows = aggregator.validWindows(clusterAndGeneration.cluster(), 1.0); assertEquals(NUM_WINDOWS, validWindows.size()); assertValidWindows(validWindows, NUM_WINDOWS, Collections.emptySet()); }
public static Optional<Expression> convert( org.apache.flink.table.expressions.Expression flinkExpression) { if (!(flinkExpression instanceof CallExpression)) { return Optional.empty(); } CallExpression call = (CallExpression) flinkExpression; Operation op = FILTERS.get(call.getFunctionDefinition()); if (op != null) { switch (op) { case IS_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::isNull); case NOT_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::notNull); case LT: return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call); case LT_EQ: return convertFieldAndLiteral( Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call); case GT: return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call); case GT_EQ: return convertFieldAndLiteral( Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call); case EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.isNaN(ref); } else { return Expressions.equal(ref, lit); } }, call); case NOT_EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.notNaN(ref); } else { return Expressions.notEqual(ref, lit); } }, call); case NOT: return onlyChildAs(call, CallExpression.class) .flatMap(FlinkFilters::convert) .map(Expressions::not); case AND: return convertLogicExpression(Expressions::and, call); case OR: return convertLogicExpression(Expressions::or, call); case STARTS_WITH: return convertLike(call); } } return Optional.empty(); }
@Test public void testLike() { UnboundPredicate<?> expected = org.apache.iceberg.expressions.Expressions.startsWith("field5", "abc"); Expression expr = resolve( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.LIKE, Expressions.$("field5"), Expressions.lit("abc%"))); Optional<org.apache.iceberg.expressions.Expression> actual = FlinkFilters.convert(expr); assertThat(actual).isPresent(); assertPredicatesMatch(expected, actual.get()); expr = resolve( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.LIKE, Expressions.$("field5"), Expressions.lit("%abc"))); actual = FlinkFilters.convert(expr); assertThat(actual).isNotPresent(); expr = resolve( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.LIKE, Expressions.$("field5"), Expressions.lit("%abc%"))); actual = FlinkFilters.convert(expr); assertThat(actual).isNotPresent(); expr = resolve( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.LIKE, Expressions.$("field5"), Expressions.lit("abc%d"))); actual = FlinkFilters.convert(expr); assertThat(actual).isNotPresent(); expr = resolve( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.LIKE, Expressions.$("field5"), Expressions.lit("%"))); actual = FlinkFilters.convert(expr); assertThat(actual).isNotPresent(); expr = resolve( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.LIKE, Expressions.$("field5"), Expressions.lit("a_"))); actual = FlinkFilters.convert(expr); assertThat(actual).isNotPresent(); expr = resolve( ApiExpressionUtils.unresolvedCall( BuiltInFunctionDefinitions.LIKE, Expressions.$("field5"), Expressions.lit("a%b"))); actual = FlinkFilters.convert(expr); assertThat(actual).isNotPresent(); }
@Override @CheckForNull public String revisionId(Path path) { RepositoryBuilder builder = getVerifiedRepositoryBuilder(path); try { return Optional.ofNullable(getHead(builder.build())) .map(Ref::getObjectId) .map(ObjectId::getName) .orElse(null); } catch (IOException e) { throw new IllegalStateException("I/O error while getting revision ID for path: " + path, e); } }
@Test public void revisionId_should_return_different_sha1_after_commit() throws IOException, GitAPIException { Path projectDir = worktree.resolve("project"); Files.createDirectory(projectDir); GitScmProvider provider = newGitScmProvider(); String sha1before = provider.revisionId(projectDir); assertThat(sha1before).hasSize(40); createAndCommitFile("project/file1"); String sha1after = provider.revisionId(projectDir); assertThat(sha1after) .hasSize(40) .isNotEqualTo(sha1before); assertThat(provider.revisionId(projectDir)).isEqualTo(sha1after); }
private StorageVolume getStorageVolumeOfTable(String svName, long dbId) throws DdlException { StorageVolume sv = null; if (svName.isEmpty()) { String dbStorageVolumeId = getStorageVolumeIdOfDb(dbId); if (dbStorageVolumeId != null) { return getStorageVolume(dbStorageVolumeId); } else { sv = getDefaultStorageVolume(); if (sv == null) { throw ErrorReportException.report(ErrorCode.ERR_NO_DEFAULT_STORAGE_VOLUME); } } } else if (svName.equals(StorageVolumeMgr.DEFAULT)) { sv = getDefaultStorageVolume(); if (sv == null) { throw ErrorReportException.report(ErrorCode.ERR_NO_DEFAULT_STORAGE_VOLUME); } } else { sv = getStorageVolumeByName(svName); if (sv == null) { throw new DdlException("Unknown storage volume \"" + svName + "\""); } } return sv; }
@Test public void testGetStorageVolumeOfTable() throws DdlException, AlreadyExistsException { new Expectations() { { editLog.logSetDefaultStorageVolume((SetDefaultStorageVolumeLog) any); } }; SharedDataStorageVolumeMgr sdsvm = new SharedDataStorageVolumeMgr(); String svName = "test"; List<String> locations = Arrays.asList("s3://abc"); Map<String, String> storageParams = new HashMap<>(); storageParams.put(AWS_S3_REGION, "region"); storageParams.put(AWS_S3_ENDPOINT, "endpoint"); storageParams.put(AWS_S3_USE_AWS_SDK_DEFAULT_BEHAVIOR, "true"); String testSVId = sdsvm.createStorageVolume(svName, "S3", locations, storageParams, Optional.empty(), ""); new MockUp<SharedDataStorageVolumeMgr>() { @Mock public String getStorageVolumeIdOfDb(long dbId) { if (dbId == 1L) { return testSVId; } return null; } }; StorageVolume sv = Deencapsulation.invoke(sdsvm, "getStorageVolumeOfTable", "", 1L); Assert.assertEquals(testSVId, sv.getId()); Config.enable_load_volume_from_conf = false; ErrorReportException ex = Assert.assertThrows(ErrorReportException.class, () -> Deencapsulation.invoke(sdsvm, "getStorageVolumeOfTable", "", 2L)); Assert.assertEquals(ErrorCode.ERR_NO_DEFAULT_STORAGE_VOLUME, ex.getErrorCode()); Config.enable_load_volume_from_conf = true; ex = Assert.assertThrows(ErrorReportException.class, () -> Deencapsulation.invoke(sdsvm, "getStorageVolumeOfTable", "", 2L)); Assert.assertEquals(ErrorCode.ERR_NO_DEFAULT_STORAGE_VOLUME, ex.getErrorCode()); sdsvm.createBuiltinStorageVolume(); String defaultSVId = sdsvm.getStorageVolumeByName(SharedDataStorageVolumeMgr.BUILTIN_STORAGE_VOLUME).getId(); sv = Deencapsulation.invoke(sdsvm, "getStorageVolumeOfTable", StorageVolumeMgr.DEFAULT, 1L); Assert.assertEquals(defaultSVId, sv.getId()); sv = Deencapsulation.invoke(sdsvm, "getStorageVolumeOfTable", svName, 1L); Assert.assertEquals(testSVId, sv.getId()); }
public final <KIn, VIn, KOut, VOut> void addProcessor(final String name, final ProcessorSupplier<KIn, VIn, KOut, VOut> supplier, final String... predecessorNames) { Objects.requireNonNull(name, "name must not be null"); Objects.requireNonNull(supplier, "supplier must not be null"); Objects.requireNonNull(predecessorNames, "predecessor names must not be null"); ApiUtils.checkSupplier(supplier); if (nodeFactories.containsKey(name)) { throw new TopologyException("Processor " + name + " is already added."); } if (predecessorNames.length == 0) { throw new TopologyException("Processor " + name + " must have at least one parent"); } for (final String predecessor : predecessorNames) { Objects.requireNonNull(predecessor, "predecessor name must not be null"); if (predecessor.equals(name)) { throw new TopologyException("Processor " + name + " cannot be a predecessor of itself."); } if (!nodeFactories.containsKey(predecessor)) { throw new TopologyException("Predecessor processor " + predecessor + " is not added yet for " + name); } } nodeFactories.put(name, new ProcessorNodeFactory<>(name, predecessorNames, supplier)); nodeGrouper.add(name); nodeGrouper.unite(name, predecessorNames); nodeGroups = null; }
@Test public void testAddProcessorWithNullParents() { assertThrows(NullPointerException.class, () -> builder.addProcessor("processor", new MockApiProcessorSupplier<>(), (String) null)); }
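The null-parent guard above is one of several in addProcessor; a hedged companion sketch reusing the same builder fixture (assumed fresh, with TopologyException imported), covering the self-reference guard:
@Test
public void testAddProcessorWithSelfParent() {
    // a processor may not list itself among its predecessors
    assertThrows(TopologyException.class,
        () -> builder.addProcessor("processor", new MockApiProcessorSupplier<>(), "processor"));
}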
static int getNext(final CronEntry entry, final int current, final Calendar working) throws MessageFormatException { int result = 0; if (entry.currentWhen == null) { entry.currentWhen = calculateValues(entry); } List<Integer> list = entry.currentWhen; int next = -1; for (Integer i : list) { if (i > current) { next = i; break; } } if (next != -1) { result = next - current; } else { int first = list.get(0); int fixedEnd = entry.end; //months have different max values if("DayOfMonth".equals(entry.name)) { fixedEnd = working.getActualMaximum(Calendar.DAY_OF_MONTH)+1; } result = fixedEnd + first - entry.start - current; // Account for difference of one vs zero based indices. if (entry.name.equals("DayOfWeek") || entry.name.equals("Month")) { result++; } } return result; }
@Test
public void testGetNextExact() throws MessageFormatException {
    String token = "3";
    int next = CronParser.getNext(createEntry(token, 0, 10), 2, null);
    assertEquals(1, next);
    // wrap-around case: no value > 3 remains, so result = end(10) + first(3) - start(0) - current(3) = 10
    next = CronParser.getNext(createEntry(token, 0, 10), 3, null);
    assertEquals(10, next);
    next = CronParser.getNext(createEntry(token, 0, 10), 1, null);
    assertEquals(2, next);
}
@Override public Object lock() { return controller; }
@Test public void testLock() { final AbstractController c = new AbstractController() { @Override public void invoke(final MainAction runnable, final boolean wait) { // } }; assertEquals(c, new ControllerMainAction(c) { @Override public void run() { // } }.lock()); }
public static PodTemplateSpec createPodTemplateSpec( String workloadName, Labels labels, PodTemplate template, Map<String, String> defaultPodLabels, Map<String, String> podAnnotations, Affinity affinity, List<Container> initContainers, List<Container> containers, List<Volume> volumes, List<LocalObjectReference> defaultImagePullSecrets, PodSecurityContext podSecurityContext ) { return new PodTemplateSpecBuilder() .withNewMetadata() .withLabels(labels.withAdditionalLabels(Util.mergeLabelsOrAnnotations(defaultPodLabels, TemplateUtils.labels(template))).toMap()) .withAnnotations(Util.mergeLabelsOrAnnotations(podAnnotations, TemplateUtils.annotations(template))) .endMetadata() .withNewSpec() .withServiceAccountName(workloadName) .withEnableServiceLinks(template != null ? template.getEnableServiceLinks() : null) .withAffinity(affinity) .withInitContainers(initContainers) .withContainers(containers) .withVolumes(volumes) .withTolerations(template != null && template.getTolerations() != null ? template.getTolerations() : null) .withTerminationGracePeriodSeconds(template != null ? (long) template.getTerminationGracePeriodSeconds() : 30L) .withImagePullSecrets(imagePullSecrets(template, defaultImagePullSecrets)) .withSecurityContext(podSecurityContext) .withPriorityClassName(template != null ? template.getPriorityClassName() : null) .withSchedulerName(template != null && template.getSchedulerName() != null ? template.getSchedulerName() : "default-scheduler") .withHostAliases(template != null ? template.getHostAliases() : null) .withTopologySpreadConstraints(template != null ? template.getTopologySpreadConstraints() : null) .endSpec() .build(); }
@Test public void testCreatePodTemplateSpecWithNullTemplate() { PodTemplateSpec pod = WorkloadUtils.createPodTemplateSpec( NAME, LABELS, null, Map.of("default-label", "default-value"), Map.of("extra", "annotations"), DEFAULT_AFFINITY, List.of(new ContainerBuilder().withName("init-container").build()), List.of(new ContainerBuilder().withName("container").build()), VolumeUtils.createPodSetVolumes(NAME + "-0", DEFAULT_STORAGE, false), List.of(new LocalObjectReference("some-pull-secret")), DEFAULT_POD_SECURITY_CONTEXT ); assertThat(pod.getMetadata().getLabels(), is(LABELS.withAdditionalLabels(Map.of("default-label", "default-value")).toMap())); assertThat(pod.getMetadata().getAnnotations(), is(Map.of("extra", "annotations"))); assertThat(pod.getSpec().getServiceAccountName(), is(NAME)); assertThat(pod.getSpec().getEnableServiceLinks(), is(nullValue())); assertThat(pod.getSpec().getAffinity(), is(DEFAULT_AFFINITY)); assertThat(pod.getSpec().getInitContainers().size(), is(1)); assertThat(pod.getSpec().getInitContainers().get(0).getName(), is("init-container")); assertThat(pod.getSpec().getContainers().size(), is(1)); assertThat(pod.getSpec().getContainers().get(0).getName(), is("container")); assertThat(pod.getSpec().getVolumes(), is(VolumeUtils.createPodSetVolumes(NAME + "-0", DEFAULT_STORAGE, false))); assertThat(pod.getSpec().getTolerations(), is(nullValue())); assertThat(pod.getSpec().getTerminationGracePeriodSeconds(), is(30L)); assertThat(pod.getSpec().getImagePullSecrets(), is(List.of(new LocalObjectReference("some-pull-secret")))); assertThat(pod.getSpec().getSecurityContext(), is(DEFAULT_POD_SECURITY_CONTEXT)); assertThat(pod.getSpec().getPriorityClassName(), is(nullValue())); assertThat(pod.getSpec().getSchedulerName(), is("default-scheduler")); assertThat(pod.getSpec().getHostAliases(), is(nullValue())); assertThat(pod.getSpec().getTopologySpreadConstraints(), is(nullValue())); }
public abstract String encrypt(String plaintext) throws GeneralSecurityException;
@Test void encrypt() throws GeneralSecurityException { // given RunContext runContext = runContextFactory.of(); String plainText = "toto"; String encrypted = runContext.encrypt(plainText); String decrypted = EncryptionService.decrypt(secretKey, encrypted); assertThat(encrypted, not(plainText)); assertThat(decrypted, is(plainText)); }
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof JobParameters rhs)) {
        return false;
    }
    return this.parameters.equals(rhs.parameters);
}
@Test void testEquals() { jobParameter = new JobParameter("test", String.class, true); JobParameter testParameter = new JobParameter("test", String.class, true); assertEquals(jobParameter, testParameter); }
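Note that the focal equals lives on JobParameters (the container) while the test above exercises JobParameter; a minimal container-level sketch, assuming the no-arg JobParameters constructor:
@Test
void testJobParametersEquals() {
    // two empty instances carry equal parameter maps, so equals holds
    assertEquals(new JobParameters(), new JobParameters());
    // the instanceof guard rejects foreign types
    assertNotEquals(new JobParameters(), "not job parameters");
}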
@Override public double calcEdgeWeight(EdgeIteratorState edgeState, boolean reverse) { double priority = edgeToPriorityMapping.get(edgeState, reverse); if (priority == 0) return Double.POSITIVE_INFINITY; final double distance = edgeState.getDistance(); double seconds = calcSeconds(distance, edgeState, reverse); if (Double.isInfinite(seconds)) return Double.POSITIVE_INFINITY; // add penalty at start/stop/via points if (edgeState.get(EdgeIteratorState.UNFAVORED_EDGE)) seconds += headingPenaltySeconds; double distanceCosts = distance * distanceInfluence; if (Double.isInfinite(distanceCosts)) return Double.POSITIVE_INFINITY; return seconds / priority + distanceCosts; }
@Test public void testArea() throws Exception { EdgeIteratorState edge1 = graph.edge(0, 1).setDistance(10). set(roadClassEnc, PRIMARY).set(avSpeedEnc, 80); EdgeIteratorState edge2 = graph.edge(2, 3).setDistance(10). set(roadClassEnc, PRIMARY).set(avSpeedEnc, 80); graph.getNodeAccess().setNode(0, 50.0120, 11.582); graph.getNodeAccess().setNode(1, 50.0125, 11.585); graph.getNodeAccess().setNode(2, 40.0, 8.0); graph.getNodeAccess().setNode(3, 40.1, 8.1); CustomModel customModel = createSpeedCustomModel(avSpeedEnc).setDistanceInfluence(70d). addToPriority(If("in_custom1", MULTIPLY, "0.5")); ObjectMapper om = new ObjectMapper().registerModule(new JtsModule()); JsonFeature json = om.readValue("{ \"geometry\":{ \"type\": \"Polygon\", \"coordinates\": " + "[[[11.5818,50.0126], [11.5818,50.0119], [11.5861,50.0119], [11.5861,50.0126], [11.5818,50.0126]]] }}", JsonFeature.class); json.setId("custom1"); customModel.getAreas().getFeatures().add(json); Weighting weighting = createWeighting(customModel); // edge1 is located within the area custom1, edge2 is not assertEquals(1.6, weighting.calcEdgeWeight(edge1, false), 0.01); assertEquals(1.15, weighting.calcEdgeWeight(edge2, false), 0.01); }
private static Schema optional(Schema original) { // null is first in the union because Parquet's default is always null return Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), original)); }
@Test public void testOptionalArrayElement() throws Exception { Schema schema = Schema.createRecord("record1", null, null, false); Schema optionalIntArray = Schema.createArray(optional(Schema.create(INT))); schema.setFields(Arrays.asList(new Schema.Field("myintarray", optionalIntArray, null, null))); testRoundTripConversion( NEW_BEHAVIOR, schema, "message record1 {\n" + " required group myintarray (LIST) {\n" + " repeated group list {\n" + " optional int32 element;\n" + " }\n" + " }\n" + "}\n"); }
@Nullable public static String getRelativeDisplayNameFrom(@CheckForNull Item p, @CheckForNull ItemGroup g) { return getRelativeNameFrom(p, g, true); }
@Test public void testGetRelativeDisplayNameInsideItemGroup() { Item i = mock(Item.class); when(i.getName()).thenReturn("jobName"); when(i.getDisplayName()).thenReturn("displayName"); TopLevelItemAndItemGroup ig = mock(TopLevelItemAndItemGroup.class); ItemGroup j = mock(Jenkins.class); when(ig.getName()).thenReturn("parent"); when(ig.getDisplayName()).thenReturn("parentDisplay"); when(ig.getParent()).thenReturn(j); when(i.getParent()).thenReturn(ig); Item i2 = mock(Item.class); when(i2.getDisplayName()).thenReturn("top"); when(i2.getParent()).thenReturn(j); assertEquals("displayName", Functions.getRelativeDisplayNameFrom(i, ig)); assertEquals("parentDisplay » displayName", Functions.getRelativeDisplayNameFrom(i, j)); assertEquals(".. » top", Functions.getRelativeDisplayNameFrom(i2, ig)); }
public static Finder specializedFinder(String... queries) { var finder = identMult(); for (String query : queries) { finder = finder.and(Finder.contains(query)); } return finder; }
@Test void specializedFinderTest() { var res = specializedFinder("love", "heaven").find(text()); assertEquals(1, res.size()); assertEquals("With a love that the winged seraphs of heaven", res.get(0)); }
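Since specializedFinder is a conjunction of contains-queries, adding one that matches nothing empties the result; a small sketch with the same text() fixture, using a query assumed absent from the sample text:
@Test
void specializedFinderNoMatchTest() {
    // "zzz-no-such-token" is a hypothetical query chosen to match nothing
    var res = specializedFinder("love", "zzz-no-such-token").find(text());
    assertEquals(0, res.size());
}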
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) {
    final long timestamp = clock.getTime() / 1000;
    // oh it'd be lovely to use Java 7 here
    try {
        graphite.connect();
        for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
            reportGauge(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Counter> entry : counters.entrySet()) {
            reportCounter(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
            reportHistogram(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Meter> entry : meters.entrySet()) {
            reportMetered(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Timer> entry : timers.entrySet()) {
            reportTimer(entry.getKey(), entry.getValue(), timestamp);
        }
        graphite.flush();
    } catch (IOException e) {
        LOGGER.warn("Unable to report to Graphite {}", graphite, e);
    } finally {
        try {
            graphite.close();
        } catch (IOException e1) {
            LOGGER.warn("Error closing Graphite {}", graphite, e1);
        }
    }
}
@Test public void reportsByteGaugeValues() throws Exception { reporter.report(map("gauge", gauge((byte) 1)), map(), map(), map(), map()); final InOrder inOrder = inOrder(graphite); inOrder.verify(graphite).connect(); inOrder.verify(graphite).send("prefix.gauge", "1", timestamp); inOrder.verify(graphite).flush(); inOrder.verify(graphite).close(); verifyNoMoreInteractions(graphite); }
@Override public boolean equals(Object obj) { if (this == obj) { return true; } else if (!(obj instanceof MapPosition)) { return false; } MapPosition other = (MapPosition) obj; if (!this.latLong.equals(other.latLong)) { return false; } else if (this.zoomLevel != other.zoomLevel) { return false; } else if (!Objects.equals(this.rotation, other.rotation)) { return false; } return true; }
@Test public void equalsTest() { MapPosition mapPosition1 = new MapPosition(new LatLong(1.0, 2.0), (byte) 3); MapPosition mapPosition2 = new MapPosition(new LatLong(1.0, 2.0), (byte) 3); MapPosition mapPosition3 = new MapPosition(new LatLong(1.0, 2.0), (byte) 0); MapPosition mapPosition4 = new MapPosition(LatLong.fromMicroDegrees(0, 0), (byte) 3); TestUtils.equalsTest(mapPosition1, mapPosition2); TestUtils.notEqualsTest(mapPosition1, mapPosition3); TestUtils.notEqualsTest(mapPosition1, mapPosition4); TestUtils.notEqualsTest(mapPosition1, new Object()); TestUtils.notEqualsTest(mapPosition1, null); }
public ForComputation forComputation(String computation) { return new ForComputation(computation); }
@Test public void testMultipleFamilies() throws Exception { TestStateTag tag = new TestStateTag("tag1"); WindmillStateCache.ForKey keyCache = cache.forComputation("comp1").forKey(computationKey("comp1", "key1", SHARDING_KEY), 0L, 0L); WindmillStateCache.ForKeyAndFamily family1 = keyCache.forFamily("family1"); WindmillStateCache.ForKeyAndFamily family2 = keyCache.forFamily("family2"); TestState state1 = new TestState("g1"); family1.put(StateNamespaces.global(), tag, state1, 2); assertEquals(Optional.of(state1), family1.get(StateNamespaces.global(), tag)); family1.persist(); TestState state2 = new TestState("g2"); family2.put(StateNamespaces.global(), tag, state2, 2); family2.persist(); assertEquals(Optional.of(state2), family2.get(StateNamespaces.global(), tag)); keyCache = cache.forComputation("comp1").forKey(computationKey("comp1", "key1", SHARDING_KEY), 0L, 1L); family1 = keyCache.forFamily("family1"); family2 = keyCache.forFamily("family2"); WindmillStateCache.ForKeyAndFamily family3 = keyCache.forFamily("family3"); assertEquals(Optional.of(state1), family1.get(StateNamespaces.global(), tag)); assertEquals(Optional.of(state2), family2.get(StateNamespaces.global(), tag)); assertEquals(Optional.empty(), family3.get(StateNamespaces.global(), tag)); }
public String getStyle() { return getCOSObject().getNameAsString(COSName.S, PDTransitionStyle.R.name()); }
@Test void getStyle() { PDTransition transition = new PDTransition(PDTransitionStyle.Fade); assertEquals(COSName.TRANS, transition.getCOSObject().getCOSName(COSName.TYPE)); assertEquals(PDTransitionStyle.Fade.name(), transition.getStyle()); }
@Override public Collection<String> getLogicTableNames() { return logicTableMapper; }
@Test void assertGetLogicTableMapper() { assertThat(new LinkedList<>(ruleAttribute.getLogicTableNames()), is(Collections.singletonList("foo_tbl"))); }
public static RestartBackoffTimeStrategy.Factory createRestartBackoffTimeStrategyFactory( final RestartStrategies.RestartStrategyConfiguration jobRestartStrategyConfiguration, final Configuration jobConfiguration, final Configuration clusterConfiguration, final boolean isCheckpointingEnabled) { checkNotNull(jobRestartStrategyConfiguration); checkNotNull(jobConfiguration); checkNotNull(clusterConfiguration); return getJobRestartStrategyFactory(jobRestartStrategyConfiguration) .orElse( getRestartStrategyFactoryFromConfig(jobConfiguration) .orElse( (getRestartStrategyFactoryFromConfig(clusterConfiguration) .orElse( getDefaultRestartStrategyFactory( isCheckpointingEnabled))))); }
@Test void testNoRestartStrategySpecifiedInExecutionConfig() { final Configuration conf = new Configuration(); conf.set(RestartStrategyOptions.RESTART_STRATEGY, FAILURE_RATE.getMainValue()); final RestartBackoffTimeStrategy.Factory factory = RestartBackoffTimeStrategyFactoryLoader.createRestartBackoffTimeStrategyFactory( RestartStrategies.noRestart(), conf, conf, false); assertThat(NoRestartBackoffTimeStrategy.NoRestartBackoffTimeStrategyFactory.INSTANCE) .isEqualTo(factory); }
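The loader resolves a fallback chain (job strategy, then job config, then cluster config, then default); a hedged sketch one level down, assuming RestartStrategies.fallBackRestart() defers to configuration so that the cluster-level setting wins:
@Test
void testFallbackToClusterConfiguration() {
    final Configuration clusterConf = new Configuration();
    clusterConf.set(RestartStrategyOptions.RESTART_STRATEGY, FAILURE_RATE.getMainValue());

    final RestartBackoffTimeStrategy.Factory factory =
            RestartBackoffTimeStrategyFactoryLoader.createRestartBackoffTimeStrategyFactory(
                    RestartStrategies.fallBackRestart(), new Configuration(), clusterConf, false);

    // expected: the failure-rate factory from the cluster configuration, not the default
    assertThat(factory)
            .isInstanceOf(FailureRateRestartBackoffTimeStrategy
                    .FailureRateRestartBackoffTimeStrategyFactory.class);
}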
@Nullable String getCollectionName(BsonDocument command, String commandName) { if (COMMANDS_WITH_COLLECTION_NAME.contains(commandName)) { String collectionName = getNonEmptyBsonString(command.get(commandName)); if (collectionName != null) { return collectionName; } } // Some other commands, like getMore, have a field like {"collection": collectionName}. return getNonEmptyBsonString(command.get("collection")); }
@Test void getCollectionName_notAllowListedCommandAndCollectionField() { BsonDocument command = new BsonDocument(Arrays.asList( new BsonElement("collection", new BsonString("coll")), new BsonElement("cmd", new BsonString("bar")) )); assertThat(listener.getCollectionName(command, "cmd")).isEqualTo( "coll"); // collection field wins }
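A hedged counterpart for the allow-listed path, assuming "find" is among COMMANDS_WITH_COLLECTION_NAME (it is a standard collection-scoped command):
@Test
void getCollectionName_allowListedCommand() {
    BsonDocument command = new BsonDocument("find", new BsonString("coll"));
    // the value under the command name itself is used when the command is allow-listed
    assertThat(listener.getCollectionName(command, "find")).isEqualTo("coll");
}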
@Override public Mono<SinglePage> get(String name) { return client.fetch(SinglePage.class, name); }
@Test void get() { when(client.fetch(eq(SinglePage.class), any())) .thenReturn(Mono.empty()); SinglePage singlePage = new SinglePage(); singlePage.setMetadata(new Metadata()); singlePage.getMetadata().setName("fake-single-page"); when(client.fetch(eq(SinglePage.class), eq("fake-single-page"))) .thenReturn(Mono.just(singlePage)); singlePageCommentSubject.get("fake-single-page") .as(StepVerifier::create) .expectNext(singlePage) .verifyComplete(); singlePageCommentSubject.get("fake-single-page-2") .as(StepVerifier::create) .verifyComplete(); verify(client, times(1)).fetch(eq(SinglePage.class), eq("fake-single-page")); }
@Override public boolean match(Message msg, StreamRule rule) { Double msgVal = getDouble(msg.getField(rule.getField())); if (msgVal == null) { return false; } Double ruleVal = getDouble(rule.getValue()); if (ruleVal == null) { return false; } return rule.getInverted() ^ (msgVal > ruleVal); }
@Test public void testSuccessfulDoubleMatch() { StreamRule rule = getSampleRule(); rule.setValue("1.0"); Message msg = getSampleMessage(); msg.addField("something", "1.1"); StreamRuleMatcher matcher = getMatcher(rule); assertTrue(matcher.match(msg, rule)); }
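The XOR with rule.getInverted() flips the outcome; a minimal sketch reusing the helpers from the test above, assuming StreamRule exposes setInverted:
@Test
public void testInvertedDoubleMatch() {
    StreamRule rule = getSampleRule();
    rule.setValue("1.0");
    rule.setInverted(true);
    Message msg = getSampleMessage();
    msg.addField("something", "1.1");
    // 1.1 > 1.0 is true, so inversion turns the match into a non-match
    assertFalse(getMatcher(rule).match(msg, rule));
}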
public IsJson(Matcher<? super ReadContext> jsonMatcher) { this.jsonMatcher = jsonMatcher; }
@Test public void shouldDescribeMismatchOfValidJson() { Matcher<Object> matcher = isJson(withPathEvaluatedTo(true)); Description description = new StringDescription(); matcher.describeMismatch(BOOKS_JSON_STRING, description); assertThat(description.toString(), containsString(TestingMatchers.MISMATCHED_TEXT)); }
@Override public <T> T convert(DataTable dataTable, Type type) { return convert(dataTable, type, false); }
@Test void convert_to_list_of_unknown_type__throws_exception__register_transformer() { DataTable table = parse("", " | firstName | lastName | birthDate |", " | Annie M. G. | Schmidt | 1911-03-20 |", " | Roald | Dahl | 1916-09-13 |", " | Astrid | Lindgren | 1907-11-14 |"); CucumberDataTableException exception = assertThrows( CucumberDataTableException.class, () -> converter.convert(table, LIST_OF_AUTHOR)); assertThat(exception.getMessage(), is("" + "Can't convert DataTable to List<io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Author>.\n" + "Please review these problems:\n" + "\n" + " - There was no table entry or table row transformer registered for io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Author.\n" + " Please consider registering a table entry or row transformer.\n" + "\n" + " - There was no default table entry transformer registered to transform io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Author.\n" + " Please consider registering a default table entry transformer.\n" + "\n" + "Note: Usually solving one is enough")); }
@Override public void createPod(Pod pod) { checkNotNull(pod, ERR_NULL_POD); checkArgument(!Strings.isNullOrEmpty(pod.getMetadata().getUid()), ERR_NULL_POD_UID); k8sPodStore.createPod(pod); log.info(String.format(MSG_POD, pod.getMetadata().getName(), MSG_CREATED)); }
@Test(expected = NullPointerException.class) public void testCreateNullPod() { target.createPod(null); }
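Beyond the null check, createPod also rejects pods without a UID; a hedged sketch assuming the fabric8 PodBuilder is available:
@Test(expected = IllegalArgumentException.class)
public void testCreatePodWithoutUid() {
    // metadata without a UID trips the checkArgument guard
    Pod pod = new PodBuilder()
            .withNewMetadata().withName("pod-without-uid").endMetadata()
            .build();
    target.createPod(pod);
}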
public List<String> getGlobalWhiteAddrs() { return globalWhiteAddrs; }
@Test
public void testGetGlobalWhiteAddrs() {
    AclConfig aclConfig = new AclConfig();
    List<String> expected = Arrays.asList("192.168.1.1", "192.168.1.2");
    aclConfig.setGlobalWhiteAddrs(expected);
    assertEquals("Global white addresses should match", expected, aclConfig.getGlobalWhiteAddrs());
    assertEquals("The globalWhiteAddrs list size should be 2", 2, aclConfig.getGlobalWhiteAddrs().size());
}
public static StatementExecutorResponse execute( final ConfiguredStatement<AssertSchema> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext ) { return AssertExecutor.execute( statement.getMaskedStatementText(), statement.getStatement(), executionContext.getKsqlConfig().getInt(KSQL_ASSERT_SCHEMA_DEFAULT_TIMEOUT_MS), serviceContext, (stmt, sc) -> assertSchema( sc.getSchemaRegistryClient(), ((AssertSchema) stmt).getSubject(), ((AssertSchema) stmt).getId(), stmt.checkExists()), (str, stmt) -> new AssertSchemaEntity( str, ((AssertSchema) stmt).getSubject(), ((AssertSchema) stmt).getId(), stmt.checkExists()) ); }
@Test public void shouldFailToAssertSchemaBySubject() { // Given final AssertSchema assertSchema = new AssertSchema(Optional.empty(), Optional.of("abc"), Optional.empty(), Optional.empty(), true); final ConfiguredStatement<AssertSchema> statement = ConfiguredStatement .of(KsqlParser.PreparedStatement.of("", assertSchema), SessionConfig.of(ksqlConfig, ImmutableMap.of())); // When: final KsqlRestException e = assertThrows(KsqlRestException.class, () -> AssertSchemaExecutor.execute(statement, mock(SessionProperties.class), engine, serviceContext)); // Then: assertThat(e.getResponse().getStatus(), is(417)); assertThat(((KsqlErrorMessage) e.getResponse().getEntity()).getMessage(), is("Schema with subject name abc does not exist")); }
@CheckReturnValue protected final boolean tryEmit(int ordinal, @Nonnull Object item) { return outbox.offer(ordinal, item); }
@Test public void when_tryEmitToAll_then_emittedToAll() { // When boolean emitted = p.tryEmit(MOCK_ITEM); // Then assertTrue(emitted); validateReceptionAtOrdinals(MOCK_ITEM, ALL_ORDINALS); }
@Override public Metric root() { return new RootMetricImpl(this.threadContext, this.metrics.root(this.threadContext)); }
@Test public void testRoot() { final NamespacedMetric metrics = this.getInstance().namespace("test"); final Metric root = metrics.root(); final NamespacedMetric namespaced = root.namespace("someothernamespace"); assertThat(namespaced.namespaceName()).containsExactly("someothernamespace"); }
@Override public synchronized int read() throws IOException { checkNotClosed(); if (finished) { return -1; } file.readLock().lock(); try { int b = file.read(pos++); // it's ok for pos to go beyond size() if (b == -1) { finished = true; } else { file.setLastAccessTime(fileSystemState.now()); } return b; } finally { file.readLock().unlock(); } }
@Test public void testRead_wholeArray() throws IOException { JimfsInputStream in = newInputStream(1, 2, 3, 4, 5, 6, 7, 8); byte[] bytes = new byte[8]; assertThat(in.read(bytes)).isEqualTo(8); assertArrayEquals(bytes(1, 2, 3, 4, 5, 6, 7, 8), bytes); assertEmpty(in); }
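The focal method is the single-byte read(); a small sketch reusing the newInputStream helper from the test above, showing that the finished flag makes end-of-stream sticky:
@Test
public void testRead_singleByte() throws IOException {
    JimfsInputStream in = newInputStream(2);
    assertThat(in.read()).isEqualTo(2);
    // once the underlying file returns -1, finished is set and stays set
    assertThat(in.read()).isEqualTo(-1);
    assertThat(in.read()).isEqualTo(-1);
}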
public Object unmarshal(Exchange exchange, Document document) throws Exception { InputStream is = exchange.getIn().getMandatoryBody(InputStream.class); return unmarshal(exchange, is); }
@Test public void testXMLElementDecryptionWithoutEncryptedKey() throws Exception { if (!TestHelper.HAS_3DES) { return; } String passPhrase = "this is a test passphrase"; byte[] bytes = passPhrase.getBytes(); final byte[] keyBytes = Arrays.copyOf(bytes, 24); for (int j = 0, k = 16; j < 8;) { keyBytes[k++] = keyBytes[j++]; } context.addRoutes(new RouteBuilder() { public void configure() { from("timer://foo?period=5000&repeatCount=1") .to("language:constant:resource:classpath:org/apache/camel/component/xmlsecurity/EncryptedMessage.xml") .unmarshal() .xmlSecurity("/*[local-name()='Envelope']/*[local-name()='Body']", true, keyBytes, XMLCipher.TRIPLEDES) .to("mock:decrypted"); } }); xmlsecTestHelper.testDecryptionNoEncryptedKey(context); }
@POST @Path("/generate_regex") @Timed @ApiOperation(value = "Generates a regex that can be used as a value for a whitelist entry.") @NoAuditEvent("Utility function only.") @Consumes(MediaType.APPLICATION_JSON) public WhitelistRegexGenerationResponse generateRegex(@ApiParam(name = "JSON body", required = true) @Valid @NotNull final WhitelistRegexGenerationRequest generationRequest) { final String regex; if (generationRequest.placeholder() == null) { regex = regexHelper.createRegexForUrl(generationRequest.urlTemplate()); } else { regex = regexHelper.createRegexForUrlTemplate(generationRequest.urlTemplate(), generationRequest.placeholder()); } return WhitelistRegexGenerationResponse.create(regex); }
@Test public void generateRegexForUrl() { final WhitelistRegexGenerationRequest request = WhitelistRegexGenerationRequest.create("https://example.com/api/lookup", null); final WhitelistRegexGenerationResponse response = urlWhitelistResource.generateRegex(request); assertThat(response.regex()).isNotBlank(); }
public static int calculateDefaultNumSlots( ResourceProfile totalResourceProfile, ResourceProfile defaultSlotResourceProfile) { // For ResourceProfile.ANY in test case, return the maximum integer if (totalResourceProfile.equals(ResourceProfile.ANY)) { return Integer.MAX_VALUE; } Preconditions.checkArgument(!defaultSlotResourceProfile.equals(ResourceProfile.ZERO)); int numSlots = 0; ResourceProfile remainResource = totalResourceProfile; while (remainResource.allFieldsNoLessThan(defaultSlotResourceProfile)) { remainResource = remainResource.subtract(defaultSlotResourceProfile); numSlots += 1; } return numSlots; }
@Test void testCalculateDefaultNumSlots() { final ResourceProfile defaultSlotResource = ResourceProfile.newBuilder() .setCpuCores(1.0) .setTaskHeapMemoryMB(1) .setTaskOffHeapMemoryMB(2) .setNetworkMemoryMB(3) .setManagedMemoryMB(4) .build(); final ResourceProfile totalResource1 = defaultSlotResource.multiply(5); final ResourceProfile totalResource2 = totalResource1.merge(ResourceProfile.newBuilder().setCpuCores(0.1).build()); assertThat(SlotManagerUtils.calculateDefaultNumSlots(totalResource1, defaultSlotResource)) .isEqualTo(5); assertThat(SlotManagerUtils.calculateDefaultNumSlots(totalResource2, defaultSlotResource)) .isEqualTo(5); // For ResourceProfile.ANY in test case, return the maximum integer assertThat( SlotManagerUtils.calculateDefaultNumSlots( ResourceProfile.ANY, defaultSlotResource)) .isEqualTo(Integer.MAX_VALUE); }
@Override public boolean isReadable(Class serializableClass, Type type, Annotation[] annotations, MediaType mediaType) { return isSupportedMediaType(mediaType) && isSupportedCharset(mediaType) && isSupportedEntity(serializableClass); }
@Test public void testNonUtf8CharsetIsNotAccepted() throws Exception { Map<String, String> params = new HashMap<>(); params.put("charset", "ISO-8859"); MediaType mediaTypeWithNonSupportedCharset = new MediaType("application", "json", params); assertThat(jerseyProvider.isReadable(InstanceInfo.class, InstanceInfo.class, null, mediaTypeWithNonSupportedCharset), is(false)); }
public BlobOperationResponse uploadBlockBlob(final Exchange exchange) throws IOException { ObjectHelper.notNull(exchange, MISSING_EXCHANGE); final BlobStreamAndLength blobStreamAndLength = BlobStreamAndLength.createBlobStreamAndLengthFromExchangeBody(exchange); final BlobCommonRequestOptions commonRequestOptions = getCommonRequestOptions(exchange); if (LOG.isTraceEnabled()) { LOG.trace("Putting a block blob [{}] from exchange [{}]...", configurationProxy.getBlobName(exchange), exchange); } try { final Response<BlockBlobItem> response = client.uploadBlockBlob(blobStreamAndLength.getInputStream(), blobStreamAndLength.getStreamLength(), commonRequestOptions.getBlobHttpHeaders(), commonRequestOptions.getMetadata(), commonRequestOptions.getAccessTier(), commonRequestOptions.getContentMD5(), commonRequestOptions.getBlobRequestConditions(), commonRequestOptions.getTimeout()); return BlobOperationResponse.createWithEmptyBody(response); } finally { closeInputStreamIfNeeded(blobStreamAndLength.getInputStream()); } }
@Test void testUploadBlockBlob() throws Exception { // mocking final BlockBlobItem blockBlobItem = new BlockBlobItem("testTag", OffsetDateTime.now(), null, false, null); final HttpHeaders httpHeaders = new HttpHeaders().set("x-test-header", "123"); when(client.uploadBlockBlob(any(), anyLong(), any(), any(), any(), any(), any(), any())) .thenReturn(new ResponseBase<>(null, 200, httpHeaders, blockBlobItem, null)); final Exchange exchange = new DefaultExchange(context); exchange.getIn().setBody(new ByteArrayInputStream("test".getBytes(Charset.defaultCharset()))); // test upload with input stream final BlobOperations operations = new BlobOperations(configuration, client); final BlobOperationResponse operationResponse = operations.uploadBlockBlob(exchange); assertNotNull(operationResponse); assertTrue((boolean) operationResponse.getBody()); assertNotNull(operationResponse.getHeaders()); assertEquals("testTag", operationResponse.getHeaders().get(BlobConstants.E_TAG)); assertEquals("123", ((HttpHeaders) operationResponse.getHeaders().get(BlobConstants.RAW_HTTP_HEADERS)) .get("x-test-header").getValue()); }
public static boolean inTccBranch() { return BranchType.TCC == getBranchType(); }
@Test public void testInTccBranch() { RootContext.bind(DEFAULT_XID); assertThat(RootContext.inTccBranch()).isFalse(); RootContext.bindBranchType(BranchType.TCC); assertThat(RootContext.inTccBranch()).isTrue(); RootContext.unbindBranchType(); assertThat(RootContext.inTccBranch()).isFalse(); RootContext.unbind(); }
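Only the TCC branch type counts; a short sketch with another branch type (BranchType.AT assumed available) to show the comparison is exact:
@Test
public void testInTccBranchWithOtherBranchType() {
    RootContext.bind(DEFAULT_XID);
    RootContext.bindBranchType(BranchType.AT);
    // a non-TCC branch type must not be reported as a TCC branch
    assertThat(RootContext.inTccBranch()).isFalse();
    RootContext.unbindBranchType();
    RootContext.unbind();
}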
public final Sink sink(final Sink sink) { return new Sink() { @Override public void write(Buffer source, long byteCount) throws IOException { boolean throwOnTimeout = false; enter(); try { sink.write(source, byteCount); throwOnTimeout = true; } catch (IOException e) { throw exit(e); } finally { exit(throwOnTimeout); } } @Override public void flush() throws IOException { boolean throwOnTimeout = false; enter(); try { sink.flush(); throwOnTimeout = true; } catch (IOException e) { throw exit(e); } finally { exit(throwOnTimeout); } } @Override public void close() throws IOException { boolean throwOnTimeout = false; enter(); try { sink.close(); throwOnTimeout = true; } catch (IOException e) { throw exit(e); } finally { exit(throwOnTimeout); } } @Override public Timeout timeout() { return AsyncTimeout.this; } @Override public String toString() { return "AsyncTimeout.sink(" + sink + ")"; } }; }
@Test public void wrappedThrowsWithoutTimeout() throws Exception { Sink sink = new ForwardingSink(new Buffer()) { @Override public void write(Buffer source, long byteCount) throws IOException { throw new IOException("no timeout occurred"); } }; AsyncTimeout timeout = new AsyncTimeout(); timeout.timeout(250, TimeUnit.MILLISECONDS); Sink timeoutSink = timeout.sink(sink); try { timeoutSink.write(null, 0); fail(); } catch (IOException expected) { assertEquals("no timeout occurred", expected.getMessage()); } }
@Override public void execute(ComputationStep.Context context) { executeForBranch(treeRootHolder.getRoot()); }
@Test public void added_event_uses_language_key_in_message_if_language_not_found() { QualityProfile qp = qp(QP_NAME_1, LANGUAGE_KEY_1, new Date()); qProfileStatusRepository.register(qp.getQpKey(), ADDED); mockLanguageNotInRepository(LANGUAGE_KEY_1); mockQualityProfileMeasures(treeRootHolder.getRoot(), null, arrayOf(qp)); underTest.execute(new TestComputationStepContext()); verify(eventRepository).add(eventArgumentCaptor.capture()); verifyNoMoreInteractions(eventRepository); verifyEvent(eventArgumentCaptor.getValue(), "Use \"" + qp.getQpName() + "\" (" + qp.getLanguageKey() + ")", null, null); }
@Override public InterpreterResult interpret(final String st, final InterpreterContext context) throws InterpreterException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("st:\n{}", st); } final FormType form = getFormType(); RemoteInterpreterProcess interpreterProcess = null; try { interpreterProcess = getOrCreateInterpreterProcess(); } catch (IOException e) { throw new InterpreterException(e); } if (!interpreterProcess.isRunning()) { return new InterpreterResult(InterpreterResult.Code.ERROR, "Interpreter process is not running\n" + interpreterProcess.getErrorMessage()); } return interpreterProcess.callRemoteFunction(client -> { RemoteInterpreterResult remoteResult = client.interpret( sessionId, className, st, convert(context)); Map<String, Object> remoteConfig = (Map<String, Object>) GSON.fromJson( remoteResult.getConfig(), new TypeToken<Map<String, Object>>() { }.getType()); context.getConfig().clear(); if (remoteConfig != null) { context.getConfig().putAll(remoteConfig); } GUI currentGUI = context.getGui(); GUI currentNoteGUI = context.getNoteGui(); if (form == FormType.NATIVE) { GUI remoteGui = GUI.fromJson(remoteResult.getGui()); GUI remoteNoteGui = GUI.fromJson(remoteResult.getNoteGui()); currentGUI.clear(); currentGUI.setParams(remoteGui.getParams()); currentGUI.setForms(remoteGui.getForms()); currentNoteGUI.setParams(remoteNoteGui.getParams()); currentNoteGUI.setForms(remoteNoteGui.getForms()); } else if (form == FormType.SIMPLE) { final Map<String, Input> currentForms = currentGUI.getForms(); final Map<String, Object> currentParams = currentGUI.getParams(); final GUI remoteGUI = GUI.fromJson(remoteResult.getGui()); final Map<String, Input> remoteForms = remoteGUI.getForms(); final Map<String, Object> remoteParams = remoteGUI.getParams(); currentForms.putAll(remoteForms); currentParams.putAll(remoteParams); } return convert(remoteResult); } ); }
@Test void testEnvironmentAndProperty() throws InterpreterException { interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); interpreterSetting.setProperty("ENV_1", "VALUE_1"); interpreterSetting.setProperty("property_1", "value_1"); final Interpreter interpreter1 = interpreterSetting.getInterpreter("user1", note1Id, "get"); final InterpreterContext context1 = createDummyInterpreterContext(); assertEquals("VALUE_1", interpreter1.interpret("getEnv ENV_1", context1).message().get(0).getData()); assertEquals("null", interpreter1.interpret("getEnv ENV_2", context1).message().get(0).getData()); assertEquals("value_1", interpreter1.interpret("getProperty property_1", context1).message().get(0).getData()); assertEquals("null", interpreter1.interpret("getProperty not_existed_property", context1).message().get(0).getData()); }
@Override public ConsumerBuilder<T> priorityLevel(int priorityLevel) { checkArgument(priorityLevel >= 0, "priorityLevel needs to be >= 0"); conf.setPriorityLevel(priorityLevel); return this; }
@Test(expectedExceptions = IllegalArgumentException.class) public void testConsumerBuilderImplWhenPriorityLevelPropertyIsNegative() { consumerBuilderImpl.priorityLevel(-1); }
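Since the guard is priorityLevel >= 0, zero is the accepted boundary; a hedged sketch with the same consumerBuilderImpl fixture:
@Test
public void testConsumerBuilderImplWhenPriorityLevelPropertyIsZero() {
    // boundary value: 0 passes the >= 0 check and is simply stored on the conf
    consumerBuilderImpl.priorityLevel(0);
}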
Future<RecordMetadata> send(final ProducerRecord<byte[], byte[]> record, final Callback callback) { maybeBeginTransaction(); try { return producer.send(record, callback); } catch (final KafkaException uncaughtException) { if (isRecoverable(uncaughtException)) { // producer.send() call may throw a KafkaException which wraps a FencedException, // in this case we should throw its wrapped inner cause so that it can be // captured and re-wrapped as TaskMigratedException throw new TaskMigratedException( formatException("Producer got fenced trying to send a record"), uncaughtException.getCause() ); } else { throw new StreamsException( formatException(String.format("Error encountered trying to send record to topic %s", record.topic())), uncaughtException ); } } }
@Test public void shouldFailOnSendFatal() { nonEosMockProducer.sendException = new RuntimeException("KABOOM!"); final RuntimeException thrown = assertThrows( RuntimeException.class, () -> nonEosStreamsProducer.send(record, null) ); assertThat(thrown.getMessage(), is("KABOOM!")); }
public static void refreshSuperUserGroupsConfiguration() { //load server side configuration; refreshSuperUserGroupsConfiguration(new Configuration()); }
@Test public void testWildcardUser() { Configuration conf = new Configuration(); conf.set( DefaultImpersonationProvider.getTestProvider(). getProxySuperuserUserConfKey(REAL_USER_NAME), "*"); conf.set( DefaultImpersonationProvider.getTestProvider(). getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); // First try proxying a user that's allowed UserGroupInformation realUserUgi = UserGroupInformation .createRemoteUser(REAL_USER_NAME); UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting( AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES); // From good IP assertAuthorized(proxyUserUgi, "1.2.3.4"); // From bad IP assertNotAuthorized(proxyUserUgi, "1.2.3.5"); // Now try proxying a different user (just to make sure we aren't getting spill over // from the other test case!) realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME); proxyUserUgi = UserGroupInformation.createProxyUserForTesting( PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES); // From good IP assertAuthorized(proxyUserUgi, "1.2.3.4"); // From bad IP assertNotAuthorized(proxyUserUgi, "1.2.3.5"); }
public static Map<String, String> fromEnvironment() { Map<String, String> p = System.getenv(); return new CucumberPropertiesMap(p); }
@Test void looks_up_value_from_environment() { Map<String, String> properties = CucumberProperties.fromEnvironment(); String path = properties.get("PATH"); if (path == null) { // on some Windows flavors, the PATH environment variable is named // "Path" path = properties.get("Path"); } assertThat(path, is(notNullValue())); }
public JmxCollector register() { return register(PrometheusRegistry.defaultRegistry); }
@Test public void testCompositeData() throws Exception { JmxCollector jc = new JmxCollector( "\n---\nrules:\n- pattern: `io.prometheus.jmx.test<name=PerformanceMetricsMBean><PerformanceMetrics>.*`\n attrNameSnakeCase: true" .replace('`', '"')) .register(prometheusRegistry); Double value = getSampleValue( "io_prometheus_jmx_test_PerformanceMetricsMBean_PerformanceMetrics_active_sessions", new String[] {}, new String[] {}); assertEquals(Double.valueOf(2), value); value = getSampleValue( "io_prometheus_jmx_test_PerformanceMetricsMBean_PerformanceMetrics_bootstraps", new String[] {}, new String[] {}); assertEquals(Double.valueOf(4), value); value = getSampleValue( "io_prometheus_jmx_test_PerformanceMetricsMBean_PerformanceMetrics_bootstraps_deferred", new String[] {}, new String[] {}); assertEquals(Double.valueOf(6), value); }